17 changes: 12 additions & 5 deletions CMakeLists.txt
@@ -49,12 +49,19 @@ if(NOT SKBUILD)
 
 add_library(MultiNEAT SHARED ${SOURCE_FILES})
 
+# target_include_directories(
+# MultiNEAT
+# PRIVATE
+# PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
+# $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/multineat>
+# cereal/include)
+
 target_include_directories(
-MultiNEAT
-PRIVATE
-PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
-$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/multineat>
-cereal/include)
+MultiNEAT
+PRIVATE
+PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
+$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/multineat>
+$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cereal/include>)
 
 # Not doing this anymore because not all package managers have it.
 # target_link_libraries(MultiNEAT PUBLIC cereal::cereal)
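The two comment lines above record the reasoning for vendoring cereal as headers instead of linking an imported cereal::cereal target: not every package manager ships the CMake package, and cereal is header-only, so an include directory is all that is required. A minimal sketch of that point, not part of this PR and using a hypothetical Gene struct:

#include <sstream>
#include <vector>
#include <cereal/archives/binary.hpp>
#include <cereal/types/vector.hpp>

// Hypothetical payload type; any serializable struct would do.
struct Gene
{
    int innovation;
    double weight;

    template <class Archive>
    void serialize(Archive &ar) { ar(innovation, weight); }
};

int main()
{
    std::vector<Gene> genes{{1, 0.5}, {2, -0.25}};

    std::stringstream ss;
    {
        cereal::BinaryOutputArchive out(ss); // header-only: nothing to link against
        out(genes);
    }

    std::vector<Gene> restored;
    {
        cereal::BinaryInputArchive in(ss);
        in(restored);
    }
    return restored.size() == genes.size() ? 0 : 1;
}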
12 changes: 6 additions & 6 deletions src/Genome.cpp
@@ -291,7 +291,7 @@ namespace NEAT
 
 m_ID = a_ID;
 int t_innovnum = 1, t_nnum = 1;
-
+
 // override seed_type if 0 hidden units are specified
 if ((a_SeedType == 1) && (a_NumHidden == 0))
 {
@@ -487,7 +487,7 @@ namespace NEAT
 // Start very minimally - connect a random input to each output
 // Also connect the bias to every output
 for (unsigned int i = 0; i < a_NumOutputs; i++)
-{
+{
 int t_inp_id = t_RNG.RandInt(1, a_NumInputs - 1);
 int t_bias_id = a_NumInputs;
 int t_outp_id = a_NumInputs + 1 + i;
@@ -807,7 +807,7 @@ namespace NEAT
 bool Genome::HasLoops()
 {
 NeuralNetwork net;
-BuildPhenotype(net);
+BuildCPPN(net);
 
 // convert the net to a Boost::Graph object
 Graph graph(NumNeurons());
@@ -837,7 +837,7 @@
 
 
 // This builds a fastnetwork structure out from the genome
-void Genome::BuildPhenotype(NeuralNetwork &a_Net) const
+void Genome::BuildCPPN(NeuralNetwork &a_Net) const
 {
 // first clear out the network
 a_Net.Clear();
@@ -989,7 +989,7 @@ namespace NEAT
 // Begin querying the CPPN
 // Create the neural network that will represent the CPPN
 NeuralNetwork t_temp_phenotype(true, rng);
-BuildPhenotype(t_temp_phenotype);
+BuildCPPN(t_temp_phenotype);
 t_temp_phenotype.Flush();
 
 // To ensure network relaxation
@@ -3669,7 +3669,7 @@ namespace NEAT
 
 
 NeuralNetwork t_temp_phenotype(true, rng);
-BuildPhenotype(t_temp_phenotype);
+BuildCPPN(t_temp_phenotype);
 
 // Find Inputs to Hidden connections.
 for (unsigned int i = 0; i < input_count; i++)
2 changes: 1 addition & 1 deletion src/Genome.h
@@ -294,7 +294,7 @@ namespace NEAT
 void SetOffspringAmount(double a_oa);
 
 // This builds a fastnetwork structure out from the genome
-void BuildPhenotype(NeuralNetwork &net) const;
+void BuildCPPN(NeuralNetwork &net) const;
 
 // Projects the phenotype's weights back to the genome
 void DerivePhenotypicChanges(NeuralNetwork &a_Net);
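Because BuildPhenotype is renamed rather than kept as an alias, every call site has to switch to BuildCPPN, as the test changes below show. A minimal C++ sketch of a migrated caller; the header names and the Input()/Output() calls are assumptions carried over from the Python tests in this PR, not something this hunk defines:

#include <vector>

#include "Genome.h"
#include "NeuralNetwork.h"

// Hypothetical helper: evaluate an already-constructed genome once.
double EvaluateOnce(NEAT::Genome &g)
{
    NEAT::NeuralNetwork net;
    g.BuildCPPN(net);                      // was: g.BuildPhenotype(net)

    net.Flush();                           // reset activations before querying
    std::vector<double> inputs{0.0, 0.0, 1.0};
    net.Input(inputs);                     // assumed API, mirrored from the Python tests
    net.Activate();

    return net.Output()[0];                // assumed API: first output neuron
}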
13 changes: 7 additions & 6 deletions src/NeuralNetwork.cpp
@@ -263,10 +263,11 @@ NeuralNetwork::NeuralNetwork()
 // clean up other neuron data as well
 for (unsigned int i = 0; i < m_neurons.size(); i++)
 {
-m_neurons[i].m_a = 1;
+m_neurons[i].m_a = 0.5; // default 1
 m_neurons[i].m_b = 0;
-m_neurons[i].m_timeconst = m_neurons[i].m_bias =
-m_neurons[i].m_membrane_potential = 0;
+m_neurons[i].m_timeconst = 0;
+m_neurons[i].m_bias = 0;
+m_neurons[i].m_membrane_potential = 0;
 }
 Clear();
 }
@@ -339,15 +340,15 @@ void NeuralNetwork::Activate()
 // this will happen.
 for (unsigned int i = 0; i < m_connections.size(); i++)
 {
-m_neurons[m_connections[i].m_target_neuron_idx].m_activesum +=
-m_connections[i].m_signal;
+m_neurons[m_connections[i].m_target_neuron_idx].m_activesum += m_connections[i].m_signal;
 }
 // Now loop nodes_activesums, pass the signals through the activation function
 // and store the result back to nodes_activations
 // also skip inputs since they do not get an activation
-for (unsigned int i = m_num_inputs; i < m_neurons.size(); i++)
+for (unsigned int i = m_num_inputs; i < m_neurons.size(); i++)
 {
 double x = m_neurons[i].m_activesum;
+// std::cout << "active sum, i.e x = " << x << std::endl;
 m_neurons[i].m_activesum = 0;
 // Apply the activation function
 double y = 0.0;
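For readers skimming the Activate() hunk above, the loops it edits implement a two-phase update: first every connection adds its signal into its target neuron's m_activesum, then every non-input neuron squashes that sum through its activation function. The following stand-alone sketch restates that flow with stand-in types; these are not the library's real Neuron/Connection classes, and the logistic function is only a placeholder for whatever activation the neuron is configured with:

#include <cmath>
#include <cstddef>
#include <vector>

// Stand-in types; the real classes carry many more fields.
struct Neuron     { double activesum = 0.0; double activation = 0.0; };
struct Connection { std::size_t source = 0; std::size_t target = 0; double weight = 0.0; };

void Activate(std::vector<Neuron> &neurons,
              const std::vector<Connection> &connections,
              std::size_t num_inputs)
{
    // Phase 1: every connection delivers source activation * weight to its target's sum.
    for (const Connection &c : connections)
    {
        neurons[c.target].activesum += neurons[c.source].activation * c.weight;
    }

    // Phase 2: squash the sums. Inputs are skipped because their activation is set
    // directly by the input routine and never goes through an activation function.
    for (std::size_t i = num_inputs; i < neurons.size(); i++)
    {
        double x = neurons[i].activesum;
        neurons[i].activesum = 0.0;                           // reset for the next tick
        neurons[i].activation = 1.0 / (1.0 + std::exp(-x));   // placeholder: logistic
    }
}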
2 changes: 1 addition & 1 deletion src/python/Genome.cpp
@@ -43,7 +43,7 @@ void export_Genome(pybind11::module_& mod) {
 
 .def("PrintAllTraits", &Genome::PrintAllTraits)
 
-.def("BuildPhenotype", &Genome::BuildPhenotype)
+.def("BuildCPPN", &Genome::BuildCPPN)
 .def("BuildHyperNEATPhenotype", &Genome::BuildHyperNEATPhenotype)
 .def("BuildESHyperNEATPhenotype", &Genome::BuildESHyperNEATPhenotype)
 
5 changes: 5 additions & 0 deletions test_multineat.py
@@ -0,0 +1,5 @@
+import multineat
+
+genotype = multineat.Genome()
+
+print(genotype)
2 changes: 1 addition & 1 deletion tests/python/test_raw_genome.py
@@ -65,7 +65,7 @@ def parameters():
 return params
 
 def activate_network(self, genome, _input=None):
-genome.BuildPhenotype(self._net)
+genome.BuildCPPN(self._net)
 _input = np.array([1, 2, 3], dtype=float) if _input is None else _input
 self._net.Input(_input)
 self._net.ActivateAllLayers()
2 changes: 1 addition & 1 deletion tests/python/test_xor.py
@@ -14,7 +14,7 @@ class XorTest(unittest.TestCase):
 def test_xor(self):
 def evaluate(genome):
 net = NEAT.NeuralNetwork()
-genome.BuildPhenotype(net)
+genome.BuildCPPN(net)
 
 error = 0
 
2 changes: 1 addition & 1 deletion tests/xor/main.cpp
@@ -29,7 +29,7 @@ double xortest(Genome& g)
 double f = 0;
 
 NeuralNetwork net;
-g.BuildPhenotype(net);
+g.BuildCPPN(net);
 
 static const std::vector< std::vector< double > > inputs {
 {0.0,0.0,1.0},