This repository has been archived by the owner on Nov 22, 2018. It is now read-only.

Commit

Fixed GetFittestOrganism() actually always returning the least fittest. This commit is the first one completely solving the XOR Test!

janhohenheim committed Jun 8, 2016
1 parent f8600ff commit 0047446
Showing 9 changed files with 42 additions and 17 deletions.
2 changes: 1 addition & 1 deletion JNF_NEAT/genome.cpp
@@ -55,7 +55,7 @@ double Genome::GetGeneticalDistanceFrom(const Genome& other) const {
return disjointGenesInfluence + averageWeightDifference;
}

- bool Genome::DoesContainGene(const Gene & gene) const {
+ bool Genome::DoesContainGene(const Gene& gene) const {
for (auto & g : genes) {
if (g.from == gene.from && g.to == gene.to && g.isRecursive == gene.isRecursive) {
return true;
2 changes: 1 addition & 1 deletion JNF_NEAT/main.cpp
@@ -34,7 +34,7 @@ int main() {
*/
std::vector<std::vector<float>> trainingData = { { 0.0f, 0.0f },{ 0.0f, 1.0f },{ 1.0f, 0.0f },{ 1.0f, 1.0f } };
for (auto & t : trainingData) {
- std::cout << (int)(champ.GetOutputs(t).front() >= 0.5f) << "\t" << ((t.front() >= 0.5f) ^ (t.back() >= 0.5f)) << std::endl;
+ std::cout << (int)(champ.GetOutputs(t).front() >= 0.0f) << "\t" << ((t.front() >= 0.0f) ^ (t.back() >= 0.0f)) << std::endl;
}
return 0;
}
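Note on the change above: the console comparison in main.cpp now treats an output of at least 0.0f as a logical 1, matching the threshold XORSolver::Update already applies to networkOutputs.front() (visible unchanged in the xor_solver.cpp hunk further down). A minimal sketch of that shared classification rule (ClassifyXorOutput is illustrative only, not a function in the repository):

// Illustrative only: both call sites now map any non-negative output to 1.
inline int ClassifyXorOutput(float networkOutput) {
    return networkOutput >= 0.0f ? 1 : 0;
}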
8 changes: 4 additions & 4 deletions JNF_NEAT/neural_network_trainer.cpp
@@ -127,18 +127,18 @@ void NeuralNetworkTrainer::DeleteEmptySpecies() {
void NeuralNetworkTrainer::Repopulate() {
PrepareSpeciesForPopulation();
auto DidChanceOccure = [](float chance) {
- auto num = rand() % 100;
- return num < int(100.0f * chance);
+ auto num = rand() % 1000;
+ return num < int(1000.0f * chance);
};

for (auto& trainer : bodies) {
Species* sp = &SelectSpeciesToBreed();
- auto & father = sp->GetOrganismToBreed();
+ auto& father = sp->GetOrganismToBreed();
if (DidChanceOccure(parameters.advanced.reproduction.chanceForInterspecialReproduction)){
sp = &SelectSpeciesToBreed();
}
auto& mother = sp->GetOrganismToBreed();
- auto childNeuralNetwork(father.BreedWith(mother));
+ auto childNeuralNetwork(std::move(father.BreedWith(mother)));
Organism child(&*trainer, std::move(childNeuralNetwork));
FillOrganismIntoSpecies(std::move(child));
}
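Note on the lambda change above: with the old rand() % 100 draw, a chance of 0.001f (the chanceForInterspecialReproduction default in training_parameters.h) was truncated to int(100.0f * 0.001f) == 0, so num < 0 could never hold and interspecial reproduction effectively never happened. Scaling to 1000 makes the threshold 1, i.e. roughly a 1-in-1000 chance. A small sketch of the arithmetic (variable names are illustrative):

// Why the coarser resolution silently disabled the 0.1% chance.
float chance = 0.001f;                     // chanceForInterspecialReproduction
int oldThreshold = int(100.0f * chance);   // 0 -> rand() % 100  < 0 is never true
int newThreshold = int(1000.0f * chance);  // 1 -> rand() % 1000 < 1 is ~1 in 1000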
10 changes: 5 additions & 5 deletions JNF_NEAT/organism.cpp
@@ -21,11 +21,11 @@ double Organism::GetOrCalculateFitness()
}

double Organism::GetOrCalculateRawFitness() {
- if (!isFitnessUpToDate) {
- fitness = trainer->GetFitness();
- isFitnessUpToDate = true;
- }
- return fitness;
+ if (!isFitnessUpToDate) {
+ fitness = trainer->GetFitness();
+ isFitnessUpToDate = true;
+ }
+ return fitness;
}

NeuralNetwork Organism::BreedWith(Organism& partner)
(The five removed and five re-added lines are textually identical; this hunk is a whitespace-only change to their indentation.)
12 changes: 11 additions & 1 deletion JNF_NEAT/species.cpp
@@ -60,6 +60,7 @@ void Species::SetPopulationsFitnessModifier() {
void Species::ElectRepresentative() {
if (!population.empty()) {
SelectRandomRepresentative();
+ //SelectFittestOrganismAsRepresentative();
}
}

@@ -72,6 +73,15 @@ void Species::SelectRandomRepresentative() {
}
}

+ void Species::SelectFittestOrganismAsRepresentative() {
+ if (representative == nullptr) {
+ representative = std::make_unique<Organism>(GetFittestOrganism());
+ }
+ else {
+ *representative = GetFittestOrganism();
+ }
+ }
+
template <class T>
constexpr bool Species::IsAboveCompatibilityThreshold(T t) const {
return t > representative->GetTrainingParameters().advanced.speciation.compatibilityThreshold;

@@ -96,7 +106,7 @@ Organism& Species::GetFittestOrganism() {
}
if (!isSortedByFitness) {
auto CompareOrganisms = [&](Organism* lhs, Organism* rhs) {
- return lhs->GetOrCalculateFitness() < rhs->GetOrCalculateFitness();
+ return lhs->GetOrCalculateFitness() > rhs->GetOrCalculateFitness();
};
std::sort(population.begin(), population.end(), CompareOrganisms);
isSortedByFitness = true;
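This last hunk is the fix named in the commit message. GetFittestOrganism() presumably returns the front of the sorted population; the old comparator sorted fitness ascending, so the front held the least fit organism. Flipping < to > sorts descending and puts the fittest first. A minimal standalone illustration of the same idea, using plain doubles instead of Organism pointers (FittestOf is illustrative, not part of the codebase):

#include <algorithm>
#include <vector>

// Descending sort places the highest fitness at the front, so front() is the fittest.
double FittestOf(std::vector<double> fitnesses) {
    std::sort(fitnesses.begin(), fitnesses.end(),
              [](double lhs, double rhs) { return lhs > rhs; });
    return fitnesses.front(); // e.g. {0.2, 0.9, 0.5} -> 0.9
}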
1 change: 1 addition & 0 deletions JNF_NEAT/species.h
@@ -37,6 +37,7 @@ class Species {
private:
void ElectRepresentative();
void SelectRandomRepresentative();
+ void SelectFittestOrganismAsRepresentative();

template<class T>
constexpr bool IsAboveCompatibilityThreshold(T t) const;
1 change: 1 addition & 0 deletions JNF_NEAT/training_parameters.h
@@ -25,6 +25,7 @@ struct TrainingParameters {
} speciation;
struct Reproduction {
float chanceForInterspecialReproduction = 0.001f;
+ size_t minSpeciesSizeForChampConservation = 5;
} reproduction;
struct Structure {
size_t numberOfBiasNeurons = 1;
21 changes: 17 additions & 4 deletions JNF_NEAT/xor_solver.cpp
@@ -2,18 +2,16 @@

void XORSolver::Reset()
{
- correctEvaluations = 0;
currTraining = trainingData.begin();
+ results.clear();
}

void XORSolver::Update(const std::vector<float>& networkOutputs)
{
- int xorResult = currTraining->first ^ currTraining->second;
int networksXorResult = int(networkOutputs.front() >= 0.0f);

- if (xorResult == networksXorResult) {
- correctEvaluations++;
- }
+ results.push_back(networksXorResult);
++currTraining;
if (currTraining == trainingData.end()) {
currTraining = trainingData.begin();

@@ -22,6 +20,21 @@ void XORSolver::Update(const std::vector<float>& networkOutputs)

double XORSolver::GetFitness() const
{
+ size_t correctEvaluations = 0;
+ auto firstResult = results.front();
+ bool areAllResultsSame = true;
+ for (size_t i = 0; i < trainingData.size(); ++i) {
+ auto correctResult = trainingData[i].first ^ trainingData[i].second;
+ if (results[i] == correctResult) {
+ correctEvaluations++;
+ }
+ if (results[i] != firstResult) {
+ areAllResultsSame = false;
+ }
+ }
+ if (areAllResultsSame) {
+ return 0.0;
+ }
return double(correctEvaluations * correctEvaluations);
}

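The reworked fitness no longer counts answers on the fly: Update() only records each answer in results, and GetFitness() squares the number of correct answers while returning 0 whenever the network gave the same answer for every input, so a constant-output network cannot profit from the two cases it gets right for free. Worked examples of the rule, with trainingData ordered {0,0}, {0,1}, {1,0}, {1,1} (expected XOR outputs 0, 1, 1, 0):

// results = {1, 1, 1, 1}: 2 correct, but every answer identical -> fitness 0.0
// results = {0, 1, 1, 1}: 3 correct                             -> fitness 3 * 3 = 9.0
// results = {0, 1, 1, 0}: 4 correct                             -> fitness 4 * 4 = 16.0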
2 changes: 1 addition & 1 deletion JNF_NEAT/xor_solver.h
@@ -9,7 +9,7 @@ class XORSolver : public IBody {
virtual std::vector<float> ProvideNetworkWithInputs() const override;

private:
- int correctEvaluations = 0;
std::vector<std::pair<int,int>> trainingData = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
+ std::vector<int> results;
std::vector<std::pair<int,int>>::iterator currTraining = trainingData.begin();
};
