
Commit

Neuroevolution: Takes model builder as parameter to enable support of arbitrary network architecture
eriklindernoren committed Oct 4, 2017
1 parent d230537 commit 7d3e274
Showing 4 changed files with 27 additions and 27 deletions.
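
Note: with this change, Neuroevolution no longer hard-codes a single-hidden-layer MLP; any architecture expressible as a NeuralNetwork can be evolved by passing a builder function. A minimal sketch of what the new parameter enables (the two-hidden-layer builder below is illustrative only, not part of this commit):

    from mlfromscratch.supervised_learning import Neuroevolution
    from mlfromscratch.deep_learning import NeuralNetwork
    from mlfromscratch.deep_learning.layers import Activation, Dense
    from mlfromscratch.deep_learning.loss_functions import CrossEntropy
    from mlfromscratch.deep_learning.optimizers import Adam

    # Hypothetical builder: maps (n_inputs, n_outputs) to a NeuralNetwork instance,
    # here with two hidden layers instead of the single hidden layer in the example.
    def deep_model_builder(n_inputs, n_outputs):
        model = NeuralNetwork(optimizer=Adam(), loss=CrossEntropy)
        model.add(Dense(64, input_shape=(n_inputs,)))
        model.add(Activation('relu'))
        model.add(Dense(32))
        model.add(Activation('relu'))
        model.add(Dense(n_outputs))
        model.add(Activation('softmax'))
        return model

    model = Neuroevolution(population_size=100,
                           mutation_rate=0.05,
                           model_builder=deep_model_builder)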
2 changes: 1 addition & 1 deletion mlfromscratch/deep_learning/neural_network.py
@@ -25,7 +25,7 @@ def __init__(self, optimizer, loss, validation_data=None):
        self.errors = {"training": [], "validation": []}
        self.loss_function = loss()
        self.progressbar = progressbar.ProgressBar(widgets=bar_widgets)
-        self.id = 0
+
        self.val_set = None
        if validation_data:
            X, y = validation_data
18 changes: 14 additions & 4 deletions mlfromscratch/examples/neuroevolution.py
@@ -6,23 +6,33 @@

from mlfromscratch.supervised_learning import Neuroevolution
from mlfromscratch.utils import train_test_split, to_categorical, normalize, Plot
-from mlfromscratch.deep_learning.optimizers import Adam
+from mlfromscratch.deep_learning import NeuralNetwork
+from mlfromscratch.deep_learning.layers import Activation, Dense
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
+from mlfromscratch.deep_learning.optimizers import Adam

def main():

    X, y = datasets.make_classification(n_samples=1000, n_features=10, n_classes=4, n_clusters_per_class=1, n_informative=2)

    y = to_categorical(y.astype("int"))

+    # Model builder
+    def model_builder(n_inputs, n_outputs):
+        model = NeuralNetwork(optimizer=Adam(), loss=CrossEntropy)
+        model.add(Dense(16, input_shape=(n_inputs,)))
+        model.add(Activation('relu'))
+        model.add(Dense(n_outputs))
+        model.add(Activation('softmax'))
+        return model
+
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4)

    model = Neuroevolution(population_size=100,
                           mutation_rate=0.05,
-                          loss=CrossEntropy)
+                          model_builder=model_builder)

-    model = model.evolve(X_train, y_train, n_generations=300)
+    model = model.evolve(X_train, y_train, n_generations=500)

    print ("Fitness: %.5f" % model.fitness)
    print ("Accuracy: %.1f%%" % float(100*model.accuracy))
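The tail of this example is truncated above. As a hedged sketch (assuming NeuralNetwork exposes the predict() method used elsewhere in the repository), evaluating the fittest individual returned by evolve() on the held-out split might look like:

    import numpy as np

    y_pred = np.argmax(model.predict(X_test), axis=1)   # class with the highest softmax output
    y_true = np.argmax(y_test, axis=1)                  # invert the one-hot encoding
    print("Test accuracy: %.1f%%" % (100 * np.mean(y_pred == y_true)))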
33 changes: 12 additions & 21 deletions mlfromscratch/supervised_learning/neuroevolution.py
@@ -3,10 +3,6 @@
import copy

from mlfromscratch.utils.misc import bar_widgets
-from mlfromscratch.deep_learning import NeuralNetwork
-from mlfromscratch.deep_learning.layers import Activation, Dense
-from mlfromscratch.deep_learning.optimizers import Adam
-

class Neuroevolution():
    """ Evolutionary optimization of Neural Networks.
@@ -17,22 +13,17 @@ class Neuroevolution():
        The number of neural networks that are allowed in the population at a time.
    mutation_rate: float
        The probability that a weight will be mutated.
-    loss: class
-        Loss function used to measure the model's performance. SquareLoss or CrossEntropy.
+    model_builder: method
+        A method which returns a user specified NeuralNetwork instance.
    """
-    def __init__(self, population_size, mutation_rate, loss):
+    def __init__(self, population_size, mutation_rate, model_builder):
        self.population_size = population_size
        self.mutation_rate = mutation_rate
-        self.loss_function = loss
-
-    def _build_mlp(self, id):
-        """ Returns a Multilayer Perceptron (a new individual in the population) """
-        model = NeuralNetwork(optimizer=Adam(), loss=self.loss_function)
-        model.add(Dense(16, input_shape=(self.X.shape[1],)))
-        model.add(Activation('relu'))
-        model.add(Dense(self.y.shape[1]))
-        model.add(Activation('softmax'))
-
+        self.model_builder = model_builder
+
+    def _build_model(self, id):
+        """ Returns a new individual """
+        model = self.model_builder(n_inputs=self.X.shape[1], n_outputs=self.y.shape[1])
        model.id = id
        model.fitness = 0
        model.accuracy = 0
@@ -43,7 +34,7 @@ def _initialize_population(self):
""" Initialization of the neural networks forming the population"""
self.population = []
for _ in range(self.population_size):
model = self._build_mlp(id=np.random.randint(1000))
model = self._build_model(id=np.random.randint(1000))
self.population.append(model)

def _mutate(self, individual, var=1):
@@ -67,9 +58,9 @@ def _inherit_weights(self, child, parent):

    def _crossover(self, parent1, parent2):
        """ Performs crossover between the neurons in parent1 and parent2 to form offspring """
-        child1 = self._build_mlp(id=parent1.id+1)
+        child1 = self._build_model(id=parent1.id+1)
        self._inherit_weights(child1, parent1)
-        child2 = self._build_mlp(id=parent2.id+1)
+        child2 = self._build_model(id=parent2.id+1)
        self._inherit_weights(child2, parent2)

        # Perform crossover
@@ -121,7 +112,7 @@ def evolve(self, X, y, n_generations):
            # The 'winners' are selected for the next generation
            next_population = [self.population[i] for i in range(n_winners)]

-            # Parents are selected as the fittest 60% in the population
+            # The fittest 60% of the population are selected as parents
            parents = [self.population[i] for i in range(self.population_size - n_winners)]
            for i in np.arange(0, len(parents), 2):
                # Perform crossover to produce offspring
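The _mutate and crossover bodies are collapsed in this view. As an illustrative sketch only (assuming Dense layers store their trainable weights in a W attribute; the actual implementation may differ), a mutation consistent with the mutation_rate docstring and the _mutate(self, individual, var=1) signature would perturb each weight with probability mutation_rate using zero-mean Gaussian noise:

    import numpy as np

    def _mutate(self, individual, var=1):
        # For every layer with trainable weights, flip a biased coin per weight
        # and add N(0, var) noise where the coin comes up heads.
        for layer in individual.layers:
            if hasattr(layer, 'W'):
                mask = np.random.binomial(1, p=self.mutation_rate, size=layer.W.shape)
                layer.W += mask * np.random.normal(loc=0, scale=var, size=layer.W.shape)
        return individual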
1 change: 0 additions & 1 deletion mlfromscratch/supervised_learning/regression.py
@@ -244,7 +244,6 @@ class ElasticNet(Regression):
    def __init__(self, degree=1, reg_factor=0.05, l1_ratio=0.5, n_iterations=3000,
                 learning_rate=0.01):
        self.degree = degree
-        # Ridge Regression
        self.regularization = l1_l2_regularization(alpha=reg_factor, l1_ratio=l1_ratio)
        super(ElasticNet, self).__init__(n_iterations,
                                         learning_rate)
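The deleted comment was a leftover mislabel: ElasticNet combines the L1 and L2 penalties rather than using ridge (pure L2) regularization. For reference, a sketch of the standard elastic net penalty that l1_l2_regularization presumably implements (its exact form is not shown in this diff):

    import numpy as np

    def elastic_net_penalty(w, alpha, l1_ratio):
        l1 = l1_ratio * np.linalg.norm(w, 1)        # lasso term, sum of |w_i|
        l2 = 0.5 * (1 - l1_ratio) * w.T.dot(w)      # ridge term, squared L2 norm
        return alpha * (l1 + l2)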
