new file: class2_pybrain.py
	new file:   class_pybrain.py
	modified:   iris_dataset.py
	modified:   neuralnets_functions.py
cammneto committed May 18, 2022
1 parent eb8b171 commit a2e0d13
Showing 5 changed files with 60 additions and 5 deletions.
Binary file modified __pycache__/neuralnets_functions.cpython-310.pyc
Binary file not shown.
30 changes: 30 additions & 0 deletions class2_pybrain.py
@@ -0,0 +1,30 @@
import pybrain

from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised import BackpropTrainer
from pybrain.structure.modules import SigmoidLayer

# XOR network: 2 inputs, 3 hidden units, 1 sigmoid output, no bias units
network = buildNetwork(2, 3, 1, outclass=SigmoidLayer, bias=False)

# XOR truth table as a supervised dataset (2 inputs, 1 target)
dataset = SupervisedDataSet(2, 1)
dataset.addSample((0, 0), (0,))
dataset.addSample((0, 1), (1,))
dataset.addSample((1, 0), (1,))
dataset.addSample((1, 1), (0,))

optimizer = BackpropTrainer(module=network, dataset=dataset, learningrate=0.3)
epochs = 5000
error = []
for epoch in range(epochs):
    # train() runs one epoch over the dataset and returns the average error
    error_average = optimizer.train()
    if epoch % 1000 == 0:
        print('Epoch: ' + str(epoch + 1) + ' Error: ' + str(error_average))
    error.append(error_average)
print('Epoch: ' + str(epoch + 1) + ' Error: ' + str(error_average))

import matplotlib.pyplot as plt
plt.xlabel('Epoch')
plt.ylabel('Error')
plt.plot(error)
plt.show()
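As a sanity check after training, the learned XOR mapping can be read off with PyBrain's activate method. A minimal sketch, not part of the commit (rounding at 0.5 to turn the sigmoid output into a class is an assumption):

# Sketch only: query the trained network on all four XOR inputs.
for sample in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    output = network.activate(sample)[0]   # sigmoid activation of the single output unit
    print(sample, '->', round(output))     # round() applies a 0.5 decision threshold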
26 changes: 26 additions & 0 deletions class_pybrain.py
@@ -0,0 +1,26 @@
import pybrain

from pybrain.structure import FeedForwardNetwork
from pybrain.structure import SigmoidLayer, LinearLayer, BiasUnit
from pybrain.structure import FullConnection

network = FeedForwardNetwork()
input_layer = LinearLayer(2)
hidden_layer = SigmoidLayer(3)
output_layer = SigmoidLayer(1)
bias0 = BiasUnit()
bias1 = BiasUnit()

# register the layers with the network (input and output layers must be declared as such)
network.addInputModule(input_layer)
network.addModule(hidden_layer)
network.addOutputModule(output_layer)
network.addModule(bias0)
network.addModule(bias1)

input_to_hidden = FullConnection(input_layer, hidden_layer)
hidden_to_output = FullConnection(hidden_layer, output_layer)
bias_hidden = FullConnection(bias0, hidden_layer)
bias_output = FullConnection(bias1, output_layer)

# connections must also be added to the network before sorting
network.addConnection(input_to_hidden)
network.addConnection(hidden_to_output)
network.addConnection(bias_hidden)
network.addConnection(bias_output)

network.sortModules()
print(input_to_hidden.params)
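For reference, once sortModules() has run, the hand-built network behaves like the one returned by buildNetwork. A minimal sketch, not part of the commit:

print(network)                    # lists the registered modules and connections
print(network.activate([1, 0]))   # forward pass; weights are still random, so the value is arbitrary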
5 changes: 2 additions & 3 deletions iris_dataset.py
@@ -36,7 +36,6 @@
input_x_delta0 = (input_layer.T).dot(delta_hidden_layer)
weights0 = weights0 + (input_x_delta0*learning_rate)
print('Epoch: ' + str(epoch + 1) + ' Error: ' + str(average))
#print(inputs)
#for i in range(len(inputs)):
print(iris.target_names[int(round(nnf.sigmoid_output(50,weights0,weights1)))])

for i in inputs:
    print(iris.target_names[abs(int(round(nnf.sigmoid_output(i,weights0,weights1))))])
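The appended loop turns each sigmoid activation into an iris class by rounding it and indexing scikit-learn's iris.target_names. A minimal illustration of that mapping, using an assumed activation value:

# Illustration only: how a rounded sigmoid output selects a class name.
from sklearn import datasets

iris = datasets.load_iris()
output = 0.87                                        # assumed sigmoid activation in [0, 1]
print(iris.target_names[abs(int(round(output)))])    # index 1 -> 'versicolor'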
4 changes: 2 additions & 2 deletions neuralnets_functions.py
@@ -19,6 +19,6 @@ def sigmoid_derivative(sigmoid):
    return sigmoid*(1 - sigmoid)

def sigmoid_output(instance,weights0,weights1):
-    hidden_layer = sigmoid_derivative(np.dot(instance, weights0))
-    output_layer = sigmoid_derivative(np.dot(hidden_layer, weights1))
+    hidden_layer = sigmoid(np.dot(instance, weights0))
+    output_layer = sigmoid(np.dot(hidden_layer, weights1))
    return output_layer[0]
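The fix swaps sigmoid_derivative for sigmoid in the forward pass: activations are computed with the sigmoid itself, while the derivative belongs only in the backpropagation step. A minimal self-contained sketch of the corrected forward pass, with assumed layer sizes and random weights for illustration:

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

rng = np.random.default_rng(0)
weights0 = rng.random((4, 3))                         # assumed 4 inputs -> 3 hidden units
weights1 = rng.random((3, 1))                         # assumed 3 hidden units -> 1 output
instance = np.array([5.1, 3.5, 1.4, 0.2])             # one example iris sample

hidden_layer = sigmoid(np.dot(instance, weights0))    # sigmoid, not its derivative
output_layer = sigmoid(np.dot(hidden_layer, weights1))
print(output_layer[0])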
