Skip to content

Commit

Permalink
Cleaned up imports
Browse files Browse the repository at this point in the history
  • Loading branch information
eriklindernoren committed Sep 20, 2017
1 parent 5f7d7f4 commit 2067d12
Show file tree
Hide file tree
Showing 57 changed files with 132 additions and 395 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ but rather to present the inner workings of them in a transparent way.
- [Deep Q-Network](mlfromscratch/reinforcement_learning/deep_q_network.py)

### Deep Learning
+ [Base Class](mlfromscratch/deep_learning/neural_network.py)
+ [Neural Network](mlfromscratch/deep_learning/neural_network.py)
+ [Layers](mlfromscratch/deep_learning/layers.py)
* Activation Layer
* Average Pooling Layer
Expand Down
8 changes: 3 additions & 5 deletions mlfromscratch/deep_learning/activation_functions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import numpy as np
import sys

# Collection of activation functions
# Reference: https://en.wikipedia.org/wiki/Activation_function
Expand Down Expand Up @@ -31,7 +30,6 @@ def function(self, x):
return 2 / (1 + np.exp(-2*x)) - 1

def gradient(self, x):
# Avoid overflow for large inputs
return 1 - np.power(self.function(x), 2)

class ReLU():
Expand Down Expand Up @@ -64,8 +62,8 @@ def gradient(self, x):
return np.where(x >= 0.0, 1, self.function(x) + self.alpha)

class SELU():
# Reference : https://arxiv.org/abs/1706.02515,
# https://github.com/bioinf-jku/SNNs/blob/master/SelfNormalizingNetworks_MLP_MNIST.ipynb
# Reference : https://arxiv.org/abs/1706.02515,
# https://github.com/bioinf-jku/SNNs/blob/master/SelfNormalizingNetworks_MLP_MNIST.ipynb
def __init__(self):
self.alpha = 1.6732632423543772848170429916717
self.scale = 1.0507009873554804934193349852946
Expand All @@ -83,5 +81,5 @@ def function(self, x):
return np.log(1 + np.exp(x))

def gradient(self, x):
return Sigmoid().function(x)
return 1 / (1 + np.exp(-x))

5 changes: 2 additions & 3 deletions mlfromscratch/deep_learning/layers.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@

from __future__ import print_function, division
import sys
import os
import math
import numpy as np
import copy
from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, LeakyReLU, TanH, ELU, SELU, Softmax
from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, LeakyReLU
from mlfromscratch.deep_learning.activation_functions import TanH, ELU, SELU, Softmax


class Layer(object):
Expand Down
2 changes: 1 addition & 1 deletion mlfromscratch/deep_learning/loss_functions.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from __future__ import division
import numpy as np
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils import accuracy_score
from mlfromscratch.deep_learning.activation_functions import Sigmoid

class Loss(object):
Expand Down
40 changes: 12 additions & 28 deletions mlfromscratch/deep_learning/neural_network.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,8 @@
from __future__ import print_function
from terminaltables import AsciiTable
import copy
import numpy as np
import progressbar

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, to_categorical, normalize
from mlfromscratch.utils.data_manipulation import get_random_subsets, shuffle_data, batch_iterator
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils import batch_iterator
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
from mlfromscratch.utils.misc import bar_widgets

Expand All @@ -30,7 +25,7 @@ def __init__(self, optimizer, loss=CrossEntropy, validation_data=None):
self.layers = []
self.errors = {"training": [], "validation": []}
self.loss_function = loss()

self.validation_set = False
if validation_data:
self.validation_set = True
Expand All @@ -47,50 +42,41 @@ def add(self, layer):
# to the output shape of the last added layer
if self.layers:
layer.set_input_shape(shape=self.layers[-1].output_shape())

        # If the layer has weights that need to be initialized
if hasattr(layer, 'initialize'):
layer.initialize(optimizer=self.optimizer)

# Add layer to the network
self.layers.append(layer)

def train_on_batch(self, X, y):
# Calculate output
y_pred = self._forward_pass(X)
# Calculate the training loss
# Calculate the loss and accuracy of the prediction
loss = np.mean(self.loss_function.loss(y, y_pred))
acc = self.loss_function.acc(y, y_pred)
# Calculate the gradient of the loss function wrt y_pred
loss_grad = self.loss_function.gradient(y, y_pred)
# Calculate the accuracy of the prediction
acc = self.loss_function.acc(y, y_pred)
# Backprop. Update weights
# Backpropagate. Update weights
self._backward_pass(loss_grad=loss_grad)

return loss, acc


def fit(self, X, y, n_epochs, batch_size):

n_samples = np.shape(X)[0]
n_batches = int(n_samples / batch_size)

bar = progressbar.ProgressBar(widgets=bar_widgets)
for _ in bar(range(n_epochs)):
idx = range(n_samples)
np.random.shuffle(idx)

batch_t_error = 0 # Mean batch training error
batch_error = 0
for X_batch, y_batch in batch_iterator(X, y, batch_size=batch_size):
loss, _ = self.train_on_batch(X_batch, y_batch)
batch_t_error += loss
batch_error += loss

self.errors["training"].append(batch_error / n_batches)

# Save the epoch mean error
self.errors["training"].append(batch_t_error / n_batches)
if self.validation_set:
# Determine validation error
y_val_p = self._forward_pass(self.X_val)
validation_loss = np.mean(self.loss_function.loss(self.y_val, y_val_p))
y_val_pred = self._forward_pass(self.X_val)
validation_loss = np.mean(self.loss_function.loss(self.y_val, y_val_pred))
self.errors["validation"].append(validation_loss)

return self.errors["training"], self.errors["validation"]
Expand All @@ -116,7 +102,7 @@ def summary(self, name="Model Summary"):
print (AsciiTable([[name]]).table)
# Network input shape (first layer's input shape)
print ("Input Shape: %s" % str(self.layers[0].input_shape))
# Get each layer's configuration
# Iterate through network and get each layer's configuration
table_data = [["Layer Type", "Parameters", "Output Shape"]]
tot_params = 0
for layer in self.layers:
Expand All @@ -125,10 +111,8 @@ def summary(self, name="Model Summary"):
out_shape = layer.output_shape()
table_data.append([layer_name, str(params), str(out_shape)])
tot_params += params

# Print network configuration table
print (AsciiTable(table_data).table)

print ("Total Parameters: %d\n" % tot_params)

def predict(self, X):
Expand Down
2 changes: 1 addition & 1 deletion mlfromscratch/deep_learning/optimizers.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import numpy as np
from mlfromscratch.utils.data_manipulation import make_diagonal, normalize
from mlfromscratch.utils import make_diagonal, normalize

# Optimizers for models that use gradient based methods for finding the
# weights that minimize the loss.
Expand Down
8 changes: 3 additions & 5 deletions mlfromscratch/examples/convolutional_neural_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,12 @@

# Import helper functions
from mlfromscratch.deep_learning import NeuralNetwork
from mlfromscratch.utils.data_manipulation import train_test_split, to_categorical, normalize
from mlfromscratch.utils.data_manipulation import get_random_subsets, shuffle_data
from mlfromscratch.utils import train_test_split, to_categorical, normalize
from mlfromscratch.utils import get_random_subsets, shuffle_data, Plot
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.deep_learning.optimizers import StochasticGradientDescent, Adam, RMSprop, Adagrad, Adadelta
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
from mlfromscratch.utils.misc import bar_widgets
from mlfromscratch.utils import Plot
from mlfromscratch.deep_learning.layers import Dense, Dropout, Conv2D, Flatten, Activation, MaxPooling2D
from mlfromscratch.deep_learning.layers import AveragePooling2D, ZeroPadding2D, BatchNormalization, RNN

Expand All @@ -25,7 +24,7 @@ def main():
# Conv Net
#----------

optimizer = Adadelta()
optimizer = Adam()

data = datasets.load_digits()
X = data.data
Expand Down Expand Up @@ -62,7 +61,6 @@ def main():
clf.add(BatchNormalization())
clf.add(Dense(10))
clf.add(Activation('softmax'))

print ()
clf.summary(name="ConvNet")

Expand Down
6 changes: 2 additions & 4 deletions mlfromscratch/examples/decision_tree_classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,8 @@
import os

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, standardize
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils.data_operation import mean_squared_error, calculate_variance
from mlfromscratch.utils import Plot
from mlfromscratch.utils import train_test_split, standardize, accuracy_score
from mlfromscratch.utils import mean_squared_error, calculate_variance, Plot
from mlfromscratch.supervised_learning import ClassificationTree

def main():
Expand Down
12 changes: 3 additions & 9 deletions mlfromscratch/examples/decision_tree_regressor.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
from __future__ import division, print_function
import numpy as np
from sklearn import datasets
import matplotlib.pyplot as plt
import pandas as pd
import sys
import os

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, standardize
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils.data_operation import mean_squared_error, calculate_variance
from mlfromscratch.utils import Plot

from mlfromscratch.utils import train_test_split, standardize, accuracy_score
from mlfromscratch.utils import mean_squared_error, calculate_variance, Plot
from mlfromscratch.supervised_learning import RegressionTree

def main():
Expand Down
10 changes: 1 addition & 9 deletions mlfromscratch/examples/deep_q_network.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,6 @@
from __future__ import print_function
import sys
import os
import math
import random
import numpy as np
import progressbar
import gym
from collections import deque

from mlfromscratch.utils.data_manipulation import to_categorical
from mlfromscratch.utils import to_categorical
from mlfromscratch.deep_learning.optimizers import Adam
from mlfromscratch.deep_learning.loss_functions import SquareLoss
from mlfromscratch.deep_learning.layers import Dense, Dropout, Flatten, Activation, Reshape, BatchNormalization
Expand Down
8 changes: 3 additions & 5 deletions mlfromscratch/examples/demo.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
from __future__ import print_function
import sys, os
from sklearn import datasets
import numpy as np
import pandas as pd
import math
import matplotlib.pyplot as plt

from mlfromscratch.utils.data_manipulation import train_test_split, normalize, to_categorical
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.deep_learning.optimizers import GradientDescent, Adam
from mlfromscratch.utils import train_test_split, normalize, to_categorical, accuracy_score
from mlfromscratch.deep_learning.optimizers import Adam
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
from mlfromscratch.deep_learning.activation_functions import Softmax
from mlfromscratch.utils.kernels import *
Expand Down
7 changes: 2 additions & 5 deletions mlfromscratch/examples/gradient_boosting_classifier.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
from __future__ import division, print_function
import numpy as np
from sklearn import datasets
import sys
import os
import matplotlib.pyplot as plt

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils.loss_functions import CrossEntropy
from mlfromscratch.utils import train_test_split, accuracy_score
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
from mlfromscratch.utils import Plot
from mlfromscratch.supervised_learning import GradientBoostingClassifier

Expand Down
8 changes: 2 additions & 6 deletions mlfromscratch/examples/gradient_boosting_regressor.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,14 @@
from __future__ import division, print_function
import numpy as np
from sklearn import datasets
import pandas as pd
import matplotlib.pyplot as plt
from scipy.optimize import line_search
import progressbar

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, standardize, to_categorical
from mlfromscratch.utils.data_operation import mean_squared_error, accuracy_score
from mlfromscratch.utils import train_test_split, standardize, to_categorical
from mlfromscratch.utils import mean_squared_error, accuracy_score, Plot
from mlfromscratch.utils.loss_functions import SquareLoss
from mlfromscratch.utils.misc import bar_widgets
from mlfromscratch.supervised_learning import GradientBoostingRegressor
from mlfromscratch.utils import Plot


def main():
Expand Down
4 changes: 0 additions & 4 deletions mlfromscratch/examples/k_means.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
from __future__ import division, print_function
import sys
import os
import math
import random
from sklearn import datasets
import numpy as np

Expand Down
10 changes: 2 additions & 8 deletions mlfromscratch/examples/k_nearest_neighbors.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
from __future__ import print_function
import sys
import os
import math
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, normalize
from mlfromscratch.utils.data_operation import euclidean_distance, accuracy_score
from mlfromscratch.unsupervised_learning import PCA
from mlfromscratch.utils import Plot
from mlfromscratch.utils import train_test_split, normalize, accuracy_score
from mlfromscratch.utils import euclidean_distance, Plot
from mlfromscratch.supervised_learning import KNN

def main():
Expand Down
9 changes: 2 additions & 7 deletions mlfromscratch/examples/linear_discriminant_analysis.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,12 @@
from __future__ import print_function
import sys
import os
from sklearn import datasets
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

# Import helper functions
from mlfromscratch.supervised_learning import LDA
from mlfromscratch.utils.data_operation import calculate_covariance_matrix, accuracy_score
from mlfromscratch.utils.data_manipulation import normalize, standardize, train_test_split
from mlfromscratch.utils import calculate_covariance_matrix, accuracy_score
from mlfromscratch.utils import normalize, standardize, train_test_split, Plot
from mlfromscratch.unsupervised_learning import PCA
from mlfromscratch.utils import Plot

def main():
# Load the dataset
Expand Down
6 changes: 2 additions & 4 deletions mlfromscratch/examples/linear_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@
import pandas as pd
import matplotlib.pyplot as plt

# Import helper functions
from mlfromscratch.utils.data_manipulation import train_test_split, polynomial_features
from mlfromscratch.utils.data_operation import mean_squared_error
from mlfromscratch.utils import Plot
from mlfromscratch.utils import train_test_split, polynomial_features
from mlfromscratch.utils import mean_squared_error, Plot
from mlfromscratch.supervised_learning import LinearRegression

def main():
Expand Down
11 changes: 2 additions & 9 deletions mlfromscratch/examples/logistic_regression.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,11 @@
from __future__ import print_function
import sys
import os
import math
from sklearn import datasets
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Import helper functions
from mlfromscratch.utils.data_manipulation import make_diagonal, normalize, train_test_split
from mlfromscratch.utils.data_operation import accuracy_score
from mlfromscratch.utils.activation_functions import Sigmoid
from mlfromscratch.utils.optimizers import GradientDescent
from mlfromscratch.utils import make_diagonal, normalize, train_test_split, accuracy_score
from mlfromscratch.deep_learning.activation_functions import Sigmoid
from mlfromscratch.utils import Plot
from mlfromscratch.supervised_learning import LogisticRegression

Expand All @@ -30,7 +24,6 @@ def main():
y_pred = clf.predict(X_test)

accuracy = accuracy_score(y_test, y_pred)

print ("Accuracy:", accuracy)

# Reduce dimension to two using PCA and plot the results
Expand Down
Loading

0 comments on commit 2067d12

Please sign in to comment.