Skip to content

Commit

Permalink
runnotes
Browse files Browse the repository at this point in the history
  • Loading branch information
jcuberdruid committed Nov 4, 2023
1 parent 5479157 commit 29b4de2
Show file tree
Hide file tree
Showing 6 changed files with 27 additions and 11 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ mnist_train.csv
newclassify.py
accuracyLog.txt
old/
logs/*
*accuracyLog.txt
*.out
.vscode
12 changes: 12 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Build configuration for the MNIST training binary.
CXX = clang++
CXXFLAGS = -Ofast -std=c++20
TARGET = runmain.out
SRC = main.cpp
# main.cpp is the only translation unit compiled, so it presumably #includes
# these sources directly (the commit shows it calling Model from model.cpp and
# one_hot_encode from loadcsv.cpp) — list them as prerequisites so editing any
# of them triggers a relink. TODO(review): confirm against main.cpp's includes.
DEPS = model.cpp loadcsv.cpp utils.cpp

# Link rule: rebuild whenever the entry TU or any included source changes.
$(TARGET): $(SRC) $(DEPS)
	$(CXX) $(CXXFLAGS) -o $(TARGET) $(SRC)

# Remove the built binary; .PHONY so a file named "clean" can't shadow the rule.
.PHONY: clean
clean:
	rm -f $(TARGET)

3 changes: 0 additions & 3 deletions loadcsv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -75,11 +75,8 @@ vector<vector<int>> one_hot_encode(const vector<int>& input, int num_classes) {
encoded[i][input[i]] = 1;
} else {
cerr << "Value out of range: " << input[i] << endl;
// Handle the error according to your needs.
// Here, we just print an error message.
}
}

return encoded;
}

Expand Down
10 changes: 5 additions & 5 deletions main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@ int main()
cout << "#################################################################" << endl;
cout << "Generating Model" << endl;
cout << "#################################################################" << endl;

// Model
double learningRate = 0.00015666;
string optimizerType = "adam"; // supported: sgd, sgd_momentum, adagrad, rmsprop, adam
Model test("cross_entropy", optimizerType, learningRate);
string runNote = "tanh_256_adam";
Model test("cross_entropy", optimizerType, learningRate, runNote);

vector<CallBackFunctionType> callbacks;
test.addLayer("tanh", make_tuple(256, 784), callbacks);
Expand All @@ -40,10 +40,10 @@ int main()
vector<vector<int>> label_vec = one_hot_encode(labelNums, 10);

// subset data for faster testing:
// label_vec.resize(5000);
// images.resize(5000);
label_vec.resize(5000);
images.resize(5000);

test.teach(label_vec, images, 100);
test.teach(label_vec, images, 1);

cout << "train accuracy " << test.getLastAccuracy() << endl;

Expand Down
10 changes: 7 additions & 3 deletions model.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,9 @@ class Model
vector<int> labels;
Layer *topographyHead = NULL;
vector<CallBackFunctionType> callBacks;
string runNote;

Model(const string lossFunctionID, string optimizerType, double learningRate) : optimizerType(optimizerType), learningRate(learningRate)
Model(const string lossFunctionID, string optimizerType, double learningRate, string runNote="undefined") : optimizerType(optimizerType), learningRate(learningRate), runNote(runNote)
{
auto [lossFunc, lossDeriv] = getLossFunctions(lossFunctionID);
lossFunction = lossFunc;
Expand Down Expand Up @@ -164,7 +165,8 @@ class Model
}
cout << endl;
ofstream myfile;
myfile.open("training_accuracyLog_tanh_RMS.txt", ios::app);
string trainingLog = "logs/" + runNote + "_training.txt";
myfile.open(trainingLog, ios::app);
myfile << j << "," << float(correctPredictions) / float(totalPredictions) << "," << sumLoss / double(totalPredictions) << "\n";
sumLoss = 0.0;
myfile.close();
Expand Down Expand Up @@ -230,7 +232,8 @@ class Model
}
cout << endl;
ofstream myfile;
myfile.open("testing_accuracyLog_tanh_RMS.txt", ios::app);
string testingLog = "logs/" + runNote + "_testing.txt";
myfile.open(testingLog, ios::app);
myfile << j << "," << float(correctPredictions) / float(totalPredictions) << "," << sumLoss / double(totalPredictions) << "\n";
sumLoss = 0.0;
myfile.close();
Expand Down Expand Up @@ -299,3 +302,4 @@ class Model
}
}
};

2 changes: 2 additions & 0 deletions utils.cpp
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
#include<vector>

using namespace std;

vector<double> cast_vector_to_double(const vector<int>& int_vector) {
vector<double> double_vector;
double_vector.reserve(int_vector.size()); // Reserve space to avoid reallocations
Expand Down

0 comments on commit 29b4de2

Please sign in to comment.