lg_reg2.asv
%% Initialization
clear ; close all; clc
%% Load Data
% The first ten columns contain the feature values (X) and the
% eleventh column contains the label (y).
trainingdata = csvread('finaldata.csv');
X = trainingdata(:, 1:10); y = trainingdata(:, 11);
%% Regularized Logistic Regression
% Feature normalization (mu and sigma are kept so the same scaling
% can be applied to the test set)
[X, mu, sigma] = featureNormalize(X);
% Initialize fitting parameters to a vector of ones, one per feature
% (note that no intercept column is added to X in this script)
initial_theta = ones(size(X, 2), 1);
display(initial_theta);
% Set regularization parameter lambda
lambda = 0.02;
% Compute and display initial cost and gradient for regularized logistic
% regression
[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);
fprintf('Cost at initial theta (ones): %f\n', cost);
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
% Tell the optimizer that the cost function also returns the gradient,
% and cap it at 400 iterations
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Minimize the regularized cost to obtain the fitted theta
[theta, J, exit_flag] = ...
    fmincg(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);
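% (fmincg is assumed here to be the conjugate-gradient minimizer bundled
% with the Coursera Machine Learning exercises; fminunc from the
% Optimization Toolbox accepts the same cost handle and options.)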
% Compute accuracy on our training set
p = predict(theta, X);
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
% Evaluate on the test set, applying the training-set normalization
testdata = csvread('testdata.csv');
X2 = testdata(:, 1:10); y2 = testdata(:, 11);
X2 = (X2 - mu) ./ sigma;
p2 = predict(theta, X2);
fprintf('Test Accuracy: %f\n', mean(double(p2 == y2)) * 100);
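
%% Helper Function Sketches
% The helper functions called above (featureNormalize, costFunctionReg,
% predict) live in separate files in the original exercise and are not
% included here. The local functions below are minimal sketches of what
% they are assumed to look like, following the standard Coursera-style
% implementations; they are illustrations, not the author's originals.

function [X_norm, mu, sigma] = featureNormalize(X)
% Z-score normalization: subtract each column's mean and divide by its
% standard deviation. mu and sigma are returned so the identical
% transform can be applied to unseen (test) data.
mu = mean(X);
sigma = std(X);
X_norm = (X - mu) ./ sigma;  % implicit expansion (R2016b+)
end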
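% Sketch of the regularized cost function; the standard version excludes
% the first parameter theta(1) from the L2 penalty.
function [J, grad] = costFunctionReg(theta, X, y, lambda)
m = length(y);
h = sigmoid(X * theta);                      % predicted probabilities
J = (1 / m) * (-y' * log(h) - (1 - y)' * log(1 - h)) ...
    + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);
grad = (1 / m) * (X' * (h - y));             % unregularized gradient
grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end

function g = sigmoid(z)
% Logistic function, applied element-wise.
g = 1 ./ (1 + exp(-z));
end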
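% Sketch of the prediction helper: classify as 1 when the predicted
% probability is at least 0.5.
function p = predict(theta, X)
p = sigmoid(X * theta) >= 0.5;
end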