Linear_regression.py
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the Coursera ML ex1 dataset: city population vs. food-truck profit.
# Use a raw string so the backslashes in the Windows path are not treated as escapes.
data = pd.read_csv(r"D:\ML exercises\CourseraML-master\CourseraML-master\ex1\data\ex1data1.txt",
                   header=None, names=["population", "profit"])
print(data.head())
# ---------------------------------- COST FUNCTION ------------------------------------ #
def compute_cost(X, y, theta):
    """Mean squared error cost: J(theta) = (1 / 2m) * sum((X * theta.T - y) ** 2)."""
    inner = np.power((X * theta.T) - y, 2)
    return np.sum(inner) / (2 * len(X))
# Prepend a column of ones so theta[0] acts as the intercept term.
data.insert(0, "ones", 1)
cols = data.shape[1]
X = data.iloc[:, 0:cols - 1]     # features: ones + population
y = data.iloc[:, cols - 1:cols]  # target: profit
theta = np.matrix(np.array([0, 0]))
X = np.matrix(X.values)
y = np.matrix(y.values)
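
# Quick sanity check (hedged): the Coursera ex1 write-up quotes a cost of roughly
# 32.07 for this dataset with theta = [0, 0], so this should print a value close
# to that if the cost function and data prep above are correct.
print("initial cost:", compute_cost(X, y, theta))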
## ------------------------------- Gradient descent ---------------------------------#
def grad_des(X, y, theta, alpha, iters):
    temp = np.matrix(np.zeros(theta.shape))
    parameters = theta.shape[1]
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y
        # Update every parameter from the *same* error term (simultaneous update).
        for j in range(parameters):
            term = np.multiply(error, X[:, j])
            temp[0, j] = theta[0, j] - ((alpha / len(X)) * np.sum(term))
        # Copy, otherwise theta and temp alias each other from the second
        # iteration onward and the updates are no longer simultaneous.
        theta = temp.copy()
        # Record the cost so convergence can be inspected after the fit.
        cost[i] = compute_cost(X, y, theta)
    return theta, cost
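
# A fully vectorized variant (a sketch, intended to be equivalent to grad_des
# above): the inner loop over parameters collapses into one matrix product,
# theta := theta - (alpha / m) * ((X * theta.T - y).T * X).
# The name grad_des_vec is ours, not part of the original exercise.
def grad_des_vec(X, y, theta, alpha, iters):
    cost = np.zeros(iters)
    for i in range(iters):
        theta = theta - (alpha / len(X)) * ((X * theta.T) - y).T * X
        cost[i] = compute_cost(X, y, theta)
    return theta, cost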
alpha = 0.01  # learning rate
iters = 2000  # number of gradient-descent iterations
g, cost = grad_des(X, y, theta, alpha, iters)
print("fitted theta:", g)

# Evaluate the fitted line h(x) = theta0 + theta1 * x across the data range.
x = np.linspace(data.population.min(), data.population.max(), 2000)
f = g[0, 0] + (g[0, 1] * x)
print(f)
fig, ax = plt.subplots(figsize=(12, 12))
ax.plot(x, f, 'r', label='Prediction')
ax.scatter(data.population, data.profit, label='Training Data')
ax.legend(loc=2)
ax.set_xlabel('Population')
ax.set_ylabel('Profit')
ax.set_title('Predicted Profit vs. Population Size')
plt.show()
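
# Plot the cost history returned by grad_des; a monotonically decreasing curve
# that flattens out is a quick visual check that gradient descent converged.
fig, ax = plt.subplots(figsize=(12, 8))
ax.plot(np.arange(iters), cost, 'r')
ax.set_xlabel('Iteration')
ax.set_ylabel('Cost')
ax.set_title('Cost vs. Iteration')
plt.show()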
# --------------------------- predict with values ------------------------ #
# Use float rather than int so fractional population values are accepted.
get = float(input("Enter a population value to predict profit for: "))
print(g[0, 0] + g[0, 1] * get, "is the approximate predicted profit")
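
# Cross-check (a sketch, not part of the original exercise): for a problem this
# small, the closed-form normal equation theta = (X^T X)^(-1) X^T y should land
# very close to the gradient-descent result, so printing both validates the fit.
theta_exact = np.linalg.inv(X.T * X) * X.T * y
print("normal-equation theta:", theta_exact.T)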