1 variable linear regression.py
import numpy as np

# Sample inputs in x and the corresponding target values in y
x = np.array([140.00, 155.00, 159.00, 179.00], dtype=np.float64)
y = np.array([60.00, 62.00, 67.00, 70.00], dtype=np.float64)
w = 0
b = 0
alpha = 1.0e-5  # learning rate; x values near 150 need a small step or the updates diverge
m = len(x)
iterations = 50
gd = []  # cost recorded after every iteration
# Implementing our linear regression model: y_hat = w*x + b
def model(x, w, b):
    return w * x + b

# Cost function: mean squared error divided by 2
def cost_func(w, b):
    cost = 0
    for idx in range(m):
        cost += (model(x[idx], w, b) - y[idx]) ** 2
    return cost / (2 * m)
# You could optimize further by combining both functions into a single loop
# (see the combined_gradients sketch below).
def update_w(w, b):  # sum of residuals weighted by x: d(cost)/dw before the 1/m factor
    value = 0
    for idx in range(m):
        value += (model(x[idx], w, b) - y[idx]) * x[idx]
    return value

def update_b(w, b):  # sum of residuals: d(cost)/db before the 1/m factor
    value = 0
    for idx in range(m):
        value += model(x[idx], w, b) - y[idx]
    return value
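
# A sketch of the single-loop optimization mentioned above: compute both
# gradient sums in one pass over the data. combined_gradients is a helper
# introduced here for illustration; the original script does not call it.
def combined_gradients(w, b):
    dw = 0.0
    db = 0.0
    for idx in range(m):
        err = model(x[idx], w, b) - y[idx]
        dw += err * x[idx]  # same sum as update_w
        db += err           # same sum as update_b
    return dw, db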
def gradient_descent():
    global w, b
    for i in range(iterations):
        # update w and b simultaneously, using the values from the previous step
        tempw = w
        tempb = b
        w = tempw - alpha * update_w(tempw, tempb) / m
        b = tempb - alpha * update_b(tempw, tempb) / m
        gd.append(cost_func(w, b))

gradient_descent()
print(gd[:10], gd[-10:])  # first and last ten recorded costs
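
# For reference: a vectorized sketch of the same gradient computation (not part
# of the original script). Because model(x, w, b) broadcasts over the whole
# NumPy array, the per-sample Python loops above can be replaced entirely.
def vectorized_gradients(w, b):
    err = model(x, w, b) - y            # residuals for every sample at once
    return np.dot(err, x), np.sum(err)  # same sums as update_w and update_b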