Gradient_Descent.py
from sklearn.metrics import mean_squared_error
import numpy as np
import matplotlib.pyplot as plt


def pred(input_data, target, weights):
    # Prediction of a single linear node: dot product of inputs and weights.
    return (input_data * weights).sum()


def get_slope(input_data, target, weights):
    # Negative gradient of the squared error with respect to the weights,
    # i.e. the direction that reduces the error.
    preds = pred(input_data, target, weights)
    error = target - preds
    slope = 2 * input_data * error
    return slope


def get_mse(input_data, target, weights):
    # Mean squared error for this single data point.
    preds = pred(input_data, target, weights)
    return mean_squared_error([preds], [target])


weights = np.array([0, 2, 1])
input_data = np.array([1, 2, 3])
target = 0

learning_rate = 0.01  # step size; was undefined in the original script
n_updates = 20
mse_hist = []

# Repeatedly step the weights along the negative gradient and record the error.
for i in range(n_updates):
    slope = get_slope(input_data, target, weights)
    weights = weights + (learning_rate * slope)
    mse = get_mse(input_data, target, weights)
    mse_hist.append(mse)

plt.plot(mse_hist)
plt.xlabel('Iterations')
plt.ylabel('Mean Squared Error')
plt.show()
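
# --- Optional sanity check (illustrative addition, not part of the original file) ---
# A minimal sketch comparing the analytic slope above against a central-difference
# estimate of the MSE gradient. The helper name `numerical_slope` and the step
# size `eps` are assumptions chosen for illustration only.
def numerical_slope(input_data, target, weights, eps=1e-6):
    grad = np.zeros_like(weights, dtype=float)
    for j in range(len(weights)):
        w_plus = weights.astype(float).copy()
        w_minus = weights.astype(float).copy()
        w_plus[j] += eps
        w_minus[j] -= eps
        grad[j] = (get_mse(input_data, target, w_plus)
                   - get_mse(input_data, target, w_minus)) / (2 * eps)
    # get_slope returns the negative gradient (the descent direction),
    # so flip the sign before comparing.
    return -grad


# Both prints should show approximately [-14, -28, -42] for the starting weights.
print(numerical_slope(input_data, target, np.array([0.0, 2.0, 1.0])))
print(get_slope(input_data, target, np.array([0.0, 2.0, 1.0])))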