Skip to content

Instantly share code, notes, and snippets.

@ankitmishra88
Last active March 26, 2020 16:05
Show Gist options
  • Save ankitmishra88/f46294e85ba5698537cf1ad232d3a7cf to your computer and use it in GitHub Desktop.
Save ankitmishra88/f46294e85ba5698537cf1ad232d3a7cf to your computer and use it in GitHub Desktop.
import numpy as np
import matplotlib.pyplot as plt
def plot(x, y, m, b):
    """Scatter the data points and overlay the fitted line y = m*x + b.

    Parameters:
        x, y: iterable of data coordinates (numpy arrays in this script).
        m, b: slope and intercept of the fitted line.

    Displays the figure via plt.show(); returns None.
    """
    plt.scatter(x, y)  # data points
    y_pred = [m * i + b for i in x]  # fitted line values
    # BUG FIX: original plotted plt.plot(x, y) — the raw targets — so the
    # computed y_pred was never drawn. Plot the fitted line instead.
    plt.plot(x, y_pred)
    plt.show()
def grad_desc(x, y, iterations=1000, learning_rate=0.001):
    """Fit a line y = m*x + b to (x, y) by batch gradient descent on MSE.

    Parameters:
        x, y: numpy arrays of equal length (elementwise arithmetic is used).
        iterations: number of gradient steps (default 1000, as before).
        learning_rate: step size for each update (default 0.001, as before).

    Prints m, b and the cost after every step, then plots the final fit.
    Returns None.
    """
    m_curr = b_curr = 0  # start slope and intercept at zero
    n = len(x)  # number of samples
    for i in range(iterations):
        y_pred = m_curr * x + b_curr  # current model prediction
        cost = sum((y - y_pred) ** 2) / n  # mean squared error
        # Partial derivatives of MSE w.r.t. m and b.
        md = (-2 / n) * sum(x * (y - y_pred))
        bd = (-2 / n) * sum(y - y_pred)
        # Step downhill along each gradient component.
        m_curr = m_curr - learning_rate * md
        b_curr = b_curr - learning_rate * bd
        print('m {} b {} cost {}'.format(m_curr, b_curr, cost))
    plot(x, y, m_curr, b_curr)
if __name__=="__main__":
x=np.array([2,3,4,5,6,7,8,9,11,10,12])
y=np.array([7,10,13,16,19,22,25,28,34,31,37])
grad_desc(x,y)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment