LinearRegression with Gradient Descent Algorithm.
"""
@author Victor I. Afolabi
A.I. Engineer & Software developer
javafolabi@gmail.com
Created on 25 August, 2017 @ 8:15 PM.
Copyright © 2017. victor. All rights reserved.
"""
import numpy as np


class LinearRegression(object):
    def __init__(self, learning_rate=1e-4):
        """
        Linear Regression model.

        :param learning_rate: step size used by the gradient descent updates.
        """
        self.m = 0
        self.b = 0
        self.learning_rate = learning_rate
    def fit(self, data, num_iter=1000):
        """Run gradient descent for `num_iter` iterations over the data."""
        for _ in range(num_iter):
            self.__gradient_descent(data)
        return self.m, self.b

    def predict(self, x):
        """Predict y for a given x using the current line y = mx + b."""
        return (self.m * x) + self.b
    def error(self, data):
        """Total squared error of the current fit over the data."""
        total_error = 0
        for d in data:
            x = d[0]
            y = d[1]
            total_error += (y - self.predict(x)) ** 2
        return total_error
    def __gradient_descent(self, data):
        """Perform one gradient descent step on the mean squared error."""
        n = len(data)
        m_gradient = 0
        b_gradient = 0
        for d in data:
            x = d[0]
            y = d[1]
            # Partial derivatives of the mean squared error w.r.t. m and b.
            m_gradient += (2 / n) * -x * (y - self.predict(x))
            b_gradient += (2 / n) * -(y - self.predict(x))
        self.m = self.m - (self.learning_rate * m_gradient)
        self.b = self.b - (self.learning_rate * b_gradient)
if __name__ == '__main__':
    clf = LinearRegression(learning_rate=1e-5)
    data = np.genfromtxt('../datasets/data.csv', delimiter=',')
    clf.fit(data)
    print('m = {:.2f} b = {:.2f}'.format(clf.m, clf.b))
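
A minimal usage sketch, assuming synthetic data in place of the CSV above: it generates noisy points along y = 3x + 4, fits the LinearRegression class defined in the gist, and prints the recovered slope and intercept. The (n, 2) array with x in column 0 and y in column 1 matches what fit() iterates over; the learning rate and iteration count here are illustrative choices, not values from the original gist.

# Usage sketch with synthetic data (assumption: no '../datasets/data.csv' available).
# Requires NumPy >= 1.17 for np.random.default_rng.
import numpy as np

rng = np.random.default_rng(0)
x = rng.uniform(0, 10, size=200)
y = 3.0 * x + 4.0 + rng.normal(0.0, 0.5, size=200)
data = np.column_stack((x, y))  # shape (200, 2): column 0 is x, column 1 is y

model = LinearRegression(learning_rate=1e-2)
m, b = model.fit(data, num_iter=5000)
print('recovered: m = {:.2f}, b = {:.2f}'.format(m, b))   # close to m = 3, b = 4
print('prediction at x = 5: {:.2f}'.format(model.predict(5)))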