def hypothesis(a, theta):
    # linear model: theta[0] is the intercept, theta[1] is the slope
    return theta[0] + theta[1]*a

def error(x, y, theta):
    # sum of squared errors, halved
    err = 0
    m = y.shape[0]
    for i in range(m):
        h = hypothesis(x[i], theta)
        e = (y[i] - h)**2
        err += e
    return err/2

def gradient(x, y, theta):  ## calculating slope
    grad = np.zeros((2,))
    m = y.shape[0]
    for i in range(m):
        h = hypothesis(x[i], theta)
        grad[0] += (y[i] - h)
        grad[1] += (y[i] - h)*x[i]
    return grad

def gradientDes(x, y, learning_rate=0.01):
    itr = 0
    theta = np.zeros((2,))  ## important
    errorList = list()
    while itr <= 100:
        m = gradient(x, y, theta)
        theta[0] = theta[0] - learning_rate*m[0]
        theta[1] = theta[1] - learning_rate*m[1]
        e = error(x, y, theta)
        errorList.append(e)
        itr += 1
    return theta, errorList

finalT, errorList = gradientDes(x, y)
I have pasted the code I wrote for practice in my linear regression class. For some reason it throws an error when I run it; can anyone help me debug it? Specifically, I am getting a KeyError.
Here are my imports:
import pandas as pa
import matplotlib.pyplot as pp
import numpy as np
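One likely cause of the KeyError (this is an assumption, since the code that builds x and y is not shown) is that x and y are still pandas DataFrame or Series objects, so x[i] is treated as a column or index-label lookup rather than a positional one and raises KeyError when no such label exists. Below is a minimal sketch of converting the data to plain NumPy arrays before calling gradientDes; the file name data.csv and the column names X and Y are hypothetical, not from the original post:

import pandas as pa

# hypothetical data file and column names, purely for illustration
df = pa.read_csv("data.csv")

# convert to 1-D NumPy arrays so x[i] and y[i] are positional lookups,
# which avoids the KeyError that pandas label-based indexing can raise
x = df["X"].to_numpy()
y = df["Y"].to_numpy()

finalT, errorList = gradientDes(x, y)
print(finalT)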