I am getting an error in my linear regression code (below).

def hypothesis(xv, theta):
    """Return the linear-model prediction theta[0] + theta[1] * xv.

    xv: a single scalar feature value.
    theta: length-2 parameter vector (intercept, slope).
    """
    # FIX: the original `return` was at column 0, outside the function
    # body, which raises an IndentationError.
    return theta[0] + theta[1] * xv

def error(X, Y, theta):
    """Return the sum of squared errors of the model over all samples.

    X, Y: 1-D arrays of length m (X.shape[0] is taken as m).
    theta: length-2 parameter vector passed through to `hypothesis`.
    """
    # FIX: the whole body was at column 0 (IndentationError in the paste).
    err = 0
    m = X.shape[0]
    for i in range(m):
        hx = hypothesis(X[i], theta)
        err += (hx - Y[i]) ** 2
    return err

## Gradient of the squared-error cost with respect to theta

def gradient(X, Y, theta):
    """Return the gradient (length-2 NumPy array) of the squared-error
    cost with respect to theta.

    grad[0] accumulates the residuals (intercept term); grad[1]
    accumulates residual * X[i] (slope term).

    NOTE(review): relies on `np` (numpy) being imported at module level,
    which is not visible in this snippet — confirm the file has
    `import numpy as np`.
    """
    # FIX: in the original paste, `grad = ...`, `m = ...` and
    # `return grad` were at column 0, so the loop body sat outside the
    # function — an IndentationError.
    grad = np.zeros((2,))
    m = X.shape[0]
    for i in range(m):
        hx = hypothesis(X[i], theta)
        grad[0] += hx - Y[i]
        grad[1] += (hx - Y[i]) * X[i]
    return grad

def gradientDescent(X, Y, learning_rate=0.007):
    """Run 100 iterations of batch gradient descent.

    X, Y: 1-D training arrays of equal length.
    learning_rate: step size applied to the gradient each iteration.

    Returns (error_list, theta): the cost recorded before each update,
    and the final length-2 parameter vector.
    """
    error_list = []
    theta = np.zeros((2,))
    for _ in range(100):
        grad = gradient(X, Y, theta)
        error_list.append(error(X, Y, theta))
        # BUG FIX: the update rule is theta -= learning_rate * grad.
        # The original wrote `theta[0] - learning_rate - grad[0]`,
        # subtracting the learning rate instead of scaling the gradient,
        # so theta diverges regardless of the data.
        theta[0] = theta[0] - learning_rate * grad[0]
        theta[1] = theta[1] - learning_rate * grad[1]
    return error_list, theta

Please share the complete error message and traceback so the failure can be pinpointed.