Regarding accuracy level

here is my code. after submission the accuracy i am getting is 10%, what improvisation should i do to have accuracy between 97-100% as told by you!!

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

dfx = pd.read_csv(‘C:/Users/JAINY/Downloads/Training Data/Linear_X_Train.csv’)
dfy = pd.read_csv(‘C:/Users/JAINY/Downloads/Training Data/Linear_Y_Train.csv’)
dfxx = pd.read_csv(‘C:/Users/JAINY/Downloads/Test Cases/Linear_X_Test.csv’)

x = dfx.to_numpy()
y = dfy.to_numpy()
xx = dfxx.to_numpy()
x_new = (x-x.mean())/x.std()
y_new = y
xx_new = xx
plt.scatter(x_new,y_new)
#plt.show()

#def difference(x_new,y_new,theta):

theta[0] * x_new[i] + theta[1] - y_new[i]

theta[0] * x_new[i] + theta[1] - y_new[i]

def error(x_new,y_new,theta):
er = 0

for i in range(x_new.shape[0]):
    er = er + ( (theta[0] * x_new[i] + theta[1]) - y_new[i]) ** 2 
return er

def gradient(x_new,y_new,theta):
gradi = np.zeros((2,))
for i in range(x_new.shape[0]):
gradi[0] = (theta[0] * x_new[i] + theta[1] - y_new[i]) * x_new[i]
gradi[1] = (theta[0] * x_new[i] + theta[1] - y_new[i])
return gradi

def descent(x_new,y_new,rate=0.01):
theta = np.zeros((2,))
itr = 0
err_list = []
while (itr <= 1000):
e = error(x_new,y_new,theta)
err_list.append(e)
grad = gradient(x_new,y_new,theta)
theta[0] = theta[0] - rate * grad[0]
theta[1] = theta[1] - rate * grad[1]
itr = itr + 1
return theta,err_list

slope_inter,list_error = descent(x_new,y_new,rate=0.01)

plt.plot(list_error)
plt.show()
#print(list_error[0])
#plt.scatter(x_new,y_new,color=‘blue’)
#plt.show()
#plt.scatter(x_new,slope_inter[0]*x_new+slope_inter[1],marker=’^’,color=‘orange’)
#plt.show()
#plt.scatter(xx_new,slope_inter[0]*xx_new+slope_inter[1],marker=’^’,color=‘orange’)
#plt.show()

lm = np.zeros((1250))
lm1 = np.zeros((1250))
for i in range(1250):
lm[i] = slope_inter[0]*xx_new[i]+slope_inter[1]
#print(lm[0],lm[1])
file = np.savetxt(‘C:/Users/JAINY/Downloads/ans2.csv’,lm,delimiter=","‘w+’)

check for perfect error_list plot as shown in video. try to reduce the learning rate to 0.0001

still not working, here is the code.

Hi,
In the gradient function() you are overwriting the existing values of gradi , rather you should be adding new values to the existing ones, like this :-
gradi[0] += (theta[0]*X[i]+ theta[1] -Y[i])*X[i]
gradi[1] += (theta[0]*X[i]+ theta[1] -Y[i])

not this
gradi[0] = (theta[0]*X[i]+ theta[1] -Y[i])*X[i]
gradi[1] = (theta[0]*X[i]+ theta[1] -Y[i])

also use learning rate = 0.0001, otherwise your algo will diverge.

You can find the complete code here: https://ide.codingblocks.com/s/81375