Chemical Separator Code
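The listing below is a NumPy implementation of binary logistic regression trained with batch gradient descent, presumably applied to the chemical-separator data the title refers to. The loss is the mean binary cross-entropy, measured in bits since base-2 logarithms are used.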

import numpy as np

def sigmoid(z):
    # Logistic function: squashes any real input into (0, 1)
    return 1.0 / (1.0 + np.exp(-1.0 * z))

def hypothesis(X, theta):
    return sigmoid(np.dot(X, theta))

def error(X, y, theta):
    # Mean binary cross-entropy (in bits, since log base 2 is used)
    hx = hypothesis(X, theta)
    m = X.shape[0]
    err_vec = y * np.log2(hx) + (1 - y) * np.log2(1 - hx)
    err_val = err_vec.sum()
    return -err_val / m

def gradient(y, X, theta):
    # X^T (y - h(X)) / m is the negative gradient of the cross-entropy,
    # i.e. the descent direction, so the update step below is additive
    m = X.shape[0]
    hx = hypothesis(X, theta)
    grad = np.dot(X.T, (y - hx))
    return grad / m

def predict(x, theta):
    confidence = hypothesis(x, theta)
    if confidence < 0.5:
        return 0
    else:
        return 1

def accuracy(X_test, y_test, theta):
    # Fraction of examples whose thresholded prediction matches the label
    y_pred = []
    for i in range(y_test.shape[0]):
        p = predict(X_test[i], theta)
        y_pred.append(p)
    y_pred = np.array(y_pred)
    correct_pred = 0
    for i in range(y_test.shape[0]):
        if y_pred[i] == y_test[i]:
            correct_pred += 1
    acc = float(correct_pred) / y_test.shape[0]
    return acc

def gradientDescent(X, y, learning_rate=0.5, maxiter=500):
    theta = np.random.random((X.shape[1], 1))
    err_list = []
    acc_list = []
    for i in range(maxiter):
        e = error(X, y, theta)
        acc = accuracy(X, y, theta)
        grad = gradient(y, X, theta)
        # grad already points downhill on the loss, so the step adds it
        theta = theta + learning_rate * grad
        err_list.append(e)
        acc_list.append(acc)
    return err_list, acc_list, theta
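
As a quick sanity check, here is a minimal usage sketch on synthetic data. The two Gaussian clusters below are an assumption for illustration only, not the actual chemical-separator dataset; a column of ones is prepended to X so theta learns an intercept term.

# Hypothetical synthetic data: two overlapping 2-D Gaussian blobs
np.random.seed(0)
m = 200
X0 = np.random.randn(m // 2, 2) + np.array([1.0, 1.0])    # class 0
X1 = np.random.randn(m // 2, 2) + np.array([-1.0, -1.0])  # class 1
X = np.vstack([X0, X1])
X = np.hstack([np.ones((m, 1)), X])  # prepend a bias column
y = np.vstack([np.zeros((m // 2, 1)), np.ones((m // 2, 1))])

err_list, acc_list, theta = gradientDescent(X, y, learning_rate=0.5, maxiter=200)
print("final loss: %.4f, final accuracy: %.3f" % (err_list[-1], acc_list[-1]))

The loss in err_list should fall monotonically for a small enough learning rate, and accuracy on this kind of well-separated data typically lands above 0.9.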