# Train a tiny 2-3-1 feed-forward neural network with backpropagation (NumPy only).
import numpy as np

# Training set: 3 samples, 2 input features each, one target value apiece.
X = np.array([[2, 9], [1, 5], [3, 6]], dtype=float)
y = np.array([[92], [86], [89]], dtype=float)
# Scale each feature column to [0, 1] and targets to fractions of 100.
X = X / X.max(axis=0)
y = y / 100


def sigmoid(x):
    """Logistic activation: map any real input (scalar or array) into (0, 1)."""
    return 1 / (1 + np.exp(-x))
def derivatives_sigmoid(x):
    """Sigmoid gradient, expressed in terms of the ALREADY-activated value.

    The caller passes x = sigmoid(z), so the derivative is simply
    x * (1 - x) and no extra exp() is required.
    """
    complement = 1 - x
    return x * complement
# Training hyperparameters.
epoch = 5000  # number of gradient-descent iterations
lr = 0.1      # learning-rate step size

# Network architecture: features in, hidden width, outputs.
inputlayer_neurons, hiddenlayer_neurons, output_neurons = 2, 3, 1

# Weights and biases initialised uniformly at random in [0, 1).
wh = np.random.uniform(size=(inputlayer_neurons, hiddenlayer_neurons))  # input -> hidden
bh = np.random.uniform(size=(1, hiddenlayer_neurons))                   # hidden bias
wout = np.random.uniform(size=(hiddenlayer_neurons, output_neurons))    # hidden -> output
bout = np.random.uniform(size=(1, output_neurons))                      # output bias
# Train with plain batch gradient descent.
for _ in range(epoch):
    # Forward propagation.
    hinp = np.dot(X, wh) + bh
    hlayer_act = sigmoid(hinp)                 # hidden-layer activations
    outinp = np.dot(hlayer_act, wout) + bout
    output = sigmoid(outinp)                   # network prediction

    # Backpropagation.
    EO = y - output                            # output-layer error
    d_output = EO * derivatives_sigmoid(output)
    EH = d_output.dot(wout.T)                  # error attributed to the hidden layer
    d_hiddenlayer = EH * derivatives_sigmoid(hlayer_act)

    # Gradient-descent parameter updates.
    wout += hlayer_act.T.dot(d_output) * lr
    wh += X.T.dot(d_hiddenlayer) * lr
    # BUG FIX: the original never updated the biases, so bh/bout stayed frozen
    # at their random initial values; sum each delta over the batch axis.
    bout += np.sum(d_output, axis=0, keepdims=True) * lr
    bh += np.sum(d_hiddenlayer, axis=0, keepdims=True) * lr

print("Input: \n" + str(X))
print("Actual Output: \n" + str(y))
print("Predicted Output: \n", output)