import numpy as np
def sigmoid(x):
    """Logistic activation f(x) = 1 / (1 + exp(-x)); maps the reals into (0, 1)."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
def deriv_sigmoid(x):
    """Derivative of the logistic function: f'(x) = f(x) * (1 - f(x))."""
    s = sigmoid(x)
    return s * (1 - s)
def mse_loss(y_true, y_pred):
    """Mean squared error between the label array and the prediction array."""
    squared_errors = (y_true - y_pred) ** 2
    return squared_errors.mean()
class OurNeuralNetwork:
    """A minimal 2-2-1 feedforward neural network.

    Architecture:
      - 2 inputs
      - 1 hidden layer with 2 neurons (h1, h2)
      - 1 output neuron (o1)
    Every neuron uses the sigmoid activation. Training is plain per-sample
    stochastic gradient descent on MSE loss, with the gradients derived by
    hand via the chain rule. Relies on the module-level helpers
    ``sigmoid``, ``deriv_sigmoid`` and ``mse_loss``.
    """

    def __init__(self):
        # Weights, randomly initialised from a standard normal distribution.
        self.w1 = np.random.normal()
        self.w2 = np.random.normal()
        self.w3 = np.random.normal()
        self.w4 = np.random.normal()
        self.w5 = np.random.normal()
        self.w6 = np.random.normal()
        # Biases (intercept terms).
        self.b1 = np.random.normal()
        self.b2 = np.random.normal()
        self.b3 = np.random.normal()

    def feedforward(self, x):
        """Run a forward pass for one sample.

        x: array-like with 2 elements (the two input features).
        Returns the output neuron's activation, a float in (0, 1).
        """
        h1 = sigmoid(self.w1 * x[0] + self.w2 * x[1] + self.b1)
        h2 = sigmoid(self.w3 * x[0] + self.w4 * x[1] + self.b2)
        o1 = sigmoid(self.w5 * h1 + self.w6 * h2 + self.b3)
        return o1

    def train(self, data, all_y_trues, learn_rate=0.1, epochs=1000):
        """Train the network with per-sample SGD and manual backpropagation.

        data:        (n_samples, 2) array of input features.
        all_y_trues: length-n_samples array of 0/1 labels.
        learn_rate:  SGD step size (default 0.1 — the previously
                     hard-coded value, so existing callers are unchanged).
        epochs:      number of passes over the data (default 1000, ditto).

        Prints the MSE loss over the whole dataset every 10 epochs.
        """
        for epoch in range(epochs):
            for x, y_true in zip(data, all_y_trues):
                # --- Forward pass, keeping pre-activations for backprop ---
                sum_h1 = self.w1 * x[0] + self.w2 * x[1] + self.b1
                h1 = sigmoid(sum_h1)
                sum_h2 = self.w3 * x[0] + self.w4 * x[1] + self.b2
                h2 = sigmoid(sum_h2)
                sum_o1 = self.w5 * h1 + self.w6 * h2 + self.b3
                o1 = sigmoid(sum_o1)
                y_pred = o1

                # dL/dy_pred for L = (y_true - y_pred)^2
                d_L_d_ypred = -2 * (y_true - y_pred)

                # Sigmoid derivatives at each pre-activation, computed once
                # per sample (the original re-evaluated them per weight).
                d_sig_o1 = deriv_sigmoid(sum_o1)
                d_sig_h1 = deriv_sigmoid(sum_h1)
                d_sig_h2 = deriv_sigmoid(sum_h2)

                # Output neuron o1
                d_ypred_d_w5 = h1 * d_sig_o1
                d_ypred_d_w6 = h2 * d_sig_o1
                d_ypred_d_b3 = d_sig_o1
                d_ypred_d_h1 = self.w5 * d_sig_o1
                d_ypred_d_h2 = self.w6 * d_sig_o1

                # Hidden neuron h1
                d_h1_d_w1 = x[0] * d_sig_h1
                d_h1_d_w2 = x[1] * d_sig_h1
                d_h1_d_b1 = d_sig_h1

                # Hidden neuron h2
                d_h2_d_w3 = x[0] * d_sig_h2
                d_h2_d_w4 = x[1] * d_sig_h2
                d_h2_d_b2 = d_sig_h2

                # --- SGD updates: w <- w - lr * dL/dw (chain rule) ---
                self.w1 -= learn_rate * d_L_d_ypred * d_ypred_d_h1 * d_h1_d_w1
                self.w2 -= learn_rate * d_L_d_ypred * d_ypred_d_h1 * d_h1_d_w2
                self.b1 -= learn_rate * d_L_d_ypred * d_ypred_d_h1 * d_h1_d_b1
                self.w3 -= learn_rate * d_L_d_ypred * d_ypred_d_h2 * d_h2_d_w3
                self.w4 -= learn_rate * d_L_d_ypred * d_ypred_d_h2 * d_h2_d_w4
                self.b2 -= learn_rate * d_L_d_ypred * d_ypred_d_h2 * d_h2_d_b2
                self.w5 -= learn_rate * d_L_d_ypred * d_ypred_d_w5
                self.w6 -= learn_rate * d_L_d_ypred * d_ypred_d_w6
                self.b3 -= learn_rate * d_L_d_ypred * d_ypred_d_b3

            # Periodic progress report over the full dataset.
            if epoch % 10 == 0:
                y_preds = np.apply_along_axis(self.feedforward, 1, data)
                loss = mse_loss(all_y_trues, y_preds)
                print("Epoch %d loss:%.3f" % (epoch, loss))
# Training features: one row per insect; columns are wing length and antenna length.
data=np.array([
[1.78,1.14],
[1.96,1.18],
[1.86,1.20],
[1.72,1.24],
[2.00,1.26],
[2.00,1.28],
[1.96,1.30],
[1.74,1.36],
[1.64,1.38],
[1.82,1.38],
[1.90,1.38],
[1.70,1.40],
[1.82,1.48],
[1.82,1.54],
[2.08,1.56],
])
# Class labels, one per row of `data`: Apf = 1, Af = 0.
all_y_trues=np.array([
1,
1,
1,
1,
1,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
])
# Build and train the network on the labelled samples above.
network = OurNeuralNetwork()
network.train(data, all_y_trues)

# Held-out test points (wing length, antenna length).
test1 = np.array([1.24, 1.80])
test2 = np.array([1.28, 1.84])
test3 = np.array([1.40, 2.04])

# Run each forward pass once and reuse the probability for both the
# report and the classification (previously feedforward was evaluated
# twice per test point).
probs = [network.feedforward(t) for t in (test1, test2, test3)]
for i, prob in enumerate(probs, 1):
    print("test%d: %.3f" % (i, prob))

# Probability above 0.5 means class Apf; 0.5 or below means class Af.
for prob in probs:
    if prob > 0.5:
        print("test类型:Apf")
    else:
        print("test类型:Af")
# Tags: ypred, sigmoid, h2, self, h1, algorithm, neural network, experiment, sum
# Source: https://www.cnblogs.com/1759945451qq/p/16908189.html