Pushing to remote Perceptron.py, working as a buffer
parent 827731a084
commit d773729e6a
Perceptron.py | 86 | Executable file
@@ -0,0 +1,86 @@

#!/usr/bin/env python3

import numpy as np

cicli = 59990        # maximum number of training iterations
hidden_layer = 1     # number of output units (width of the weight matrix)
epsilon = 0.01       # learning rate

def fdt(x, deriv=False):
    # tanh transfer function; with deriv=True return its derivative
    if deriv:
        return 1 - np.tanh(x)**2
    return np.tanh(x)

def error_func(output):
    # half sum of squared errors against the targets Y
    e = 0
    for i in range(len(output)):
        e += (output[i] - Y[i])**2
    return 0.5*e

# input data
X = np.array([[0, 1],   # first example
              [0, 1],
              [1, 1],
              [1, 1]])

# output data
Y = np.array([[0],
              [0],
              [1],
              [1]])

np.random.seed(1)  # fixed seed, so every run is the same

# synapses between the input and the output Perceptron
syn0 = 0.1*(2*np.random.random((2, hidden_layer)) - 1)

#print(syn0)  # 2x1

# forward pass
l0 = X
l1 = fdt(np.dot(l0, syn0))  # layer output: 4 examples x hidden_layer output neurons (4x1 here)

for i in range(cicli):
    print(str(int(100*i/cicli)) + "---------------------------------------")  # % of the total number of iterations
    # forward pass

    old_error = error_func(l1)

    # backward pass
    # compute delta_nu: target minus current output for each example
    delta_nu = Y - l1

    #print(syn0[:,0])
    delta_w = 0
    for nu in range(len(X)):  # loop over the examples
        delta_w += epsilon*(delta_nu[nu]*fdt(np.dot(X[nu], syn0), True))*X[nu]
    #print(delta_w)
    syn0[:, 0] += delta_w

    l1 = fdt(np.dot(l0, syn0))  # recompute the output: 4 examples x hidden_layer output neurons (4x1 here)

    current_error = error_func(l1)
    print(old_error - current_error)  # improvement in the error this iteration

    if np.linalg.norm(delta_w) < 10**-8:
        print("Very low variation in weight-space - exiting")
        print("delta_w :" + str(np.linalg.norm(delta_w)))
        break
    if error_func(l1) < 0.001:
        print("Very low error - exiting")
        break
    if abs(current_error - old_error) < 10**-8:
        print("Very low error variation - exiting")
        break


print("Error: " + str(error_func(l1)))
for i in range(len(np.transpose(l1))):
    print(np.transpose(l1)[i])
# leftover sketch of a two-layer version (syn1 is never defined)
#for i in range(cicli):
    #l1 = fdt(np.dot(l0, syn0))
    #print(l1)
    #l2 = fdt(np.dot(l1, syn1))
    #print(l2)
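
Side note (not part of the commit): with hidden_layer == 1, the per-example update loop above can be collapsed into a single vectorized step, since X.T @ grad is exactly the sum over nu of grad[nu]*X[nu]. The sketch below is only an illustration of that equivalence under the same data and seed, not the author's implementation; variable names mirror the script's.

import numpy as np

X = np.array([[0, 1], [0, 1], [1, 1], [1, 1]])
Y = np.array([[0], [0], [1], [1]])
epsilon = 0.01

np.random.seed(1)
syn0 = 0.1*(2*np.random.random((2, 1)) - 1)

for _ in range(1000):
    l1 = np.tanh(X @ syn0)                        # forward pass for all examples at once
    grad = (Y - l1) * (1 - np.tanh(X @ syn0)**2)  # per-example error times tanh'(net input)
    syn0 += epsilon * (X.T @ grad)                # one matrix product replaces the loop over nu

print(np.tanh(X @ syn0))                          # final outputs

Run with the same seed and the same number of iterations, this should track the loop version step for step.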