import numpy as np

# activation function
def sigma(z):
    return np.where(z > 0, z, 0)  # ReLU

if __name__ == '__main__':
    xs = np.array([0, 1])  # input vector
    N = xs.shape[0]        # number of inputs / neurons
    print('N =', N)

    w = np.ones((N, N))  # weight matrix
    b = np.zeros(N)      # bias vector
    print('w.shape =', w.shape)
    print('b.shape =', b.shape)

    # manipulate weights and biases
    b[1] = -0.5
    w[0, 1] = 0
    print('w =\n', w)
    print('b =\n', b)

    # propagate the inputs xs through the perceptron layer
    ys = sigma(np.dot(w, xs) + b)
    for x, y in zip(xs, ys):
        print('x =', x, 'y =', y)
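
# Worked check of the forward pass above (hand computation, nothing new):
# with xs = [0, 1], w = [[1, 0], [1, 1]] and b = [0, -0.5], the
# pre-activations are np.dot(w, xs) + b = [0, 0.5], and ReLU leaves both
# entries unchanged, so the script should print ys = [0, 0.5].
#
# A minimal variant sketch (an addition, not part of the original script):
# the name `sigma` conventionally suggests the logistic sigmoid rather than
# ReLU, and swapping activations is a one-line change. `logistic` is a
# hypothetical helper name chosen here for illustration.
def logistic(z):
    # logistic sigmoid, 1 / (1 + exp(-z)); plain form, fine for small |z|
    return 1.0 / (1.0 + np.exp(-z))

# Usage sketch (same layer, smooth activation); uncomment inside __main__:
# ys = logistic(np.dot(w, xs) + b)  # -> [0.5, 0.62245933...]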