In this article, we use NumPy to implement a simple BP (backpropagation) neural network. Because the task is regression rather than classification, the output layer activation function chosen is the identity function f(x) = x. The general principles of BP neural networks are not covered here.
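One way to see why the output-layer error in the code below is simply the target minus the prediction: with the identity output f(x) = x and the usual squared-error loss, the activation derivative is 1, so the delta at the output layer collapses to the raw difference. A sketch of that step (standard backpropagation reasoning, added here for clarity rather than taken verbatim from the original article):

$$
E = \tfrac{1}{2}(t - y)^2, \qquad y = f(z) = z, \qquad f'(z) = 1
\;\Longrightarrow\;
\delta_{\mathrm{out}} = -\frac{\partial E}{\partial z} = (t - y)\,f'(z) = t - y.
$$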

import numpy as np

class NeuralNetwork(object):
    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        # Set the number of nodes in the input, hidden and output layers
        self.input_nodes = input_nodes
        self.hidden_nodes = hidden_nodes
        self.output_nodes = output_nodes

        # Initialize the weights and the learning rate
        self.weights_input_to_hidden = np.random.normal(
            0.0, self.hidden_nodes ** -0.5,
            (self.hidden_nodes, self.input_nodes))
        self.weights_hidden_to_output = np.random.normal(
            0.0, self.output_nodes ** -0.5,
            (self.output_nodes, self.hidden_nodes))
        self.lr = learning_rate

        # The hidden layer activation function is the sigmoid function
        self.activation_function = lambda x: 1 / (1 + np.exp(-x))

    def train(self, inputs_list, targets_list):
        # Convert the inputs list to a 2D array of shape [feature_dimension, 1]
        inputs = np.array(inputs_list, ndmin=2).T
        targets = np.array(targets_list, ndmin=2).T

        ### Forward pass ###
        # Hidden layer
        hidden_inputs = np.dot(self.weights_input_to_hidden, inputs)  # signals into hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)      # signals from hidden layer

        # Output layer; the output activation function is y = x
        final_inputs = np.dot(self.weights_hidden_to_output, hidden_outputs)  # signals into final output layer
        final_outputs = final_inputs                                          # signals from final output layer

        ### Backward pass: update the weights with gradient descent ###
        # Output layer error is the difference between the desired target
        # and the actual output
        output_errors = targets - final_outputs

        # Backpropagated error: errors propagated back to the hidden layer,
        # scaled by the sigmoid derivative hidden_outputs * (1 - hidden_outputs)
        hidden_errors = np.dot(output_errors.T, self.weights_hidden_to_output) * \
                        (hidden_outputs * (1 - hidden_outputs)).T

        # Update the hidden-to-output weights with a gradient descent step
        self.weights_hidden_to_output += output_errors * hidden_outputs.T * self.lr
        # Update the input-to-hidden weights with a gradient descent step
        self.weights_input_to_hidden += (inputs * hidden_errors * self.lr).T

    def run(self, inputs_list):
        # Run a forward pass through the network to get a prediction
        inputs = np.array(inputs_list, ndmin=2).T

        # Hidden layer
        hidden_inputs = np.dot(self.weights_input_to_hidden, inputs)  # signals into hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)      # signals from hidden layer

        # Output layer
        final_inputs = np.dot(self.weights_hidden_to_output, hidden_outputs)  # signals into final output layer
        final_outputs = final_inputs                                          # signals from final output layer

        return final_outputs
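To show how the class above might be used, here is a minimal usage sketch, assuming the NeuralNetwork class is already defined. The network shape, learning rate, epoch count, and the toy target function y = 2x are illustrative choices of ours, not values from the original article.

import numpy as np

# Hypothetical toy regression: fit y = 2x on inputs drawn from [0, 1].
np.random.seed(42)

nn = NeuralNetwork(input_nodes=1, hidden_nodes=8, output_nodes=1,
                   learning_rate=0.1)

xs = np.random.rand(200)   # 200 scalar inputs
ys = 2.0 * xs              # regression targets

for epoch in range(100):
    for x, y in zip(xs, ys):
        nn.train([x], [y])  # one sample per call, as train() expects

print(nn.run([0.5]))        # should print a value close to 1.0

Note that train() performs plain stochastic gradient descent, updating the weights after every single sample; mini-batching would require accumulating the gradients before applying them.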

That is all for this article. I hope it helps everyone with their learning, and I hope you will continue to support Script Home.

Permanent link to this article: http://www.script-home.com/python-uses-numpy-to-implement-bp-neural-network.html | Script Home

Please credit when reprinting: Python uses numpy to implement BP neural network | Script Home
