//------------- public functions -----------//

/*
 * processing functions
 */
public float[] process(float[] input)
{
    //apply function y = f(Wx)
    //store local copy of the input and create space for bias
    ToolsCollection.CopyRBA(input, d_input);
    d_input[d_input_size - 1] = 1.0f; //put in bias

    //calculate and store activation
    ToolsCollection.SetValue(d_activation, 0.0f);
    for (int o = 0; o < d_output_size; o++)
    {
        for (int i = 0; i < d_input_size; i++)
        {
            d_activation[o] += d_weights[o, i] * d_input[i];
        }
    }

    //calculate and store output
    for (int o = 0; o < d_output_size; o++)
    {
        d_output[o] = d_output_function.Compute((float)d_activation[o]); //TODO double?
    }

    //and return the output to be processed by later layers
    return d_output;
}
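/*
 * A sketch of the algebra behind process(): because the input is padded with a
 * constant 1, the last column of d_weights acts as the bias vector, so the two
 * loops above compute
 *
 *   a = W [x; 1] = W' x + b      (stored in d_activation)
 *   y = f(a)                     (stored in d_output, f = d_output_function)
 *
 * where W' and b name the weight matrix without and with its bias column;
 * those symbols are illustrative and do not appear in the code.
 */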
public float[] back_propagate(float[] out_error)
{
    //only call after the forward signal has been processed
    //store a local copy of the output error
    d_output_error = ToolsCollection.Select(out_error, 0, d_output_size); //drops the error on the bias slot of the next layer

    //calculate and store the activation error
    for (int o = 0; o < d_output_size; o++)
    {
        activation_error[o] = d_output_function_derivative.Compute((float)d_activation[o]) * d_output_error[o];
    }

    //calculate and store the input error
    ToolsCollection.SetValue(input_error, 0.0f);
    for (int i = 0; i < d_input_size; i++)
    {
        for (int o = 0; o < d_output_size; o++)
        {
            input_error[i] += activation_error[o] * d_weights[o, i];
        }
    }

    //and return the signal to be processed by previous layers
    return input_error;
}
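For orientation, here is a minimal sketch of how these two methods would be driven across a stack of layers during one pass. The Layer class name, its constructor arguments, and the squared-error gradient are assumptions for illustration; the source only shows process and back_propagate.

//hypothetical driver (a sketch): Layer, its constructor, and the loss
//gradient are assumed here, not taken from the source
Layer hidden = new Layer(5, 8);   //assumed sizes: 4 inputs + bias slot -> 8 outputs
Layer output = new Layer(9, 2);   //assumed sizes: 8 inputs + bias slot -> 2 outputs

float[] x = { 0.1f, 0.5f, -0.3f, 0.9f };
float[] target = { 1.0f, 0.0f };

//forward: each layer's output feeds the next layer's process()
float[] y = output.process(hidden.process(x));

//error at the output, e.g. dE/dy = y - t for a squared-error loss (assumed)
float[] out_error = new float[y.Length];
for (int o = 0; o < y.Length; o++)
{
    out_error[o] = y[o] - target[o];
}

//backward: each back_propagate() returns the error for the previous layer;
//the Select() call inside it drops the error entry belonging to the bias
//slot that the later layer appended to its input
hidden.back_propagate(output.back_propagate(out_error));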