/// <summary>
/// Propagates <paramref name="input"/> forward through the network, layer by layer.
/// The first layer's neuron outputs are set directly from the input vector; every
/// subsequent neuron output is tanh(weighted sum of the previous layer's outputs
/// plus the previous layer's bias output).
/// </summary>
/// <param name="input">Input vector; one value per neuron in the first layer.</param>
private void Feed_Forward(double[] input) {
    Activation_Function af = new Activation_Function();

    for (int depth = 0; depth < layer.Count; depth++) {
        if (depth == 0) {
            // Input layer: raw input values become the neuron outputs unchanged.
            for (int n = 0; n < layer[depth].neuron.Count; n++) {
                layer[depth].neuron[n].output = input[n];
            }
            continue;
        }

        // Weights live on the *previous* layer's neurons, indexed by the
        // destination neuron in this layer.
        var previous = layer[depth - 1];
        for (int n = 0; n < layer[depth].neuron.Count; n++) {
            double weighted_sum = 0;
            for (int src = 0; src < previous.neuron.Count; src++) {
                weighted_sum += previous.neuron[src].output * previous.neuron[src].weight[n];
            }
            // One shared bias per layer, added after the weighted sum.
            layer[depth].neuron[n].output = af.hyperbolic_tangent(weighted_sum + previous.bias.output);
        }
    }
}
/// <summary>
/// Computes the error term for every neuron, walking the layers from output to
/// input. The output layer's error is (target - actual) scaled by the tanh
/// derivative; each hidden neuron's error is the weighted sum of the next
/// layer's errors through its outgoing weights, likewise scaled.
/// </summary>
/// <param name="input">Target output vector; one value per output-layer neuron.</param>
private void Back_Propagate(double[] input) {
    Activation_Function af = new Activation_Function();

    for (int depth = layer.Count; depth > 0; depth--) {
        var current = layer[depth - 1];

        if (depth == layer.Count) {
            // Output layer: delta = (target - actual) * f'(output).
            for (int n = 0; n < current.neuron.Count; n++) {
                double delta = input[n] - current.neuron[n].output;
                current.neuron[n].error = delta * af.hyperbolic_tangent_derivative(current.neuron[n].output);
            }
            continue;
        }

        // Hidden layers: fold the next layer's errors back through the
        // outgoing weights stored on this layer's neurons.
        var next = layer[depth];
        for (int n = 0; n < current.neuron.Count; n++) {
            double accumulated = 0;
            for (int dst = 0; dst < next.neuron.Count; dst++) {
                accumulated += current.neuron[n].weight[dst] * next.neuron[dst].error;
            }
            current.neuron[n].error = accumulated * af.hyperbolic_tangent_derivative(current.neuron[n].output);
        }
    }
}