public double[] Forward(double[] x_values, bool debug = false)
{
    if (debug) { Console.WriteLine("input:" + x_values[0]); }

    // Scratch arrays for the pre-activation sums of each layer.
    var hSums = new double[_numHidden];
    var oSums = new double[_numOutput];

    // Copy the independent variables into the input nodes.
    for (int i = 0; i < _numInput; i++)
        _inputs[i] = x_values[i];

    // Input-to-hidden weighted sums (fully connected network).
    for (int j = 0; j < _numHidden; j++)
        for (int i = 0; i < _numInput; i++)
            hSums[j] += _inputs[i] * ih_weights[i, j];

    // Add the hidden-layer biases.
    for (int j = 0; j < _numHidden; j++)
        hSums[j] += _hBiases[j];

    if (debug)
    {
        Console.WriteLine("hSum");
        Helper.ShowVector(hSums, 8, 12, true);
    }

    // Hidden-layer activation.
    for (int j = 0; j < _numHidden; j++)
        _hiddens[j] = Activation.HyperTan(hSums[j]);

    if (debug)
    {
        Console.WriteLine("after activation hSum");
        Helper.ShowVector(_hiddens, 8, 12, true); // show the activated values, not the raw sums
    }

    // Hidden-to-output weighted sums.
    for (int k = 0; k < _numOutput; k++)
        for (int j = 0; j < _numHidden; j++)
            oSums[k] += _hiddens[j] * _hoWeights[j, k];

    // Add the output-layer biases.
    for (int k = 0; k < _numOutput; k++)
        oSums[k] += _oBiases[k];

    if (debug)
    {
        Console.WriteLine("after add biases oSum");
        Helper.ShowVector(oSums, 8, 12, true);
    }

    // No softmax activation for regression; just copy the sums to the output nodes.
    Array.Copy(oSums, _outputs, _outputs.Length);

    // Return a copy of _outputs so callers cannot mutate the network's output buffer.
    double[] result = new double[_numOutput];
    Array.Copy(_outputs, result, result.Length);

    if (debug) { Console.WriteLine("output:" + _outputs[0]); }
    return result;
}
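// A minimal usage sketch (not from the original source): assuming this method lives on a
// class named NeuralNetwork whose constructor takes the three layer sizes (both the class
// name and constructor are hypothetical here), a forward pass might be driven like this:
//
//   var net = new NeuralNetwork(numInput: 4, numHidden: 5, numOutput: 1); // hypothetical ctor
//   double[] x = { 5.1, 3.5, 1.4, 0.2 };
//   double[] y = net.Forward(x, debug: true); // prints the intermediate sums along the way
//   Console.WriteLine("prediction: " + y[0]);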
public override double Differentiate(double x, double a)
{
    // For x < 0, d/dx ELU(x, a) = a * e^x, which equals ELU(x, a) + a; otherwise the slope is 1.
    return x < 0 ? Activation.ELU(x, a) + a : 1;
}
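// A quick numerical sanity check for the ELU branch (a sketch, assuming
// Activation.ELU(x, a) = a * (e^x - 1) for x < 0, which makes its derivative
// a * e^x = ELU(x, a) + a):
//
//   double x = -0.5, a = 1.0, h = 1e-6;
//   double numeric = (Activation.ELU(x + h, a) - Activation.ELU(x - h, a)) / (2 * h);
//   double analytic = Activation.ELU(x, a) + a;
//   Console.WriteLine(numeric + " ~ " + analytic); // should agree to several decimals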
public override double[] Calculate(ref double[] LayerSum, double a = 0)
{
    // The 'a' parameter is unused for softmax; it is only present to match the base signature.
    return Activation.Softmax(LayerSum);
}
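// In case Activation.Softmax does not already guard against overflow, a numerically
// stable variant subtracts the maximum before exponentiating (a sketch, not the
// library's actual implementation; requires using System; and using System.Linq;):
//
//   double max = LayerSum.Max();
//   double[] exps = LayerSum.Select(v => Math.Exp(v - max)).ToArray();
//   double sum = exps.Sum();
//   double[] probs = exps.Select(e => e / sum).ToArray(); // sums to 1, overflow-safe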
public override double Differentiate(double x, double a = 0)
{
    // Sigmoid derivative: s * (1 - s); compute the sigmoid once instead of twice.
    double s = Activation.Sigmoid(x);
    return s * (1 - s);
}