/// <summary>
/// Selects the activation function and its derivative for this instance.
/// An unrecognized value leaves the current delegates unchanged, matching
/// the original switch's empty default branch.
/// </summary>
/// <param name="_neuralActivationFunction">Which activation pair to install.</param>
public void SetActivationFunction(NeuralActivationFunction _neuralActivationFunction)
{
    // Map the enum to a (function, derivative) pair in one expression;
    // the fall-through arm re-assigns the existing delegates (a no-op).
    (actFunction, actFunctionDer) = _neuralActivationFunction switch
    {
        NeuralActivationFunction.Sigmoid => (ActivationFunctions.Sigmoid, ActivationFunctions.SigmoidDer),
        NeuralActivationFunction.Tanh => (ActivationFunctions.Tanh, ActivationFunctions.TanhDer),
        NeuralActivationFunction.Relu => (ActivationFunctions.Relu, ActivationFunctions.ReluDer),
        NeuralActivationFunction.LeakyRelu => (ActivationFunctions.LeakyRelu, ActivationFunctions.LeakyReluDer),
        _ => (actFunction, actFunctionDer),
    };
}
/// <summary>
/// Evaluates the derivative of the named activation function at <paramref name="x"/>.
/// </summary>
/// <param name="funcName">The activation function whose derivative to evaluate.</param>
/// <param name="x">The point at which to evaluate the derivative.</param>
/// <returns>The derivative value at <paramref name="x"/>.</returns>
/// <exception cref="Exception">Thrown when <paramref name="funcName"/> has no derivative defined here.</exception>
public static double derLogisticsFunc(activationFunction funcName, double x)
{
    switch (funcName)
    {
        case activationFunction.relu:
            // d/dx max(0, x): 1 for x > 0, otherwise 0 (subgradient 0 used at x == 0).
            // Collapsed from three redundant returns in the original.
            return x > 0 ? 1 : 0;
        case activationFunction.sigmoid:
            // d/dx sigmoid(x) = e^x / (1 + e^x)^2, equivalent to s(x) * (1 - s(x)).
            return Math.Exp(x) / Math.Pow(1 + Math.Exp(x), 2);
        case activationFunction.tanh:
            // d/dx tanh(x) = sech^2(x) = 4 / (e^-x + e^x)^2.
            return 4 / Math.Pow(Math.Exp(-x) + Math.Exp(x), 2);
        default:
            // Fixed: the original message was missing the space before the name
            // ("No function with namerelu").
            throw new Exception("No function with name " + funcName);
    }
}
// Reference on the ReLU derivative convention used by derLogisticsFunc:
// http://kawahara.ca/what-is-the-derivative-of-relu/
/// <summary>
/// Evaluates the named activation (squashing) function at <paramref name="x"/>.
/// </summary>
/// <param name="funcName">The activation function to evaluate.</param>
/// <param name="x">The input value.</param>
/// <returns>The activation value at <paramref name="x"/>.</returns>
/// <exception cref="Exception">Thrown when <paramref name="funcName"/> is not supported.</exception>
public static double logisticsFunc(activationFunction funcName, double x)
{
    switch (funcName)
    {
        case activationFunction.relu:
            // ReLU: max(0, x).
            return Math.Max(0.0, x);
        case activationFunction.sigmoid:
            // Logistic sigmoid: 1 / (1 + e^-x).
            return 1 / (1 + Math.Exp(-x));
        case activationFunction.tanh:
            return Math.Tanh(x);
        default:
            // Fixed: the original message was missing the space before the name.
            throw new Exception("No function with name " + funcName);
    }
}
//https://stats.stackexchange.com/questions/47590/what-are-good-initial-weights-in-a-neural-network/186351#186351
/// <summary>
/// Initializes the layer.
/// </summary>
/// <returns>The init.</returns>
/// <param name="nn">Number of neurons</param>
/// <param name="ni">Number of inputs per neuron</param>
/// <param name="no">Number of outputs per neuron.</param>
/// <param name="logFunc">Logistics function for the layer.</param>
/// <param name="hb">If set to <c>true</c> will add bias to layer.</param>
public void init(int nn, int ni, int no, activationFunction logFunc, bool hb)
{
    if (hb)
    {
        // The bias is modeled as one extra input (weight) per neuron.
        ni += 1;
    }
    hasBias = hb;
    outputs = new double[nn];
    // Input layers have no neurons to construct — they only hold outputs.
    if (logFunc != activationFunction.input)
    {
        neurons = new neuron[nn];
        for (int i = 0; i < nn; i++)
        {
            neurons[i] = new neuron();
            neurons[i].logFunc = logFunc;
            double[] initWeights = new double[ni];
            // Xavier/Glorot-style uniform initialization range.
            // BUG FIX: the original used 6 / (ni + no), an INTEGER division that
            // truncates to 0 whenever ni + no > 6, zeroing every weight and
            // defeating symmetry breaking. 6.0 forces floating-point division.
            double r;
            if (logFunc.Equals(activationFunction.sigmoid))
            {
                r = 4 * Math.Sqrt(6.0 / (ni + no));
            }
            else if (logFunc.Equals(activationFunction.tanh))
            {
                r = Math.Sqrt(6.0 / (ni + no));
            }
            else
            {
                r = 1; // NOTE(review): no principled range for other activations — confirm.
            }
            for (int j = 0; j < ni; j++)
            {
                // random.NextDouble(-r, r) is a project extension; assumed to
                // draw uniformly from [-r, r) — TODO confirm.
                initWeights[j] = random.NextDouble(-r, r);
            }
            neurons[i].init(ni, initWeights);
        }
    }
}
/// <summary>
/// Searches for the value of the network's final input (appended after
/// <paramref name="inputs"/>) that drives the squared error against
/// <paramref name="outputs"/> below <paramref name="targetError"/>, by walking
/// up/down in half-steps between <paramref name="minVal"/> and <paramref name="maxVal"/>.
/// </summary>
/// <param name="inputs">All network inputs except the one being solved for.</param>
/// <param name="outputs">Target output vector.</param>
/// <param name="targetError">Error threshold at which the search stops.</param>
/// <param name="minVal">Lower bound for the solved value.</param>
/// <param name="maxVal">Upper bound for the solved value.</param>
/// <param name="initDirection">Initial search direction: 1 (up) or -1 (down).</param>
/// <returns>The value found (or the bound reached).</returns>
/// <exception cref="Exception">Thrown when input/output lengths do not match the network.</exception>
public double solve(double[] inputs, double[] outputs, double targetError, double minVal, double maxVal, int initDirection)
{
    // Verify we have enough data to solve it. The first layer reserves one
    // slot for the value being solved for (hence - 1).
    if (inputs.Length != layers[0].outputs.Length - 1)
    {
        throw new Exception("Not enough inputs");
    }
    if (outputs.Length != layers[layers.Count - 1].outputs.Length)
    {
        throw new Exception("Not enough outputs");
    }
    double currentVal = 0.5;
    int direction = initDirection;
    // Sum of squared half-errors over the output vector for the current guess.
    double prevError = forward(inputs.Concat(new double[] { currentVal }).ToArray()).summation((x, j) => Math.Pow(0.5 * (x - outputs[j]), 2));
    while (true)
    {
        double error = forward(inputs.Concat(new double[] { currentVal }).ToArray()).summation((x, j) => Math.Pow(0.5 * (x - outputs[j]), 2));
        if (targetError > error)
        {
            return currentVal;
        }
        if (direction == 1)
        {
            if (currentVal.Equals(maxVal))
            {
                return currentVal;
            }
            if (prevError < error)
            {
                // Error got worse while climbing: reverse. Note: the -1 block
                // below then also runs this same iteration (original behavior).
                direction = -1;
            }
            double nextVal = currentVal + (currentVal / 2);
            if (nextVal > maxVal)
            {
                nextVal = maxVal;
            }
            currentVal = nextVal;
        }
        if (direction == -1)
        {
            if (currentVal.Equals(minVal))
            {
                return currentVal;
            }
            if (prevError < error)
            {
                direction = 1;
            }
            double nextVal = currentVal - (currentVal / 2);
            // BUG FIX: the original clamped against maxVal here (copy-paste from
            // the +1 branch), so the lower bound was never enforced and the
            // currentVal.Equals(minVal) exit above could never trigger.
            if (nextVal < minVal)
            {
                nextVal = minVal;
            }
            currentVal = nextVal;
        }
        prevError = error;
        //Console.WriteLine("Error: {0}, Direction: {1}, Value: {2}", error, direction, currentVal);
    }
}
/// <summary>
/// Adds a layer to the neural network with bias disabled.
/// </summary>
/// <param name="NumberOfNeurons">The number of neurons for the layer.</param>
/// <param name="NumberOfInputsPerNeuron">The number of inputs for each neuron in the layer.</param>
/// <param name="NumberOfOutputsPerNeuron">The number of outputs for each neuron in the layer.</param>
/// <param name="ActivationFunction">The activation function for the layer (The squashing function).</param>
public void addLayer(int NumberOfNeurons, int NumberOfInputsPerNeuron, int NumberOfOutputsPerNeuron, activationFunction ActivationFunction)
    => addLayer(NumberOfNeurons, NumberOfInputsPerNeuron, NumberOfOutputsPerNeuron, ActivationFunction, false);
/// <summary>
/// Adds a layer to the neural network.
/// </summary>
/// <param name="NumberOfNeurons">The number of neurons for the layer.</param>
/// <param name="NumberOfInputsPerNeuron">The number of inputs for each neuron in the layer.</param>
/// <param name="NumberOfOutputsPerNeuron">The number of outputs for each neuron in the layer.</param>
/// <param name="ActivationFunction">The activation function for the layer (The squashing function).</param>
/// <param name="bias">Whether to add a bias or not.</param>
public void addLayer(int NumberOfNeurons, int NumberOfInputsPerNeuron, int NumberOfOutputsPerNeuron, activationFunction ActivationFunction, bool bias)
{
    // The original branched on layers.Count == 0 but both branches were
    // byte-identical (a leftover from a forced-bias experiment, per the
    // commented-out "true"), so the dead conditional is collapsed here.
    layer l = new layer();
    l.init(NumberOfNeurons, NumberOfInputsPerNeuron, NumberOfOutputsPerNeuron, ActivationFunction, bias);
    layers.Add(l);
}