public FastNN(int[] numNeurons, int miniBatchSize, bool l2loss, float dropoutProb = 0)
{
    this.l2loss = l2loss;
    layers = new DenseLayer[numNeurons.Length - 1];
    dropouts = new DropoutLayer[numNeurons.Length - 1];
    activations = new Matrix<float>[numNeurons.Length];
    singleActivations = new Matrix<float>[numNeurons.Length];
    preActivations = new Matrix<float>[numNeurons.Length - 1];
    deltas = new Matrix<float>[numNeurons.Length - 1];
    this.miniBatchSize = miniBatchSize;
    optimizer = new Adam(0.001F);
    IActivationFunc activationFunc = new Relu();
    IInitialization initialization = new HeNormal();

    for (int i = 0; i < numNeurons.Length; i++)
    {
        activations[i] = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);
        singleActivations[i] = DenseMatrix.Create(1, numNeurons[i], 0);
        if (i == 0)
        {
            continue;
        }

        // Hidden layers use ReLU; the output layer switches to a linear activation.
        if (i == numNeurons.Length - 1)
        {
            activationFunc = new Linear();
        }

        preActivations[i - 1] = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);
        layers[i - 1] = new DenseLayer(numNeurons[i - 1], numNeurons[i], activationFunc, initialization);
        deltas[i - 1] = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);

        // Dropout is only inserted after hidden layers, never after the output layer.
        if (dropoutProb > 0 && i < numNeurons.Length - 1)
        {
            dropouts[i - 1] = new DropoutLayer(miniBatchSize, numNeurons[i], dropoutProb);
        }
    }

    computeSDOutput = numNeurons.Last() == 2;
}
static bool ReluValTest()
{
    Relu relu = new Relu();
    double[,,] input = new double[1, 1, 1];
    input[0, 0, 0] = 1;
    double[,,] result = relu.Func(input);
    return result[0, 0, 0] == input[0, 0, 0];
}
public void NeuralNetwork_CanCreate()
{
    var ann = new NeuralNet();
    var relu = new Relu();
    var ans = relu.Apply(5);

    // ReLU passes positive inputs through unchanged, so ReLU(5) = 5.
    Assert.That(ans, Is.EqualTo(5));
}
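The two tests above only pin down the element-wise rule max(0, x), once through a scalar Apply and once through a 3-D Func. As a rough, self-contained sketch of a class that would satisfy both call sites (the signatures are inferred from the calls, not copied from either library):

using System;

// Hedged sketch: a ReLU exposing the scalar Apply and the element-wise
// Func over a double[,,] tensor used by the tests above.
public sealed class ReluScalarSketch
{
    public double Apply(double x) => Math.Max(0.0, x);

    public double[,,] Func(double[,,] input)
    {
        var result = new double[input.GetLength(0), input.GetLength(1), input.GetLength(2)];
        for (int i = 0; i < input.GetLength(0); i++)
            for (int j = 0; j < input.GetLength(1); j++)
                for (int k = 0; k < input.GetLength(2); k++)
                    result[i, j, k] = Math.Max(0.0, input[i, j, k]);
        return result;
    }
}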
private Layer ConvertRelu(paddle.OpDesc op)
{
    var x = GetParameter(op.Inputs, "X").Arguments[0];
    var output = GetParameter(op.Outputs, "Out").Arguments[0];
    var layer = new Relu(GetVarShape(x));
    layer.Input.SetConnection(_outputs[x]);
    _outputs[output] = layer.Output;
    return layer;
}
public void ReluPrimeTest()
{
    var a = new Matrix(2, 2);
    a.InRandomize();
    var b = a.Duplicate();
    a = new Relu().Backward(a);
    b.InMap((x) => x > 0 ? 1 : 0);
    Assert.IsTrue(Math.Abs(a.FrobeniusNorm() - b.FrobeniusNorm()) < 0.1,
                  new Relu().Type().ToString() + " Derivative.");
}
public void ReluTest()
{
    var a = new Matrix(2, 2);
    a.InRandomize();
    var b = a.Duplicate();
    a = new Relu().Forward(a);
    b.InMap((x) => Math.Max(x, 0));
    Assert.IsTrue(Math.Abs(a.FrobeniusNorm() - b.FrobeniusNorm()) < 0.1,
                  new Relu().Type().ToString() + " Activation.");
}
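The pair of tests above checks the forward map against max(0, x) and the backward pass against the indicator x > 0. The library's Matrix type is not shown here, so the following is a hedged stand-in over double[,] that captures what Forward and Backward are expected to compute; the class and member names are illustrative only.

using System;

// Hedged sketch of the behaviour the two tests assert:
// Forward(x) = max(0, x) element-wise; Backward(x) = 1 where x > 0, else 0.
public sealed class ReluMatrixSketch
{
    public double[,] Forward(double[,] m) => Map(m, v => Math.Max(0.0, v));

    // Returns the element-wise derivative of the forward map, which is
    // what ReluPrimeTest compares against.
    public double[,] Backward(double[,] m) => Map(m, v => v > 0.0 ? 1.0 : 0.0);

    private static double[,] Map(double[,] m, Func<double, double> f)
    {
        var r = new double[m.GetLength(0), m.GetLength(1)];
        for (int i = 0; i < m.GetLength(0); i++)
            for (int j = 0; j < m.GetLength(1); j++)
                r[i, j] = f(m[i, j]);
        return r;
    }
}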
private Layer ConvertReLU(LayerParameter layerParam)
{
    var input = _outputs[layerParam.Bottom[0]];
    var param = layerParam.ReluParam;
    if (param != null && param.NegativeSlope != 0)
    {
        throw new NotSupportedException("Non-zero negative slope of relu is not supported.");
    }

    var layer = new Relu(input.Dimensions);
    layer.Input.SetConnection(input);
    _outputs[layerParam.Top[0]] = layer.Output;
    return layer;
}
public void BackPropagateTest()
{
    int nbInput = 6;
    var input = NNArray.Random(nbInput);
    var output = new double[] { 0, 1 };

    IActivation activation;

    activation = new IdentityActivation();
    TrainNetwork(activation, input, output);

    activation = new Sigmoid();
    TrainNetwork(activation, input, output);

    activation = new Tanh();
    TrainNetwork(activation, input, output);

    activation = new Relu();
    TrainNetwork(activation, input, output);
}
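TrainNetwork itself is not shown in the test above, so as a rough illustration of where the activation's derivative enters backpropagation, here is a hedged sketch of a single gradient step on one dense layer; all names (weights, bias, learning rate) are local to the sketch and not taken from the test's library.

using System;

// Hedged sketch: one gradient-descent step for a single layer y = f(W·x + b),
// showing where the activation derivative (here ReLU') enters the update.
public static class BackpropSketch
{
    public static void Step(float[,] w, float[] b, float[] x, float[] target, float lr)
    {
        int nOut = b.Length, nIn = x.Length;
        var z = new float[nOut];
        var y = new float[nOut];

        // Forward pass: pre-activation z, then ReLU.
        for (int o = 0; o < nOut; o++)
        {
            z[o] = b[o];
            for (int i = 0; i < nIn; i++) z[o] += w[o, i] * x[i];
            y[o] = Math.Max(0f, z[o]);
        }

        // Backward pass: delta = dLoss/dy * f'(z), using a squared-error loss.
        for (int o = 0; o < nOut; o++)
        {
            float dLoss = y[o] - target[o];     // derivative of 0.5 * (y - t)^2
            float dRelu = z[o] > 0f ? 1f : 0f;  // ReLU'(z)
            float delta = dLoss * dRelu;

            b[o] -= lr * delta;
            for (int i = 0; i < nIn; i++) w[o, i] -= lr * delta * x[i];
        }
    }
}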
private static void Main(string[] args)
{
    var activation = new Relu();
    var n = new FeedForward();
    n.AddLayer(new NeuronLayer(2, activation));
    n.AddLayer(new NeuronLayer(3, activation));
    n.AddLayer(new NeuronLayer(1, activation));
    n.Construct();

    // Snapshot of the (untrained) outputs; not used further.
    var str = "";
    for (int i = 0; i < n.Output.Length; i++)
    {
        str += n.Output[i].ToString() + "\n";
    }

    // XOR truth table: the four input patterns and their expected outputs.
    var inp = new MatrixFloat[]
    {
        new MatrixFloat(new float[,] { { 1, 1 } }),
        new MatrixFloat(new float[,] { { 0, 1 } }),
        new MatrixFloat(new float[,] { { 1, 0 } }),
        new MatrixFloat(new float[,] { { 0, 0 } }),
    };
    var expected = new MatrixFloat[]
    {
        new MatrixFloat(new float[,] { { 0 } }),
        new MatrixFloat(new float[,] { { 1 } }),
        new MatrixFloat(new float[,] { { 1 } }),
        new MatrixFloat(new float[,] { { 0 } })
    };

    for (int i = 0; i < 500000; i++)
    {
        var error = 0f;
        for (int k = 0; k < 4; k++)
        {
            var np1 = inp[k];
            for (int j = 0; j < np1.Columns; j++)
            {
                n.Input[j] = np1[0, j];
            }

            var exp = expected[k];
            n.Forward();

            // Accumulate the squared error over the output neurons.
            for (int z = 0; z < exp.Columns; z++)
            {
                var tmp = (n.Output[z] - exp[0, z]);
                error += tmp * tmp;
            }

            n.Backward(exp);
            n.Clear();
        }
        Console.WriteLine(error);
    }
    Console.ReadKey();
}