public ANN(int numInputs, int numHidden, int numOutputs, int numNeuronsPerHidden, double alpha, ActivationFunctions hidden, ActivationFunctions output, bool useFileWeights = false, string folder = "")
    {
        this.numInputs           = numInputs;
        this.numHidden           = numHidden;
        this.numOutputs          = numOutputs;
        this.numNeuronsPerHidden = numNeuronsPerHidden;
        this.alpha          = alpha;
        this.hiddenFunction = hidden;
        this.outputFunction = output;

        this.useFileWeights = useFileWeights;
        this.folder         = folder;

        if (numHidden > 0)
        {
            // creates the first hidden layer, fed directly by the network inputs
            layers.Add(new Layer(numNeuronsPerHidden, numInputs));
            // creates the remaining hidden layers
            for (int i = 0; i < numHidden - 1; i++)
            {
                layers.Add(new Layer(numNeuronsPerHidden, numNeuronsPerHidden));
            }
            // creates output layer
            layers.Add(new Layer(numOutputs, numNeuronsPerHidden));
        }
        else
        {
            // no hidden layers: a single layer maps inputs straight to outputs
            layers.Add(new Layer(numOutputs, numInputs));
        }
    }
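A minimal construction sketch for context (the sizes are hypothetical, and the enum members Tanh and Sigmoid are assumptions about this codebase's ActivationFunctions):

    // 4 inputs, 2 hidden layers of 8 neurons each, 2 outputs, learning rate 0.1.
    // With the constructor above this yields layers shaped 4->8, 8->8, 8->2.
    ANN net = new ANN(numInputs: 4, numHidden: 2, numOutputs: 2,
                      numNeuronsPerHidden: 8, alpha: 0.1,
                      hidden: ActivationFunctions.Tanh,
                      output: ActivationFunctions.Sigmoid);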
Example 2
    private void ActivationFunctionMutation()
    {
        string   debugMsg = "ActivationFunctionMutation";
        int      nodeRoll = Random.Range(0, Nodes.Count);
        NodeGene ng       = Nodes[nodeRoll];

        if (ng == null)
        {
            throw new System.Exception("Node not found! " + nodeRoll);
        }
        FTYPE fTYPERoll = ActivationFunctions.RandomFTYPE();

        if (fTYPERoll != ng.fTYPE)
        {
            debugMsg += " - Changing node " + nodeRoll + " from " + ng.GetFTYPE() + " to " + fTYPERoll;
            ng.SetFTYPE(fTYPERoll);
        }
        else
        {
            debugMsg += " - Not changing node. Random node selection was same as previous function type";
        }
        if (ArtGallery.DEBUG_LEVEL > ArtGallery.DEBUG.NONE)
        {
            Debug.Log(debugMsg);
        }
    }
Example 3
    public void Backpropagate(decimal[] Cost, decimal BPrefix = 0)
    {
        decimal[] NonLinearZs = new decimal[Perceptrons.Length];

        for (int i = 0; i < Perceptrons.Length; i++)
        {
            NonLinearZs[i] = Perceptrons[i].GetZ();
        }

        for (int i = 0; i < Perceptrons.Length; i++)
        {
            Perceptron P = Perceptrons[i];

            // Start from the prefix handed down by the following layer
            // (defaults to 1 at the output layer, where backpropagation begins).
            decimal prefix = BPrefix == 0m ? 1m : BPrefix;

            // Extend the prefix with (dC/da) * (da/dz) for this perceptron.
            prefix *= 2 * (P.CurrentActivation - Convert.ToDecimal(LastLabels[i].Strength));
            prefix *= ActivationFunctions.GetAppropriateDerivativeActivationFunction(LayerActivationFunction)
                          (NonLinearZs, i)[i];

            // Update this perceptron's weights; dz/dw_j is the j-th input.
            for (int j = 0; j < P.Weights.Length; j++)
            {
                decimal LR = Convert.ToDecimal(parentNeuralNetwork.LearningRate);
                P.Weights[j].Value -= LR * prefix * PreviousLayer.GetInput()[j];
            }

            // Tell the previous layer to propagate using this perceptron's prefix.
            PreviousLayer.Backpropagate(Cost, prefix);
        }
    }
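For reference, the running prefix above is just the chain rule accumulated layer by layer. A scalar sketch with hypothetical names, for a single weight under squared error:

    // dC/dw = (dC/da) * (da/dz) * (dz/dw) = 2 * (a - y) * f'(z) * input
    static decimal WeightGradient(decimal a, decimal y, decimal fPrimeZ, decimal input)
    {
        decimal prefix = 2m * (a - y) * fPrimeZ; // the "prefix" of the layer above
        return prefix * input;                   // dz/dw is the incoming input
    }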
Example 4
        public static double Activate(double input, ActivationFunctions activation)
        {
            switch (activation)
            {
            case ActivationFunctions.Relu:
                return(ReluActivation(input));

            case ActivationFunctions.Sigmoid:
                return(SigmoidActivation(input));

            case ActivationFunctions.Tanh:
                return(TanhActivation(input));

            case ActivationFunctions.Sine:
                return(Math.Sin(input));

            case ActivationFunctions.GELU:
                return(GELUActivation(input));

            case ActivationFunctions.Ln:
                // Natural log; only defined for input > 0.
                return(Math.Log(input));

            default:
                throw new NotImplementedException();
            }
        }
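A quick usage check of the dispatcher above (a sketch; assumes the method and enum are in scope, values rounded):

    public static void ActivateDemo()
    {
        Console.WriteLine(Activate(0.5, ActivationFunctions.Sigmoid)); // ~0.62246
        Console.WriteLine(Activate(0.5, ActivationFunctions.Tanh));    // ~0.46212
    }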
Example 5
 public void SetActivationFunctionForLayersNeurons(ActivationFunctions activationFunction)
 {
     for (int i = 0; i < neurons.Count; i++)
     {
         neurons[i].SetActivationFunction(activationFunction);
     }
 }
Example 6
 private void GenerateCPPN()
 {
     foreach (NodeGene node in geno.Nodes)
     {
         node.fTYPE = ActivationFunctions.RandomFTYPE();
     }
 }
Example 7
        public static double ActivationFunction(double x, ActivationFunctions functionSet)
        {
            switch (functionSet)
            {
            case ActivationFunctions.Sigmoid:
                return(1 / (1 + ePow(-x)));

            case ActivationFunctions.SiLU:                     // SiLU(x) = x * sigmoid(x)
                return(x * ActivationFunction(x, ActivationFunctions.Sigmoid));

            case ActivationFunctions.dSiLU:                    // derivative of SiLU: sigmoid(x) * (1 + x * (1 - sigmoid(x)))
                double silu    = ActivationFunction(x, ActivationFunctions.SiLU);
                double sigmoid = ActivationFunction(x, ActivationFunctions.Sigmoid);
                return(silu + sigmoid * (1 - silu));

            default:
                throw new System.ArgumentException();
            }
        }
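A small numerical sanity check (a sketch; assumes the ActivationFunction method above is in scope): the dSiLU branch should match a central finite difference of SiLU.

    public static void CheckDSiLU()
    {
        double x = 0.7, h = 1e-6;
        double analytic = ActivationFunction(x, ActivationFunctions.dSiLU);
        double numeric  = (ActivationFunction(x + h, ActivationFunctions.SiLU)
                           - ActivationFunction(x - h, ActivationFunctions.SiLU)) / (2 * h);
        System.Console.WriteLine($"{analytic:F6} vs {numeric:F6}"); // should agree to ~6 digits
    }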
Example 8
    public void GenerateThumbnail()
    {
        Texture2D thumb = new Texture2D(32, 32, TextureFormat.ARGB32, false);

        //create a pixel buffer for the texture
        Color[] pixels = new Color[thumb.width * thumb.height];
        //fill with opaque black
        for (int c = 0; c < pixels.Length; c++)
        {
            pixels[c] = new Color(0f, 0f, 0f, 1f);
        }
        // plot the function on a line
        for (int x = 0; x < thumb.width; x++)
        {
            // scale from -PI to PI
            float scaledX = Scale(x, thumb.width) * Mathf.PI;
            float plot    = ActivationFunctions.Activation(fTYPE, scaledX);
            // map the plotted value from [-PI, PI] onto the texture's vertical pixel range
            int mappedPlot = Remap(plot, -Mathf.PI, Mathf.PI, 0, thumb.height - 1);
            Color color = new Color(1f, 1f, 1f, 1f);
            if (ArtGallery.DEBUG_LEVEL >= ArtGallery.DEBUG.VERBOSE)
            {
                Debug.Log(mappedPlot);
            }
            pixels[x + mappedPlot * thumb.width] = color;
        }

        thumb.SetPixels(pixels);
        thumb.Apply();
        Texture = thumb;
        Image   = Sprite.Create(thumb, new Rect(0, 0, thumb.width, thumb.height), new Vector2(0.5f, 0.5f));
    }
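The Scale and Remap helpers are not shown. A plausible Remap is a plain linear range mapping (an assumption, not the original implementation):

    private int Remap(float value, float from1, float to1, int from2, int to2)
    {
        // linearly map value from [from1, to1] into [from2, to2]
        return (int)((value - from1) / (to1 - from1) * (to2 - from2)) + from2;
    }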
Example 9
 public NeuronalNetwork(int inputLength, ActivationFunctions activationFunction, CostFunctions costFunction = CostFunctions.SquaredMean)
 {
     this.activationFunction = activationFunction;
     layers            = new List <Layer>();
     this.inputLength  = inputLength;
     this.costFunction = costFunction;
 }
Example 10
        public float activation(bool derivate = false)
        {
            if (type == PerceptronType.Type.input || type == PerceptronType.Type.bias)
            {
                return(state);
            }

            if (activation_type == ActivationType.Type.sigmoid)
            {
                return(ActivationFunctions.Sigmoid(state, derivate));
            }
            else if (activation_type == ActivationType.Type.relu)
            {
                return(ActivationFunctions.RelU(state, derivate));
            }
            else if (activation_type == ActivationType.Type.tanh)
            {
                return(ActivationFunctions.TanH(state, derivate));
            }
            else if (activation_type == ActivationType.Type.identity)
            {
                return(ActivationFunctions.Identity(state, derivate));
            }
            else if (activation_type == ActivationType.Type.lrelu)
            {
                return(ActivationFunctions.LeakyReLU(state, derivate));
            }
            else
            {
                // Unrecognized activation types fall back to sigmoid.
                return(ActivationFunctions.Sigmoid(state, derivate));
            }
        }
Example 11
        public double[] ExecuteLayer(double[] input, ActivationFunctions activation, out double[] neuronLinear)
        {
            List <double[]> fInput = new List <double[]>();

            fInput.Add(input);
            return(ExecuteLayer(fInput, activation, out neuronLinear));
        }
Example 12
        private void OnClickPredictBtn(object sender, RoutedEventArgs e)
        {
            byte[] imageBytes = images.Images[_currentIndex];

            DeepLearning.Math.Matrix x_train = new DeepLearning.Math.Matrix(1, 784);

            int x_train_row = x_train.X;
            int y_train_col = x_train.Y;

            Parallel.For(0, x_train_row, i => {
                for (int j = 0; j < y_train_col; j++)
                {
                    x_train[i, j] = imageBytes[j] / 255.0;
                }
            });

            DeepLearning.Math.Matrix matrix = net.Predict(x_train);

            matrix = ActivationFunctions.Softmax(matrix);

            string msg = "Prediction:";

            for (int i = 0; i < matrix.X; i++)
            {
                for (int j = 0; j < matrix.Y; j++)
                {
                    msg += $"[{j}]:{matrix[i,j]:P2}\n";
                }
                // msg += "\n";
            }

            Print(msg);

            Console.WriteLine(matrix);
        }
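ActivationFunctions.Softmax above normalizes the raw network scores into probabilities. A minimal, numerically stable version might look like this (a sketch, not the library's code):

    static double[] Softmax(double[] z)
    {
        double max = double.MinValue;
        foreach (double v in z) { if (v > max) max = v; } // subtract the max for numerical stability
        double sum = 0;
        double[] e = new double[z.Length];
        for (int i = 0; i < z.Length; i++) { e[i] = System.Math.Exp(z[i] - max); sum += e[i]; }
        for (int i = 0; i < e.Length; i++) { e[i] /= sum; }
        return e;
    }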
Example 13
        /// <summary>
        /// This method calculates the results of the network at the output layer
        /// </summary>
        /// <param name="input">input layer data</param>
        /// <param name="numOfFeatures">number of input neurons</param>
        /// <param name="output">output parameter to store outputs at the output layer</param>
        /// <param name="outputSum">output sum of the last layer.</param>
        private void CalculateResultatOutputlayer(double[] input, int numOfFeatures, bool softmax, out double[] output, out double[] outputSum)
        {
            output = new double[m_OutputLayerNeurons];

            outputSum = new double[m_OutputLayerNeurons];

            int numOfHiddenNeuronsInLastHiddenLayer = m_HiddenLayerNeurons[m_HiddenLayerNeurons.Length - 1];

            for (int j = 0; j < m_OutputLayerNeurons; j++)
            {
                outputSum[j] = 0.0;

                for (int i = 0; i < numOfHiddenNeuronsInLastHiddenLayer; i++)
                {
                    outputSum[j] += m_Weights[m_HiddenLayerNeurons.Length][j, i] * input[i];
                }

                outputSum[j] += m_Biases[m_HiddenLayerNeurons.Length][j];

                // With softmax, outputs are computed together once all sums are known.
                if (!softmax)
                {
                    output[j] = ActivationFunctions.Sigmoid(outputSum[j]);
                }
            }

            if (softmax)
            {
                output = ActivationFunctions.SoftMaxClassifier(outputSum);
            }
        }
Example 14
        private void Init()
        {
            //Init classes
            scalingFunction     = new ScalingFunction();
            weightsGenerator    = new WeightsGeneratorRNGCSP();
            activationFunctions = new ActivationFunctions();
            geneticAlgorithm    = new GeneticAlgorithm(weightsGenerator);
            neuralNetwork       = new List <NeuralNetwork>();
            perceptron          = new Perceptron();

            //Init Lists (neuralNetwork was already initialized above)
            keyStore    = new List <BTCKeyStore>();
            dataSet     = new List <DataSet>();
            valkeyStore = new List <BTCKeyStore>();
            valdataSet  = new List <DataSet>();

            deathRate = 10;  //If too high, then chance plays an increasing role and skews the result.

            cb           = new CircularBuffer(deathRate);
            oldMetric    = double.MaxValue;
            attemptstats = new double[32];

            GenerateValidationDataset();
        }
Example 15
        /// <summary>
        /// This one calculates axon, added support for multiple AF
        /// </summary>
        /// <param name="x">Input</param>
        /// <param name="ActivationFunction">Activation function to use</param>
        /// <returns>The activated value</returns>
        public float calcAxon(float x, ActivationFunctions ActivationFunction)
        {
            switch (ActivationFunction)
            {
            case ActivationFunctions.Any:
                // Next's upper bound is exclusive, so Next(1, 5) picks 1-4,
                // covering all four functions below.
                int af = r.Next(1, 5);
                switch (af)
                {
                case 1: return(Logistic(x));

                case 2: return(TanH(x));

                case 3: return(ReLU(x));

                case 4: return(Step(x));

                default: throw new Exception("Please add the new AF here");
                }

            case ActivationFunctions.Logistic: return(Logistic(x));

            case ActivationFunctions.TanH: return(TanH(x));

            case ActivationFunctions.ReLu: return(ReLU(x));

            case ActivationFunctions.Step: return(Step(x));

            default: throw new Exception("Activation function selection error occurred");
            }
        }
Example 16
    public NeuralNetwork_Matrix(int inputCount, int hiddenCount, int outputCount, ActivationFunctions activation = ActivationFunctions.Sigmoid)
    {
        inputNodeCount  = inputCount;
        hiddenNodeCount = hiddenCount;
        outputNodeCount = outputCount;

        weightsInputToHidden  = new float[hiddenCount, inputCount];
        weightsHiddenToOutput = new float[outputCount, hiddenCount];
        RandomizeValues(weightsInputToHidden);
        RandomizeValues(weightsHiddenToOutput);

        biasHidden = new float[hiddenCount];
        biasOutput = new float[outputCount];

        for (int i = 0; i < hiddenCount; i++)
        {
            biasHidden[i] = UnityEngine.Random.Range(0f, 1f);
        }
        for (int i = 0; i < outputCount; i++)
        {
            biasOutput[i] = UnityEngine.Random.Range(0f, 1f);
        }

        activationFunction = new ActivationFunction(activation);
    }
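RandomizeValues is not shown. A plausible version fills a weight matrix with small random values (an assumption; the range is a guess):

    private static void RandomizeValues(float[,] m)
    {
        for (int i = 0; i < m.GetLength(0); i++)
        {
            for (int j = 0; j < m.GetLength(1); j++)
            {
                m[i, j] = UnityEngine.Random.Range(-1f, 1f);
            }
        }
    }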
Example 17
    Texture2D CreateCPPNImage(int width, int height)
    {
        GenerateCPPN();

        Texture2D img = new Texture2D(width, height);

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float   scaledX    = Scale(x, width);
                float   scaledY    = Scale(y, height);
                float   distCenter = GetDistFromCenter(scaledX, scaledY);
                float[] hsv        = ProcessCPPNInput(scaledX, scaledY, distCenter, BIAS);
                // This initial hue is in the range [-1,1] as in the MM-NEAT code
                float initialHue = ActivationFunctions.Activation(FTYPE.PIECEWISE, hsv[TWO_DIMENSIONAL_HUE_INDEX]);
                // However, C#'s Colors do not automatically map negative numbers to the proper hue range as in Java, so an additional step is needed
                float finalHue = initialHue < 0 ? initialHue + 1 : initialHue;
                Color colorHSV = Color.HSVToRGB(
                    finalHue,
                    ActivationFunctions.Activation(FTYPE.HLPIECEWISE, hsv[TWO_DIMENSIONAL_SATURATION_INDEX]),
                    Mathf.Abs(ActivationFunctions.Activation(FTYPE.PIECEWISE, hsv[TWO_DIMENSIONAL_BRIGHTNESS_INDEX])),
                    true
                    );


                img.SetPixel(x, y, colorHSV);
            }
        }

        img.Apply();
        return(img);
    }
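Both this method and GenerateThumbnail above rely on a Scale helper. A plausible definition maps a pixel index linearly into [-1, 1] (an assumption, not the original code):

    private float Scale(int index, int size)
    {
        // map 0..size-1 linearly onto -1..1
        return index / (float)(size - 1) * 2f - 1f;
    }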
Example 18
 public Node()
 {
     this.signal     = 0.0;
     this.tempSignal = 0.0;
     this.activation = ActivationFunctions.GetFunction("Identity");
     this.links      = new List <DecodedNetworks.Link>();
 }
Example 19
    private float applyActivationFunction(float x, ActivationFunctions activationFunction)
    {
        switch (activationFunction)
        {
        case ActivationFunctions.Sigmoid:
            x = 1.0f / (1.0f + Mathf.Exp(-x));
            break;

        case ActivationFunctions.Tanh:
            // algebraically equal to tanh(x)
            x = 2 / (1 + Mathf.Exp(-(2 * x))) - 1;
            break;

        case ActivationFunctions.ReLU:
            x = Mathf.Max(0, x);
            break;

        case ActivationFunctions.Binary_Step:
            if (x < 0)
            {
                x = 0;
            }
            else
            {
                x = 1;
            }
            break;
        }

        return(x);
    }
Example 20
 public NeuralNetwork(NeuralNetworkSettings settings)
 {
     RND = new Random(settings.seed);
     SetupW(settings.configuration);
     activation    = settings.activationFunction;
     leariningRate = settings.leariningRate;
 }
Example 21
        private static NeuralNetwork InitializeNeuralNetwork(int seed)
        {
            Random random = new Random(seed == 0 ? new Random().Next() : seed);

            float RandomWeight() => (float)(random.NextDouble() * 2 - 1);

            Layer prevLayer;

            InputLayer li = new InputLayer(3, 5);

            prevLayer = li;

            ConvolutionalLayer l0 = new ConvolutionalLayer(8, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l0;
            prevLayer.InitializeWeights(RandomWeight);

            ConvolutionalLayer l2 = new ConvolutionalLayer(16, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l2;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l7 = new FullyConnectedLayer(16, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l7;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l8 = new FullyConnectedLayer(10, prevLayer, ActivationFunctions.SoftMax(1));

            prevLayer = l8;
            prevLayer.InitializeWeights(RandomWeight);

            return(new NeuralNetwork(li, l0, l2, l7, l8));
        }
Example 22
        /// <summary>
        /// Computes the activation of node n. We start at the output neurons and work
        /// our way backward.
        /// </summary>
        private void ComputeActivation(Node n, List <Node> previousNodeRequests)
        {
            if (n.Role == NodeRole.Input || n.Role == NodeRole.Bias)
            {
                // It doesn't make sense to compute the activation of Input and Bias nodes.
                // The activation of the Bias node is always 1
                // The activation of the input node comes from an external input vector.
                return;
            }

            n.IncomingActivity = 0;
            foreach (Link l in n.LinksIncoming.Values)
            {
                double activationContribution = 0;
                if (previousNodeRequests.Contains(l.NodeIn))
                {
                    // We've already asked about this node, so we must have hit a recurrent loop.
                    // Use the previous activation, and stop asking.
                    activationContribution = l.NodeIn.ActivationPrevious * l.Weight;
                }
                else
                {
                    previousNodeRequests.Add(l.NodeIn);
                    ComputeActivation(l.NodeIn, previousNodeRequests);
                    // After computing, pop the last item off the list
                    previousNodeRequests.RemoveAt(previousNodeRequests.Count - 1);
                    activationContribution = l.NodeIn.ActivationCurrent * l.Weight;
                }
                n.IncomingActivity += activationContribution;
            }

            n.ActivationPrevious = n.ActivationCurrent;
            n.ActivationCurrent  = ActivationFunctions.Sigmoidal_0_1(n.IncomingActivity);
        }
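Sigmoidal_0_1 is not shown; presumably it is the standard logistic function, squashing the summed incoming activity into (0, 1) (an assumption):

    public static double Sigmoidal_0_1(double x)
    {
        return 1.0 / (1.0 + System.Math.Exp(-x));
    }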
Example 23
 void GenerateCPPN()
 {
     foreach (NodeGene node in cppnTest.Nodes)
     {
         node.fTYPE = ActivationFunctions.RandomFTYPE();
     }
     cppn = new TWEANN(cppnTest);
 }
Example 24
        public void TestSoftmax()
        {
            Matrix matrix = new Matrix(new double[, ] {
                { 0.3, 2.9, 4.0 }
            });

            matrix = ActivationFunctions.Softmax(matrix);
        }
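The test computes the softmax but asserts nothing. For reference, lines like these could be appended inside it (the Matrix indexer is used as in the examples above):

    // Expected: softmax({0.3, 2.9, 4.0}) ≈ {0.01821, 0.24519, 0.73659}
    double sum = matrix[0, 0] + matrix[0, 1] + matrix[0, 2];
    System.Diagnostics.Debug.Assert(System.Math.Abs(sum - 1.0) < 1e-9); // probabilities sum to 1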
Example 25
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="type">Defines the activation function to use</param>
 public Neuron(ActivationFunctions type)
 {
     //Define the ActivationFunction
     activationFunction = type;
     inputconnections   = new List <Connection>();
     outputconnections  = new List <Connection>();
     internalBias       = Randomizer.GetRandomWeight(0.0, 1.0); // Set this to random at the start
 }
Example 26
 private void computeOutput(MHiddenLayerHeader header, MNeuronStack neuronStack)
 {
     foreach (MNeuron x in neuronStack.Stack)
     {
         x.Output = ActivationFunctions.computeOutput(header, x);
         Console.WriteLine("test_output " + x.Output);
         x.OutputTextBox.Text = x.Output.ToString();
     }
 }
Example 27
 public Matrix Forward(Matrix x, Matrix t)
 {
     this.t = t;
     y      = ActivationFunctions.Softmax(x);
     loss   = LossFunctions.CrossEntropyError(y, t);
     return(new Matrix(new double[, ] {
         { loss }
     }));
 }
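LossFunctions.CrossEntropyError presumably computes the standard cross-entropy against the one-hot targets t. A minimal scalar sketch (not the library's code; the small constant guards log(0)):

    static double CrossEntropyError(double[] y, double[] t)
    {
        double loss = 0;
        for (int i = 0; i < y.Length; i++)
        {
            loss -= t[i] * System.Math.Log(y[i] + 1e-7);
        }
        return loss;
    }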
Example 28
        public void Calculate(double[,] image)
        {
            //Reset ZVals (raw values untouched by the activation function), vals, and momentums
            InputZVals   = new double[InputCount];
            InputValues  = new double[InputCount];
            HiddenZVals  = new double[HiddenDepth, HiddenCount];
            HiddenValues = new double[HiddenDepth, HiddenCount];
            OutputZVals  = new double[OutputCount];
            OutputValues = new double[OutputCount];

            //Random r = new Random();
            //Random is used for dropout of neurons, but said feature is currently disabled for efficiency reasons

            //Input
            for (int k = 0; k < InputCount; k++)
            {
                for (int j = 0; j < (Resolution * Resolution); j++)
                {
                    InputZVals[k] += (InputWeights[k, j] + InputWeightMomentum[k, j]) * image[j / Resolution, j % Resolution];
                }
                //Add the bias once per neuron, after accumulating the weighted inputs
                InputZVals[k] += InputBiases[k];
                InputValues[k] = ActivationFunctions.Tanh(InputZVals[k]);
            }
            //Hidden
            for (int l = 0; l < HiddenDepth; l++)
            {
                for (int k = 0; k < HiddenCount; k++)
                {
                    if (l == 0)
                    {
                        for (int j = 0; j < InputCount; j++)
                        {
                            HiddenZVals[l, k] += (FirstHiddenWeights[k, j] + FirstHiddenWeightMomentum[k, j]) * InputValues[j];
                        }
                    }
                    else
                    {
                        for (int j = 0; j < HiddenCount; j++)
                        {
                            //HiddenWeights and momentum use l - 1 because the first layer is stored in FirstHiddenWeights and FirstHiddenWeightMomentum
                            HiddenZVals[l, k] += (HiddenWeights[l - 1, k, j] + HiddenWeightMomentum[l - 1, k, j]) * HiddenValues[l - 1, j];
                        }
                    }
                    //Add the bias once per neuron
                    HiddenZVals[l, k] += HiddenBiases[l, k];
                    HiddenValues[l, k] = ActivationFunctions.Tanh(HiddenZVals[l, k]);
                }
            }
            //Output
            for (int k = 0; k < OutputCount; k++)
            {
                for (int j = 0; j < HiddenCount; j++)
                {
                    OutputZVals[k] += ((OutputWeights[k, j] + OutputWeightMomentum[k, j]) * HiddenValues[HiddenDepth - 1, j]);
                }
                //No activation function on outputs
                OutputValues[k] = OutputZVals[k];
            }
        }
Example 29
        private static NeuralNetwork InitializeNeuralNetwork(int seed)
        {
            Random random = new Random(seed == 0 ? new Random().Next() : seed);

            float RandomWeight() => (float)(random.NextDouble() * 2 - 1);

            Layer prevLayer;

            InputLayer li = new InputLayer(28, 28);

            prevLayer = li;

            ConvolutionalLayer l0 = new ConvolutionalLayer(15, 5, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l0;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l1 = new MaxPoolingLayer(2, 2, prevLayer);

            prevLayer = l1;

            ConvolutionalLayer l2 = new ConvolutionalLayer(30, 4, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l2;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l3 = new MaxPoolingLayer(3, 2, prevLayer);

            prevLayer = l3;

            ConvolutionalLayer l4 = new ConvolutionalLayer(45, 2, 2, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l4;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l5 = new MaxPoolingLayer(2, 1, prevLayer);

            prevLayer = l5;

            FullyConnectedLayer l6 = new FullyConnectedLayer(64, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l6;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l7 = new FullyConnectedLayer(32, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l7;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l8 = new FullyConnectedLayer(10, prevLayer, ActivationFunctions.SoftMax(1));

            prevLayer = l8;
            prevLayer.InitializeWeights(RandomWeight);

            return(new NeuralNetwork(li, l0, l1, l2, l3, l4, l5, l6, l7, l8));
        }
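Assuming ConvolutionalLayer(filters, kernelSize, stride, padding, ...) and MaxPoolingLayer(size, stride, ...) with the usual output-size formula (in - kernel) / stride + 1, the spatial dimensions of this MNIST-sized network would shrink like this (an inference from the arguments, not documented behavior):

    // 28x28 input
    //   -> conv 5x5, stride 1 : 24x24 x 15 filters
    //   -> pool 2, stride 2   : 12x12 x 15
    //   -> conv 4x4, stride 1 :  9x9  x 30
    //   -> pool 3, stride 2   :  4x4  x 30
    //   -> conv 2x2, stride 2 :  2x2  x 45
    //   -> pool 2, stride 1   :  1x1  x 45
    //   -> fully connected 64 -> 32 -> 10 (softmax)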
Example 30
        internal static void Run()
        {
            NeuralNetwork nn = InitializeNeuralNetwork(0, ActivationFunctions.Sigmoid(1));

            CalculateXOR(nn);

            TrainXOR(nn, 0.5f, 1000000);

            CalculateXOR(nn);
        }