Code Example #1
File: Program.cs Project: igor-kalashnikov/visionary
        static void Main(string[] args)
        {
            // Input layer: two neurons, no incoming weights.
            NeuralNetwork neuralNetwork = new NeuralNetwork();
            Layer layer = new Layer();
            for (int i = 0; i < 2; ++i)
            {
                layer.Neurons.Add(new Neuron());
            }

            neuralNetwork.Layers.Add(layer);

            // Hidden layer: two neurons, fully connected to the input layer.
            layer = new Layer(string.Empty, layer);

            for (int i = 0; i < 2; ++i)
            {
                layer.Neurons.Add(new Neuron());
            }

            // One bias weight plus one weight per input, for each neuron.
            for (int i = 0; i < layer.Neurons.Count * (layer.PreviousLayer.Neurons.Count + 1); ++i)
            {
                layer.Weights.Add(new Weight(string.Empty, Utils.RandomDouble(-0.3, 0.3)));
            }

            int weightsPerNeuron = layer.PreviousLayer.Neurons.Count + 1; // bias + one weight per input
            for (int i = 0; i < layer.Neurons.Count; ++i)
            {
                layer.Neurons[i].AddConnection(UInt32.MaxValue, (uint)(i * weightsPerNeuron)); // bias weight
                for (int j = 0; j < layer.PreviousLayer.Neurons.Count; ++j)
                {
                    layer.Neurons[i].AddConnection(new Connection((uint)j, (uint)(i * weightsPerNeuron + j + 1)));
                }
            }

            neuralNetwork.Layers.Add(layer);

            // Output layer: one neuron, fully connected to the hidden layer.
            layer = new Layer(string.Empty, layer);

            for (int i = 0; i < 1; ++i)
            {
                layer.Neurons.Add(new Neuron());
            }

            for (int i = 0; i < layer.Neurons.Count * (layer.PreviousLayer.Neurons.Count + 1); ++i)
            {
                layer.Weights.Add(new Weight(string.Empty, Utils.RandomDouble(-0.3, 0.3)));
            }

            weightsPerNeuron = layer.PreviousLayer.Neurons.Count + 1;
            for (int i = 0; i < layer.Neurons.Count; ++i)
            {
                layer.Neurons[i].AddConnection(UInt32.MaxValue, (uint)(i * weightsPerNeuron)); // bias weight
                for (int j = 0; j < layer.PreviousLayer.Neurons.Count; ++j)
                {
                    layer.Neurons[i].AddConnection(new Connection((uint)j, (uint)(i * weightsPerNeuron + j + 1)));
                }
            }

            neuralNetwork.Layers.Add(layer);

            // The four XOR training patterns and their target outputs.
            double[][] inputs =
                {
                    new double[] { 1, 1 },
                    new double[] { 0, 1 },
                    new double[] { 1, 0 },
                    new double[] { 0, 0 }
                };

            double[][] targetOutputs =
                {
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 1 },
                    new double[] { 0 }
                };

            double[] output = new double[1];

            // Online training: one forward and one backward pass per pattern.
            for (int i = 0; i < 100000; ++i)
            {
                for (int p = 0; p < inputs.Length; ++p)
                {
                    neuralNetwork.Calculate(inputs[p], 2, output, 1);
                    neuralNetwork.Backpropagate(output, targetOutputs[p], 1);
                }
            }

            // Dump the learned weights, grouped per neuron.
            foreach (var l in neuralNetwork.Layers)
            {
                int i = 0;
                foreach (var weight in l.Weights)
                {
                    if (l.PreviousLayer != null && i > l.PreviousLayer.Neurons.Count)
                    {
                        Console.WriteLine();
                        i = 0;
                    }

                    Console.Write(weight.value + " ");
                    i++;
                }

                Console.WriteLine();
                Console.WriteLine();
            }

            // Evaluate the trained network on one pattern.
            double[] input = new double[] { 1, 1 };
            neuralNetwork.Calculate(input, 2, output, 1);

            Console.WriteLine(output[0]);
        }
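
The Utils.RandomDouble helper used above (and the UNIFORM_PLUS_MINUS_ONE helper used in code example #2) is not shown in the project excerpt. A minimal sketch, assuming both simply wrap System.Random; the actual implementations in igor-kalashnikov/visionary may differ:

    public static class Utils
    {
        private static readonly Random Rng = new Random();

        // Uniformly distributed double in [min, max).
        public static double RandomDouble(double min, double max)
        {
            return min + (max - min) * Rng.NextDouble();
        }

        // Uniformly distributed double in [-1, 1).
        public static double UNIFORM_PLUS_MINUS_ONE()
        {
            return 2.0 * Rng.NextDouble() - 1.0;
        }
    }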
Code Example #2
        private void InitNeuralNetwork()
        {
            // TODO: add reinitialization code here
            // (SDI documents will reuse this document)

            // grab the mutex for the neural network

            //CAutoMutex tlo( m_utxNeuralNet );

            // initialize and build the neural net
            NeuralNetwork NN = m_NN; // for easier nomenclature
            NN.Initialize();

            Layer pLayer;

            int ii, jj, kk;
            int icNeurons = 0;
            int icWeights = 0;
            double initWeight;
            string label;

            // layer zero, the input layer.
            // Create neurons: exactly the same number of neurons as the input
            // vector of 29x29=841 pixels, and no weights/connections
            pLayer = new Layer("Layer00");
            NN.Layers.Add(pLayer);

            for (ii = 0; ii < 841; ++ii)
            {
                label = String.Format("Layer00_Neuron%04d_Num%06d", ii, icNeurons);
                pLayer.Neurons.Add(new Neuron(label));
                icNeurons++;
            }

            // layer one:
            // This layer is a convolutional layer that has 6 feature maps. Each feature
            // map is 13x13, and each unit in the feature maps is a 5x5 convolutional kernel
            // of the input layer.
            // So, there are 13x13x6 = 1014 neurons, (5x5+1)x6 = 156 weights
            pLayer = new Layer("Layer01", pLayer);
            NN.Layers.Add(pLayer);

            for (ii = 0; ii < 1014; ++ii)
            {
                label = String.Format("Layer01_Neuron%04d_Num%06d", ii, icNeurons);
                pLayer.Neurons.Add(new Neuron(label));
                icNeurons++;
            }

            for (ii = 0; ii < 156; ++ii)
            {
                label = String.Format("Layer01_Weight{0:D4}_Num{1:D6}", ii, icWeights);
                initWeight = 0.05 * Utils.UNIFORM_PLUS_MINUS_ONE();
                pLayer.Weights.Add(new Weight(label, initWeight));
                icWeights++;
            }

            // interconnections with previous layer: this is difficult
            // The previous layer is a top-down bitmap image that has been padded to size 29x29
            // Each neuron in this layer is connected to a 5x5 kernel in its feature map, which
            // is also a top-down bitmap of size 13x13.  We move the kernel by TWO pixels, i.e., we
            // skip every other pixel in the input image
            int[] kernelTemplate = new int[]
                {
                    0, 1, 2, 3, 4,
                    29, 30, 31, 32, 33,
                    58, 59, 60, 61, 62,
                    87, 88, 89, 90, 91,
                    116, 117, 118, 119, 120
                };
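
            // (Each template entry is 29*row + col for one cell of the 5x5 window,
            // because the padded input bitmap is 29 pixels wide: row 1 of the
            // kernel starts at 29, row 2 at 58, and so on.)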

            int iNumWeight;
            int fm;

            for (fm = 0; fm < 6; ++fm)
            {
                for (ii = 0; ii < 13; ++ii)
                {
                    for (jj = 0; jj < 13; ++jj)
                    {
                        iNumWeight = fm * 26; // 26 is the number of weights per feature map
                        Neuron n = pLayer.Neurons[jj + ii * 13 + fm * 169];

                        n.AddConnection(UInt32.MaxValue, (uint)iNumWeight++); // bias weight

                        for (kk = 0; kk < 25; ++kk)
                        {
                            // note: max value of index == 840, corresponding to 841 neurons in previous layer
                            n.AddConnection((uint)(2 * jj + 58 * ii + kernelTemplate[kk]), (uint)iNumWeight++);
                        }
                    }
                }
            }
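
            // In effect, neuron (ii, jj) of each feature map samples the 5x5 input
            // window whose top-left pixel is (2*ii, 2*jj): the 58*ii term steps down
            // two 29-pixel rows at a time, and 2*jj steps right two columns.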

            // layer two:
            // This layer is a convolutional layer that has 50 feature maps.  Each feature
            // map is 5x5, and each unit in the feature maps is a 5x5 convolutional kernel
            // of corresponding areas of all 6 of the previous layers, each of which is a 13x13 feature map
            // So, there are 5x5x50 = 1250 neurons, (5x5+1)x6x50 = 7800 weights
            pLayer = new Layer("Layer02", pLayer);
            NN.Layers.Add(pLayer);

            for (ii = 0; ii < 1250; ++ii)
            {
                label = String.Format("Layer02_Neuron%04d_Num%06d", ii, icNeurons);
                pLayer.Neurons.Add(new Neuron(label));
                icNeurons++;
            }

            for (ii = 0; ii < 7800; ++ii)
            {
                label = String.Format("Layer02_Weight{0:D4}_Num{1:D6}", ii, icWeights);
                initWeight = 0.05 * Utils.UNIFORM_PLUS_MINUS_ONE();
                pLayer.Weights.Add(new Weight(label, initWeight));
                icWeights++;
            }

            // Interconnections with previous layer: this is difficult
            // Each feature map in the previous layer is a top-down bitmap image whose size
            // is 13x13, and there are 6 such feature maps.  Each neuron in one 5x5 feature map of this
            // layer is connected to a 5x5 kernel positioned correspondingly in all 6 parent
            // feature maps, and there are individual weights for the six different 5x5 kernels.  As
            // before, we move the kernel by TWO pixels, i.e., we
            // skip every other pixel in the input image.  The result is 50 different 5x5 top-down bitmap
            // feature maps
            int[] kernelTemplate2 = new int[25]
                {
                    0, 1, 2, 3, 4,
                    13, 14, 15, 16, 17,
                    26, 27, 28, 29, 30,
                    39, 40, 41, 42, 43,
                    52, 53, 54, 55, 56
                };
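
            // (As above, each template entry is 13*row + col, since each parent
            // feature map is a 13x13 bitmap.)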

            for (fm = 0; fm < 50; ++fm)
            {
                for (ii = 0; ii < 5; ++ii)
                {
                    for (jj = 0; jj < 5; ++jj)
                    {
                        iNumWeight = fm * 26; // 26 is the number of weights per feature map
                        Neuron n = pLayer.Neurons[jj + ii * 5 + fm * 25];

                        n.AddConnection(UInt32.MaxValue, (uint)iNumWeight++); // bias weight
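                        // The six calls below connect to the same 5x5 window in each
                        // of the six parent feature maps; each map holds 13x13 = 169
                        // neurons, hence the offsets 169, 338, 507, 676 and 845.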

                        for (kk = 0; kk < 25; ++kk)
                        {
                            // note: max value of index == 1013, corresponding to 1014 neurons in previous layer
                            n.AddConnection((uint)(2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(169 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(338 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(507 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(676 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(845 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                        }
                    }
                }
            }

            // layer three:
            // This layer is a fully-connected layer with 100 units.  Since it is fully-connected,
            // each of the 100 neurons in the layer is connected to all 1250 neurons in
            // the previous layer.
            // So, there are 100 neurons and 100*(1250+1)=125100 weights
            pLayer = new Layer("Layer03", pLayer);
            NN.Layers.Add(pLayer);

            for (ii = 0; ii < 100; ++ii)
            {
                label = String.Format("Layer03_Neuron%04d_Num%06d", ii, icNeurons);
                pLayer.Neurons.Add(new Neuron(label));
                icNeurons++;
            }

            for (ii = 0; ii < 125100; ++ii)
            {
                label = String.Format("Layer03_Weight{0:D4}_Num{1:D6}", ii, icWeights);
                initWeight = 0.05 * Utils.UNIFORM_PLUS_MINUS_ONE();
                pLayer.Weights.Add(new Weight(label, initWeight));
                icWeights++;
            }

            // Interconnections with previous layer: fully-connected
            iNumWeight = 0; // weights are not shared in this layer

            for (fm = 0; fm < 100; ++fm)
            {
                Neuron n = pLayer.Neurons[fm];
                n.AddConnection(UInt32.MaxValue, (uint)iNumWeight++); // bias weight

                for (ii = 0; ii < 1250; ++ii)
                {
                    n.AddConnection((uint)ii, (uint)iNumWeight++);
                }
            }

            // layer four, the final (output) layer:
            // This layer is a fully-connected layer with 10 units.  Since it is fully-connected,
            // each of the 10 neurons in the layer is connected to all 100 neurons in
            // the previous layer.
            // So, there are 10 neurons and 10*(100+1)=1010 weights
            pLayer = new Layer("Layer04", pLayer);
            NN.Layers.Add(pLayer);

            for (ii = 0; ii < 10; ++ii)
            {
                label = String.Format("Layer04_Neuron%04d_Num%06d", ii, icNeurons);
                pLayer.Neurons.Add(new Neuron(label));
                icNeurons++;
            }

            for (ii = 0; ii < 1010; ++ii)
            {
                label = String.Format("Layer04_Weight{0:D4}_Num{1:D6}", ii, icWeights);
                initWeight = 0.05 * Utils.UNIFORM_PLUS_MINUS_ONE();
                pLayer.Weights.Add(new Weight(label, initWeight));
                icWeights++;
            }

            // Interconnections with previous layer: fully-connected
            iNumWeight = 0; // weights are not shared in this layer

            for (fm = 0; fm < 10; ++fm)
            {
                Neuron n = pLayer.Neurons[fm];
                n.AddConnection(UInt32.MaxValue, (uint)iNumWeight++); // bias weight

                for (ii = 0; ii < 100; ++ii)
                {
                    n.AddConnection((uint)ii, (uint)iNumWeight++);
                }
            }

            //SetModifiedFlag( true );
        }
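
Once built, the network is driven through the same Calculate call shown in code example #1. A hedged usage sketch follows; the array sizes match the layer sizes above, but the project's actual inference entry point may differ:

            // Classify one padded 29x29 grayscale image.
            double[] input = new double[841];   // one value per input-layer neuron
            double[] output = new double[10];   // one score per digit class 0..9

            m_NN.Calculate(input, 841, output, 10);

            // Pick the class with the highest activation.
            int best = 0;
            for (int i = 1; i < 10; ++i)
            {
                if (output[i] > output[best])
                {
                    best = i;
                }
            }

            Console.WriteLine("Recognized digit: " + best);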
Code Example #3
File: Layer.cs Project: igor-kalashnikov/visionary
 /// <summary>
 /// Initializes a new instance of the <see cref="Layer"/> class.
 /// </summary>
 /// <param name="str">
 /// The str.
 /// </param>
 /// <param name="pPrev">
 /// The p prev.
 /// </param>
 public Layer(string str, Layer pPrev = null)
 {
     this.label = str;
     this.previousLayer = pPrev;
 }
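
For reference, minimal sketches of the Weight, Connection, and Neuron types the examples rely on, reconstructed from their call sites. Apart from the weight.value field read in code example #1 and the constructor and overload shapes visible above, every member name here is an assumption rather than the project's actual declaration:

 using System.Collections.Generic;

 public class Weight
 {
     public string label;
     public double value; // read directly as weight.value in code example #1

     public Weight(string label, double value)
     {
         this.label = label;
         this.value = value;
     }
 }

 public class Connection
 {
     public uint NeuronIndex; // index into the previous layer's Neurons;
                              // UInt32.MaxValue marks the bias input
     public uint WeightIndex; // index into the owning layer's Weights

     public Connection(uint neuronIndex, uint weightIndex)
     {
         this.NeuronIndex = neuronIndex;
         this.WeightIndex = weightIndex;
     }
 }

 public class Neuron
 {
     public string Label;
     public List<Connection> Connections = new List<Connection>();

     public Neuron(string label = "")
     {
         this.Label = label;
     }

     // Both overloads used by the examples append one connection.
     public void AddConnection(Connection connection)
     {
         this.Connections.Add(connection);
     }

     public void AddConnection(uint neuronIndex, uint weightIndex)
     {
         this.Connections.Add(new Connection(neuronIndex, weightIndex));
     }
 }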