Exemplo n.º 1
0
        public void EnumGradientsTest()
        {
            // A convolution layer exposes exactly two learnable tensors:
            // the weight matrix W and the bias vector B.
            var shape = new Shape(Shape.BWHC, -1, 20, 20, 10);
            var layer = new ConvolutionLayer(shape, 100, new Kernel(3, 4, 3, 3, 2, 1), MatrixLayout.ColumnMajor, null);

            Assert.AreEqual(2, layer.EnumGradients().Count());
        }
Exemplo n.º 2
0
        public void ConstructorTest1()
        {
            // Verifies layer geometry and weight initialization for every matrix layout.
            const int NumberOfFilters = 100;
            Shape  shape  = new Shape(Shape.BWHC, 2, 10, 12, 3);
            Kernel kernel = new Kernel(3, 4, 3, 3, 2, 1);

            foreach (MatrixLayout matrixLayout in Enum.GetValues(typeof(MatrixLayout)).OfType<MatrixLayout>())
            {
                ConvolutionLayer layer = new ConvolutionLayer(shape, NumberOfFilters, kernel, matrixLayout, null);

                Assert.AreEqual(NumberOfFilters, layer.NumberOfNeurons);
                Assert.AreEqual(matrixLayout, layer.MatrixLayout);

                Assert.AreEqual(1, layer.NumberOfOutputs);
                CollectionAssert.AreEqual(new[] { 2, 5, 5, NumberOfFilters }, layer.OutputShape.Axes);

                // The weight matrix is oriented by layout: filters along rows or columns.
                int[] expectedWAxes = matrixLayout == MatrixLayout.RowMajor
                    ? new[] { NumberOfFilters, kernel.Size * shape.GetAxis(Axis.C) }
                    : new[] { kernel.Size * shape.GetAxis(Axis.C), NumberOfFilters };
                CollectionAssert.AreEqual(expectedWAxes, layer.W.Axes);

                // Weights are randomly initialized (not all zero) with roughly zero mean.
                Assert.IsFalse(layer.W.Weights.Take(layer.W.Length).All(x => x == 0.0f));
                Assert.AreEqual(0.0, layer.W.Weights.Take(layer.W.Length).Average(), 0.01f);

                // Biases start at exactly zero.
                CollectionAssert.AreEqual(new[] { NumberOfFilters }, layer.B.Axes);
                Assert.IsTrue(layer.B.Weights.Take(layer.B.Length).All(x => x == 0.0f));
            }
        }
Exemplo n.º 3
0
        public void ArchitectureConstructorTest1()
        {
            // Architecture string: 16 filters, 3x3 kernel, 4x1 stride (S), -1 padding (P).
            const string Architecture = "16C3+4x1(S)+-1(P)";
            var shape = new Shape(Shape.BWHC, 2, 10, 12, 3);

            var layer = new ConvolutionLayer(shape, Architecture, null);

            Assert.AreEqual(16, layer.NumberOfNeurons);
            Assert.AreEqual(Architecture, layer.Architecture);

            // Kernel geometry must be parsed exactly from the architecture string.
            Assert.AreEqual(3, layer.Kernel.Width);
            Assert.AreEqual(3, layer.Kernel.Height);
            Assert.AreEqual(4, layer.Kernel.StrideX);
            Assert.AreEqual(1, layer.Kernel.StrideY);
            Assert.AreEqual(-1, layer.Kernel.PaddingX);
            Assert.AreEqual(-1, layer.Kernel.PaddingY);

            CollectionAssert.AreEqual(new[] { 2, 3, 8, 16 }, layer.OutputShape.Axes);
            Assert.AreEqual(1, layer.NumberOfOutputs);
            Assert.AreEqual(MatrixLayout.RowMajor, layer.MatrixLayout);

            // Weights: random (not all zero), approximately zero-mean.
            CollectionAssert.AreEqual(new[] { 16, 9 * shape.GetAxis(Axis.C) }, layer.W.Axes);
            Assert.IsFalse(layer.W.Weights.Take(layer.W.Length).All(x => x == 0.0f));
            Assert.AreEqual(0.0, layer.W.Weights.Take(layer.W.Length).Average(), 0.05f);

            // Biases: all zero.
            CollectionAssert.AreEqual(new[] { 16 }, layer.B.Axes);
            Assert.IsTrue(layer.B.Weights.Take(layer.B.Length).All(x => x == 0.0f));
        }
Exemplo n.º 4
0
    private void CreateDNet()
    {
        // Discriminator: two conv -> leaky-ReLU -> max-pool stages, then a dense
        // head (128 -> 1) ending in a sigmoid for binary classification.
        ConvolutionLayer  conv0       = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true);
        ActivationLayer   activation0 = new ActivationLayer(new Relu(leaky: true));
        MaxPooling2DLayer pool0       = new MaxPooling2DLayer();
        ConvolutionLayer  conv1       = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true);
        ActivationLayer   activation1 = new ActivationLayer(new Relu(leaky: true));
        MaxPooling2DLayer pool1       = new MaxPooling2DLayer();
        FlattenLayer      flatten     = new FlattenLayer();
        LinearLayer       linear0     = new LinearLayer(numNeurons: 128);
        ActivationLayer   activation2 = new ActivationLayer(new Relu(leaky: true));
        LinearLayer       linear1     = new LinearLayer(numNeurons: 1);
        ActivationLayer   activation3 = new ActivationLayer(new Sigmoid());

        // Layer order defines the forward pass.
        Layer[] stack =
        {
            conv0, activation0, pool0,
            conv1, activation1, pool1,
            flatten,
            linear0, activation2,
            linear1, activation3,
        };

        foreach (var layer in stack)
        {
            dNet.Add(layer);
        }

        dNet.Compile(new BinaryCrossEntropy(), new Adam(0.001d));
    }
Exemplo n.º 5
0
        public Model convolutionLayer(int inputCount, int inputWidth, int inputHeight, int prevFilterWidth, int prevFilterHeight, int filterCount, int filterWidth, int filterHeight, int padding, int stride)
        {
            // Fluent builder step: register a new convolution layer and return
            // the model itself so calls can be chained.
            initLayers.Add(new ConvolutionLayer(inputCount, inputWidth, inputHeight, prevFilterWidth, prevFilterHeight, filterCount, filterWidth, filterHeight, padding, stride));
            return this;
        }
Exemplo n.º 6
0
        public void CloneTest()
        {
            // A clone must serialize to exactly the same JSON as its source layer.
            var shape    = new Shape(Shape.BWHC, -1, 20, 20, 10);
            var original = new ConvolutionLayer(shape, 100, new Kernel(3, 4, 3, 3, 2, 1), MatrixLayout.ColumnMajor, null);

            var copy = original.Clone() as ConvolutionLayer;

            Assert.AreEqual(JsonConvert.SerializeObject(original), JsonConvert.SerializeObject(copy));
        }
Exemplo n.º 7
0
        public void ConvText_3D_1d()
        {
            // Convolving a 2-channel 3x3 input with a single 2x2x2 filter collapses
            // the depth axis, yielding one 2x2 feature map.
            double[,,] input = new double[2, 3, 3]
            {
                {
                    { 0.0, 1.0, 2.0 },
                    { 3.0, 4.0, 5.0 },
                    { 6.0, 7.0, 8.0 },
                },
                {
                    { 11, 12, 13 },
                    { 14, 15, 16 },
                    { 17, 18, 19 },
                }
            };

            double[,,] filter = new double[2, 2, 2]
            {
                {
                    { 1, 2 },
                    { -1, 3 },
                },
                {
                    { 3, -1 },
                    { -2, 1 },
                }
            };

            double[,,] expected = new double[1, 2, 2]
            {
                {
                    { 19, 25 },
                    { 37, 43 },
                }
            };

            var layer = new ConvolutionLayer
            {
                Inputs  = input,
                Filters = new List<double[, , ]> { filter },
            };

            var actual = layer.GetResult(filter);

            Assert.AreEqual(expected.Length, actual.Length);

            // Compare every (depth, row, column) cell within a 0.1 tolerance.
            for (int d = 0; d < actual.GetLength(0); d++)
            {
                for (int r = 0; r < actual.GetLength(1); r++)
                {
                    for (int c = 0; c < actual.GetLength(2); c++)
                    {
                        Assert.AreEqual(expected[d, r, c], actual[d, r, c], 0.1);
                    }
                }
            }
        }
Exemplo n.º 8
0
        public void ConvTest_3D_2D()
        {
            // A 3-channel 3x3 input convolved with one 2x2x2 filter: the filter also
            // slides along the depth axis, producing a 2x2x2 output volume.
            double[,,] input = new double[3, 3, 3]
            {
                {
                    { 0.0, 1.0, 2.0 },
                    { 3.0, 4.0, 5.0 },
                    { 6.0, 7.0, 8.0 },
                },
                {
                    { 11, 12, 13 },
                    { 14, 15, 16 },
                    { 17, 18, 19 },
                },
                {
                    { 1, 2, 3 },
                    { 4, 5, 6 },
                    { 7, 8, 9 },
                }
            };

            double[,,] filter = new double[2, 2, 2]
            {
                {
                    { 1, 2 },
                    { -1, 3 },
                },
                {
                    { 3, -1 },
                    { -2, 1 },
                }
            };

            double[,,] expected = new double[2, 2, 2]
            {
                {
                    { 19, 25 },
                    { 37, 43 },
                },
                {
                    { 64, 70 },
                    { 82, 88 },
                }
            };

            var layer = new ConvolutionLayer
            {
                Inputs  = input,
                Filters = new List<double[, , ]> { filter },
            };

            var actual = layer.GetResult(filter);

            Assert.That.AreEqual(expected, actual);
        }
Exemplo n.º 9
0
        public void ReverseConv_Test_3D_2d()
        {
            // Checks the backward (full/transposed) convolution: a 2x2x2 upstream
            // gradient convolved against a 2x2x2 filter expands back to the 3x3x3
            // input-gradient volume.

            // Upstream gradient (dL/dy) flowing back from the next layer.
            var dy = new double[, , ]
            {
                {
                    { 1, 2 },
                    { 3, 4 },
                },
                {
                    { -1, 3 },
                    { 4, 2 },
                },
            };

            // Filter weights used in the forward pass.
            var fs = new double[, , ]
            {
                {
                    { 1, 2 },
                    { -1, 3 },
                },
                {
                    { 3, -1 },
                    { -2, 1 },
                },
            };

            // Expected input gradient (dL/dx); hand-computed reference values.
            var expected = new double[, , ]
            {
                {
                    { 1, 4, 4 },
                    { 2, 11, 14 },
                    { -3, 5, 12 },
                },
                {
                    { 2, 6, 4 },
                    { 12, 10, 11 },
                    { -10, 5, 10 },
                },
                {
                    { -3, 10, -3 },
                    { 14, -5, 1 },
                    { -8, 0, 2 },
                }
            };

            var cl1 = new ConvolutionLayer
            {
                Filters = new List <double[, , ]> {
                    fs
                },
            };

            var actual = cl1.ReverseConvolution(dy, fs);

            Assert.That.AreEqual(expected, actual);
        }
Exemplo n.º 10
0
        public void SerializeTest()
        {
            // Round-trip check: serialize, deserialize, serialize again —
            // the two JSON strings must be identical.
            var shape    = new Shape(Shape.BWHC, -1, 20, 20, 10);
            var original = new ConvolutionLayer(shape, 100, new Kernel(3, 4, 3, 3, 2, 1), MatrixLayout.ColumnMajor, null);

            string json         = JsonConvert.SerializeObject(original);
            var    roundTripped = JsonConvert.DeserializeObject<ConvolutionLayer>(json);

            Assert.AreEqual(json, JsonConvert.SerializeObject(roundTripped));
        }
Exemplo n.º 11
0
        static void Main(string[] args)
        {
            // Fixed seed keeps weight initialization and synthetic data reproducible.
            Random random = new Random(13);

            NeuralNetwork cNN = new NeuralNetwork(random, 0.2);

            // First convolution stage: 8 filters of 3x3, "same" padding on 28x28 input.
            var conv = new ConvolutionLayer(new RectifiedLinearUnit(0.01), 8, 3, 3);
            conv.IsSame = true;

            cNN.AddNewLayer(new Shape(28, 28), conv);
            cNN.AddNewLayer(new MaxPooling(2, 2));

            // Deeper conv stages: 16 -> 32 filters, with an UnPooling upsample in between.
            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            cNN.AddNewLayer(new MaxPooling(2, 2));

            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 32, 3, 3));
            cNN.AddNewLayer(new UnPooling(2, 2));

            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            cNN.AddNewLayer(new MaxPooling(2, 2));

            // Classifier head: flatten -> 20 ReLU units -> 2-way softmax.
            cNN.AddNewLayer(new Flatten());
            cNN.AddNewLayer(new FeedForwardLayer(20, new RectifiedLinearUnit(0.01)));
            cNN.AddNewLayer(new FeedForwardLayer(2, new SoftmaxUnit()));

            Console.WriteLine(cNN);

            GraphCPU graph = new GraphCPU(false);

            // Two random 28x28x2 samples with one-hot targets.
            NNValue sample0 = NNValue.Random(28, 28, 2, random);
            NNValue sample1 = NNValue.Random(28, 28, 2, random);
            NNValue target0 = new NNValue(new double[] { 0, 1 });
            NNValue target1 = new NNValue(new double[] { 1, 0 });

            DataSetNoReccurent data = new DataSetNoReccurent(
                new NNValue[] { sample0, sample1 },
                new NNValue[] { target0, target1 },
                new CrossEntropyWithSoftmax());

            TrainerCPU trainer = new TrainerCPU(TrainType.MiniBatch, new Adam());
            trainer.BatchSize = 2;
            trainer.Train(10000, 0.001, cNN, data, 2, 0.0001);

            // Forward passes after training (results unused; kept for inspection).
            double[] prediction0 = cNN.Activate(sample0, graph).DataInTensor;
            double[] prediction1 = cNN.Activate(sample1, graph).DataInTensor;
        }
Exemplo n.º 12
0
        public void backwardLearn()
        {
            // Run one backward/learn pass on a copy of the prepared layer and
            // compare the updated kernel weights with the values precomputed in setup().
            var layerCopy = new ConvolutionLayer(this.layer);
            layerCopy.backwardLearn(entries, nextGradients, learnRate);

            var actualWeights = layerCopy.Kernels.map(k => k.Weights);

            for (int i = 0; i < expectedLearnedWeights.Length; i++)
            {
                Assert.IsTrue(expectedLearnedWeights[i].EEquals(actualWeights[i]));
            }
        }
Exemplo n.º 13
0
    // Use this for initialization.
    // Unity entry point: wires up the CSV data source, the UI fields, the
    // convolution layer + fully-connected network, the plotting graphs, and the
    // filter-visualization sprites.
    void Start()
    {
        m_texLoad = GetComponent <CSVReader> ();

        // Mirror the configured sizes into the UI text fields.
        m_batchField.text = batchSize.ToString();
        m_trainField.text = m_texLoad.trainLength.ToString();
        m_testField.text  = m_texLoad.testLength.ToString();

        errors = new List <Point> ();

        // 28x28 input, 5x5 filters, 8 feature maps, learning rate 0.01.
        convLayer = new ConvolutionLayer(28, 5, 8, 0.01f);

        m_hiddenNeurons.text = hiddenNeurons.ToString();

        // 1152 inputs — presumably 8 feature maps of 12x12 after pooling; verify
        // against ConvolutionLayer's output size.
        nn = new NeuralNetwork(1152, hiddenNeurons, 10);

        errorDisplay = new Graph(30, 5, new Vector3(-15, -10, 0));

        functionGraph = new Graph(3, 3, new Vector3(25, -8, 0));

        // NOTE(review): the loop bound is m_featureMapVisualizers.Length but the
        // bodies index m_filters and m_filterMapVisualizers — confirm all three
        // arrays are the same length, otherwise this can throw or skip entries.
        for (int i = 0; i < m_featureMapVisualizers.Length; ++i)
        {
            Texture2D tex = m_filters [i];

            tex.filterMode = FilterMode.Point;

            m_filterMapVisualizers[i].GetComponent <SpriteRenderer>().sprite = Sprite.Create(tex, new Rect(0, 0, tex.width, tex.height), new Vector2(0.5f, 0.5f));
        }

        // Initially convert filter images to filter text files (before training) - utility function

        /*
         * for (int i = 0; i < m_filters.Length; ++i) {
         *
         *      Color[] filters = m_filters [i].GetPixels ();
         *
         *      string[] lines = new string[5];
         *
         *      // 5x5 filters
         *
         *      for (int x = 0; x < 5; ++x) {
         *              for (int y = 0; y < 5; ++y) {
         *                      lines [y] += filters [y * 5 + x].r.ToString() + ",";
         *              }
         *      }
         *
         *      File.WriteAllLines ("Assets/Filters/filter" + i.ToString() + ".txt", lines);
         * }
         */
    }
Exemplo n.º 14
0
    private void CreateGNet()
    {
        // Generator: three zero-padded 3x3 convolutions with leaky-ReLU between them.
        ConvolutionLayer conv1          = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 4, zeroPadding: true);
        ActivationLayer  act            = new ActivationLayer(new Relu(leaky: true));
        ConvolutionLayer conv           = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 4, zeroPadding: true);
        ActivationLayer  act1           = new ActivationLayer(new Relu(leaky: true));
        ConvolutionLayer convolutionOut = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 4, zeroPadding: true);
        // NOTE(review): actOut is constructed but never added to gNet below —
        // confirm whether the output activation was meant to be included.
        ActivationLayer  actOut         = new ActivationLayer(new Relu(leaky: true));

        foreach (var layer in new Layer[] { conv1, act, conv, act1, convolutionOut })
        {
            gNet.Add(layer);
        }

        gNet.Compile(new BinaryCrossEntropy(), new Adam(0.001d));
    }
Exemplo n.º 15
0
        public void ArchitectureConstructorTest2()
        {
            // "16C" omits the kernel size, so the constructor must reject it.
            // NOTE(review): the rethrow implies an expected-exception attribute on
            // this test that is not visible in this chunk — confirm.
            string architecture = "16C";

            try
            {
                ConvolutionLayer layer = new ConvolutionLayer(new Shape(Shape.BWHC, -1, 10, 12, 3), architecture, null);
            }
            catch (ArgumentException e)
            {
                // The message must match the localized resource exactly.
                string expectedMessage = new ArgumentException(
                    string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture),
                    nameof(architecture)).Message;
                Assert.AreEqual(expectedMessage, e.Message);
                throw;
            }
        }
Exemplo n.º 16
0
    // Renders the 24x24 sub-sampled version of test image `index` as a grayscale sprite.
    void DrawNumSub(int index)
    {
        Texture2D tex = new Texture2D(24, 24);

        // Pre-process the raw pixels into a flat 576-value (24*24) intensity array.
        float[] vals = ConvolutionLayer.ImagePreProcessSubSample(m_texLoad.GetPixels(index, 0)).toArray();

        Color[] pixels = new Color[576];
        for (int i = 0; i < 576; ++i)
        {
            pixels[i] = new Color(vals[i], vals[i], vals[i]);
        }

        tex.SetPixels(pixels);
        tex.Apply();
        tex.filterMode = FilterMode.Point;

        m_visualizer.GetComponent<SpriteRenderer>().sprite = Sprite.Create(tex, new Rect(0, 0, tex.width, tex.height), new Vector2(0.5f, 0.5f));
    }
Exemplo n.º 17
0
        public void ConvTest_2d_1d()
        {
            // Single-channel 3x3 input convolved with a 2x2 filter -> one 2x2 map.
            double[,,] input = new double[1, 3, 3]
            {
                {
                    { 0.0, 1.0, 2.0 },
                    { 3.0, 4.0, 5.0 },
                    { 6.0, 7.0, 8.0 },
                }
            };

            double[,,] filter = new double[1, 2, 2]
            {
                {
                    { 1, 2 },
                    { -1, 3 },
                }
            };

            double[,,] expected = new double[1, 2, 2]
            {
                {
                    { 11, 16 },
                    { 26, 31 },
                }
            };

            var layer = new ConvolutionLayer
            {
                Inputs  = input,
                Filters = new List<double[, , ]> { filter },
            };

            var actual = layer.GetResult(filter);

            Assert.That.AreEqual(expected, actual);
        }
Exemplo n.º 18
0
    // Use this for initialization.
    // Unity entry point: builds the convolution layer + network for a binary
    // classifier, sets up the plotting graphs, and displays the filter sprites.
    void Start()
    {
        errors = new List <Point> ();

        // 28x28 input, 5x5 filters, 8 feature maps, learning rate 0.01.
        convLayer = new ConvolutionLayer(28, 5, 8, 0.01f);

        m_texLoad = GetComponent <CSVReader> ();

        // 1152 inputs — presumably the flattened conv output; 81 hidden, 2 classes.
        nn = new NeuralNetwork(1152, 81, 2);

        errorDisplay = new Graph(30, 5, new Vector3(-15, -15, 0));

        functionGraph = new Graph(3, 3, new Vector3(25, -8, 0));

        // NOTE(review): loop bound uses m_featureMapVisualizers.Length while the
        // body indexes m_filters and m_filterMapVisualizers — confirm the arrays
        // share the same length.
        for (int i = 0; i < m_featureMapVisualizers.Length; ++i)
        {
            Texture2D tex = m_filters [i];

            tex.filterMode = FilterMode.Point;

            m_filterMapVisualizers[i].GetComponent <SpriteRenderer>().sprite = Sprite.Create(tex, new Rect(0, 0, tex.width, tex.height), new Vector2(0.5f, 0.5f));
        }
    }
Exemplo n.º 19
0
 // Test fixture setup: builds a randomly-sized ConvolutionLayer plus the
 // reference outputs, gradients, and learned weights that the tests compare
 // against. The statement order matters: every GlobalRandom call advances the
 // shared RNG state, so the draws must happen in exactly this sequence.
 public void setup()
 {
     // Random layer geometry: kernel count (depth), batch size, and input dims.
     this.depth        = GlobalRandom.NextInt(2, 5);
     this.countEntries = GlobalRandom.NextInt(2, 5);
     this.inDims       = GlobalRandom.NextIntArr(countEntries, 2, 5);
     this.entrySize    = inDims.product();
     // All `depth` kernels share one randomly-sized prototype.
     this.kernels      = ArrayBuilder.repeat(
         new Kernel(inDims.map(x => GlobalRandom.NextInt(2, x))), depth);
     this.layer = new ConvolutionLayer(this.kernels, this.inDims);
     // Random input entries plus containers for the expected results.
     MultiMatrix[]   entries             = ArrayBuilder.repeat(() => MultiMatrix.Build.random(inDims), countEntries);
     MultiMatrix[][] expectedOutputs     = new MultiMatrix[countEntries][];
     MultiMatrix[][] nextGradients       = new MultiMatrix[countEntries][];
     MultiMatrix[]   expectedInGradients = new MultiMatrix[countEntries];
     // Independent copies of the kernels so backwardLearn below does not
     // disturb the layer's own weights.
     Kernel[]        kerns = kernels.map(k => new Kernel(k));
     for (int i = 0; i < countEntries; i++)
     {
         expectedInGradients[i] = MultiMatrix.Build.repeat(inDims, 0);
         expectedOutputs[i]     = new MultiMatrix[depth];
     }
     // Compute, per entry and per kernel: the forward output, a random upstream
     // gradient, the accumulated input gradient, and the learned kernel weights.
     for (int i = 0; i < countEntries; i++)
     {
         nextGradients[i] = new MultiMatrix[depth];
         for (var j = 0; j < kerns.Length; j++)
         {
             expectedOutputs[i][j]   = kernels[j].slideOver(entries[i]);
             nextGradients[i][j]     = MultiMatrix.Build.random(kernels[0].getOutputDims(inDims));
             expectedInGradients[i] += kernels[j].getGradientInput(entries[i], nextGradients[i][j]);
             kerns[j].backwardLearn(entries[i], nextGradients[i][j], learnRate);
         }
     }
     // Stash everything in the fixture fields the tests read.
     this.entries                = entries.toMatrixD();
     this.expectedOutputs        = expectedOutputs.map(o => new MultiMatrix(o)).toMatrixD();
     this.nextGradients          = nextGradients.map(g => new MultiMatrix(g)).toMatrixD();
     this.expectedGradients      = expectedInGradients.toMatrixD();
     this.expectedLearnedWeights = kerns.map(k => k.Weights);
 }
Exemplo n.º 20
0
        public void Run()
        {
            // Training hyperparameters.
            var batchSize    = 10;
            var epoch        = 50;
            var learningRate = 0.005;
            var outputSize   = 10;

            // Input layer for 28x28 MNIST images.
            var inputLayer = new InputLayer(28, 28);

            // Two convolution + pooling stages.
            var layer00 = new ConvolutionLayer(inputLayer, (3, 1, 20), DLF.ReLU, u => u < 0);
            var layer01 = new PoolingLayer(layer00, (2, 2));
            var layer02 = new ConvolutionLayer(layer01, (3, 2, 50), DLF.ReLU, u => u < 0);
            var layer03 = new PoolingLayer(layer02, (2, 2));

            // Softmax output layer over the 10 digit classes.
            var layer04 = new SoftmaxLayer(layer03, outputSize);

            Func <IEnumerable <Tuple <double, double> >, double> errorFunction = DLF.ErrorFunctionCrossEntropy;

            var machine = new Machine(learningRate, epoch, batchSize, new Validator(outputSize)
                                      , x => errorFunction(x) * (1.0 / batchSize)
                                      , inputLayer
                                      , layer00
                                      , layer01
                                      , layer02
                                      , layer03
                                      , layer04);

            // Shuffle the MNIST data once, then split: first 10000 for training
            // pickup, the remainder for validation pickup.
            var testData = DLF.Shuffle(new MNISTLoader().Load()).ToArray();

            // Pick an equal number of samples for each digit "0".."9".
            var pickNum = 20;
            string[] digits = { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" };

            var a = digits
                    .SelectMany(d => testData.Take(10000).Where(x => x.Name == d).Take(pickNum))
                    .ToArray();
            var b = digits
                    .SelectMany(d => testData.Skip(10000).Where(x => x.Name == d).Take(pickNum))
                    .ToArray();

            machine.Learn(a, b);
        }
Exemplo n.º 21
0
        /// <summary>
        /// Builds the real (instantiated) network scheme from the virtual topology:
        /// for each virtual layer type it constructs the concrete layer, feeding it
        /// data pulled from the already-built layers of the previous level.
        /// </summary>
        /// <param name="filterMatrixSize">Size of the filter matrices.</param>
        /// <param name="poolingMatrixSize">Size of the pooling matrices.</param>
        private Dictionary <int, List <Layer> > InitializeRealScheme(int filterMatrixSize, int poolingMatrixSize)
        {
            var realScheme    = new Dictionary <int, List <Layer> >();
            var virtualScheme = _topology.GetScheme();

            // Seed input: the first sample of the first class in the data set.
            var firstValue = _dataSet.GetDataSetForNumber().First().Value.First();

            // Levels must be processed in order: each case below reads
            // realScheme[pair.Key - 1], which is only valid once the previous
            // level has been fully added.
            foreach (var pair in virtualScheme)
            {
                var virtualElements = pair.Value;
                var realElements    = new List <Layer>();

                var index = 0;
                foreach (var layerType in virtualElements)
                {
                    Layer element = null;

                    switch (layerType)
                    {
                    case LayerType.Input:
                        element = new InputLayer(firstValue);
                        element.Initialize(NetworkModeType.Learning);
                        break;

                    case LayerType.Convolution:
                        // Convolution is only supported directly after a single
                        // (fan-out) input layer; anything else is unimplemented.
                        var previousKey = pair.Key - 1;

                        var previousElements = realScheme[previousKey];
                        var previousType     = virtualScheme[previousKey].First();

                        if (previousElements.Count != virtualElements.Count &&
                            previousType.Equals(LayerType.Input))
                        {
                            var previousElement = previousElements.FirstOrDefault() as InputLayer;

                            if (previousElement is null)
                            {
                                throw new Exception("Предыдущий слой оказался Null!");
                            }

                            var map = previousElement.GetData(LayerReturnType.Map);
                            element = new ConvolutionLayer(map, filterMatrixSize);
                            element.Initialize(NetworkModeType.Learning);
                        }
                        else
                        {
                            throw new NotImplementedException();
                        }
                        break;

                    case LayerType.Subsampling:
                        // Subsampling follows its convolution layer one-to-one
                        // (same index within the level).
                        previousKey = pair.Key - 1;

                        previousElements = realScheme[previousKey];
                        previousType     = virtualScheme[previousKey][index];

                        if (previousElements.Count == virtualElements.Count &&
                            previousType.Equals(LayerType.Convolution))
                        {
                            var previousElement = previousElements[index] as ConvolutionLayer;

                            if (previousElement is null)
                            {
                                throw new Exception("Предыдущий слой оказался Null!");
                            }

                            var map = previousElement.GetData(LayerReturnType.Map);
                            element = new SubsamplingLayer(map, poolingMatrixSize);
                            element.Initialize(NetworkModeType.Learning);
                        }
                        else
                        {
                            throw new NotImplementedException();
                        }
                        break;

                    case LayerType.Hidden:
                        // Hidden layers are fed one-to-one from subsampling layers.
                        previousKey = pair.Key - 1;

                        previousElements = realScheme[previousKey];
                        previousType     = virtualScheme[previousKey][index];

                        if (previousElements.Count == virtualElements.Count &&
                            previousType.Equals(LayerType.Subsampling))
                        {
                            var previousElement = previousElements[index] as SubsamplingLayer;

                            if (previousElement is null)
                            {
                                throw new Exception("Предыдущий слой оказался Null!");
                            }

                            var neurons = previousElement.GetData(LayerReturnType.Neurons) as List <NeuronFromMap>;
                            element = new HiddenLayer(neurons);
                            element.Initialize(NetworkModeType.Learning);
                        }
                        else
                        {
                            throw new NotImplementedException();
                        }
                        break;

                    case LayerType.Output:
                        // The output layer fans in: it concatenates the neurons of
                        // every hidden layer on the previous level.
                        previousKey = pair.Key - 1;

                        previousElements = realScheme[previousKey];
                        previousType     = virtualScheme[previousKey][index];

                        if (previousElements.Count > virtualElements.Count &&
                            previousType.Equals(LayerType.Hidden))
                        {
                            var neurons = new List <Neuron>();

                            foreach (var elementInLastLayer in previousElements)
                            {
                                var previousElement = elementInLastLayer as HiddenLayer;

                                if (previousElement is null)
                                {
                                    throw new Exception("Предыдущий слой оказался Null!");
                                }

                                var data = previousElement.GetData(LayerReturnType.Neurons) as List <NeuronFromMap>;
                                neurons.AddRange(data);
                            }

                            var outputs = neurons.Select(neuron => neuron.Output).ToList();

                            element = new OutputLayer(outputs);
                            element.Initialize(NetworkModeType.Learning);
                        }
                        else
                        {
                            throw new NotImplementedException();
                        }
                        break;

                    default:
                        throw new Exception("Неизвестный тип слоя!");
                    }

                    realElements.Add(element);
                    ++index;
                }

                realScheme.Add(pair.Key, realElements);
            }

            return(realScheme);
        }
Exemplo n.º 22
0
        /// <summary>
        /// Loads the network settings file and initializes a network from it.
        /// </summary>
        /// <param name="path">Path to the XML settings file.</param>
        /// <param name="inputData">Square input data matrix fed into the input layer.</param>
        /// <returns>The network topology: layer-group index mapped to the layers in that group.</returns>
        /// <exception cref="ArgumentException">
        /// Thrown when <paramref name="inputData"/> dimensions do not match the persisted input size.
        /// </exception>
        /// <exception cref="NotImplementedException">
        /// Thrown when a convolution block is fed by more than one preceding element.
        /// </exception>
        public static Dictionary<int, List<Layer>> LoadAndInitialize(string path, double[,] inputData)
        {
            // Weights in the settings file use '.' as the decimal separator.
            // Parse them culture-independently instead of the previous
            // Replace(".", ",") hack, which only worked on comma-decimal locales.
            double ParseInvariant(string text) =>
                double.Parse(text,
                             System.Globalization.NumberStyles.Float,
                             System.Globalization.CultureInfo.InvariantCulture);

            var document     = XDocument.Load(path);
            var realTopology = new Dictionary<int, List<Layer>>();

            // Root element of the persisted network description.
            var baseElement = document.Elements().ToList()
                              .Find(element => string.Equals(
                                        element.Name.LocalName,
                                        IOConstants.NETWORK_BASE_ELEMENT_NAME,
                                        StringComparison.InvariantCultureIgnoreCase));

            var inputDataSize = baseElement.Elements().ToList()
                                .Find(element => string.Equals(
                                          element.Name.LocalName,
                                          IOConstants.INPUT_DATA_SIZE_ELEMENT_NAME,
                                          StringComparison.InvariantCultureIgnoreCase)).Value;

            var size = int.Parse(inputDataSize, System.Globalization.CultureInfo.InvariantCulture);

            var xSize = inputData.GetLength(0);
            var ySize = inputData.GetLength(1);

            // The network accepts only square input of exactly the persisted size.
            if (xSize != size || ySize != size)
            {
                throw new ArgumentException(
                    $"Размер входных данных не соотвествует ожидаемому размеру {size}!",
                    nameof(inputData));
            }

            var inputLayer = new InputLayer(inputData);

            inputLayer.Initialize(Enums.NetworkModeType.Recognizing);

            var currentNumber = Topology.FIRST_NUMBER;

            realTopology.Add(currentNumber, new List<Layer> {
                inputLayer
            });

            var layers = baseElement.Elements().ToList()
                         .FindAll(element => string.Equals(
                                      element.Name.LocalName,
                                      IOConstants.LAYERS_ELEMENT_NAME,
                                      StringComparison.InvariantCultureIgnoreCase));

            // Key of the most recently added layer group; each new group is
            // inserted under previousKey + 1.
            var previousKey = realTopology.First().Key;

            foreach (var layer in layers)
            {
                var layerElements = layer.Elements().ToList();
                var name          = layerElements.First().Name.LocalName;

                if (string.Equals(name, IOConstants.CONVOLUTION_LAYER_ELEMENT_NAME,
                                  StringComparison.InvariantCultureIgnoreCase))
                {
                    var previousElementsCount = realTopology[previousKey].Count;

                    // Only a single feature map feeding a convolution block is supported.
                    if (previousElementsCount != 1)
                    {
                        throw new NotImplementedException();
                    }

                    // Kept dynamic to preserve the original runtime overload
                    // resolution for the ConvolutionLayer constructor.
                    dynamic inputToLayer = realTopology[previousKey].First()
                                           .GetData(Enums.LayerReturnType.Map) as FigureMap;

                    var layersInTopology = new List<Layer>();

                    foreach (var element in layerElements)
                    {
                        // First child is the persisted filter matrix with its size attribute.
                        var filterMatrixElement = element
                                                  .Elements().ToList().First();

                        var filterMatrixSize = int.Parse(filterMatrixElement
                                                         .Attribute(IOConstants.SIZE_ATTRIBUTE_NAME)
                                                         .Value);

                        var cells = new List<ModifiedCell>();

                        foreach (var cellElement in filterMatrixElement.Elements())
                        {
                            var x     = int.Parse(cellElement.Attribute(IOConstants.X_ATTRIBUTE_NAME).Value);
                            var y     = int.Parse(cellElement.Attribute(IOConstants.Y_ATTRIBUTE_NAME).Value);
                            var value = ParseInvariant(cellElement.Value);

                            cells.Add(new ModifiedCell(x, y, value));
                        }

                        var filterMatrix = new FilterMatrix(filterMatrixSize,
                                                            Enums.NetworkModeType.Recognizing, cells);

                        var convolutionLayer = new ConvolutionLayer(inputToLayer,
                                                                    filterMatrix, Enums.NetworkModeType.Recognizing);

                        layersInTopology.Add(convolutionLayer);
                    }

                    realTopology.Add(previousKey + 1, layersInTopology);
                    ++previousKey;
                }

                if (string.Equals(name, IOConstants.SUBSAMPLING_LAYER_ELEMENT_NAME,
                                  StringComparison.InvariantCultureIgnoreCase))
                {
                    var previousElements = realTopology[previousKey];
                    var layersInTopology = new List<Layer>();

                    // Each subsampling element pools the map of the previous
                    // element with the same position index.
                    var indexOfElementInPreviousPart = 0;
                    foreach (var element in layerElements)
                    {
                        var inputDataInLayer = previousElements[indexOfElementInPreviousPart]
                                               .GetData(Enums.LayerReturnType.Map) as FigureMap;

                        var poolingMatrixSize = int.Parse(element.Elements()
                                                          .ToList().First().Attribute(IOConstants.SIZE_ATTRIBUTE_NAME).Value);

                        var subsamplingLayer = new SubsamplingLayer(inputDataInLayer, poolingMatrixSize);
                        subsamplingLayer.Initialize(Enums.NetworkModeType.Recognizing);

                        layersInTopology.Add(subsamplingLayer);
                        ++indexOfElementInPreviousPart;
                    }

                    realTopology.Add(previousKey + 1, layersInTopology);
                    ++previousKey;
                }

                if (string.Equals(name, IOConstants.HIDDEN_LAYER_ELEMENT_NAME,
                                  StringComparison.InvariantCultureIgnoreCase))
                {
                    var previousElements = realTopology[previousKey];
                    var layersInTopology = new List<Layer>();

                    var indexOfElementInPreviousPart = 0;
                    foreach (var element in layerElements)
                    {
                        // Inputs come from the matching element of the previous
                        // group; weights are restored from the file.
                        var temporaryNeurons = previousElements[indexOfElementInPreviousPart]
                                               .GetData(Enums.LayerReturnType.Neurons) as List<NeuronFromMap>;

                        var realNeurons = new List<NeuronFromMap>();

                        var index = 0;
                        foreach (var neuronElement in element.Elements().First().Elements())
                        {
                            var weights = new List<double>();

                            foreach (var weightElement in neuronElement.Elements())
                            {
                                weights.Add(ParseInvariant(weightElement.Value));
                            }

                            var inputs = temporaryNeurons[index].Inputs;
                            var neuron = new NeuronFromMap(inputs, weights);

                            realNeurons.Add(neuron);
                            ++index;
                        }

                        var hiddenLayer = new HiddenLayer(realNeurons);
                        layersInTopology.Add(hiddenLayer);

                        ++indexOfElementInPreviousPart;
                    }

                    realTopology.Add(previousKey + 1, layersInTopology);
                    ++previousKey;
                }
            }

            // The output layer consumes the outputs of every neuron of the last
            // hidden group. NOTE(review): relies on Dictionary preserving
            // insertion order for Last() — holds for add-only dictionaries in
            // practice, but is not a documented guarantee.
            var inputValues = new List<double>();

            foreach (HiddenLayer hiddenLayer in realTopology.Last().Value)
            {
                inputValues.AddRange((hiddenLayer.GetData(Enums.LayerReturnType.Neurons)
                                      as List<NeuronFromMap>).Select(neuron => neuron.Output));
            }

            var neurons        = new List<Neuron>();
            var neuronsElement = layers.Last().Elements().First().Elements().First().Elements();

            foreach (var outputNeuron in neuronsElement)
            {
                var weights = outputNeuron.Elements()
                              .Select(weight => ParseInvariant(weight.Value)).ToList();

                neurons.Add(new Neuron(inputValues, weights));
            }

            var outputLayer = new OutputLayer(neurons,
                                              Enums.NetworkModeType.Recognizing, Enums.OutputLayerType.NumberRecognizing);

            outputLayer.Initialize(Enums.NetworkModeType.Recognizing);

            var lastKey = realTopology.Last().Key;

            realTopology.Add(lastKey + 1, new List<Layer> {
                outputLayer
            });

            return realTopology;
        }
Exemplo n.º 23
0
        public void ForwardBackwardTest()
        {
            const int FilterCount = 2;

            // Candidate values swept by the test. Commented-out entries are
            // preserved from the original configuration.
            MatrixLayout[] layouts     = { MatrixLayout.ColumnMajor /*, MatrixLayout.RowMajor*/ };
            string[]       formats     = { Shape.BWHC /*, Shape.BHWC, Shape.BCHW*/ };
            int[]          kernelSizes = { 1, 2, 3, 4, 5 };
            int[]          strides     = { 1, 2, 3 };
            int[]          paddings    = { 0, 2, -2 };

            foreach (MatrixLayout layout in layouts)
            {
                foreach (string format in formats)
                {
                    Shape shape = new Shape(format, -1, 13, 11, 2);

                    // Exercise every combination of kernel geometry.
                    foreach (int kernelWidth in kernelSizes)
                    foreach (int kernelHeight in kernelSizes)
                    foreach (int strideX in strides)
                    foreach (int strideY in strides)
                    foreach (int paddingX in paddings)
                    foreach (int paddingY in paddings)
                    {
                        Kernel kernel = new Kernel(kernelWidth, kernelHeight, strideX, strideY, paddingX, paddingY);
                        ConvolutionLayer layer = new ConvolutionLayer(shape, FilterCount, kernel, layout, null);
                        layer.W.Randomize(this.random);
                        layer.B.Randomize(this.random);

                        for (int miniBatch = 1; miniBatch <= 3; miniBatch++)
                        {
                            Session session = new Session(true);

                            layer.W.ClearGradient();
                            layer.B.ClearGradient();

                            Tensor x = new Tensor(null, shape.Reshape(Axis.B, miniBatch));
                            x.Randomize(this.random);

                            Tensor y = layer.Forward(session, new[] { x })[0];

                            // Forward pass must match the reference computation.
                            Tensor expected = ConvolutionLayerTest.CalculateY(layer.W, x, layer.B, kernel, FilterCount, layout);
                            Helpers.AreTensorsEqual(expected, y);

                            // unroll the graph
                            y.RandomizeGradient(this.random);
                            session.Unroll();

                            // should be dy * numberOfCells in y
                            Tensor expectedDB = ConvolutionLayerTest.CalculateDB(y);
                            Helpers.AreGradientsEqual(expectedDB, layer.B);

                            // should be dy * x'
                            Tensor expectedDW = ConvolutionLayerTest.CalculateDW(layer.W, x, y, kernel, FilterCount, layout);
                            Helpers.AreGradientsEqual(expectedDW, layer.W);

                            // should be dW' * dy
                            Tensor expectedDX = ConvolutionLayerTest.CalculateDX(layer.W, x, y, kernel, FilterCount, layout);
                            Helpers.AreGradientsEqual(expectedDX, x);
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Entry point: builds the CNN layer stack and runs it in learning,
        /// testing, or interactive recognition mode, depending on the mode the
        /// network was constructed with.
        /// </summary>
        /// <param name="args">Command-line arguments (unused).</param>
        public static void Main(string[] args)
        {
            // Learning rate is the compile-time constant 1e-6
            // (previously computed at runtime as 1 * Math.Pow(10, -6)).
            NeuralNetwork network       = new NeuralNetwork(ExecMode.Learning, 1e-6);
            string        imageFilePath = @"C:\Users\cleist\source\repos\LicensPlateRecognition\LicensPlateRecognition\LicensPlateRecognition\Image\";

            // Kept for the commented-out CSV workflow below; GetFiles also
            // validates that the data directories exist.
            string[] trainingData = Directory.GetFiles(imageFilePath + "TrainingData", "*");
            string[] testData     = Directory.GetFiles(imageFilePath + "TestData", "*");
            // key value pairs for training or test input and desired output
            Dictionary<string, double[]> keyValuePairs = new Dictionary<string, double[]>();

            // Declare network layers: declare in order of traversion! Since it will be the order of the layers list in network class
            InputLayer       inputLayer = new InputLayer(28, 28, 1, network);
            ConvolutionLayer convLayer1 = new ConvolutionLayer(new Filter(5, 5, inputLayer.Depth), 20, 1, network);
            //ConvolutionLayer convLayer2 = new ConvolutionLayer(new Filter(5, 5, convLayer1.Filters.Count), 20, 1, network);
            PoolingLayer     pooling1   = new PoolingLayer(network);
            ConvolutionLayer convLayer3 = new ConvolutionLayer(new Filter(5, 5, convLayer1.Filters.Count), 40, 1, network);
            //ConvolutionLayer convLayer4 = new ConvolutionLayer(new Filter(3, 3, convLayer3.Filters.Count), 40, 1, network);
            PoolingLayer        pooling2             = new PoolingLayer(network);
            FullyConnectedLayer fullyConnectedLayer1 = new FullyConnectedLayer(network);
            //FullyConnectedLayer fullyConnectedLayer2 = new FullyConnectedLayer(network);
            OutputLayer outputLayer = new OutputLayer(network);
            // Number of output classes (digits 0-9).
            int outClass = 10;

            // ------------------------ MNIST Dataset ------------------------
            MNIST mnist = new MNIST();

            // ------------------------ MNIST Dataset ------------------------

            if (network.ExecMode == ExecMode.Learning)
            {
                // create a csv with tuple of image and class value
                //network.CreateCSV(imageFilePath, trainingData, "training.csv");

                //network.LoadCSV(imageFilePath, keyValuePairs, "training.csv", outClass);

                // ------------------------ MNIST Dataset ------------------------
                mnist.ReadTrainMNIST();
                // ------------------------ MNIST Dataset ------------------------

                var epochs = 59;
                // must be divisible through number of training data
                var miniBatchSize = 10;

                network.Learning(keyValuePairs, outClass, epochs, miniBatchSize, imageFilePath, mnist /* Mnist */);

                Console.WriteLine("Press any key to continue...");
                Console.ReadKey();
            }

            if (network.ExecMode == ExecMode.Testing)
            {
                // create a csv with tuple of image and class value
                //network.CreateCSV(imageFilePath, testData, "testing.csv");

                //network.LoadCSV(imageFilePath, keyValuePairs, "testing.csv", outClass);

                // ------------------------ MNIST Dataset ------------------------
                mnist.ReadTestMNIST();
                // ------------------------ MNIST Dataset ------------------------

                network.Testing(outClass, keyValuePairs, mnist /* Mnist */);

                Console.WriteLine("Press any key to continue...");
                Console.ReadKey();
            }

            if (network.ExecMode == ExecMode.Normal)
            {
                // Interactive loop: read an image path, print class scores.
                while (true)
                {
                    Console.WriteLine("Please Insert an image filepath...");
                    try
                    {
                        string   image  = Console.ReadLine();
                        double[] output = network.ForwardPass(outClass, image, null);
                        for (int i = 0; i < output.Length; i++)
                        {
                            Console.Write("{0} ", output[i]);
                        }
                        Console.WriteLine();
                    }
                    catch
                    {
                        // Deliberate catch-all: a bad path or unsupported format
                        // must not terminate the interactive loop.
                        Console.WriteLine("No image or supported image format!");
                    }
                }
            }
        }