Example #1
        public void UpSample()
        {
            var maxPoses = new int[, , ]
            {
                {
                    { 0, 1, 1, 0 },
                    { 0, 0, 0, 0 },
                    { 1, 0, 0, 0 },
                    { 0, 0, 0, 1 },
                }
            };
            var maxs = new double[, , ]
            {
                {
                    { 4, 5 },
                    { 6, 7 },
                }
            };

            var expected = new double[, , ]
            {
                {
                    { 0, 4, 5, 0 },
                    { 0, 0, 0, 0 },
                    { 6, 0, 0, 0 },
                    { 0, 0, 0, 7 },
                }
            };

            PoolingLayer pl = new PoolingLayer(howLayers: 1, maxPositions: maxPoses, poolSize: 2);

            var actual = pl.UpSample(maxs);

            Assert.That.AreEqual(expected, actual);
        }
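For reference, a minimal sketch (not the library's API) of the scatter operation that UpSample appears to perform, shown in a single-layer 2D view; the name UpSampleSketch and its signature are illustrative assumptions:

        // Hypothetical reference for UpSample: walk the max-position mask in
        // row-major order and place each pooled value back where its maximum
        // was recorded. Reproduces the expected tensor in the test above.
        static double[,] UpSampleSketch(int[,] maxPositions, double[] pooledValues)
        {
            int rows = maxPositions.GetLength(0), cols = maxPositions.GetLength(1);
            var result = new double[rows, cols];
            int next = 0;
            for (int r = 0; r < rows; r++)
                for (int c = 0; c < cols; c++)
                    if (maxPositions[r, c] == 1)
                        result[r, c] = pooledValues[next++];
            return result;
        }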
Example #2
        public void PoolingForward()
        {
            PoolingLayer
                cpu = new PoolingLayer(new TensorInfo(58, 58, 3), PoolingInfo.Default, ActivationType.LeakyReLU),
                gpu = new CuDnnPoolingLayer(cpu.InputInfo, PoolingInfo.Default, ActivationType.LeakyReLU);

            TestForward(cpu, gpu, 400);
        }
Example #3
        public void PoolingForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 58 * 58 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 58 * 58 * 3);
            PoolingLayer
                cpu = new PoolingLayer(new TensorInfo(58, 58, 3), PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
                gpu = new CuDnnPoolingLayer(cpu.InputInfo, PoolingInfo.Default, ActivationFunctionType.LeakyReLU);

            TestForward(cpu, gpu, x);
        }
Example #4
        public ConvSuperResolution()
        {
            superres_enc_front = InputLayer.Create(StartSide, 3);
            superres_enc_back  = ActivationLayer.Create <ReLU>();

            var pooling_0 = PoolingLayer.Create(2, 2);
            var pooling_1 = PoolingLayer.Create(2, 2);
            var pooling_2 = PoolingLayer.Create(2, 2);
            var pooling_3 = PoolingLayer.Create(2, 2);

            superres_enc_front.Append(
                ConvLayer.Create(5, 128, 2).Append(                                                //o = 96
                    ActivationLayer.Create <ReLU>().Append(
                        pooling_0.Append(                                                          //o = 48
                            ConvLayer.Create(3, 128, 1).Append(                                    //o = 48
                                ActivationLayer.Create <ReLU>().Append(
                                    pooling_1.Append(                                              //o = 24
                                        ConvLayer.Create(3, 64, 1).Append(                         //o = 24
                                            ActivationLayer.Create <ReLU>().Append(
                                                pooling_2.Append(                                  //o = 12
                                                    ConvLayer.Create(3, 32, 1).Append(             //o = 12
                                                        ActivationLayer.Create <ReLU>().Append(
                                                            pooling_3.Append(                      //o = 6
                                                                ConvLayer.Create(3, 32, 1).Append( //o = 6
                                                                    superres_enc_back
                                                                    ))))))))))))));

            superres_dec_front = InputLayer.Create(6, 32);
            superres_dec_back  = ActivationLayer.Create <Tanh>();

            superres_dec_front.Append(
                ConvLayer.Create(3, 32, 1).Append(                                                   //o = 6
                    ActivationLayer.Create <ReLU>().Append(
                        UnpoolingLayer.Create(pooling_3).Append(                                     //o = 12
                            ConvLayer.Create(3, 64, 1).Append(                                       //o = 12
                                ActivationLayer.Create <ReLU>().Append(
                                    UnpoolingLayer.Create(pooling_2).Append(                         //o = 24
                                        ConvLayer.Create(3, 128, 1).Append(                          //o = 24
                                            ActivationLayer.Create <ReLU>().Append(
                                                UnpoolingLayer.Create(pooling_1).Append(             //o = 48
                                                    ConvLayer.Create(3, 128, 1).Append(              //o = 48
                                                        ActivationLayer.Create <ReLU>().Append(
                                                            UnpoolingLayer.Create(pooling_0).Append( //o = 96
                                                                ConvLayer.Create(5, 3, 2).Append(    //o = 96
                                                                    superres_dec_back
                                                                    ))))))))))))));

            superres_enc_back.Append(superres_dec_front);

            //TODO: come up with an approach that saves the convolution/multiplication indexes and rearranges the weights etc so they fit into cache better
            //TODO: unpooling layer tied to pooling layers

            //Initialize Weights
            superres_enc_front.SetupInternalState();
            superres_enc_front.InitializeWeights(new UniformWeightInitializer(0, 0.001f));
        }
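The //o side annotations above are consistent with stride-1 convolutions, assuming a Create(kernelSize, filters, padding) signature; a sketch of the assumed arithmetic (which also implies StartSide is 96):

        // Assumed formula behind the //o comments: a stride-1 convolution maps
        // side i to i + 2*padding - kernelSize + 1, and each 2x2 pooling layer
        // halves the side. E.g. 96 + 2*2 - 5 + 1 = 96, then pooling gives 48.
        static int ConvOutputSide(int inputSide, int kernelSize, int padding)
            => inputSide + 2 * padding - kernelSize + 1;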
Example #5
            public virtual Builder withPoolingLayer(int width, int height)
            {
                FeatureMapsLayer lastLayer    = LastFeatureMapLayer;
                PoolingLayer     poolingLayer = new PoolingLayer(lastLayer, new comp.Dimension2D(width, height));

                network.addLayer(poolingLayer);
                ConvolutionalUtils.fullConnectMapLayers(lastLayer, poolingLayer);

                return(this);
            }
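A hypothetical usage of the builder method above; apart from withPoolingLayer, every call in this chain (the Builder constructor, withConvolutionLayer, withFullConnectedLayer, build) is an assumption modeled on Neuroph-style builders:

            // Usage sketch (hypothetical API, kept as a comment):
            // var network = new ConvolutionalNetwork.Builder(new Dimension2D(28, 28), 1)
            //                   .withConvolutionLayer(5, 5, 6)
            //                   .withPoolingLayer(2, 2)
            //                   .withFullConnectedLayer(10)
            //                   .build();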
Example #6
        internal static XmlElement SavePoolingLayer(string type, XmlDocument doc, PoolingLayer layer)
        {
            var el = doc.CreateElement(type);

            XmlStorage.AddChild(el, "Rows", layer.Rows.ToString());
            XmlStorage.AddChild(el, "Columns", layer.Columns.ToString());
            XmlStorage.AddChild(el, "RowStride", layer.RowStride.ToString());
            XmlStorage.AddChild(el, "ColumnStride", layer.ColumnStride.ToString());

            return(el);
        }
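A hypothetical loading counterpart to SavePoolingLayer; XmlStorage's read API is not part of this snippet, so the sketch falls back on plain System.Xml:

        // Hypothetical loader; assumes the child element names written by
        // SavePoolingLayer above.
        internal static void LoadPoolingLayer(XmlElement el,
                                              out int rows, out int columns,
                                              out int rowStride, out int columnStride)
        {
            rows         = int.Parse(el["Rows"].InnerText);
            columns      = int.Parse(el["Columns"].InnerText);
            rowStride    = int.Parse(el["RowStride"].InnerText);
            columnStride = int.Parse(el["ColumnStride"].InnerText);
        }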
Example #7
        public void Test_Return_Correct_Output_If_Input_Not_Resized()
        {
            var inputArray = new double[, , ]
            {
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 10, 5, 3 },
                    { 5, 1, 2, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 10, 7, 3 },
                    { 5, 1, 3, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 4, 5, 3 },
                    { 5, 1, 2, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 5, 5, 3 },
                    { 5, 1, 2, 3 }
                }
            };
            var expectedArray = new double[, , ]
            {
                {
                    { 5, 11, 8, 7 },
                    { 5, 10, 7, 3 }
                },
                {
                    { 5, 11, 8, 7 },
                    { 5, 5, 5, 3 }
                }
            };

            var input    = new Array3D(inputArray);
            var expected = new Array3D(expectedArray);

            var pooling = new PoolingLayer();
            var result  = pooling.FormOutput(input);

            Assert.IsTrue(result as Array3D == expected);
        }
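A minimal reference implementation (not the library's code) of the pooling this test exercises: a 2x2 window with stride 2 over the first two dimensions, with the third dimension kept as-is:

        // Reference max pooling consistent with the expected values above.
        static double[,,] MaxPool2x2(double[,,] input)
        {
            int d0 = input.GetLength(0) / 2, d1 = input.GetLength(1) / 2, d2 = input.GetLength(2);
            var output = new double[d0, d1, d2];
            for (int i = 0; i < d0; i++)
                for (int j = 0; j < d1; j++)
                    for (int k = 0; k < d2; k++)
                        output[i, j, k] = Math.Max(
                            Math.Max(input[2 * i, 2 * j, k], input[2 * i, 2 * j + 1, k]),
                            Math.Max(input[2 * i + 1, 2 * j, k], input[2 * i + 1, 2 * j + 1, k]));
            return output;
        }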
Example #8
        internal static INetworkLayer CpuLayerDeserialize([NotNull] Stream stream, LayerType type)
        {
            switch (type)
            {
            case LayerType.FullyConnected: return(FullyConnectedLayer.Deserialize(stream));

            case LayerType.Convolutional: return(ConvolutionalLayer.Deserialize(stream));

            case LayerType.Pooling: return(PoolingLayer.Deserialize(stream));

            case LayerType.Output: return(OutputLayer.Deserialize(stream));

            case LayerType.Softmax: return(SoftmaxLayer.Deserialize(stream));

            default: throw new ArgumentOutOfRangeException(nameof(type), $"The {type} layer type is not supported by the default deserializer");
            }
        }
Example #9
        public unsafe void PoolingBackward()
        {
            // Setup
            Tensor.New(400, 58 * 58 * 3, out Tensor x);
            KerasWeightsProvider.FillWithHeEtAlUniform(x, 10);
            PoolingLayer
                cpu = new PoolingLayer(new TensorInfo(58, 58, 3), PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
                gpu = new CuDnnPoolingLayer(cpu.InputInfo, PoolingInfo.Default, ActivationFunctionType.LeakyReLU);

            gpu.Forward(x, out Tensor z, out Tensor a);
            a.Free();
            x.Duplicate(out Tensor x1);
            x.Duplicate(out Tensor x2);
            Tensor.New(z.Entities, z.Length, out Tensor delta);
            KerasWeightsProvider.FillWithHeEtAlUniform(delta, 10);

            // Backward
            cpu.Backpropagate(x, delta, x1, ActivationFunctions.LeakyReLUPrime);
            gpu.Backpropagate(x, delta, x2, ActivationFunctions.LeakyReLUPrime);
            bool   valid = true;
            float *px = (float *)x1.Ptr.ToPointer(), px2 = (float *)x2.Ptr.ToPointer();
            int    count = 0;

            for (int i = 0; i < x1.Size; i++)
            {
                if (px[i].EqualsWithDelta(px2[i], 1e-5f))
                {
                    continue;
                }
                if (px[i].EqualsWithDelta(px2[i] * 100f, 1e-5f))
                {
                    count++; // The cuDNN pooling backward method occasionally returns a value scaled by 0.01 for some reason (fewer than 2% of the values)
                }
                else
                {
                    valid = false;
                    break;
                }
            }
            Assert.IsTrue(valid && count * 100f / x1.Size < 2);
            x.Free();
            x1.Free();
            x2.Free();
            z.Free();
            delta.Free();
        }
Example #10
        public void setup()
        {
            this.countEntries = GlobalRandom.NextInt(2, 5);
            this.inDims       = GlobalRandom.NextIntArr(countEntries, 2, 5);
            this.entrySize    = inDims.product();
            this.pooler       = new Pooler(inDims.map(x => GlobalRandom.NextInt(2, x)));
            this.layer        = new PoolingLayer(this.pooler, this.inDims);

            MultiMatrix[] entries           = ArrayBuilder.repeat(() => MultiMatrix.Build.random(inDims), countEntries);
            MultiMatrix[] expectedOutputs   = new MultiMatrix[countEntries];
            MultiMatrix[] nextGradients     = new MultiMatrix[countEntries];
            MultiMatrix[] expectedGradients = new MultiMatrix[countEntries];
            for (int i = 0; i < countEntries; i++)
            {
                expectedOutputs[i]   = pooler.slideOver(entries[i]);
                nextGradients[i]     = MultiMatrix.Build.random(pooler.getOutputDims(inDims));
                expectedGradients[i] = pooler.getGradientInput(entries[i], nextGradients[i]);
            }

            this.inputs            = entries.toMatrixD();
            this.expectedOutputs   = expectedOutputs.toMatrixD();
            this.nextGradients     = nextGradients.toMatrixD();
            this.expectedGradients = expectedGradients.toMatrixD();
        }
Example #11
        public ReversibleAutoencoder()
        {
            superres_enc_front = InputLayer.Create(StartSide, 3);
            superres_enc_back  = ActivationLayer.Create <LeakyReLU>();

            var pooling_0 = PoolingLayer.Create(2, 2);
            var pooling_1 = PoolingLayer.Create(2, 2);
            var pooling_2 = PoolingLayer.Create(2, 2);
            var pooling_3 = PoolingLayer.Create(2, 2);

            superres_enc_front.Append(
                ConvLayer.Create(3, 12).Append(                                                                                                                                       //o = 94, 16
                    ActivationLayer.Create <LeakyReLU>().Append(
                        ConvLayer.Create(3, 12).Append(                                                                                                                               //o = 92, 16
                            ActivationLayer.Create <LeakyReLU>().Append(
                                ConvLayer.Create(3, 12).Append(                                                                                                                       //o = 90, 16
                                    ActivationLayer.Create <LeakyReLU>().Append(
                                        ConvLayer.Create(3, 12).Append(                                                                                                               //o = 88, 16
                                            ActivationLayer.Create <LeakyReLU>().Append(
                                                pooling_0.Append(                                                                                                                     //o = 44, 16
                                                    ConvLayer.Create(3, 12).Append(                                                                                                   //o = 42, 16
                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                            ConvLayer.Create(3, 12).Append(                                                                                           //o = 40, 16
                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                    ConvLayer.Create(3, 12).Append(                                                                                   //o = 38, 16
                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                            ConvLayer.Create(3, 12).Append(                                                                           //o = 36, 16
                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                    ConvLayer.Create(3, 12).Append(                                                                   //o = 34, 16
                                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                                            ConvLayer.Create(3, 12).Append(                                                           //o = 32, 16
                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                    ConvLayer.Create(3, 12).Append(                                                   //o = 30, 16
                                                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                            ConvLayer.Create(3, 12).Append(                                           //o = 28, 16
                                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                    pooling_2.Append(                                                 //o = 14, 16
                                                                                                                        ConvLayer.Create(3, 12).Append(                               //o = 12, 16
                                                                                                                            ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                ConvLayer.Create(3, 12).Append(                       //o = 10, 16
                                                                                                                                    ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                        ConvLayer.Create(3, 12).Append(               //o = 8, 16
                                                                                                                                            ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                                pooling_3.Append(                     //o = 4, 16
                                                                                                                                                    ConvLayer.Create(3, 8, 1).Append( //o = 4, 8
                                                                                                                                                        superres_enc_back
                                                                                                                                                        )))))))))))))))))))))))))))))))))));

            superres_dec_front = InputLayer.Create(4, 8);
            superres_dec_back  = ActivationLayer.Create <Tanh>();

            superres_dec_front.Append(                                                                                                                                                //o = 4, 8
                ConvLayer.Create(3, 8, 1).Append(                                                                                                                                     //o = 4, 16
                    ActivationLayer.Create <LeakyReLU>().Append(
                        UnpoolingLayer.Create(pooling_3).Append(                                                                                                                      //o = 8, 16
                            ActivationLayer.Create <LeakyReLU>().Append(
                                ConvLayer.Create(3, 12, 2).Append(                                                                                                                    //o = 10, 16
                                    ActivationLayer.Create <LeakyReLU>().Append(
                                        ConvLayer.Create(3, 12, 2).Append(                                                                                                            //o = 12, 16
                                            ActivationLayer.Create <LeakyReLU>().Append(
                                                ConvLayer.Create(3, 12, 2).Append(                                                                                                    //o = 14, 16
                                                    UnpoolingLayer.Create(pooling_2).Append(                                                                                          //o = 28, 16
                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                            ConvLayer.Create(3, 12, 2).Append(                                                                                        //o = 30, 16
                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                    ConvLayer.Create(3, 12, 2).Append(                                                                                //o = 32, 16
                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                            ConvLayer.Create(3, 12, 2).Append(                                                                        //o = 34, 16
                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                    ConvLayer.Create(3, 12, 2).Append(                                                                //o = 36, 16
                                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                                            ConvLayer.Create(3, 12, 2).Append(                                                        //o = 38, 16
                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                    ConvLayer.Create(3, 12, 2).Append(                                                //o = 40, 16
                                                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                            ConvLayer.Create(3, 12, 2).Append(                                        //o = 42, 16
                                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                    ConvLayer.Create(3, 12, 2).Append(                                //o = 44, 16
                                                                                                                        UnpoolingLayer.Create(pooling_0).Append(                      //o = 88, 16
                                                                                                                            ConvLayer.Create(3, 12, 2).Append(                        //o = 120, 16
                                                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                    ConvLayer.Create(3, 12, 2).Append(                //o = 122, 16
                                                                                                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                            ConvLayer.Create(3, 12, 2).Append(        //o = 124, 16
                                                                                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                                                                                    ConvLayer.Create(3, 3, 2).Append( //o = 126, 3
                                                                                                                                                        superres_dec_back
                                                                                                                                                        )))))))))))))))))))))))))))))))))));

            superres_enc_back.Append(superres_dec_front);

            //TODO: come up with an approach that saves the convolution/multiplication indexes and rearranges the weights etc so they fit into cache better
            //TODO: unpooling layer tied to pooling layers

            //Initialize Weights
            superres_enc_front.SetupInternalState();
            superres_enc_front.InitializeWeights(new UniformWeightInitializer(0, 0.001f));
        }
Example #12
        public void Test_Return_Correct_Output_If_Input_Resized()
        {
            var inputArray = new double[, , ]
            {
                {
                    { 0, 12 },
                    { 3, 0 },
                    { 4, 8 },
                    { 1, 1 },
                    { 7, 4 }
                },
                {
                    { 7, 10 },
                    { 1, 7 },
                    { 5, 3 },
                    { 9, 11 },
                    { 1, 1 }
                },
                {
                    { 6, 3 },
                    { 8, 7 },
                    { 0, 5 },
                    { 0, 8 },
                    { 8, 5 }
                },
                {
                    { 3, 4 },
                    { 5, 0 },
                    { 3, 8 },
                    { 11, 11 },
                    { 8, 5 }
                },
                {
                    { 4, 4 },
                    { 6, 5 },
                    { 2, 3 },
                    { 7, 6 },
                    { 10, 2 }
                }
            };
            var expectedArray = new double[, , ]
            {
                {
                    { 7, 12 },
                    { 9, 11 }
                },
                {
                    { 8, 7 },
                    { 11, 11 }
                }
            };

            var input    = new Array3D(inputArray);
            var expected = new Array3D(expectedArray);

            var pooling = new PoolingLayer {
                Stride = 3, Height = 3, Width = 3
            };
            var result = pooling.FormOutput(input);

            Assert.IsTrue(result as Array3D == expected);
        }
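The expected values above are consistent with the layer padding the 5x5 input at its leading edge up to 6x6 so that a whole number of 3x3/stride-3 windows fits; a sketch of the assumed size arithmetic:

        // Assumed resize arithmetic: pooled side = ceil(inputSide / stride),
        // valid when the window size equals the stride as in this test.
        static int PooledSide(int inputSide, int stride)
            => (inputSide + stride - 1) / stride; // ceil(5 / 3) = 2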
Example #13
        public void Test_Training_Return_Correct_Error()
        {
            var inputArray = new double[, , ]
            {
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 10, 5, 3 },
                    { 5, 1, 2, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 10, 7, 3 },
                    { 5, 1, 3, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 4, 5, 3 },
                    { 5, 1, 2, 3 }
                },
                {
                    { 3, 10, 2, 4 },
                    { 5, 11, 8, 7 },
                    { 3, 5, 5, 3 },
                    { 5, 1, 2, 3 }
                }
            };
            var errorArray = new double[, , ]
            {
                {
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0.5, 0.10, 0.7, 0.3 }
                },
                {
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0.5, 0.5, 0.5, 0.3 }
                }
            };
            var expectedFormOutputArray = new double[, , ]
            {
                {
                    { 5, 11, 8, 7 },
                    { 5, 10, 7, 3 }
                },
                {
                    { 5, 11, 8, 7 },
                    { 5, 5, 5, 3 }
                }
            };
            var expectedTrainArray = new double[, , ]
            {
                {
                    { 0, 0, 0, 0 },
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0, 0.10, 0, 0.3 },
                    { 0.5, 0, 0, 0.3 }
                },
                {
                    { 0, 0, 0, 0 },
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0, 0.10, 0.7, 0.3 },
                    { 0.5, 0, 0, 0.3 }
                },
                {
                    { 0, 0, 0, 0 },
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0, 0, 0.5, 0.3 },
                    { 0.5, 0, 0, 0.3 }
                },
                {
                    { 0, 0, 0, 0 },
                    { 0.5, 0.11, 0.8, 0.7 },
                    { 0, 0.5, 0.5, 0.3 },
                    { 0.5, 0, 0, 0.3 }
                }
            };

            var input              = new Array3D(inputArray);
            var error              = new Array3D(errorArray);
            var expectedFormOutput = new Array3D(expectedFormOutputArray);
            var expectedTrain      = new Array3D(expectedTrainArray);

            var pooling = new PoolingLayer();

            var resultFormOutput = pooling.FormOutput(input);
            var resultTrain      = pooling.Train(error, input, resultFormOutput);

            Assert.IsTrue(resultFormOutput as Array3D == expectedFormOutput);
            Assert.IsTrue(resultTrain as Array3D == expectedTrain);
        }
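A minimal sketch (not the library's code) of the error routing that Train appears to perform for the default 2x2/stride-2 case: an input position receives its window's error exactly when it equals the pooled maximum, so ties, as in this test, receive the error more than once:

        // Hypothetical reference for the Train result asserted above.
        static double[,,] RouteError(double[,,] error, double[,,] input, double[,,] pooled)
        {
            int d0 = input.GetLength(0), d1 = input.GetLength(1), d2 = input.GetLength(2);
            var result = new double[d0, d1, d2];
            for (int i = 0; i < d0; i++)
                for (int j = 0; j < d1; j++)
                    for (int k = 0; k < d2; k++)
                        if (input[i, j, k] == pooled[i / 2, j / 2, k])
                            result[i, j, k] = error[i / 2, j / 2, k];
            return result;
        }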
Example #14
        public static void Main(string[] args)
        {
            NeuralNetwork network       = new NeuralNetwork(ExecMode.Learning, 1e-6);
            string        imageFilePath = @"C:\Users\cleist\source\repos\LicensPlateRecognition\LicensPlateRecognition\LicensPlateRecognition\Image\";

            string[] trainingData = Directory.GetFiles(imageFilePath + "TrainingData", "*");
            string[] testData     = Directory.GetFiles(imageFilePath + "TestData", "*");
            // key value pairs for training or test input and desired output
            Dictionary <string, double[]> keyValuePairs = new Dictionary <string, double[]>();

            // Declare the network layers in traversal order, since this determines the order of the layers list in the network class
            InputLayer       inputLayer = new InputLayer(28, 28, 1, network);
            ConvolutionLayer convLayer1 = new ConvolutionLayer(new Filter(5, 5, inputLayer.Depth), 20, 1, network);
            //ConvolutionLayer convLayer2 = new ConvolutionLayer(new Filter(5, 5, convLayer1.Filters.Count), 20, 1, network);
            PoolingLayer     pooling1   = new PoolingLayer(network);
            ConvolutionLayer convLayer3 = new ConvolutionLayer(new Filter(5, 5, convLayer1.Filters.Count), 40, 1, network);
            //ConvolutionLayer convLayer4 = new ConvolutionLayer(new Filter(3, 3, convLayer3.Filters.Count), 40, 1, network);
            PoolingLayer        pooling2             = new PoolingLayer(network);
            FullyConnectedLayer fullyConnectedLayer1 = new FullyConnectedLayer(network);
            //FullyConnectedLayer fullyConnectedLayer2 = new FullyConnectedLayer(network);
            OutputLayer outputLayer = new OutputLayer(network);
            // Declare Output Classes
            int outClass = 10;

            // ------------------------ MNIST Dataset ------------------------
            MNIST mnist = new MNIST();

            // ------------------------ MNIST Dataset ------------------------

            if (network.ExecMode == ExecMode.Learning)
            {
                // create a csv with tuple of image and class value
                //network.CreateCSV(imageFilePath, trainingData, "training.csv");

                //network.LoadCSV(imageFilePath, keyValuePairs, "training.csv", outClass);

                // ------------------------ MNIST Dataset ------------------------
                mnist.ReadTrainMNIST();
                // ------------------------ MNIST Dataset ------------------------

                var epochs = 59;
                // the number of training samples must be evenly divisible by this
                var miniBatchSize = 10;

                network.Learning(keyValuePairs, outClass, epochs, miniBatchSize, imageFilePath, mnist /* Mnist */);

                Console.WriteLine("Press any key to continue...");
                Console.ReadKey();
            }

            if (network.ExecMode == ExecMode.Testing)
            {
                // create a csv with tuple of image and class value
                //network.CreateCSV(imageFilePath, testData, "testing.csv");

                //network.LoadCSV(imageFilePath, keyValuePairs, "testing.csv", outClass);

                // ------------------------ MNIST Dataset ------------------------
                mnist.ReadTestMNIST();
                // ------------------------ MNIST Dataset ------------------------

                network.Testing(outClass, keyValuePairs, mnist /* Mnist */);

                Console.WriteLine("Press any key to continue...");
                Console.ReadKey();
            }

            if (network.ExecMode == ExecMode.Normal)
            {
                while (true)
                {
                    Console.WriteLine("Please Insert an image filepath...");
                    try
                    {
                        string   image  = Console.ReadLine();
                        double[] output = network.ForwardPass(outClass, image, null);
                        for (int i = 0; i < output.Length; i++)
                        {
                            Console.Write("{0} ", output[i]);
                        }
                        Console.WriteLine();
                    }
                    catch
                    {
                        Console.WriteLine("No image or supported image format!");
                    }
                }
            }
        }
Example #15
        public void Run()
        {
            var batchSize    = 10;
            var epoch        = 50;
            var learningRate = 0.005;
            var outputSize   = 10;
            // Input layer
            var inputLayer = new InputLayer(28, 28);
            // Convolution layer
            // Pooling layer
            var layer00 = new ConvolutionLayer(inputLayer, (3, 1, 20), DLF.ReLU, u => u < 0);
            var layer01 = new PoolingLayer(layer00, (2, 2));
            // Convolution layer
            // Pooling layer
            var layer02 = new ConvolutionLayer(layer01, (3, 2, 50), DLF.ReLU, u => u < 0);
            var layer03 = new PoolingLayer(layer02, (2, 2));
            // Output layer
            var layer04 = new SoftmaxLayer(layer03, outputSize);

            Func <IEnumerable <Tuple <double, double> >, double> errorFunction = DLF.ErrorFunctionCrossEntropy;

            var machine = new Machine(learningRate, epoch, batchSize, new Validator(outputSize)
                                      , x => errorFunction(x) * (1.0 / batchSize)
                                      , inputLayer
                                      , layer00
                                      , layer01
                                      , layer02
                                      , layer03
                                      , layer04);
            // Generate the training data
            var testData = DLF.Shuffle(new MNISTLoader().Load()).ToArray();

            // Pick an equal number of each digit 0-9
            var pickNum = 20;
            var a       = new[] {
                testData.Take(10000).Where(x => x.Name == "0").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "1").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "2").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "3").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "4").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "5").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "6").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "7").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "8").Take(pickNum),
                testData.Take(10000).Where(x => x.Name == "9").Take(pickNum),
            }.SelectMany(x => x).ToArray();
            var b = new[] {
                testData.Skip(10000).Where(x => x.Name == "0").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "1").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "2").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "3").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "4").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "5").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "6").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "7").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "8").Take(pickNum),
                testData.Skip(10000).Where(x => x.Name == "9").Take(pickNum),
            }.SelectMany(x => x).ToArray();
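            // Note: the two balanced-pickup blocks above could be collapsed into
            // a range query with the same behavior; a sketch (assuming Name holds
            // the digit as a string):
            // var a = Enumerable.Range(0, 10)
            //     .SelectMany(d => testData.Take(10000).Where(x => x.Name == d.ToString()).Take(pickNum))
            //     .ToArray();
            // var b = Enumerable.Range(0, 10)
            //     .SelectMany(d => testData.Skip(10000).Where(x => x.Name == d.ToString()).Take(pickNum))
            //     .ToArray();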

            machine.Learn(a, b);
        }