Пример #1
0
    /// <summary>
    /// Builds the discriminator network: two conv → leaky-ReLU → max-pool
    /// stages, then a flatten and a small dense head ending in a sigmoid
    /// (single output, trained with binary cross-entropy via Adam).
    /// </summary>
    private void CreateDNet()
    {
        // Convolutional feature extractor (two identical stages).
        dNet.Add(new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true));
        dNet.Add(new ActivationLayer(new Relu(leaky: true)));
        dNet.Add(new MaxPooling2DLayer());

        dNet.Add(new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true));
        dNet.Add(new ActivationLayer(new Relu(leaky: true)));
        dNet.Add(new MaxPooling2DLayer());

        // Dense classification head.
        dNet.Add(new FlattenLayer());
        dNet.Add(new LinearLayer(numNeurons: 128));
        dNet.Add(new ActivationLayer(new Relu(leaky: true)));
        dNet.Add(new LinearLayer(numNeurons: 1));
        dNet.Add(new ActivationLayer(new Sigmoid()));

        dNet.Compile(new BinaryCrossEntropy(), new Adam(0.001d));
    }
Пример #2
0
        /// <summary>
        /// Backpropagates one sample: first through the dense (perceptron)
        /// head, then unflattens the gradient and walks the convolutional
        /// stack in reverse order.
        /// </summary>
        /// <param name="outputLayerGradient">Gradient at the network output.</param>
        /// <param name="learningRate">Step size applied by the perceptron update.</param>
        /// <param name="inputResult">Target values for this sample.</param>
        public void BackwardStep(double[] outputLayerGradient, double learningRate, double[] inputResult)
        {
            var denseGradient = Perceptron.BackwardStep(learningRate, FlattenLayer.LastOutput, inputResult, outputLayerGradient);
            var mapGradient   = FlattenLayer.ProcessBackpropMaps(denseGradient);

            // Walk the conv layers back-to-front, threading the gradient through.
            for (int layer = LayersCount - 1; layer >= 0; layer--)
            {
                mapGradient = Layers[layer].ProcessBackpropMaps(mapGradient);
            }
        }
Пример #3
0
        /// <summary>
        /// FlattenLayer round-trip: a 2-sample batch of 1x2x2 maps flattens
        /// to shape (2, 4), and Backward restores the original shape.
        /// </summary>
        public void FlattenTest()
        {
            // Arrange
            var layer     = new FlattenLayer();
            var batch     = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8 }, 2, 1, 2, 2);
            var flattened = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8 }, 2, 4);

            // Act / Assert: forward pass flattens each sample.
            var forward = layer.Forward(batch);
            Assert.Equal(flattened, forward);

            // Act / Assert: backward pass restores the input shape.
            var restored = layer.Backward(forward);
            Assert.Equal(batch, restored);
        }
Пример #4
0
        /// <summary>
        /// Runs a pure forward pass: maps go through the convolutional stack,
        /// are flattened, and the perceptron head scores them against
        /// <paramref name="inputResult"/>. No weights are updated here.
        /// </summary>
        /// <param name="learningRate">Unused; kept for signature compatibility with the SGD variants.</param>
        /// <param name="input">Input feature maps for one sample.</param>
        /// <param name="inputResult">Expected output for this sample.</param>
        /// <returns>Whatever the perceptron's forward step reports (presumably loss/score — confirm against Perceptron.FeedForwardStep).</returns>
        public Tuple <double, double> FeedForwardStep(double learningRate, List <double[][]> input, double[] inputResult)
        {
            // Feed the maps through every convolutional layer in order.
            var lastInput = input;
            for (int i = 0; i < Layers.Count; ++i)
            {
                lastInput = Layers[i].ProcessMaps(lastInput);
            }

            // Flatten to a vector and hand off to the dense head.
            var arr = FlattenLayer.ProcessMaps(lastInput);
            return(Perceptron.FeedForwardStep(arr, inputResult));
        }
Пример #5
0
        /// <summary>
        /// One SGD training step for the CIFAR setup: forward pass through
        /// the conv stack and dense layers, accuracy bookkeeping, loss
        /// computation, then full backpropagation with in-place weight
        /// updates.
        /// </summary>
        /// <param name="learningRate">Step size for the layer updates.</param>
        /// <param name="input">Input feature maps for one sample.</param>
        /// <param name="inputResult">One-hot target vector for this sample.</param>
        /// <returns>Cross-entropy loss for the sample.</returns>
        public double SGDStepCifar(
            double learningRate,
            List <double[][]> input,
            double[] inputResult
            )
        {
            // --- feedforward ---
            var lastInput = input;

            for (int i = 0; i < Layers.Count; ++i)
            {
                lastInput = Layers[i].ProcessMaps(lastInput);
            }
            var lastOut = FlattenLayer.ProcessMaps(lastInput);

            for (int i = 0; i < PerLayers.Count; ++i)
            {
                lastOut = PerLayers[i].FeedForward(lastOut);
            }

            // Hoisted out of the predicate: the original recomputed Max() for
            // every element, making the argmax scan O(n^2).
            // NOTE(review): exact `==` on doubles is fragile in general, but is
            // safe here because maxOutput is copied from an element of lastOut.
            var maxOutput = lastOut.Max();

            if (lastOut.FindIndex(item => item == maxOutput) == inputResult.FindIndex(item => item == 1))
            {
                Accuracy.Add(1);
            }

            var loss = CrossEntropyLoss(lastOut, inputResult);

            // --- backprop ---
            var gradientToProcess = inputResult;

            for (int i = PerLayers.Count - 1; i >= 0; i--)
            {
                gradientToProcess = PerLayers[i].Backward(gradientToProcess, learningRate);
            }

            var cnnGrad = FlattenLayer.ProcessBackpropMaps(gradientToProcess);

            for (int i = LayersCount - 1; i >= 0; i--)
            {
                cnnGrad = Layers[i].ProcessBackpropMaps(cnnGrad);
            }

            return(loss);
        }
Пример #6
0
        /// <summary>
        /// One SGD training step: forward pass through the conv stack, a
        /// combined forward/backward step in the perceptron head, then
        /// backpropagation of the returned gradient through the conv layers.
        /// </summary>
        /// <param name="learningRate">Step size for the weight updates.</param>
        /// <param name="input">Input feature maps for one sample.</param>
        /// <param name="inputResult">Target vector for this sample.</param>
        /// <returns>The loss reported by the perceptron's SGD step.</returns>
        public double SGDStep(
            double learningRate,
            List <double[][]> input,
            double[] inputResult
            )
        {
            // --- feedforward ---
            var lastInput = input;

            for (int i = 0; i < Layers.Count; ++i)
            {
                lastInput = Layers[i].ProcessMaps(lastInput);
            }
            var arr = FlattenLayer.ProcessMaps(lastInput);

            // Perceptron.SGDStep returns (loss, gradient w.r.t. its input).
            var results = Perceptron.SGDStep(learningRate, arr, inputResult);
            var loss    = results.Item1;

            // --- backprop ---
            var gradientToProcess = FlattenLayer.ProcessBackpropMaps(results.Item2);

            for (int i = LayersCount - 1; i >= 0; i--)
            {
                gradientToProcess = Layers[i].ProcessBackpropMaps(gradientToProcess);
            }

            return(loss);
        }
Пример #7
0
        /// <summary>
        /// Shows the "add link" dialog and, if the user confirms, inserts a
        /// new chain link of the selected type. An Input Layer is forced to
        /// index 0 and only one is allowed per chain; every other type is
        /// appended at the end. Unknown selections fall back to a Dense Layer.
        /// </summary>
        public void AddChainLink()
        {
            var windowManager = new WindowManager();
            var context       = new AddLinkWindowViewModel();

            windowManager.ShowDialog(context);

            if (context.Result.HasValue)
            {
                int      insertIndex = ChainLinks.Count;
                LinkBase link;
                switch (context.Result.Value)
                {
                // String.Format with no format arguments was dropped on every
                // branch below — it allocated for nothing and produced the
                // identical string.
                case LinkType.InputLayer:
                    if (ChainLinks.Count > 0)
                    {
                        if (ChainData.CountLinksOfType(typeof(InputLayer)) > 0)
                        {
                            MessageBox.Show("Only one Input Layer is allowed (or useful) per chain.");
                            return;
                        }
                    }
                    // Input layers always go first.
                    insertIndex = 0;
                    link        = new InputLayer(ChainData, "Input Layer");
                    //TODO: Fix
                    ((InputDataParameter)link.Parameters[0]).InputDataValue = _parent.NetworkArchitectureData.Problem.Inputs[0];
                    break;

                case LinkType.ActivationLayer:
                    link = new ActivationLayer(ChainData, "Activation Layer");
                    break;

                case LinkType.Convolution1DLayer:
                    link = new Convolution1DLayer(ChainData, "1D Convolution Layer");
                    break;

                case LinkType.Convolution2DLayer:
                    link = new Convolution2DLayer(ChainData, "2D Convolution Layer");
                    break;

                case LinkType.Convolution3DLayer:
                    link = new Convolution3DLayer(ChainData, "3D Convolution Layer");
                    break;

                // default is intentionally merged with DenseLayer: any
                // unrecognized link type yields a dense layer.
                default:
                case LinkType.DenseLayer:
                    link = new DenseLayer(ChainData, "Dense Layer");
                    break;

                case LinkType.DropoutLayer:
                    link = new DropoutLayer(ChainData, "Dropout Layer");
                    break;

                case LinkType.FlattenLayer:
                    link = new FlattenLayer(ChainData, "Flatten Layer");
                    break;

                case LinkType.ReshapeLayer:
                    link = new ReshapeLayer(ChainData, "Reshape Layer");
                    break;

                case LinkType.MergeLayer:
                    link = new MergeLayer(ChainData, "Merge Layer");
                    break;

                case LinkType.BatchNormalizationLayer:
                    link = new BatchNormalizationLayer(ChainData, "Batch Normalization Layer");
                    break;

                case LinkType.LinearTransformationLayer:
                    link = new LinearTransformationLayer(ChainData, "Linear Transformation");
                    break;
                }

                ChainData.ChainLinks.Insert(insertIndex, link);
                ValidateInputCompatibility();
                refreshLinks();
            }
        }
Пример #8
0
 // Test fixture setup: creates a fresh FlattenLayer before each test.
 // NOTE(review): the meaning of (2, 5, 1) is not visible here — presumably
 // input dimensions (e.g. height/width/depth); confirm against the
 // FlattenLayer constructor.
 public void SetUp()
 {
     _layer = new FlattenLayer(2, 5, 1);
 }