public void BatchNormalizationLayer_CopyLayerForPredictionModel()
        {
            // Arrange: a 3x3x1 layer initialized with a fixed seed for determinism.
            const int batchSize = 1;

            var sut = new BatchNormalizationLayer();
            sut.Initialize(3, 3, 1, batchSize, Initialization.GlorotUniform, new Random(232));

            // Act: copy the layer into a prediction-model layer list.
            var layers = new List<ILayer>();
            sut.CopyLayerForPredictionModel(layers);
            var actual = (BatchNormalizationLayer)layers.Single();

            // Assert: dimensions carry over unchanged.
            Assert.AreEqual(sut.Width, actual.Width);
            Assert.AreEqual(sut.Height, actual.Height);
            Assert.AreEqual(sut.Depth, actual.Depth);

            // Learned parameters and inference-time running statistics carry over.
            MatrixAsserts.AreEqual(sut.Scale, actual.Scale);
            MatrixAsserts.AreEqual(sut.Bias, actual.Bias);
            MatrixAsserts.AreEqual(sut.MovingAverageMeans, actual.MovingAverageMeans);
            MatrixAsserts.AreEqual(sut.MovingAverageVariance, actual.MovingAverageVariance);

            // Training-only batch statistics must not be present in a prediction copy.
            Assert.IsNull(actual.BatchColumnMeans);
            Assert.IsNull(actual.BatchcolumnVars);

            // The output activation buffer keeps the same shape.
            Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount);
            Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount);
        }
        public void BatchNormalizationLayer_Backward()
        {
            // Arrange: a 1x1x4 layer with a batch of 2, seeded for reproducibility.
            const int fanIn     = 4;
            const int batchSize = 2;
            var random = new Random(232);

            var sut = new BatchNormalizationLayer();
            sut.Initialize(1, 1, fanIn, batchSize, Initialization.GlorotUniform, random);

            var values = new float[] { 0, 1, -1, 1, 0.5f, 1.5f, -10, 10 };
            var input  = Matrix<float>.Build.Dense(batchSize, fanIn, values);
            Trace.WriteLine(input.ToString());

            // The forward pass must run first so the layer caches its batch statistics.
            sut.Forward(input);

            // Act: back-propagate a seeded random delta.
            var delta  = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
            var actual = sut.Backward(delta);
            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            // Assert against pre-computed reference gradients.
            var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -2.600517E-06f, 2.615418E-06f, -1.349278E-06f, 1.349278E-06f, 1.158319E-06f, -1.150868E-06f, -5.639333E-10f, -9.261829E-10f });
            MatrixAsserts.AreEqual(expected, actual);
        }
        public void SpatialBatchNormalizationBackward()
        {
            // CPU reference layer applying spatial normalization over a 12x12x13 volume.
            BatchNormalizationLayerBase cpu = new BatchNormalizationLayer(
                TensorInfo.Volume(12, 12, 13), NormalizationMode.Spatial, ActivationType.ReLU);

            // cuDNN layer cloned from the CPU layer's weights, biases and running statistics.
            BatchNormalizationLayerBase gpu = new CuDnnBatchNormalizationLayer(
                cpu.InputInfo, NormalizationMode.Spatial, cpu.Weights, cpu.Biases,
                cpu.Iteration, cpu.Mu.AsSpan().ToArray(), cpu.Sigma2.AsSpan().ToArray(), cpu.ActivationType);

            // Both implementations must produce matching backward passes.
            TestBackward(cpu, gpu, 400);
        }
        public void PerActivationBatchNormalizationBackward()
        {
            // CPU reference layer applying per-activation normalization to a 250-unit vector.
            BatchNormalizationLayerBase cpu = new BatchNormalizationLayer(
                TensorInfo.Linear(250), NormalizationMode.PerActivation, ActivationType.ReLU);

            // cuDNN layer cloned from the CPU layer's weights, biases and running statistics.
            BatchNormalizationLayerBase gpu = new CuDnnBatchNormalizationLayer(
                cpu.InputInfo, NormalizationMode.PerActivation, cpu.Weights, cpu.Biases,
                cpu.Iteration, cpu.Mu.AsSpan().ToArray(), cpu.Sigma2.AsSpan().ToArray(), cpu.ActivationType);

            // Both implementations must produce matching backward passes.
            TestBackward(cpu, gpu, 400);
        }
        public void BatchNormalizationLayer_GradientCheck_BatchSize_10()
        {
            // Numerically verify the analytic gradients of a 1x1x5 layer with a batch of 10,
            // using a finite-difference epsilon of 1e-4 and a fixed seed.
            const int fanIn     = 5;
            const int batchSize = 10;
            var sut = new BatchNormalizationLayer();

            GradientCheckTools.CheckLayer(sut, 1, 1, fanIn, batchSize, 1e-4f, new Random(21));
        }
        public void BatchNormalizationLayer_Forward_SpatialInput()
        {
            // Arrange: a 3x3x3 input processed by a 2x2x2 filter grid (stride 1, no padding).
            const int batchSize = 2;

            const int filterHeight = 2;
            const int filterWidth  = 2;
            const int filterDepth  = 2;

            const int stride  = 1;
            const int padding = 0;

            const int inputWidth  = 3;
            const int inputHeight = 3;
            const int inputDepth  = 3;

            var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth,
                                                                stride, padding, BorderMode.Valid);
            var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight,
                                                                 stride, padding, BorderMode.Valid);
            var k = filterDepth;

            // Convolution-layout activations, reshaped below into row-major batch rows.
            var values    = new float[] { 111, 121, 112, 122, 113, 123, 114, 124, 211, 221, 212, 222, 213, 223, 214, 224 };
            var convInput = Matrix<float>.Build.Dense(2, 8, values);
            Trace.WriteLine(convInput);

            var rowWiseInput = Matrix<float>.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight);
            ConvUtils.ReshapeConvolutionsToRowMajor(convInput, inputDepth, inputHeight, inputWidth,
                                                    filterHeight, filterWidth, padding, padding, stride, stride,
                                                    BorderMode.Valid, rowWiseInput);
            Trace.WriteLine(rowWiseInput);

            var sut = new BatchNormalizationLayer();
            sut.Initialize(filterGridWidth, filterGridHeight, filterDepth, batchSize,
                           Initialization.GlorotUniform, new Random(232));

            // Act
            var actual = sut.Forward(rowWiseInput);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));
            Trace.WriteLine(actual);

            // Assert against pre-computed normalized activations.
            var expected = Matrix<float>.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight, new float[] { -1.0297426f, 0.9697576f, -1.00974762f, 0.9897526f, -0.9897526f, 1.00974762f, -0.9697576f, 1.0297426f, -1.0297426f, 0.9697576f, -1.00974762f, 0.9897526f, -0.9897526f, 1.00974762f, -0.9697576f, 1.0297426f });

            MatrixAsserts.AreEqual(expected, actual);
        }
Example #7
0
        public MNISTBatchNormalizationNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            // He-style initialization scales: sqrt(2 / fan-in), suited to ReLU activations.
            float scaleFromInput  = Mathf.Sqrt(2.0f / inputSize);
            float scaleFromHidden = Mathf.Sqrt(2.0f / hiddenSize);

            // First hidden block: affine -> batch norm -> ReLU.
            affine1 = new AffineLayer(inputSize, hiddenSize, scaleFromInput);
            bn1     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu1   = new ReLULayer();

            // Second hidden block: affine -> batch norm -> ReLU.
            affine2 = new AffineLayer(hiddenSize, hiddenSize, scaleFromHidden);
            bn2     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu2   = new ReLULayer();

            // Output block: affine -> softmax.
            affine3 = new AffineLayer(hiddenSize, outputSize, scaleFromHidden);
            softmax = new SoftmaxLayer();

            optimizer = new MomentumOptimizer(0.9f);
        }
Example #8
0
        internal static INetworkLayer CpuLayerDeserialize([NotNull] Stream stream, LayerType type)
        {
            // Route the stream to the deserializer matching the serialized layer type.
            switch (type)
            {
            case LayerType.FullyConnected:
                return FullyConnectedLayer.Deserialize(stream);

            case LayerType.Convolutional:
                return ConvolutionalLayer.Deserialize(stream);

            case LayerType.Pooling:
                return PoolingLayer.Deserialize(stream);

            case LayerType.Output:
                return OutputLayer.Deserialize(stream);

            case LayerType.Softmax:
                return SoftmaxLayer.Deserialize(stream);

            case LayerType.BatchNormalization:
                return BatchNormalizationLayer.Deserialize(stream);

            default:
                throw new ArgumentOutOfRangeException(nameof(type), $"The {type} layer type is not supported by the default deserializer");
            }
        }
        public void BatchNormalizationLayer_Forward()
        {
            // Arrange: a 1x1x4 layer with a batch of 2, seeded for deterministic initialization.
            const int fanIn     = 4;
            const int batchSize = 2;

            var sut = new BatchNormalizationLayer();
            sut.Initialize(1, 1, fanIn, batchSize, Initialization.GlorotUniform, new Random(232));

            var values = new float[] { 0, 1, -1, 1, 0.5f, 1.5f, -10, 10 };
            var input  = Matrix<float>.Build.Dense(batchSize, fanIn, values);
            Trace.WriteLine(input.ToString());

            // Act
            var actual = sut.Forward(input);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));
            Trace.WriteLine(actual);

            // Assert: with a batch of two, each column normalizes to roughly +/-1.
            var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -0.999998f, 0.999998f, -0.9999995f, 0.9999995f, -0.999998f, 0.999998f, -1, 1 });
            MatrixAsserts.AreEqual(expected, actual);
        }
        public void AddChainLink()
        {
            // Ask the user which link type to add; do nothing if the dialog is cancelled.
            var windowManager = new WindowManager();
            var context       = new AddLinkWindowViewModel();

            windowManager.ShowDialog(context);

            if (!context.Result.HasValue)
            {
                return;
            }

            // New links are appended by default; the input layer is forced to the front.
            int      insertIndex = ChainLinks.Count;
            LinkBase link;
            switch (context.Result.Value)
            {
            case LinkType.InputLayer:
                // Only a single input layer is allowed per chain.
                if (ChainLinks.Count > 0 && ChainData.CountLinksOfType(typeof(InputLayer)) > 0)
                {
                    MessageBox.Show("Only one Input Layer is allowed (or useful) per chain.");
                    return;
                }
                insertIndex = 0;
                link        = new InputLayer(ChainData, "Input Layer");
                //TODO: Fix
                ((InputDataParameter)link.Parameters[0]).InputDataValue = _parent.NetworkArchitectureData.Problem.Inputs[0];
                break;

            case LinkType.ActivationLayer:
                link = new ActivationLayer(ChainData, "Activation Layer");
                break;

            case LinkType.Convolution1DLayer:
                link = new Convolution1DLayer(ChainData, "1D Convolution Layer");
                break;

            case LinkType.Convolution2DLayer:
                link = new Convolution2DLayer(ChainData, "2D Convolution Layer");
                break;

            case LinkType.Convolution3DLayer:
                link = new Convolution3DLayer(ChainData, "3D Convolution Layer");
                break;

            // Unknown link types deliberately fall back to a dense layer.
            default:
            case LinkType.DenseLayer:
                link = new DenseLayer(ChainData, "Dense Layer");
                break;

            case LinkType.DropoutLayer:
                link = new DropoutLayer(ChainData, "Dropout Layer");
                break;

            case LinkType.FlattenLayer:
                link = new FlattenLayer(ChainData, "Flatten Layer");
                break;

            case LinkType.ReshapeLayer:
                link = new ReshapeLayer(ChainData, "Reshape Layer");
                break;

            case LinkType.MergeLayer:
                link = new MergeLayer(ChainData, "Merge Layer");
                break;

            case LinkType.BatchNormalizationLayer:
                link = new BatchNormalizationLayer(ChainData, "Batch Normalization Layer");
                break;

            case LinkType.LinearTransformationLayer:
                link = new LinearTransformationLayer(ChainData, "Linear Transformation");
                break;
            }

            ChainData.ChainLinks.Insert(insertIndex, link);
            ValidateInputCompatibility();
            refreshLinks();
        }