Code example #1
        public ILayer GetLayer()
        {
            var    activation = GetActivationFunction();
            ILayer toReturn;

            switch (LayerType)
            {
            case NeuralLayerType.BasicLayer:
                toReturn = new BasicLayer(activation, HasBias, NeuronsPerDimension.All.ToArray());
                break;

            case NeuralLayerType.Conv2D:
                toReturn = new Conv2DLayer(activation, ConvNbFilters, ConvNbRows, ConvNbColumns);
                break;

            case NeuralLayerType.Dropout:
                toReturn = new DropoutLayer(activation, HasBias, NeuronsPerDimension.One, DropOut);
                break;

            case NeuralLayerType.MaxPool:
                toReturn = new MaxPoolLayer(NeuronsPerDimension.All.ToArray());
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }
            return(toReturn);
        }
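Note: the method above is a straightforward enum-to-type factory. On C# 8 or later the same mapping can be expressed as a switch expression; the sketch below is only a restatement of the method above, reusing its members (LayerType, GetActivationFunction, NeuronsPerDimension, and the layer constructors) unchanged.

        public ILayer GetLayer()
        {
            var activation = GetActivationFunction();

            // One arm per supported layer type; the discard arm replaces the
            // default case of the original switch statement.
            return LayerType switch
            {
                NeuralLayerType.BasicLayer => new BasicLayer(activation, HasBias, NeuronsPerDimension.All.ToArray()),
                NeuralLayerType.Conv2D => new Conv2DLayer(activation, ConvNbFilters, ConvNbRows, ConvNbColumns),
                NeuralLayerType.Dropout => new DropoutLayer(activation, HasBias, NeuronsPerDimension.One, DropOut),
                NeuralLayerType.MaxPool => new MaxPoolLayer(NeuronsPerDimension.All.ToArray()),
                _ => throw new ArgumentOutOfRangeException(nameof(LayerType)),
            };
        }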
Code example #2
File: DGAN.cs Project: himanshugoel2797/NNSharp
        public DGAN()
        {
            discriminator      = InputLayer.Create(StartSide, 1);
            discriminator_back = ActivationLayer.Create<Sigmoid>();
            discriminator.Append(
                FCLayer.Create(1, 256).Append(
                    ActivationLayer.Create<LeakyReLU>().Append(
                        FCLayer.Create(1, 1).Append(
                            discriminator_back
                            ))));

            generator      = InputLayer.Create(1, LatentSize);
            generator_back = ActivationLayer.Create<Tanh>();
            generator.Append(
                FCLayer.Create(1, 256).Append(
                    ActivationLayer.Create<LeakyReLU>().Append(
                        DropoutLayer.Create(0.5f).Append(
                            FCLayer.Create(1, 512).Append(
                                ActivationLayer.Create<LeakyReLU>().Append(
                                    DropoutLayer.Create(0.5f).Append(
                                        FCLayer.Create(1, OutputSize).Append(
                                            generator_back
                                            ))))))));

            //Initialize Weights
            discriminator.SetupInternalState();
            discriminator.InitializeWeights(new UniformWeightInitializer(3, 0));

            generator.SetupInternalState();
            generator.InitializeWeights(new UniformWeightInitializer(1, 0));
        }
Code example #3
        public static DropoutLayer<T> DropoutLayer<T>(this LayerBase<T> layer, T dropoutProbability) where T : struct, IEquatable<T>, IFormattable
        {
            var dropout = new DropoutLayer<T>(dropoutProbability);
            dropout.AcceptParent(layer);

            return dropout;
        }
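This extension method is what makes fluent graph building possible: it creates the dropout node, wires the receiver in as its parent, and returns the new node so calls can be chained. Below is a minimal self-contained sketch of the same parent-wiring pattern, using stand-in types rather than this library's real LayerBase<T>.

        // Stand-in types for illustration only; they mirror the AcceptParent
        // wiring used by the extension method above.
        public abstract class TinyLayer
        {
            public TinyLayer Parent { get; private set; }

            public void AcceptParent(TinyLayer parent) => Parent = parent;
        }

        public sealed class TinyDense : TinyLayer { }

        public sealed class TinyDropout : TinyLayer
        {
            public double DropProbability { get; }

            public TinyDropout(double p) => DropProbability = p;
        }

        public static class TinyLayerExtensions
        {
            // Create the child, attach the receiver as its parent, and return
            // the child so further layers can be chained onto it.
            public static TinyDropout Dropout(this TinyLayer layer, double p)
            {
                var dropout = new TinyDropout(p);
                dropout.AcceptParent(layer);
                return dropout;
            }
        }

        // Usage: new TinyDense().Dropout(0.5) yields a dropout node whose
        // Parent is the dense layer.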
Code example #4
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void CloneTest()
        {
            DropoutLayer layer1 = new DropoutLayer(new Shape(new int[] { -1, 10000 }), 0.5);
            DropoutLayer layer2 = layer1.Clone() as DropoutLayer;

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Code example #5
        public void DropoutLayer_ForwardTestPhase()
        {
            Context.Instance.Phase = PhaseType.Test;

            var layer = new DropoutLayer();

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int count = bottom.Count;
                for (int i = 0; i < count; i++)
                {
                    if (!MathHelpers.Equality(topCpu.DataAt(i), 0))
                    {
                        Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i)));
                    }
                }
            }
        }
Code example #6
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void CopyConstructorTest1()
        {
            DropoutLayer layer1 = new DropoutLayer(new Shape(new int[] { -1, 10000 }), 0.5);
            DropoutLayer layer2 = new DropoutLayer(layer1);

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Code example #7
        public static ConvNet SimpleLinearNetworkWithDropout(double drate, int dseed)
        {
            var net = new ConvNet(1, 1, activation: new Mocks.LinearActivation());

            net.IsTraining = true;
            var layer1 = new DenseLayer(1);

            net.AddLayer(layer1);
            var layer2 = new DenseLayer(1);

            net.AddLayer(layer2);
            var layer3 = new DropoutLayer(drate, dseed);

            layer3.Mask = new bool[1][,] { new bool[,] { { true } } };
            net.AddLayer(layer3);
            var layer4 = new DenseLayer(1);

            net.AddLayer(layer4);
            net._Build();

            layer1.Weights[1] = 1;
            layer1.Weights[0] = 3;
            layer2.Weights[1] = -1;
            layer2.Weights[0] = 1;
            layer4.Weights[1] = 2;
            layer4.Weights[0] = -1;

            return(net);
        }
Code example #8
        public void DropoutLayer_MultipleBackwardsPasses()
        {
            const int fanIn     = 5;
            var       batchSize = 1;
            var       random    = new Random(232);

            var sut = new DropoutLayer(0.5);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            sut.Forward(input);

            var delta = Matrix<float>.Build.Dense(batchSize, fanIn, 1.0f);

            var expected = Matrix<float>.Build.Dense(batchSize, fanIn);

            sut.Backward(delta).CopyTo(expected);

            for (int i = 0; i < 20; i++)
            {
                var actual = sut.Backward(delta);
                Assert.AreEqual(expected, actual);
            }
        }
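The property pinned down by this test is that the dropout mask sampled during Forward must be reused by every subsequent Backward, so repeated backward passes over the same delta are deterministic. A minimal self-contained sketch of that mask bookkeeping with plain arrays (inverted dropout; not SharpLearning's actual implementation):

        using System;

        public sealed class MaskedDropout
        {
            private readonly double _dropProbability;
            private readonly Random _random;
            private bool[] _mask; // sampled in Forward, reused by Backward

            public MaskedDropout(double dropProbability, int seed)
            {
                _dropProbability = dropProbability;
                _random = new Random(seed);
            }

            public double[] Forward(double[] input)
            {
                double scale = 1.0 / (1.0 - _dropProbability);
                _mask = new bool[input.Length];
                var output = new double[input.Length];
                for (int i = 0; i < input.Length; i++)
                {
                    _mask[i] = _random.NextDouble() >= _dropProbability;
                    output[i] = _mask[i] ? input[i] * scale : 0.0;
                }
                return output;
            }

            public double[] Backward(double[] delta)
            {
                // No resampling here: gradients flow only through the units
                // kept by the last Forward, so the result is deterministic.
                double scale = 1.0 / (1.0 - _dropProbability);
                var output = new double[delta.Length];
                for (int i = 0; i < delta.Length; i++)
                {
                    output[i] = _mask[i] ? delta[i] * scale : 0.0;
                }
                return output;
            }
        }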
Code example #9
        public void DropoutSerialization()
        {
            var layer = new DropoutLayer {
                DropProbability = 0.1
            };

            layer.Init(28, 24, 1);
            var data = layer.GetData();

            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var deserialized = LayerBase<double>.FromData(data) as DropoutLayer;

            Assert.IsNotNull(deserialized);
            Assert.AreEqual(28, deserialized.InputWidth);
            Assert.AreEqual(24, deserialized.InputHeight);
            Assert.AreEqual(1, deserialized.InputDepth);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);

            Assert.AreEqual(layer.DropProbability, deserialized.DropProbability);
        }
Code example #10
        public static ILayer Load(LayerType layerType, BinaryReader br, bool forTraining = false)
        {
            ILayer layer = null;

            switch (layerType)
            {
            case LayerType.LSTM:
                layer = new LSTMLayer();
                break;

            case LayerType.DropOut:
                layer = new DropoutLayer();
                break;

            case LayerType.Softmax:
                layer = new SoftmaxLayer();
                break;

            case LayerType.SampledSoftmax:
                layer = new SampledSoftmaxLayer();
                break;

            case LayerType.Simple:
                layer = new SimpleLayer();
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(layerType));
            }

            layer.Load(br, layerType, forTraining);

            return(layer);
        }
Code example #11
File: RNN.cs Project: deTrident/RNNSharp
        protected virtual List<SimpleLayer> CreateLayers(List<LayerConfig> hiddenLayersConfig)
        {
            var hiddenLayers = new List<SimpleLayer>();

            for (var i = 0; i < hiddenLayersConfig.Count; i++)
            {
                SimpleLayer layer = null;
                switch (hiddenLayersConfig[i].LayerType)
                {
                case LayerType.LSTM:
                    layer = new LSTMLayer(hiddenLayersConfig[i] as LSTMLayerConfig);
                    Logger.WriteLine("Create LSTM layer.");
                    break;

                case LayerType.DropOut:
                    layer = new DropoutLayer(hiddenLayersConfig[i] as DropoutLayerConfig);
                    Logger.WriteLine("Create Dropout layer.");
                    break;
                }

                hiddenLayers.Add(layer);
            }

            return(hiddenLayers);
        }
Code example #12
        /// <summary>
        /// Create a MNIST trainer (handwritten digit recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            return(trainer);
        }
Code example #13
        public void DropoutLayer_ForwardTrainPhase(double ratio)
        {
            Context.Instance.Phase = PhaseType.Train;

            var config = new DropoutLayerConfiguration(ratio);
            var layer  = new DropoutLayer(config);

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                double scale = 1f / (1f - layer.Parameters.Ratio);

                int count = bottom.Count;
                int kept  = 0;
                for (int i = 0; i < count; i++)
                {
                    if (!MathHelpers.Equality(topCpu.DataAt(i), 0))
                    {
                        kept++;
                        Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i) * scale));
                    }
                }

                double stdError = Math.Sqrt(ratio * (1 - ratio) / count);
                double empiricalDropoutRatio = 1.0d - ((double)kept / count);

                Assert.True(MathHelpers.Equality(ratio, empiricalDropoutRatio, 1.96 * stdError));
            }
        }
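The final assertion is a statistical acceptance test: each unit is dropped independently with probability ratio, so the number of dropped units is binomial and the empirical drop ratio should land within 1.96 standard errors of ratio about 95% of the time. Worked numbers, assuming for illustration count = 10000 and ratio = 0.5 (the actual count comes from the bottom blob):

            double ratio = 0.5;
            int count = 10000;

            double stdError = Math.Sqrt(ratio * (1 - ratio) / count); // 0.005
            double tolerance = 1.96 * stdError;                       // ~0.0098

            // The test then accepts any empirical drop ratio in roughly
            // [0.49, 0.51]; a run outside that band would fail.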
Code example #14
        public static DropoutLayer<T> DropoutLayer<T>(this LayerBase<T> layer, T dropProbability) where T : struct, IEquatable<T>, IFormattable
        {
            var dropout = new DropoutLayer<T>(dropProbability);

            layer.ConnectTo(dropout);

            return(dropout);
        }
Code example #15
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void ArchitectureConstructorTest1()
        {
            Shape        shape = new Shape(new int[] { -1, 10000 });
            DropoutLayer layer = new DropoutLayer(shape, "D0.5", null);

            Assert.AreEqual(0.5, layer.Probability);
            CollectionAssert.AreEqual(shape.Axes, layer.OutputShape.Axes);
            Assert.AreEqual("D0.5", layer.Architecture);
        }
Code example #16
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void SerializeTest()
        {
            DropoutLayer layer1 = new DropoutLayer(new Shape(new int[] { -1, 10000 }), 0.5);
            string       s1     = JsonConvert.SerializeObject(layer1);
            DropoutLayer layer2 = JsonConvert.DeserializeObject <DropoutLayer>(s1);
            string       s2     = JsonConvert.SerializeObject(layer2);

            Assert.AreEqual(s1, s2);
        }
Code example #17
        public void DropoutLayer_BackwardGradientTrainPhase()
        {
            Context.Instance.Phase = PhaseType.Train;

            var layer = new DropoutLayer();

            var checker = new GradientChecker(1e-2f, 1e-3f);
            checker.CheckEltwise(layer, bottom, top);
        }
Code example #18
        public void DropoutLayer_BackwardGradientTestPhase()
        {
            Context.Instance.Phase = PhaseType.Test;

            var layer = new DropoutLayer();

            var checker = new GradientChecker(1e-2f, 1e-3f);

            checker.CheckEltwise(layer, bottom, top);
        }
Code example #19
        public void DropoutLayer_Setup()
        {
            var layer = new DropoutLayer();

            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
Code example #20
        public void DropoutLayer_CopyLayerForPredictionModel()
        {
            var batchSize = 1;

            var sut = new DropoutLayer(0.5);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, new Random(233));

            var layers = new List<ILayer>();

            sut.CopyLayerForPredictionModel(layers);

            Assert.IsTrue(layers.Count == 0);
        }
Code example #21
        public void NotLearning()
        {
            var n = 1000000;
            var dropProbability = 0.2;
            var layer           = new DropoutLayer<double>(dropProbability);

            layer.Init(1, 1, n);

            var input  = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, false);

            var average = result.ToArray().Average();

            Assert.AreEqual(1.0, average); // Let everything go through
        }
Code example #22
File: Program.cs Project: xiaoxiongnpu/Sigma
        private static void SampleMnist()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("mnist");

            sigma.SetRandomSeed(0);

            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            //trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);
            //trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            var validationTimeStep = TimeStep.Every(1, TimeScale.Epoch);

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", validationTimeStep, tops: new[] { 1, 2, 3 }));

            for (int i = 0; i < 10; i++)
            {
                trainer.AddGlobalHook(new TargetMaximisationReporter(trainer.Operator.Handler.NDArray(ArrayUtils.OneHot(i, 10), 10), TimeStep.Every(1, TimeScale.Epoch)));
            }

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration), 32));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));
            trainer.AddHook(new StopTrainingHook(atEpoch: 10));

            sigma.PrepareAndRun();
        }
Code example #23
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void ArchitectureConstructorTest2()
        {
            string architecture = "DD";

            try
            {
                DropoutLayer layer = new DropoutLayer(new Shape(new int[] { -1, 10000 }), architecture, null);
            }
            catch (ArgumentException e)
            {
                Assert.AreEqual(
                    new ArgumentException(string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture), nameof(architecture)).Message,
                    e.Message);
                throw;
            }
        }
Code example #24
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void ForwardTest1()
        {
            Shape        shape = new Shape(new int[] { -1, 10000 });
            DropoutLayer layer = new DropoutLayer(shape, 0.5);

            for (int i = 1; i <= 3; i++)
            {
                Session session = new Session(false);

                Tensor x = new Tensor(null, shape.Reshape(0, i));
                x.Randomize();

                IList<Tensor> xs = new[] { x };
                IList<Tensor> ys = layer.Forward(session, xs);

                Assert.AreEqual(x.Weights.Sum() * layer.Probability, ys[0].Weights.Sum());
            }
        }
Code example #25
        public void DropoutLayer_Forward()
        {
            const int fanIn     = 5;
            var       batchSize = 1;
            var       random    = new Random(232);

            var sut = new DropoutLayer(0.5);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Forward(input);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { 0.9177308f, 1.495695f, -0.07688076f, 0f, -2.932818f });

            MatrixAsserts.AreEqual(expected, actual);
        }
Code example #26
        public void Learning()
        {
            var n = 1000000;
            var dropProbability = 0.2;
            var layer           = new DropoutLayer<double>(dropProbability);

            layer.Init(1, 1, n);

            var input  = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, true);

            var val           = result.ToArray().First(o => o != 0.0);
            var scalingFactor = 1.0 / (1.0 - dropProbability);

            Assert.AreEqual(scalingFactor, val); // Make sure output is scaled during learning

            var average       = result.ToArray().Average();
            var measuredProba = average * dropProbability;

            Assert.AreEqual(dropProbability, measuredProba, 0.001); // Make sure dropout really happened
        }
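The arithmetic behind these assertions, for dropProbability p = 0.2 on an all-ones input: inverted dropout scales every kept unit by 1/(1 - p), so survivors equal 1.25 and the mean of the output stays near 1.0, which is exactly what makes average * dropProbability come out near dropProbability. A worked sketch:

            double p = 0.2;
            double scale = 1.0 / (1.0 - p);          // kept units become 1.25
            double expectedMean = (1.0 - p) * scale; // 0.8 * 1.25 = 1.0

            // Because the mean is preserved, average ~= 1.0 in the test above,
            // and average * p ~= p recovers the configured drop probability.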
Code example #27
        public void DropoutLayer_ForwardTestPhase()
        {
            Context.Instance.Phase = PhaseType.Test;

            var layer = new DropoutLayer();
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int count = bottom.Count;
                for (int i = 0; i < count; i++)
                {
                    if (!MathHelpers.Equality(topCpu.DataAt(i), 0))
                        Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i)));
                }
            }
        }
Code example #28
File: RNN.cs Project: deTrident/RNNSharp
        public static SimpleLayer Load(LayerType layerType, BinaryReader br)
        {
            switch (layerType)
            {
            case LayerType.LSTM:
                return(LSTMLayer.Load(br, LayerType.LSTM));

            case LayerType.DropOut:
                return(DropoutLayer.Load(br, LayerType.DropOut));

            case LayerType.Softmax:
                return(SoftmaxLayer.Load(br, LayerType.Softmax));

            case LayerType.SampledSoftmax:
                return(SampledSoftmaxLayer.Load(br, LayerType.SampledSoftmax));

            case LayerType.Simple:
                return(SimpleLayer.Load(br, LayerType.Simple));
            }

            // No matching layer type: the caller receives null.
            return(null);
        }
Code example #29
        public void DropoutLayer_Backward()
        {
            const int fanIn     = 5;
            var       batchSize = 1;
            var       random    = new Random(232);

            var sut = new DropoutLayer(0.5);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            sut.Forward(input);

            var delta = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Backward(delta);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -1.676851f, -1.938897f, -1.108109f, 0f, -0.4058239f });

            MatrixAsserts.AreEqual(expected, actual);
        }
Code example #30
File: DropoutLayerTest.cs Project: arnavdas88/dnn
        public void ForwardBackwardTest2()
        {
            Shape        shape = new Shape(new int[] { -1, 10000 });
            DropoutLayer layer = new DropoutLayer(shape, 0.5);

            for (int i = 1; i <= 3; i++)
            {
                Session session = new Session(true);

                Tensor x = new Tensor(null, shape.Reshape(0, i));
                x.Randomize();

                Tensor y = layer.Forward(session, new[] { x })[0];

                Assert.AreEqual(0.0f, y.Weights.Sum(), 1.0f);
                Assert.AreEqual((int)(y.Length * layer.Probability), y.Weights.Count(w => w == 0.0f), y.Length / 50);

                // unroll the graph
                y.SetGradient(1.0f);
                session.Unroll();

                CollectionAssert.AreEqual(y.Weights.Select(w => w == 0.0f ? 0.0f : 1.0f).ToArray(), x.Gradient);
            }
        }
Code example #31
        public void AddChainLink()
        {
            var windowManager = new WindowManager();
            var context       = new AddLinkWindowViewModel();

            windowManager.ShowDialog(context);

            if (context.Result.HasValue)
            {
                int      insertIndex = ChainLinks.Count;
                LinkBase link;
                switch (context.Result.Value)
                {
                case LinkType.InputLayer:
                    if (ChainLinks.Count > 0)
                    {
                        if (ChainData.CountLinksOfType(typeof(InputLayer)) > 0)
                        {
                            MessageBox.Show("Only one Input Layer is allowed (or useful) per chain.");
                            return;
                        }
                    }
                    insertIndex = 0;
                    link        = new InputLayer(ChainData, "Input Layer");
                    //TODO: Fix
                    ((InputDataParameter)link.Parameters[0]).InputDataValue = _parent.NetworkArchitectureData.Problem.Inputs[0];
                    break;

                case LinkType.ActivationLayer:
                    link = new ActivationLayer(ChainData, "Activation Layer");
                    break;

                case LinkType.Convolution1DLayer:
                    link = new Convolution1DLayer(ChainData, "1D Convolution Layer");
                    break;

                case LinkType.Convolution2DLayer:
                    link = new Convolution2DLayer(ChainData, "2D Convolution Layer");
                    break;

                case LinkType.Convolution3DLayer:
                    link = new Convolution3DLayer(ChainData, "3D Convolution Layer");
                    break;

                default:
                case LinkType.DenseLayer:
                    link = new DenseLayer(ChainData, "Dense Layer");
                    break;

                case LinkType.DropoutLayer:
                    link = new DropoutLayer(ChainData, "Dropout Layer");
                    break;

                case LinkType.FlattenLayer:
                    link = new FlattenLayer(ChainData, "Flatten Layer");
                    break;

                case LinkType.ReshapeLayer:
                    link = new ReshapeLayer(ChainData, "Reshape Layer");
                    break;

                case LinkType.MergeLayer:
                    link = new MergeLayer(ChainData, "Merge Layer");
                    break;

                case LinkType.BatchNormalizationLayer:
                    link = new BatchNormalizationLayer(ChainData, "Batch Normalization Layer");
                    break;

                case LinkType.LinearTransformationLayer:
                    link = new LinearTransformationLayer(ChainData, "Linear Transformation");
                    break;
                }

                ChainData.ChainLinks.Insert(insertIndex, link);
                ValidateInputCompatibility();
                refreshLinks();
            }
        }
Code example #32
        public void DropoutLayer_Setup()
        {
            var layer = new DropoutLayer();
            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
Code example #33
        public void DropoutLayer_ForwardTrainPhase(double ratio)
        {
            Context.Instance.Phase = PhaseType.Train;

            var config = new DropoutLayerConfiguration(ratio);
            var layer = new DropoutLayer(config);
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                double scale = 1f / (1f - layer.Parameters.Ratio);

                int count = bottom.Count;
                int kept = 0;
                for (int i = 0; i < count; i++)
                {
                    if (!MathHelpers.Equality(topCpu.DataAt(i), 0))
                    {
                        kept++;
                        Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i) * scale));
                    }
                }

                double stdError = Math.Sqrt(ratio * (1 - ratio) / count);
                double empiricalDropoutRatio = 1.0d - ((double)kept / count);

                Assert.True(MathHelpers.Equality(ratio, empiricalDropoutRatio, 1.96 * stdError));
            }
        }
Code example #34
        public GAN()
        {
            #region 128x128

            /*discriminator = InputLayer.Create(StartSide, 3);
             * discriminator_back = ActivationLayer.Create<Sigmoid>();
             * discriminator.Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 126
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 124
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 122
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 120
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_1.Append(                           //o = 60
             *  ConvLayer.Create(3, 8).Append(              //o = 58
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 56
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 54
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 52
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_2.Append(                           //o = 26
             *  ConvLayer.Create(3, 8).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 22
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_3.Append(                           //o = 11
             *  ConvLayer.Create(3, 8).Append(              //o = 22
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  FCLayer.Create(1, 1).Append(
             *      discriminator_back
             * )))))))))))))))))))))))))));
             *
             * generator = InputLayer.Create(32, 8);
             * generator_back = ActivationLayer.Create<Tanh>();
             * generator.Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 3, 2).Append(              //o = 26
             *      generator_back
             * ))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));*/
            #endregion

            /*
             * discriminator = InputLayer.Create(StartSide, 3);
             * discriminator_back = ActivationLayer.Create<Sigmoid>();
             * discriminator.Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 30
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 512).Append(              //o = 28
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 128).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 64).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 32).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 1).Append(
             *      discriminator_back
             * )))))))))))))))))))))));
             *
             * generator = InputLayer.Create(1, LatentSize);
             * generator_back = ActivationLayer.Create<Tanh>();
             * generator.Append(
             *  FCLayer.Create(1, 32).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 64).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 128).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 512).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(16, 2).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(EndSide, 3).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *      generator_back
             * ))))))))))))))))))))))))));*/


            discriminator      = InputLayer.Create(StartSide, 3);
            discriminator_back = ActivationLayer.Create<Sigmoid>();
            discriminator.Append(
                FCLayer.Create(1, 1024).Append(
                    ActivationLayer.Create<LeakyReLU>().Append(
                        DropoutLayer.Create(0.3f).Append(
                            FCLayer.Create(1, 512).Append(
                                ActivationLayer.Create<LeakyReLU>().Append(
                                    DropoutLayer.Create(0.3f).Append(
                                        FCLayer.Create(1, 256).Append(
                                            ActivationLayer.Create<LeakyReLU>().Append(
                                                DropoutLayer.Create(0.3f).Append(
                                                    FCLayer.Create(1, 256).Append(
                                                        ActivationLayer.Create<LeakyReLU>().Append(
                                                            DropoutLayer.Create(0.3f).Append(
                                                                FCLayer.Create(1, 64).Append(
                                                                    ActivationLayer.Create<LeakyReLU>().Append(
                                                                        DropoutLayer.Create(0.3f).Append(
                                                                            FCLayer.Create(1, 64).Append(
                                                                                ActivationLayer.Create<LeakyReLU>().Append(
                                                                                    DropoutLayer.Create(0.3f).Append(
                                                                                        FCLayer.Create(1, 1).Append(
                                                                                            discriminator_back
                                                                                            ))))))))))))))))))));

            generator      = InputLayer.Create(1, LatentSize);
            generator_back = ActivationLayer.Create<Tanh>();
            generator.Append(
                FCLayer.Create(1, 128).Append(
                    ActivationLayer.Create<ReLU>().Append(
                        FCLayer.Create(1, 256).Append(
                            ActivationLayer.Create<ReLU>().Append(
                                FCLayer.Create(1, 256).Append(
                                    ActivationLayer.Create<ReLU>().Append(
                                        DropoutLayer.Create(0.5f).Append(
                                            FCLayer.Create(1, 256).Append(
                                                ActivationLayer.Create<ReLU>().Append(
                                                    FCLayer.Create(1, 512).Append(
                                                        ActivationLayer.Create<ReLU>().Append(
                                                            FCLayer.Create(1, 1024).Append(
                                                                ActivationLayer.Create<ReLU>().Append(
                                                                    FCLayer.Create(32, 3).Append(
                                                                        generator_back
                                                                        )))))))))))))));

            encoder      = InputLayer.Create(32, 3);
            encoder_back = ActivationLayer.Create<LeakyReLU>();
            encoder.Append(
                FCLayer.Create(1, 1024).Append(
                    ActivationLayer.Create<ReLU>().Append(
                        FCLayer.Create(1, 512).Append(
                            ActivationLayer.Create<ReLU>().Append(
                                FCLayer.Create(1, 256).Append(
                                    ActivationLayer.Create<ReLU>().Append(
                                        DropoutLayer.Create(0.5f).Append(
                                            FCLayer.Create(1, 256).Append(
                                                ActivationLayer.Create<ReLU>().Append(
                                                    FCLayer.Create(1, 256).Append(
                                                        ActivationLayer.Create<ReLU>().Append(
                                                            FCLayer.Create(1, 128).Append(
                                                                ActivationLayer.Create<ReLU>().Append(
                                                                    FCLayer.Create(1, LatentSize).Append(
                                                                        encoder_back
                                                                        )))))))))))))));

            //Initialize Weights
            discriminator.SetupInternalState();
            discriminator.InitializeWeights(new UniformWeightInitializer(3, 0));

            generator.SetupInternalState();
            generator.InitializeWeights(new UniformWeightInitializer(1, 0));

            encoder.SetupInternalState();
            encoder.InitializeWeights(new UniformWeightInitializer(2, 0));
        }