Example #1
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters()
        {
            base.ReInitializeParameters();

            Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.weight_filler);

            weight_filler.Fill(m_colBlobs[0]);
            weight_filler.Fill(m_colBlobs[1]);

            Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.bias_filler);

            bias_filler.Fill(m_colBlobs[2]);

            // Initialize the bias for the forget gate to 5.0 as described in the
            // Clockwork RNN paper:
            // [1] Koutnik, J., Greff, K., Gomez, F., Schmidhuber, J., 'A Clockwork RNN', 2014
            if (m_param.lstm_simple_param.enable_clockwork_forgetgate_bias)
            {
                double[] rgBias = convertD(m_colBlobs[2].mutable_cpu_data);

                for (int i = m_nH; i < 2 * m_nH; i++)
                {
                    rgBias[i] = 5.0;
                }

                m_colBlobs[2].mutable_cpu_data = convert(rgBias);
            }

            return(true);
        }
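
The loop above writes 5.0 into the second m_nH-wide block of the 4 * m_nH bias vector, which is the forget-gate block assuming the gate layout [input | forget | output | candidate] implied by the bias shape built in Example #31. A hypothetical helper (not part of MyCaffe) that makes the block arithmetic explicit:

        // Hypothetical helper, not part of MyCaffe: overwrite one gate's bias
        // block in a 4*nH LSTM bias vector laid out as
        // [input | forget | output | candidate].
        private static void SetGateBias(double[] rgBias, int nH, int nGate, double dfValue)
        {
            for (int i = nGate * nH; i < (nGate + 1) * nH; i++)
            {
                rgBias[i] = dfValue;
            }
        }

        // Equivalent to the loop above; gate index 1 is the forget gate:
        // SetGateBias(rgBias, m_nH, 1, 5.0);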
Example #2
        public void DependentFieldsWillNotBeUsefulUnlessOrderIsSpecified()
        {
            var rootObject = new Foo();
            var filler     = new Filler();

            var generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return(10);

            var dependentPropertyGenerator = MockRepository.GenerateStub <IGenerateDummyData>();

            dependentPropertyGenerator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Do(new GeneratorDelegate(context => context.RootAs <Foo>().Age + 1));

            filler.Configure <Foo>(config =>
            {
                config.For(f => f.CalculatedAge).Use(dependentPropertyGenerator);
                config.For(f => f.Age).Use(generator);
            });

            filler.Fill(rootObject);
            Assert.That(rootObject.Age, Is.EqualTo(10));
            Assert.That(rootObject.CalculatedAge, Is.EqualTo(1));

            filler.Configure <Foo>(config =>
            {
                config.For(f => f.Age).Order(2);
                config.For(f => f.CalculatedAge).Order(1);
            });

            filler.Fill(rootObject);
            Assert.That(rootObject.Age, Is.EqualTo(10));
            Assert.That(rootObject.CalculatedAge, Is.EqualTo(11));
        }
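
The tests in this listing never show the model types they fill. A minimal sketch of the classes they appear to assume (hypothetical; members inferred from the configuration calls and assertions, not taken from the library):

        // Hypothetical test models inferred from the tests in this listing.
        public class Bar { public string Name { get; set; } }
        public class Goo { public string Name { get; set; } }

        public class Foo
        {
            public int Age { get; set; }
            public int CalculatedAge { get; set; }
            public string Name { get; set; }
            public string Description { get; set; }
            public Bar Bar { get; set; }
            public Goo Goo { get; set; }
            public List<Bar> Bars { get; set; }
        }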
Example #3
        /// <summary>
        /// Resample the noise for both weights and bias (if used).
        /// </summary>
        public void ResetNoise()
        {
            if (m_bEnableNoise)
            {
                // Resamples the noise vector.
                m_fillerEpsilon.Fill(m_blobEpsilonWeight);

                if (m_bBiasTerm)
                {
                    // Resample the noise vector
                    m_fillerEpsilon.Fill(m_blobEpsilonBias);
                }
            }
        }
Example #4
        public void DependencyOrderingWorksWithSubclassesAsWellAsPrimitives()
        {
            var rootObject = new Foo();
            var filler = new Filler();

            var generator = MockRepository.GenerateStub<IGenerateDummyData>();
            generator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything)).Return("Chris");

            var dependentGenerator = MockRepository.GenerateStub<IGenerateDummyData>();
            dependentGenerator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything))
                .Do(new GeneratorDelegate(context => string.Format("Hello {0}", context.RootAs<Foo>().Bar.Name)));

            filler.Configure<Goo>(config => config.For(goo => goo.Name).Use(dependentGenerator));
            filler.Configure<Bar>(config => config.For(goo => goo.Name).Use(generator));

            filler.Configure<Foo>(config =>
            {
                config.For(f => f.Goo).Order(2);
                config.For(f => f.Bar).Order(1);
            });

            filler.Fill(rootObject);

            Assert.That(rootObject.Goo.Name, Is.EqualTo("Hello Chris"));
        }
Example #5
        public void DependencyOrderingWorksWithSubclassesAsWellAsPrimitives()
        {
            var rootObject = new Foo();
            var filler     = new Filler();

            var generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return("Chris");

            var dependentGenerator = MockRepository.GenerateStub <IGenerateDummyData>();

            dependentGenerator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything))
            .Do(new GeneratorDelegate(context => string.Format("Hello {0}", context.RootAs <Foo>().Bar.Name)));

            filler.Configure <Goo>(config => config.For(goo => goo.Name).Use(dependentGenerator));
            filler.Configure <Bar>(config => config.For(goo => goo.Name).Use(generator));

            filler.Configure <Foo>(config =>
            {
                config.For(f => f.Goo).Order(2);
                config.For(f => f.Bar).Order(1);
            });

            filler.Fill(rootObject);

            Assert.That(rootObject.Goo.Name, Is.EqualTo("Hello Chris"));
        }
Example #6
            public void FillPerson()
            {
                Person person = new Person();

                Filler <Person> pFiller = new Filler <Person>();
                Person          p       = pFiller.Fill(person);
            }
Example #7
        public void LetsGetSomeMatchesGoingOn()
        {
            var users = new List <User>();

            var filler = new Filler();

            filler.Configure <Bar>().Defaults();
            filler.Configure <Foo>(config =>
            {
                config.For(foo => foo.Bars).Times(Constants.Random.Next(100));
                config.For(foo => foo.Age).Use(new RandomWholeNumberGenerator(10, 21)).Order(1);
                config.For(foo => foo.CalculatedAge).Do(context => context.CurrentAs <Foo>().Age + 20).Order(2);
            }).Defaults();

            filler.Configure <Goo>().Defaults();
            filler.Configure <User>().Defaults();
            filler.Configure <AllowedPartner>(config =>
            {
                config.For(allowedPartner => allowedPartner.MinAge).Use(new MinAgeGenerator());
                config.For(allowedPartner => allowedPartner.MaxAge).Use(new MaxAgeGenerator());
            });

            1000.Times(() => users.Add(filler.Fill(new User())));
            users.ToString();
        }
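
The 1000.Times(...) call relies on an extension method that is not shown here. A common implementation would look like this (hypothetical sketch; the test project may define it differently):

        // Hypothetical int extension backing the 1000.Times(...) call above.
        public static class IntExtensions
        {
            public static void Times(this int nCount, Action action)
            {
                for (int i = 0; i < nCount; i++)
                {
                    action();
                }
            }
        }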
Example #8
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            if (colBottom.Count == 1 && m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else if (colBottom.Count == 1)
            {
                // bias is a learned parameter; initialize it.
                BiasParameter p        = m_param.bias_param;
                int           nAxis    = colBottom[0].CanonicalAxisIndex(p.axis);
                int           nNumAxes = p.num_axes;

                m_log.CHECK_GE(nNumAxes, -1, "num_axes must be non-negative, or -1 to extend to end of bottom[0].");

                if (nNumAxes >= 0)
                {
                    m_log.CHECK_GE(colBottom[0].num_axes, nAxis + nNumAxes, "bias blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + nAxis.ToString());
                }

                m_colBlobs = new BlobCollection <T>();

                List <int> rgBiasShape = new List <int>();
                int        nStart      = nAxis;
                int        nEnd        = (nNumAxes == -1) ? colBottom[0].shape().Count : nStart + nNumAxes;

                for (int i = nStart; i < nEnd; i++)
                {
                    rgBiasShape.Add(colBottom[0].shape(i));
                }

                Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                blobBias.Name = m_param.name + " bias";
                blobBias.type = BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobBias, rgBiasShape))
                {
                    blobBias.Reshape(rgBiasShape);
                    FillerParameter fp = p.filler;
                    if (fp == null)
                    {
                        fp = new FillerParameter("constant", 0.0);
                    }

                    Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                    filler.Fill(blobBias);
                }
                m_colBlobs.Add(blobBias);
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);
        }
Example #9
        public static bool ClickedOnTile(Point target) //TODO refactor
        {
            board.TimeKeeper.Start();

            var tile = board.GetTile(target);

            switch (tile.TileState)
            {
            case TileState.Known:
                return(false);

            case TileState.Unknown:
            {
                tile.TileState = TileState.Known;
                if (CheckIfWin())
                {
                    board.GameState = GameState.Win;
                    return(true);
                }
                break;
            }

            case TileState.Flag:
                return(false);
            }

            if (tile.IsNumber)
            {
                return(true);
            }

            if (tile.TileValue == TileValue.Empty)
            {
                tile.TileState = TileState.Unknown;
                Filler.Fill(board, target);
                if (CheckIfWin())
                {
                    board.GameState = GameState.Win;
                    return(true);
                }
            }

            if (tile.TileValue == TileValue.Bomb)
            {
                board.GameState = GameState.Lose;
            }

            return(true);
        }
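
In this example Filler.Fill is a Minesweeper board flood fill rather than a data filler: it reveals the connected region of tiles around a clicked empty tile. A minimal sketch of such a fill, assuming a hypothetical Board.GetNeighbors helper:

        // Hypothetical sketch of a flood fill in the spirit of
        // Filler.Fill(board, target); Board.GetNeighbors is assumed.
        public static void Fill(Board board, Point start)
        {
            var queue = new Queue<Point>();
            queue.Enqueue(start);

            while (queue.Count > 0)
            {
                Point pt = queue.Dequeue();
                var tile = board.GetTile(pt);

                if (tile.TileState == TileState.Known || tile.TileState == TileState.Flag)
                {
                    continue;
                }

                tile.TileState = TileState.Known;

                // Only empty tiles propagate the reveal to their neighbors.
                if (tile.TileValue == TileValue.Empty)
                {
                    foreach (Point neighbor in board.GetNeighbors(pt))
                    {
                        queue.Enqueue(neighbor);
                    }
                }
            }
        }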
Example #10
        public void ShouldNotFillSubObjectIfRootObjectNotDefined()
        {
            const string dummyData  = "Data";
            var          rootObject = new Foo();
            var          filler     = new Filler();

            var generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return(dummyData);

            filler.Configure <Bar>(config => config.For(f => f.Name).Use(generator));

            filler.Fill(rootObject);
            Assert.That(rootObject.Bar, Is.Null);
        }
Example #11
        public void CollectionsGenerateDataXTimes()
        {
            var rootObject = new Foo();
            var filler = new Filler();

            var generator = MockRepository.GenerateStub<IGenerateDummyData>();
            generator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything)).Return("Chris");

            filler.Configure<Bar>(config => config.For(goo => goo.Name).Use(generator));
            filler.Configure<Foo>(config => config.For(f => f.Bars).Times(10));

            filler.Fill(rootObject);
            Assert.That(rootObject.Bars.Count, Is.EqualTo(10));
            Assert.That(rootObject.Bars.First().Name, Is.EqualTo("Chris"));
        }
Example #12
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_nN = (int)m_param.embed_param.num_output;
            m_log.CHECK_GT(m_nN, 0, "EmbedLayer num_output must be positive.");

            m_nK = (int)m_param.embed_param.input_dim;
            m_log.CHECK_GT(m_nK, 0, "EmbedLayer input_dim must be positive.");

            m_bBiasTerm = m_param.embed_param.bias_term;

            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs.Clear();

                // Initialize the weights --
                // transposed from InnerProductLayer for spatial locality.
                List <int> rgWeightShape = new List <int>()
                {
                    m_nK, m_nN
                };
                m_colBlobs.Add(new common.Blob <T>(m_cuda, m_log, rgWeightShape));

                // fill the weights
                Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.embed_param.weight_filler);

                weight_filler.Fill(m_colBlobs[0]);

                // If necessary, initialize and fill the bias term
                if (m_bBiasTerm)
                {
                    List <int> rgBiasShape = new List <int>()
                    {
                        m_nN
                    };
                    m_colBlobs.Add(new common.Blob <T>(m_cuda, m_log, rgBiasShape));

                    Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.embed_param.bias_filler);

                    bias_filler.Fill(m_colBlobs[1]);
                }
            }

            m_rgbParamPropagateDown = new common.DictionaryMap <bool>(m_colBlobs.Count, true);
        }
Example #13
        public void CollectionsGenerateDataXTimes()
        {
            var rootObject = new Foo();
            var filler     = new Filler();

            var generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return("Chris");

            filler.Configure <Bar>(config => config.For(goo => goo.Name).Use(generator));
            filler.Configure <Foo>(config => config.For(f => f.Bars).Times(10));

            filler.Fill(rootObject);
            Assert.That(rootObject.Bars.Count, Is.EqualTo(10));
            Assert.That(rootObject.Bars.First().Name, Is.EqualTo("Chris"));
        }
Example #14
        private float randomUniformValue(float fMin, float fMax)
        {
            m_blobWork.Reshape(1, 1, 1, 1);

            FillerParameter fp = new FillerParameter("uniform");

            fp.min = fMin;
            fp.max = fMax;
            Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

            filler.Fill(m_blobWork);

            float[] rg = Utility.ConvertVecF <T>(m_blobWork.mutable_cpu_data);

            return(rg[0]);
        }
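
The helper above draws a single uniform sample by filling a one-element blob through the framework's filler machinery, which keeps the value on the same random stream as the other fillers. A rough CPU-only equivalent (a sketch; it does not share that stream):

        // Hypothetical CPU-side equivalent of randomUniformValue; it uses
        // System.Random instead of the framework's filler RNG.
        private static readonly Random m_random = new Random();

        private static float randomUniformValueCpu(float fMin, float fMax)
        {
            return fMin + (float)m_random.NextDouble() * (fMax - fMin);
        }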
Example #15
        public void TestFillPerson()
        {
            Person p = new Person();
            Filler<Person> filler = new Filler<Person>();
            filler.Setup()
                .OnType<IAddress>().CreateInstanceOf<Address>()
                .OnType<string>().Use(new MnemonicString(10))
                .OnProperty(person => person.FirstName).Use<MnemonicString>()
                .OnProperty(person => person.LastName).Use(new RandomListItem<string>("Maik", "Tom", "Anton"))
                .OnProperty(person => person.Age).Use(() => Tynamix.ObjectFiller.Random.Next(12, 83))
                .SetupFor<Address>()
                .OnProperty(x => x.City, x => x.Country).IgnoreIt();

            Person pFilled = filler.Fill(p);

            Assert.True(new List<string>() { "Maik", "Tom", "Anton" }.Contains(pFilled.LastName));
        }
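
This Tynamix.ObjectFiller test also leaves its model types implicit. A plausible minimal shape for them (hypothetical, inferred from the setup chain: an IAddress with City and Country, realized by Address, and a Person with the configured properties):

        // Hypothetical types inferred from the setup chain above.
        public interface IAddress
        {
            string City { get; set; }
            string Country { get; set; }
        }

        public class Address : IAddress
        {
            public string City { get; set; }
            public string Country { get; set; }
        }

        public class Person
        {
            public string FirstName { get; set; }
            public string LastName { get; set; }
            public int Age { get; set; }
            public IAddress Address { get; set; }
        }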
Example #16
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters()
        {
            base.ReInitializeParameters();

            Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.weight_filler);

            weight_filler.Fill(m_colBlobs[0]);

            if (m_param.inner_product_param.bias_term && m_colBlobs.Count > 1)
            {
                Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.bias_filler);

                bias_filler.Fill(m_colBlobs[1]);
            }

            return(true);
        }
Example #17
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters()
        {
            base.ReInitializeParameters();

            Filler <T> filler = Filler <T> .Create(m_cuda, m_log, m_param.convolution_param.weight_filler);

            filler.Fill(m_colBlobs[0]);

            if (m_param.convolution_param.bias_term && m_colBlobs.Count > 1)
            {
                Filler <T> fillerBias = Filler <T> .Create(m_cuda, m_log, m_param.convolution_param.bias_filler);

                fillerBias.Fill(m_colBlobs[1]);
            }

            return(true);
        }
Example #18
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters()
        {
            base.ReInitializeParameters();

            FillerParameter fp = m_param.prelu_param.filler;

            if (fp == null)
            {
                fp = new FillerParameter("constant", 0.25);
            }

            Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

            filler.Fill(m_colBlobs[0]);

            return(true);
        }
Example #19
        public void IgnoringPropertiesWillPreventFilling()
        {
            const string dummyData = "Data";
            var generator = MockRepository.GenerateStub<IGenerateDummyData>();
            generator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything)).Return(dummyData);

            var filler = new Filler();
            var rootObject = new Foo();

            filler.Configure<Foo>(config =>{
                config.For(f => f.Description).Use(generator).Ignore();
                config.For(f => f.Name).Use(generator);
            });

            filler.Fill(rootObject);
            Assert.That(rootObject.Name, Is.EqualTo(dummyData));
            Assert.That(rootObject.Description, Is.Null);
        }
Example #20
        public void CollectionsGenerateDataBetweenXandYTimes()
        {
            var filler = new Filler();

            var generator = MockRepository.GenerateStub<IGenerateDummyData>();
            generator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything)).Return("Chris");

            filler.Configure<Bar>(config => config.For(goo => goo.Name).Use(generator));
            filler.Configure<Foo>(config => config.For(f => f.Bars).Between(10, 20));

            for (var i = 0; i <= 100; i++)
            {
                var rootObject = new Foo();
                filler.Fill(rootObject);
                var bars = rootObject.Bars.Count;
                Assert.That(bars, Is.LessThanOrEqualTo(20));
                Assert.That(bars, Is.GreaterThanOrEqualTo(10));
            }
        }
Example #21
        public void IgnoringPropertiesWillPreventFilling()
        {
            const string dummyData = "Data";
            var          generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return(dummyData);

            var filler     = new Filler();
            var rootObject = new Foo();

            filler.Configure <Foo>(config => {
                config.For(f => f.Description).Use(generator).Ignore();
                config.For(f => f.Name).Use(generator);
            });

            filler.Fill(rootObject);
            Assert.That(rootObject.Name, Is.EqualTo(dummyData));
            Assert.That(rootObject.Description, Is.Null);
        }
Example #22
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS)
            {
                FillerParameter fp = m_param.bias_param.filler;
                if (fp == null)
                {
                    fp = new FillerParameter("constant", 0.0);
                }

                Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                filler.Fill(m_colBlobs[0]);
            }

            return(true);
        }
Example #23
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.WEIGHTS || target == WEIGHT_TARGET.BOTH)
            {
                Filler <T> filler = Filler <T> .Create(m_cuda, m_log, m_param.convolution_param.weight_filler);

                filler.Fill(m_colBlobs[0]);
            }

            if (m_param.convolution_param.bias_term && m_colBlobs.Count > 1 && (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS))
            {
                Filler <T> fillerBias = Filler <T> .Create(m_cuda, m_log, m_param.convolution_param.bias_filler);

                fillerBias.Fill(m_colBlobs[1]);
            }

            return(true);
        }
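
The WEIGHT_TARGET argument used in Examples #22 through #25 is not defined in this listing. From the comparisons in the code it is presumably an enum along these lines (hypothetical sketch; the real MyCaffe type may have additional members):

        // Hypothetical definition inferred from usage above.
        public enum WEIGHT_TARGET
        {
            NONE,
            WEIGHTS,
            BIAS,
            BOTH
        }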
Example #24
        public void CollectionsGenerateDataBetweenXandYTimes()
        {
            var filler = new Filler();

            var generator = MockRepository.GenerateStub <IGenerateDummyData>();

            generator.Stub(g => g.Generate(Arg <GenerationContext> .Is.Anything)).Return("Chris");

            filler.Configure <Bar>(config => config.For(goo => goo.Name).Use(generator));
            filler.Configure <Foo>(config => config.For(f => f.Bars).Between(10, 20));

            for (var i = 0; i <= 100; i++)
            {
                var rootObject = new Foo();
                filler.Fill(rootObject);
                var bars = rootObject.Bars.Count;
                Assert.That(bars, Is.LessThanOrEqualTo(20));
                Assert.That(bars, Is.GreaterThanOrEqualTo(10));
            }
        }
Example #25
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.WEIGHTS)
            {
                Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.weight_filler);

                weight_filler.Fill(m_colBlobs[0]);
            }

            if (m_param.inner_product_param.bias_term && m_colBlobs.Count > 1 && (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS))
            {
                Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.bias_filler);

                bias_filler.Fill(m_colBlobs[1]);
            }

            return(true);
        }
Example #26
        public void TestFillPerson()
        {
            Person          p      = new Person();
            Filler <Person> filler = new Filler <Person>();

            filler.Setup()
            .OnType <IAddress>().CreateInstanceOf <Address>()
            .OnType <string>().Use(new MnemonicString(10))
            .OnProperty(person => person.FirstName).Use <MnemonicString>()
            .OnProperty(person => person.LastName).Use(new RandomListItem <string>("Maik", "Tom", "Anton"))
            .OnProperty(person => person.Age).Use(() => Tynamix.ObjectFiller.Random.Next(12, 83))
            .SetupFor <Address>()
            .OnProperty(x => x.City, x => x.Country).IgnoreIt();

            Person pFilled = filler.Fill(p);

            Assert.IsTrue(new List <string>()
            {
                "Maik", "Tom", "Anton"
            }.Contains(pFilled.LastName));
        }
Example #27
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters()
        {
            base.ReInitializeParameters();

            FillerParameter fp = m_param.scale_param.filler;

            if (fp == null)
            {
                fp = new FillerParameter("constant", 1.0);
            }

            Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

            filler.Fill(m_colBlobs[0]);

            if (m_param.scale_param.bias_term)
            {
                m_biasLayer.ReInitializeParameters();
            }

            return(true);
        }
Example #28
        public TestEx(string strName, List <int> rgBottomShape = null, int nDeviceID = TestBase.DEFAULT_DEVICE_ID)
            : base(strName, nDeviceID)
        {
            if (rgBottomShape == null)
            {
                rgBottomShape = new List <int>()
                {
                    2, 3, 4, 5
                };
            }

            m_blob_bottom = new Blob <T>(m_cuda, m_log, rgBottomShape);
            m_blob_top    = new Blob <T>(m_cuda, m_log);
            m_colBottom.Add(m_blob_bottom);
            m_colTop.Add(m_blob_top);

            FillerParameter fp = getFillerParam();

            m_filler = Filler <T> .Create(m_cuda, m_log, fp);

            m_filler.Fill(m_blob_bottom);
        }
Example #29
        public void LetsGetSomeMatchesGoingOn()
        {
            var users = new List<User>();

            var filler = new Filler();
            filler.Configure<Bar>().Defaults();
            filler.Configure<Foo>(config =>
            {
                config.For(foo => foo.Bars).Times(Constants.Random.Next(100));
                config.For(foo => foo.Age).Use(new RandomWholeNumberGenerator(10, 21)).Order(1);
                config.For(foo => foo.CalculatedAge).Do(context => context.CurrentAs<Foo>().Age + 20).Order(2);
            }).Defaults();

            filler.Configure<Goo>().Defaults();
            filler.Configure<User>().Defaults();
            filler.Configure<AllowedPartner>(config =>
            {
                config.For(allowedPartner => allowedPartner.MinAge).Use(new MinAgeGenerator());
                config.For(allowedPartner => allowedPartner.MaxAge).Use(new MaxAgeGenerator());
            });

            1000.Times(() => users.Add(filler.Fill(new User())));
            users.ToString();
        }
Example #30
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            int nNumOutput = (int)m_param.inner_product_param.num_output;

            m_bBiasTerm  = m_param.inner_product_param.bias_term;
            m_bTranspose = m_param.inner_product_param.transpose;
            m_nN         = nNumOutput;
            int nAxis = colBottom[0].CanonicalAxisIndex(m_param.inner_product_param.axis);

            // Dimensions starting from 'axis' are 'flattened' into a single
            // length K_ vector. For example, if bottom[0]'s shape is (N, C, H, W),
            // and axis == 1, N inner products with dimension CHW are performed.
            m_nK = colBottom[0].count(nAxis);

            // Check if we need to set up the weights.
            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                // Initialize the weight.
                List <int> rgWeightShape = Utility.Create <int>(2, 0);

                if (m_bTranspose)
                {
                    rgWeightShape[0] = m_nK;
                    rgWeightShape[1] = m_nN;
                }
                else
                {
                    rgWeightShape[0] = m_nN;
                    rgWeightShape[1] = m_nK;
                }

                Blob <T> blobWeight = new Blob <T>(m_cuda, m_log);
                blobWeight.Name = m_param.name + " weights";
                blobWeight.type = Blob <T> .BLOB_TYPE.IP_WEIGHT;

                if (!shareParameter(blobWeight, rgWeightShape))
                {
                    blobWeight.Reshape(rgWeightShape);
                    Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.weight_filler);

                    weight_filler.Fill(blobWeight);
                }
                m_colBlobs.Add(blobWeight);

                // If necessary, initialize and fill the bias term.
                if (m_bBiasTerm)
                {
                    List <int> rgBiasShape = Utility.Create <int>(1, 0);
                    rgBiasShape[0] = m_nN;

                    Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                    blobBias.Name = m_param.name + " bias";
                    blobBias.type = Blob <T> .BLOB_TYPE.IP_WEIGHT;

                    if (!shareParameter(blobBias, rgBiasShape))
                    {
                        blobBias.Reshape(rgBiasShape);
                        Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.bias_filler);

                        bias_filler.Fill(blobBias);
                    }
                    m_colBlobs.Add(blobBias);
                }
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);
        }
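
To make the flattening concrete: with a bottom of shape (64, 3, 28, 28) and axis = 1, count(1) yields K = 3 * 28 * 28 = 2352, so the weight blob is (num_output, 2352), or (2352, num_output) when transpose is set. A sketch of that computation with hypothetical sizes:

        // Sketch of the weight-shape logic above, using hypothetical sizes.
        int nNumOutput = 100;                    // inner_product_param.num_output
        int[] rgBottomShape = { 64, 3, 28, 28 }; // (N, C, H, W)
        int nAxis = 1;

        int nK = 1;                              // K = C * H * W = 2352
        for (int i = nAxis; i < rgBottomShape.Length; i++)
        {
            nK *= rgBottomShape[i];
        }

        // transpose == false -> weights are (num_output, K) = (100, 2352)
        int[] rgWeightShape = { nNumOutput, nK };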
Example #31
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_dfClippingThreshold = m_param.lstm_simple_param.clipping_threshold;
            m_nN = (int)m_param.lstm_simple_param.batch_size;              // batch size.
            m_nH = (int)m_param.lstm_simple_param.num_output;              // number of hidden units.
            m_nI = (int)(colBottom[0].count() / colBottom[0].num);         // input dimension.

            // Check if we need to set up the weights.
            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs = new BlobCollection <T>();

                Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.weight_filler);

                Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.bias_filler);

                // input-to-hidden weights
                // Initialize the weight.
                List <int> rgShape1 = new List <int>()
                {
                    4 * m_nH, m_nI
                };
                Blob <T> blobWeights_I_H = new Blob <T>(m_cuda, m_log);
                blobWeights_I_H.Name = m_param.name + " weights I to H";
                blobWeights_I_H.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobWeights_I_H, rgShape1))
                {
                    blobWeights_I_H.Reshape(rgShape1);
                    weight_filler.Fill(blobWeights_I_H);
                }
                m_colBlobs.Add(blobWeights_I_H);

                // hidden-to-hidden weights
                // Initialize the weight.
                List <int> rgShape2 = new List <int>()
                {
                    4 * m_nH, m_nH
                };
                Blob <T> blobWeights_H_H = new Blob <T>(m_cuda, m_log);
                blobWeights_H_H.Name = m_param.name + " weights H to H";
                blobWeights_H_H.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobWeights_H_H, rgShape2))
                {
                    blobWeights_H_H.Reshape(rgShape2);
                    weight_filler.Fill(blobWeights_H_H);
                }
                m_colBlobs.Add(blobWeights_H_H);

                // If necessary, initialize and fill the bias term.
                List <int> rgShape3 = new List <int>()
                {
                    4 * m_nH
                };
                Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                blobBias.Name = m_param.name + " bias weights";
                blobBias.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobBias, rgShape3))
                {
                    blobBias.Reshape(rgShape3);
                    bias_filler.Fill(blobBias);
                }
                m_colBlobs.Add(blobBias);

                // Initialize the bias for the forget gate to 5.0 as described in the
                // Clockwork RNN paper:
                // [1] Koutnik, J., Greff, K., Gomez, F., Schmidhuber, J., 'A Clockwork RNN', 2014
                if (m_param.lstm_simple_param.enable_clockwork_forgetgate_bias)
                {
                    double[] rgBias = convertD(blobBias.mutable_cpu_data);

                    for (int i = m_nH; i < 2 * m_nH; i++)
                    {
                        rgBias[i] = 5.0;
                    }

                    blobBias.mutable_cpu_data = convert(rgBias);
                }
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);

            List <int> rgCellShape = new List <int>()
            {
                m_nN, m_nH
            };

            m_blob_C_0.Reshape(rgCellShape);
            m_blob_H_0.Reshape(rgCellShape);
            m_blob_C_T.Reshape(rgCellShape);
            m_blob_H_T.Reshape(rgCellShape);
            m_blob_H_to_H.Reshape(rgCellShape);

            List <int> rgGateShape = new List <int>()
            {
                m_nN, 4, m_nH
            };

            m_blob_H_to_Gate.Reshape(rgGateShape);
        }
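
The three parameter blobs above give the standard LSTM parameter count: 4H*I input-to-hidden weights, 4H*H hidden-to-hidden weights, and 4H biases. A quick sketch with hypothetical sizes:

        // Parameter totals for the three blobs above (hypothetical sizes).
        int nI = 32;                   // input dimension
        int nH = 64;                   // number of hidden units

        int nWeightsIH = 4 * nH * nI;  // (4H, I) ->  8192
        int nWeightsHH = 4 * nH * nH;  // (4H, H) -> 16384
        int nBias      = 4 * nH;       // (4H)    ->   256

        int nTotal = nWeightsIH + nWeightsHH + nBias;  // 24832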
Example #32
            public void FillPerson()
            {
                Person person = new Person();

                Filler<Person> pFiller = new Filler<Person>();
                Person p = pFiller.Fill(person);
            }
Example #33
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            ScaleParameter p = m_param.scale_param;

            if (colBottom.Count == 1 && blobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else if (colBottom.Count == 1)
            {
                // scale is a learned parameter; initialize it.
                m_nAxis = colBottom[0].CanonicalAxisIndex(p.axis);
                int nNumAxes = p.num_axes;
                m_log.CHECK_GE(nNumAxes, -1, "num_axes must be non-negative, or -1 to extend to the end of bottom[0].");

                if (nNumAxes >= 0)
                {
                    m_log.CHECK_GE(colBottom[0].num_axes, m_nAxis + nNumAxes, "scale blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + m_nAxis.ToString());
                }

                m_colBlobs = new BlobCollection <T>();

                List <int> rgShape = new List <int>();
                int        nStart  = m_nAxis;
                int        nEnd    = (nNumAxes == -1) ? colBottom[0].shape().Count : nStart + nNumAxes;

                for (int i = nStart; i < nEnd; i++)
                {
                    rgShape.Add(colBottom[0].shape(i));
                }

                Blob <T> blobScale = new Blob <T>(m_cuda, m_log, rgShape);
                blobScale.Name = "scale";
                FillerParameter fp = p.filler;

                // Default to unit (1) filler for identity operation.
                if (fp == null)
                {
                    fp = new FillerParameter("constant", 1.0);
                }

                Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                filler.Fill(blobScale);

                m_colBlobs.Add(blobScale);
            }

            if (p.bias_term)
            {
                LayerParameter pb = new LayerParameter(LayerParameter.LayerType.BIAS);
                pb.bias_param.axis     = p.axis;
                pb.bias_param.num_axes = (colBottom.Count > 1) ? colBottom[1].num_axes : p.num_axes;
                pb.bias_param.filler   = p.bias_filler;

                m_colBiasBottomVec = new BlobCollection <T>();
                m_colBiasBottomVec.Add(colBottom[0]);

                m_biasLayer = new BiasLayer <T>(m_cuda, m_log, pb);
                m_biasLayer.Setup(m_colBiasBottomVec, colTop);
                m_nBiasParamId = m_colBlobs.Count;
                m_colBlobs.Add(m_biasLayer.blobs[0]);
                m_rgbBiasPropagateDown = Utility.Create <bool>(1, false);
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count(), true);
        }
Example #34
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            if (!reshapeNeeded(colBottom, colTop))
            {
                return;
            }

            // Configure the kernel size, padding, stride and inputs.
            ConvolutionParameter p = m_param.convolution_param;

            m_bForceNDim2col = p.force_nd_im2col;
            m_nChannelAxis   = colBottom[0].CanonicalAxisIndex(p.axis);

            int nFirstSpatialAxis = m_nChannelAxis + 1;
            int nNumAxes          = colBottom[0].num_axes;

            m_nNumSpatialAxes = nNumAxes - nFirstSpatialAxis;

            m_log.CHECK_GE(m_nNumSpatialAxes, 0, "The number of spatial axes must be zero or greater.");

            List <int> rgBottomDimBlobShape = new List <int>()
            {
                m_nNumSpatialAxes + 1
            };
            List <int> rgSpaitalDimBlobShape = new List <int>()
            {
                Math.Max(m_nNumSpatialAxes, 1)
            };

            // Setup filter kernel dimensions (blobKernelShape)
            m_blobKernelShape.Reshape(rgSpaitalDimBlobShape);
            T[] rgKernelShape = m_blobKernelShape.mutable_cpu_data;

            if (p.kernel_h.HasValue || p.kernel_w.HasValue)
            {
                m_log.CHECK_EQ(m_nNumSpatialAxes, 2, "kernel_h & kernel_w can only be used in 2D convolution.");
                m_log.CHECK_EQ(0, p.kernel_size.Count, "Either kernel_size or kernel_h/w should be specified; not both.");
                rgKernelShape[0] = (T)Convert.ChangeType(p.kernel_h.Value, typeof(T));
                rgKernelShape[1] = (T)Convert.ChangeType(p.kernel_w.Value, typeof(T));
            }
            else
            {
                int nNumKernelDims = p.kernel_size.Count;
                m_log.CHECK(nNumKernelDims == 1 || nNumKernelDims == m_nNumSpatialAxes, "Kernel size must be specified once, or once per spatial dimension (kernel_size specified " + nNumKernelDims.ToString() + " times; " + m_nNumSpatialAxes.ToString() + " spatial dims);");

                for (int i = 0; i < m_nNumSpatialAxes; i++)
                {
                    int nIdx = (nNumKernelDims == 1) ? 0 : i;
                    rgKernelShape[i] = (T)Convert.ChangeType(p.kernel_size[nIdx], typeof(T));
                }
            }

            for (int i = 0; i < m_nNumSpatialAxes; i++)
            {
                m_log.CHECK_GT((int)Convert.ChangeType(rgKernelShape[i], typeof(int)), 0, "Filter dimension must be non-zero.");
            }

            m_blobKernelShape.mutable_cpu_data = rgKernelShape;


            // Setup stride dimensions (blobStride)
            m_blobStride.Reshape(rgSpaitalDimBlobShape);
            T[] rgStrideData = m_blobStride.mutable_cpu_data;

            if (p.stride_h.HasValue || p.stride_w.HasValue)
            {
                m_log.CHECK_EQ(m_nNumSpatialAxes, 2, "stride_h & stride_w can only be used in 2D convolution.");
                m_log.CHECK_EQ(0, p.stride.Count, "Either stride_size or stride_h/w should be specified; not both.");
                rgStrideData[0] = (T)Convert.ChangeType(p.stride_h.Value, typeof(T));
                rgStrideData[1] = (T)Convert.ChangeType(p.stride_w.Value, typeof(T));
            }
            else
            {
                int nNumStrideDims = p.stride.Count;
                m_log.CHECK(nNumStrideDims == 0 || nNumStrideDims == 1 || nNumStrideDims == m_nNumSpatialAxes, "Stride size must be specified once, or once per spatial dimension (stride specified " + nNumStrideDims.ToString() + " times; " + m_nNumSpatialAxes.ToString() + " spatial dims);");
                int nDefaultStride = 1;

                for (int i = 0; i < m_nNumSpatialAxes; i++)
                {
                    if (nNumStrideDims == 0)
                    {
                        rgStrideData[i] = (T)Convert.ChangeType(nDefaultStride, typeof(T));
                    }
                    else
                    {
                        int nIdx = (nNumStrideDims == 1) ? 0 : i;
                        rgStrideData[i] = (T)Convert.ChangeType(p.stride[nIdx], typeof(T));
                    }
                    m_log.CHECK_GT((int)Convert.ChangeType(rgStrideData[i], typeof(int)), 0, "Stride dimension must be non-zero.");
                }
            }

            m_blobStride.mutable_cpu_data = rgStrideData;


            // Setup pad dimensions (blobPad)
            m_blobPad.Reshape(rgSpaitalDimBlobShape);
            T[] rgPadData = m_blobPad.mutable_cpu_data;

            if (p.pad_h.HasValue || p.pad_w.HasValue)
            {
                m_log.CHECK_EQ(m_nNumSpatialAxes, 2, "pad_h & pad_w can only be used in 2D convolution.");
                m_log.CHECK_EQ(0, p.pad.Count, "Either pad_size or pad_h/w should be specified; not both.");
                rgPadData[0] = (T)Convert.ChangeType(p.pad_h.Value, typeof(T));
                rgPadData[1] = (T)Convert.ChangeType(p.pad_w.Value, typeof(T));
            }
            else
            {
                int nNumPadDims = p.pad.Count;
                m_log.CHECK(nNumPadDims == 0 || nNumPadDims == 1 || nNumPadDims == m_nNumSpatialAxes, "Pad size must be specified once, or once per spatial dimension (pad specified " + nNumPadDims.ToString() + " times; " + m_nNumSpatialAxes.ToString() + " spatial dims);");
                int nDefaultPad = 0;

                for (int i = 0; i < m_nNumSpatialAxes; i++)
                {
                    if (nNumPadDims == 0)
                    {
                        rgPadData[i] = (T)Convert.ChangeType(nDefaultPad, typeof(T));
                    }
                    else
                    {
                        int nIdx = (nNumPadDims == 1) ? 0 : i;
                        rgPadData[i] = (T)Convert.ChangeType(p.pad[nIdx], typeof(T));
                    }
                }
            }

            m_blobPad.mutable_cpu_data = rgPadData;


            // Setup dilation dimensions (blobDilation)
            m_blobDilation.Reshape(rgSpaitalDimBlobShape);
            T[] rgDilationData   = m_blobDilation.mutable_cpu_data;
            int nNumDilationDims = p.dilation.Count;

            m_log.CHECK(nNumDilationDims == 0 || nNumDilationDims == 1 || nNumDilationDims == m_nNumSpatialAxes, "Dilation size must be specified once, or once per spatial dimension (dilation specified " + nNumDilationDims.ToString() + " times; " + m_nNumSpatialAxes.ToString() + " spatial dims);");
            int nDefaultDilation = 1;

            for (int i = 0; i < m_nNumSpatialAxes; i++)
            {
                if (nNumDilationDims == 0)
                {
                    rgDilationData[i] = (T)Convert.ChangeType(nDefaultDilation, typeof(T));
                }
                else
                {
                    int nIdx = (nNumDilationDims == 1) ? 0 : i;
                    rgDilationData[i] = (T)Convert.ChangeType(p.dilation[nIdx], typeof(T));
                }
            }

            m_blobDilation.mutable_cpu_data = rgDilationData;


            // Special case: im2col is the identity for 1x1 convolution with stride 1
            // and no padding, so flag for skipping the buffer and transformation.
            m_bIs1x1 = true;

            for (int i = 0; i < m_nNumSpatialAxes; i++)
            {
                if (!(val_at(rgKernelShape, i) == 1 &&
                      val_at(rgStrideData, i) == 1 &&
                      val_at(rgPadData, i) == 0))
                {
                    m_bIs1x1 = false;
                    break;
                }
            }

            // Configure output channels and groups.
            m_nChannels  = colBottom[0].shape(m_nChannelAxis);
            m_nNumOutput = (int)p.num_output;
            m_log.CHECK_GT(m_nNumOutput, 0, "Output count must be greater than zero.");

            m_nGroup = (int)p.group;
            m_log.CHECK_EQ(m_nChannels % m_nGroup, 0, "The channels must span evenly across the groups.");
            m_log.CHECK_EQ(m_nNumOutput % m_nGroup, 0, "The number of outputs must be a multiple of the group count.");

            if (reverse_dimensions())
            {
                m_nConvOutChannels = m_nChannels;
                m_nConvInChannels  = m_nNumOutput;
            }
            else
            {
                m_nConvOutChannels = m_nNumOutput;
                m_nConvInChannels  = m_nChannels;
            }

            // Handle the parameters: weights and biases
            // - blobs[0] holds the filter weights.
            // - blobs[1] holds the biases (optional)

            List <int> rgWeightShape = new List <int>();

            rgWeightShape.Add(m_nConvOutChannels);
            rgWeightShape.Add(m_nConvInChannels / m_nGroup);

            for (int i = 0; i < m_nNumSpatialAxes; i++)
            {
                rgWeightShape.Add(val_at(rgKernelShape, i));
            }

            m_bBiasTerm = p.bias_term;

            List <int> rgBiasShape = new List <int>()
            {
                m_nNumOutput
            };

            // Setup the convert to half flags used by the Layer just before calling forward and backward.
            if (p.useCudnn(m_nNumSpatialAxes))
            {
                m_bUseHalfSize = m_param.use_halfsize;
            }

            if (m_colBlobs.Count > 0)
            {
                m_log.CHECK_EQ(1 + ((m_bBiasTerm) ? 1 : 0), m_colBlobs.Count, "Incorrect number of weight blobs.");

                if (!Utility.Compare <int>(rgWeightShape, m_colBlobs[0].shape()))
                {
                    Blob <T> b = new Blob <T>(m_cuda, m_log, rgWeightShape);
                    m_log.FAIL("Incorrect weight shape: expected shape " + b.shape_string + "; instead, shape was " + m_colBlobs[0].shape_string);
                }

                if (m_bBiasTerm && !Utility.Compare <int>(rgBiasShape, m_colBlobs[1].shape()))
                {
                    Blob <T> b = new Blob <T>(m_cuda, m_log, rgBiasShape);
                    m_log.FAIL("Incorrect bias shape: expected shape " + b.shape_string + "; instead, shape was " + m_colBlobs[1].shape_string);
                }

                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs.Clear();

                // Initialize and fill the weights:
                // output channels x input channels per-group x kernel height x kernel width.
                Blob <T> blobWts = new Blob <T>(m_cuda, m_log, true, m_bUseHalfSize);
                blobWts.Name = colTop[0].Name + " weights";
                blobWts.type = BLOB_TYPE.WEIGHT;

                if (m_bUseHalfSize || !shareParameter(blobWts, rgWeightShape))
                {
                    blobWts.Reshape(rgWeightShape, m_bUseHalfSize);
                    Filler <T> wtFiller = Filler <T> .Create(m_cuda, m_log, p.weight_filler);

                    Blob <T> blobWts1 = blobWts;

                    if (m_bUseHalfSize)
                    {
                        blobWts1 = new Blob <T>(m_cuda, m_log, false, false);
                        blobWts1.ReshapeLike(blobWts);
                    }

                    wtFiller.Fill(blobWts1);

                    if (m_bUseHalfSize)
                    {
                        blobWts.CopyFrom(blobWts1);
                        blobWts1.Dispose();
                    }
                }

                m_colBlobs.Add(blobWts);

                // If necessary, initialize and fill the biases:
                if (m_bBiasTerm)
                {
                    Blob <T> blobBias = new Blob <T>(m_cuda, m_log, true, m_bUseHalfSize);
                    blobBias.Name = colTop[0].Name + " bias";
                    blobBias.type = BLOB_TYPE.WEIGHT;

                    if (m_bUseHalfSize || !shareParameter(blobBias, rgBiasShape))
                    {
                        blobBias.Reshape(rgBiasShape, m_bUseHalfSize);
                        Filler <T> biasFiller = Filler <T> .Create(m_cuda, m_log, p.bias_filler);

                        Blob <T> blobBias1 = blobBias;

                        if (m_bUseHalfSize)
                        {
                            blobBias1 = new Blob <T>(m_cuda, m_log, false, false);
                            blobBias1.ReshapeLike(blobBias);
                        }

                        biasFiller.Fill(blobBias1);

                        if (m_bUseHalfSize)
                        {
                            blobBias.CopyFrom(blobBias1);
                            blobBias1.Dispose();
                        }
                    }

                    m_colBlobs.Add(blobBias);
                }
            }

            m_nKernelDim    = m_colBlobs[0].count(1);
            m_nWeightOffset = m_nConvOutChannels * m_nKernelDim / m_nGroup;

            // Propagate gradients to the parameters (as directed by backward pass).
            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);
        }
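
For reference, with num_output = 64, 3 input channels, group = 1 and a 3x3 kernel, the rgWeightShape built above comes out as (64, 3, 3, 3) and rgBiasShape as (64). A sketch with those hypothetical sizes:

        // Sketch of the weight/bias shape construction above (hypothetical sizes).
        int nConvOutChannels = 64;
        int nConvInChannels  = 3;
        int nGroup           = 1;
        int[] rgKernel       = { 3, 3 };

        List <int> rgWeightShape = new List <int>() { nConvOutChannels, nConvInChannels / nGroup };
        rgWeightShape.AddRange(rgKernel);               // -> { 64, 3, 3, 3 }

        List <int> rgBiasShape = new List <int>() { 64 }; // one bias per output channel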
Example #35
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            bool bUseCuDnn = m_param.batch_norm_param.useCudnn();

            m_dfMovingAverageFraction = m_param.batch_norm_param.moving_average_fraction;
            m_bUseGlobalStats         = (m_phase == Phase.TEST || m_phase == Phase.RUN);

            if (m_param.batch_norm_param.use_global_stats.HasValue)
            {
                m_bUseGlobalStats = m_param.batch_norm_param.use_global_stats.Value;
            }

            if (colBottom[0].num_axes == 1)
            {
                m_nChannels = 1;
            }
            else
            {
                m_nChannels = colBottom[0].shape(1);
            }

            m_dfEps = m_param.batch_norm_param.eps;

            m_bScaleBias = m_param.batch_norm_param.scale_bias;  // by default = false;
            if (m_param.batch_norm_param.scale_filler != null || // implicit set.
                m_param.batch_norm_param.bias_filler != null)
            {
                m_bScaleBias = true;
            }

            if (m_bScaleBias && !bUseCuDnn)
            {
                m_bScaleBias = false;
            }

            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                List <int> rgSize = new List <int>();
                rgSize.Add(m_nChannels);

                m_colBlobs.Clear(true);

                m_colBlobs.Add(new Blob <T>(m_cuda, m_log, rgSize, false));  // global mean
                m_colBlobs[0].Name = "global mean";
                m_colBlobs[0].SetData(0.0);
                m_colBlobs.Add(new Blob <T>(m_cuda, m_log, rgSize, false));  // global variance
                m_colBlobs[1].Name = "global variance";
                m_colBlobs[1].SetData(0.0);
                m_colBlobs.Add(new Blob <T>(m_cuda, m_log, rgSize, false));  // variance correction
                m_colBlobs[2].Name = "var correction";
                m_colBlobs[2].SetData(1.0);

                if (m_bScaleBias)
                {
                    m_colBlobs.Add(new Blob <T>(m_cuda, m_log, rgSize)); // scale
                    m_colBlobs[3].Name = "scale";

                    FillerParameter fpScale = m_param.batch_norm_param.scale_filler;
                    if (fpScale == null)
                    {
                        fpScale = new FillerParameter("constant", 1.0);
                    }

                    Filler <T> fillerScale = Filler <T> .Create(m_cuda, m_log, fpScale);

                    fillerScale.Fill(m_colBlobs[3]);

                    m_colBlobs.Add(new Blob <T>(m_cuda, m_log, rgSize)); // bias
                    m_colBlobs[4].Name = "bias";

                    FillerParameter fpBias = m_param.batch_norm_param.bias_filler;
                    if (fpBias == null)
                    {
                        fpBias = new FillerParameter("constant", 0.0);
                    }

                    Filler <T> fillerBias = Filler <T> .Create(m_cuda, m_log, fpBias);

                    fillerBias.Fill(m_colBlobs[4]);
                }

                m_nIteration = 0;
            }

            // Mask statistics from optimization by setting local learning rates
            // for mean, variance, and variance correction to zero.
            for (int i = 0; i < 3; i++)
            {
                if (m_param.parameters.Count == i)
                {
                    m_param.parameters.Add(new ParamSpec(0.0, 0.0));
                }
                else
                {
                    m_param.parameters[i].lr_mult    = 0;
                    m_param.parameters[i].decay_mult = 0;
                }
            }

            // Set lr for scale and bias to 1
            if (m_bScaleBias)
            {
                for (int i = 3; i < 5; i++)
                {
                    if (m_param.parameters.Count == i)
                    {
                        m_param.parameters.Add(new ParamSpec(1.0, 1.0));
                    }
                    else
                    {
                        m_param.parameters[i].lr_mult    = 1;
                        m_param.parameters[i].decay_mult = 1;
                    }
                }
            }

            if (!m_param.batch_norm_param.useCudnn())
            {
                return;
            }

            //-----------------------------------
            // Handle cuDNN setup
            //-----------------------------------

            // Set up the convert-to-half flags used by the Layer just before calling forward and backward.
            m_bUseHalfSize = m_param.use_halfsize;

            int        nChannels = colBottom[0].channels;
            List <int> rgShape   = new List <int>()
            {
                1, nChannels, 1, 1
            };

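            // cuDNN's batch-norm functions always expect scale and bias tensors,
            // so when no learned scale/bias is configured we substitute an
            // identity scale (all ones) and a zero bias below.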
            if (!m_bScaleBias)
            {
                m_blobScaleOnes.Reshape(rgShape);
                m_blobScaleOnes.SetData(1.0);
                m_blobBiasZeros.Reshape(rgShape);
                m_blobBiasZeros.SetData(0.0);
            }

            m_hCuDnn                   = m_cuda.CreateCuDNN();
            m_hFwdBottomDesc           = m_cuda.CreateTensorDesc();
            m_hFwdTopDesc              = m_cuda.CreateTensorDesc();
            m_hFwdScaleBiasMeanVarDesc = m_cuda.CreateTensorDesc();
            m_hBwdBottomDesc           = m_cuda.CreateTensorDesc();
            m_hBwdTopDesc              = m_cuda.CreateTensorDesc();
            m_hBwdScaleBiasMeanVarDesc = m_cuda.CreateTensorDesc();
            m_mode  = BATCHNORM_MODE.SPATIAL;
            m_dfEps = Math.Max(m_dfEps, CUDNN_BN_MIN_EPSILON); // cuDNN requires eps >= CUDNN_BN_MIN_EPSILON.

            m_blobMean.Reshape(rgShape);
            m_blobVariance.Reshape(rgShape);

            if (colBottom[0] == colTop[0]) // CuDNN BN does not support in-place.
            {
                m_blobPrivateTop.ReshapeLike(colTop[0]);
                m_blobPrivateBottom.ReshapeLike(colBottom[0]);
            }
        }
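For orientation, the three statistics blobs plus the optional scale/bias pair parameterize the standard batch-norm transform y = scale * (x - mean) / sqrt(variance + eps) + bias, applied per channel. Below is a minimal CPU sketch of the inference-time computation, assuming NCHW float data and that the running statistics have already been rescaled by the correction factor in m_colBlobs[2]; the names are illustrative, not MyCaffe API:

        // Minimal CPU sketch of inference-time batch norm over NCHW float data.
        // 'mean' and 'variance' stand in for the corrected running statistics;
        // 'scale'/'bias' may be null, mirroring the m_bScaleBias == false case.
        static void BatchNormInference(float[] x, float[] y, int num, int channels, int spatial,
                                       float[] mean, float[] variance,
                                       float[] scale, float[] bias, float eps)
        {
            for (int n = 0; n < num; n++)
            {
                for (int c = 0; c < channels; c++)
                {
                    float stdev = (float)System.Math.Sqrt(variance[c] + eps);
                    float g = (scale != null) ? scale[c] : 1.0f; // identity scale when disabled
                    float b = (bias != null) ? bias[c] : 0.0f;   // zero bias when disabled

                    for (int s = 0; s < spatial; s++)
                    {
                        int i = (n * channels + c) * spatial + s;
                        y[i] = g * (x[i] - mean[c]) / stdev + b;
                    }
                }
            }
        }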
Example #37
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_log.CHECK_GE(colBottom[0].num_axes, 2, "Number of axes of bottom must be >= 2");
            PReLUParameter p         = m_param.prelu_param;
            int            nChannels = colBottom[0].channels;

            m_bChannelShared = p.channel_shared;

            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs = new BlobCollection <T>();

                List <int> rgSlopeShape = new List <int>();
                if (!m_bChannelShared)
                {
                    rgSlopeShape.Add(nChannels);
                }

                Blob <T> blobSlope = new Blob <T>(m_cuda, m_log);
                blobSlope.Name = m_param.name + " slope";
                blobSlope.type = BLOB_TYPE.INTERNAL;

                if (!shareParameter(blobSlope, rgSlopeShape))
                {
                    blobSlope.Reshape(rgSlopeShape);
                    FillerParameter fp = p.filler;

                    if (fp == null)
                    {
                        fp = new FillerParameter("constant", 0.25);
                    }

                    Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                    filler.Fill(blobSlope);
                }
                m_colBlobs.Add(blobSlope);
            }

            if (m_bChannelShared)
            {
                m_log.CHECK_EQ(m_colBlobs[0].count(), 1, "Negative slope size is inconsistent with prototxt config.");
            }
            else
            {
                m_log.CHECK_EQ(m_colBlobs[0].count(), nChannels, "Negative slope size is inconsistent with prototxt config.");
            }

            // Propagate gradients to the parameters (as directed by backward pass)
            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);

            List <int> rgShape = new List <int>()
            {
                colBottom[0].count(1)
            };

            m_blobMultiplier.Reshape(rgShape);
            m_blobBackwardBuff.Reshape(rgShape);
            m_blobMultiplier.SetData(1.0);
        }
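The slope blob initialized above drives the PReLU activation f(x) = x for x > 0 and f(x) = a * x otherwise (He et al., 2015), with one learned slope per channel, or a single shared slope when channel_shared is set. A minimal CPU sketch under those assumptions; the names are illustrative, not the layer's actual forward code:

        // Minimal PReLU forward sketch over NCHW float data. 'slope' holds one
        // entry per channel, or a single entry when the slope is shared.
        static void PReLUForward(float[] x, float[] y, int channels, int spatial,
                                 float[] slope, bool channelShared)
        {
            for (int i = 0; i < x.Length; i++)
            {
                int c = (i / spatial) % channels;              // channel of element i
                float a = channelShared ? slope[0] : slope[c]; // negative slope
                y[i] = (x[i] > 0.0f) ? x[i] : a * x[i];
            }
        }

With the default constant filler of 0.25 used above, the layer starts out as a leaky ReLU and learns the slopes during training.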
Example #38
        public void ShouldNotFillSubObjectIfRootObjectNotDefined()
        {
            const string dummyData = "Data";
            var rootObject = new Foo();
            var filler = new Filler();

            var generator = MockRepository.GenerateStub<IGenerateDummyData>();
            generator.Stub(g => g.Generate(Arg<GenerationContext>.Is.Anything)).Return(dummyData);

            filler.Configure<Bar>(config => config.For(f => f.Name).Use(generator));

            filler.Fill(rootObject);
            Assert.That(rootObject.Bar, Is.Null);
        }
Example #39
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            int nNumOutput = (int)m_param.inner_product_param.num_output;

            m_bBiasTerm    = m_param.inner_product_param.bias_term;
            m_bTranspose   = m_param.inner_product_param.transpose;
            m_bEnableNoise = m_param.inner_product_param.enable_noise;
            m_dfSigmaInit  = m_param.inner_product_param.sigma_init;
            m_nN           = nNumOutput;

            List <int> rgShape     = colBottom[0].shape();
            int        nShapeCount = rgShape.Count;

            for (int i = nShapeCount; i <= m_param.inner_product_param.axis; i++)
            {
                rgShape.Add(1);
            }

            if (nShapeCount != rgShape.Count)
            {
                colBottom[0].Reshape(rgShape);
            }

            int nAxis = colBottom[0].CanonicalAxisIndex(m_param.inner_product_param.axis);

            // Dimensions starting from 'axis' are 'flattened' into a single
            // vector of length m_nK. For example, if bottom[0]'s shape is (N, C, H, W)
            // and axis == 1, N inner products with dimension CHW are performed.
            m_nK = colBottom[0].count(nAxis);
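            // For example, with a (hypothetical) bottom shape of (64, 3, 28, 28)
            // and axis == 1, m_nK = 3 * 28 * 28 = 2352, so the layer performs
            // 64 inner products of dimension 2352.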

            // Check if we need to set up the weights.
            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                // Initialize the weight.
                List <int> rgWeightShape = Utility.Create <int>(2, 0);

                if (m_bTranspose)
                {
                    rgWeightShape[0] = m_nK;
                    rgWeightShape[1] = m_nN;
                }
                else
                {
                    rgWeightShape[0] = m_nN;
                    rgWeightShape[1] = m_nK;
                }

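                // The 1/sqrt(rgWeightShape[1]) range used below mirrors the
                // NoisyNet initialization (Fortunato et al., "Noisy Networks
                // for Exploration", 2017): weights start in U[-1/sqrt(p), 1/sqrt(p)]
                // and sigmas at sigma_init/sqrt(p), where p is the fan-in
                // (rgWeightShape[1] in the non-transposed layout).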
                double   dfNoiseRange = 1.0 / Math.Sqrt(rgWeightShape[1]);
                Blob <T> blobWeight   = new Blob <T>(m_cuda, m_log);
                blobWeight.Name = m_param.name + " weights";
                blobWeight.type = BLOB_TYPE.IP_WEIGHT;

                if (!shareParameter(blobWeight, rgWeightShape))
                {
                    blobWeight.Reshape(rgWeightShape);
                    Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.weight_filler);

                    weight_filler.Fill(blobWeight);

                    if (m_bEnableNoise)
                    {
                        blobWeight.scale_data(dfNoiseRange);
                    }
                }
                m_colBlobs.Add(blobWeight);

                // If necessary, initialize and fill the bias term.
                if (m_bBiasTerm)
                {
                    List <int> rgBiasShape = Utility.Create <int>(1, 0);
                    rgBiasShape[0] = m_nN;

                    Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                    blobBias.Name = m_param.name + " bias";
                    blobBias.type = BLOB_TYPE.IP_WEIGHT;

                    if (!shareParameter(blobBias, rgBiasShape))
                    {
                        blobBias.Reshape(rgBiasShape);
                        Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.inner_product_param.bias_filler);

                        bias_filler.Fill(blobBias);

                        if (m_bEnableNoise)
                        {
                            blobBias.scale_data(dfNoiseRange);
                        }
                    }
                    m_colBlobs.Add(blobBias);
                }

                // Add the noise sigma weight and bias blobs.
                if (m_bEnableNoise)
                {
                    FillerParameter fp = new FillerParameter("uniform");
                    fp.min          = -1;
                    fp.max          = 1;
                    m_fillerEpsilon = Filler <T> .Create(m_cuda, m_log, fp);

                    Blob <T> blobSigmaWeight = new Blob <T>(m_cuda, m_log);
                    blobSigmaWeight.Name = m_param.name + " sigma_wt";
                    blobSigmaWeight.type = BLOB_TYPE.WEIGHT;
                    blobSigmaWeight.ReshapeLike(m_colBlobs[0]);
                    blobSigmaWeight.SetData(m_dfSigmaInit / Math.Sqrt(blobSigmaWeight.shape(1)));
                    m_colBlobs.Add(blobSigmaWeight);
                    m_blobEpsilonWeight.ReshapeLike(blobSigmaWeight);

                    if (m_bBiasTerm)
                    {
                        Blob <T> blobSigmaBias = new Blob <T>(m_cuda, m_log);
                        blobSigmaBias.Name = m_param.name + " sigma_bias";
                        blobSigmaBias.type = BLOB_TYPE.WEIGHT;
                        blobSigmaBias.ReshapeLike(m_colBlobs[1]);
                        blobSigmaBias.SetData(m_dfSigmaInit / Math.Sqrt(blobSigmaBias.shape(0)));
                        m_colBlobs.Add(blobSigmaBias);
                        m_blobEpsilonBias.ReshapeLike(blobSigmaBias);
                    }

                    ResetNoise();
                }
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);
        }
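When enable_noise is set, the four blobs built above (weight and bias means plus their sigma counterparts) combine with the epsilon buffers resampled by ResetNoise() to form a NoisyNet-style layer: the effective weight is w_mu + w_sigma * eps_w, and likewise for the bias. A minimal dense sketch of that forward computation, assuming a row-major (nN x nK) weight layout; all names are illustrative, not MyCaffe's implementation:

        // Minimal noisy inner-product sketch:
        // y = (wMu + wSigma .* epsW) x + (bMu + bSigma .* epsB).
        // epsW/epsB are the resampled noise buffers; the sigmas are learned.
        static float[] NoisyInnerProduct(float[] x, int nK, int nN,
                                         float[] wMu, float[] wSigma, float[] epsW,
                                         float[] bMu, float[] bSigma, float[] epsB)
        {
            float[] y = new float[nN];

            for (int o = 0; o < nN; o++)
            {
                float sum = bMu[o] + bSigma[o] * epsB[o]; // noisy bias term
                for (int k = 0; k < nK; k++)
                {
                    int i = o * nK + k;                   // row-major weight index
                    sum += (wMu[i] + wSigma[i] * epsW[i]) * x[k];
                }
                y[o] = sum;
            }
            return y;
        }

Resampling only the epsilon buffers between forward passes (as ResetNoise() does) leaves the learned mean and sigma parameters intact while varying the injected exploration noise.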