Example #1
0
        public void CloneTest()
        {
            // Arrange: a layer shaped 1 x |weights|, then clone it.
            var original = new SoftMaxLayer(new Shape(new[] { 1, SoftMaxLayerTest.weights.Length }));
            var copy     = original.Clone() as SoftMaxLayer;

            // Assert: the clone must serialize to the exact same JSON, i.e. be a deep-equal copy.
            Assert.AreEqual(JsonConvert.SerializeObject(original), JsonConvert.SerializeObject(copy));
        }
        public void SoftMaxLayer_Backward()
        {
            // Fixed seed so GlorotUniform init and the random matrices are reproducible.
            const int batchSize       = 1;
            const int width           = 28;
            const int height          = 28;
            const int depth           = 3;
            const int numberOfClasses = 10;
            var       random          = new Random(232);

            var sut = new SoftMaxLayer(numberOfClasses);
            sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

            // Forward pass first: it primes the state Backward relies on.
            // NOTE: the two random.Next() calls must stay in this order (input seed, then delta seed).
            var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
            sut.Forward(input);

            // Back-propagate a random delta through the softmax.
            var delta  = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
            var actual = sut.Backward(delta);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            // Golden values recorded for seed 232.
            var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { -0.3891016f, -0.6150756f, 0.0618184f, -0.2334358f, 1.544145f, -1.01483f, 0.6160479f, 0.3225261f, -1.007966f, -0.1111263f });

            MatrixAsserts.AreEqual(expected, actual);
        }
Example #3
0
        public void CanComputeSoftmaxForward()
        {
            // 3 x 4 x 5 data set with 10 samples; softmax output size equals its input size.
            var data = new TestDataSet<float>(3, 4, 5, 10);

            TestGpuLayer(new SoftMaxLayer<float>(data.InputSize), data, data.InputSize);
        }
Example #4
0
        public void CopyConstructorTest1()
        {
            // The copy constructor must produce a layer serializing identically to the source.
            var shape    = new Shape(new[] { 1, SoftMaxLayerTest.weights.Length });
            var original = new SoftMaxLayer(shape);
            var copy     = new SoftMaxLayer(original);

            Assert.AreEqual(JsonConvert.SerializeObject(original), JsonConvert.SerializeObject(copy));
        }
Example #5
0
        public void ArchitectureConstructorTest1()
        {
            // A valid "SM" architecture string must be accepted and echoed back,
            // and the output shape must mirror the input shape.
            var shape = new Shape(new[] { 1, SoftMaxLayerTest.weights.Length });
            var layer = new SoftMaxLayer(shape, "SM", null);

            CollectionAssert.AreEqual(shape.Axes, layer.OutputShape.Axes);
            Assert.AreEqual("SM", layer.Architecture);
        }
Example #6
0
        public void SerializeTest()
        {
            // Round-trip: serialize -> deserialize -> serialize again must be stable.
            var layer = new SoftMaxLayer(new Shape(new[] { 1, SoftMaxLayerTest.weights.Length }));

            string json      = JsonConvert.SerializeObject(layer);
            var    roundTrip = JsonConvert.DeserializeObject<SoftMaxLayer>(json);

            Assert.AreEqual(json, JsonConvert.SerializeObject(roundTrip));
        }
Example #7
0
        public void ArchitectureConstructorTest2()
        {
            // "SMX" is not a valid softmax architecture string; construction must throw.
            string architecture = "SMX";

            try
            {
                SoftMaxLayer layer = new SoftMaxLayer(new Shape(new[] { 1, SoftMaxLayerTest.weights.Length }), architecture, null);

                // FIX: without this, the test passed vacuously when no exception was thrown.
                Assert.Fail("Expected ArgumentException for invalid architecture.");
            }
            catch (ArgumentException e)
            {
                // Verify the exact resource-formatted message, then rethrow so an
                // [ExpectedException] attribute on this test (if present) is satisfied.
                Assert.AreEqual(
                    new ArgumentException(string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture), nameof(architecture)).Message,
                    e.Message);
                throw;
            }
        }
Example #8
0
        public void TestGpuLayers()
        {
            // Shared 3 x 4 x 5 data set with 10 samples for all three layer checks.
            var dataSet = new TestDataSet<float>(3, 4, 5, 10);

            // Softmax: output size equals input size.
            Console.WriteLine("Testing softmax forward");
            TestLayerForward(new SoftMaxLayer<float>(dataSet.InputSize), dataSet, dataSet.InputSize);

            // Linear: maps input size to target size with random weights.
            Console.WriteLine("Testing linear forward");
            TestLayerForward(new LinearLayer<float>(dataSet.InputSize, dataSet.TargetSize, new RandomMatrixInitializer<float>()), dataSet);

            // GRU: recurrent layer with proportional random gate initializers.
            Console.WriteLine("Testing GRU forward");
            TestLayerForward(new GruLayer<float>(dataSet.InputSize, dataSet.TargetSize, new ProportionalRandomMatrixInitializer<float>(), new ProportionalRandomMatrixInitializer<float>(), new RandomMatrixInitializer<float>()), dataSet);
        }
        public void SoftMaxLayer_CopyLayerForPredictionModel()
        {
            const int batchSize       = 1;
            const int width           = 28;
            const int height          = 28;
            const int depth           = 3;
            const int numberOfClasses = 10;

            var sut = new SoftMaxLayer(numberOfClasses);
            sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, new Random(232));

            // Copying into an empty list must add exactly one layer...
            var layers = new List<ILayer>();
            sut.CopyLayerForPredictionModel(layers);
            var copy = (SoftMaxLayer)layers.Single();

            // ...whose dimensions match the source layer.
            Assert.AreEqual(sut.Width, copy.Width);
            Assert.AreEqual(sut.NumberOfClasses, copy.NumberOfClasses);
        }
Example #10
0
        public void ForwardBackwardTest()
        {
            // Forward through the softmax, then unroll the session graph backwards
            // with a one-hot gradient on the first output.
            var shape = new Shape(new[] { 1, SoftMaxLayerTest.weights.Length });
            var layer = new SoftMaxLayer(shape);
            var session = new Session();

            var x = new Tensor(null, shape);
            x.Set(SoftMaxLayerTest.weights);

            var y = layer.Forward(session, new[] { x })[0];
            Helpers.AreArraysEqual(SoftMaxLayerTest.activations, y.Weights);

            // Seed the gradient of the first class only, then back-propagate.
            y.Gradient[0] = 1.0f;
            session.Unroll();

            ////float[] expectedDx = SoftMaxLayerTest.activations.Select((w, i) => i == 0 ? w - 1.0f : w).ToArray();
            Helpers.AreArraysEqual(new float[] { 1.0f, 0, 0, 0 }, x.Gradient);
        }
Example #11
0
        public void SoftMaxLayer_Forward()
        {
            // Fixed seed so GlorotUniform init and the random input are reproducible.
            const int batchSize       = 1;
            const int width           = 28;
            const int height          = 28;
            const int depth           = 3;
            const int numberOfClasses = 10;
            var       random          = new Random(232);

            var sut = new SoftMaxLayer(numberOfClasses);
            sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

            var input  = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
            var actual = sut.Forward(input);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            // Golden softmax outputs recorded for seed 232.
            var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { 0.06976377f, 0.1327717f, 0.02337802f, 0.3784489f, 0.0777365f, 0.05847027f, 0.1072708f, 0.0503228f, 0.0624512f, 0.03938601f });

            MatrixAsserts.AreEqual(expected, actual);
        }