Example No. 1
        public void SumAutograd()
        {
            float[] data = { 1,   2,  3,  4,  5,  6,  7,  8,  9,
                             10, 11, 12, 13, 14, 15, 16, 17, 18,
                             19, 20, 21, 22, 23, 24, 25, 26, 27 };
            int[]   shape = { 3, 3, 3 };

            var tensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data, _shape: shape, _autograd: true);

            var sum = tensor.Sum(1);

            float[] gradData  = { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
            int[]   gradShape = { 3, 3 };

            var gradTensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: gradData, _shape: gradShape);

            sum.Backward(gradTensor);

            var grad = tensor.Grad;

            float[] expectedData = { 1, 2, 3, 1, 2, 3, 1, 2, 3,
                                     4, 5, 6, 4, 5, 6, 4, 5, 6,
                                     7, 8, 9, 7, 8, 9, 7, 8, 9 };
            int[]   expectedShape = { 3, 3, 3 };

            Assert.IsTrue(expectedData.SequenceEqual(grad.Data));
            Assert.IsTrue(expectedShape.SequenceEqual(grad.Shape));
        }
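
Note: Sum(1) collapses the middle axis of the {3, 3, 3} input to {3, 3}, so the backward pass repeats each entry of the upstream gradient along the collapsed axis. A minimal stand-alone sketch in plain C# (no Syft types) that reconstructs expectedData from gradData:

        float[] upstream = { 1, 2, 3, 4, 5, 6, 7, 8, 9 };   // shape {3, 3}
        var expanded = new float[27];                        // shape {3, 3, 3}, row-major

        for (var i = 0; i < 3; i++)             // leading axis
            for (var j = 0; j < 3; j++)         // collapsed axis: the gradient repeats here
                for (var k = 0; k < 3; k++)     // trailing axis
                    expanded[i * 9 + j * 3 + k] = upstream[i * 3 + k];

        // expanded now equals expectedData above.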
Example No. 2
        public void TanhAutograd()
        {
            float[] data1  = { 1, 2, 3, 4 };
            int[]   shape1 = { 2, 2 };
            var     tensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data1, _shape: shape1, _autograd: true);

            float[] data2  = { 0.4200f, 0.0707f, 0.0099f, 0.0013f };
            int[]   shape2 = { 2, 2 };

            var expectedGradTensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data2, _shape: shape2);

            float[] data3  = { 0.7616f, 0.9640f, 0.9951f, 0.9993f };
            int[]   shape3 = { 2, 2 };

            var expectedTanhTensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data3, _shape: shape3);

            var tanhTensor = tensor.Tanh();

            for (var i = 0; i < tensor.Size; i++)
            {
                Assert.AreEqual(expectedTanhTensor.Data[i], tanhTensor.Data[i], 1e-4);
            }

            tanhTensor.Backward();   // no explicit gradient: upstream defaults to ones

            for (var i = 0; i < tensor.Size; i++)
            {
                Assert.AreEqual(expectedGradTensor.Data[i], tensor.Grad.Data[i], 1e-4);
            }
        }
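
Note: the constants in data2 and data3 come from d/dx tanh(x) = 1 - tanh(x)^2. Because Backward() is called without an argument, the upstream gradient is implicitly all ones (an assumption about the Syft API that the expected values are consistent with), so tensor.Grad holds the bare derivatives. A quick plain-C# check:

        // Recompute the expected forward values and gradients for inputs 1..4.
        float[] inputs = { 1, 2, 3, 4 };
        foreach (var x in inputs)
        {
            var t = Math.Tanh(x);
            Console.WriteLine($"tanh({x}) = {t:F4}, grad = {1 - t * t:F4}");
        }
        // 0.7616 / 0.4200, 0.9640 / 0.0707, 0.9951 / 0.0099, 0.9993 / 0.0013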
Example No. 3
        public void NumberGetsCloseTo1()
        {
            float[] data    = { 30f, 1f, 1f };
            int[]   shape   = { 3 };
            var     tensor  = new Syft.Tensor.FloatTensor(_ctrl, _data: data, _shape: shape);
            var     softmax = Syft.NN.Functional.Softmax(tensor);

            Assert.GreaterOrEqual(softmax.Data[0], 0.99f);
        }
Example No. 4
        public void AllNumbersAboveZero()
        {
            float[] data    = { 32.23f, -11f, -30f };
            int[]   shape   = { 3 };
            var     tensor  = new Syft.Tensor.FloatTensor(_ctrl, _data: data, _shape: shape);
            var     softmax = Syft.NN.Functional.Softmax(tensor);

            for (var i = 0; i < softmax.Size; ++i)
            {
                Assert.GreaterOrEqual(softmax.Data[i], 0);
            }
        }
Example No. 5
        public void SoftmaxSumsToOne()
        {
            float[] data   = { 4.32f, 1.32f, 0.838f, 1.111f, 0.0001f };
            int[]   shape  = { 5 };
            var     tensor = new Syft.Tensor.FloatTensor(_ctrl, _data: data, _shape: shape);

            var softmax = Syft.NN.Functional.Softmax(tensor);

            var total = softmax.Data.Sum();

            Assert.AreEqual(1f, total, 1e-5);   // float sum is 1 only up to rounding
        }
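
Note: the three 1-D softmax tests above all follow from the definition softmax(x)_i = exp(x_i) / sum_j exp(x_j): each term is positive, the terms sum to one by construction, and a logit that dominates the rest (30 vs. 1) pushes its share toward 1. A reference sketch in plain C# (not the Syft implementation; requires using System and System.Linq):

        static float[] Softmax1D(float[] x)
        {
            var max = x.Max();   // subtract the max first for numerical stability
            var exps = x.Select(v => (float)Math.Exp(v - max)).ToArray();
            var sum = exps.Sum();
            return exps.Select(e => e / sum).ToArray();
        }

        // Softmax1D(new[] { 30f, 1f, 1f })[0] is ~1 up to float precision,
        // and the outputs sum to 1 only up to rounding -- hence the tolerance above.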
Example No. 6
        public void SoftmaxMultiDimensionLast()
        {
            float[] data    = { 1, 2, 3, 8, 3, 2 };
            int[]   shape   = { 2, 3 };
            var     tensor  = new Syft.Tensor.FloatTensor(_ctrl, _data: data, _shape: shape);
            var     softmax = Syft.NN.Functional.Softmax(tensor);

            float[] expected = { 0.090f, 0.245f, 0.665f, 0.991f, 0.007f, 0.002f };
            for (var i = 0; i < expected.Length; ++i)
            {
                Assert.AreEqual(expected[i], (float)Math.Round(softmax.Data[i], 3));
            }
        }
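
Note: with no dim argument the test exercises softmax along the last axis of the {2, 3} tensor, i.e. each row is normalized independently. Checking the first row {1, 2, 3} by hand in plain C# (requires System.Linq):

        var row = new[] { 1f, 2f, 3f };
        var e = row.Select(v => Math.Exp(v)).ToArray();   // 2.718, 7.389, 20.086
        var s = e.Sum();                                  // ~30.193
        Console.WriteLine(string.Join(", ", e.Select(v => Math.Round(v / s, 3))));
        // prints 0.09, 0.245, 0.665 -- the first three expected values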
Example No. 7
        public void SigmoidAutograd()
        {
            float[] data1   = { 1, 2, 3, 4 };
            int[]   shape1  = { 2, 2 };
            var     tensor1 = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data1, _shape: shape1, _autograd: true);

            float[] data2  = { 0.1966f, 0.1050f, 0.0452f, 0.0177f };
            int[]   shape2 = { 2, 2 };

            var expectedGradTensor = new Syft.Tensor.FloatTensor(_controller: ctrl, _data: data2, _shape: shape2);

            var sigmoidTensor = tensor1.Sigmoid();

            sigmoidTensor.Backward();   // no explicit gradient: upstream defaults to ones

            for (var i = 0; i < tensor1.Size; i++)
            {
                Assert.AreEqual(expectedGradTensor.Data[i], tensor1.Grad.Data[i], 1e-4);
            }
        }
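
Note: the analogue of the tanh test, using sigmoid'(x) = s(x)(1 - s(x)); again Backward() without an argument implies an upstream gradient of ones, so tensor1.Grad holds the bare derivatives. A plain-C# check of data2:

        float[] inputs = { 1, 2, 3, 4 };
        foreach (var x in inputs)
        {
            var s = 1.0 / (1.0 + Math.Exp(-x));
            Console.WriteLine($"sigmoid({x}) = {s:F4}, grad = {s * (1 - s):F4}");
        }
        // grads: 0.1966, 0.1050, 0.0452, 0.0177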
Example No. 8
        public void SoftmaxMultiDimension0Th()
        {
            float[] data    = { 1, 2, 3, 8, 3, 2 };
            int[]   shape   = { 2, 3 };
            var     tensor  = new Syft.Tensor.FloatTensor(_ctrl, _data: data, _shape: shape);
            var     softmax = Syft.NN.Functional.Softmax(tensor, 0);

            // corresponds to
            //
            // [
            //   0.001, 0.269, 0.731
            //   0.999, 0.731, 0.269
            // ]
            //
            // Note that each column adds up to 1!

            float[] expected = { 0.001f, 0.269f, 0.731f, 0.999f, 0.731f, 0.269f };
            for (var i = 0; i < expected.Length; ++i)
            {
                Assert.AreEqual(expected[i], (float)Math.Round(softmax.Data[i], 3));
            }
        }
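
Note: with dim 0 the normalization runs down each column of the {2, 3} tensor rather than across each row. A column-wise sketch in plain C# reproducing the expected values:

        float[] logits = { 1, 2, 3, 8, 3, 2 };        // row-major {2, 3}
        var result = new double[6];
        for (var col = 0; col < 3; col++)
        {
            var top = Math.Exp(logits[col]);          // row 0 entry of this column
            var bottom = Math.Exp(logits[3 + col]);   // row 1 entry of this column
            result[col] = top / (top + bottom);
            result[3 + col] = bottom / (top + bottom);
        }
        // result rounds to 0.001, 0.269, 0.731, 0.999, 0.731, 0.269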
Example No. 9
        public void TestModelCanLearn()
        {
            float[] inputData   = { 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1 };
            int[]   inputShape  = { 4, 3 };
            var     inputTensor = new Syft.Tensor.FloatTensor(ctrl, _data: inputData, _shape: inputShape, _autograd: true);

            float[] targetData   = { 0, 0, 1, 1 };
            int[]   targetShape  = { 4, 1 };
            var     targetTensor = new Syft.Tensor.FloatTensor(ctrl, _data: targetData, _shape: targetShape, _autograd: true);

            var model = new Syft.Layer.Model(
                new Linear(ctrl, 3, 4),
                new Sigmoid(),
                new Linear(ctrl, 4, 1),
                new Sigmoid()
                );

            float currentLoss = 1;

            // train the model
            for (var i = 0; i < 10; ++i)
            {
                var prediction = model.Predict(inputTensor);
                var loss       = MSELoss.Value(prediction, targetTensor);
                loss.Backward();

                foreach (var layer in model.Layers)
                {
                    var weight = layer.GetWeights();
                    weight?.Sub(weight.Grad.Transpose(), true);   // in-place SGD step: W -= dL/dW, learning rate 1
                }

                currentLoss = loss.Data.Sum();
            }

            Assert.True(Math.Round(currentLoss, 5) <= 0.20936);
        }
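
Note: the update inside the loop is a bare SGD step with learning rate 1: each weight matrix is reduced in place by its (transposed) gradient. A stand-alone sketch of the same idea with a single sigmoid neuron and no Syft types (the target column equals the first input column, so the data is linearly separable; the epoch count is illustrative only):

        float[][] X = {
            new[] { 0f, 0f, 1f }, new[] { 0f, 1f, 0f },
            new[] { 1f, 0f, 1f }, new[] { 1f, 1f, 1f }
        };
        float[] y = { 0, 0, 1, 1 };
        var w = new float[3];   // weights start at zero

        for (var epoch = 0; epoch < 1000; epoch++)
        {
            for (var n = 0; n < X.Length; n++)
            {
                var z = 0f;
                for (var j = 0; j < 3; j++) z += w[j] * X[n][j];
                var p = 1f / (1f + (float)Math.Exp(-z));   // sigmoid forward

                // d(MSE)/dw = (p - y) * p * (1 - p) * x; step with learning rate 1,
                // mirroring weight.Sub(weight.Grad.Transpose(), true) above.
                for (var j = 0; j < 3; j++)
                    w[j] -= (p - y[n]) * p * (1 - p) * X[n][j];
            }
        }
        // After training, the predictions for the four rows approach 0, 0, 1, 1.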