示例#1
0
        /// <summary>
        /// Verifies that one SGD training step strictly increases the network's
        /// predicted weight for the ground-truth class on a random 2-element input.
        /// </summary>
        public void ForwardVolumes()
        {
            // Fixed seed keeps the test deterministic.
            Random random = new Random(0);

            ClassificationNetworkTrainer trainer = new ClassificationNetworkTrainer();

            // Small step size; L1/L2 regularization are off (see loop comment below).
            SGD sgd = new SGD()
            {
                LearningRate = 0.0001f,
                Momentum     = 0.0f
            };

            ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x2~5N~5N~3N", this.classes);

            // lets test 100 random point and label settings
            // note that this should work since l2 and l1 regularization are off
            // an issue is that if step size is too high, this could technically fail...
            for (int k = 0; k < 100; k++)
            {
                // Random ground-truth class index in [0, 3).
                // (NextDouble is used instead of Next(3) to preserve the seeded draw sequence.)
                int gti = (int)Math.Floor(random.NextDouble() * 3);

                Tensor x = new Tensor(null, new Shape(Shape.BWHC, 1, 1, 1, 2));
                x.Set(new float[] { ((float)random.NextDouble() * 2) - 1, ((float)random.NextDouble() * 2) - 1 });

                // Direct cast instead of 'as': Clone() must yield a Tensor here, and an
                // unchecked 'as' would only surface later as a confusing NullReferenceException.
                Tensor pv = (Tensor)network.Forward(null, x).Clone();

                trainer.RunEpoch(
                    k,
                    network,
                    Enumerable.Repeat((x, new string[] { this.classes[gti] }), 1),
                    sgd,
                    new LogLikelihoodLoss(),
                    CancellationToken.None);

                // After one step the ground-truth class weight must have increased.
                Tensor pv2 = (Tensor)network.Forward(null, x).Clone();
                Assert.IsTrue(pv2.Weights[gti] > pv.Weights[gti], "k: {0}, gti: {1}, pv2[gti]: {2}, pv[gti]: {3}", k, gti, pv2.Weights[gti], pv.Weights[gti]);
            }
        }
示例#2
0
        /// <summary>
        /// Checks that a forward pass yields a valid 3-class probability distribution:
        /// every output strictly inside (0, 1), and the outputs summing to one.
        /// </summary>
        public void ForwardToProbability()
        {
            ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x2~5N~5N~3N", this.classes);

            Tensor input = new Tensor(null, new Shape(Shape.BWHC, 1, 1, 1, 2));
            input.Set(new float[] { 0.2f, -0.3f });

            Tensor probability = network.Forward(null, input);

            // The architecture ends in a 3-way classification layer.
            Assert.AreEqual(3, probability.Length);

            for (int index = 0; index < probability.Length; index++)
            {
                float weight = probability.Weights[index];

                Assert.IsTrue(weight > 0.0);
                Assert.IsTrue(weight < 1.0);
            }

            // The distribution must sum to one (within floating-point tolerance).
            Assert.AreEqual(1.0, probability.Weights[0] + probability.Weights[1] + probability.Weights[2], 1e-6);
        }
示例#3
0
        /// <summary>
        /// Trains an LSTM network to predict the running XOR of a random symbol stream
        /// and prints the resulting loss.
        /// NOTE(review): the final accuracy assertion is commented out, so this test
        /// currently only verifies that training and inference run without throwing.
        /// </summary>
        public void XorTest1()
        {
            const int AlphabetSize = 16;
            const int VectorSize   = 4;

            const int BatchSize     = 3000;
            const int Epochs        = 200;
            const int TestBatchSize = 3000;
            Random    random        = new Random(0);   // fixed seed for reproducibility

            string[] classes = Enumerable.Range(0, AlphabetSize).Select(v => v.ToString(CultureInfo.InvariantCulture)).ToArray();
            ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x4~80-80-80-16LSTM", classes);

            // One random embedding vector of length VectorSize per alphabet symbol.
            float[] vectors = new RandomGeneratorF().Generate(AlphabetSize * VectorSize);

            // Builds a sequence of 'size' embedded states. 'v' is a running XOR of random
            // symbols; each step copies the embedding of the current state into the input.
            // truth[i - 1] is set to the state AFTER consuming element i, i.e. each
            // position's label is the NEXT state — the network predicts one step ahead.
            // Note truth[size - 1] is left at its default of 0.
            (Tensor, int[]) createSample(int size)
            {
                Tensor input = new Tensor(null, new[] { size, 1, 1, VectorSize });

                int[] truth = new int[size];

                int v = 0;

                for (int i = 0; i < size; i++)
                {
                    v ^= random.Next(0, AlphabetSize);
                    Vectors.Copy(VectorSize, vectors, v * VectorSize, input.Weights, i * VectorSize);

                    if (i > 0)
                    {
                        truth[i - 1] = v;
                    }
                }

                return(input, truth);
            }

            // train the network
            Trainer <int[]> trainer = new Trainer <int[]>()
            {
                ClipValue = 2.0f    // gradient clipping to stabilize LSTM training
            };

            SGD           sgd  = new SGD();
            ILoss <int[]> loss = new LogLikelihoodLoss();

            // Each epoch trains on a single freshly generated batch of BatchSize steps.
            for (int epoch = 0; epoch < Epochs; epoch++)
            {
                (Tensor, int[])sample = createSample(BatchSize);

                TrainingResult result = trainer.RunEpoch(
                    network,
                    Enumerable.Repeat(sample, 1),
                    epoch,
                    sgd,
                    loss,
                    CancellationToken.None);
                Console.WriteLine(result.CostLoss);
            }

            // test the network
            (Tensor x, int[] expected) = createSample(TestBatchSize);
            Tensor y = network.Forward(null, x);
            ////y.Reshape(testBatchSize - 1);
            ////expected.Reshape(testBatchSize - 1);
            float error = loss.Loss(y, expected, false);

            Console.WriteLine(y);
            Console.WriteLine(expected);
            ////Console.WriteLine(y.Axes[1]);
            Console.WriteLine(error);

            ////Assert.IsTrue(errorL1 < 0.01, errorL1.ToString(CultureInfo.InvariantCulture));
        }