private async Task MLPTrainRecTest(ComputationContext ctx, GradientComputationMethod method, params LayerBehavior[] rules)
        {
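            // Trains an MLP (built via NNTestHelpers.CreateGDMLPLayers with the supplied layer rules)
            // on eight 3-step sequences: the first two steps carry a single input and no desired
            // output (null), and the final step expects the full 3-value pattern of the sequence,
            // so the network has to retain the earlier inputs to answer correctly.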
            var trainingData =
                new[]
                {
                    new[]
                    {
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( -1.0f, new[] { -1.0f, -1.0f, -1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( 1.0f, new[] { -1.0f, -1.0f, 1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( -1.0f, new[] { -1.0f, 1.0f, -1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( 1.0f, new[] { -1.0f, 1.0f, 1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( -1.0f, new[] { 1.0f, -1.0f, -1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( -1.0f, (float[])null),
                        Tuple.Create( 1.0f, new[] { 1.0f, -1.0f, 1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( -1.0f, new[] { 1.0f, 1.0f, -1.0f }),
                    },
                    new[]
                    {
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( 1.0f, (float[])null),
                        Tuple.Create( 1.0f, new[] { 1.0f, 1.0f, 1.0f }),
                    }
                };

            const int inputSize = 1;
            const int hiddenSize = 8;
            const int outputSize = 3;

            const int maxIterations = 1000;

            var layers = NNTestHelpers.CreateGDMLPLayers(false, inputSize, hiddenSize, outputSize, rules);

            using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers, new MultilayerPerceptronProperties { GradientComputationMethod = method }))
            using (var batch = new SupervisedBatch())
            using (var errors = ctx.DataArrayFactory.Create(maxIterations))
            {
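                // Build the supervised batch: input-only steps add just the input array, while the
                // final, supervised step also supplies the desired output and a writable array of
                // the same size (presumably the buffer for the network's actual output).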
                foreach (var dataEntry in trainingData)
                {
                    var sample = new SupervisedSample();

                    foreach (var sampleEntry in dataEntry)
                    {
                        if (sampleEntry.Item2 == null)
                        {
                            sample.Add(ctx.DataArrayFactory.CreateConst(new[] { sampleEntry.Item1 }));
                        }
                        else
                        {
                            sample.Add(
                                ctx.DataArrayFactory.CreateConst(new[] { sampleEntry.Item1 }),
                                ctx.DataArrayFactory.CreateConst(sampleEntry.Item2),
                                ctx.DataArrayFactory.Create(sampleEntry.Item2.Length));
                        }
                    }

                    batch.Add(sample);
                }

                bool first = true;
                var sw = new Stopwatch();
                for (int it = 0; it < maxIterations; it++)
                {
                    nn.Train(batch);

                    ctx.VectorUtils.CalculateMSE(batch, errors, it);

                    if (first)
                    {
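                        // One-time check on the first iteration: read the weights back and verify
                        // that initialization produced non-zero values, then start the stopwatch so
                        // the reported time excludes this check.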
                        using (var weights = ctx.DataArrayFactory.Create(nn.NumberOfWeights))
                        {
                            nn.GetWeights(weights);
                            float[] wa = new float[weights.Size];
                            await weights.Read(wa);

                            // It must be randomized:
                            Assert.IsTrue(wa.Sum() != 0.0f);
                        }
                        first = false;
                        sw.Start();
                    }
                }

                float[] mses = new float[maxIterations];
                await errors.Read(mses);

                sw.Stop();

                foreach (var mse in mses) Console.WriteLine("Error: {0}", mse.ToString("0.00000000"));

                Console.WriteLine("Ellapsed: {0} ms", sw.Elapsed.TotalMilliseconds);
            }
        }
        private async Task MLPTrainFFTest(ComputationContext ctx, params LayerBehavior[] rules)
        {
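            // Trains a feed-forward MLP to approximate f(x) = x^2 on [-4, 4]; inputs and targets are
            // scaled into the network's working range via NNTestHelpers.Normalize, presumably a
            // min-max normalization driven by the constants below.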
            var trainingData =
                new[,]
                {
                    { -4.0f, 16.0f },
                    { -3.0f, 9.0f },
                    { -2.0f, 4.0f },
                    { -1.0f, 1.0f },
                    { 0.0f, 0.0f },
                    { 1.0f, 1.0f },
                    { 2.0f, 4.0f },
                    { 3.0f, 9.0f },
                    { 4.0f, 16.0f },
                };

            const float maxInput = 4.0f;
            const float minInput = -4.0f;
            const float maxOutput = 16.0f;
            const float minOutput = 0.0f;
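
            // NNTestHelpers.Normalize is not shown in this snippet; a hypothetical min-max mapping into
            // [0, 1] consistent with how the constants above are passed would look like the line below,
            // though the actual helper may target a different range such as [-1, 1]:
            //
            //     static float NormalizeSketch(float value, float min, float max) => (value - min) / (max - min);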

            const int inputSize = 1;
            const int hiddenSize = 16;
            const int outputSize = 1;

            const int maxIterations = 1000;

            var layers = NNTestHelpers.CreateGDMLPLayers(true, inputSize, hiddenSize, outputSize, rules);

            using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers, new MultilayerPerceptronProperties { GradientComputationMethod = GradientComputationMethod.FeedForward }))
            using (var batch = new SupervisedBatch())
            using (var errors = ctx.DataArrayFactory.Create(maxIterations))
            {
                for (int i = 0; i < trainingData.GetLength(0); i++)
                {
                    batch.Add(
                        ctx.DataArrayFactory.Create(new[] { NNTestHelpers.Normalize(trainingData[i, 0], minInput, maxInput) }),
                        ctx.DataArrayFactory.Create(new[] { NNTestHelpers.Normalize(trainingData[i, 1], minOutput, maxOutput) }),
                        ctx.DataArrayFactory.Create(1));
                }

                bool first = true;
                var sw = new Stopwatch();
                sw.Start();
                for (int it = 0; it < maxIterations; it++)
                {
                    nn.Train(batch);

                    if (first)
                    {
                        using (var weights = ctx.DataArrayFactory.Create(nn.NumberOfWeights))
                        {
                            nn.GetWeights(weights);
                            float[] wa = new float[weights.Size];
                            await weights.Read(wa);

                            // It must be randomized:
                            Assert.IsTrue(wa.Sum() != 0.0f);
                        }
                        first = false;
                    }

                    ctx.VectorUtils.CalculateMSE(batch, errors, it);
                }

                float[] mses = new float[maxIterations];
                await errors.Read(mses);

                sw.Stop();

                foreach (var mse in mses) Console.WriteLine("Error: {0}", mse.ToString("0.00000000"));

                Console.WriteLine("Ellapsed: {0} ms", sw.Elapsed.TotalMilliseconds);
            }
        }
        static async Task MLPTrainBPOnlineTest(ComputationContext ctx)
        {
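            // Same x^2 approximation task as above, but with an explicitly constructed
            // 1 -> 512 -> 256 -> 1 network (sigmoid hidden layers, linear output) trained by online
            // gradient descent with learning rate 0.01 and momentum 0.25. The commented-out blocks
            // below show alternative learning rules (cross-entropy, Alopex-B) that can be swapped in.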
            var trainingData =
                new[,]
                {
                    { -4.0f, 16.0f },
                    { -3.0f, 9.0f },
                    { -2.0f, 4.0f },
                    { -1.0f, 1.0f },
                    { 0.0f, 0.0f },
                    { 1.0f, 1.0f },
                    { 2.0f, 4.0f },
                    { 3.0f, 9.0f },
                    { 4.0f, 16.0f },
                };

            const float maxInput = 4.0f;
            const float minInput = -4.0f;
            const float maxOutput = 16.0f;
            const float minOutput = 0.0f;

            const int inputSize = 1;
            const int hidden1Size = 512;
            const int hidden2Size = 256;
            const int outputSize = 1;

            const int maxIterations = 1000;

            var init = new UniformRandomizeWeights(.3f);

            var algo = new GradientDescentLearningRule
            {
                LearningRate = 0.01f,
                Momentum = 0.25f,
                WeightUpdateMode = WeigthUpdateMode.Online,
                Smoothing = false
            };

            //var algo = new CrossEntropyLearningRule
            //{
            //    NarrowingRate = 0.85f,
            //    MutationChance = 0.001f,
            //    MeanMutationStrength = 0.05f,
            //    StdDevMutationStrength = 1.0f,
            //    PopulationSize = 10
            //};

            //var algo = new AlopexBLearningRule();
            //algo.StepSizeB = 0.001f;
            //algo.StepSizeA = 0.0001f;
            //algo.ForgettingRate = 0.35f;

            var layers = new[]
            {
                new Layer(inputSize),
                new Layer(hidden1Size)
                {
                    Behaviors =
                    {
                        init,
                        algo
                    },
                    Descriptions =
                    {
                        new ActivationDescription(ActivationFunction.Sigmoid)
                    }
                },
                new Layer(hidden2Size)
                {
                    Behaviors =
                    {
                        init,
                        algo
                    },
                    Descriptions =
                    {
                        new ActivationDescription(ActivationFunction.Sigmoid)
                    }
                },
                new Layer(outputSize)
                {
                    Behaviors =
                    {
                        init,
                        algo
                    },
                    Descriptions =
                    {
                        new ActivationDescription(ActivationFunction.Linear)
                    }
                },
            };

            layers[0].OutputConnections.AddOneWay(layers[1]);
            layers[1].OutputConnections.AddOneWay(layers[2]);
            layers[2].OutputConnections.AddOneWay(layers[3]);

            using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers, new MultilayerPerceptronProperties { GradientComputationMethod = GradientComputationMethod.FeedForward }))
            using (var batch = new SupervisedBatch())
            using (var errors = ctx.DataArrayFactory.Create(maxIterations))
            {
                for (int i = 0; i < trainingData.GetLength(0); i++)
                {
                    batch.Add(
                        ctx.DataArrayFactory.Create(new[] { Normalize(trainingData[i, 0], minInput, maxInput) }),
                        ctx.DataArrayFactory.Create(new[] { Normalize(trainingData[i, 1], minOutput, maxOutput) }),
                        ctx.DataArrayFactory.Create(1));
                }

                bool first = true;
                var sw = new Stopwatch();
                for (int it = 0; it < maxIterations; it++)
                {
                    nn.Train(batch);

                    ctx.VectorUtils.CalculateMSE(batch, errors, it);

                    if (first)
                    {
                        sw.Start();
                        first = false;
                    }
                }

                float[] mses = new float[maxIterations];
                await errors.Read(mses);

                sw.Stop();

                //foreach (var mse in mses) Console.WriteLine("Error: {0}", mse.ToString("0.00000000"));

                Console.WriteLine("MSE: {0}", mses.Last());

                Console.WriteLine("Ellapsed: {0} ms", sw.Elapsed.TotalMilliseconds);
            }
        }
        private async Task CalculateMSETest(ComputationContext ctx)
        {
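            // Fills a batch with random "desired" and "current" vectors, computes a CPU reference
            // value with CalcMSE, and checks that ctx.VectorUtils.CalculateMSE writes a matching
            // value (to four decimal places) into index 1 of the result array while index 0 stays 0.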
            const int valuesCount = 1024;
            const int repeat = 10000;

            float[][][] desired = 
            { 
                new[] 
                { 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() 
                }, 
                new[] 
                { 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() 
                } 
            };
            float[][][] current = 
            { 
                new[] 
                { 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() 
                }, 
                new[] 
                { 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), 
                    RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() 
                } 
            };
            float mse = CalcMSE(desired, current);

            using (var batch = new SupervisedBatch())
            using (var resultValues = ctx.DataArrayFactory.Create(2))
            {
                Assert.AreEqual(desired.Length, current.Length);
                for (int i1 = 0; i1 < desired.Length; i1++)
                {
                    float[][] d1 = desired[i1];
                    float[][] c1 = current[i1];
                    var sample = new SupervisedSample();
                    batch.Add(sample);
                    Assert.AreEqual(d1.Length, c1.Length);
                    for (int i2 = 0; i2 < d1.Length; i2++)
                    {
                        float[] d2 = d1[i2];
                        float[] c2 = c1[i2];
                        Assert.AreEqual(d2.Length, c2.Length);
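                        // The desired array is also passed in the input slot; what the MSE check
                        // effectively compares is d2 (desired) against c2 (current/actual) values.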
                        var da = ctx.DataArrayFactory.CreateConst(d2);
                        var ca = ctx.DataArrayFactory.CreateConst(c2);
                        sample.Add(da, da, ca);
                    }
                }

                float[] result = new float[2];

                var sw = new Stopwatch();
                sw.Start();

                for (int i = 0; i < repeat; i++)
                {
                    ctx.VectorUtils.CalculateMSE(batch, resultValues, 1);

                    await resultValues.Read(result);

                    Assert.AreEqual(0.0f, result[0]);
                    Assert.AreEqual(Math.Round(mse, 4), Math.Round(result[1], 4));
                }

                sw.Stop();
                Console.WriteLine("Ellapsed: " + sw.ElapsedMilliseconds + " ms");
            }
        }
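
        // CalcMSE above is a CPU reference helper that is not shown in this snippet. The method below
        // is a minimal sketch of such a reference, assuming MSE is a plain mean of squared differences
        // over every value in the batch; the exact normalization used by the original CalcMSE and by
        // VectorUtils.CalculateMSE (e.g. an extra factor of 1/2) may differ, so treat this as an
        // illustration rather than the library's implementation.
        private static float CalcMseReferenceSketch(float[][][] desired, float[][][] current)
        {
            double sum = 0.0;
            long count = 0;
            for (int sample = 0; sample < desired.Length; sample++)
            {
                for (int entry = 0; entry < desired[sample].Length; entry++)
                {
                    float[] d = desired[sample][entry];
                    float[] c = current[sample][entry];
                    for (int i = 0; i < d.Length; i++)
                    {
                        float diff = d[i] - c[i];
                        sum += diff * diff;
                        count++;
                    }
                }
            }
            return count == 0 ? 0.0f : (float)(sum / count);
        }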