Example #1
        [Test] public void CompareMultiLayerPerceptron()
        {
            var model = MultiLayerPerceptronModel();
            var ctx   = Context.GpuContext(0);

            // skip the test unless the GPU has at least 4 GB of memory
            var memMb = ctx.ToGpuContext().Gpu.Device.TotalMemory / 1024.0 / 1024.0;

            if (memMb < 4096.0)
            {
                Assert.Inconclusive("Need more GPU memory.");
            }

            var opt = new GradientDescentOptimizer(ctx, model.Loss.Loss, 0.00008);

            // initialize the optimizer's parameters
            opt.Initalize();

            // load mnist data
            var mnist   = new MNIST();
            var batcher = new Batcher(ctx, mnist.TrainImages, mnist.TrainLabels);

            var timer = Stopwatch.StartNew();

            // warm-up: run one iteration first so one-time setup cost is excluded from the timed loop
            for (var i = 0; i < 1; ++i)
            {
                batcher.Next(5000, opt, model.Images, model.Labels);
                opt.Forward();
                opt.Backward();
                opt.Optimize();
            }
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            timer.Restart();
            // timed training iterations
            for (var i = 0; i < 5; ++i)
            {
                batcher.Next(10000, opt, model.Images, model.Labels);
                opt.Forward();
                opt.Backward();
                opt.Optimize();
            }
            ctx.ToGpuContext().Stream.Synchronize(); // wait for queued GPU work before stopping the timer
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            timer.Restart();
            PrintResult(opt, model, mnist.TestImages, mnist.TestLabels);
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            CleanMem_();
        }
Example #2
        [Test] public void CompareConvolutionalNeuralNetwork()
        {
            var model = ConvolutionalNeuralNetworkModel();
            var ctx   = Context.GpuContext(0);

            // skip the test unless the GPU has at least 4 GB of memory
            var memMb = ctx.ToGpuContext().Gpu.Device.TotalMemory / 1024.0 / 1024.0;

            if (memMb < 4096.0)
            {
                Assert.Inconclusive("Need more GPU memory.");
            }

            var opt = new GradientDescentOptimizer(ctx, model.Loss.Loss, 0.000008);

            opt.Initalize();

            var mnist   = new MNIST();
            var batcher = new Batcher(ctx, mnist.TrainImages, mnist.TrainLabels);

            var timer = Stopwatch.StartNew();

            // warm-up: exclude one-time setup cost from the timed loop
            for (var i = 0; i < 2; ++i)
            {
                batcher.Next(2500, opt, model.Images, model.Labels);
                opt.Forward();
                opt.Backward();
                opt.Optimize();
            }
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            timer.Restart();
            // timed training iterations
            for (var i = 0; i < 20; ++i)
            {
                batcher.Next(2500, opt, model.Images, model.Labels);
                opt.Forward();
                opt.Backward();
                opt.Optimize();
            }
            ctx.ToGpuContext().Stream.Synchronize(); // wait for queued GPU work before stopping the timer
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            timer.Restart();
            PrintResult(opt, model, mnist.TestImages, mnist.TestLabels);
            timer.Stop();
            Console.WriteLine(timer.Elapsed);

            CleanMem_();
        }
Example #3
        [Test] public void MultinomialRegression()
        {
            CleanMem_();
            const long batchSize = 1000L;
            const long epochs    = 3;

            var model = MultinomialRegressionModel();
            var ctx   = Context.GpuContext(0);
            var opt   = new GradientDescentOptimizer(ctx, model.Loss.Loss, 0.0005);

            opt.Initalize();

            var mnist   = new MNIST();
            var batcher = new Batcher(ctx, mnist.TrainImages, mnist.TrainLabels);

            for (var e = 1; e <= epochs; ++e)
            {
                var i = 0;
                while (batcher.Next(batchSize, opt, model.Images, model.Labels))
                {
                    i++;
                    opt.Forward();
                    opt.Backward();
                    opt.Optimize();

                    // report validation status every 10 mini-batches, and once at the very start
                    if ((i % 10 == 0) || ((i == 1) && (e == 1)))
                    {
                        PrintStatus(e, i, opt, model, mnist.ValidationImages, mnist.ValidationLabels);
                    }
                }
            }
            PrintResult(opt, model, mnist.TestImages, mnist.TestLabels);

            CleanMem_();
        }
Example #4
        public void GradientDescentOptimizerReturnsValueCloseToOptimumOneParameter()
        {
            var expected          = 42.0;
            var parameterSettings = new[]
            {
                new ParameterSetting(0, 50, 1, 30)
            };

            Func<double[], double> costFunc = parameters => Math.Abs(expected - parameters[0]);
            var actual = GradientDescentOptimizer.Optimize(costFunc, parameterSettings, parameterSettings[0].StepSize);

            Assert.That(actual.Parameters[0], Is.EqualTo(expected).Within(parameterSettings[0].StepSize));
        }
Example #5
        public void GradientDescentOptimizerHandlesConstantCostFunction()
        {
            var parameterSettings = new[]
            {
                new ParameterSetting(0, 50, 1, 30)
            };

            Func<double[], double> costFunc = parameters => 0;
            OptimizationResult      actual   = null;

            Assert.That(() => actual = GradientDescentOptimizer.Optimize(costFunc, parameterSettings, parameterSettings[0].StepSize), Throws.Nothing);
            Assert.That(actual, Is.Not.Null);
            Assert.That(actual.Parameters[0], Is.Not.NaN);
        }
Example #6
        // runs the backbone of the network (forward and backward propagation, plus the optimizer update)
        [Test] public void LinearNeuron(Datas data)
        {
            CleanMem_();
            const long BatchSize = 1000L;
            const long Epoch     = 5;

            var model = LinearModel();
            var ctx   = Context.GpuContext(0);


            // skip the test unless the GPU has at least 4 GB of memory
            var memMB = ctx.ToGpuContext().Gpu.Device.TotalMemory / 1024.0 / 1024.0;

            if (memMB < 4096.0)
            {
                Assert.Inconclusive("Need more GPU memory.");
            }

            var opt = new GradientDescentOptimizer(ctx, model.Loss.Loss, 0.00005);

            opt.Initalize();

            var batcher = new Batcher(ctx, data.TrainText, data.TrainStory);

            for (var e = 1; e <= Epoch; e++)
            {
                int i = 0;

                while (batcher.Next(BatchSize, opt, model.Text, model.Story))
                {
                    i++;

                    opt.Forward();
                    opt.Backward();
                    opt.Optimize();

                    if ((i % 10 == 0) || (i == 1 && e == 1))
                    {
                        PrintStatus(e, i, opt, model, data.TrainText, data.TrainStory);
                    }
                }
            }
            //PrintResult(opt, model, data.TestText, data.TestStory);
            //Need to make some place to dump the weights and biases, but not exactly sure how... maybe just have it write a story right away?

            CleanMem_();
        }
Example #7
        public static void SimpleLogisticRegression()
        {
            // smaller problem size, handy for a quick check:
            //const int N = 8;
            //const int D = 5;
            //const int P = 3;
            //const double learn = 0.001;

            const int    N     = 100;
            const int    D     = 784;
            const int    P     = 10;
            const double learn = 0.00005;

            var input   = Variable<double>();
            var label   = Variable<double>();
            var weights = Parameter(0.01 * RandomUniform<double>(Shape.Create(D, P))); // small random initial weights
            var pred    = Dot(input, weights);
            var loss    = L2Loss(pred, label);

            var ctx = Context.GpuContext(0);
            var opt = new GradientDescentOptimizer(ctx, loss, learn);

            // generate synthetic data: labels are a noisy linear function of the inputs
            var inputData = new double[N, D];
            var matA      = new double[D, P];
            var matB      = new double[N, P];

            NormalRandomArray(inputData);
            NormalRandomArray(matA);
            NormalRandomArray(matB);
            var labelData = Dot(inputData, matA).Add(matB.Mul(0.1)); // labels = inputs x matA plus small noise

            opt.AssignTensor(input, inputData.AsTensor());
            opt.AssignTensor(label, labelData.AsTensor());

            opt.Initalize();
            for (var i = 0; i < 800; ++i)
            {
                opt.Forward();
                opt.Backward();
                opt.Optimize();
                if (i % 20 == 0)
                {
                    Console.WriteLine($"loss = {opt.GetTensor(loss).ToScalar()}");
                }
            }
        }
Example #8
        public void GradientDescentOptimizerReturnsValueCloseToOptimumThreeParameters()
        {
            var expected          = new double[] { 1, 13, -4 };
            var parameterSettings = new[]
            {
                new ParameterSetting(0, 10, 0.1, Double.NaN),
                new ParameterSetting(5, 20, 0.5, 19),
                new ParameterSetting(-10, 10, 0.2, 1.1)
            };

            Func<double[], double> costFunc = parameters =>
                Math.Abs(expected[0] - parameters[0]) +
                Math.Abs(expected[1] - parameters[1]) +
                Math.Abs(expected[2] - parameters[2]);
            var actual = GradientDescentOptimizer.Optimize(costFunc, parameterSettings, parameterSettings.Sum(p => p.StepSize));

            for (int p = 0; p < expected.Length; p++)
            {
                Assert.That(actual.Parameters[p], Is.EqualTo(expected[p]).Within(parameterSettings[p].StepSize));
            }
        }
Example #9
        public void EstimateAminoAcidHelixAffinity()
        {
            var annotatedSequencesFile = @"G:\Projects\HumanGenome\fullPdbSequencesHelixMarked.txt";
            var annotatedSequences     = ParseHelixSequences(annotatedSequencesFile);
            //var aminoAcidPairs = GetAminoAcidPairs(annotatedSequences);
            //var leastCommonPair = aminoAcidPairs.OrderBy(kvp => kvp.Value).First();

            Func<double[], double> costFunc = parameters => HelixSequenceCostFunc(parameters, annotatedSequences);
            var parameterSettings = GeneratePairwiseAminoAcidParameters();

            var randomizedStartValueIterations = 100;

            //Parallel.For(0L, randomizedStartValueIterations, idx =>
            for (int idx = 0; idx < randomizedStartValueIterations; idx++)
            {
                RandomizeStartValues(parameterSettings, 2);
                var optimizationResult = GradientDescentOptimizer.Optimize(costFunc, parameterSettings, double.NegativeInfinity);
                WriteOptimizationResult(@"G:\Projects\HumanGenome\helixAffinityOptimizationResults.dat", optimizationResult);
            }
        }