Example #1
        static void Main(string[] args)
        {
            WriteLine("Execution begins");
            var fn   = @"c:\DEMO\Data\train.csv";
            var f    = File.ReadLines(fn);
            // Skip the CSV header; the first column is the label, the remaining 784 are pixels
            var data = from t in f.Skip(1)
                       let zz = t.Split(',').Select(float.Parse)
                       select new Digit
                       {
                           Label = (int)zz.First(),
                           Image = zz.Skip(1).Select(x => x / 256f).ToArray()
                       };

            var train = data.Take(40000).ToArray();
            var test  = data.Skip(40000).Take(1000).ToArray();

            WriteLine("Creating network");
            DeviceDescriptor device = DeviceDescriptor.CPUDevice;

            int inputDim  = 784;
            int outputDim = 10;

            var inputShape  = new NDShape(1, inputDim);
            var outputShape = new NDShape(1, outputDim);

            Variable features = Variable.InputVariable(inputShape, DataType.Float);
            Variable label    = Variable.InputVariable(outputShape, DataType.Float);

            var W = new Parameter(new [] { outputDim, inputDim }, DataType.Float, CNTKLib.GlorotUniformInitializer(), device, "w");
            var b = new Parameter(new [] { outputDim }, DataType.Float, 0, device, "b");

            // Linear model: z = W·x + b (softmax is applied inside the loss below)
            var z = CNTKLib.Times(W, features) + b;

            var loss      = CNTKLib.CrossEntropyWithSoftmax(z, label);
            var evalError = CNTKLib.ClassificationError(z, label);

            CNTK.TrainingParameterScheduleDouble learningRatePerSample = new CNTK.TrainingParameterScheduleDouble(0.02, 1);
            IList <Learner> parameterLearners =
                new List <Learner>()
            {
                Learner.SGDLearner(z.Parameters(), learningRatePerSample)
            };
            var trainer = Trainer.CreateTrainer(z, loss, evalError, parameterLearners);

            int minibatchSize         = 64;
            int numMinibatchesToTrain = 500;

            var feat = new BatchSource <float[]>((from x in train select x.Image).ToArray(), minibatchSize);
            var labl = new BatchSource <float[]>((from x in train select x.Label.ToOneHot10(10).ToFloatArray()).ToArray(), minibatchSize);
            var gr   = new List <float>();

            // train the model
            for (int ep = 0; ep < numMinibatchesToTrain; ep++)
            {
                Value ft, lb;

                feat.MoveNext(); labl.MoveNext(); // advance both cursors one minibatch

                ft = Value.CreateBatchOfSequences <float>(inputShape, feat.Current, device);
                lb = Value.CreateBatchOfSequences <float>(outputShape, labl.Current, device);

                trainer.TrainMinibatch(
                    new Dictionary <Variable, Value>()
                {
                    { features, ft }, { label, lb }
                }, device);

                if (ep % 50 == 0)
                {
                    var _loss = trainer.PreviousMinibatchLossAverage();
                    var _eval = trainer.PreviousMinibatchEvaluationAverage();
                    WriteLine($"Epoch={ep}, loss={_loss}, eval={_eval}");
                    gr.Add((float)_loss);
                }
            }

            var G = new GraphLib();

            G.Plot(gr, System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Line);
            int count = 0, correct = 0;

            // Test the model
            foreach (var x in test)
            {
                var imap = new Dictionary <Variable, Value> {
                    { features, Value.CreateBatch(inputShape, x.Image, device) }
                };
                var omap = new Dictionary <Variable, Value> {
                    { z, null }
                };
                z.Evaluate(imap, omap, device);
                var o   = omap[z].GetDenseData <float>(z).First();
                var res = o.MaxIndex(); // argmax of the 10 outputs = predicted digit

                WriteLine("{0} => {1}", x.Label, res);
                if (x.Label == res)
                {
                    correct++;
                }
                count++;
            }
            WriteLine("Done, {0} of {1} correct ({2}%)", correct, count, (double)correct / (double)count * 100);

            Console.ReadKey();
        }
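
Note that the listing leans on several helpers that are not part of the CNTK C# API: the Digit class, the ToOneHot10 / ToFloatArray / MaxIndex / AsArray extension methods, the BatchSource<T> cursor, and the GraphLib charting wrapper. They are defined outside this excerpt, so what follows is only a minimal sketch reconstructed from how they are called above; every name and body is an assumption, not the author's code (GraphLib, a WinForms chart wrapper, is omitted).

    using System.Collections.Generic;
    using System.Linq;

    public class Digit
    {
        public int     Label;   // the digit 0..9 from the first CSV column
        public float[] Image;   // 784 pixel intensities scaled to [0, 1)
    }

    public static class Helpers
    {
        // One-hot encode a class index into a vector of the given length.
        public static IEnumerable<float> ToOneHot10(this int label, int length)
        {
            for (int i = 0; i < length; i++)
                yield return i == label ? 1f : 0f;
        }

        public static float[] ToFloatArray(this IEnumerable<float> seq) => seq.ToArray();

        // Index of the largest element (argmax over the network outputs).
        public static int MaxIndex(this IList<float> values)
        {
            int best = 0;
            for (int i = 1; i < values.Count; i++)
                if (values[i] > values[best]) best = i;
            return best;
        }

        // Wrap a bare dimension in a one-element array (Example #2 uses this for NDShape).
        public static int[] AsArray(this int dim) => new[] { dim };
    }

    // Endless cursor over the training set: each MoveNext() exposes the next
    // minibatch through Current, wrapping around at the end of the data.
    public class BatchSource<T>
    {
        private readonly T[] data;
        private readonly int batchSize;
        private int pos;

        public BatchSource(T[] data, int batchSize)
        {
            this.data      = data;
            this.batchSize = batchSize;
        }

        public T[] Current { get; private set; }

        public bool MoveNext()
        {
            var batch = new T[batchSize];
            for (int i = 0; i < batchSize; i++)
            {
                batch[i] = data[pos];
                pos = (pos + 1) % data.Length; // wrap around
            }
            Current = batch;
            return true;
        }
    }
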
Example #2
        public void Main()
        {
            WriteLine("Starting");

            var G = new GraphLib();

            WriteLine("Generating sample data");

            // Generate random points of two classes around two centres
            float cx1 = -1.0f, cy1 = -1.0f;
            float cx2 = 1.0f, cy2 = 1.0f;

            var Rnd = new Random();

            var x      = new List <float>();
            var y      = new List <float>();
            var lab    = new List <float>();
            var lab_oh = new List <float[]>();

            for (int i = 0; i < 200; i++)
            {
                // Random.Next takes ints only; use NextDouble for offsets in [-2, 2)
                x.Add(cx1 + (float)(Rnd.NextDouble() * 4.0 - 2.0)); y.Add(cy1 + (float)(Rnd.NextDouble() * 4.0 - 2.0));
                lab.Add(-1.0f); lab_oh.Add(new float[] { 1.0f, 0.0f });
                x.Add(cx2 + (float)(Rnd.NextDouble() * 4.0 - 2.0)); y.Add(cy2 + (float)(Rnd.NextDouble() * 4.0 - 2.0));
                lab.Add(1.0f); lab_oh.Add(new float[] { 0.0f, 1.0f });
            }

            G.Plot(x, y);

            WriteLine("Doing data split");

            var x_train    = x.Take(150).ToArray();
            var y_train    = y.Take(150).ToArray();
            var l_train    = lab.Take(150).ToArray();
            var l_oh_train = lab_oh.Take(150).ToArray();

            var x_test    = x.Skip(150).ToArray();
            var y_test    = y.Skip(150).ToArray();
            var l_test    = lab.Skip(150).ToArray();
            var l_oh_test = lab_oh.Skip(150).ToArray();


            WriteLine("Creating network");

            DeviceDescriptor device = DeviceDescriptor.CPUDevice;

            // Create network model
            int inputDim  = 2;
            int outputDim = 2;

            Variable features = Variable.InputVariable(inputDim.AsArray(), DataType.Float);
            Variable label    = Variable.InputVariable(outputDim.AsArray(), DataType.Float);

            var W = new Parameter(new int[] { outputDim, inputDim }, DataType.Float, 1, device, "w");
            var b = new Parameter(new int[] { outputDim }, DataType.Float, 0, device, "b");
            var z = CNTKLib.Times(W, features) + b;

            var loss      = CNTKLib.CrossEntropyWithSoftmax(z, label);
            var evalError = CNTKLib.ClassificationError(z, label);

            // prepare for training
            CNTK.TrainingParameterScheduleDouble learningRatePerSample = new CNTK.TrainingParameterScheduleDouble(0.02, 1);
            IList <Learner> parameterLearners =
                new List <Learner>()
            {
                Learner.SGDLearner(z.Parameters(), learningRatePerSample)
            };
            var trainer = Trainer.CreateTrainer(z, loss, evalError, parameterLearners);

            int minibatchSize         = 64;
            int numMinibatchesToTrain = 1000;

            int k = 0; // current position in dataset

            // train the model
            for (int ep = 0; ep < numMinibatchesToTrain; ep++)
            {
                Value f, l;

                var fa = new float[minibatchSize * inputDim];  // flattened (x, y) pairs
                var la = new float[minibatchSize * outputDim]; // flattened one-hot labels

                for (int j = 0; j < minibatchSize; j++)
                {
                    fa[j * inputDim]      = x_train[k];
                    fa[j * inputDim + 1]  = y_train[k];
                    la[j * outputDim]     = l_oh_train[k][0];
                    la[j * outputDim + 1] = l_oh_train[k][1];
                    k++;
                    if (k == x_train.Length)
                    {
                        k = 0;
                    }
                }

                f = Value.CreateBatch <float>(inputDim.AsArray(), fa, device);
                l = Value.CreateBatch <float>(outputDim.AsArray(), la, device);

                trainer.TrainMinibatch(
                    new Dictionary <Variable, Value>()
                {
                    { features, f }, { label, l }
                }, device);

                if (ep % 50 == 0)
                {
                    var _loss = trainer.PreviousMinibatchLossAverage();
                    var _eval = trainer.PreviousMinibatchEvaluationAverage();
                    WriteLine($"Epoch={ep}, loss={_loss}, eval={_eval}");
                }
            }


            WriteLine("Press any key to exit");
            Console.ReadKey();
        }
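
Example #2 splits off x_test, y_test and l_test but never evaluates on them. A test loop in the spirit of Example #1 could be inserted just before the final ReadKey; the following is a sketch under the assumption that the MaxIndex helper sketched after Example #1 is available (class 0 corresponds to label -1.0, class 1 to +1.0):

            // Sketch: score the 250 held-out points with the trained linear model
            int count = 0, correct = 0;
            for (int i = 0; i < x_test.Length; i++)
            {
                var imap = new Dictionary <Variable, Value> {
                    { features, Value.CreateBatch <float>(inputDim.AsArray(), new float[] { x_test[i], y_test[i] }, device) }
                };
                var omap = new Dictionary <Variable, Value> { { z, null } };
                z.Evaluate(imap, omap, device);

                var o   = omap[z].GetDenseData <float>(z).First();
                var res = o.MaxIndex() == 0 ? -1.0f : 1.0f; // map argmax back to -1/+1
                if (res == l_test[i]) correct++;
                count++;
            }
            WriteLine("Done, {0} of {1} correct ({2}%)", correct, count, (double)correct / count * 100);
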