Example #1
        protected override void EndProcessing()
        {
            if (Device == null)
            {
                Device = DeviceDescriptor.UseDefaultDevice();
            }

            var result = Composite.Dense(
                Input,           // Variable input,
                Shape,           // Shape outputShape,
                Initializer,     // CNTKDictionary initializer,
                Bias,            // bool hasBias,
                BiasInitializer, // CNTKDictionary biasInitializer,
                Stabilize,       // bool stabilize,
                Steepness,       // double steepness
                Activation,      // string activation
                Device,          // DeviceDescriptor device,
                Name             // string name
                );

            WriteObject(new WrappedFunction(result));
        }
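For reference, Example #18 below invokes the same Composite.Dense helper directly with positional arguments; a minimal call matching the parameter comments above might look like this (the layer size, steepness, and activation values are illustrative assumptions):

        // Hypothetical direct call; argument order follows the inline comments above.
        var input = CNTKLib.InputVariable(new int[] { 2 }, false, DataType.Float, "input");
        var dense = Composite.Dense(
            input,                               // Variable input
            new int[] { 100 },                   // Shape outputShape
            CNTKLib.HeNormalInitializer(),       // CNTKDictionary initializer
            true,                                // bool hasBias
            null,                                // CNTKDictionary biasInitializer
            false,                               // bool stabilize
            4,                                   // double steepness
            "relu",                              // string activation
            DeviceDescriptor.UseDefaultDevice(), // DeviceDescriptor device
            "");                                 // string name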
Example #2
        static void Main(string[] args)
        {
            string root = "C:\\sc\\github\\anndotnet\\src\\tool\\";

            //transformDailyLevelVeanaLake();
            //return;

            //regression
            var mlConfigFile1 = $"{root}anndotnet.wnd\\Resources\\Concrete\\ConcreteSlumpProject\\FFNModel.mlconfig";

            //binary classification
            var mlConfigFile2 = $"{root}anndotnet.wnd\\Resources\\Titanic\\TitanicProject\\DNNModel.mlconfig";

            //Multi-class classification
            //Famous multi-class classification dataset: https://archive.ics.uci.edu/ml/datasets/iris
            var mlConfigFile3 = "./model_mlconfigs/iris.mlconfig";

            //run example
            var token2 = new CancellationToken();

            //train mlconfig
            var result = MachineLearning.Train(mlConfigFile3, trainingProgress, token2, null);

            //once the model is trained you can print a performance analysis of the model
            MachineLearning.PrintPerformance(mlConfigFile1);

            //evaluate model and export the result of testing
            MLExport.ExportToCSV(mlConfigFile2, DeviceDescriptor.UseDefaultDevice(), "./model_mlconfigs/iris_result.csv").Wait();

            //******run all configurations in the solution******
            //string strLocation1 = "C:\\sc\\github\\anndotnet\\src\\tool\\";
            //for (int i = 0; i < 10; i++)
            //    runAllml_configurations(strLocation1);


            //*****end of program*****
            Console.WriteLine("Press any key to continue!");
            Console.ReadKey();
        }
Example #3
        static void Main(string[] args)
        {
            //Iris flower recognition
            //Famous multi-class classification dataset: https://archive.ics.uci.edu/ml/datasets/iris
            var mlConfigFile2 = "./model_mlconfigs/iris.mlconfig";

            Console.WriteLine(Environment.NewLine);
            Console.WriteLine($"****Iris flower recognition****");
            Console.WriteLine(Environment.NewLine);
            var token2 = new CancellationToken();
            var result = MachineLearning.Run(mlConfigFile2, DeviceDescriptor.UseDefaultDevice(), token2, trainingProgress, null);

            //evaluate model and export the result of testing
            MachineLearning.EvaluateModel(mlConfigFile2, result.BestModelFile, DeviceDescriptor.UseDefaultDevice());

            //******run all configurations in the solution******
            //string strLocation1 = "D:\\repos\\anndotnet\\src\\tool\\";
            //for(int i=0; i< 10; i++)
            //    runAllml_configurations(strLocation1);
            //*****end of program*****
            Console.WriteLine("Press any key to continue!");
            Console.Read();
        }
Example #4
        public void SetDevice(DeviceType device)
        {
            switch (device)
            {
            case DeviceType.Default:
                DeviceManager.Current = DeviceDescriptor.UseDefaultDevice();
                break;

            case DeviceType.CPU:
                DeviceManager.Current = DeviceDescriptor.CPUDevice;
                break;

            case DeviceType.CUDA:
                DeviceManager.Current = DeviceDescriptor.GPUDevice(0);
                break;

            case DeviceType.OpenCL:
                throw new NotSupportedException("CNTK doesn't support OpenCL. Please use ArrayFire backend.");

            default:
                break;
            }
        }
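A hedged usage sketch: prefer CUDA and fall back to the CPU when no GPU is present (the try/catch fallback is an assumption; CNTK throws when GPU device 0 does not exist):

        // Illustrative only: try the CUDA backend first, fall back to CPU on failure.
        try
        {
            SetDevice(DeviceType.CUDA);
        }
        catch (Exception)
        {
            SetDevice(DeviceType.CPU);
        }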
Example #5
        void Start()
        {
            switch (hardwareOption)
            {
            case HardwareOptions.Auto:
            {
                SelectBestDevice();
                break;
            }

            case HardwareOptions.CPU:
            {
                device = DeviceDescriptor.CPUDevice;
                break;
            }

            case HardwareOptions.GPU:
            {
                device = DeviceDescriptor.GPUDevice(0);
                break;
            }

            case HardwareOptions.Default:
            {
                device = DeviceDescriptor.UseDefaultDevice();
                break;
            }
            }
            for (int i = 0; i < managedModels.Count; i++)
            {
                var model = managedModels[i];
                if (model.LoadOnStart)
                {
                    model.LoadModel();
                }
            }
        }
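SelectBestDevice() is not shown here; a minimal sketch, assuming it probes CNTK's device list and prefers a GPU, could look like this (uses System.Linq):

        // Hypothetical helper: pick the first GPU if one is available, otherwise the CPU.
        void SelectBestDevice()
        {
            var gpu = DeviceDescriptor.AllDevices().FirstOrDefault(d => d.Type == DeviceKind.GPU);
            device = gpu ?? DeviceDescriptor.CPUDevice;
        }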
Example #6
        public void Run()
        {
            var device = DeviceDescriptor.UseDefaultDevice();
            var util   = new Example_103_Util();

            Example_201_Data datasource = new Example_201_Data();
            IEnumerable <Example_201_Item> trainingImages = datasource.LoadTrainingImages().ToList();
            IEnumerable <Example_201_Item> testImages     = datasource.LoadTestImages().ToList();
            IDictionary <double, string>   labelIndex     = datasource.LoadLabelIndex().ToDictionary(x => (double)x.Key, x => x.Value);

            int image_height = 32, image_width = 32, num_channels = 3, num_classes = 10;

            Variable input          = Variable.InputVariable(NDShape.CreateNDShape(new[] { image_height, image_width, num_channels }), DataType.Double, "input");
            Variable expectedOutput = Variable.InputVariable(new int[] { num_classes }, DataType.Double, "expectedOutput");

            Function normalizedInput = CNTKLib.ElementTimes(Constant.Scalar(1.0 / 255.0, device), input);
            Function model           = DefineModel_C(normalizedInput, num_classes, util);

            Variable output = model.Output;

            uint    minibatchSize = 64;
            Trainer trainer       = MakeTrainer(expectedOutput, output, model, minibatchSize);

            {   // train
                int nbSamplesToUseForTraining = trainingImages.Count();
                int numSweepsToTrainWith      = 5;
                int numMinibatchesToTrain     = nbSamplesToUseForTraining * numSweepsToTrainWith / (int)minibatchSize;
                var trainingInput             = trainingImages.Select(x => x.Image.Select(y => (double)y).ToArray()).ToList();
                var trainingOutput            = trainingImages.Select(x => ToOneHotVector(x.Label, labelIndex.Count)).ToList();
                var trainingMinibatchSource   = new GenericMinibatchSource(input, trainingInput, expectedOutput, trainingOutput, nbSamplesToUseForTraining, numSweepsToTrainWith, minibatchSize, device);
                RunTraining(trainer, trainingMinibatchSource, numMinibatchesToTrain, device);
            }

            // evaluate
            Evaluate(model, testImages, input, device, labelIndex);
        }
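ToOneHotVector is a helper defined elsewhere in the sample; a plausible sketch, assuming the label is already a zero-based class index, is:

        // Hypothetical helper: one-hot encode a class index into a double vector.
        static double[] ToOneHotVector(double label, int numClasses)
        {
            var vector = new double[numClasses];
            vector[(int)label] = 1.0;
            return vector;
        }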
Example #7
        public void gaussNormalization_test01()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams
            f.CreateIOVariables("feature 4 0", "flower 3 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization: 0 |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath   = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector.txt";
            string validationPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector_test.txt";
            //string trainingPath = "../../../../data/iris_with_hot_vector.txt";
            //string validationPath = "../../../../data/iris_with_hot_vector_test.txt";

            //string trainingNormalizedPathh = "../../../../data/iris_train_normalized.txt";
            string trainingNormalizedPathh = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_train_normalized.txt";
            var    strNormalizedLine       = System.IO.File.ReadAllLines(trainingNormalizedPathh);

            string validationNormalizedPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_valid_normalized.txt";
            //string validationNormalizedPath = "../../../../data/iris_valid_normalized.txt";
            var strValidNormalizedLine = System.IO.File.ReadAllLines(validationNormalizedPath);
            //
            List <Function> normalizedInputs = null;

            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                normalizedInputs = mbs1.NormalizeInput(f.InputVariables, device);
            }

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(130, device);

                //go through all functions and perform the calculation
                foreach (var fun in normalizedInputs)
                {
                    //
                    var input = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables.First(), data.First().Value.data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);

                    var normalizedValues = output[fun].GetDenseData <float>(fun);

                    for (int i = 0; i < normalizedValues.Count; i++)
                    {
                        var currNorLine = strNormalizedLine[i].Split('\t').ToList();
                        for (int j = 0; j < normalizedValues[0].Count(); j++)
                        {
                            var n1 = normalizedValues[i][j].ToString(CultureInfo.InvariantCulture).Substring(0, 5);
                            var n2 = currNorLine[j].Substring(0, 5);
                            Assert.Equal(n1, n2);
                        }
                    }
                }
            }

            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = MinibatchSourceEx.GetFullBatch(mbs1.Type, mbs1.ValidationDataFile, mbs1.StreamConfigurations, device);

                //go through all functions and perform the calculation
                foreach (var fun in normalizedInputs)
                {
                    //
                    var input = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables.First(), data.First().Value.data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);

                    var normalizedValues = output[fun].GetDenseData <float>(fun);

                    for (int i = 0; i < normalizedValues.Count; i++)
                    {
                        var currNorLine = strValidNormalizedLine[i].Split('\t').ToList();
                        for (int j = 0; j < normalizedValues[0].Count(); j++)
                        {
                            var n1 = normalizedValues[i][j].ToString(CultureInfo.InvariantCulture).Substring(0, 5);
                            var n2 = currNorLine[j].Substring(0, 5);
                            Assert.Equal(n1, n2);
                        }
                    }
                }
            }
        }
Example #8
        public void gaussNormalization_test02()
        {
            float[][] mData = new float[][] {
                new float[] { 5.1f, 3.5f, 1.4f, 0.2f },
                new float[] { 4.9f, 3.0f, 1.4f, 0.2f },
                new float[] { 4.7f, 3.2f, 1.3f, 0.2f },
                new float[] { 4.6f, 3.1f, 1.5f, 0.2f },
                new float[] { 6.9f, 3.1f, 4.9f, 1.5f },
            };
            float[][] constants = new float[][]
            {
                //four constants
                //c1     c2   c3    c4
                new float[] { 10f, 10f, 10f, 10f },
            };

            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();

            //define values, and variables
            var xValues = Value.CreateBatchOfSequences <float>(new int[] { 4 }, mData, device);

            var xConstants = Value.CreateBatchOfSequences <float>(new int[] { 4 }, constants, device);
            var inputVar   = Variable.InputVariable(xValues.Shape, DataType.Float);

            //create a simple function which multiplies the data by a constant
            var cnt = new Constant(xConstants.Data);
            var fn  = CNTKLib.ElementTimes(cnt, inputVar);

            //evaluate function
            var inMap = new Dictionary <Variable, Value>()
            {
                { inputVar, xValues }
            };
            var inputVar2 = Variable.InputVariable(xValues.Shape, DataType.Float);
            var outMap    = new Dictionary <Variable, Value>()
            {
                { fn, null }
            };

            fn.Evaluate(inMap, outMap, device);

            //
            var result = outMap[fn].GetDenseData <float>(fn);

            /*
             * //Expected result
             * //x1     x2   x3    x4
             * 51f, 35f, 14f, 2f,//row1
             * 49f, 30f, 14f, 2f,//row2
             * 47f, 32f, 13f, 2f,//row3
             * 46f, 31f, 15f, 2f,//row4
             * 69f, 31f, 49f, 15f,//row5
             *
             * //Actual result
             * //x1     x2   x3    x4
             * 5.1f, 3.5f, 1.4f, 0.2f,//row1
             * 4.9f, 3.0f, 1.4f, 0.2f,//row2
             * 4.7f, 3.2f, 1.3f, 0.2f,//row3
             * 4.6f, 3.1f, 1.5f, 0.2f,//row4
             * 6.9f, 3.1f, 4.9f, 1.5f,//row5
             *
             */
        }
Example #9
        public void networkConfiguration_test08()
        {
            MLFactory f = new MLFactory();
            //LSTM Network  in(4)-LSTM(5,5)-out(3), with peepholes and stabilization
            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.LSTM, Param1 = 5, Param2 = 5, FParam = Activation.TanH, BParam2 = true, BParam1 = true, Id = 1, Name = "LSTM Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 3, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };

            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 3 }, DataType.Float, "out1");


            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);

            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();

            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(5, w.Count);//4 in lstm and 1 in the output layer

            // total weights 3x5 + 4*5x4
            Assert.Equal(95, w.Sum(p => p.Shape.TotalSize));
            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(5, b.Count);//4 in lstm and  1 in output
            //4*1*5 in lstm and  3 in output layer
            Assert.Equal(23, b.Sum(p => p.Shape.TotalSize));

            //recurrent weights u: 4 gates, each 5x5
            var u = nnparams.Where(p => p.Name.Equals("u")).ToList();

            Assert.Equal(4, u.Count);//4 in lstm
            //4*5*5 in lstm
            Assert.Equal(100, u.Sum(p => p.Shape.TotalSize));

            //peephole only in LSTM.
            var peep = nnparams.Where(p => p.Name.Equals("pe")).ToList();

            //Peep connection in 3 gates ft, it and ot
            Assert.Equal(3, peep.Count);
            //3*5
            Assert.Equal(15, peep.Sum(p => p.Shape.TotalSize));

            //stabilization on all gates: ft, it and ot; when using peepholes, 3 extra.
            var stab = nnparams.Where(p => p.Name.Equals("st")).ToList();

            //for peephole lstm count is 3+3
            Assert.Equal(6, stab.Count);
            //6x1
            Assert.Equal(6, stab.Sum(p => p.Shape.TotalSize));
            //constant: 6x3 +1x3
            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Equal(21, constants.Count);
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(3, nnModel.Output.Shape.Dimensions[0]);
        }
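The asserted totals follow from the in(4)-LSTM(5,5)-out(3) topology and the comments above; spelled out, with the four LSTM gates (forget, input, output, cell):

        // Parameter-count arithmetic behind the assertions above.
        int inDim = 4, hDim = 5, outDim = 3;
        int wTotal    = 4 * hDim * inDim + outDim * hDim; // gate input weights + output layer: 80 + 15 = 95
        int bTotal    = 4 * hDim + outDim;                // gate biases + output bias: 20 + 3 = 23
        int uTotal    = 4 * hDim * hDim;                  // recurrent weights: 100
        int peepTotal = 3 * hDim;                         // peepholes on ft, it and ot: 15
        int stabTotal = 3 + 3;                            // stabilizer scalars: 3 gates + 3 extra with peepholes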
Example #10
        public void networkConfiguration_test01()
        {
            MLFactory f = new MLFactory();

            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 5, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 1, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };
            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 1 }, DataType.Float, "out1");

            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);

            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();
            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(2, w.Count);//2 = 1 in hidden and 1 in out layer

            // total weights 1x5 + 4*5 = 25
            Assert.Equal(25, w.Sum(p => p.Shape.TotalSize));

            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(2, b.Count);//2 = 1 in hidden and 1 in out layer

            //1x5 + 1x1 = 6
            Assert.Equal(6, b.Sum(p => p.Shape.TotalSize));


            //last parameter is related to network output
            var outputLayer = nnModel.Outputs.Where(p => p.Name.Equals(labelVar.Name)).ToList();

            Assert.Single(outputLayer);
            //dimension is 1
            Assert.Equal(1, outputLayer.Sum(p => p.Shape.TotalSize));


            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Empty(constants);//no constants in a plain dense network; peepholes would add them
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(1, nnModel.Output.Shape.Dimensions[0]);
        }
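The same accounting for this plain in(4)-Dense(5)-Dense(1) network:

        // 4x5 hidden weights + 5x1 output weights = 25; 5 + 1 biases = 6.
        int wTotal = 4 * 5 + 5 * 1; // 25
        int bTotal = 5 + 1;         // 6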
Example #11
        public void Run()
        {
            var device = DeviceDescriptor.UseDefaultDevice();

            // 1. Generate Data
            int sampleSize        = 32;
            int nbDimensionsInput = 2; // 2 dimensions (age & tumor size)
            int nbLabels          = 2; // the output is a probability vector that must sum to 1; with a single
            // output dimension it would always be 1, so we use two dimensions, one for 'true' and one for
            // 'false'. The output is then e.g. 0.25 true / 0.75 false => weights sum to 1.
            // first label = false, second = true

            IEnumerable <DataPoint> data = GenerateData(sampleSize);

            //foreach (var pt in data)
            //    Debug.WriteLine($"{pt.Age};{pt.TumorSize};{(pt.HasCancer ? 1 : 0)}");

            Variable inputVariables = Variable.InputVariable(NDShape.CreateNDShape(new[] { nbDimensionsInput }), DataType.Double, "input");
            Variable expectedOutput = Variable.InputVariable(new int[] { nbLabels }, DataType.Double, "output");

            Parameter bias    = new Parameter(NDShape.CreateNDShape(new[] { nbLabels }), DataType.Double, 0);                    // one intercept per output dimension
            Parameter weights = new Parameter(NDShape.CreateNDShape(new[] { nbDimensionsInput, nbLabels }), DataType.Double, 0); // the coefficients to learn
            // 2 input variables, 2 output estimates (probability true and probability false)

            Function predictionFunction = CNTKLib.Plus(CNTKLib.Times(weights, inputVariables), bias);

            Function lossFunction      = CNTKLib.CrossEntropyWithSoftmax(predictionFunction, expectedOutput);
            Function evalErrorFunction = CNTKLib.ClassificationError(predictionFunction, expectedOutput);
            //Function logisticClassifier = CNTKLib.Sigmoid(evaluationFunction, "LogisticClassifier");
            uint minibatchSize = 25;
            //double learningRate = 0.5;
            //TrainingParameterScheduleDouble learningRatePerSample = new TrainingParameterScheduleDouble(learningRate, minibatchSize);

            TrainingParameterScheduleDouble learningRatePerSample = new TrainingParameterScheduleDouble(0.3, (uint)(data.Count() / 1.0));
            TrainingParameterScheduleDouble momentumSchedule      = new TrainingParameterScheduleDouble(0.9126265014311797, minibatchSize);

            var parameters = new ParameterVector();

            foreach (var p in predictionFunction.Parameters())
            {
                parameters.Add(p);
            }

            List <Learner> parameterLearners = new List <Learner>()
            {
                CNTKLib.FSAdaGradLearner(parameters, learningRatePerSample, momentumSchedule, true)
            };

            Trainer trainer = Trainer.CreateTrainer(predictionFunction, lossFunction, evalErrorFunction, parameterLearners);

            double nbSamplesToUseForTraining = 20000;
            int    numMinibatchesToTrain     = (int)(nbSamplesToUseForTraining / (int)minibatchSize);

            // train the model
            for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
            {
                IEnumerable <DataPoint> trainingData = GenerateData((int)minibatchSize);

                List <double> minibatchInput  = new List <double>();
                List <double> minibatchOutput = new List <double>();
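                // Each DataPoint contributes [Age, TumorSize] to minibatchInput and a two-element
                // one-hot label to minibatchOutput: HasCancer -> { 0, 1 }, otherwise { 1, 0 }
                // (first label = false, second = true, matching the label convention above).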
                foreach (DataPoint row in trainingData)
                {
                    minibatchInput.Add(row.Age);
                    minibatchInput.Add(row.TumorSize);
                    minibatchOutput.Add(row.HasCancer ? 0d : 1d);
                    minibatchOutput.Add(row.HasCancer ? 1d : 0d);
                }


                Value inputData  = Value.CreateBatch <double>(NDShape.CreateNDShape(new int[] { nbDimensionsInput }), minibatchInput, device);
                Value outputData = Value.CreateBatch <double>(NDShape.CreateNDShape(new int[] { nbLabels }), minibatchOutput, device);

                trainer.TrainMinibatch(new Dictionary <Variable, Value>()
                {
                    { inputVariables, inputData }, { expectedOutput, outputData }
                }, device);

                PrintTrainingProgress(trainer, minibatchCount);
            }

            // test
            {
                int testSize = 100;
                IEnumerable <DataPoint> trainingData = GenerateData(testSize);

                List <double> minibatchInput  = new List <double>();
                List <double> minibatchOutput = new List <double>();
                foreach (DataPoint row in trainingData)
                {
                    minibatchInput.Add(row.Age);
                    minibatchInput.Add(row.TumorSize);
                    minibatchOutput.Add(row.HasCancer ? 0d : 1d);
                    minibatchOutput.Add(row.HasCancer ? 1d : 0d);
                }


                Value inputData  = Value.CreateBatch <double>(NDShape.CreateNDShape(new int[] { nbDimensionsInput }), minibatchInput, device);
                Value outputData = Value.CreateBatch <double>(NDShape.CreateNDShape(new int[] { nbLabels }), minibatchOutput, device);

                IList <IList <double> > expectedOneHot = outputData.GetDenseData <double>(predictionFunction.Output);
                IList <int>             expectedLabels = expectedOneHot.Select(l => l.IndexOf(1.0d)).ToList();

                var outputDataMap = new Dictionary <Variable, Value>()
                {
                    { predictionFunction.Output, null }
                };
                predictionFunction.Evaluate(
                    new Dictionary <Variable, Value>()
                {
                    { inputVariables, inputData }
                },
                    outputDataMap,
                    device);

                Value outputValue = outputDataMap[predictionFunction.Output];

                IList <IList <double> > actualLabelSoftMax = outputValue.GetDenseData <double>(predictionFunction.Output);
                var actualLabels = actualLabelSoftMax.Select((IList <double> l) => l.IndexOf(l.Max())).ToList();
                int misMatches   = actualLabels.Zip(expectedLabels, (a, b) => a.Equals(b) ? 0 : 1).Sum();

                Debug.WriteLine($"Validating Model: Total Samples = {testSize}, Misclassify Count = {misMatches}");
            }
        }
Example #12
        public ExpressionSampler(string name, Variable expression, Variable inputVariable = null, int minibatchSize = 1, Value initialValue = null, int iterationsPerEpoch = int.MaxValue)
        {
            Name          = name;
            Expression    = expression;
            InputVariable = inputVariable;
            MinibatchSize = minibatchSize;

            _initialValue = initialValue;

            if (initialValue != null)
            {
                PrevValue = initialValue;
            }
            else
            {
                if (InputVariable != null)
                {
                    var shape         = expression.Shape;
                    var initialBuffer = new float[shape.TotalSize * minibatchSize];

                    var dims = new int[shape.Rank + 1];
                    shape.Dimensions.CopyTo(dims, 0);
                    dims[shape.Rank] = minibatchSize;

                    PrevValue = new Value(NDArrayViewMethods.SafeCreate(dims, initialBuffer, DeviceDescriptor.UseDefaultDevice()));
                }
            }

            IterationsPerEpoch = iterationsPerEpoch;

            Iterations = 0;
        }
Example #13
        public void Run()
        {
            var device = DeviceDescriptor.UseDefaultDevice();

            var util = new Example_103_Util();

            // data
            string trainImagesPath = "./Example_103/train-images-idx3-ubyte.gz";
            //string trainLabelsPath = "./Example_103/train-labels-idx1-ubyte.gz";
            List <double[]> trainImages = util.LoadImages(trainImagesPath).Select(x => x.Select(y => (double)y).ToArray()).ToList();
            //List<int> trainLabels = util.LoadLabels(trainLabelsPath);
            //List<double[]> trainLabels1Hot = trainLabels.Select(x => util.ConvertTo1Hot(x)).Select(x => x.Cast<double>().ToArray()).ToList();

            string evelImagesPath = "./Example_103/t10k-images-idx3-ubyte.gz";
            //string evalLabelsPath = "./Example_103/t10k-labels-idx1-ubyte.gz";
            List <double[]> evalImages = util.LoadImages(evelImagesPath).Select(x => x.Select(y => (double)y).ToArray()).ToList();
            //List<int> evalLabels = util.LoadLabels(evalLabelsPath);
            //List<int[]> evalLabels1Hot = evalLabels.Select(x => util.ConvertTo1Hot(x)).ToList();

            // model

            int sampleSize        = trainImages.Count;
            int nbDimensionsInput = 28 * 28;

            Variable inputVariables = Variable.InputVariable(NDShape.CreateNDShape(new [] { nbDimensionsInput }), DataType.Double, "input");
            Variable expectedOutput = Variable.InputVariable(NDShape.CreateNDShape(new [] { nbDimensionsInput }), DataType.Double, "output");

            Function encodeDecode = DefineModel_104B(util, inputVariables, device);

            //var scaledModelOutput = CNTKLib.ElementTimes(Constant.Scalar<double>(1.0 / 255.0, device), encodeDecode);
            //var scaledExpectedOutput = CNTKLib.ElementTimes(Constant.Scalar<double>(1.0 / 255.0, device), expectedOutput);

            //{

            //    Function test = CNTKLib.ElementTimes(
            //                Constant.Scalar(-1.0d, device),
            //                inputVariables);


            //}


            //Function lossFunction = -scaledExpectedOutput * CNTKLib.Log(scaledModelOutput) - (Constant.Scalar(-1.0d, device) - scaledExpectedOutput) * CNTKLib.Log(1 - scaledModelOutput);

            var scaledExpectedOutput = CNTKLib.ElementTimes(expectedOutput, Constant.Scalar(1 / 255.0, device));
            //Function lossFunction = CNTKLib.CrossEntropyWithSoftmax(encodeDecode, expectedOutput);

            // Function lossFunction = CNTKLib.CrossEntropyWithSoftmax(scaledModelOutput, scaledExpectedOutput);
            Function lossFunction = CNTKLib.Square(CNTKLib.Minus(scaledExpectedOutput, encodeDecode));

            Function evalErrorFunction = CNTKLib.ClassificationError(encodeDecode, scaledExpectedOutput);

            // training

            Trainer trainer;
            {
                // define training
                //int epochSize = 30000;
                uint minibatchSize = 64;
                //double learningRate = 0.8;
                int numSweepsToTrainWith      = 2;     // a sweep is one full pass over the training data
                int nbSamplesToUseForTraining = 60000; // trainImages.Count;

                double lr_per_sample = 0.2;
                //double lr_per_sample = 0.2; // 0.00003;
                //double lr_per_sample = 0.00003; // 0.00003;
                uint epoch_size = 30000;    // 30000 samples is half the dataset size

                TrainingParameterScheduleDouble learningRatePerSample = new TrainingParameterScheduleDouble(lr_per_sample, epoch_size);
                TrainingParameterScheduleDouble momentumSchedule      = new TrainingParameterScheduleDouble(0.9126265014311797, minibatchSize);

                var parameters = new ParameterVector();
                foreach (var p in encodeDecode.Parameters())
                {
                    parameters.Add(p);
                }

                List <Learner> parameterLearners = new List <Learner>()
                {
                    CNTKLib.FSAdaGradLearner(parameters, learningRatePerSample, momentumSchedule, true)
                };
                //IList<Learner> parameterLearners = new List<Learner>() { Learner.SGDLearner(encodeDecode.Parameters(), learningRatePerSample) };
                trainer = Trainer.CreateTrainer(encodeDecode, lossFunction, evalErrorFunction, parameterLearners);

                // run training

                int numMinibatchesToTrain = nbSamplesToUseForTraining * numSweepsToTrainWith / (int)minibatchSize;

                var minibatchSource = new GenericMinibatchSource(inputVariables, trainImages, expectedOutput, trainImages, nbSamplesToUseForTraining, numSweepsToTrainWith, minibatchSize, device);

                double aggregate_metric = 0;
                for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
                {
                    IDictionary <Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();
                    trainer.TrainMinibatch(data, device);

                    double samples = trainer.PreviousMinibatchSampleCount();
                    double avg     = trainer.PreviousMinibatchEvaluationAverage();
                    aggregate_metric += avg * samples;
                    double nbSampleSeen = trainer.TotalNumberOfSamplesSeen();
                    double train_error  = aggregate_metric / nbSampleSeen;
                    Debug.WriteLine($"{minibatchCount} Average training error: {train_error:p2}");
                }
            }

            // evaluate
            {
                uint testMinibatchSize     = 32;
                int  nbSamplesToTest       = 32;// evalImages.Count;
                int  numMinibatchesToTrain = nbSamplesToTest / (int)testMinibatchSize;

                double metric_numer = 0;
                double metric_denom = 0;

                var minibatchSource = new GenericMinibatchSource(inputVariables, evalImages, expectedOutput, evalImages, nbSamplesToTest, 1, testMinibatchSize, device);
                for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
                {
                    IDictionary <Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();

                    ////UnorderedMapVariableMinibatchData evalInput = new UnorderedMapVariableMinibatchData();
                    ////foreach (var row in data)
                    ////    evalInput[row.Key] = row.Value;

                    ////double error = trainer.TestMinibatch(evalInput, device);

                    ////metric_numer += Math.Abs(error * testMinibatchSize);
                    ////metric_denom += testMinibatchSize;

                    ////MinibatchData outputValue = evalInput[expectedOutput];

                    //IList<IList<double>> inputPixels = outputValue.data.GetDenseData<double>(inputVariables);

                    //IList<IList<double>> actualLabelSoftMax = outputValue.data.GetDenseData<double>(encodeDecode.Output);

                    //for (int i = 0; i < actualLabelSoftMax.Count; i++)
                    //    PrintBitmap(inputPixels[i], actualLabelSoftMax[i], i);

                    // var normalizedInput = CNTKLib.ElementTimes(Constant.Scalar<double>(1.0 / 255.0, device), inputVariables);

                    Dictionary <Variable, Value> input = new Dictionary <Variable, Value>()
                    {
                        { inputVariables, data[inputVariables].data }
                    };
                    Dictionary <Variable, Value> output = new Dictionary <Variable, Value>()
                    {
                        // { normalizedInput.Output, null }
                        { encodeDecode.Output, null }
                    };

                    encodeDecode.Evaluate(input, output, device);

                    IList <IList <double> > inputPixels  = input[inputVariables].GetDenseData <double>(inputVariables);
                    IList <IList <double> > outputPixels = output[encodeDecode.Output].GetDenseData <double>(encodeDecode.Output);
                    for (int i = 0; i < inputPixels.Count; i++)
                    {
                        PrintBitmap(inputPixels[i], outputPixels[i], i);
                    }
                }

                // NOTE: the TestMinibatch block above is commented out, so metric_numer and
                // metric_denom are still zero; guard the division to avoid reporting NaN.
                double test_error = metric_denom > 0 ? (metric_numer * 100.0) / metric_denom : 0.0;
                Debug.WriteLine($"Average test error: {test_error:p2}");
            }
        }
Example #14
        public static void Run_MNIST_Test()
        {
            //
            var device = DeviceDescriptor.UseDefaultDevice();
            //dims
            var inDim  = 784;
            var outDim = 10;

            // MNIST images are 28x28=784 pixels
            var input  = CNTKLib.InputVariable(new NDShape(1, inDim), DataType.Float, "features");
            var labels = CNTKLib.InputVariable(new NDShape(1, outDim), DataType.Float, "labels");

            //create network
            var nnModel = createModel(input, outDim, 1, device);

            //Loss and Eval functions
            var trainingLoss = CNTKLib.CrossEntropyWithSoftmax(nnModel, labels, "lossFunction");
            var prediction   = CNTKLib.ClassificationError(nnModel, labels, "classificationError");

            //create learners and trainer
            // set per sample learning rate and momentum
            var learningRatePerSample = new CNTK.TrainingParameterScheduleDouble(0.001, 1);
            var momentumPerSample     = new CNTK.TrainingParameterScheduleDouble(0.9, 1);
            var nnParams          = nnModel.Parameters();
            var parameterLearners = new List <Learner>()
            {
                CNTKLib.AdamLearner(new ParameterVector(nnModel.Parameters().ToList()), learningRatePerSample, momentumPerSample)
            };

            var trainer = Trainer.CreateTrainer(nnModel, trainingLoss, prediction, parameterLearners);

            //create minibatch source
            var sConfigs = new StreamConfiguration[]
            { new StreamConfiguration("features", inDim), new StreamConfiguration("labels", outDim) };

            //this file is huge and cannot be uploaded to GitHub.
            //it can be downloaded from: https://github.com/Microsoft/CNTK/tree/987b22a8350211cb4c44278951857af1289c3666/Examples/Image/DataSets/MNIST
            var minibatchSource   = MinibatchSource.TextFormatMinibatchSource("..\\..\\..\\Data\\MNIST-TrainData.txt", sConfigs, MinibatchSource.InfinitelyRepeat);
            var minibatchSize     = (uint)754;
            var featureStreamInfo = minibatchSource.StreamInfo("features");
            var labelStreamInfo   = minibatchSource.StreamInfo("labels");
            var maxIt             = 250;
            var curIt             = 1;

            while (true)
            {
                var minibatchData = minibatchSource.GetNextMinibatch(minibatchSize, device);
                var arguments     = new Dictionary <Variable, MinibatchData>
                {
                    { input, minibatchData[featureStreamInfo] },
                    { labels, minibatchData[labelStreamInfo] }
                };

                trainer.TrainMinibatch(arguments, device);

                //
                if (minibatchData[featureStreamInfo].sweepEnd)
                {
                    if (curIt % 50 == 0 || curIt == 1)
                    {
                        printProgress(trainer, curIt);
                    }
                    curIt++;
                }

                if (maxIt <= curIt)
                {
                    break;
                }
            }

            // save the trained model
            nnModel.Save("mnist_classifier");

            // validate the model
            var minibatchSourceNewModel = MinibatchSource.TextFormatMinibatchSource("../../../data/MNIST-TestData.txt", sConfigs, MinibatchSource.InfinitelyRepeat);
            //prepare vars to accept results
            List <List <float> > X = new List <List <float> >();
            List <float>         Y = new List <float>();

            //Model validation
            ValidateModel("mnist_classifier", minibatchSourceNewModel, new int[] { 28, 28 }, 10, "features", "labels", device, 1000, X, Y, false);

            //show image classification result
            showResult(X, Y);
        }
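printProgress and showResult are helpers not shown in this example; a minimal printProgress sketch, assuming it reports the trainer's built-in minibatch averages, might be:

        // Hypothetical helper: report loss and classification error for the last minibatch.
        static void printProgress(Trainer trainer, int iteration)
        {
            Console.WriteLine($"Iteration={iteration}, Loss={trainer.PreviousMinibatchLossAverage():F4}, " +
                $"Error={trainer.PreviousMinibatchEvaluationAverage():F4}");
        }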
Example #15
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\CatsAndDogs";
            var mapFiles = PrepareMapFiles(baseDataDirectoryPath);

            // Define the input and output shape.
            var inputShape      = new int[] { 150, 150, 3 };
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 64, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // setup loss and learner.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Create the network.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            var featuresName = "features";
            var targetsName  = "targets";

            // setup name to variable map.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };
            var train = CreateMinibatchSource(mapFiles.trainFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice augmentation is switched off for validation data.
            var valid = CreateMinibatchSource(mapFiles.validFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: false);
            var validationSource = new CntkMinibatchSource(valid, nameToVariable);

            // Notice augmentation is switched off for test data.
            var test = CreateMinibatchSource(mapFiles.testFilePath, featuresName, targetsName,
                                             numberOfClasses, inputShape, augmentation: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            model.Fit(trainMinibatchSource: trainingSource,
                      epochs: 100, batchSize: 32,
                      validationMinibatchSource: validationSource);

            // Evaluate the model using the test set.
            var (loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Save model.
            model.Network.Save("cats_and_dogs_small_2.cntk");
        }
Example #16
        public void Run()
        {
            var device = DeviceDescriptor.UseDefaultDevice();

            var util = new Example_103_Util();

            // data
            string        trainImagesPath = "./Example_103/train-images-idx3-ubyte.gz";
            string        trainLabelsPath = "./Example_103/train-labels-idx1-ubyte.gz";
            List <byte[]> trainImages     = util.LoadImages(trainImagesPath);
            List <int>    trainLabels     = util.LoadLabels(trainLabelsPath);
            List <int[]>  trainLabels1Hot = trainLabels.Select(x => util.ConvertTo1Hot(x)).ToList();

            string        evelImagesPath = "./Example_103/t10k-images-idx3-ubyte.gz";
            string        evalLabelsPath = "./Example_103/t10k-labels-idx1-ubyte.gz";
            List <byte[]> evalImages     = util.LoadImages(evelImagesPath);
            List <int>    evalLabels     = util.LoadLabels(evalLabelsPath);
            List <int[]>  evalLabels1Hot = evalLabels.Select(x => util.ConvertTo1Hot(x)).ToList();

            // model

            int sampleSize        = trainImages.Count;
            int nbDimensionsInput = trainImages[0].Length;
            int nbLabels          = 10; // digits 0 through 9

            Variable inputVariables = Variable.InputVariable(NDShape.CreateNDShape(new[] { nbDimensionsInput }), DataType.Double, "input");
            Variable expectedOutput = Variable.InputVariable(NDShape.CreateNDShape(new int[] { nbLabels }), DataType.Double, "output");

            var scaledInput = CNTKLib.ElementTimes(Constant.Scalar <double>(1.0 / 255.0, device), inputVariables);

            Function lastLayer = DefineModel_103C(util, nbLabels, scaledInput);

            Function lossFunction      = CNTKLib.CrossEntropyWithSoftmax(lastLayer, expectedOutput);
            Function evalErrorFunction = CNTKLib.ClassificationError(lastLayer, expectedOutput);

            // training

            Trainer trainer;
            {
                // define training

                uint   minibatchSize = 64;
                double learningRate  = 0.2;
                TrainingParameterScheduleDouble learningRatePerSample = new TrainingParameterScheduleDouble(learningRate, minibatchSize);
                List <Learner> parameterLearners = new List <Learner>()
                {
                    Learner.SGDLearner(lastLayer.Parameters(), learningRatePerSample)
                };
                trainer = Trainer.CreateTrainer(lastLayer, lossFunction, evalErrorFunction, parameterLearners);

                // run training

                int nbSamplesToUseForTraining = trainImages.Count;
                int numSweepsToTrainWith      = 10; // a sweep is one full pass over the training data
                int numMinibatchesToTrain     = nbSamplesToUseForTraining * numSweepsToTrainWith / (int)minibatchSize;

                var minibatchSource = new Example_103_MinibatchSource(inputVariables, trainImages, expectedOutput, trainLabels1Hot, nbSamplesToUseForTraining, numSweepsToTrainWith, minibatchSize, device);
                for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
                {
                    IDictionary <Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();
                    trainer.TrainMinibatch(data, device);
                    util.PrintTrainingProgress(trainer, minibatchCount);
                }
            }

            // evaluate
            {
                uint   testMinibatchSize     = 512;
                int    nbSamplesToTest       = evalImages.Count;
                int    numMinibatchesToTrain = nbSamplesToTest / (int)testMinibatchSize;
                double testResult            = 0;

                var minibatchSource = new Example_103_MinibatchSource(inputVariables, evalImages, expectedOutput, evalLabels1Hot, nbSamplesToTest, 1, testMinibatchSize, device);
                for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
                {
                    IDictionary <Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();

                    UnorderedMapVariableMinibatchData evalInput = new UnorderedMapVariableMinibatchData();
                    foreach (var row in data)
                    {
                        evalInput[row.Key] = row.Value;
                    }

                    double error = trainer.TestMinibatch(evalInput, device);
                    testResult += error;

                    //var z = CNTKLib.Softmax(lastLayer);
                    //var tOut = new Dictionary<Variable, Value>() { { z.Output, null } };

                    //z.Evaluate(
                    //    new Dictionary<Variable, Value>() { { inputVariables, data[inputVariables].data } },
                    //    tOut,
                    //    device
                    //    );

                    //Value outputValue = tOut[z.Output];
                    //IList<IList<double>> actualLabelSoftMax = outputValue.GetDenseData<double>(z.Output);
                    //var actualLabels = actualLabelSoftMax.Select((IList<double> l) => l.IndexOf(l.Max())).ToList();

                    //Value expectedOutputValue = data[expectedOutput].data;
                    //IList<IList<double>> expectedLabelsSoftmax = expectedOutputValue.GetDenseData<double>(z.Output);
                    //var expectedLabels = expectedLabelsSoftmax.Select((IList<double> l) => l.IndexOf(l.Max())).ToList();

                    //for(int i = 0; i < expectedLabels.Count; i++)
                    //{
                    //    if (actualLabels[i] != expectedLabels[i])
                    //    {
                    //        Debug.WriteLine($"{actualLabels[i]} {expectedLabels[i]}");
                    //    }
                    //}

                    //int misMatches = actualLabels.Zip(expectedLabels, (a, b) => a.Equals(b) ? 0 : 1).Sum();

                    Debug.WriteLine($"Average test error: {(testResult / (minibatchCount + 1)):p2}");
                }

                Debug.WriteLine($"Average test error: {(testResult / numMinibatchesToTrain):p2}");
            }
        }
Example #17
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");
            var testFilePath          = Path.Combine(baseDataDirectoryPath, "Test-28x28_cntk_text.txt");

            // Define the input and output shape.
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var input = Layers.Input(inputShape, dataType);
            // scale input between 0 and 1.
            var scaledInput = CNTKLib.ElementTimes(Constant.Scalar(0.00390625f, device), input);

            var network = scaledInput
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()

                          .Dense(64, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // setup loss and learner.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Create the network.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            // Network will be trained using the training set,
            // and tested using the test set.

            // setup name to variable map.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { "features", inputVariable },
                { "labels", targetVariable },
            };

            // The order of the training data is randomized.
            var train          = CreateMinibatchSource(trainFilePath, nameToVariable, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice randomization is switched off for test data.
            var test       = CreateMinibatchSource(testFilePath, nameToVariable, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            model.Fit(trainingSource, epochs: 5, batchSize: 64);

            // Evaluate the model using the test set.
            var (loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");
        }
Example #18
        public void TestTrainingSession2()
        {
            // Data

            var features = DataSourceFactory.Create(new float[] { 0, 0, 0, 1, 1, 0, 1, 1, 3, 4, 3, 5, 4, 4, 4, 5 }, new int[] { 2, 1, -1 });
            var labels   = DataSourceFactory.Create(new float[] { 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0 }, new int[] { 2, 1, -1 });

            var sampler = new DataSourceSampler(new Dictionary <string, IDataSource <float> >()
            {
                { "input", features },
                { "label", labels }
            }, 2);

            // Model

            var input = CNTKLib.InputVariable(new int[] { 2 }, false, DataType.Float, "input");
            var h     = Composite.Dense(input, new int[] { 100 }, CNTKLib.HeNormalInitializer(), true, null, false, 4, "relu", DeviceDescriptor.UseDefaultDevice(), "");

            h = Composite.Dense(h, new int[] { 2 }, CNTKLib.GlorotNormalInitializer(), true, null, false, 4, "sigmoid", DeviceDescriptor.UseDefaultDevice(), "");
            var output = h;

            var label = CNTKLib.InputVariable(new int[] { 2 }, DataType.Float, "label");

            // Loss and metric functions

            var loss  = CNTKLib.BinaryCrossEntropy(output, label);
            var error = CNTKLib.ClassificationError(output, label);

            // Train

            var lr = new TrainingParameterScheduleDouble(.01);
            var m  = new TrainingParameterScheduleDouble(.9);
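            // Constant schedules: learning rate 0.01 and momentum 0.9 for the entire run.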

            var learner = Learner.MomentumSGDLearner(output.Parameters(), lr, m, true);

            var session   = new TrainingSession(output, loss, error, learner, null, sampler, null);
            var iteration = session.GetEnumerator();

            for (var i = 0; i < 1000; ++i)
            {
                iteration.MoveNext();
                var dummy = iteration.Current;
                var valid = session.GetValidationMetric();
            }

            Assert.IsTrue(session.Metric < 0.1);
        }
Example No. 19
        public void NormalizationfeatureGroup_test03()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams
            f.CreateIOVariables("|Itemid 1 0 |Sales 4 0 |Color 1 0", "|Label 1 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization:Sales |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath           = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test.txt";
            string trainingNormalizedPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test_result.txt";

            //string trainingPath = "../../../../data/cntk_dataset_for_normalization_test.txt";
            //string trainingNormalizedPath = "../../../../data/cntk_dataset_for_normalization_test_result.txt";

            var normalizedResult = System.IO.File.ReadAllLines(trainingNormalizedPath);
            var inputVars        = MLFactory.NormalizeInputLayer(trData, f, trainingPath, trainingPath, device);

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, trainingPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(10, device);

                //go through all functions and perform the calculation
                for (int i = 0; i < inputVars.Count; i++)
                {
                    //
                    var fun     = (Function)inputVars[i];
                    var strName = data.Keys.Where(x => x.m_name.Equals(f.InputVariables[i].Name)).FirstOrDefault();
                    var input   = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables[i], data[strName].data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);
                    var inputValues      = data[strName].data.GetDenseData <float>(fun).Select(x => x[0]).ToList();
                    var normalizedValues = output[fun].GetDenseData <float>(fun).Select(x => x[0]).ToList();
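                    // Map the i-th input variable to its row in the expected-results file;
                    // presumably the 4-dimensional Sales stream occupies rows 1-4, shifting
                    // the third variable (Color) to row i + 3.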
                    int index            = 0;
                    if (i < 2)
                    {
                        index = i;
                    }
                    else
                    {
                        index = i + 3;
                    }
                    var currNorLine = normalizedResult[index].Split(new char[] { '\t', ' ' }).ToList();

                    for (int j = 0; j < normalizedValues.Count; j++)
                    {
                        var n1 = normalizedValues[j].ToString(CultureInfo.InvariantCulture);
                        var n2 = currNorLine[j];
                        if (n1.Length < 2)
                        {
                            Assert.Equal(n1, n2);
                        }
                        else
                        {
                            Assert.Equal(n1.Substring(0, 5), n2.Substring(0, 5));
                        }
                    }
                }
            }
        }
Example No. 20
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.Xavier(random.Next(), scale: 0.02);
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Setup generator
            var generatorInputShape = NDShape.CreateNDShape(new int[] { 100 });
            var generatorInput      = Variable.InputVariable(generatorInputShape, dataType);
            var generatorNetwork    = Generator(generatorInput, weightInit, biasInit, device, dataType);

            // Setup discriminator
            var discriminatorInputShape = NDShape.CreateNDShape(new int[] { 784 }); // 28 * 28 * 1.
            var discriminatorInput      = Variable.InputVariable(discriminatorInputShape, dataType);
            // scale image input between -1.0 and 1.0.
            var discriminatorInputScaled = CNTKLib.Minus(
                CNTKLib.ElementTimes(Constant.Scalar(2 * 0.00390625f, device), discriminatorInput),
                Constant.Scalar(1.0f, device));
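            // 2 * 1/256 maps raw pixels [0, 255] into [0, 2), and the Minus shifts that to
            // [-1, 1), presumably matching a tanh-style output range of the generator (defined elsewhere).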

            var discriminatorNetwork = Discriminator(discriminatorInputScaled, weightInit, biasInit, device, dataType);

            // The discriminator must be used on both the real MNIST images and the fake images created by the generator.
            // One way to represent this in the computational graph is to clone the discriminator output with substituted
            // inputs. Cloning with ParameterCloningMethod.Share ensures that both paths through the discriminator
            // use the same set of parameters.
            var discriminatorNetworkFake = discriminatorNetwork
                                           .Clone(ParameterCloningMethod.Share, replacements:
                                                  new Dictionary <Variable, Variable>
            {
                { discriminatorInputScaled.Output, generatorNetwork.Output },
            });

            // Create minibatch source for providing the real images.
            var imageNameToVariable = new Dictionary <string, Variable> {
                { "features", discriminatorInput }
            };
            var imageMinibatchSource = CreateMinibatchSource(trainFilePath, imageNameToVariable, randomize: true);

            // Create minibatch source for providing the noise.
            var noiseNameToVariable = new Dictionary <string, Variable> {
                { "noise", generatorInput }
            };
            var noiseMinibatchSource = new UniformNoiseMinibatchSource(noiseNameToVariable, min: -1.0f, max: 1.0f, seed: random.Next());

            // Combine both sources in the composite minibatch source.
            var compositeMinibatchSource = new CompositeMinibatchSource(imageMinibatchSource, noiseMinibatchSource);

            // Setup generator loss: 1.0 - C.log(D_fake)
            var generatorLossFunc = CNTKLib.Minus(Constant.Scalar(1.0f, device),
                                                  CNTKLib.Log(discriminatorNetworkFake));
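            // Note: 1.0 - log(D_fake) differs from -log(D_fake) only by an additive constant,
            // so its gradients match the non-saturating generator loss.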

            // Setup discriminator loss: -(C.log(D_real) + C.log(1.0 - D_fake))
            var discriminatorLossFunc = CNTKLib.Negate(CNTKLib.Plus(CNTKLib.Log(discriminatorNetwork),
                                                                    CNTKLib.Log(CNTKLib.Minus(Constant.Scalar(1.0f, device), discriminatorNetworkFake))));

            // Create fitters for the training loop.
            // Generator uses Adam and discriminator SGD.
            // Advice from: https://github.com/soumith/ganhacks
            var generatorLearner = Learners.Adam(generatorNetwork.Parameters(),
                                                 learningRate: 0.0002, momentum: 0.5, gradientClippingThresholdPerSample: 1.0);
            var generatorFitter = CreateFitter(generatorLearner, generatorNetwork, generatorLossFunc, device);

            var discriminatorLearner = Learners.SGD(discriminatorNetwork.Parameters(),
                                                    learningRate: 0.0002, gradientClippingThresholdPerSample: 1.0);
            var discriminatorFitter = CreateFitter(discriminatorLearner, discriminatorNetwork, discriminatorLossFunc, device);

            int epochs    = 30;
            int batchSize = 128;

            // Controls how many steps the discriminator takes,
            // each time the generator takes 1 step.
            // Default from the original paper is 1.
            int discriminatorSteps = 1;

            var isSweepEnd = false;

            for (int epoch = 0; epoch < epochs;)
            {
                for (int step = 0; step < discriminatorSteps; step++)
                {
                    // Discriminator needs both real images and noise,
                    // so uses the composite minibatch source.
                    var minibatchItems = compositeMinibatchSource.GetNextMinibatch(batchSize, device);
                    isSweepEnd = minibatchItems.isSweepEnd;

                    discriminatorFitter.FitNextStep(minibatchItems.minibatch, batchSize);
                    DisposeValues(minibatchItems.minibatch);
                }

                // Generator only needs noise images,
                // so uses the noise minibatch source separately.
                var noiseMinibatchItems = noiseMinibatchSource.GetNextMinibatch(batchSize, device);

                generatorFitter.FitNextStep(noiseMinibatchItems.minibatch, batchSize);
                DisposeValues(noiseMinibatchItems.minibatch);

                if (isSweepEnd)
                {
                    var generatorCurrentLoss = generatorFitter.CurrentLoss;
                    generatorFitter.ResetLossAndMetricAccumulators();

                    var discriminatorCurrentLoss = discriminatorFitter.CurrentLoss;
                    discriminatorFitter.ResetLossAndMetricAccumulators();

                    var traceOutput = $"Epoch: {epoch + 1:000} Generator Loss = {generatorCurrentLoss:F8}, Discriminator Loss = {discriminatorCurrentLoss:F8}";
                    Trace.WriteLine(traceOutput);

                    ++epoch;
                }
            }

            // Sample 6x6 images from generator.
            var samples = 6 * 6;
            var batch   = noiseMinibatchSource.GetNextMinibatch(samples, device);
            var noise   = batch.minibatch;

            var predictor  = new Predictor(generatorNetwork, device);
            var images     = predictor.PredictNextStep(noise);
            var imagesData = images.SelectMany(t => t).ToArray();

            // Show examples
            var app    = new Application();
            var window = new PlotWindowBitMap("Generated Images", imagesData, 28, 28, 1, true);

            window.Show();
            app.Run(window);
        }
Example No. 21
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Imdb";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "imdb_sparse_train_50w.txt");
            var testFilePath          = Path.Combine(baseDataDirectoryPath, "imdb_sparse_test_50w.txt");

            // Define the input and output shape.
            var inputShape      = new int[] { 129888 + 4 }; // Number of distinct input words + offset for the one-hot, sparse encoding
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType, isSparse: true)
                          .Embedding(32, weightInit(), dataType, device)
                          .LSTMStack(32, 1, weightInit(), false, device, dataType)
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Since we are processing sequence data,
            // wrap network in sequenceLast.
            network = CNTKLib.SequenceLast(network);
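            // SequenceLast picks the network output at the final time step of each sequence,
            // collapsing the dynamic sequence axis so one prediction is made per review.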

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType,
                                                        dynamicAxes: new List <Axis>()
            {
                Axis.DefaultBatchAxis()
            },
                                                        isSparse: false);

            // setup loss and learner.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.Adam(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Create the model.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            // Network will be trained using the training set,
            // and tested using the test set.
            var featuresName = "x";
            var targetsName  = "y";

            // setup name to variable map.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };

            // The order of the training data is randomized.
            var train = CreateMinibatchSource(trainFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice randomization is switched off for test data.
            var test = CreateMinibatchSource(testFilePath, featuresName, targetsName,
                                             numberOfClasses, inputShape, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            var history = model.Fit(trainingSource, epochs: 100, batchSize: 512,
                                    validationMinibatchSource: testSource);

            // Trace loss and validation history
            TraceLossValidationHistory(history);

            // Evaluate the model using the test set.
            var(loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Write first ten predictions
            var predictions = model.Predict(testSource)
                              .Take(10);

            // Use tensor data directly, since there is only one element per sample.
            Trace.WriteLine($"Predictions: [{string.Join(", ", predictions.Select(p => p.First()))}]");
        }
Example No. 22
        /// <summary>
        /// Generates a GraphViz graph string from an mlconfig file
        /// </summary>
        /// <param name="configPath"></param>
        /// <returns></returns>
        public static string GenerateNetworkGraph(string configPath)
        {
            //load ML configuration file
            var dicMParameters = MLFactory.LoadMLConfiguration(configPath);

            var fi         = new FileInfo(configPath);
            var folderPath = MLFactory.GetMLConfigFolder(fi.FullName);
            //add path of model folder
            dicMParameters.Add("root", folderPath);
            var f     = MLFactory.CreateMLFactory(dicMParameters);
            var model = MLFactory.CreateNetworkModel(dicMParameters["network"], f.InputVariables, f.OutputVariables, null, DeviceDescriptor.UseDefaultDevice());

            return(GenerateNetworkGraph(model));
        }
Example No. 23
        // Assume the input shape is of the form (x [, y [, z]], channels)
        public static Function ConvolutionTranspose(Variable input, int[] filterShape, int numFilters, string activation, CNTKDictionary initializer, bool[] padding, int[] strides, bool useBias, CNTKDictionary biasInitializer, int[] outputShape, int[] dilation, int reductionRank, int maxTempMemSizeInSamples, string name)
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                // Initializers

                if (initializer == null)
                {
                    initializer = CNTKLib.GlorotUniformInitializer();
                }

                if (useBias && biasInitializer == null)
                {
                    biasInitializer = CNTKLib.ConstantInitializer(0);
                }

                // Convolution map
                // (kernelWidth, kernelHeight, featureMapCount, kernelChannel)

                var convDims = new int[filterShape.Length + 2];
                filterShape.CopyTo(convDims, 0);
                convDims[convDims.Length - 2] = numFilters;
                convDims[convDims.Length - 1] = input.Shape.Dimensions[filterShape.Length]; // input channel
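                // e.g. filterShape (3, 3) with numFilters = 32 on a 4-channel input
                // yields a kernel of shape (3, 3, 32, 4).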

                var convolutionMap = new Parameter(convDims, DataType.Float, initializer, DeviceDescriptor.UseDefaultDevice(), name + "/weight");
                Register(convolutionMap);

                var conv = CNTKLib.ConvolutionTranspose(
                    convolutionMap,                      // CNTK.Variable convolutionMap
                    input,                               // CNTK.Variable operand
                    strides,                             // CNTK.NDShape strides
                    new BoolVector(new bool[] { true }), // CNTK.BoolVector sharing
                    new BoolVector(padding),             // CNTK.BoolVector autoPadding
                    outputShape,                         // CNTK.NDShape outputShape
                    dilation,                            // CNTK.NDShape dilation
                    (uint)reductionRank,                 // uint reductionRank
                    (uint)maxTempMemSizeInSamples,       // uint maxTempMemSizeInSamples
                    ""                                   // string name
                    );
                Register(conv);

                if (useBias)
                {
                    var bias = new Parameter(conv.Output.Shape, DataType.Float, biasInitializer, DeviceDescriptor.UseDefaultDevice(), name + "/bias");
                    Register(bias);
                    conv = CNTKLib.Plus(conv, bias);
                    Register(conv);
                }

                conv = ApplyActivation(conv, activation);

                conv.RootFunction.SetName(name);

                return(conv);
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }
Example No. 24
        public void TestNodeGroupsLifecycle()
        {
            var input = CNTKLib.InputVariable(new int[] { 2 }, DataType.Float);
            var lstm  = Horker.PSCNTK.Microsoft.LSTMSequenceClassifierNet.Create(input, 3, 4, false, true, DeviceDescriptor.UseDefaultDevice(), "LSTM");

            // Keep reference to the model
            model = lstm;

            for (var i = 0; i < 10; ++i)
            {
                GC.Collect();

                var g = NodeGroup.Groups.Where(x => x.Name == "LSTM_it").First();
                Assert.IsTrue(g.Nodes.Count() > 5);

                g = NodeGroup.Groups.Where(x => x.Name == "LSTM_ft").First();
                Assert.IsTrue(g.Nodes.Count() > 5);

                g = NodeGroup.Groups.Where(x => x.Name == "LSTM_ot").First();
                Assert.IsTrue(g.Nodes.Count() > 5);
            }
        }
Example No. 25
        public static Function OptimizedRNNStack(Variable input, int hiddenSize, int layerSize = 1, bool bidirectional = false, string cellType = "lstm", string name = "")
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                var dim = input.Shape.Dimensions[0];

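                // Flattened parameter count, apparently assuming cellType == "lstm" and
                // bidirectional == false: the first layer needs 4*h*dim input weights,
                // 4*h*h recurrent weights and 8*h biases; every additional layer needs
                // 8*h*h weights and 8*h biases. The lines below rearrange that total.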
                var weightSize = (dim - 1) * 4 * hiddenSize;
                weightSize += (layerSize - 1) * (8 * hiddenSize * hiddenSize + 8 * hiddenSize);
                weightSize += 4 * hiddenSize * hiddenSize + 12 * hiddenSize;

                var w = new Parameter(new int[] { weightSize }, DataType.Float, CNTKLib.GlorotUniformInitializer(), DeviceDescriptor.UseDefaultDevice(), name + "_w");
                Register(w);

                var rnn = CNTKLib.OptimizedRNNStack(input, w, (uint)hiddenSize, (uint)layerSize, bidirectional, cellType, name + "_rnn");
                Register(rnn);

                var output = CNTKLib.SequenceLast(rnn);
                Register(output);

                output.RootFunction.SetName(name);

                return(output);
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }
Example No. 26
        /// <summary>
        /// Prints the performance analysis on the console
        /// </summary>
        /// <param name="mlConfigPath"></param>
        public static void PrintPerformance(string mlConfigPath)
        {
            //print evaluation result on console
            var performanceData = MLExport.PrintPerformance(mlConfigPath, DataSetType.Validation, DeviceDescriptor.UseDefaultDevice());
            performanceData.Wait();
            foreach (var s in performanceData.Result)
            {
                Console.WriteLine(s);
            }
        }
Example No. 27
        //[TestMethod]
        public void Fitter_Loop()
        {
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 10000);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // setup input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // loss
            var loss   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metric = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, loss, metric, new LearnerVector {
                learner
            });

            // data names
            var observationsName = "observations";
            var targetsName      = "targets";

            // setup name to variable map.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { observationsName, inputVariable },
                { targetsName, targetVariable },
            };

            // setup name to data map.
            var nameToData = new Dictionary <string, MemoryMinibatchData>
            {
                { observationsName, observations },
                { targetsName, targets }
            };

            var minibatchSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            // setup Fitter
            var fitter = new Fitter(trainer, device);

            var epochs    = 10;
            int batchSize = 32;

            for (int epoch = 0; epoch < epochs;)
            {
                var(minibatch, isSweepEnd) = minibatchSource.GetNextMinibatch(batchSize, device);
                fitter.FitNextStep(minibatch, batchSize);

                if (isSweepEnd)
                {
                    var currentLoss   = fitter.CurrentLoss;
                    var currentMetric = fitter.CurrentMetric;
                    fitter.ResetLossAndMetricAccumulators();

                    var traceOutput = $"Epoch: {epoch + 1:000} Loss = {currentLoss:F8}, Metric = {currentMetric:F8}";

                    ++epoch;

                    Trace.WriteLine(traceOutput);
                }
            }
        }
Example No. 28
        public static object ANNdotNETEval(object arg, string modelPath)
        {
            try
            {
                object[,] obj = null;

                if (!(arg is object[,]))
                {
                    obj       = new object[1, 1];
                    obj[0, 0] = arg;
                }
                else
                {
                    //convert the object into an array
                    obj = (object[,])arg;
                }


                //create list to convert values
                List <float> calculatedOutput = new List <float>();

                //
                foreach (var s in obj)
                {
                    var ss = float.Parse(s.ToString(), CultureInfo.InvariantCulture);
                    calculatedOutput.Add(ss);
                }
                //
                return(ANNdotNET.Core.MLEvaluator.TestModel(modelPath, calculatedOutput.ToArray(), DeviceDescriptor.UseDefaultDevice()));
            }
            catch (Exception ex)
            {
                return(ex.Message);
            }
        }
Example No. 29
        private void btLearn_Click(object sender, RoutedEventArgs e)
        {
            //Step 1: Create some Demo helpers
            Output.Text += "Linear Regression with CNTK!" + Environment.NewLine;
            Output.Text += "#### Linear Regression with CNTK! ####" + Environment.NewLine;
            Output.Text += "" + Environment.NewLine;
            //define device
            var device = DeviceDescriptor.UseDefaultDevice();

            //Step 2: define input and output variables
            Variable x = Variable.InputVariable(new int[] { 1 }, DataType.Float, "input");
            Variable y = Variable.InputVariable(new int[] { 1 }, DataType.Float, "output");

            //Step 3: define the training data set from the table above
            var xValues = Value.CreateBatch(new NDShape(1, 1), new float[] { 1f, 2f, 3f, 4f, 5f }, device);
            var yValues = Value.CreateBatch(new NDShape(1, 1), new float[] { 3f, 5f, 7f, 9f, 11f }, device);
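            //the samples satisfy y = 2x + 1, so training should drive w toward 2 and b toward 1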

            //Step 4: create linear regression model
            var lr = CreateLRModel(x, device);
            //Network model contains only two parameters b and w, so we query
            //the model in order to get parameter values
            var paramValues     = lr.Inputs.Where(z => z.IsParameter).ToList();
            var totalParameters = paramValues.Sum(c => c.Shape.TotalSize);

            Output.Text += $"LRM has {totalParameters} params, {paramValues[0].Name} and {paramValues[1].Name}." + Environment.NewLine;

            //Step 5: create trainer
            var trainer = CreateTrainer(lr, y);

            float b = 0;
            float A = 0;

            //Step 6: training
            for (int i = 1; i <= 500; i++)
            {
                var d = new Dictionary <Variable, Value>
                {
                    { x, xValues },
                    { y, yValues }
                };


                trainer.TrainMinibatch(d, true, device);

                var loss = trainer.PreviousMinibatchLossAverage();
                var eval = trainer.PreviousMinibatchEvaluationAverage();

                if (i % 20 == 0)
                {
                    Output.Text += $"It={i}, Loss={loss}, Eval={eval}" + Environment.NewLine;
                }

                //print weights
                var b0_name = paramValues[0].Name;
                var b1_name = paramValues[1].Name;
                var b0      = new Value(paramValues[0].GetValue()).GetDenseData <float>(paramValues[0]);
                var b1      = new Value(paramValues[1].GetValue()).GetDenseData <float>(paramValues[1]);
                if (i == 500)
                {
                    Output.Text += $" " + Environment.NewLine;
                    Output.Text += $"Training process finished with the following regression parameters:" + Environment.NewLine;
                    Output.Text += $"b={b0[0][0]}, w={b1[0][0]}" + Environment.NewLine;
                    Output.Text += $" " + Environment.NewLine;
                }

                b = b0[0][0];
                A = b1[0][0];
            }

            MyModel.Series.Add(new FunctionSeries(LinearFunction(A, b), -1, 12, 0.01, "result"));
            MyModel.InvalidatePlot(true);
        }
Example No. 30
        public void networkConfiguration_test05()
        {
            MLFactory f = new MLFactory();
            //Deep Neural Network in(4) - 5-10-15-out(3)
            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 5, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 10, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 15, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 3, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };

            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 3 }, DataType.Float, "out1");


            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);


            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();
            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(4, w.Count);//4: one weight matrix for each of the three hidden layers plus the output layer
            // total weights 4x5 + 5x10 + 10x15 + 15x3 = 265
            Assert.Equal(265, w.Sum(p => p.Shape.TotalSize));
            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(4, b.Count);//4 (3 for hidden and 1 for output layer)
            // 5 + 10 + 15 + 3 = 33
            Assert.Equal(33, b.Sum(p => p.Shape.TotalSize));


            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Empty(constants);
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(3, nnModel.Output.Shape.Dimensions[0]);
        }