Example #1
        public Tensor CreateVariable(float[] data, long[] shape, string name = "")
        {
            var arr = new CNTK.NDArrayView(BackendUtil.CastShapeInt(shape), data, DeviceManager.Current);
            var v   = new CNTK.Variable(BackendUtil.CastShapeInt(shape), VariableKind.Parameter, CNTK.DataType.Float, arr, false, new AxisVector(), false, name, name);

            return(Out(v));
        }
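A hypothetical call site, just to make the parameter meanings concrete (`backend` stands in for whatever object exposes this method; the data and shape are illustrative):

            // Hypothetical usage: wrap a 2x2 weight matrix as a backend tensor named "W".
            var weights = backend.CreateVariable(
                data:  new float[] { 0.1f, 0.2f, 0.3f, 0.4f },
                shape: new long[] { 2, 2 },
                name:  "W");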
Example #2
        void create_network()
        {
            Console.WriteLine("Compute Device: " + computeDevice.AsString());
            imageVariable       = Util.inputVariable(new int[] { 28, 28, 1 }, "image_tensor");
            categoricalVariable = Util.inputVariable(new int[] { 10 }, "label_tensor");

            network = imageVariable;
            network = Layers.Convolution2D(network, 32, new int[] { 3, 3 }, computeDevice, CC.ReLU);
            network = CC.Pooling(network, C.PoolingType.Max, new int[] { 2, 2 }, new int[] { 2 });
            network = Layers.Convolution2D(network, 64, new int[] { 3, 3 }, computeDevice, CC.ReLU);
            network = CC.Pooling(network, C.PoolingType.Max, new int[] { 2, 2 }, new int[] { 2 });
            network = Layers.Convolution2D(network, 64, new int[] { 3, 3 }, computeDevice, CC.ReLU);
            network = Layers.Dense(network, 64, computeDevice, activation: CC.ReLU);
            network = Layers.Dense(network, 10, computeDevice);

            Logging.detailed_summary(network);
            Logging.log_number_of_parameters(network);

            loss_function = CC.CrossEntropyWithSoftmax(network, categoricalVariable);
            eval_function = CC.ClassificationError(network, categoricalVariable);

            learner = CC.AdamLearner(
                new C.ParameterVector(network.Parameters().ToArray()),
                new C.TrainingParameterScheduleDouble(0.001 * batch_size, (uint)batch_size),
                new C.TrainingParameterScheduleDouble(0.9),
                true,
                new C.TrainingParameterScheduleDouble(0.99));

            trainer   = CC.CreateTrainer(network, loss_function, eval_function, new C.LearnerVector(new C.Learner[] { learner }));
            evaluator = CC.CreateEvaluator(eval_function);
        }
Example #3
        /// <summary>
        /// Create the neural network for this app.
        /// </summary>
        /// <returns>The neural network to use</returns>
        public static CNTK.Function CreateNetwork()
        {
            // build features and labels
            features = NetUtil.Var(new int[] { 13 }, DataType.Float);
            labels   = NetUtil.Var(new int[] { 1 }, DataType.Float);

            // build the network
            var network = features
                          .Dense(64, CNTKLib.ReLU)
                          .Dense(64, CNTKLib.ReLU)
                          .Dense(1)
                          .ToNetwork();

            // set up the loss function and the classification error function
            var lossFunc  = NetUtil.MeanSquaredError(network.Output, labels);
            var errorFunc = NetUtil.MeanAbsoluteError(network.Output, labels);

            // use the Adam learning algorithm
            var learner = network.GetAdamLearner(
                learningRateSchedule: (0.001, 1),
                momentumSchedule: (0.9, 1),
                unitGain: true);

            // set up a trainer and an evaluator
            trainer   = network.GetTrainer(learner, lossFunc, errorFunc);
            evaluator = network.GetEvaluator(errorFunc);

            return(network);
        }
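Once trained, the network returned here can be scored on a single sample with the stock CNTK evaluation API. A minimal sketch, assuming `network` and the 13-element `features` variable are the objects built above (`PredictOne` and `sample` are illustrative names):

        public static float PredictOne(CNTK.Function network, CNTK.Variable features, float[] sample)
        {
            var device = CNTK.DeviceDescriptor.UseDefaultDevice();

            // bind the 13 input values to the feature variable and ask for the network output
            var inputVal = CNTK.Value.CreateBatch(features.Shape, sample, device);
            var inputs   = new Dictionary<CNTK.Variable, CNTK.Value>() { { features, inputVal } };
            var outputs  = new Dictionary<CNTK.Variable, CNTK.Value>() { { network.Output, null } };

            network.Evaluate(inputs, outputs, device);

            // one sample in, one scalar regression value out
            return outputs[network.Output].GetDenseData<float>(network.Output)[0][0];
        }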
Example #4
        /// <summary>
        /// Create the model.
        /// </summary>
        /// <param name="features">The input feature to build the model on.</param>
        /// <returns>The completed model to use.</returns>
        protected override CNTK.Function CreateModel(CNTK.Variable features)
        {
            // ******************
            // ADD YOUR CODE HERE
            // ******************

            return(null); // remove this when done!
        }
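For reference, one possible completed override, mirroring the fluent Dense/ToNetwork pattern used in Example #3 (this is a sketch of a solution, not the exercise's prescribed answer):

        protected override CNTK.Function CreateModel(CNTK.Variable features)
        {
            // a small fully-connected network built with the same fluent helpers as Example #3
            return features
                   .Dense(64, CNTKLib.ReLU)
                   .Dense(64, CNTKLib.ReLU)
                   .Dense(1)
                   .ToNetwork();
        }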
Example #5
        private Tensor CreateVariable(int[] data, long[] shape, string name = "")
        {
            shape = BackendUtil.Row2ColMajor(shape);

            var arr = new CNTK.NDArrayView(BackendUtil.CastShapeInt(shape), Array.ConvertAll(data, x => (float)x), DeviceManager.Current);
            var v   = new CNTK.Variable(BackendUtil.CastShapeInt(shape), VariableKind.Input, CNTK.DataType.Float, arr, false, new AxisVector(), false, name, name);

            return(Out(v));
        }
Example #6
        internal void LoadTextData(CNTK.Variable feature, CNTK.Variable label)
        {
            int imageSize  = feature.Shape.Rank == 1 ? feature.Shape[0] : feature.Shape[0] * feature.Shape[1] * feature.Shape[2];
            int numClasses = label.Shape[0];
            IList <StreamConfiguration> streamConfigurations = new StreamConfiguration[]
            {
                new StreamConfiguration(featureStreamName, imageSize),
                new StreamConfiguration(labelsStreamName, numClasses)
            };

            miniBatchSource   = MinibatchSource.TextFormatMinibatchSource(FileName, streamConfigurations, MinibatchSource.InfinitelyRepeat);
            featureVariable   = feature;
            labelVariable     = label;
            featureStreamInfo = miniBatchSource.StreamInfo(featureStreamName);
            labelStreamInfo   = miniBatchSource.StreamInfo(labelsStreamName);
        }
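A typical consumer pulls minibatches from the configured source and feeds them to a trainer. A minimal sketch using the fields set by LoadTextData; the `trainer`, `batchSize`, and `device` arguments are assumed to come from elsewhere:

        internal void TrainOneSweep(CNTK.Trainer trainer, uint batchSize, CNTK.DeviceDescriptor device)
        {
            while (true)
            {
                // pull the next minibatch from the text-format source configured above
                var minibatch = miniBatchSource.GetNextMinibatch(batchSize, device);
                var arguments = new Dictionary<CNTK.Variable, CNTK.MinibatchData>()
                {
                    { featureVariable, minibatch[featureStreamInfo] },
                    { labelVariable, minibatch[labelStreamInfo] }
                };

                trainer.TrainMinibatch(arguments, device);

                // stop after one full pass over the data
                if (minibatch.Values.Any(d => d.sweepEnd))
                {
                    break;
                }
            }
        }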
Example #7
        public static Dictionary <CNTK.Variable, CNTK.Value> SetVariableValue(CNTK.Variable variable, float[] value)
        {
            CNTK.Value inputValue = null;

            if (value != null)
            {
                inputValue = CNTK.Value.CreateBatch <float>(new int[] { variable.Shape.Dimensions[0] }, value, 0, value.Length, Layers._device);
            }
            return(new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { variable, inputValue }
            });
        }
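The returned dictionary is shaped for direct use as the input map of Function.Evaluate. A brief usage sketch from within the declaring class (`model` and the sample values are placeholders):

            // Hypothetical usage: bind one input variable to a batch of values and evaluate the model.
            var inputs  = SetVariableValue(model.Arguments[0], new float[] { 0.1f, 0.2f, 0.3f });
            var outputs = new Dictionary<CNTK.Variable, CNTK.Value>() { { model.Output, null } };

            model.Evaluate(inputs, outputs, Layers._device);
            var scores = outputs[model.Output].GetDenseData<float>(model.Output);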
Example #8
        /// <summary>
        /// Create the model.
        /// </summary>
        /// <param name="features">The input feature to build the model on.</param>
        /// <returns>The completed model to use.</returns>
        protected override CNTK.Function CreateModel(CNTK.Variable features)
        {
            int numberOfClasses    = 10000;
            int embeddingDimension = 32;
            int lstmUnits          = 32;

            return(features
                   .OneHotOp(numberOfClasses, true)
                   .Embedding(embeddingDimension)
                   .LSTM(lstmUnits, lstmUnits)
                   .Dense(1, CNTKLib.Sigmoid)
                   .ToNetwork());
        }
Example #9
        public Matrix <double> Predict(Matrix <double> inputs)
        {
            if (Model == null)
            {
                return(new DenseMatrix(0, 0));
            }

            CNTK.Variable inputVar = Model.Arguments.Single();

            double[] original = inputs.ToRowMajorArray();
            //for (int i = 0; i < original.Length; i++)
            //{
            //    if (i != 0 && i % 28 == 0)
            //        Console.Write("\n");
            //    Console.Write((int)(original[i] * 10) + ",");
            //}
            List <float> converted = original.Select(o => (float)o).ToList();

            NDShape inputShape   = inputVar.Shape;
            var     inputDataMap = new Dictionary <CNTK.Variable, Value>();
            var     inputVal     = Value.CreateBatch(inputShape, converted, CNTKHelper.Device());

            inputDataMap.Add(inputVar, inputVal);

            CNTK.Variable outputVar = Model.Output;

            // Create output data map. Using null as Value to indicate using system allocated memory.
            // Alternatively, create a Value object and add it to the data map.
            var outputDataMap = new Dictionary <CNTK.Variable, Value>();

            outputDataMap.Add(outputVar, null);

            // Start evaluation on the device
            Model.Evaluate(inputDataMap, outputDataMap, CNTKHelper.Device());

            // Get evaluate result as dense output
            var outputVal  = outputDataMap[outputVar];
            var outputData = outputVal.GetDenseData <float>(outputVar);

            double[,] arr = new double[outputData.Count, outputData[0].Count];
            for (int i = 0; i < outputData.Count; i++)
            {
                for (int j = 0; j < outputData[i].Count; j++)
                {
                    arr[i, j] = outputData[i][j];
                }
            }
            return(DenseMatrix.OfArray(arr));
        }
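A hypothetical call site, assuming (as the commented-out debug code suggests) a model that takes a flattened 28x28 image; `predictor` stands in for whatever class hosts this method:

            // Hypothetical usage: score one flattened 28x28 image (row-major, 784 values in [0, 1]).
            Matrix<double> sample = DenseMatrix.Create(1, 784, 0.0);
            // ... fill `sample` with pixel intensities ...
            Matrix<double> scores = predictor.Predict(sample);
            Console.WriteLine(scores.ToMatrixString());   // one row of class scores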
Example #10
        /// <summary>
        /// Reshapes the dataset to the specified shape.
        /// </summary>
        /// <param name="shape">The new shape for the dataset; its element count must match the current shape.</param>
        /// <exception cref="System.ArgumentException">Thrown when the new shape contains a different number of elements than the current shape.</exception>
        public void Reshape(params int[] shape)
        {
            CNTK.Variable features = CNTK.Variable.InputVariable(new int[] { Shape[1], Shape[0] }, DataType.Float);
            int           total    = Shape.Aggregate((d1, d2) => d1 * d2);

            if (shape.Aggregate((d1, d2) => d1 * d2) != total)
            {
                throw new ArgumentException(string.Format("Cannot reshape array of size {0} into shape ({1})", total, string.Join(", ", shape)));
            }
            //shape.ToList().Insert(0, Data.Count);
            CNTK.Variable outfeatures = CNTK.Variable.InputVariable(shape, DataType.Float);

            //Variable outfeatures = new Variable(shape, VariableKind.Output, DataType.Float, null, false, new AxisVector(), false, "", "");
            CNTK.Function reshapeFunc = CNTKLib.Reshape(features, shape);

            List <float> vectorData = new List <float>();

            foreach (var item in Data)
            {
                vectorData.AddRange(item);
            }

            Value v = Value.CreateBatch <float>(Shape, vectorData, GlobalParameters.Device);
            Dictionary <CNTK.Variable, Value> inputs = new Dictionary <CNTK.Variable, Value>()
            {
                { features, v }
            };
            Dictionary <CNTK.Variable, Value> outputs = new Dictionary <CNTK.Variable, Value>()
            {
                { outfeatures, null }
            };

            reshapeFunc.Evaluate(inputs, outputs, GlobalParameters.Device);
            var res = outputs[outfeatures].GetDenseData <float>(outfeatures);

            Data = new List <List <float> >();
            foreach (var item in res)
            {
                Data.Add(item.ToList());
            }
        }
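Usage is analogous to a NumPy-style reshape; the total element count must be unchanged, as the guard above enforces. A hypothetical call, assuming a dataset currently shaped 100 x 784:

            // Hypothetical usage: reinterpret 100 flat vectors of 784 floats as 28x28x1 images.
            dataset.Reshape(100, 28, 28, 1);    // OK: 100 * 784 == 100 * 28 * 28 * 1
            // dataset.Reshape(100, 30, 30);    // would throw ArgumentException: element counts differ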
Example #11
 internal Variable(CNTK.Variable variable)
 {
     UnderlyingVariable = variable;
 }
Example #12
        /// <summary>
        /// Predicts the specified BMP.
        /// </summary>
        /// <param name="bmp">The image in bitmap format.</param>
        /// <param name="topK">The top k accurate result to return.</param>
        /// <returns></returns>
        public List <PredResult> Predict(Bitmap bmp, int topK = 3)
        {
            try
            {
                Variable inputVar = modelFunc.Arguments[0];

                NDShape inputShape  = inputVar.Shape;
                int     imageWidth  = inputShape[0];
                int     imageHeight = inputShape[1];

                var          resized    = bmp.Resize(imageWidth, imageHeight, true);
                List <float> resizedCHW = resized.ParallelExtractCHW();

                // Create input data map
                var inputDataMap = new Dictionary <Variable, Value>();
                var inputVal     = Value.CreateBatch(inputShape, resizedCHW, GlobalParameters.Device);
                inputDataMap.Add(inputVar, inputVal);
                inputVar = modelFunc.Arguments[1];
                //inputDataMap.Add(inputVar, null);

                Variable outputVar = modelFunc.Outputs.Where(x => (x.Shape.TotalSize == 1000)).ToList()[0];

                // Create output data map. Using null as Value to indicate using system allocated memory.
                // Alternatively, create a Value object and add it to the data map.
                var outputDataMap = new Dictionary <Variable, Value>();
                outputDataMap.Add(outputVar, null);

                // Start evaluation on the device
                modelFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);

                // Get evaluate result as dense output
                var outputVal  = outputDataMap[outputVar];
                var outputData = outputVal.GetDenseData <float>(outputVar);
                Dictionary <int, float> outputPred = new Dictionary <int, float>();

                for (int i = 0; i < outputData[0].Count; i++)
                {
                    outputPred.Add(i, outputData[0][i]);
                }

                var topList = outputPred.OrderByDescending(x => (x.Value)).Take(topK).ToList();
                List <PredResult> result        = new List <PredResult>();
                float             sumpredresult = outputPred.Sum(x => (x.Value));
                float             avgpredresult = outputPred.Average(x => (x.Value));
                float             min           = outputPred.Min(x => (x.Value));
                float             max           = outputPred.Max(x => (x.Value));

                foreach (var item in topList)
                {
                    result.Add(new PredResult()
                    {
                        Score = item.Value,
                        Name  = actualValues[item.Key]
                    });
                }

                Logging.WriteTrace("Prediction Completed");

                return(result);
            }
            catch (Exception ex)
            {
                Logging.WriteTrace(ex);
                throw;   // rethrow without resetting the stack trace
            }
        }
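A typical call site loads a bitmap, asks for the top three classes, and prints them. A sketch with placeholder names (`predictor` for the hosting class instance, `elephant.jpg` for the image path):

            // Hypothetical usage of the predictor above.
            using (var bmp = new Bitmap(@"elephant.jpg"))
            {
                List<PredResult> top3 = predictor.Predict(bmp, topK: 3);
                foreach (var pred in top3)
                {
                    Console.WriteLine($"{pred.Name}: {pred.Score:F4}");
                }
            }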
Example #13
 public CompiledModel(Function model)
 {
     Model           = model;
     LabelVariable   = Variable.InputVariable(new[] { Model.Output.Shape[0] }, DataType.Float);
     FeatureVariable = Model.Inputs.FirstOrDefault(variable => variable.IsInput);
 }
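A model loaded from disk can be wrapped directly; a minimal sketch (the file name is a placeholder):

     // Hypothetical usage: load a saved CNTK model and wrap it.
     var device   = DeviceDescriptor.UseDefaultDevice();
     var compiled = new CompiledModel(Function.Load("model.cntk", device));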
Example #14
        /// <summary>
        /// Train the model.
        /// </summary>
        /// <param name="threshold"></param>
        public void Train(double threshold = 0)
        {
            // create model and variables
            features = CreateFeatureVariable();
            labels   = CreateLabelVariable();
            Model    = CreateModel(features);
            AssertSequenceLength();

            // set up loss function
            CNTK.Function lossFunction = null;
            switch (lossFunctionType)
            {
            case LossFunctionType.BinaryCrossEntropy: lossFunction = CNTK.CNTKLib.BinaryCrossEntropy(Model, labels); break;

            case LossFunctionType.MSE: lossFunction = CNTK.CNTKLib.SquaredError(Model, labels); break;

            case LossFunctionType.CrossEntropyWithSoftmax: lossFunction = CNTK.CNTKLib.CrossEntropyWithSoftmax(Model, labels); break;

            case LossFunctionType.Custom: lossFunction = CustomLossFunction(); break;
            }

            // set up accuracy function
            CNTK.Function accuracy_function = null;
            switch (accuracyFunctionType)
            {
            case AccuracyFunctionType.SameAsLoss: accuracy_function = lossFunction; break;

            case AccuracyFunctionType.BinaryAccuracy: accuracy_function = NetUtil.BinaryAccuracy(Model, labels); break;
            }

            // set up an adam learner
            var learner = Model.GetAdamLearner(
                (LearningRate, (uint)BatchSize), // remove batch_size?
                (0.9, (uint)BatchSize),          // remove batch_size?
                unitGain: false);

            // set up trainer
            trainer = CNTK.CNTKLib.CreateTrainer(Model, lossFunction, accuracy_function, new CNTK.LearnerVector()
            {
                learner
            });

            // set up a scheduler to tweak the learning rate
            scheduler = new ReduceLROnPlateau(learner, LearningRate);

            // set up an evaluator
            if (validationFeatures != null)
            {
                evaluator = CNTK.CNTKLib.CreateEvaluator(accuracy_function);
            }

            // write the model summary
            Console.WriteLine("  Model architecture:");
            Console.WriteLine(Model.ToSummary());

            // clear the training curves
            TrainingCurves[0].Clear();
            TrainingCurves[1].Clear();

            // train for a certain number of epochs
            for (int epoch = 0; epoch < NumberOfEpochs; epoch++)
            {
                var epoch_start_time = DateTime.Now;

                // train and evaluate the model
                var epoch_training_metric     = TrainBatches();
                var epoch_validation_accuracy = EvaluateBatches();

                // add to training curve
                TrainingCurves[0].Add(epoch_training_metric);
                TrainingCurves[1].Add(epoch_validation_accuracy);

                // write current loss and accuracy
                var elapsedTime = DateTime.Now.Subtract(epoch_start_time);
                if (metricType == MetricType.Accuracy)
                {
                    Console.WriteLine($"Epoch {epoch + 1:D2}/{NumberOfEpochs}, Elapsed time: {elapsedTime.TotalSeconds:F3} seconds. " +
                                      $"Training Accuracy: {epoch_training_metric:F3}. Validation Accuracy: {epoch_validation_accuracy:F3}.");
                }
                else
                {
                    Console.WriteLine($"Epoch {epoch + 1:D2}/{NumberOfEpochs}, Elapsed time: {elapsedTime.TotalSeconds:F3} seconds, Training Loss: {epoch_training_metric:F3}");
                }

                // abort training if scheduler says so
                if (scheduler.Update(epoch_training_metric))
                {
                    break;
                }
                if ((threshold != 0) && (epoch_training_metric < threshold))
                {
                    break;
                }
            }
        }
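A driver for this trainer is then short; a sketch with a hypothetical subclass that overrides CreateModel as in Examples #4 and #15:

            // Hypothetical driver for a concrete subclass that implements CreateModel(...).
            var app = new MyRegressionTrainer();
            app.Train(threshold: 0.05);    // stop early once the training metric drops below 0.05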
Example #15
 /// <summary>
 /// Create the model.
 /// </summary>
 /// <param name="features">The input feature to build the model on.</param>
 /// <returns>The completed model to use.</returns>
 protected abstract CNTK.Function CreateModel(CNTK.Variable features);
Example #16
        /// <summary>
        /// Predicts the specified image.
        /// </summary>
        /// <param name="bmp">The image in bitmap format.</param>
        /// <param name="confidence">The confidence level for the prediction result.</param>
        /// <returns>The predicted regions, each with a label, bounding box, and score.</returns>
        public List <PredResult> Predict(Bitmap bmp, double confidence = 0.5)
        {
            try
            {
                proposedBoxes = new List <Rectangle>();

                var          resized    = bmp.Resize(1000, 1000, true);
                List <float> roiList    = GenerateROIS(resized, model);
                List <float> resizedCHW = resized.ParallelExtractCHW();

                //CalculateROI(resizedCHW);
                // Create input data map
                var inputDataMap = new Dictionary <Variable, Value>();
                var inputVal1    = Value.CreateBatch(modelFunc.Arguments.First().Shape, resizedCHW, GlobalParameters.Device);
                inputDataMap.Add(modelFunc.Arguments.First(), inputVal1);

                var inputVal2 = Value.CreateBatch(modelFunc.Arguments[1].Shape, roiList, GlobalParameters.Device);
                inputDataMap.Add(modelFunc.Arguments[1], inputVal2);

                Variable outputVar = GetOutputVar(model);

                // Create output data map. Using null as Value to indicate using system allocated memory.
                // Alternatively, create a Value object and add it to the data map.
                var outputDataMap = new Dictionary <Variable, Value>();
                outputDataMap.Add(outputVar, null);

                // Start evaluation on the device
                modelFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);

                // Get evaluate result as dense output
                var outputVal            = outputDataMap[outputVar];
                var outputData           = outputVal.GetDenseData <float>(outputVar);
                List <PredResult> result = new List <PredResult>();

                var labels    = GetLabels(model);
                int numLabels = labels.Length;
                int numRois   = outputData[0].Count / numLabels;

                int numBackgroundRois = 0;
                for (int i = 0; i < numRois; i++)
                {
                    var outputForRoi = outputData[0].Skip(i * numLabels).Take(numLabels).ToList();

                    // Retrieve the predicted label as the argmax over all predictions for the current ROI
                    var max = outputForRoi.IndexOf(outputForRoi.Max());

                    if (max > 0)
                    {
                        result.Add(new PredResult()
                        {
                            Name  = labels[max],
                            BBox  = proposedBoxes[i],
                            Score = outputForRoi.Max()
                        });

                        //Console.WriteLine("Outcome for ROI {0}: {1} \t({2})", i, max, labels[max]);
                    }
                    else
                    {
                        numBackgroundRois++;
                    }
                }

                var groupBoxes = result.GroupBy(x => (x.Name)).ToList();
                result = new List <PredResult>();
                foreach (var item in groupBoxes)
                {
                    int       counter   = 0;
                    Rectangle unionRect = new Rectangle();

                    foreach (var rect in item.ToList())
                    {
                        if (counter == 0)
                        {
                            unionRect = rect.BBox;
                            continue;
                        }

                        unionRect = Rectangle.Union(unionRect, rect.BBox);
                    }

                    //var orderedList = item.ToList().OrderByDescending(x => (x.BBox.Width * x.BBox.Height)).ToList();
                    foreach (var rect in item.ToList())
                    {
                        unionRect = Rectangle.Intersect(unionRect, rect.BBox);
                    }

                    var goodPred = item.ToList().OrderByDescending(x => (x.Score)).ToList()[0];
                    goodPred.BBox = unionRect;
                    result.Add(goodPred);
                }

                //foreach (var item in result)
                //{
                //    img.Draw(item.BBox, new Bgr(0, 255, 0));
                //}

                //img.Save("objdet_pred.jpg");

                Logging.WriteTrace("Prediction Completed");

                return(result);
            }
            catch (Exception ex)
            {
                Logging.WriteTrace(ex);
                throw;   // rethrow without resetting the stack trace
            }
        }