/// <summary>
 /// Add a new set of observed metrics.
 /// </summary>
 /// <param name="mcm">The multiclass classification metrics to accumulate.</param>
 /// <param name="timeTaken">The time taken by the run, in milliseconds.</param>
 public void AddData(MulticlassClassificationMetrics mcm, double timeTaken)
 {
     MacroAccuracy.AddValue(mcm.MacroAccuracy);
     MicroAccuracy.AddValue(mcm.MicroAccuracy);
     LogLoss.AddValue(mcm.LogLoss);
     LogLossReduction.AddValue(mcm.LogLossReduction);
     TimeTakenMiliSeconds.AddValue(timeTaken);
     StatConfusionMatrix.AddConfusionMatrix(mcm.ConfusionMatrix);
 }
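
For context, a hedged usage sketch of AddData with ML.NET: assume the method lives on a statistics-collector class (named MulticlassStatistics here purely for illustration), and that mlContext, model, and testData are set up elsewhere.

// Hypothetical usage: record one observation per evaluated model.
// MulticlassStatistics is an illustrative name for the class declaring AddData.
var stats = new MulticlassStatistics();
var sw = System.Diagnostics.Stopwatch.StartNew();
var metrics = mlContext.MulticlassClassification.Evaluate(
    model.Transform(testData), labelColumnName: "Label");
sw.Stop();
stats.AddData(metrics, sw.Elapsed.TotalMilliseconds);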
Example 2
        [TestMethod]
        public void LogLoss_Loss()
        {
            var sut = new LogLoss();

            // Column-major 3x2 matrices: row i holds sample i's one-hot target
            // and the corresponding predicted class probabilities.
            var targets     = Matrix<float>.Build.Dense(3, 2, new float[] { 1f, 1f, 0f, 0f, 0f, 1f });
            var predictions = Matrix<float>.Build.Dense(3, 2, new float[] { 0.9f, 0.9f, 0.1f, 0.1f, 0.1f, 0.9f });

            var actual = sut.Loss(targets, predictions);

            Assert.AreEqual(0.105360545, actual, 0.001);
        }
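
A quick sanity check on the expected value: Math.NET fills Dense matrices column by column, so each of the three rows assigns probability 0.9 to its true class, and the mean cross-entropy is simply -ln(0.9).

// Each sample contributes -ln(0.9), so the mean over three samples is the same.
double expected = -Math.Log(0.9);   // ≈ 0.1053605, matching the asserted value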
Example 3
        /// <summary>
        /// Add a set of evaluation metrics to the set of observations.
        /// </summary>
        /// <param name="metrics">The observed multiclass classification evaluation metrics.</param>
        void IMetricsStatistics<MultiClassClassifierMetrics>.Add(MultiClassClassifierMetrics metrics)
        {
            MacroAccuracy.Add(metrics.MacroAccuracy);
            MicroAccuracy.Add(metrics.MicroAccuracy);
            LogLoss.Add(metrics.LogLoss);
            LogLossReduction.Add(metrics.LogLossReduction);
            TopKAccuracy.Add(metrics.TopKAccuracy);

            // The per-class array is sized lazily on the first observation,
            // once the number of classes is known.
            if (PerClassLogLoss == null)
            {
                PerClassLogLoss = MetricsStatisticsUtils.InitializeArray(metrics.PerClassLogLoss.Count);
            }
            MetricsStatisticsUtils.AddToEach(metrics.PerClassLogLoss, PerClassLogLoss);
        }
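
The two MetricsStatisticsUtils calls are not shown above; a minimal sketch of what they plausibly do, assuming PerClassLogLoss is an array of running-statistic accumulators (a MetricStatistics type with an Add(double) method, matching the Add calls used above):

// Plausible shape of the helpers (a sketch, not the verified implementation):
// InitializeArray creates one accumulator per class; AddToEach folds a
// per-class metric vector into those accumulators element-wise.
internal static class MetricsStatisticsUtils
{
    public static MetricStatistics[] InitializeArray(int length)
    {
        var stats = new MetricStatistics[length];
        for (int i = 0; i < length; i++)
        {
            stats[i] = new MetricStatistics();
        }
        return stats;
    }

    public static void AddToEach(System.Collections.Generic.IReadOnlyList<double> values, MetricStatistics[] stats)
    {
        for (int i = 0; i < values.Count; i++)
        {
            stats[i].Add(values[i]);
        }
    }
}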
Example 4
        public static double Train()
        {
            var  stopwatch = System.Diagnostics.Stopwatch.StartNew();
            Data data      = allData.Take(trainDataSize);

            // 28x28 input pixels flattened; 10 output classes.
            int inputSize = 28 * 28, outputSize = 10;
            int deep = width.Count + 1;   // hidden layers plus the output layer

            ILossFunction<double> loss = new LogLoss();

            Model m = new Model(deep, width.ToArray(), 1, 1, inputSize, outputSize, true,
                                trainMatrixRandomCenter, trainMatrixRandomOffset,
                                trainReLUCoef, trainSigmoidCoef);

            m.LogEpoch = logEpoch;
            m.LogBatch = logBatch;

            m.Train(data, trainEpoch, trainBatch, trainTeachRate, loss);
            m.Save(modelPath);

            stopwatch.Stop();
            return stopwatch.ElapsedMilliseconds;
        }
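
The snippet does not show what this repo's LogLoss computes; a minimal sketch under the assumption that ILossFunction<double> exposes a loss over flattened one-hot targets and predicted probabilities (the signature here is an assumption, not the repo's actual interface):

// Cross-entropy for one sample: L = -sum_k t[k] * ln(max(p[k], eps)).
public double Loss(double[] targets, double[] predictions)
{
    const double eps = 1e-15;   // guard against ln(0)
    double sum = 0.0;
    for (int i = 0; i < targets.Length; i++)
    {
        sum += targets[i] * Math.Log(Math.Max(predictions[i], eps));
    }
    return -sum;
}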
Example 5
        private static void Main()
        {
            /*basic config*/
            const int   batchSize    = 256;
            const int   maxEpo       = 100;
            const float learningRate = 1e-4f;
            const float weightDecay  = 1e-4f;

            /*context and net symbol*/
            var ctx = Context.Gpu();
            var net = AlexnetSymbol(2);

            /*args_map and aux_map are used for saving the parameters*/
            var argsMap = new Dictionary <string, NDArray>();
            var auxMap  = new Dictionary <string, NDArray>();

            /*we must tell MXNet the shapes of the data and the label*/
            argsMap["data"]  = new NDArray(new Shape(batchSize, 3, 256, 256), ctx);
            argsMap["label"] = new NDArray(new Shape(batchSize), ctx);

            /*with data and label, the executor can be generated automatically*/
            using (var exec = net.SimpleBind(ctx, argsMap))
            {
                var argNames            = net.ListArguments();
                var auxiliaryDictionary = exec.AuxiliaryDictionary();
                var argmentDictionary   = exec.ArgmentDictionary();

                /*if fine-tuning from a pre-trained model, load the parameters here*/
                // NDArray.Load("./model/alex_params_3", nullptr, &args_map);
                /*otherwise, initialize the params with the Xavier initializer*/
                var xavier = new Xavier(RandType.Gaussian, FactorType.In, 2.34f);
                foreach (var arg in argmentDictionary)
                {
                    /*be careful here: the arg's name must have specific prefixes or
                     * suffixes for the initializer to apply*/
                    xavier.Operator(arg.Key, arg.Value);
                }

                /*print out to check the shape of the net*/
                foreach (var s in net.ListArguments())
                {
                    Logging.LG(s);

                    var sb = new StringBuilder();
                    var k  = argmentDictionary[s].GetShape();
                    foreach (var i in k)
                    {
                        sb.Append($"{i} ");
                    }

                    Logging.LG(sb.ToString());
                }

                /*these binary files should be generated with the im2rec tool, which
                 * can be found in mxnet/bin*/
                var trainIter = new MXDataIter("ImageRecordIter")
                                .SetParam("path_imglist", "./data/train.lst")
                                .SetParam("path_imgrec", "./data/train.rec")
                                .SetParam("data_shape", new Shape(3, 256, 256))
                                .SetParam("batch_size", batchSize)
                                .SetParam("shuffle", 1)
                                .CreateDataIter();
                var valIter = new MXDataIter("ImageRecordIter")
                              .SetParam("path_imglist", "./data/val.lst")
                              .SetParam("path_imgrec", "./data/val.rec")
                              .SetParam("data_shape", new Shape(3, 256, 256))
                              .SetParam("batch_size", batchSize)
                              .CreateDataIter();

                var opt = OptimizerRegistry.Find("ccsgd");
                opt.SetParam("momentum", 0.9)
                   .SetParam("rescale_grad", 1.0 / batchSize)
                   .SetParam("clip_gradient", 10)
                   .SetParam("lr", learningRate)
                   .SetParam("wd", weightDecay);

                var accuracyTrain = new Accuracy();
                var accuracyVal   = new Accuracy();
                var loglossVal    = new LogLoss();
                for (var iter = 0; iter < maxEpo; ++iter)
                {
                    Logging.LG($"Train Epoch: {iter}");
                    /*reset the metric every epoch*/
                    accuracyTrain.Reset();
                    /*reset the data iter every epoch*/
                    trainIter.Reset();
                    while (trainIter.Next())
                    {
                        var batch = trainIter.GetDataBatch();
                        Logging.LG($"{batch.Index.Length}");
                        /*use copyto to feed new data and label to the executor*/
                        batch.Data.CopyTo(argmentDictionary["data"]);
                        batch.Label.CopyTo(argmentDictionary["label"]);
                        exec.Forward(true);
                        exec.Backward();
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            if (argNames[i] == "data" || argNames[i] == "label")
                            {
                                continue;
                            }
                            opt.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                        }

                        NDArray.WaitAll();
                        accuracyTrain.Update(batch.Label, exec.Outputs[0]);
                    }
                    Logging.LG($"ITER: {iter} Train Accuracy: {accuracyTrain.Get()}");

                    Logging.LG($"Val Epoch: {iter}");
                    accuracyVal.Reset();
                    valIter.Reset();
                    loglossVal.Reset();
                    while (valIter.Next())
                    {
                        var batch = valIter.GetDataBatch();
                        Logging.LG($"{batch.Index.Length}");
                        batch.Data.CopyTo(argmentDictionary["data"]);
                        batch.Label.CopyTo(argmentDictionary["label"]);
                        exec.Forward(false);
                        NDArray.WaitAll();
                        accuracyVal.Update(batch.Label, exec.Outputs[0]);
                        loglossVal.Update(batch.Label, exec.Outputs[0]);
                    }
                    Logging.LG($"ITER: {iter} Val Accuracy: {accuracyVal.Get()}");
                    Logging.LG($"ITER: {iter} Val LogLoss: {loglossVal.Get()}");

                    /*save the parameters*/
                    var savePathParam = $"./model/alex_param_{iter}";
                    /*copy the dictionary so that removing entries below does not
                     * mutate the executor's argument dictionary*/
                    var saveArgs = new Dictionary<string, NDArray>(argmentDictionary);
                    /*we do not want to save the data and label*/
                    saveArgs.Remove("data");
                    saveArgs.Remove("label");

                    /*AlexNet does not have any auxiliary arrays, so we do not need
                     * to save aux_map*/
                    Logging.LG($"ITER: {iter} Saving to...{savePathParam}");
                    NDArray.Save(savePathParam, saveArgs);
                }
                /*don't forget to release the executor*/
            }

            MXNet.MXNotifyShutdown();
        }
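
For reference, the "ccsgd" optimizer configured above applies SGD with momentum and weight decay per parameter; one common formulation, sketched with illustrative names (this helper is not part of the MXNet API):

// One common SGD-with-momentum step, covering the rescale_grad,
// clip_gradient, lr, momentum, and wd settings configured above.
static double SgdMomentumStep(double weight, double grad, ref double mom,
                              double lr, double momentum, double wd,
                              double rescale, double clip)
{
    grad = Math.Max(-clip, Math.Min(clip, grad * rescale)); // rescale, then clip
    mom  = momentum * mom - lr * (grad + wd * weight);      // update momentum buffer
    return weight + mom;                                    // apply the step
}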