Example #1
        /// <summary>
        /// Trains the model on the training dataset for the specified number of iterations, using the given batch size
        /// </summary>
        /// <param name="x">Training features</param>
        /// <param name="y">Training labels (expected outputs)</param>
        /// <param name="numIterations">Number of training iterations to run</param>
        /// <param name="batchSize">Number of samples per batch</param>
        public void Train(NDArray x, NDArray y, int numIterations, int batchSize)
        {
            //Initialize batch loss and metric lists for temporarily holding results
            List<double> batchLoss    = new List<double>();
            List<double> batchMetrics = new List<double>();

            //Loop until the specified number of iterations is reached
            for (int i = 1; i <= numIterations; i++)
            {
                //Initialize local variables
                int currentIndex = 0;
                batchLoss.Clear();
                batchMetrics.Clear();

                //Loop over batches until the training data is exhausted
                while (x.Next(currentIndex, batchSize))
                {
                    //Get the batch data based on the specified batch size
                    var xtrain = x.Slice(currentIndex, batchSize);
                    var ytrain = y.Slice(currentIndex, batchSize);

                    //Run forward for all the layers to predict the value for the training set
                    var ypred = Forward(xtrain);

                    //Compute the loss/cost value for the prediction with respect to the expected result
                    var costVal = Cost.Forward(ypred, ytrain);
                    batchLoss.AddRange(costVal.Data);

                    //Compute the metric value for the prediction with respect to the expected result
                    if (Metric != null)
                    {
                        var metric = Metric.Calculate(ypred, ytrain);
                        batchMetrics.AddRange(metric.Data);
                    }

                    //Get the gradient of the cost function, which is then passed to the layers during back-propagation
                    var grad = Cost.Backward(ypred, ytrain);

                    //Run back-propagation across all the layers
                    Backward(grad);

                    //Update the neural network weights using the specified optimizer
                    foreach (var layer in Layers)
                    {
                        Optimizer.Update(i, layer);
                    }

                    currentIndex += batchSize;
                }

                //Average the batch results and raise the end-of-iteration event
                double batchLossAvg = Math.Round(batchLoss.Average(), 2);

                double batchMetricAvg = Metric != null ? Math.Round(batchMetrics.Average(), 2) : 0;

                TrainingLoss.Add(batchLossAvg);

                if (batchMetrics.Count > 0)
                {
                    TrainingMetrics.Add(batchMetricAvg);
                }

                EpochEndEventArgs eventArgs = new EpochEndEventArgs(i, batchLossAvg, batchMetricAvg);
                BatchEnd?.Invoke(i, eventArgs);
            }
        }
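
Note that although the event is named BatchEnd, it is raised once per iteration, after all batches have been processed, with epoch-style arguments. A minimal usage sketch follows; the model class name, the input arrays, and the EpochEndEventArgs property names are assumptions for illustration, not part of the example:

        // Hypothetical usage sketch; only Train and BatchEnd come from the example above.
        var model = new NeuralNetwork();   // assumed model class exposing Train/BatchEnd

        model.BatchEnd += (iteration, e) =>
        {
            // EpochEndEventArgs is assumed to expose the values passed to its constructor
            Console.WriteLine($"Iteration {iteration}: loss={e.Loss}, metric={e.Metric}");
        };

        model.Train(xTrain, yTrain, numIterations: 25, batchSize: 32);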
Example #2
        // Renders the summary view with the calculated metrics
        public ActionResult Summary()
        {
            var metrics = Metric.Calculate();

            return View(metrics);
        }
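
This action assumes a Metric type whose parameterless Calculate() returns a model that the Summary view renders. A minimal sketch of one possible shape; every type and property name here is an assumption:

        // Hypothetical metrics model and provider; names are illustrative only.
        public class MetricSummary
        {
            public double AverageLoss { get; set; }
            public double AverageAccuracy { get; set; }
        }

        public static class Metric
        {
            public static MetricSummary Calculate()
            {
                // Placeholder values; a real implementation would aggregate stored training results
                return new MetricSummary { AverageLoss = 0.0, AverageAccuracy = 0.0 };
            }
        }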