Example #1
        /// <summary>
        /// Runs a single training step on one batch: forward pass, loss and gradient
        /// computation, backward pass, regularization, and optimizer update.
        /// </summary>
        /// <param name="i">The current iteration (batch) index, passed to the optimizer update.</param>
        /// <param name="x">The input batch.</param>
        /// <param name="y">The target (label) batch.</param>
        private void RunTrainOnBatch(int i, SuperArray x, SuperArray y)
        {
            SuperArray pred    = Forward(x);
            SuperArray lossVal = LossFn.Forward(pred, y);
            SuperArray grad    = LossFn.Backward(pred, y).Reshape(-1, 1);

            lossVal = ApplyRegularizer(lossVal);
            var metricVal = MetricFn.Calc(pred, y);

            train_losses.Add(Ops.Mean(lossVal));
            train_metrics.Add(Ops.Mean(metricVal));

            Backward(grad);

            ApplyDeltaRegularizer();

            foreach (var layer in Layers)
            {
                OptimizerFn.Update(i, layer);
            }

            pred.Dispose();
            lossVal.Dispose();
            grad.Dispose();
        }
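A minimal sketch of how this batch routine might be driven from an epoch loop, assuming it lives on the same model class as Forward, Backward, and the train_losses list. The Fit signature and the GetBatches helper below are illustrative assumptions, not part of the API shown above.

        // Illustrative sketch only: epoch loop feeding mini-batches into RunTrainOnBatch.
        // GetBatches is a hypothetical helper that slices (x, y) into batchSize chunks.
        public void Fit(SuperArray x, SuperArray y, int epochs, int batchSize)
        {
            for (int epoch = 1; epoch <= epochs; epoch++)
            {
                int i = 0;
                foreach (var (batchX, batchY) in GetBatches(x, y, batchSize))
                {
                    RunTrainOnBatch(i++, batchX, batchY);
                }

                Console.WriteLine("Epoch: {0}, Loss: {1}", epoch, train_losses.Average());
            }
        }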
Example #2
        /// <summary>
        /// Runs a single training step on one batch: forward pass, loss and gradient
        /// computation, backward pass, regularization, and optimizer update.
        /// </summary>
        /// <param name="i">The current iteration (batch) index, passed to the optimizer update.</param>
        /// <param name="x">The input batch.</param>
        /// <param name="y">The target (label) batch.</param>
        private void RunTrainOnBatch(int i, Tensor x, Tensor y)
        {
            Tensor pred    = Forward(x);
            Tensor lossVal = LossFn.Forward(pred, y);
            Tensor grad    = LossFn.Backward(pred, y);

            lossVal = ApplyRegularizer(lossVal);
            var metricVal = MetricFn.Calc(pred, y);

            train_losses.Add(K.Mean(lossVal));
            train_metrics.Add(K.Mean(metricVal));

            Backward(grad);

            ApplyDeltaRegularizer();

            foreach (var layer in Layers)
            {
                OptimizerFn.Update(i, layer);
            }

            pred.Dispose();
            lossVal.Dispose();
            grad.Dispose();
        }
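Both variants lean on the same loss contract: Forward produces the per-sample loss for the predictions, and Backward produces the gradient of that loss with respect to the predictions, which is then pushed through the layers via Backward(grad). A hedged sketch of that contract follows, using the Tensor type from Example #2; the abstract base class itself is an assumption for illustration, not a confirmed type in the library.

        // Illustrative sketch of the loss interface assumed by RunTrainOnBatch.
        public abstract class BaseLoss
        {
            // Per-sample loss values for the given predictions and targets.
            public abstract Tensor Forward(Tensor preds, Tensor labels);

            // Gradient of the loss with respect to the predictions (dL/dPred),
            // which starts backpropagation through the layers.
            public abstract Tensor Backward(Tensor preds, Tensor labels);
        }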
Example #3
        public void Fit(IFrameIter train, int epochs, int batchSize, IFrameIter val = null)
        {
            DateTime     start         = DateTime.Now;
            List<float> train_losses  = new List<float>();
            List<float> train_metrics = new List<float>();
            List<float> val_losses    = new List<float>();
            List<float> val_metrics   = new List<float>();

            train.SetBatchSize(batchSize);
            val?.SetBatchSize(batchSize);
            for (int iteration = 1; iteration <= epochs; iteration++)
            {
                train.Reset();
                while (train.Next())
                {
                    var (x, y) = train.GetBatch();

                    using (Variable pred = Forward(x))
                        using (Tensor lossVal = LossFn.Call(pred.Data, y))
                            using (Tensor grad = LossFn.CalcGrad(pred.Data, y))
                                using (Tensor reg_loss = ApplyRegularizer(lossVal))
                                {
                                    //var metricVal = MetricFn.Call(pred.Data, y);
                                    train_losses.Add(reg_loss.TVar().ToScalar().Evaluate());
                                    //train_metrics.Add(metricVal.ToScalar().Evaluate());

                                    Backward(grad);

                                    ApplyDeltaRegularizer();

                                    foreach (var layer in Layers)
                                    {
                                        OptimizerFn.Update(iteration, layer);
                                    }
                                }

                    x.Dispose();
                    y.Dispose();
                }

                if (val != null)
                {
                    val.Reset();
                    while (val.Next())
                    {
                        var (x, y) = val.GetBatch();

                        using (var pred = Forward(x))
                            using (Tensor lossVal = LossFn.Call(pred.Data, y))
                                using (Tensor metricVal = MetricFn.Call(pred.Data, y))
                                {
                                    val_losses.Add(TOps.MeanF(lossVal));
                                    val_metrics.Add(TOps.MeanF(metricVal));
                                }

                        x.Dispose();
                        y.Dispose();
                    }
                }

                if (val != null)
                    Console.WriteLine("Epoch: {0}, Loss: {1}, Val Loss: {2}, Elapsed: {3}", iteration, train_losses.Average(), val_losses.Average(), DateTime.Now - start);
                else
                    Console.WriteLine("Epoch: {0}, Loss: {1}, Elapsed: {2}", iteration, train_losses.Average(), DateTime.Now - start);
            }
        }
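A hedged usage sketch of calling Fit. Only the Fit signature comes from the example above; the Sequential model and DataFrameIter iterator names are assumptions standing in for whatever concrete types implement Forward/Backward and IFrameIter in the surrounding project.

        // Illustrative only: the concrete model and iterator types below are assumptions.
        var model = new Sequential();                              // hypothetical model exposing Fit
        IFrameIter trainIter = new DataFrameIter(trainX, trainY);  // hypothetical IFrameIter implementation
        IFrameIter valIter   = new DataFrameIter(valX, valY);

        model.Fit(trainIter, epochs: 10, batchSize: 32, val: valIter);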