Code Example #1
        /// <summary>
        /// Runs a single training step on one batch: forward pass, loss and gradient, backward pass and optimizer update.
        /// </summary>
        /// <param name="i">The current iteration index, passed through to the optimizer.</param>
        /// <param name="x">The input features for the batch.</param>
        /// <param name="y">The target values for the batch.</param>
        private void RunTrainOnBatch(int i, SuperArray x, SuperArray y)
        {
            SuperArray pred    = Forward(x);
            SuperArray lossVal = LossFn.Forward(pred, y);
            SuperArray grad    = LossFn.Backward(pred, y).Reshape(-1, 1);

            lossVal = ApplyRegularizer(lossVal);
            var metricVal = MetricFn.Calc(pred, y);

            train_losses.Add(Ops.Mean(lossVal));
            train_metrics.Add(Ops.Mean(metricVal));

            Backward(grad);

            ApplyDeltaRegularizer();

            foreach (var layer in Layers)
            {
                OptimizerFn.Update(i, layer);
            }

            pred.Dispose();
            lossVal.Dispose();
            grad.Dispose();
        }
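In this batch-training step (and its Tensor-based twin in Code Example #2 below), LossFn.Forward produces the per-sample loss for the predictions and LossFn.Backward produces the gradient of that loss with respect to the predictions, which is then pushed through the network by Backward. A rough sketch of that loss contract, with plain float[] standing in for SuperArray/Tensor and mean squared error chosen purely for illustration (this is not SiaNet's actual loss class):

public class MseLossSketch
{
    // Forward: element-wise squared error (pred - y)^2, one value per sample.
    public float[] Forward(float[] pred, float[] y)
    {
        var loss = new float[pred.Length];
        for (int j = 0; j < pred.Length; j++)
        {
            float d = pred[j] - y[j];
            loss[j] = d * d;
        }
        return loss;
    }

    // Backward: gradient of the squared error w.r.t. each prediction, 2 * (pred - y).
    public float[] Backward(float[] pred, float[] y)
    {
        var grad = new float[pred.Length];
        for (int j = 0; j < pred.Length; j++)
        {
            grad[j] = 2f * (pred[j] - y[j]);
        }
        return grad;
    }
}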
Code Example #2
File: TrainPredict.cs Project: zhuthree/SiaNet
        /// <summary>
        /// Runs a single training step on one batch: forward pass, loss and gradient, backward pass and optimizer update.
        /// </summary>
        /// <param name="i">The current iteration index, passed through to the optimizer.</param>
        /// <param name="x">The input features for the batch.</param>
        /// <param name="y">The target values for the batch.</param>
        private void RunTrainOnBatch(int i, Tensor x, Tensor y)
        {
            Tensor pred    = Forward(x);
            Tensor lossVal = LossFn.Forward(pred, y);
            Tensor grad    = LossFn.Backward(pred, y);

            lossVal = ApplyRegularizer(lossVal);
            var metricVal = MetricFn.Calc(pred, y);

            train_losses.Add(K.Mean(lossVal));
            train_metrics.Add(K.Mean(metricVal));

            Backward(grad);

            ApplyDeltaRegularizer();

            foreach (var layer in Layers)
            {
                OptimizerFn.Update(i, layer);
            }

            pred.Dispose();
            lossVal.Dispose();
            grad.Dispose();
        }
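Both versions above finish the step by looping over Layers and calling OptimizerFn.Update(i, layer), which applies the optimizer's update rule to that layer's parameters using the gradients accumulated by Backward. A bare-bones SGD-style sketch of that idea (the flat-array parameter layout is an assumption made for illustration, not SiaNet's optimizer API):

public class SgdUpdateSketch
{
    public float LearningRate { get; set; } = 0.01f;

    // parameters and grads are flat arrays standing in for one layer's weights
    // and the gradients computed for them during the backward pass.
    public void Update(float[] parameters, float[] grads)
    {
        for (int j = 0; j < parameters.Length; j++)
        {
            parameters[j] -= LearningRate * grads[j];   // w := w - lr * dL/dw
        }
    }
}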
Code Example #3
 /// <summary>
 /// Add a set of evaluation metrics to the set of observations.
 /// </summary>
 /// <param name="metrics">The observed regression evaluation metrics.</param>
 public override void Add(RegressionMetrics metrics)
 {
     L1.Add(metrics.MeanAbsoluteError);
     L2.Add(metrics.MeanSquaredError);
     Rms.Add(metrics.RootMeanSquaredError);
     LossFn.Add(metrics.LossFunction);
     RSquared.Add(metrics.RSquared);
 }
Code Example #4
 /// <summary>
 /// Add a set of evaluation metrics to the set of observations.
 /// </summary>
 /// <param name="metrics">The observed regression evaluation metrics.</param>
 public override void Add(RegressionMetrics metrics)
 {
     L1.Add(metrics.L1);
     L2.Add(metrics.L2);
     Rms.Add(metrics.Rms);
     LossFn.Add(metrics.LossFn);
     RSquared.Add(metrics.RSquared);
 }
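Code Examples #3 and #4 are the same Add override written against two generations of the ML.NET RegressionMetrics surface: the newer descriptive property names (MeanAbsoluteError, MeanSquaredError, RootMeanSquaredError, LossFunction) and the older short names (L1, L2, Rms, LossFn). A self-contained sketch of the kind of container such an override could live in is shown below; the abstract base class, the List<double> fields and the Summary helper are assumptions for illustration, only the RegressionMetrics property names come from ML.NET:

using System.Collections.Generic;
using System.Linq;
using Microsoft.ML.Data;

public abstract class MetricsObservations<TMetrics>
{
    public abstract void Add(TMetrics metrics);
}

public class RegressionMetricsObservations : MetricsObservations<RegressionMetrics>
{
    public List<double> L1       { get; } = new List<double>();
    public List<double> L2       { get; } = new List<double>();
    public List<double> Rms      { get; } = new List<double>();
    public List<double> LossFn   { get; } = new List<double>();
    public List<double> RSquared { get; } = new List<double>();

    public override void Add(RegressionMetrics metrics)
    {
        L1.Add(metrics.MeanAbsoluteError);
        L2.Add(metrics.MeanSquaredError);
        Rms.Add(metrics.RootMeanSquaredError);
        LossFn.Add(metrics.LossFunction);
        RSquared.Add(metrics.RSquared);
    }

    // Hypothetical helper: average each observed metric, e.g. across CV folds.
    public string Summary() =>
        string.Format("MAE={0:F4} MSE={1:F4} RMSE={2:F4} Loss={3:F4} R2={4:F4}",
                      L1.Average(), L2.Average(), Rms.Average(),
                      LossFn.Average(), RSquared.Average());
}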
Code Example #5
        public void Fit(IFrameIter train, int epochs, int batchSize, IFrameIter val = null)
        {
            DateTime     start         = DateTime.Now;
            List <float> train_losses  = new List <float>();
            List <float> train_metrics = new List <float>();
            List <float> val_losses    = new List <float>();
            List <float> val_metrics   = new List <float>();

            train.SetBatchSize(batchSize);
            for (int iteration = 1; iteration <= epochs; iteration++)
            {
                train.Reset();
                while (train.Next())
                {
                    var (x, y) = train.GetBatch();

                    using (Variable pred = Forward(x))
                        using (Tensor lossVal = LossFn.Call(pred.Data, y))
                            using (Tensor grad = LossFn.CalcGrad(pred.Data, y))
                                using (Tensor reg_loss = ApplyRegularizer(lossVal))
                                {
                                    //var metricVal = MetricFn.Call(pred.Data, y);
                                    train_losses.Add(reg_loss.TVar().ToScalar().Evaluate());
                                    //train_metrics.Add(metricVal.ToScalar().Evaluate());

                                    Backward(grad);

                                    ApplyDeltaRegularizer();

                                    foreach (var layer in Layers)
                                    {
                                        OptimizerFn.Update(iteration, layer);
                                    }
                                }

                    x.Dispose();
                    y.Dispose();
                }

                if (val != null)
                {
                    while (val.Next())
                    {
                        var (x, y) = val.GetBatch();

                        var pred = Forward(x);

                        var lossVal   = LossFn.Call(pred.Data, y);
                        var metricVal = MetricFn.Call(pred.Data, y);
                        val_losses.Add(TOps.MeanF(lossVal));
                        val_metrics.Add(TOps.MeanF(metricVal));
                    }
                }

                Console.WriteLine("Epoch: {0}, Loss: {1}", iteration, train_losses.Average());
            }
        }
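Fit above touches only four members of the IFrameIter it is given, so the iterator contract it depends on can be read directly off the calls. A sketch of that contract as inferred from the usage (the member signatures and the Tensor element type are assumptions; the project's real IFrameIter may differ):

public interface IFrameIterSketch
{
    void SetBatchSize(int batchSize);    // fix the mini-batch size before iterating
    void Reset();                        // rewind to the first batch of an epoch
    bool Next();                         // advance to the next batch; false when exhausted
    (Tensor x, Tensor y) GetBatch();     // features and targets of the current batch
}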
Code Example #6
        /// <summary>
        /// Runs one full epoch over the training data, with an optional validation pass.
        /// </summary>
        /// <param name="iteration">The current epoch/iteration index.</param>
        /// <param name="train">The training data iterator.</param>
        /// <param name="val">The optional validation data iterator.</param>
        /// <returns>The iteration index that was run.</returns>
        private int RunEpoch(int iteration, DataFrameIter train, DataFrameIter val = null)
        {
            train_losses.Clear();
            train_metrics.Clear();
            val_losses.Clear();
            val_metrics.Clear();

            train.Reset();
            if (val != null)
            {
                val.Reset();
            }

            while (train.Next())
            {
                var (x, y) = train.GetBatch();
                RunTrainOnBatch(iteration, x, y);
                x.Dispose();
                y.Dispose();
            }

            if (val != null)
            {
                while (val.Next())
                {
                    var (x, y) = val.GetBatch();

                    var pred = Forward(x);

                    var lossVal   = LossFn.Forward(pred, y);
                    var metricVal = MetricFn.Calc(pred, y);
                    val_losses.Add(Ops.Mean(lossVal));
                    val_metrics.Add(Ops.Mean(metricVal));
                    x.Dispose();
                    y.Dispose();
                    lossVal.Dispose();
                    metricVal.Dispose();
                }
            }

            return iteration;
        }
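RunEpoch returns the iteration index and leaves its per-batch results in the train_losses/train_metrics and val_losses/val_metrics lists, so a caller only needs to invoke it once per epoch and report the accumulated values. An illustrative driver of that shape (not the project's actual Fit/Train method; the Train signature, the List<float> assumption for the accumulators and the logging format are all assumptions):

// Sketch of a member of the same model class as RunEpoch; assumes the accumulator
// lists are List<float> (as in Code Example #5) and that System.Linq is imported.
public void Train(DataFrameIter train, int epochs, DataFrameIter val = null)
{
    for (int epoch = 1; epoch <= epochs; epoch++)
    {
        // RunEpoch clears and refills the loss/metric lists for this epoch.
        RunEpoch(epoch, train, val);

        Console.WriteLine("Epoch: {0}, Loss: {1:F4}, Metric: {2:F4}",
                          epoch, train_losses.Average(), train_metrics.Average());

        if (val != null && val_losses.Count > 0)
        {
            Console.WriteLine("           Val Loss: {0:F4}, Val Metric: {1:F4}",
                              val_losses.Average(), val_metrics.Average());
        }
    }
}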