private static float TrainModel()
        {
            float cumulative_train_loss = 0;

            foreach (var (data, label) in train_dataloader)
            {
                NDArray loss_result = null;

                // Record the forward pass so autograd can build the graph,
                // then backpropagate from the loss.
                using (var ag = Autograd.Record())
                {
                    var output = net.Call(data);
                    loss_result = loss.Call(output, label);
                    loss_result.Backward();
                }

                // Apply one optimizer update, normalized by the batch size.
                trainer.Step(batch_size);

                // Accumulate the summed per-batch loss for reporting.
                cumulative_train_loss += nd.Sum(loss_result).AsScalar<float>();
            }

            return cumulative_train_loss;
        }
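TrainModel returns the loss summed over every batch in the epoch, so a caller will usually divide by the number of training samples to get a mean. A minimal driver sketch; epochs and train_samples are assumed names for illustration, not fields from the snippet above:

for (int epoch = 1; epoch <= epochs; epoch++)
{
    float train_loss = TrainModel();

    // train_samples is a hypothetical count of samples in train_dataloader.
    Console.WriteLine($"Epoch {epoch}: mean train loss = {train_loss / train_samples}");
}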
Example #2
public override NDArrayOrSymbol HybridForward(NDArrayOrSymbol x, params NDArrayOrSymbol[] args)
        {
            var @out = output.Call(x, args);

            // Optional identity shortcut: add the input back onto the block's
            // output, which requires output to preserve the shape of x.
            if (use_shortcut)
            {
                if (x.IsNDArray)
                {
                    @out = nd.ElemwiseAdd(@out, x.NdX);
                }
                else
                {
                    @out = sym.ElemwiseAdd(@out, x.SymX);
                }
            }

            return @out;
        }
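The use_shortcut flag turns this block into a residual-style unit: the input is added element-wise to the block's output, so the output branch must preserve the shape of x. The IsNDArray/else split is the standard HybridBlock pattern, letting the same HybridForward serve both imperative NDArray execution and symbolic graph construction once the block is hybridized.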
Example #3
public override NDArrayOrSymbol HybridForward(NDArrayOrSymbol x, params NDArrayOrSymbol[] args)
        {
            var residual = x;

            // Main branch.
            x = body.Call(x, args);

            // Downsample the shortcut when the main branch changes shape,
            // so the element-wise addition below is well-defined.
            if (ds != null)
            {
                residual = ds.Call(residual, args);
            }

            // Merge the branches and apply ReLU, dispatching on whether we are
            // running imperatively (NDArray) or symbolically (Symbol).
            if (x.IsNDArray)
            {
                x = nd.Activation(x.NdX + residual.NdX, ActivationType.Relu);
            }
            else
            {
                x = sym.Activation(x.SymX + residual.SymX, ActivationType.Relu);
            }

            return x;
        }
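This is the classic ResNet residual unit: if body maps, say, (N, C, H, W) to (N, 2C, H/2, W/2), the downsample block ds (typically a strided 1x1 convolution) must apply the same channel and stride change to the identity path so the addition lines up; when shapes already match, ds is null and the input passes through unchanged.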
Example #4
public override NDArrayOrSymbol HybridForward(NDArrayOrSymbol x, params NDArrayOrSymbol[] args)
        {
            var matches = _matchers.Call(x);

            // Reading matches.NdX unconditionally means this block only works
            // in imperative (NDArray) mode and cannot be hybridized.
            return this.ComposeMatches(matches.NdX);
        }
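This appears to be a composite matcher in the GluonCV style: _matchers holds several sub-matchers whose proposals ComposeMatches merges in priority order. Note that, unlike the previous two examples, there is no symbolic branch here, so the composition runs imperatively only.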