Example #1
        /// <summary>
        /// Constructs a NALU (Neural Arithmetic Logic Unit) layer over the given input.
        /// </summary>
        /// <param name="x">Input variable.</param>
        /// <param name="outputDim">Dimensionality of the layer output.</param>
        /// <param name="dataType">Data type of the created parameters.</param>
        /// <param name="seed">Random seed used to initialize the weight matrices.</param>
        /// <param name="device">Device on which the parameters are allocated.</param>
        public NALU(Variable x, int outputDim, DataType dataType, uint seed, DeviceDescriptor device)
        {
            var inputDim = x.Shape[0];
            var error    = new Constant(new NDShape(0), dataType, 1e-10);
            var one      = new Constant(new NDShape(0), dataType, 1.0f);

            var W_hat = Weights(inputDim, outputDim, dataType, device, seed, "w_hat");
            var M_hat = Weights(inputDim, outputDim, dataType, device, seed, "m_hat");
            var G     = Weights(inputDim, outputDim, dataType, device, seed, "g_");

            //first construct the NAC (neural accumulator) part
            var      W = CNTKLib.ElementTimes(CNTKLib.Tanh(W_hat), CNTKLib.Sigmoid(M_hat));
            Variable a = CNTKLib.Times(W, x);
            Variable g = CNTKLib.Sigmoid(CNTKLib.Times(G, x));

            var      t2 = CNTKLib.Log(CNTKLib.Abs(x) + error);
            var      t1 = CNTKLib.Times(W, t2);
            Variable m  = CNTKLib.Exp(t1);

            //construct NALU terms
            var o1 = CNTKLib.ElementTimes(g, a);
            var o2 = CNTKLib.ElementTimes(CNTKLib.Minus(one, g), m);

            //NALU output
            var output = CNTKLib.Plus(o1, o2, "NALU");

            //initialize output
            X = x;
            H = output;
        }
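
A minimal usage sketch (hypothetical; it assumes the Weights helper used above and that the X and H members assigned in the constructor are publicly visible):

        // hypothetical sketch: stack a NALU layer on a 2-dimensional input
        var input = Variable.InputVariable(new int[] { 2 }, DataType.Float);
        var nalu  = new NALU(input, 1, DataType.Float, 1, DeviceDescriptor.CPUDevice);
        var model = nalu.H;   // the NALU output function, usable as a regression head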
Example #2
        public static Function MeanAbsoluteError(Variable predictions, Variable targets)
        {
            var errors         = CNTKLib.Minus(targets, predictions);
            var absoluteErrors = CNTKLib.Abs(errors);

            return(ReduceMeanAll(absoluteErrors));
        }
Example #3
        public static Function MeanAbsolutePercentageError(Variable prediction, Variable targets)
        {
            var error      = CNTKLib.Minus(targets, prediction);
            var percentage = CNTKLib.Abs(CNTKLib.ElementDivide(error, targets));

            return(CNTKLib.ReduceMean(percentage, new Axis(-1)));
        }
Example #4
        /// <summary>
        /// Computes the mean absolute percentage error.
        /// </summary>
        /// <param name="labels">The labels.</param>
        /// <param name="predictions">The predictions.</param>
        /// <returns>Function.</returns>
        internal static Function MeanAbsPercentageError(Variable labels, Variable predictions)
        {
            var diff = CNTKLib.ElementDivide(
                CNTKLib.Abs(CNTKLib.Minus(predictions, labels)),
                CNTKLib.Clip(CNTKLib.Abs(labels), Utility.CreateParamVar(float.Epsilon), Utility.CreateParamVar(float.MaxValue)));
            var mean = CNTKLib.ReduceMean(diff, new Axis(-1));

            return(CNTKLib.ElementTimes(Utility.CreateParamVar(100), mean));
        }
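
Here the Clip call guards against division by zero when a label is at or near zero, and the final ElementTimes scales the result to a percentage. One plausible way to wire it up (a sketch only; model, labels, and learner are assumed to exist, and the caller must sit in the same assembly because the helper is internal):

        // hypothetical sketch: MAPE as the evaluation metric next to a squared-error loss
        var loss    = CNTKLib.SquaredError(model.Output, labels);
        var metric  = MeanAbsPercentageError(labels, model.Output);
        var trainer = Trainer.CreateTrainer(model, loss, metric, new List<Learner> { learner });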
Example #5
        public void Train()
        {
            var yt = Variable.InputVariable(new int[] { _dataSet.OutputSize }, DataType.Float);

            var y_yt = CNTKLib.Abs(CNTKLib.Minus(_y, yt));
            var loss = CNTKLib.ReduceSum(y_yt, Axis.AllAxes());

            var learner = CNTKLib.SGDLearner(new ParameterVector(_y.Parameters().ToArray()), new TrainingParameterScheduleDouble(1.0, BATCH_SIZE));
            var trainer = Trainer.CreateTrainer(_y, loss, null, new List<Learner> { learner });

            for (int i = 0; i < EPOCH_COUNT; i++)
            {
                var sumLoss = 0.0;
                var sumEval = 0.0;

                for (int j = 0; j < _dataSet.Count / BATCH_SIZE - 1; j++)
                {
                    var x_value      = Value.CreateBatch(_x.Shape, _dataSet.Input.GetRange(j * BATCH_SIZE * _dataSet.InputSize, BATCH_SIZE * _dataSet.InputSize), DeviceDescriptor.CPUDevice);
                    var yt_value     = Value.CreateBatch(yt.Shape, _dataSet.Output.GetRange(j * BATCH_SIZE * _dataSet.OutputSize, BATCH_SIZE * _dataSet.OutputSize), DeviceDescriptor.CPUDevice);
                    var inputDataMap = new Dictionary<Variable, Value>()
                    {
                        { _x, x_value },
                        { yt, yt_value }
                    };

                    trainer.TrainMinibatch(inputDataMap, false, DeviceDescriptor.CPUDevice);
                    sumLoss += trainer.PreviousMinibatchLossAverage() * trainer.PreviousMinibatchSampleCount();
                }

                Console.WriteLine($"Iter: {i}\tLoss: {sumLoss / _dataSet.Count}");
            }
        }
Example #6
        public StockPricePrediction(int hiddenNeuronCount_, DataSet dataSet_)
        {
            _dataSet = dataSet_;

            var layers = new int[] { _dataSet.InputSize, hiddenNeuronCount_, hiddenNeuronCount_, hiddenNeuronCount_, _dataSet.OutputSize };

            _x = Variable.InputVariable(new int[] { layers[0] }, DataType.Float);

            var lastLayer = _x;

            for (int i = 0; i < layers.Length - 1; i++)
            {
                Parameter weight = new Parameter(new int[] { layers[i + 1], layers[i] }, DataType.Float, CNTKLib.GlorotNormalInitializer());
                Parameter bias   = new Parameter(new int[] { layers[i + 1] }, DataType.Float, CNTKLib.GlorotNormalInitializer());

                Function times = CNTKLib.Times(weight, lastLayer);
                Function plus  = CNTKLib.Plus(times, bias);
                if (i != layers.Length - 2)
                {
                    lastLayer = CNTKLib.Sigmoid(plus);
                }
                else
                {
                    lastLayer = CNTKLib.Abs(plus);
                }
            }
            _y = lastLayer;
        }
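
Examples #5 and #6 appear to belong to the same StockPricePrediction class, so a minimal driver might look like this (a sketch only; the DataSet type and how it is loaded are assumptions):

        // hypothetical sketch: build the network from example #6 and train it with example #5
        var dataSet   = new DataSet(/* load the input/output series here */);
        var predictor = new StockPricePrediction(32, dataSet);
        predictor.Train();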
Example #7
        public static Function HuberLoss(Variable input1, Variable input2, DeviceDescriptor device)
        {
            var error     = CNTKLib.Minus(input1, input2);
            var square    = CNTKLib.ElementDivide(CNTKLib.Square(error), Constant.Scalar(2.0f, device));
            var linear    = CNTKLib.Minus(CNTKLib.Abs(error), Constant.Scalar(0.5f, device));
            var useLinear = CNTKLib.Cast(CNTKLib.GreaterEqual(linear, Constant.Scalar(0.5f, device)), DataType.Float);

            return(CNTKLib.ElementTimes(linear, useLinear).Output + CNTKLib.ElementTimes(square, CNTKLib.Minus(Constant.Scalar(1.0f, device), useLinear)).Output);
        }
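
This realizes the Huber loss with delta = 1: the quadratic term error^2 / 2 applies while |error| < 1, the linear term |error| - 0.5 applies beyond that, and useLinear is the mask that switches between the two branches. A usage sketch (assuming prediction, target, and device already exist):

        // hypothetical sketch: reduce the element-wise Huber loss to a scalar training criterion
        var perElement = HuberLoss(prediction, target, device);
        var loss       = CNTKLib.ReduceSum(perElement, Axis.AllAxes());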
Example #8
        public override Function GetLoss(Variable prediction, Variable targets, DeviceDescriptor device)
        {
            var error = CNTKLib.Minus(prediction, targets);

            return(CNTKLib.Abs(error));
        }
Example #9
        /// <summary>
        /// Computes the mean absolute error.
        /// </summary>
        /// <param name="labels">The labels.</param>
        /// <param name="predictions">The predictions.</param>
        /// <returns>Function.</returns>
        internal static Function MeanAbsError(Variable labels, Variable predictions)
        {
            return(CNTKLib.ReduceMean(CNTKLib.Abs(CNTKLib.Minus(predictions, labels)), new Axis(-1)));
        }
Example #10
        public static Function MeanAbsoluteError(Variable prediction, Variable targets)
        {
            return(CNTKLib.ReduceMean(CNTKLib.Abs(CNTKLib.Minus(targets, prediction)), new Axis(-1)));
        }