        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>Gradient of the loss with respect to the predictions.</returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            var y_true = Ops.Max(Ops.Sum(labels, 1), 1) / (Ops.Abs(preds * Ops.Abs(labels)));
            var y_pred = Ops.Max(Ops.Sum(preds, 1), 1) / Ops.Square(Ops.Abs(preds));

            return(y_true + _cossine_sim(preds, labels) * y_pred);
        }
        /// <summary>
        /// Forwards the inputs and calculate the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>The per-sample loss values.</returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            var pos = Ops.Sum(labels * preds, 1);
            var neg = Ops.Max((1 - labels) * preds, 1);

            return(Ops.Maximum(neg - pos + 1, 0f));
        }
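        // Note: with one-hot labels y and predictions p, pos = sum_j y_j * p_j is the true-class
        // score and neg = max_{j : y_j = 0} p_j is the largest competing score, so the value
        // returned above is the standard categorical hinge form
        //     L = max(0, neg - pos + 1)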
Example #3
        /// <summary>
        /// Forwards the inputs and compute the output
        /// </summary>
        /// <param name="x">The input SuperArray for this layer.</param>
        public override void Forward(SuperArray x)
        {
            base.Forward(x);
            Output = scale * Output;
        }
Example #4
        /// <summary>
        /// Gets the next batch of data to process.
        /// </summary>
        /// <returns>A tuple of the input batch and the corresponding label batch.</returns>
        public (SuperArray, SuperArray) GetBatch()
        {
            SuperArray x = frameX.GetBatch(current, batchSize);
            SuperArray y = frameY.GetBatch(current, batchSize);

            return(x, y);
        }
        /// <summary>
        /// Computes the row-wise cosine similarity between the L2-normalized predictions and labels.
        /// </summary>
        private SuperArray _cossine_sim(SuperArray preds, SuperArray labels)
        {
            var y_true = Utils.L2Normalize(labels, 1);
            var y_pred = Utils.L2Normalize(preds, 1);

            return(Ops.Sum(y_true * y_pred, 1));
        }
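        // Note: the helper above returns the row-wise cosine similarity
        //     sim(y, p) = sum_j (y_j / ||y||_2) * (p_j / ||p||_2)
        // which Backward combines with the two normalization terms it builds from preds and labels.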
        /// <summary>
        /// Runs the train on batch.
        /// </summary>
        /// <param name="i">The i.</param>
        /// <param name="x">The x.</param>
        /// <param name="y">The y.</param>
        private void RunTrainOnBatch(int i, SuperArray x, SuperArray y)
        {
            SuperArray pred    = Forward(x);
            SuperArray lossVal = LossFn.Forward(pred, y);
            SuperArray grad    = LossFn.Backward(pred, y).Reshape(-1, 1);

            lossVal = ApplyRegularizer(lossVal);
            var metricVal = MetricFn.Calc(pred, y);

            train_losses.Add(Ops.Mean(lossVal));
            train_metrics.Add(Ops.Mean(metricVal));

            Backward(grad);

            ApplyDeltaRegularizer();

            foreach (var layer in Layers)
            {
                OptimizerFn.Update(i, layer);
            }

            pred.Dispose();
            lossVal.Dispose();
            grad.Dispose();
        }
Example #7
        /// <summary>
        /// Forwards the inputs and calculate the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>The per-sample loss values.</returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            preds /= Ops.Sum(preds, 1);

            preds = Ops.Clip(preds, Ops.EPSILON, 1 - Ops.EPSILON);
            return(Ops.Sum(-1 * labels * Ops.Log(preds), 1));
        }
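        // Note: the Forward above rescales the predictions into a distribution, clips them to
        // [EPSILON, 1 - EPSILON] for numerical stability, and returns the categorical
        // cross-entropy per sample:
        //     L = -sum_j y_j * log(p_j)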
Example #8
        /// <summary>
        /// Calculate the gradient of this layer function
        /// </summary>
        /// <param name="outputgrad">The calculated output grad from previous layer.</param>
        public override void Backward(SuperArray outputgrad)
        {
            pos_relu.Backward(outputgrad);
            neg_relu.Backward(outputgrad);

            Input.Grad = pos_relu.Input.Grad - Params["a"].Data * neg_relu.Input.Grad;
        }
Example #9
        public static SuperArray Softmax(SuperArray x, uint axis = 1)
        {
            var e = Ops.Exp(x - Ops.Max(x, axis));
            var s = Ops.Sum(e, axis);

            return(e / s);
        }
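        // Note: subtracting the row-wise maximum before exponentiating avoids overflow in Exp
        // without changing the result:
        //     softmax(x)_i = exp(x_i - max_j x_j) / sum_k exp(x_k - max_j x_j)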
        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>Gradient of the loss with respect to the predictions.</returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            var y_true = Ops.Clip(labels, Ops.EPSILON, 1);
            var y_pred = Ops.Clip(preds, Ops.EPSILON, 1);

            return(Ops.Maximum((-1 * (y_true / y_pred)), 0));
        }
Example #11
        private static (ImageFrame, ImageFrame) BuildSet(DigitImage[] images, bool flatten = false)
        {
            var inputs  = new SuperArray(images.Length, 1, ImageSize, ImageSize);
            var outputs = new SuperArray(images.Length, 10);

            List <float> data = new List <float>();

            for (int i = 0; i < images.Length; ++i)
            {
                var target = inputs.Select(0, i);

                //Variable.FromArray(images[i].pixels, cpuAllocator)
                //    .AsType(DType.Float32)
                //    .ToDevice(Global.Device)
                //    .Evaluate(target);

                target = target / 255;
            }

            Global.OP.FillOneHot(outputs, LabelCount, images.Select(x => (int)x.label).ToArray());
            if (flatten)
            {
                inputs = inputs.Reshape(images.Length, 784);
            }
            return(new ImageFrame(inputs), new ImageFrame(outputs));
        }
Example #12
        /// <summary>
        /// Generates output predictions for the input samples. Computation is done in batches.
        /// </summary>
        /// <param name="x">The input data frame to run prediction.</param>
        /// <param name="batch_size">Size of the batch.</param>
        /// <returns>A data frame containing the predictions.</returns>
        public DataFrame Predict(DataFrame x, int batch_size)
        {
            DataFrameIter dataFrameIter = new DataFrameIter(x);
            List <float>  predictions   = new List <float>();

            dataFrameIter.SetBatchSize(batch_size);

            while (dataFrameIter.Next())
            {
                var        data   = dataFrameIter.GetBatchX();
                SuperArray output = data;
                foreach (var layer in Layers)
                {
                    if (layer.SkipPred)
                    {
                        continue;
                    }

                    layer.Forward(output);
                    output = layer.Output;
                }

                predictions.AddRange(output.List <float>());
            }

            DataFrame result = new DataFrame();

            result.Load(predictions.ToArray());

            return(result);
        }
Example #13
        /// <summary>
        /// Forwards the inputs and compute the output
        /// </summary>
        /// <param name="x">The input SuperArray for this layer.</param>
        public override void Forward(SuperArray x)
        {
            base.Forward(x);
            var keepElements = x >= 0;

            Output = x * keepElements + (Alpha * x * (1 - keepElements));
        }
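        // Note: positive inputs pass through unchanged while negative inputs are scaled by Alpha,
        // i.e. f(x) = x for x >= 0 and f(x) = Alpha * x otherwise (a leaky-ReLU style activation).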
Example #14
        /// <summary>
        /// Calculate the gradient of this layer function
        /// </summary>
        /// <param name="outputgrad">The calculated output grad from previous layer.</param>
        public override void Backward(SuperArray outputgrad)
        {
            var s = Output.Reshape(-1, 1);
            var d = Ops.Diag(s) - Ops.Dot(s, s.Transpose());

            Input.Grad = outputgrad * Ops.Sum(d, 1).Reshape(Input.Data.Shape);
        }
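        // Note: diag(s) - s * s^T is the Jacobian of the softmax output s; the code above collapses
        // it with a row sum and applies it elementwise to the incoming gradient.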
Example #15
        public override SuperArray Mod(SuperArray x, float scalar)
        {
            SuperArray rhs_arr = new SuperArray(x.Shape);

            rhs_arr.Fill(scalar);
            return(ExecuteReturn(GetFuncName("ndarr_mod", x.ElementType), new object[] { x, rhs_arr }));
        }
        /// <summary>
        /// Forwards the inputs and calculate the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>The per-sample loss values.</returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            var y_true = Ops.Clip(labels, Ops.EPSILON, 1);
            var y_pred = Ops.Clip(preds, Ops.EPSILON, 1);

            return(Ops.Sum(y_true * Ops.Log(y_true / y_pred), 1));
        }
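        // Note: after clipping both distributions away from zero, the Forward above returns the
        // Kullback-Leibler divergence per sample:
        //     D_KL(y || p) = sum_j y_j * log(y_j / p_j)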
Example #17
        public override void Forward(SuperArray x)
        {
            base.Forward(x);
            var(n, c, h, w) = x.GetConv2DShape();

            int pad = 0;

            if (Padding == PaddingType.Same)
            {
                pad = 1;
            }
            else if (Padding == PaddingType.Full)
            {
                pad = 2;
            }

            var h_out = (h - PoolSize.Item1 + 2 * pad) / Strides + 1;
            var w_out = (w - PoolSize.Item2 + 2 * pad) / Strides + 1;

            var x_reshaped = x.Reshape(n * c, 1, h, w);

            xCols  = ImUtil.Im2Col(x_reshaped, PoolSize, pad, Strides);
            Output = Ops.Max(xCols, 0);
            Output = Output.Reshape(h_out, w_out, n, c).Transpose(2, 3, 0, 1);
        }
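        // Note: with the pad and stride set above, the pooled spatial size follows the usual
        // convolution arithmetic:
        //     h_out = (h - pool_h + 2 * pad) / stride + 1
        //     w_out = (w - pool_w + 2 * pad) / stride + 1
        // Im2Col lays every pooling window out as a column, so Max over axis 0 gives the pooled
        // values, which are then reshaped and transposed back to (n, c, h_out, w_out).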
Example #18
        /// <summary>
        /// Updates the layer parameters for the specified iteration.
        /// </summary>
        /// <param name="iteration">The current iteration number.</param>
        /// <param name="layer">The layer whose parameters are updated.</param>
        internal override void Update(int iteration, BaseLayer layer)
        {
            if (DecayRate > 0)
            {
                LearningRate = LearningRate * (1 / (1 + DecayRate * iteration));
            }

            float t    = iteration + 1;
            float lr_t = Convert.ToSingle(LearningRate / (1f - Math.Pow(Beta1, t)));

            foreach (var item in layer.Params)
            {
                var param = item.Value;

                if (!ms.ContainsKey(param.Name))
                {
                    ms[param.Name] = SuperArray.Constant(0f, param.Data.Shape);
                    us[param.Name] = SuperArray.Constant(0f, param.Data.Shape);
                }

                var m_t = (Beta1 * ms[param.Name]) + (1 - Beta1) * param.Grad;
                var u_t = Ops.Maximum((Beta2 * us[param.Name]), Ops.Abs(param.Grad));

                param.Data     = param.Data - LearningRate * m_t / (u_t + Ops.EPSILON);
                ms[param.Name] = m_t;
                us[param.Name] = u_t;

                param.ApplyConstraint();
            }
        }
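        // Note: the Update above matches the Adamax rule. For gradient g_t,
        //     m_t = Beta1 * m_{t-1} + (1 - Beta1) * g_t
        //     u_t = max(Beta2 * u_{t-1}, |g_t|)
        //     param = param - LearningRate * m_t / (u_t + EPSILON)
        // (lr_t is computed for bias correction, but the raw LearningRate is what the step applies).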
        /// <summary>
        /// Calculates the metric with predicted and true values.
        /// </summary>
        /// <param name="preds">The predicted value.</param>
        /// <param name="labels">The true value.</param>
        /// <returns>Element-wise result of comparing the rounded predictions with the labels.</returns>
        public override SuperArray Calc(SuperArray preds, SuperArray labels)
        {
            preds = Ops.Clip(preds, 0, 1);
            var r = Ops.EqualTo(Ops.Round(preds.Ravel()), labels.Ravel());

            return(r);
        }
Example #20
        public override void Backward(SuperArray outputgrad)
        {
            int pad = 0;

            if (Padding == PaddingType.Same)
            {
                pad = 1;
            }
            else if (Padding == PaddingType.Full)
            {
                pad = 2;
            }

            var dout_flat = outputgrad.Transpose(2, 0, 1).Reshape(Filters, -1);
            var dW        = Ops.Dot(dout_flat, xCols.Transpose());

            dW = dW.Reshape(Params["w"].Data.Shape);
            var db     = Ops.Sum(outputgrad, new uint[] { 0, 1, 2 }).Reshape(Filters, -1);
            var W_flat = Params["w"].Data.Reshape(Filters, -1);

            var dX_col = Ops.Dot(W_flat.Transpose(), dout_flat);

            Input.Grad = ImUtil.Col2Im(dX_col, Input.Data.Shape.Dims, Tuple.Create(KernalSize, KernalSize), pad, Strides);

            Params["w"].Grad = dW;
            if (UseBias)
            {
                Params["b"].Grad = db;
            }
        }
Example #21
        /// <summary>
        /// Forwards the inputs and calculate the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>The per-sample loss values.</returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            var first_log  = Ops.Log(Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1);
            var second_log = Ops.Log(Ops.Clip(labels, Ops.EPSILON, float.MaxValue) + 1);

            return(Ops.Mean(Ops.Square(first_log - second_log), 1));
        }
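        // Note: the Forward above is the mean squared logarithmic error per sample,
        //     L = mean_j (log(p_j + 1) - log(y_j + 1))^2
        // with both inputs clipped to [EPSILON, float.MaxValue] before the log is taken.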
Example #22
        /// <summary>
        /// Forwards the inputs and compute the output
        /// </summary>
        /// <param name="x">The input SuperArray for this layer.</param>
        public override void Forward(SuperArray x)
        {
            base.Forward(x);
            var keepElements = x > 0;

            Output = x * keepElements + (1 - keepElements) * 0;
        }
Example #23
        /// <summary>Invokes the constraints</summary>
        /// <param name="w">The weight SuperArray</param>
        /// <returns>The constrained weight SuperArray.</returns>
        internal override SuperArray Call(SuperArray w)
        {
            SuperArray norms = Ops.Sqrt(Ops.Sum(Ops.Square(w), Axis));

            var desired = Ops.Clip(norms, 0, MaxValue);

            return(w * (desired / (Ops.EPSILON + norms)));
        }
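        // Note: the constraint above rescales w so that its L2 norm along Axis stays within MaxValue:
        //     w <- w * clip(||w||_2, 0, MaxValue) / (EPSILON + ||w||_2)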
Example #24
        public SuperArray ToArray()
        {
            SuperArray result = new SuperArray(Shape, DataType);

            result.LoadFrom(Data);

            return(result);
        }
Example #25
        /// <summary>
        /// Calculate the gradient of this layer function
        /// </summary>
        /// <param name="outputgrad">The calculated output grad from previous layer.</param>
        public override void Backward(SuperArray outputgrad)
        {
            var keepElements     = Input.Data > 0;
            var keepElements_Exp = Input.Data < 0;
            var d = Alpha * Ops.Exp(Input.Data * keepElements_Exp);

            Input.Grad = outputgrad * d;
        }
Example #26
        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns>Gradient of the loss with respect to the predictions.</returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            float norm       = 2f / preds.Shape[0];
            var   first_log  = Ops.Log(Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1);
            var   second_log = Ops.Log(Ops.Clip(labels, Ops.EPSILON, float.MaxValue) + 1);

            return(norm * (first_log - second_log) / (Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1));
        }
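        // Note: the Backward above differentiates the squared-log error with respect to the predictions,
        //     dL/dp = (2 / n) * (log(p + 1) - log(y + 1)) / (p + 1)
        // where n is the batch size (preds.Shape[0]) and p is clipped exactly as in Forward.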
Example #27
        public override SuperArray NotEqual(SuperArray lhs, float rhs)
        {
            SuperArray rhs_arr = new SuperArray(lhs.Shape);

            rhs_arr.Fill(rhs);

            return(ExecuteReturn(GetFuncName("ndarr_ne", lhs.ElementType), lhs, rhs_arr));
        }
Example #28
        public SuperArray LogNormal(float mean, float stdv, long[] shape, DType dtype)
        {
            SuperArray x = new SuperArray(shape, dtype);

            K.RandomLogNormal(x, mean, stdv, Seed);

            return(x);
        }
Example #29
        public SuperArray Geometric(float p, long[] shape, DType dtype)
        {
            SuperArray x = new SuperArray(shape, dtype);

            K.RandomGeometric(x, p, Seed);

            return(x);
        }
Example #30
        public override SuperArray Div(float lhs, SuperArray rhs)
        {
            SuperArray lhs_arr = new SuperArray(rhs.Shape);

            lhs_arr.Fill(lhs);

            return(ExecuteReturn(GetFuncName("ndarr_div", rhs.ElementType), lhs_arr, rhs));
        }