Example #1
        public override void Update(int iteration, BaseLayer layer)
        {
            //If the decay rate is greater than 0, adjust the learning rate per iteration (1/t decay)
            if (DecayRate > 0)
            {
                LearningRate = LearningRate * (1 / (1 + DecayRate * iteration));
            }

            //Loop through all the parameters in the layer
            foreach (var p in layer.Parameters.ToList())
            {
                //Get the parameter name
                string paramName = p.Key;

                //Create a unique name to store in the dictionary
                string varName = layer.Name + "_" + p.Key;

                //Get the weight values
                NDArray param = p.Value;

                //Get the gradient/partial derivative values
                NDArray grad = layer.Grads[paramName];

                //If this is the first time, initialise the first-moment moving average with 0
                if (!ms.ContainsKey(varName))
                {
                    var ms_new = new NDArray(param.Shape);
                    ms_new.Fill(0);
                    ms[varName] = ms_new;
                }

                //If this is the first time, initialise the second-moment (squared) moving average with 0
                if (!vs.ContainsKey(varName))
                {
                    var vs_new = new NDArray(param.Shape);
                    vs_new.Fill(0);
                    vs[varName] = vs_new;
                }

                //Update the exponential moving average of the gradient (first moment), weighted by Beta1
                ms[varName] = (Beta1 * ms[varName]) + (1 - Beta1) * grad;

                //Update the exponential moving average of the squared gradient (second moment), weighted by Beta2
                vs[varName] = (Beta2 * vs[varName]) + (1 - Beta2) * Square(grad);

                //Apply bias correction to the moving averages
                var m_cap = ms[varName] / (1 - Math.Pow(Beta1, iteration));
                var v_cap = vs[varName] / (1 - Math.Pow(Beta2, iteration));

                //Update the weights of the neurons
                layer.Parameters[paramName] = param - (LearningRate * m_cap / (Sqrt(v_cap) + Epsilon));
            }
        }
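For reference, the loop body above implements the standard Adam update. Writing the gradient of a parameter \theta at step t as g_t, the quantities stored in ms and vs and the final weight update correspond to:

    m_t = \beta_1 m_{t-1} + (1 - \beta_1)\, g_t
    v_t = \beta_2 v_{t-1} + (1 - \beta_2)\, g_t^2
    \hat{m}_t = \frac{m_t}{1 - \beta_1^{\,t}}, \qquad \hat{v}_t = \frac{v_t}{1 - \beta_2^{\,t}}
    \theta_t = \theta_{t-1} - \eta\, \frac{\hat{m}_t}{\sqrt{\hat{v}_t} + \epsilon}

where \eta is the (optionally decayed) LearningRate and \epsilon is the Epsilon stabiliser that keeps the denominator away from zero.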
Example #2
        static void Main(string[] args)
        {
            Operations K = new Operations();

            //Load values into the tensor
            NDArray a = new NDArray(3, 6);

            a.Load(1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1);
            a.Print("Load array");

            //Transpose of the matrix
            NDArray t = a.Transpose();

            t.Print("Transpose");

            //Create a tensor with all values set to 5
            NDArray b = new NDArray(6, 3);

            b.Fill(5);
            b.Print("Constant 5");

            //Create a tensor with all values set to 3
            NDArray c = new NDArray(6, 3);

            c.Fill(3);
            c.Print("Constant 3");

            // Subtract the two tensors element-wise
            b = b - c;

            //Perform dot product
            NDArray r = K.Dot(a, b);

            r.Print("Dot product");

            Console.ReadLine();
        }
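Note that the shapes line up for the dot product: a is 3x6 and b is 6x3, so the result r is 3x3. As a minimal follow-up sketch, the method below reuses only the members already exercised above (the NDArray constructor, Load, Transpose, Print and Operations.Dot); the method name GramExample is just illustrative, not part of the library.

        static void GramExample()
        {
            Operations K = new Operations();

            //Load the same 3x6 values used in the example above
            NDArray a = new NDArray(3, 6);
            a.Load(1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1);

            //Dot product of a (3x6) with its transpose (6x3) produces a 3x3 Gram-style matrix
            NDArray gram = K.Dot(a, a.Transpose());
            gram.Print("Gram matrix");
        }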