Example #1
0
        /// <summary>
        /// Applies a per-batch adaptive (RMSProp/Adam-style scaled) weight update to a
        /// single layer, in place. For Convolution layers the accumulated batch gradient
        /// is read from weight slot [..][..][2] and the new value written to slot
        /// [..][..][0]; for Connected layers the gradient is read from
        /// <c>Synapse.Weights[2]</c> and <c>Synapse.W</c> is updated. Any other layer
        /// type is left untouched.
        /// </summary>
        /// <param name="lyr">The layer whose weights are adjusted in place.</param>
        protected void AdjustWeights(Layer lyr)
        {
            // Exact-type tests: same semantics as the original
            // GetType().ToString() == "AI.ML.CNN.Layers.Convolution" string compare,
            // but refactor-safe (survives a rename/namespace move at compile time).
            bool convtype = lyr.GetType() == typeof(AI.ML.CNN.Layers.Convolution);
            bool conctype = lyr.GetType() == typeof(AI.ML.CNN.Layers.Connected);

            // NOTE(review): wv/ws are reset to 0 before every single weight, so the
            // (beta1 * wv) and (beta2 * ws) momentum terms are always zero and the
            // update degenerates to learningRate * g / sqrt((1-beta2) * g^2 + 1e-7)
            // per weight. A true Adam/RMSProp needs the first/second moments persisted
            // per weight across batches — TODO confirm where that state should live.
            double wv, ws, diff, gradient;

            if (convtype)
            {
                CNN.Layers.Convolution convLyr = (CNN.Layers.Convolution)lyr;
                AI.ML.CNN.Layers.Convolution.Kernel krn;
                double?[][][] weights;

                for (int j = 0; j < convLyr.Filters.Count; j++)
                {
                    var filter = (AI.ML.CNN.Layers.Convolution.Filter)convLyr.Filters[j];

                    // Kernel 0 is special-cased: only the single weight triple at
                    // [0][0][*] is updated ([0] = value, [2] = accumulated gradient).
                    krn      = (AI.ML.CNN.Layers.Convolution.Kernel)filter.Kernels[0];
                    weights  = krn.Weights;
                    wv       = 0;
                    ws       = 0;
                    wv       = (beta1.Value * wv) + (1 - beta1.Value) * (weights[0][0][2].Value / batchSize.Value);
                    ws       = (beta2.Value * ws) + (1 - beta2.Value) * System.Math.Pow(weights[0][0][2].Value / batchSize.Value, 2);
                    gradient = learningRate.Value * (wv / System.Math.Sqrt(ws + 1e-7)); // 1e-7 guards against divide-by-zero
                    diff     = weights[0][0][0].Value - gradient;
                    krn.Weights[0][0][0] = diff;

                    // Remaining kernels (k >= 1) update every [m][n] weight triple.
                    for (int k = 1; k < filter.Kernels.Length; k++)
                    {
                        krn     = (AI.ML.CNN.Layers.Convolution.Kernel)filter.Kernels[k];
                        weights = krn.Weights;
                        for (int m = 0; m < weights.Length; m++)
                        {
                            for (int n = 0; n < weights[m].Length; n++)
                            {
                                wv       = 0;
                                ws       = 0;
                                wv       = (beta1.Value * wv) + (1 - beta1.Value) * (weights[m][n][2].Value / batchSize.Value);
                                ws       = (beta2.Value * ws) + (1 - beta2.Value) * System.Math.Pow(weights[m][n][2].Value / batchSize.Value, 2);
                                gradient = learningRate.Value * (wv / System.Math.Sqrt(ws + 1e-7));
                                diff     = weights[m][n][0].Value - gradient;
                                krn.Weights[m][n][0] = diff;
                            }
                        }
                    }
                }
            }
            else if (conctype)
            {
                CNN.Layers.Connected connLyr = (CNN.Layers.Connected)lyr;
                for (int j = 0; j < connLyr.Neurons.Length; j++)
                {
                    Neuron neuron = connLyr.Neurons[j];
                    for (int k = 0; k < neuron.Synapse.Count; k++)
                    {
                        Synapse syn = neuron.Synapse[k];
                        wv       = 0;
                        ws       = 0;
                        wv       = (beta1.Value * wv) + (1 - beta1.Value) * (syn.Weights[2].Value / batchSize.Value);
                        ws       = (beta2.Value * ws) + (1 - beta2.Value) * System.Math.Pow(syn.Weights[2].Value / batchSize.Value, 2);
                        gradient = learningRate.Value * (wv / System.Math.Sqrt(ws + 1e-7));
                        diff     = syn.W.Value - gradient;
                        syn.W    = diff;
                    }
                }
            }
            // Any other layer type (pooling, input, ...) has no weights to adjust.
        }
Example #2
0
        /// <summary>
        /// Applies plain SGD weight updates to every trainable layer of the model.
        /// Layer index 0 is skipped (input layer carries no trainable weights).
        /// Convolution kernels receive <c>learningRate * WeightCorrection</c> added to
        /// each weight's value slot; Connected synapses have
        /// <c>learningRate * dW</c> subtracted from <c>W</c>. Other layer types are
        /// left untouched.
        /// </summary>
        protected void AdjustWeights()
        {
            // NOTE(review): the convolution branch ADDS its correction while the
            // connected branch SUBTRACTS its gradient. That is only consistent if
            // WeightCorrection is already negated upstream — TODO confirm the sign
            // convention against the backprop code.
            for (int i = 1; i < Model.Layers.Length; i++)
            {
                Model.Unit lyr = Model.Layers[i];

                // Exact-type tests: same semantics as the original
                // GetType().ToString() string comparison, but refactor-safe.
                if (lyr.GetType() == typeof(AI.ML.CNN.Layers.Convolution))
                {
                    CNN.Layers.Convolution convLyr = (CNN.Layers.Convolution)lyr;
                    AI.ML.CNN.Layers.Convolution.Kernel krn;
                    double?[][] wc;

                    for (int j = 0; j < convLyr.Filters.Count; j++)
                    {
                        var filter = (AI.ML.CNN.Layers.Convolution.Filter)convLyr.Filters[j];

                        // Kernel 0 is special-cased: it holds a single weight at [0][0][0].
                        krn = (AI.ML.CNN.Layers.Convolution.Kernel)filter.Kernels[0];
                        wc  = krn.WeightCorrection;
                        krn.Weights[0][0][0] += learningRate.Value * wc[0][0];

                        // Remaining kernels (k >= 1) update every [m][n] weight.
                        for (int k = 1; k < filter.Kernels.Length; k++)
                        {
                            krn = (AI.ML.CNN.Layers.Convolution.Kernel)filter.Kernels[k];
                            double?[][][] weights = krn.Weights;
                            wc = krn.WeightCorrection;
                            for (int m = 0; m < weights.Length; m++)
                            {
                                for (int n = 0; n < weights[m].Length; n++)
                                {
                                    krn.Weights[m][n][0] += learningRate.Value * wc[m][n];
                                }
                            }
                        }
                    }
                }
                else if (lyr.GetType() == typeof(AI.ML.CNN.Layers.Connected))
                {
                    CNN.Layers.Connected connLyr = (CNN.Layers.Connected)lyr;
                    for (int j = 0; j < connLyr.Neurons.Length; j++)
                    {
                        Neuron neuron = connLyr.Neurons[j];
                        for (int k = 0; k < neuron.Synapse.Count; k++)
                        {
                            Synapse syn = neuron.Synapse[k];
                            syn.W -= learningRate * syn.dW;
                        }
                    }
                }
                // Other layer types (pooling, input, ...) have no weights to adjust.
            }
        }