Example #1
        public MLUnit3(int NA, int NB, MLUnit3 preM, Func<double, double> ActFunc, Func<double, double> BackFunc,
                       double gamma)
        {
            ID = ID0++;
            if (preM != null)                 // link this unit behind its predecessor in the layer chain
            {
                preM.nxtMLU = this; this.preMLU = preM;
            }
            this.ActFunc = ActFunc; this.BackFunc = BackFunc;
            this.NA      = NA; this.NB = NB;

            U_lst = new DenseVector(NA);      // pre-activations u = W·z of the previous layer
            Z_lst = new DenseVector(NA);      // activations z = f(u)
            D_lst = new DenseVector(NA);      // back-propagated deltas

            this.gamma = gamma;
            if (NB > 0)                       // only layers with a successor own weights
            {
                DropoutLst = new bool[NA];            // per-unit dropout mask
                W_lst      = new DenseMatrix(NB, NA); // weights to the next layer
                dW_lst     = new DenseMatrix(NB, NA); // current weight gradient
                pdW_lst    = new DenseMatrix(NB, NA); // previous gradient (presumably for momentum)
                Learning_Init();
            }
            WriteLine($"MLUnit3 ID:{ID} NA:{NA} NB:{NB} DropOutEnable:{DropOutEnable}");
        }
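
The field declarations of MLUnit3 do not appear in these excerpts. The sketch below is inferred from how the fields are used in the four examples; the names are the author's, but the declarations themselves are guesses (it assumes the MathNet.Numerics dense types, whose 2.x-era operators return DenseVector/DenseMatrix directly):

        // Hypothetical field sketch, reconstructed from usage; not the author's actual declarations.
        public class MLUnit3
        {
            public static int ID0;                         // running counter for layer IDs
            public int ID;
            public int NA, NB;                             // units in this layer / in the next layer
            public MLUnit3 preMLU, nxtMLU;                 // doubly linked chain of layers
            public Func<double, double> ActFunc, BackFunc; // activation and its derivative
            public double gamma;                           // dropout keep probability (1.0 for the output layer)
            public bool DropOutEnable;
            public bool[] DropoutLst;                      // per-unit dropout mask (true = drop)
            public DenseVector U_lst, Z_lst, D_lst;        // pre-activations, activations, deltas
            public DenseMatrix W_lst, dW_lst, pdW_lst;     // weights, gradient, previous gradient
        }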
Example #2
        private void RecursiveCalcu_Propagation(MLUnit3 P, ULeData X, bool DoB, bool DispB)
        {
            //===== Forward calculation =====
//$$        ApplyDropout_____(P, DoB: P.DropOutEnable);   // (disabled) dropout on the forward pass
            P.Z_lst[0] = 1.0;                             // index 0 is the bias unit
            var Q = P.nxtMLU;

            Q.U_lst = P.W_lst * P.Z_lst;                  // u = W·z
            Q.U_lst.Apply(Q.Z_lst, x => P.ActFunc(x));    // activation: Z = f(U), element-wise into Z_lst

            //===== Backward calculation =====
            if (Q.nxtMLU != null)                             // Intermediate layer
            {
                RecursiveCalcu_Propagation(Q, X, DoB, DispB); // [Next layer]
            }
            else                                              // Final layer
            {
                // One-hot target: 1.0 at the answer index X.ans, 0.0 elsewhere
                DenseVector E = DenseVector.Create(Nout, p => (p == X.ans)?1.0:0.0);
                Q.D_lst = E - Q.Z_lst;                        // output error (target - output)
            }

            DenseVector SU = sigmoidFDash(P.U_lst);       // f'(u), element-wise (uses sigmoidFDash directly rather than P.BackFunc)
            DenseMatrix Wt = (DenseMatrix)P.W_lst.Transpose();
            DenseVector WD = Wt * Q.D_lst;                // pull the next layer's deltas back through the weights

            P.D_lst = eProduct(SU, WD);                   // Hadamard product: δ = f'(u) ∘ (Wᵀ·δ_next) //②
//$$        ApplyDropout_____(P, DoB: true);              // (disabled) dropout on the backward pass
            P.dW_lst = vvProduct(Q.D_lst, P.Z_lst);       // outer product: dW = δ_next·zᵀ
            return;
        }
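
The helpers eProduct, vvProduct, sigmoidFDash, and the Apply call are referenced above but not defined in these excerpts. Below is a minimal sketch of what they plausibly look like, assuming current MathNet.Numerics types; everything here is a reconstruction inferred from the call sites, not the author's code:

        // Hypothetical helper sketch; signatures inferred from the call sites above.
        using System;
        using MathNet.Numerics.LinearAlgebra;
        using MathNet.Numerics.LinearAlgebra.Double;

        static class MLUnit3Helpers
        {
            // Element-wise (Hadamard) product of two vectors.
            public static DenseVector eProduct(DenseVector a, DenseVector b)
                => (DenseVector)a.PointwiseMultiply(b);

            // Outer product: result[i, j] = d[i] * z[j], matching dW_lst's (NB x NA) shape.
            public static DenseMatrix vvProduct(DenseVector d, DenseVector z)
                => (DenseMatrix)d.OuterProduct(z);

            // Derivative of the logistic sigmoid, applied element-wise: f'(u) = f(u)(1 - f(u)).
            // Zeros.Include is required because f'(0) = 0.25, so zero entries must not be skipped.
            public static DenseVector sigmoidFDash(DenseVector u)
                => (DenseVector)u.Map(x =>
                   {
                       double s = 1.0 / (1.0 + Math.Exp(-x));
                       return s * (1.0 - s);
                   }, Zeros.Include);

            // Element-wise map into an existing result vector, matching the Apply call above.
            public static void Apply(this DenseVector src, DenseVector dst, Func<double, double> f)
            {
                for (int i = 0; i < src.Count; i++)
                {
                    dst[i] = f(src[i]);
                }
            }
        }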
Example #3
        private void   ApplyDropout_____(MLUnit3 MLU, bool DoB)
        {
            if (!dropoutB || !MLU.DropOutEnable)
            {
                return;
            }
            double gamma = MLU.gamma;
            var    Z     = MLU.Z_lst;

            if (DoB)
            {
                // Training pass: zero the activations of dropped units (the bias at index 0 is kept)
                var B = MLU.DropoutLst;
                for (int k = 1; k < MLU.NA; k++)
                {
                    if (B[k])
                    {
                        Z[k] = 0.0;
                    }
                }
            }
            else
            {
                // Inference pass: scale activations by the keep probability gamma.
                // Note: "Z *= gamma" would only rebind the local variable Z and leave
                // MLU.Z_lst untouched; assign back to the unit's field instead.
                MLU.Z_lst *= gamma;
            }
        }
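
How DropoutLst is (re)populated is not shown in these excerpts. A plausible refresh step, run before each training sample: since the inference branch above scales Z by gamma, gamma is read here as the keep probability, so a unit is dropped with probability (1 - gamma). This is a guess at the missing piece, not the author's code:

        // Hypothetical sketch: re-randomize the dropout mask before each training sample.
        private static readonly Random rng = new Random();

        private void RefreshDropoutMask(MLUnit3 MLU)
        {
            var B = MLU.DropoutLst;
            B[0] = false;                                 // never drop the bias unit (index 0)
            for (int k = 1; k < MLU.NA; k++)
            {
                B[k] = rng.NextDouble() >= MLU.gamma;     // true => drop unit k this pass
            }
        }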
Example #4
        private void SetArray_Weight(int[] NSizeLst, double gammaC = 9999.0) // 9999.0 appears to act as a "not set" sentinel
        {
            this.NSizeLst = NSizeLst;
            MLUnit3 preM = null, Q;

            MLUnit3.ID0 = 0;

            MLUs = new List <MLUnit3>();
            for (int n = 0; n < NLayer; n++)
            {
                int    NA    = NSizeLst[n];
                int    NB    = (n < NLayerM1)? NSizeLst[n + 1]: 0; // the output layer has no outgoing weights
                double gamma = gammaC;
                if (n == NLayerM1)
                {
                    gamma = 1.0;                                   // no dropout scaling on the output layer
                }
                MLUs.Add(Q = new MLUnit3(NA, NB, preM, sigmoidF, sigmoidFDash, gamma));
                preM       = Q; // the next unit will link behind this one
            }
        }
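
A hypothetical call, for illustration only. It assumes NLayer and NLayerM1 are maintained elsewhere as NSizeLst.Length and NSizeLst.Length - 1, and that layer sizes include the bias unit at index 0 (Example #2 sets Z_lst[0] = 1.0):

        // Build a 785-101-10 chain with dropout keep probability 0.8 on the lower layers.
        SetArray_Weight(new[] { 785, 101, 10 }, gammaC: 0.8);
        // -> MLUs[0]: NA=785, NB=101, gamma=0.8  (input layer, owns a 101x785 weight matrix)
        //    MLUs[1]: NA=101, NB=10,  gamma=0.8  (hidden layer, owns a 10x101 weight matrix)
        //    MLUs[2]: NA=10,  NB=0,   gamma=1.0  (output layer, no outgoing weights)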