Example #1
        public void updateWi()
        {
            // Output layer: seed backpropagation with the loss value of each output neuron
            FullyLayer lastFullyLayer = fullyLayers[fullyLayers.Count - 1];

            for (int i = 0; i < lastFullyLayer.fullyneurns.Count; i++)
            {
                lastFullyLayer.fullyneurns[i].backpropagation(lossValue[i]);
            }
            // Hidden layers: propagate sensitivities backwards layer by layer
            for (int i = fullyLayers.Count - 1; i > 0; i--)
            {
                backward(fullyLayers[i - 1], fullyLayers[i]);
            }
            // Output sensitivities: collect the first layer's sensitivities for each mini-batch sample
            if (OutSensitive == null)
            {
                OutSensitive = new List<float[]>(AgentClass.Mini_batchsize);
                for (int i = 0; i < AgentClass.Mini_batchsize; i++)
                {
                    OutSensitive.Add(new float[NumberOfLayorNeuron[0]]);
                }
            }
            for (int i = 0; i < AgentClass.Mini_batchsize; i++)
            {
                for (int j = 0; j < fullyLayers[0].fullyneurns.Count; j++)
                {
                    OutSensitive[i][j] = fullyLayers[0].fullyneurns[j].SensitiveValue[i];
                }
            }
            // Update weights
            // Initialize the Adam moment buffers (S, V) lazily on the first call
            if (isAdamInit == false)
            {
                for (int i = 0; i < fullyLayers.Count - 1; i++)
                {
                    for (int j = 0; j < fullyLayers[i].fullyneurns.Count; j++)
                    {
                        Fullyneuron fullyneuron = fullyLayers[i].fullyneurns[j];
                        if (fullyneuron.S == null)
                        {
                            fullyneuron.S = new float[NumberOfLayorNeuron[i + 1]];
                            fullyneuron.V = new float[NumberOfLayorNeuron[i + 1]];
                        }
                    }
                }
                isAdamInit = true;
            }
            // Buffer holding dw for each sample in the mini-batch
            float[] dwArray = new float[AgentClass.Mini_batchsize];
            // Update weights: average dw over the mini-batch, then apply the Adam step
            for (int i = 0; i < fullyLayers.Count - 1; i++)
            {
                float dw = 0;
                for (int j = 0; j < fullyLayers[i].fullyneurns.Count; j++)
                {
                    Fullyneuron fullyneuron = fullyLayers[i].fullyneurns[j];
                    for (int k = 0; k < fullyneuron.Wi.Length; k++)
                    {
                        for (int mini = 0; mini < AgentClass.Mini_batchsize; mini++)
                        {
                            dwArray[mini] = fullyneuron.Xout[mini] * fullyLayers[i + 1].fullyneurns[k].SensitiveValue[mini];
                        }
                        dw = ArrayAction.average(dwArray);
                        fullyneuron.AdamUpdateWi(dw, k);
                        //float quanzhi = Adam.GetAdamNumber(fullyneuron.V[k], fullyneuron.S[k], dw,out fullyneuron.V[k],out fullyneuron.S[k]);
                        //fullyneuron.Wi[k] -= AgentClass.Study_rate * quanzhi;
                    }
                }
            }
        }
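
The listing calls fullyneuron.AdamUpdateWi(dw, k), whose body is not shown. Below is a minimal, hypothetical sketch of what such a per-weight Adam update could look like, assuming V[k] and S[k] are the first- and second-moment estimates allocated above (consistent with the commented-out Adam.GetAdamNumber call) and that the learning rate plays the role of AgentClass.Study_rate. The class and constant names (AdamNeuronSketch, Beta1, Beta2, Epsilon) are illustrative, not taken from the original project.

        using System;

        // Hypothetical per-weight Adam update, sketching what AdamUpdateWi might do.
        public class AdamNeuronSketch
        {
            const float Beta1 = 0.9f;       // decay rate of the first-moment estimate
            const float Beta2 = 0.999f;     // decay rate of the second-moment estimate
            const float Epsilon = 1e-8f;    // numerical stability term

            public float[] Wi;              // weights of this neuron
            public float[] V;               // first-moment estimate per weight
            public float[] S;               // second-moment estimate per weight
            readonly int[] step;            // Adam time step per weight
            readonly float learningRate;

            public AdamNeuronSketch(int fanOut, float learningRate)
            {
                Wi = new float[fanOut];
                V = new float[fanOut];
                S = new float[fanOut];
                step = new int[fanOut];
                this.learningRate = learningRate;
            }

            // One Adam step on weight k, given the mini-batch-averaged gradient dw.
            public void AdamUpdateWi(float dw, int k)
            {
                step[k]++;
                V[k] = Beta1 * V[k] + (1 - Beta1) * dw;        // moving average of gradients
                S[k] = Beta2 * S[k] + (1 - Beta2) * dw * dw;   // moving average of squared gradients
                float vHat = V[k] / (1 - (float)Math.Pow(Beta1, step[k]));   // bias correction
                float sHat = S[k] / (1 - (float)Math.Pow(Beta2, step[k]));
                Wi[k] -= learningRate * vHat / ((float)Math.Sqrt(sHat) + Epsilon);
            }
        }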