Example #1
        // Weighted sum of the previous layer's outputs feeding neuron
        // `fullyneurnIndex` of the next layer, for mini-batch sample `Xindex`.
        private float GetWi_Xi(FullyLayer lastfullyLayer, int fullyneurnIndex, int Xindex)
        {
            float sum = 0;

            for (int i = 0; i < lastfullyLayer.fullyneurns.Count; i++)
            {
                Fullyneuron fullyneuron = lastfullyLayer.fullyneurns[i];
                sum += fullyneuron.Wi[fullyneurnIndex] * fullyneuron.Xout[Xindex];
            }
            return sum;
        }
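This helper only touches two fields of the surrounding types: each previous-layer neuron's weight vector `Wi` (one entry per neuron of the next layer) and its per-sample activations `Xout`. A minimal sketch of what those types might look like is given below; the field names follow the snippets, but the class bodies are reconstructed and are only an assumption about the original project.

        using System.Collections.Generic;

        // Hypothetical minimal reconstruction of the types referenced by GetWi_Xi.
        // Only the members used in these snippets are listed; the real classes
        // clearly contain more (forward, backpropagation, AdamUpdateWi, ...).
        public class Fullyneuron
        {
            public float[] Wi;             // weight toward each neuron of the next layer
            public float[] Xout;           // activation for each sample of the mini-batch
            public float[] SensitiveValue; // per-sample error signal used in backprop
            public float[] S;              // ADAM second-moment accumulator, one per weight
            public float[] V;              // ADAM first-moment accumulator, one per weight
        }

        public class FullyLayer
        {
            public List<Fullyneuron> fullyneurns;
        }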
Example #2
        public void caculate()
        {
            if (InputArray == null || NumberOfLayorNeuron == null)
            {
                Console.WriteLine("Error! Location:\r\nFullyNet->caculate()");
                return;
            }
            // Initialize the neurons
            if (fullyLayers == null)
            {
                NumberOfLayorNeuron[0] = InputArray[0].Length;
                fullyLayers            = new List <FullyLayer>(NumberOfLayorNeuron.Length);
                for (int i = 0; i < NumberOfLayorNeuron.Length; i++)
                {
                    fullyLayers.Add(new FullyLayer());
                }
                for (int i = 0; i < fullyLayers.Count; i++)
                {
                    if (fullyLayers[i].fullyneurns == null)
                    {
                        fullyLayers[i].fullyneurns = new List <Fullyneuron>(NumberOfLayorNeuron[i]);
                        for (int j = 0; j < NumberOfLayorNeuron[i]; j++)
                        {
                            fullyLayers[i].fullyneurns.Add(new Fullyneuron());
                        }
                    }
                }
                // Initialize the weights
                for (int i = 0; i < fullyLayers.Count - 1; i++)
                {
                    for (int j = 0; j < fullyLayers[i].fullyneurns.Count; j++)
                    {
                        Fullyneuron fullyneuron = fullyLayers[i].fullyneurns[j];
                        if (fullyneuron.Wi == null)
                        {
                            fullyneuron.Wi = new float[NumberOfLayorNeuron[i + 1]];
                            for (int k = 0; k < NumberOfLayorNeuron[i + 1]; k++)
                            {
                                fullyneuron.Wi[k] = W_value_method(NumberOfLayorNeuron[i]);
                                //fullyneuron.BN.Gamma = W_value_method(NumberOfLayorNeuron[i]);
                                //fullyneuron.BN.Bata = W_value_method(NumberOfLayorNeuron[i]);
                            }
                        }
                    }
                }
            }
            // First-layer input
            FullyLayer firstLayer = fullyLayers[0];

            for (int i = 0; i < firstLayer.fullyneurns.Count; i++)
            {
                float[] XiArray = new float[AgentClass.Mini_batchsize];
                for (int j = 0; j < AgentClass.Mini_batchsize; j++)
                {
                    XiArray[j] = InputArray[j][i];
                }
                firstLayer.fullyneurns[i].forward(XiArray);
            }



            // Hidden layers
            for (int i = 1; i < fullyLayers.Count; i++)
            {
                forwardcaculate(fullyLayers[i - 1], fullyLayers[i]);
            }
            // Output
            if (outData == null)
            {
                int lastNetNumber = NumberOfLayorNeuron[NumberOfLayorNeuron.Length - 1];
                outData = new List <float[]>(AgentClass.Mini_batchsize);
                for (int i = 0; i < AgentClass.Mini_batchsize; i++)
                {
                    outData.Add(new float[lastNetNumber]);
                }
            }
            FullyLayer lastFullyLayer = fullyLayers[fullyLayers.Count - 1];

            for (int i = 0; i < AgentClass.Mini_batchsize; i++)
            {
                for (int j = 0; j < NumberOfLayorNeuron[NumberOfLayorNeuron.Length - 1]; j++)
                {
                    outData[i][j] = lastFullyLayer.fullyneurns[j].Xout[i];
                }
                //outData[i] = lastFullyLayer.fullyneurns[i].Xout;
            }
        }
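`caculate()` builds the layer and neuron lists lazily on first call, seeds every weight with `W_value_method(fanIn)`, feeds the mini-batch through the first layer, propagates it with `forwardcaculate`, and copies the last layer's activations into `outData`. The body of `W_value_method` is not part of the snippet; a fan-in-scaled uniform draw is one common choice, sketched below purely as an assumption, not the original implementation.

        // Hypothetical stand-in for W_value_method: uniform initialization in
        // [-1/sqrt(fanIn), 1/sqrt(fanIn)]. The original body is not shown in
        // the snippet and may use a different scheme. Assumes `using System;`.
        private static readonly Random rng = new Random();

        private float W_value_method(int fanIn)
        {
            float limit = 1.0f / (float)Math.Sqrt(fanIn);
            return (float)((rng.NextDouble() * 2.0 - 1.0) * limit);
        }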
Example #3
        public void updateWi()
        {
            // Output nodes
            FullyLayer lastFullyLayer = fullyLayers[fullyLayers.Count - 1];

            for (int i = 0; i < lastFullyLayer.fullyneurns.Count; i++)
            {
                lastFullyLayer.fullyneurns[i].backpropagation(lossValue[i]);
            }
            // Hidden nodes
            for (int i = fullyLayers.Count - 1; i > 0; i--)
            {
                backward(fullyLayers[i - 1], fullyLayers[i]);
            }
            // Output sensitivities
            if (OutSensitive == null)
            {
                OutSensitive = new List <float[]>(AgentClass.Mini_batchsize);
                for (int i = 0; i < AgentClass.Mini_batchsize; i++)
                {
                    OutSensitive.Add(new float[NumberOfLayorNeuron[0]]);
                }
            }
            for (int i = 0; i < AgentClass.Mini_batchsize; i++)
            {
                for (int j = 0; j < fullyLayers[0].fullyneurns.Count; j++)
                {
                    OutSensitive[i][j] = fullyLayers[0].fullyneurns[j].SensitiveValue[i];
                }
            }
            // Update the weights
            // Initialize the ADAM moment buffers
            if (isAdamInit == false)
            {
                for (int i = 0; i < fullyLayers.Count - 1; i++)
                {
                    for (int j = 0; j < fullyLayers[i].fullyneurns.Count; j++)
                    {
                        Fullyneuron fullyneuron = fullyLayers[i].fullyneurns[j];
                        if (fullyneuron.S == null)
                        {
                            fullyneuron.S = new float[NumberOfLayorNeuron[i + 1]];
                            fullyneuron.V = new float[NumberOfLayorNeuron[i + 1]];
                        }
                    }
                }
                isAdamInit = true;
            }
            // Buffer holding dw for each mini-batch sample
            float[] dwArray = new float[AgentClass.Mini_batchsize];
            // Update the weights
            for (int i = 0; i < fullyLayers.Count - 1; i++)
            {
                float dw = 0;
                for (int j = 0; j < fullyLayers[i].fullyneurns.Count; j++)
                {
                    Fullyneuron fullyneuron = fullyLayers[i].fullyneurns[j];
                    for (int k = 0; k < fullyneuron.Wi.Length; k++)
                    {
                        for (int mini = 0; mini < AgentClass.Mini_batchsize; mini++)
                        {
                            dwArray[mini] = fullyneuron.Xout[mini] * fullyLayers[i + 1].fullyneurns[k].SensitiveValue[mini];
                        }
                        dw = ArrayAction.average(dwArray);
                        fullyneuron.AdamUpdateWi(dw, k);
                        //float quanzhi = Adam.GetAdamNumber(fullyneuron.V[k], fullyneuron.S[k], dw,out fullyneuron.V[k],out fullyneuron.S[k]);
                        //fullyneuron.Wi[k] -= AgentClass.Study_rate * quanzhi;
                    }
                }
            }
        }
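`updateWi()` first backpropagates the loss through the output and hidden layers, exposes the first layer's sensitivities in `OutSensitive`, lazily allocates the per-weight ADAM buffers `S` and `V`, and finally averages the per-sample gradient `Xout * SensitiveValue` over the mini-batch before calling `AdamUpdateWi(dw, k)`. That method is not included in the snippet; the sketch below shows a standard Adam step (without bias correction) consistent with the buffers allocated above, but it is only an assumption about the original code.

        // Hypothetical sketch of the per-weight Adam step behind AdamUpdateWi.
        // Field names Wi, V, S and AgentClass.Study_rate come from the snippets;
        // the constants and the exact update rule are assumptions.
        private const float Beta1   = 0.9f;
        private const float Beta2   = 0.999f;
        private const float Epsilon = 1e-8f;

        public void AdamUpdateWi(float dw, int k)
        {
            // Exponential moving averages of the gradient and squared gradient.
            V[k] = Beta1 * V[k] + (1 - Beta1) * dw;
            S[k] = Beta2 * S[k] + (1 - Beta2) * dw * dw;

            // Weight update: w -= lr * m / (sqrt(v) + eps); bias correction omitted.
            Wi[k] -= AgentClass.Study_rate * V[k] / ((float)Math.Sqrt(S[k]) + Epsilon);
        }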