Example #1
        public NdArray ForwardCpu(NdArray x)
        {
            Real[][] upwards = new Real[4][];
            upwards[0] = this.upward0.Forward(x)[0].Data;
            upwards[1] = this.upward1.Forward(x)[0].Data;
            upwards[2] = this.upward2.Forward(x)[0].Data;
            upwards[3] = this.upward3.Forward(x)[0].Data;

            int outputDataSize = x.BatchCount * this.OutputCount;

            if (this.hParam == null)
            {
                // Initialize when there are no stored values
                this.aParam = new List <Real[]>();
                this.iParam = new List <Real[]>();
                this.fParam = new List <Real[]>();
                this.oParam = new List <Real[]>();
                this.cParam = new List <Real[]>();
            }
            else
            {
                Real[] laterals0 = this.lateral0.Forward(hParam)[0].Data;
                Real[] laterals1 = this.lateral1.Forward(hParam)[0].Data;
                Real[] laterals2 = this.lateral2.Forward(hParam)[0].Data;
                Real[] laterals3 = this.lateral3.Forward(hParam)[0].Data;
                hParam.UseCount -= 4; // Adjust the use count RFI

                for (int i = 0; i < outputDataSize; i++)
                {
                    upwards[0][i] += laterals0[i];
                    upwards[1][i] += laterals1[i];
                    upwards[2][i] += laterals2[i];
                    upwards[3][i] += laterals3[i];
                }
            }

            if (this.cParam.Count == 0)
            {
                this.cParam.Add(new Real[outputDataSize]);
            }

            Real[] la      = new Real[outputDataSize];
            Real[] li      = new Real[outputDataSize];
            Real[] lf      = new Real[outputDataSize];
            Real[] lo      = new Real[outputDataSize];
            Real[] cPrev   = this.cParam[this.cParam.Count - 1];
            Real[] cResult = new Real[cPrev.Length];
            Real[] lhParam = new Real[outputDataSize];

            for (int b = 0; b < x.BatchCount; b++)
            {
                // Rearrange
                for (int j = 0; j < this.OutputCount; j++)
                {
                    int index      = j * 4;
                    int batchIndex = b * OutputCount + j;

                    la[batchIndex] = Math.Tanh(upwards[index / this.OutputCount][index % this.OutputCount + b * OutputCount]);
                    li[batchIndex] = Sigmoid(upwards[++index / this.OutputCount][index % this.OutputCount + b * OutputCount]);
                    lf[batchIndex] = Sigmoid(upwards[++index / this.OutputCount][index % this.OutputCount + b * OutputCount]);
                    lo[batchIndex] = Sigmoid(upwards[++index / this.OutputCount][index % this.OutputCount + b * OutputCount]);

                    cResult[batchIndex] = la[batchIndex] * li[batchIndex] + lf[batchIndex] * cPrev[batchIndex];

                    lhParam[batchIndex] = lo[batchIndex] * Math.Tanh(cResult[batchIndex]);
                }
            }

            // Kept for Backward
            this.cParam.Add(cResult);
            this.aParam.Add(la);
            this.iParam.Add(li);
            this.fParam.Add(lf);
            this.oParam.Add(lo);

            this.hParam = new NdArray(lhParam, new[] { OutputCount }, x.BatchCount, this);
            return(this.hParam);
        }
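Written out, the loop above evaluates the standard LSTM cell; each of the four pre-activations is the sum of its upward (input) branch and its lateral (recurrent) branch, where W and R stand for the weights held by upward0..3 and lateral0..3:

    a_t = \tanh(W_a x_t + R_a h_{t-1}), \quad i_t = \sigma(W_i x_t + R_i h_{t-1}), \quad f_t = \sigma(W_f x_t + R_f h_{t-1}), \quad o_t = \sigma(W_o x_t + R_o h_{t-1})
    c_t = a_t \odot i_t + f_t \odot c_{t-1}
    h_t = o_t \odot \tanh(c_t)

Here c_{t-1} is the last entry of cParam and h_{t-1} is the previous hParam, which is why both are kept for the backward pass.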
Example #2
 public ELU(double alpha = 1, string name = FUNCTION_NAME, string[] inputNames = null, string[] outputNames = null) : base(name, inputNames, outputNames)
 {
     this.Alpha = alpha;
 }
Example #3
 internal override Real ForwardActivate(Real x)
 {
     return(Math.Tanh(x * 0.5) * 0.5 + 0.5);
 }
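This ForwardActivate is the logistic sigmoid written through tanh; multiplying the numerator and denominator of \tanh(x/2) by e^{-x/2} gives the identity the code relies on:

    \sigma(x) = \frac{1}{1 + e^{-x}} = \frac{1}{2}\tanh\!\left(\frac{x}{2}\right) + \frac{1}{2}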
Example #4
 public MomentumSGD(double learningRate = 0.01, double momentum = 0.9)
 {
     this.LearningRate = learningRate;
     this.Momentum     = momentum;
 }
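The Update step itself is not part of this listing. As a minimal sketch, assuming a per-parameter velocity buffer (the method and array names below are illustrative, not the library's API), the two hyperparameters drive the classic momentum rule v <- momentum * v - learningRate * g, w <- w + v:

 // Hedged sketch of one momentum step; param, grad and velocity are assumed
 // to be equally sized buffers maintained by the caller.
 public static void MomentumStep(Real[] param, Real[] grad, Real[] velocity, double learningRate = 0.01, double momentum = 0.9)
 {
     for (int i = 0; i < param.Length; i++)
     {
         velocity[i] = momentum * velocity[i] - learningRate * grad[i]; // decay the velocity, then add the scaled gradient
         param[i]   += velocity[i];                                     // move the parameter along the velocity
     }
 }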
Example #5
        private NdArray ForwardCpu(NdArray input)
        {
            int outputHeight = _coverAll ?
                               (int)Math.Floor((input.Shape[1] - this._kHeight + this._padY * 2.0 + this._strideY - 1.0) / this._strideY) + 1 :
                               (int)Math.Floor((input.Shape[1] - this._kHeight + this._padY * 2.0) / this._strideY) + 1;
            int outputWidth = _coverAll ?
                              (int)Math.Floor((input.Shape[2] - this._kWidth + this._padX * 2.0 + this._strideX - 1.0) / this._strideX) + 1 :
                              (int)Math.Floor((input.Shape[2] - this._kWidth + this._padX * 2.0) / this._strideX) + 1;

            int[] outputIndices = new int[input.Shape[0] * outputHeight * outputWidth * input.BatchCount];

            for (int i = 0; i < outputIndices.Length; i++)
            {
                outputIndices[i] = -1;
            }

            for (int b = 0; b < input.BatchCount; b++)
            {
                int outBatchOffset = b * input.Shape[0] * outputHeight * outputWidth;
                int inBatchOffset  = b * input.Length;

                for (int i = 0; i < input.Shape[0]; i++)
                {
                    int outChOffset = outBatchOffset + i * outputHeight * outputWidth;
                    int inChOffset  = inBatchOffset + i * input.Shape[1] * input.Shape[2];

                    for (int y = 0; y < outputHeight; y++)
                    {
                        int inIndexY = y * _strideY - _padY;
                        int dyLimit  = this._kHeight < input.Shape[1] - inIndexY ? this._kHeight : input.Shape[1] - inIndexY;
                        int dyStart  = inIndexY < 0 ? -inIndexY : 0;

                        for (int x = 0; x < outputWidth; x++)
                        {
                            int inIndexX = x * _strideX - _padX;
                            int dxLimit  = this._kWidth < input.Shape[2] - inIndexX ? this._kWidth : input.Shape[2] - inIndexX;
                            int dxStart  = inIndexX < 0 ? -inIndexX : 0;

                            int inBaseIndex = inChOffset + inIndexY * input.Shape[2] + inIndexX;
                            int outIndex    = outChOffset + y * outputWidth + x;

                            Real maxVal = float.NegativeInfinity;
                            outputIndices[outIndex] = -1;

                            for (int dy = dyStart; dy < dyLimit; dy++)
                            {
                                for (int dx = dxStart; dx < dxLimit; dx++)
                                {
                                    int inputIndex = inBaseIndex + dy * input.Shape[2] + dx;

                                    if (maxVal < input.Data[inputIndex])
                                    {
                                        maxVal = input.Data[inputIndex];
                                        outputIndices[outIndex] = inputIndex;
                                    }
                                }
                            }
                        }
                    }
                }
            }

            return(GetForwardResult(input, outputIndices, outputWidth, outputHeight));
        }
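The two branches of the output-size computation are the usual pooling relations; with _coverAll the extra strideY - 1 inside the floor turns it into a ceiling, so windows may run past the padded input and every pixel is covered at least once:

    H_{out} = \left\lfloor \frac{H_{in} + 2p_y - k_h}{s_y} \right\rfloor + 1
    \qquad \text{or, with coverAll,} \qquad
    H_{out} = \left\lceil \frac{H_{in} + 2p_y - k_h}{s_y} \right\rceil + 1

and analogously for the width, which is exactly what outputHeight and outputWidth evaluate at the top of the method.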
Example #6
        private NdArray ForwardCpu(NdArray x)
        {
            int dataSize = x.Length / ChannelSize;

            // Get the parameters used in the calculation
            if (this.IsTrain)
            {
                // Set the Mean and Variance members
                this.Variance = new Real[this.ChannelSize];
                this.Mean     = new Real[this.ChannelSize];

                for (int i = 0; i < this.ChannelSize; i++)
                {
                    for (int b = 0; b < x.BatchCount; b++)
                    {
                        for (int location = 0; location < dataSize; location++)
                        {
                            this.Mean[i] += x.Data[b * x.Length + i * dataSize + location];
                        }
                    }

                    this.Mean[i] /= x.BatchCount * dataSize;

                    for (int b = 0; b < x.BatchCount; b++)
                    {
                        for (int location = 0; location < dataSize; location++)
                        {
                            this.Variance[i] += (x.Data[b * x.Length + i * dataSize + location] - this.Mean[i]) * (x.Data[b * x.Length + i * dataSize + location] - this.Mean[i]);
                        }
                    }

                    this.Variance[i] = this.Variance[i] / (x.BatchCount * dataSize) + this.Eps;
                }
            }
            else
            {
                this.Mean     = this.AvgMean.Data;
                this.Variance = this.AvgVar.Data;
            }

            this.Std = new Real[this.ChannelSize];
            for (int i = 0; i < this.Std.Length; i++)
            {
                this.Std[i] = Math.Sqrt(this.Variance[i]);
            }

            // Compute the result
            this.Xhat = new Real[x.Data.Length];

            Real[] y = new Real[x.Data.Length];

            for (int i = 0; i < this.ChannelSize; i++)
            {
                for (int b = 0; b < x.BatchCount; b++)
                {
                    for (int location = 0; location < dataSize; location++)
                    {
                        int index = b * x.Length + i * dataSize + location;
                        this.Xhat[index] = (x.Data[index] - this.Mean[i]) / this.Std[i];
                        y[index]         = this.Gamma.Data[i] * this.Xhat[index] + this.Beta.Data[i];
                    }
                }
            }

            // Update the parameters
            if (this.IsTrain)
            {
                int  m      = x.BatchCount;
                Real adjust = m / Math.Max(m - 1.0, 1.0); // unbiased estimation

                for (int i = 0; i < this.AvgMean.Data.Length; i++)
                {
                    this.AvgMean.Data[i] *= this.Decay;
                    this.Mean[i]         *= 1 - this.Decay; // reuse buffer as a temporary
                    this.AvgMean.Data[i] += this.Mean[i];

                    this.AvgVar.Data[i] *= this.Decay;
                    this.Variance[i]    *= (1 - this.Decay) * adjust; // reuse buffer as a temporary
                    this.AvgVar.Data[i] += this.Variance[i];
                }
            }

            return(NdArray.Convert(y, x.Shape, x.BatchCount, this));
        }
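In training mode the nested loops implement standard batch normalization per channel c, over all batch items and spatial locations:

    \hat{x} = \frac{x - \mu_c}{\sqrt{\sigma_c^2 + \varepsilon}}, \qquad y = \gamma_c \hat{x} + \beta_c

AvgMean and AvgVar are then updated as exponential moving averages with decay Decay, the variance scaled by m / (m - 1) as the unbiased estimate; in inference mode those running averages are used directly in place of the batch statistics.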
Example #7
 internal override Real BackwardActivate(Real gy, Real y)
 {
     return(y <= 0 ? 0 : gy);
 }
Example #8
        public BatchNormalization(int channelSize, double decay = 0.9, double eps = 2e-5, bool useGamma = true, bool useBeta = true, int initialGamma = 1, int initialBeta = 0, int? axis = null, int initialAvgMean = 0, int initialAvgVar = 1, bool train = true, bool finetune = false, string name = FUNCTION_NAME, string[] inputNames = null, string[] outputNames = null) : base(name, inputNames, outputNames)
        {
            this.ChannelSize = channelSize;
            this.Decay       = decay;
            this.Eps         = eps;
            this.Train       = train;

            this.Finetune = finetune;

            this.Gamma      = new NdArray(channelSize);
            this.Gamma.Name = this.Name + " Gamma";

            this.Beta      = new NdArray(channelSize);
            this.Beta.Name = this.Name + " Beta";

            int paramIndex = 0;
            int paramCount = 0;

            if (useGamma)
            {
                paramCount++;
            }
            if (useBeta)
            {
                paramCount++;
            }
            if (!train)
            {
                paramCount += 2;
            }

            this.Parameters = new NdArray[paramCount];

            // Register the trainable Parameters
            if (useGamma)
            {
                this.Parameters[paramIndex++] = this.Gamma;
            }
            if (useBeta)
            {
                this.Parameters[paramIndex++] = this.Beta;
            }

            this.AvgMean      = new NdArray(channelSize);
            this.AvgMean.Name = this.Name + " Mean";

            this.AvgVar      = new NdArray(channelSize);
            this.AvgVar.Name = this.Name + " Variance";

            this.Gamma.Data = Enumerable.Repeat((Real)initialGamma, this.Gamma.Data.Length).ToArray();
            this.Beta.Data  = Enumerable.Repeat((Real)initialBeta, this.Beta.Data.Length).ToArray();

            this.AvgMean.Data = Enumerable.Repeat((Real)initialAvgMean, this.AvgMean.Data.Length).ToArray();
            this.AvgVar.Data  = Enumerable.Repeat((Real)initialAvgVar, this.AvgVar.Data.Length).ToArray();

            if (!this.Train)
            {
                this.Parameters[paramIndex++] = this.AvgMean;
                this.Parameters[paramIndex]   = this.AvgVar;
            }

            SingleInputForward   = ForwardCpu;
            SingleOutputBackward = BackwardCpu;
        }
Example #9
 public GradientClipping(double threshold)
 {
     this.Threshold = threshold;
 }
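Only the threshold is stored here; the clipping itself happens elsewhere and is not part of this listing. A minimal sketch, assuming the common scheme of clipping by the global L2 norm of all parameter gradients (the method signature below is illustrative, not the library's API):

 // Hedged sketch: rescale every gradient uniformly once the global L2 norm
 // exceeds threshold, which keeps the overall gradient direction unchanged.
 public static void ClipByGlobalNorm(IList<Real[]> grads, double threshold)
 {
     double sumOfSquares = 0;
     foreach (Real[] grad in grads)
     {
         foreach (Real g in grad)
         {
             sumOfSquares += g * g;
         }
     }

     double norm = Math.Sqrt(sumOfSquares);
     if (norm <= threshold) return; // already within the threshold

     double rate = threshold / norm;
     foreach (Real[] grad in grads)
     {
         for (int i = 0; i < grad.Length; i++)
         {
             grad[i] *= rate;
         }
     }
 }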
Example #10
        public LSTM(int inSize, int outSize, Array lateralInit = null, Array upwardInit = null, Array biasInit = null, Array forgetBiasInit = null, string name = FUNCTION_NAME, string[] inputNames = null, string[] outputNames = null, bool gpuEnable = false) : base(name, inputNames, outputNames)
        {
            this.InputCount  = inSize;
            this.OutputCount = outSize;

            List <NdArray> functionParameters = new List <NdArray>();

            Real[] lateralW = null;
            Real[] upwardW  = null;
            Real[] upwardb  = null;

            if (upwardInit != null)
            {
                upwardW = new Real[inSize * outSize * 4];

                Real[] tmpUpwardInit = Real.ToRealArray(upwardInit);

                for (int i = 0; i < 4; i++)
                {
                    Array.Copy(tmpUpwardInit, 0, upwardW, i * tmpUpwardInit.Length, tmpUpwardInit.Length);
                }
            }

            if (lateralInit != null)
            {
                lateralW = new Real[outSize * outSize * 4];

                Real[] tmpLateralInit = Real.ToRealArray(lateralInit);

                for (int i = 0; i < 4; i++)
                {
                    Array.Copy(tmpLateralInit, 0, lateralW, i * tmpLateralInit.Length, tmpLateralInit.Length);
                }
            }

            if (biasInit != null && forgetBiasInit != null)
            {
                upwardb = new Real[outSize * 4];

                Real[] tmpBiasInit = Real.ToRealArray(biasInit);

                for (int i = 0; i < biasInit.Length; i++)
                {
                    upwardb[i * 4 + 0] = tmpBiasInit[i];
                    upwardb[i * 4 + 1] = tmpBiasInit[i];
                    upwardb[i * 4 + 3] = tmpBiasInit[i];
                }

                Real[] tmpforgetBiasInit = Real.ToRealArray(forgetBiasInit);

                for (int i = 0; i < tmpforgetBiasInit.Length; i++)
                {
                    upwardb[i * 4 + 2] = tmpforgetBiasInit[i];
                }
            }

            this.upward = new Linear(inSize, outSize * 4, noBias: false, initialW: upwardW, initialb: upwardb, name: "upward", gpuEnable: gpuEnable);
            functionParameters.AddRange(this.upward.Parameters);

            // lateral has no bias
            this.lateral = new Linear(outSize, outSize * 4, noBias: true, initialW: lateralW, name: "lateral", gpuEnable: gpuEnable);
            functionParameters.AddRange(this.lateral.Parameters);

            this.Parameters = functionParameters.ToArray();

            SingleInputForward   = ForwardCpu;
            SingleOutputBackward = BackwardCpu;
        }
Example #11
 public RMSprop(double learningRate = 0.01, double alpha = 0.99, double epsilon = 1e-8)
 {
     this.LearningRate = learningRate;
     this.Alpha        = alpha;
     this.Epsilon      = epsilon;
 }
Example #12
        public void BackwardCpu(NdArray y, NdArray x)
        {
            Real[] gxPrevGrad = new Real[y.BatchCount * OutputCount * 4];

            Real[] gcPrev = new Real[y.BatchCount * this.OutputCount];

            Real[] lcNextParam = this.cNextParam[this.cNextParam.Count - 1];
            this.cNextParam.RemoveAt(this.cNextParam.Count - 1);
            this.cUsedNextParam.Add(lcNextParam);

            Real[] tanh_a = this.aParam[this.aParam.Count - 1];
            this.aParam.RemoveAt(this.aParam.Count - 1);
            this.aUsedParam.Add(tanh_a);

            Real[] sig_i = this.iParam[this.iParam.Count - 1];
            this.iParam.RemoveAt(this.iParam.Count - 1);
            this.iUsedParam.Add(sig_i);

            Real[] sig_f = this.fParam[this.fParam.Count - 1];
            this.fParam.RemoveAt(this.fParam.Count - 1);
            this.fUsedParam.Add(sig_f);

            Real[] sig_o = this.oParam[this.oParam.Count - 1];
            this.oParam.RemoveAt(this.oParam.Count - 1);
            this.oUsedParam.Add(sig_o);

            Real[] lcPrev = this.cPrevParam[this.cPrevParam.Count - 1];
            this.cPrevParam.RemoveAt(this.cPrevParam.Count - 1);
            this.cUsedPrevParam.Add(lcPrev);

            for (int b = 0; b < y.BatchCount; b++)
            {
                int index = b * OutputCount * 4;

                for (int i = 0; i < this.OutputCount; i++)
                {
                    int prevOutputIndex = b * this.OutputCount + i;

                    double co = Math.Tanh(lcNextParam[prevOutputIndex]);

                    gcPrev[prevOutputIndex] += y.Grad[prevOutputIndex] * sig_o[prevOutputIndex] * GradTanh(co);

                    gxPrevGrad[index++] = gcPrev[prevOutputIndex] * sig_i[prevOutputIndex] * GradTanh(tanh_a[prevOutputIndex]);
                    gxPrevGrad[index++] = gcPrev[prevOutputIndex] * tanh_a[prevOutputIndex] * GradSigmoid(sig_i[prevOutputIndex]);
                    gxPrevGrad[index++] = gcPrev[prevOutputIndex] * lcPrev[prevOutputIndex] * GradSigmoid(sig_f[prevOutputIndex]);
                    gxPrevGrad[index++] = gy.Grad == null ? y.Grad[prevOutputIndex] * co * GradSigmoid(sig_o[prevOutputIndex]) : y.Grad[prevOutputIndex] * co * GradSigmoid(sig_o[prevOutputIndex]);

                    gcPrev[prevOutputIndex] *= sig_f[prevOutputIndex];
                }
            }

            gxPrevGrads.Add(gxPrevGrad);

            // Passing gxPrev to lateral and upward also covers the Backward of the add
            if (hPrevParams.Count > 0)
            {
                NdArray gxPrev = new NdArray(new[] { OutputCount * 4 }, y.BatchCount);
                gxPrev.Grad = gxPrevGrad;
                this.lateral.Backward(gxPrev);

                NdArray hPrevParam = hPrevParams[hPrevParams.Count - 1];
                hPrevParams.RemoveAt(hPrevParams.Count - 1);
                hUsedPrevParams.Add(hPrevParam);

                // Backward of h
                this.Backward(hPrevParam);

                // Put everything back once exhausted
                if (hPrevParams.Count == 0)
                {
                    hPrevParams.AddRange(hUsedPrevParams);
                    hUsedPrevParams.Clear();
                }
            }

            NdArray gy = new NdArray(new[] { OutputCount * 4 }, y.BatchCount);

            gy.Grad = gxPrevGrads[0];
            gxPrevGrads.RemoveAt(0);
            this.upward.Backward(gy);

            // Linear's Backward only uses gxPrev.Grad, so gxPrev.Data stays empty
            // Put everything back once exhausted
            if (cNextParam.Count == 0)
            {
                this.cNextParam.AddRange(cUsedNextParam);
                this.aParam.AddRange(aUsedParam);
                this.iParam.AddRange(iUsedParam);
                this.fParam.AddRange(fUsedParam);
                this.oParam.AddRange(oUsedParam);
                this.cPrevParam.AddRange(cUsedPrevParam);
                this.cUsedNextParam.Clear();
                this.aUsedParam.Clear();
                this.iUsedParam.Clear();
                this.fUsedParam.Clear();
                this.oUsedParam.Clear();
                this.cUsedPrevParam.Clear();
            }
        }
Example #13
        public NdArray ForwardCpu(NdArray x)
        {
            NdArray lstmIn = this.upward.Forward(x)[0]; //a

            int outputDataSize = x.BatchCount * this.OutputCount;

            if (this.hParam == null)
            {
                // Initialize when there are no stored values
                this.aParam      = new List <Real[]>();
                this.iParam      = new List <Real[]>();
                this.fParam      = new List <Real[]>();
                this.oParam      = new List <Real[]>();
                this.cNextParam  = new List <Real[]>();
                this.cPrevParam  = new List <Real[]>();
                this.hPrevParams = new List <NdArray>();

                this.aUsedParam      = new List <Real[]>();
                this.iUsedParam      = new List <Real[]>();
                this.fUsedParam      = new List <Real[]>();
                this.oUsedParam      = new List <Real[]>();
                this.cUsedNextParam  = new List <Real[]>();
                this.cUsedPrevParam  = new List <Real[]>();
                this.hUsedPrevParams = new List <NdArray>();

                gxPrevGrads = new List <Real[]>();

                cPrev = new Real[outputDataSize];
            }
            else
            {
                NdArray hPrevParam = this.hParam.Clone();
                if (hPrevParam.Grad != null)
                {
                    hPrevParam.InitGrad();
                }
                lstmIn += this.lateral.Forward(hPrevParam)[0];
                hPrevParams.Add(hPrevParam);
            }

            Real[] la      = new Real[outputDataSize];
            Real[] li      = new Real[outputDataSize];
            Real[] lf      = new Real[outputDataSize];
            Real[] lo      = new Real[outputDataSize];
            Real[] cNext   = new Real[outputDataSize];
            Real[] lhParam = new Real[outputDataSize];

            for (int b = 0; b < x.BatchCount; b++)
            {
                int index = b * lstmIn.Length;

                for (int i = 0; i < this.OutputCount; i++)
                {
                    int outIndex = b * this.OutputCount + i;

                    la[outIndex] = Math.Tanh(lstmIn.Data[index++]);
                    li[outIndex] = Sigmoid(lstmIn.Data[index++]);
                    lf[outIndex] = Sigmoid(lstmIn.Data[index++]);
                    lo[outIndex] = Sigmoid(lstmIn.Data[index++]);

                    cNext[outIndex] = la[outIndex] * li[outIndex] + lf[outIndex] * cPrev[outIndex];

                    lhParam[outIndex] = lo[outIndex] * Math.Tanh(cNext[outIndex]);
                }
            }

            this.cPrevParam.Add(cPrev);
            this.cNextParam.Add(cNext);
            this.aParam.Add(la);
            this.iParam.Add(li);
            this.fParam.Add(lf);
            this.oParam.Add(lo);

            // Keep a separate reference so it is not lost during Backward
            cPrev = cNext;

            this.hParam = new NdArray(lhParam, new[] { OutputCount }, x.BatchCount, this);
            return(this.hParam);
        }
Example #14
 internal override Real ForwardActivate(Real x)
 {
     return(1 / (1 + Math.Exp(-x)));
 }
Example #15
 internal override Real ForwardActivate(Real x)
 {
     return(x < 0 ? (Real)(x * this._slope) : x);
 }
Example #16
        protected override NdArray NeedPreviousForwardCpu(NdArray input)
        {
            int outputHeight = (input.Shape[1] - 1) * this._subSampleY + this._kHeight - this._trimY * 2;
            int outputWidth  = (input.Shape[2] - 1) * this._subSampleX + this._kWidth - this._trimX * 2;

            Real[] result = new Real[input.BatchCount * this.OutputCount * outputWidth * outputHeight];

            int outSizeOffset   = outputWidth * outputHeight;
            int inputSizeOffset = input.Shape[1] * input.Shape[2];
            int kSizeOffset     = this.Weight.Shape[2] * this.Weight.Shape[3];

            for (int batchCount = 0; batchCount < input.BatchCount; batchCount++)
            {
                for (int och = 0; och < this.OutputCount; och++)
                {
                    for (int oy = this._trimY; oy < outputHeight + this._trimY; oy++)
                    {
                        int iyLimit = oy / this._subSampleY + 1 < input.Shape[1] ? oy / this._subSampleY + 1 : input.Shape[1];
                        int iyStart = oy - this.Weight.Shape[2] < 0 ? 0 : (oy - this.Weight.Shape[2]) / this._subSampleY + 1;

                        for (int ox = this._trimX; ox < outputWidth + this._trimX; ox++)
                        {
                            int ixLimit = ox / this._subSampleX + 1 < input.Shape[2] ? ox / this._subSampleX + 1 : input.Shape[2];
                            int ixStart = ox - this.Weight.Shape[3] < 0 ? 0 : (ox - this.Weight.Shape[3]) / this._subSampleX + 1;

                            int outputIndex = batchCount * this.OutputCount * outSizeOffset + och * outSizeOffset + (oy - this._trimY) * outputWidth + ox - this._trimX;

                            for (int ich = 0; ich < input.Shape[0]; ich++)
                            {
                                int inputIndexOffset  = batchCount * input.Length + ich * inputSizeOffset;
                                int kernelIndexOffset = och * this.Weight.Shape[1] * kSizeOffset + ich * kSizeOffset;

                                for (int iy = iyStart; iy < iyLimit; iy++)
                                {
                                    for (int ix = ixStart; ix < ixLimit; ix++)
                                    {
                                        int inputIndex  = inputIndexOffset + iy * input.Shape[2] + ix;
                                        int kernelIndex = kernelIndexOffset + (oy - iy * this._subSampleY) * this.Weight.Shape[3] + (ox - ix * this._subSampleX);

                                        result[outputIndex] += input.Data[inputIndex] * this.Weight.Data[kernelIndex];
                                    }
                                }
                            }
                        }
                    }
                }
            }

            if (this.Activator != null && !NoBias)
            {
                for (int batchCount = 0; batchCount < input.BatchCount; batchCount++)
                {
                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int oy = this._trimY; oy < outputHeight + this._trimY; oy++)
                        {
                            for (int ox = this._trimX; ox < outputWidth + this._trimX; ox++)
                            {
                                int outputIndex = batchCount * this.OutputCount * outSizeOffset + och * outSizeOffset + (oy - this._trimY) * outputWidth + ox - this._trimX;

                                result[outputIndex] += this.Bias.Data[och];
                                result[outputIndex]  = this.Activator.ForwardActivate(result[outputIndex]);
                            }
                        }
                    }
                }
            }
            else if (!NoBias)
            {
                for (int batchCount = 0; batchCount < input.BatchCount; batchCount++)
                {
                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int oy = this._trimY; oy < outputHeight + this._trimY; oy++)
                        {
                            for (int ox = this._trimX; ox < outputWidth + this._trimX; ox++)
                            {
                                int outputIndex = batchCount * this.OutputCount * outSizeOffset + och * outSizeOffset + (oy - this._trimY) * outputWidth + ox - this._trimX;

                                result[outputIndex] += this.Bias.Data[och];
                            }
                        }
                    }
                }
            }
            else if (this.Activator != null)
            {
                for (int batchCount = 0; batchCount < input.BatchCount; batchCount++)
                {
                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int oy = this._trimY; oy < outputHeight + this._trimY; oy++)
                        {
                            for (int ox = this._trimX; ox < outputWidth + this._trimX; ox++)
                            {
                                int outputIndex = batchCount * this.OutputCount * outSizeOffset + och * outSizeOffset + (oy - this._trimY) * outputWidth + ox - this._trimX;

                                result[outputIndex] = this.Activator.ForwardActivate(result[outputIndex]);
                            }
                        }
                    }
                }
            }

            return(NdArray.Convert(result, new[] { this.OutputCount, outputHeight, outputWidth }, input.BatchCount, this));
        }
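The output size at the top of the method is the transposed-convolution relation, i.e. the convolution size formula solved for its input side:

    H_{out} = (H_{in} - 1)\, s_y + k_h - 2\, \mathrm{trim}_y, \qquad W_{out} = (W_{in} - 1)\, s_x + k_w - 2\, \mathrm{trim}_x

which is what outputHeight and outputWidth compute from _subSampleY/_subSampleX, the kernel shape and the trims.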
Example #17
 internal override Real BackwardActivate(Real gy, Real y)
 {
     return(y <= 0 ? (Real)(y * this._slope) : gy);
 }
Example #18
        protected override void NeedPreviousBackwardGpu(NdArray y, NdArray x)
        {
            Real[] gx          = new Real[x.Data.Length];
            Real[] activatedgy = this.Activator != null ? GetActivatedgy(y) : y.Grad;

            if (!NoBias)
            {
                CalcBiasGrad(activatedgy, y.Shape, y.BatchCount);
            }

            // gy is shared by both kernels
            using (ComputeBuffer <Real> gpugY = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, activatedgy))
            {
                using (ComputeBuffer <Real> gpugW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.CopyHostPointer, this.Weight.Grad))
                    using (ComputeBuffer <Real> gpuX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, x.Data))
                    {
                        this.BackwardgWKernel.SetMemoryArgument(0, gpugY);
                        this.BackwardgWKernel.SetMemoryArgument(1, gpuX);
                        this.BackwardgWKernel.SetMemoryArgument(2, gpugW);
                        this.BackwardgWKernel.SetValueArgument(3, y.BatchCount);
                        this.BackwardgWKernel.SetValueArgument(4, this.InputCount);
                        this.BackwardgWKernel.SetValueArgument(5, y.Length);
                        this.BackwardgWKernel.SetValueArgument(6, y.Shape[1]);
                        this.BackwardgWKernel.SetValueArgument(7, y.Shape[2]);
                        this.BackwardgWKernel.SetValueArgument(8, x.Shape[1]);
                        this.BackwardgWKernel.SetValueArgument(9, x.Shape[2]);
                        this.BackwardgWKernel.SetValueArgument(10, x.Length);
                        this.BackwardgWKernel.SetValueArgument(11, this._subSampleX);
                        this.BackwardgWKernel.SetValueArgument(12, this._subSampleY);
                        this.BackwardgWKernel.SetValueArgument(13, this._trimX);
                        this.BackwardgWKernel.SetValueArgument(14, this._trimY);
                        this.BackwardgWKernel.SetValueArgument(15, this._kHeight);
                        this.BackwardgWKernel.SetValueArgument(16, this._kWidth);

                        Weaver.CommandQueue.Execute
                        (
                            this.BackwardgWKernel,
                            null,
                            new long[] { OutputCount * InputCount, this._kHeight, this._kWidth },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugW, ref this.Weight.Grad, true, null);
                    }

                using (ComputeBuffer <Real> gpugX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.WriteOnly | ComputeMemoryFlags.AllocateHostPointer, gx.Length))
                    using (ComputeBuffer <Real> gpuW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, this.Weight.Data))
                    {
                        this.BackwardgXKernel.SetMemoryArgument(0, gpugY);
                        this.BackwardgXKernel.SetMemoryArgument(1, gpuW);
                        this.BackwardgXKernel.SetMemoryArgument(2, gpugX);
                        this.BackwardgXKernel.SetValueArgument(3, this.OutputCount);
                        this.BackwardgXKernel.SetValueArgument(4, this.InputCount);
                        this.BackwardgXKernel.SetValueArgument(5, y.Length);
                        this.BackwardgXKernel.SetValueArgument(6, y.Shape[1]);
                        this.BackwardgXKernel.SetValueArgument(7, y.Shape[2]);
                        this.BackwardgXKernel.SetValueArgument(8, x.Shape[1]);
                        this.BackwardgXKernel.SetValueArgument(9, x.Shape[2]);
                        this.BackwardgXKernel.SetValueArgument(10, x.Length);
                        this.BackwardgXKernel.SetValueArgument(11, this._subSampleX);
                        this.BackwardgXKernel.SetValueArgument(12, this._subSampleY);
                        this.BackwardgXKernel.SetValueArgument(13, this._trimX);
                        this.BackwardgXKernel.SetValueArgument(14, this._trimY);
                        this.BackwardgXKernel.SetValueArgument(15, this._kHeight);
                        this.BackwardgXKernel.SetValueArgument(16, this._kWidth);

                        Weaver.CommandQueue.Execute
                        (
                            this.BackwardgXKernel,
                            null,
                            new long[] { y.BatchCount * x.Shape[0], x.Shape[1], x.Shape[2] },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugX, ref gx, true, null);
                    }
            }

            for (int i = 0; i < x.Grad.Length; i++)
            {
                x.Grad[i] += gx[i];
            }
        }
Example #19
 internal override Real ForwardActivate(Real x)
 {
     return(x < 0 ? 0 : x);
 }
Example #20
        public Dropout(double dropoutRatio = 0.5, string name = FUNCTION_NAME, string[] inputNames = null, string[] outputNames = null, bool gpuEnable = false) : base(name, inputNames, outputNames)
        {
            this.dropoutRatio = dropoutRatio;

            this.SetGpuEnable(gpuEnable);
        }
Example #21
 public AdaGrad(double learningRate = 0.01, double epsilon = 1e-8)
 {
     this.LearningRate = learningRate;
     this.Epsilon      = epsilon;
 }
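The Update step is not shown in this listing; the two hyperparameters correspond to the standard AdaGrad rule, which accumulates a per-parameter sum of squared gradients h and divides the step by its square root:

    h \leftarrow h + g^2, \qquad w \leftarrow w - \frac{\eta\, g}{\sqrt{h} + \varepsilon}

with \eta = LearningRate and \varepsilon = Epsilon.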
Example #22
 // Virtual Activate function used on the .NET side
 internal abstract Real ForwardActivate(Real x);
Example #23
 public AdaDelta(double rho = 0.95, double epsilon = 1e-6)
 {
     this.Rho     = rho;
     this.Epsilon = epsilon;
 }
Example #24
 internal abstract Real BackwardActivate(Real gy, Real y);
Example #25
 public AdamW(double alpha = 0.001, double beta1 = 0.9, double beta2 = 0.999, double epsilon = 1e-8, double eta = 1.0, double weightDecayRate = 0) :
     base(alpha: alpha, beta1: beta1, beta2: beta2, epsilon: epsilon, eta: eta)
 {
     this.WeightDecayRate = weightDecayRate;
 }
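Relative to the Adam base class it extends, the only extra ingredient is WeightDecayRate, i.e. the decoupled weight decay that gives AdamW its name; the concrete Update step is not part of this listing, but the decay is applied directly to the weights rather than folded into the gradient:

    w \leftarrow w - \eta \left( \frac{\alpha\, \hat{m}}{\sqrt{\hat{v}} + \varepsilon} + \lambda\, w \right)

with \hat{m}, \hat{v} the bias-corrected first and second moments and \lambda = WeightDecayRate.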
Example #26
 public SGD(double learningRate = 0.1)
 {
     this.LearningRate = learningRate;
 }
Example #27
        protected override void NeedPreviousBackwardGpu(NdArray y, NdArray x)
        {
            Real[] gx          = new Real[x.Data.Length];
            Real[] activatedgy = this.Activator != null ? GetActivatedgy(y) : y.Grad;

            if (!NoBias)
            {
                CalcBiasGrad(activatedgy, y.BatchCount);
            }

            using (ComputeBuffer <Real> gpugY = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, activatedgy))
            {
                using (ComputeBuffer <Real> gpugW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.UseHostPointer, this.Weight.Grad))
                    using (ComputeBuffer <Real> gpuX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, x.Data))
                    {
                        BackwardgWKernel.SetMemoryArgument(0, gpugY);
                        BackwardgWKernel.SetMemoryArgument(1, gpuX);
                        BackwardgWKernel.SetMemoryArgument(2, gpugW);
                        BackwardgWKernel.SetValueArgument(3, y.BatchCount);
                        BackwardgWKernel.SetValueArgument(4, this.OutputCount);
                        BackwardgWKernel.SetValueArgument(5, this.InputCount);

                        Weaver.CommandQueue.Execute
                        (
                            BackwardgWKernel,
                            null,
                            new long[] { this.InputCount, this.OutputCount },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugW, ref this.Weight.Grad, true, null);
                    }

                using (ComputeBuffer <Real> gpugX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.WriteOnly | ComputeMemoryFlags.AllocateHostPointer, gx.Length))
                    using (ComputeBuffer <Real> gpuW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, this.Weight.Data))
                    {
                        BackwardgXKernel.SetMemoryArgument(0, gpugY);
                        BackwardgXKernel.SetMemoryArgument(1, gpuW);
                        BackwardgXKernel.SetMemoryArgument(2, gpugX);
                        BackwardgXKernel.SetValueArgument(3, y.BatchCount);
                        BackwardgXKernel.SetValueArgument(4, this.OutputCount);
                        BackwardgXKernel.SetValueArgument(5, this.InputCount);

                        Weaver.CommandQueue.Execute
                        (
                            BackwardgXKernel,
                            null,
                            new long[] { this.InputCount, y.BatchCount },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugX, ref gx, true, null);
                    }
            }

            for (int i = 0; i < x.Grad.Length; i++)
            {
                x.Grad[i] += gx[i];
            }
        }
Example #28
 public LeakyReLU(double slope = 0.2, string name = FUNCTION_NAME, string[] inputNames = null, string[] outputNames = null, bool gpuEnable = false) : base(FUNCTION_NAME, new[] { new KeyValuePair <string, string>(PARAM_NAME, slope.ToString()) }, name, inputNames, outputNames, gpuEnable)
 {
     this._slope = slope;
 }
Example #29
 internal override Real BackwardActivate(Real gy, Real y)
 {
     return(gy * y * (1.0 - y));
 }
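This is the backward counterpart of the sigmoid ForwardActivate in Examples #3 and #14: since y = \sigma(x),

    \frac{\partial y}{\partial x} = \sigma(x)\bigl(1 - \sigma(x)\bigr) = y\,(1 - y)

so the incoming gradient gy is simply scaled by y(1 - y).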
Example #30
        public void BackwardCpu(NdArray y, NdArray x)
        {
            if (gcPrev == null)
            {
                // Initialize when there are no stored values
                this.gxPrev0 = new NdArray(new[] { OutputCount }, y.BatchCount);
                this.gxPrev1 = new NdArray(new[] { OutputCount }, y.BatchCount);
                this.gxPrev2 = new NdArray(new[] { OutputCount }, y.BatchCount);
                this.gxPrev3 = new NdArray(new[] { OutputCount }, y.BatchCount);
                this.gcPrev  = new Real[x.BatchCount * this.OutputCount];
            }
            else
            {
                this.lateral0.Backward(this.gxPrev0);
                this.lateral1.Backward(this.gxPrev1);
                this.lateral2.Backward(this.gxPrev2);
                this.lateral3.Backward(this.gxPrev3);
            }

            Real[] lcParam = this.cParam[this.cParam.Count - 1];
            this.cParam.RemoveAt(this.cParam.Count - 1);

            Real[] laParam = this.aParam[this.aParam.Count - 1];
            this.aParam.RemoveAt(this.aParam.Count - 1);

            Real[] liParam = this.iParam[this.iParam.Count - 1];
            this.iParam.RemoveAt(this.iParam.Count - 1);

            Real[] lfParam = this.fParam[this.fParam.Count - 1];
            this.fParam.RemoveAt(this.fParam.Count - 1);

            Real[] loParam = this.oParam[this.oParam.Count - 1];
            this.oParam.RemoveAt(this.oParam.Count - 1);

            Real[] cPrev = this.cParam[this.cParam.Count - 1];

            for (int i = 0; i < y.BatchCount; i++)
            {
                Real[] gParam = new Real[this.InputCount * 4];

                for (int j = 0; j < this.InputCount; j++)
                {
                    int prevOutputIndex = j + i * this.OutputCount;
                    int prevInputIndex  = j + i * this.InputCount;

                    double co = Math.Tanh(lcParam[prevOutputIndex]);

                    this.gcPrev[prevInputIndex] += y.Grad[prevOutputIndex] * loParam[prevOutputIndex] * GradTanh(co);
                    gParam[j + InputCount * 0]   = this.gcPrev[prevInputIndex] * liParam[prevOutputIndex] * GradTanh(laParam[prevOutputIndex]);
                    gParam[j + InputCount * 1]   = this.gcPrev[prevInputIndex] * laParam[prevOutputIndex] * GradSigmoid(liParam[prevOutputIndex]);
                    gParam[j + InputCount * 2]   = this.gcPrev[prevInputIndex] * cPrev[prevOutputIndex] * GradSigmoid(lfParam[prevOutputIndex]);
                    gParam[j + InputCount * 3]   = y.Grad[prevOutputIndex] * co * GradSigmoid(loParam[prevOutputIndex]);

                    this.gcPrev[prevInputIndex] *= lfParam[prevOutputIndex];
                }

                Real[] resultParam = new Real[this.OutputCount * 4];

                // Rearrange
                for (int j = 0; j < this.OutputCount * 4; j++)
                {
                    // Implicitly truncated by integer division
                    int index = j / this.OutputCount;
                    resultParam[j % this.OutputCount + index * OutputCount] = gParam[j / 4 + j % 4 * InputCount];
                }

                for (int j = 0; j < OutputCount; j++)
                {
                    this.gxPrev0.Grad[i * this.OutputCount + j] = resultParam[0 * this.OutputCount + j];
                    this.gxPrev1.Grad[i * this.OutputCount + j] = resultParam[1 * this.OutputCount + j];
                    this.gxPrev2.Grad[i * this.OutputCount + j] = resultParam[2 * this.OutputCount + j];
                    this.gxPrev3.Grad[i * this.OutputCount + j] = resultParam[3 * this.OutputCount + j];
                }
            }

            this.upward0.Backward(this.gxPrev0);
            this.upward1.Backward(this.gxPrev1);
            this.upward2.Backward(this.gxPrev2);
            this.upward3.Backward(this.gxPrev3);
        }