Example #1
        public void EmbedIDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 30);
            int outputCount = Mother.Dice.Next(1, 30);
            int batchCount  = Mother.Dice.Next(1, 5);

            int[,] input = (int[, ])Enumerable.Repeat(0, batchCount * inputCount).ToNdArray(batchCount, inputCount);
            input[0, 0]  = 1;

            Real[,,] dummyGy = (Real[, , ])Initializer.GetRealNdArray(new[] { batchCount, inputCount, outputCount });
            Real[,] w        = (Real[, ])Initializer.GetRealNdArray(new[] { inputCount, outputCount });

            //Chainer
            NChainer.EmbedID <Real> cEmbedId = new NChainer.EmbedID <Real>(inputCount, outputCount, Real.ToBaseNdArray(w));

            Variable <int> cX = new Variable <int>(input);

            Variable <Real> cY = cEmbedId.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.EmbedID embedId = new KelpNet.EmbedID(inputCount, outputCount, w);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = embedId.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[, , ])cY.Data);

            Real[] cWgrad = Real.ToRealArray((Real[, ])cEmbedId.W.Grad);

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //W.grad
            Assert.AreEqual(cWgrad.Length, embedId.Weight.Grad.Length);
            for (int i = 0; i < embedId.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], embedId.Weight.Grad[i], delta);
            }
        }
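The elementwise comparison loop above recurs in every one of these tests. A minimal helper sketch that factors it out (hypothetical, not part of KelpNet's test suite; it assumes the same MSTest Assert class used above):

        //Hypothetical helper: compare two flat arrays elementwise within a tolerance.
        static void AssertArraysEqual(Real[] expected, Real[] actual, double delta)
        {
            Assert.AreEqual(expected.Length, actual.Length);

            for (int i = 0; i < expected.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], delta);
            }
        }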
Example #2
        public void EmbedIDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 30);
            int outputCount = Mother.Dice.Next(1, 30);
            int batchCount  = Mother.Dice.Next(1, 5);

            int[,] input = (int[, ])Enumerable.Repeat(0, batchCount * inputCount).ToNdArray(batchCount, inputCount);
            input[0, 0]  = 1;

            Real[,,] dummyGy = Initializer.GetRandomValues <Real[, , ]>(batchCount, inputCount, outputCount);
            Real[,] w        = Initializer.GetRandomValues <Real[, ]>(inputCount, outputCount);

            //Chainer
            NChainer.EmbedID <Real> cEmbedId = new NChainer.EmbedID <Real>(inputCount, outputCount, w);

            Variable <int> cX = new Variable <int>(input);

            Variable <Real> cY = cEmbedId.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //KelpNet
            EmbedID <Real> embedId = new EmbedID <Real>(inputCount, outputCount, w);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = embedId.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, , ])cY.Data).Flatten();

            Real[] cWgrad = ((Real[, ])cEmbedId.W.Grad).Flatten();

            //Calculate the allowable tolerance
            Real delta = 0.00001f;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //W.grad
            Assert.AreEqual(cWgrad.Length, embedId.Weight.Grad.Length);
            for (int i = 0; i < embedId.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], embedId.Weight.Grad[i], delta);
            }
        }
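Examples #1 and #2 are the same test written against two generations of the KelpNet API: the older non-generic NdArray, which needs explicit Real.ToRealArray/Real.ToBaseNdArray conversions, and the newer generic NdArray&lt;Real&gt;, which accepts the multidimensional array directly. A side-by-side sketch of the equivalent constructions (names as used in these tests):

        //Older API: flatten manually and pass the shape plus the batch count.
        NdArray xOld = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

        //Newer generic API: pass the array directly; asBatch: true treats
        //the first dimension as the batch dimension.
        NdArray<Real> xNew = new NdArray<Real>(input, asBatch: true);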
Example #3
        public void SoftmaxCrossEntropyRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int channel    = Mother.Dice.Next(2, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] inputA = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, channel, height, width });
            int[,,] inputB   = (int[, , ])Enumerable.Repeat(0, batchCount * height * width).ToNdArray(batchCount, height, width);

            for (int b = 0; b < batchCount; b++)
            {
                inputB[b, Mother.Dice.Next(height), Mother.Dice.Next(width)] = 1;
            }

            //chainer
            NChainer.SoftmaxCrossEntropy <Real> cSoftmaxCrossEntropy = new NChainer.SoftmaxCrossEntropy <Real>();

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(inputA));
            Variable <int>  cY = new Variable <int>(inputB);

            Variable <Real> cZ = cSoftmaxCrossEntropy.Forward(cX, cY);

            cZ.Backward();

            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad);

            //KelpNet
            KelpNet.SoftmaxCrossEntropy softmaxCrossEntropy = new KelpNet.SoftmaxCrossEntropy();

            NdArray x = new NdArray(Real.ToRealArray(inputA), new[] { channel, height, width }, batchCount);
            NdArray y = new NdArray(Real.ToRealArray(inputB), new[] { height, width }, batchCount);

            //In KelpNet, the Backward side is only executed through Evaluate
            NdArray z = softmaxCrossEntropy.Evaluate(x, y);


            //Calculate the allowable tolerance (the error is larger because the internal computation order differs)
            double delta = 0.00001;

            //Loss
            Assert.AreEqual(cZ.Data[0], z.Data[0], delta);

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
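Note the asymmetry between the two frameworks here: KelpNet loss functions are driven through Evaluate, which returns the loss and fills in the input gradients in one call, while Chainer separates Forward and Backward. The two call patterns, condensed from the test above:

        //Chainer: explicit Forward, then Backward to populate cX.Grad.
        Variable<Real> cZ = cSoftmaxCrossEntropy.Forward(cX, cY);
        cZ.Backward();

        //KelpNet: Evaluate computes the loss and the gradients together.
        NdArray z = softmaxCrossEntropy.Evaluate(x, y);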
Example #4
        public void ConcatRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int widthA     = Mother.Dice.Next(1, 16);
            int widthB     = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);
            int axis       = 3;

            Real[,,,] inputA  = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthA);
            Real[,,,] inputB  = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthB);
            Real[,,,] dummyGy = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthA + widthB);

            //chainer
            NChainer.Concat <Real> cConcat = new NChainer.Concat <Real>(axis);

            Variable <Real> cX = new Variable <Real>(inputA);
            Variable <Real> cY = new Variable <Real>(inputB);

            Variable <Real> cZ = cConcat.Forward(cX, cY);

            cZ.Grad = dummyGy;

            cZ.Backward();


            //KelpNet
            Concat <Real> concat = new Concat <Real>(axis - 1);//Unlike Chainer, the first dimension is not implicitly treated as the batch dimension

            NdArray <Real> x = new NdArray <Real>(inputA, asBatch: true);
            NdArray <Real> y = new NdArray <Real>(inputB, asBatch: true);

            NdArray <Real> z = concat.Forward(x, y)[0];

            z.Grad = dummyGy.Flatten();

            z.Backward();


            Real[] cZdata = ((Real[, , , ])cZ.Data).Flatten();

            //A copy is required
            Real[] cXgrad = ((Real[, , , ])cX.Grad.Copy()).Flatten();
            Real[] cYgrad = ((Real[, , , ])cY.Grad.Copy()).Flatten();

            //Calculate the allowable tolerance
            Real delta = 0.00001f;

            //z
            Assert.AreEqual(cZdata.Length, z.Data.Length);
            for (int i = 0; i < z.Data.Length; i++)
            {
                Assert.AreEqual(cZdata[i], z.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //y.grad
            Assert.AreEqual(cYgrad.Length, y.Grad.Length);
            for (int i = 0; i < y.Grad.Length; i++)
            {
                Assert.AreEqual(cYgrad[i], y.Grad[i], delta);
            }
        }
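The axis - 1 adjustment above is worth spelling out: Chainer includes the batch dimension in its axis numbering while KelpNet does not, so a Chainer axis a (with a >= 1) maps to KelpNet axis a - 1. A sketch for the N, C, H, W layout used here:

        int chainerAxis = 3;                                      //N, C, H, W -> concatenate along W
        NChainer.Concat<Real> cConcat = new NChainer.Concat<Real>(chainerAxis);
        Concat<Real> concat = new Concat<Real>(chainerAxis - 1);  //C, H, W -> the same W axis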
Example #5
        public void LSTMRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount  = Mother.Dice.Next(1, 5);
            int inputCount  = Mother.Dice.Next(1, 5);
            int outputCount = Mother.Dice.Next(1, 5);

            Real[,] input   = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, inputCount });
            Real[,] dummyGy = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, outputCount });

            Real[,] upwardInit      = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, inputCount });
            Real[,] lateralInit     = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, outputCount });
            Real[,,] biasInit       = (Real[, , ])Initializer.GetRealNdArray(new[] { 1, outputCount, 1 });
            Real[,,] forgetBiasInit = (Real[, , ])Initializer.GetRealNdArray(new[] { 1, outputCount, 1 });

            //Chainer
            NChainer.LSTM <Real> clstm = new NChainer.LSTM <Real>(inputCount, outputCount, Real.ToBaseNdArray(lateralInit), Real.ToBaseNdArray(upwardInit), Real.ToBaseNdArray(biasInit), Real.ToBaseNdArray(forgetBiasInit));

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));
            Variable <Real> cY = clstm.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            //KelpNet
            KelpNet.LSTM lstm = new KelpNet.LSTM(inputCount, outputCount, lateralInit, upwardInit, biasInit, forgetBiasInit);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);
            NdArray y = lstm.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            //Calculate the allowable tolerance
            double delta = 0.00001;

            Real[] cYdata = Real.ToRealArray((Real[, ])cY.Data);
            Real[] cXgrad = Real.ToRealArray((Real[, ])cX.Grad);

            Real[] cupwardWGrad = Real.ToRealArray((Real[, ])clstm.upward.W.Grad);
            Real[] cupwardbGrad = (Real[])clstm.upward.b.Grad;


            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < cYdata.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < cXgrad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //W.grad
            int wLen = lstm.upward.Weight.Grad.Length;

            Assert.AreEqual(cupwardWGrad.Length, lstm.upward.Weight.Grad.Length);
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            int bLen = lstm.upward.Bias.Length;

            Assert.AreEqual(cupwardbGrad.Length, lstm.upward.Bias.Grad.Length);
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }


            //////////////////////////////////////////////////////////////////////////////////////////
            // On the first pass, lateral has no gradient values                                    //
            //////////////////////////////////////////////////////////////////////////////////////////
            //Real[] clateralWGrad = Real.ToRealArray((Real[,])clstm.lateral.W.Grad);
            //Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            //for (int i = 0; i < wLen; i++)
            //{
            //    Assert.AreEqual(clateralWGrad[i + wLen * 0], lstm.lateral.Weight.Grad[i], delta);
            //}
            //////////////////////////////////////////////////////////////////////////////////////////

            ///////////////
            //Second pass//
            ///////////////
            Real[,] input2   = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, inputCount });
            Real[,] dummyGy2 = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, outputCount });

            //Chainer
            Variable <Real> cX2 = new Variable <Real>(Real.ToBaseNdArray(input2));
            Variable <Real> cY2 = clstm.Forward(cX2);

            cY2.Grad = Real.ToBaseNdArray(dummyGy2);

            cY2.Backward();

            //KelpNet
            NdArray x2 = new NdArray(Real.ToRealArray(input2), new[] { inputCount }, batchCount);
            NdArray y2 = lstm.Forward(x2)[0];

            y2.Grad = Real.ToRealArray(dummyGy2);

            y2.Backward();

            Real[] cYdata2 = Real.ToRealArray((Real[, ])cY2.Data);
            Real[] cXgrad2 = Real.ToRealArray((Real[, ])cX2.Grad);

            Real[] cupwardWGrad2 = Real.ToRealArray((Real[, ])clstm.upward.W.Grad);
            Real[] cupwardbGrad2 = (Real[])clstm.upward.b.Grad;
            Real[] clateralWGrad = Real.ToRealArray((Real[, ])clstm.lateral.W.Grad);

            //y
            Assert.AreEqual(cYdata2.Length, y2.Data.Length);
            for (int i = 0; i < cYdata2.Length; i++)
            {
                Assert.AreEqual(cYdata2[i], y2.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad2.Length, x2.Grad.Length);
            for (int i = 0; i < cXgrad2.Length; i++)
            {
                Assert.AreEqual(cXgrad2[i], x2.Grad[i], delta);
            }

            //W.grad
            Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            for (int i = 0; i < clateralWGrad.Length; i++)
            {
                Assert.AreEqual(clateralWGrad[i + wLen * 0], lstm.lateral.Weight.Grad[i], delta);
            }

            //The error is larger because of the many intermediate steps
            delta = 1.0;
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad2[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad2[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }
        }
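The two-pass structure of this test reflects the LSTM's statefulness: on the first Forward/Backward pass only the upward (input) weights receive gradients, and the lateral (recurrent) weights only participate once a previous hidden state exists. The pattern used above, condensed:

        NdArray y1 = lstm.Forward(x)[0];  //pass 1: no previous state, lateral unused
        y1.Backward();                    //only upward.Weight.Grad is populated

        NdArray y2 = lstm.Forward(x2)[0]; //pass 2: previous state feeds the lateral weights
        y2.Backward();                    //lateral.Weight.Grad is now populated too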
Example #6
        public void AdamRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, inputCount });
            Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
            Real[,] w = (Real[,])Initializer.GetRealNdArray(new[] { outputCount, inputCount });
            Real[] b = Initializer.GetRealArray(outputCount);


            float alpha = (float)Mother.Dice.NextDouble(); //0.001f
            float beta1 = (float)Mother.Dice.NextDouble(); //0.9f;
            float beta2 = (float)Mother.Dice.NextDouble(); //0.999f;
            float eps = (float)Mother.Dice.NextDouble(); //1e-08f;
            float eta = (float)Mother.Dice.NextDouble(); //1.0f;

            //Chainer
            NChainer.Linear<Real> cLinear = new NChainer.Linear<Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));
            NChainer.Adam<Real> cAdam = new NChainer.Adam<Real>(alpha, beta1, beta2, eps, eta);
            cAdam.Setup(cLinear);

            Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

            Variable<Real> cY = cLinear.Forward(cX);
            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            cAdam.Update();

            //KelpNet
            KelpNet.CL.Linear linear = new KelpNet.CL.Linear(inputCount, outputCount, false, w, b);
            KelpNet.Adam adam = new Adam(alpha, beta1, beta2, eps, eta);
            adam.SetUp(linear);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = linear.Forward(x)[0];
            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            adam.Update();


            Real[] cW = Real.ToRealArray((Real[,])cLinear.W.Data);
            Real[] cb = (Real[])cLinear.b.Data;

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
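The hyperparameters are randomized here, but the inline comments record the canonical Adam defaults. For reference, the default configuration would be (same constructors as above):

        NChainer.Adam<Real> cDefault = new NChainer.Adam<Real>(0.001f, 0.9f, 0.999f, 1e-08f, 1.0f);
        KelpNet.Adam defaultAdam = new Adam(0.001f, 0.9f, 0.999f, 1e-08f, 1.0f);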
Example #7
        public void RandomTest(bool gpuEnable)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int inChCount  = Mother.Dice.Next(1, 5);
            int outChCount = Mother.Dice.Next(1, 5);
            int wideSize   = Mother.Dice.Next(8, 32);
            int heightSize = Mother.Dice.Next(8, 32);

            int kWidth  = Mother.Dice.Next(1, 5);
            int kHeight = Mother.Dice.Next(1, 5);

            int strideX = Mother.Dice.Next(1, 5);
            int strideY = Mother.Dice.Next(1, 5);

            int padX = Mother.Dice.Next(0, 5);
            int padY = Mother.Dice.Next(0, 5);

            int outputHeight = (int)Math.Floor((heightSize - kHeight + padY * 2.0) / strideY) + 1;
            int outputWidth  = (int)Math.Floor((wideSize - kWidth + padX * 2.0) / strideX) + 1;
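            //Worked example of the standard output-size formula above:
            //heightSize = 10, kHeight = 3, padY = 1, strideY = 2
            //outputHeight = floor((10 - 3 + 1 * 2.0) / 2) + 1 = floor(4.5) + 1 = 5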

            Real[,,,] input = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, inChCount, heightSize, wideSize });

            Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, outChCount, outputHeight, outputWidth });
            Real[,,,] w       = (Real[, , , ])Initializer.GetRealNdArray(new[] { outChCount, inChCount, kHeight, kWidth });

            Real[] b = Initializer.GetRealArray(outChCount);

            //Chainer
            NChainer.Convolution2D <Real> cConvolution2D = new NChainer.Convolution2D <Real>(
                inChCount, outChCount,
                new[] { kHeight, kWidth },
                new[] { strideY, strideX },
                new[] { padY, padX },
                false,
                Real.ToBaseNdArray(w),
                Real.ToBaseArray(b));

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cConvolution2D.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.CL.Convolution2D convolution2D = new KelpNet.CL.Convolution2D(
                inChCount, outChCount,
                new[] { kWidth, kHeight },
                new[] { strideX, strideY },
                new[] { padX, padY },
                false, w, b, gpuEnable: gpuEnable);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inChCount, heightSize, wideSize }, batchCount);

            NdArray y = convolution2D.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[, , , ])cY.Data.Copy());
            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad.Copy());

            Real[] cWgrad = Real.ToRealArray((Real[, , , ])cConvolution2D.W.Grad);
            Real[] cbgrad = (Real[])cConvolution2D.b.Grad;

            //Calculate the allowable tolerance
            double delta = 0.00001;

            Assert.AreEqual(cYdata.Length, y.Data.Length);
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            Assert.AreEqual(cWgrad.Length, convolution2D.Weight.Grad.Length);
            Assert.AreEqual(cbgrad.Length, convolution2D.Bias.Grad.Length);

            //y
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            delta = 0.1;
            //W.grad
            for (int i = 0; i < convolution2D.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], convolution2D.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < convolution2D.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], convolution2D.Bias.Grad[i], delta);
            }
        }
Example #8
        public void RandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = 1 + Mother.Dice.Next() % 49;
            int outputCount = 1 + Mother.Dice.Next() % 49;

            Real[,] input = (Real[, ])Initializer.GetRealNdArray(new[] { 1, inputCount });

            Real[,] dummyGy = (Real[, ])Initializer.GetRealNdArray(new[] { 1, outputCount });
            Real[,] w       = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, inputCount });

            Real[] b = Initializer.GetRealArray(outputCount);

            //Chainer
            NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.Linear linear = new KelpNet.Linear(inputCount, outputCount, false, w, b);

            NdArray x = new NdArray(input);

            NdArray y = linear.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[, ])cY.Data);
            Real[] cXgrad = Real.ToRealArray((Real[, ])cX.Grad);

            Real[] cWgrad = Real.ToRealArray((Real[, ])cLinear.W.Grad);
            Real[] cbgrad = (Real[])cLinear.b.Grad;

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //W.grad
            Assert.AreEqual(cWgrad.Length, linear.Weight.Grad.Length);
            for (int i = 0; i < linear.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], linear.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad.Length, linear.Bias.Grad.Length);
            for (int i = 0; i < linear.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], linear.Bias.Grad[i], delta);
            }
        }
Example #9
        public void SwishRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int ioCount = Mother.Dice.Next(1, 50);
            int batchCount = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

            Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

            Real beta = (Real)Mother.Dice.NextDouble();

            //Chainer
            NChainer.Swish<Real> cSwish = new NChainer.Swish<Real>(new[] { ioCount }, beta);

            Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

            Variable<Real> cY = cSwish.Forward(cX);
            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.Swish swish = new KelpNet.Swish(new[] { ioCount }, beta);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { ioCount }, batchCount);

            NdArray y = swish.Forward(x)[0];
            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[,])cY.Data);
            Real[] cXgrad = Real.ToRealArray((Real[,])cX.Grad);

            Real[] cbgrad = (Real[])cSwish.beta.Grad;

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad.Length, swish.Beta.Grad.Length);
            for (int i = 0; i < swish.Beta.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], swish.Beta.Grad[i], delta);
            }
        }
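Swish computes x * sigmoid(beta * x) elementwise, with beta a trainable parameter, which is why the test checks Beta.Grad alongside x.Grad. A scalar reference implementation for orientation (textbook Swish, not KelpNet internals):

        //x * sigmoid(beta * x) == x / (1 + exp(-beta * x))
        static double SwishRef(double x, double beta) => x / (1.0 + Math.Exp(-beta * x));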
Example #10
        public void LSTMRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount  = Mother.Dice.Next(1, 5);
            int inputCount  = Mother.Dice.Next(1, 5);
            int outputCount = Mother.Dice.Next(1, 5);

            Real[,] input   = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);
            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);

            Real[,] upwardInit      = Initializer.GetRandomValues <Real[, ]>(outputCount, inputCount);
            Real[,] lateralInit     = Initializer.GetRandomValues <Real[, ]>(outputCount, outputCount);
            Real[,,] biasInit       = Initializer.GetRandomValues <Real[, , ]>(1, outputCount, 1);
            Real[,,] forgetBiasInit = Initializer.GetRandomValues <Real[, , ]>(1, outputCount, 1);

            //Chainer
            NChainer.LSTM <Real> clstm = new NChainer.LSTM <Real>(inputCount, outputCount, lateralInit, upwardInit, biasInit, forgetBiasInit);

            Variable <Real> cX = new Variable <Real>(input);
            Variable <Real> cY = clstm.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();

            //KelpNet
            LSTM <Real> lstm = new LSTM <Real>(inputCount, outputCount, lateralInit, upwardInit, biasInit, forgetBiasInit);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);
            NdArray <Real> y = lstm.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();

            //Calculate the allowable tolerance
            Real delta = 0.00001f;

            Real[] cYdata = ((Real[, ])cY.Data).Flatten();
            Real[] cXgrad = ((Real[, ])cX.Grad).Flatten();

            Real[] cupwardWGrad = ((Real[, ])clstm.upward.W.Grad).Flatten();
            Real[] cupwardbGrad = (Real[])clstm.upward.b.Grad;


            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < cYdata.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < cXgrad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //W.grad
            int wLen = lstm.upward.Weight.Grad.Length;

            Assert.AreEqual(cupwardWGrad.Length, lstm.upward.Weight.Grad.Length);
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            int bLen = lstm.upward.Bias.Length;

            Assert.AreEqual(cupwardbGrad.Length, lstm.upward.Bias.Grad.Length);
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }


            //////////////////////////////////////////////////////////////////////////////////////////
            // On the first pass, lateral has no gradient values                                    //
            //////////////////////////////////////////////////////////////////////////////////////////
            //Real[] clateralWGrad = Real.ToRealArray((Real[,])clstm.lateral.W.Grad);
            //Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            //for (int i = 0; i < wLen; i++)
            //{
            //    Assert.AreEqual(clateralWGrad[i + wLen * 0], lstm.lateral.Weight.Grad[i], delta);
            //}
            //////////////////////////////////////////////////////////////////////////////////////////

            ///////////////
            //Second pass//
            ///////////////
            Real[,] input2   = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);
            Real[,] dummyGy2 = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);

            //Chainer
            Variable <Real> cX2 = new Variable <Real>(input2);
            Variable <Real> cY2 = clstm.Forward(cX2);

            cY2.Grad = dummyGy2;

            cY2.Backward();

            //KelpNet
            NdArray <Real> x2 = new NdArray <Real>(input2, asBatch: true);
            NdArray <Real> y2 = lstm.Forward(x2)[0];

            y2.Grad = dummyGy2.Flatten();

            y2.Backward();

            Real[] cYdata2 = ((Real[, ])cY2.Data).Flatten();
            Real[] cXgrad2 = ((Real[, ])cX2.Grad).Flatten();

            Real[] cupwardWGrad2 = ((Real[, ])clstm.upward.W.Grad).Flatten();
            Real[] cupwardbGrad2 = (Real[])clstm.upward.b.Grad;
            Real[] clateralWGrad = ((Real[, ])clstm.lateral.W.Grad).Flatten();

            //y
            Assert.AreEqual(cYdata2.Length, y2.Data.Length);
            for (int i = 0; i < cYdata2.Length; i++)
            {
                Assert.AreEqual(cYdata2[i], y2.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad2.Length, x2.Grad.Length);
            for (int i = 0; i < cXgrad2.Length; i++)
            {
                Assert.AreEqual(cXgrad2[i], x2.Grad[i], delta);
            }

            //W.grad
            Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            for (int i = 0; i < clateralWGrad.Length; i++)
            {
                Assert.AreEqual(clateralWGrad[i + wLen * 0], lstm.lateral.Weight.Grad[i], delta);
            }

            //The error is larger because of the many intermediate steps
            delta = 1.0f;
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad2[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad2[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }
        }
Example #11
        public void BasicMathRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            //Make random value.
            Real[] val = { 1 + Mother.Dice.Next() };

            //Chainer
            Variable <Real> cX = new Variable <Real>(val);

            //Add
            Variable <Real> cAdd = 2 + cX + cX + 2;

            cAdd.Backward();
            Real[] pyGadd = (Real[])cX.Grad;

            //Mul
            Variable <Real> cMul = 2 * cX * cX * 3;

            cMul.Backward();
            Real[] pyGmul = (Real[])cX.Grad;

            //Sub
            Variable <Real> cSub = 30 - cX - cX - 2;

            cSub.Backward();
            Real[] pyGsub = (Real[])cX.Grad;

            //Div
            Variable <Real> cDiv = 50 / cX / cX / 2;

            cDiv.Backward();
            Real[] pyGdiv = (Real[])cX.Grad;


            //KelpNet
            NdArray <Real> x = new NdArray <Real>(val);

            //Add
            NdArray <Real> add = 2 + x + x + 2;

            add.Backward();
            Real[] gadd = x.Grad.ToArray(); //this ToArray makes a copy

            //Mul
            NdArray <Real> mul = 2 * x * x * 3;

            mul.Backward();
            Real[] gmul = x.Grad.ToArray();

            //Sub
            NdArray <Real> sub = 30 - x - x - 2;

            sub.Backward();
            Real[] gsub = x.Grad.ToArray();

            //Div
            NdArray <Real> div = 50 / x / x / 2;

            div.Backward();
            Real[] gdiv = x.Grad.ToArray();


            //Check
            CollectionAssert.AreEqual(pyGadd, gadd);
            CollectionAssert.AreEqual(pyGmul, gmul);
            CollectionAssert.AreEqual(pyGsub, gsub);
            CollectionAssert.AreEqual(pyGdiv, gdiv);
        }
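The x.Grad.ToArray() calls matter: judging by the comment above, each subsequent Backward reuses the same x.Grad buffer, so snapshotting it via ToArray (which copies) keeps the earlier results from being clobbered by the later passes. A sketch of the difference (assuming Grad exposes the underlying array):

        Real[] aliased  = x.Grad;           //a reference; later Backward calls overwrite it
        Real[] snapshot = x.Grad.ToArray(); //an independent copy, as used above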
Example #12
        public void RnnLSTMRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            Real[,] input = { { 1.0 }, { 3.0 }, { 5.0 }, { 7.0 }, { 9.0 } };
            Real[,] teach = { { 3.0 }, { 5.0 }, { 7.0 }, { 9.0 }, { 11.0 } };

            Real[,] input2 = { { 3.0 }, { 5.0 }, { 7.0 }, { 9.0 }, { 11.0 } };
            Real[,] teach2 = { { 5.0 }, { 7.0 }, { 9.0 }, { 11.0 }, { 13.0 } };

            int outputCount = 1;
            int inputCount  = 1;
            int hiddenCount = 2;

            Real[,] upwardInit      = (Real[, ])Initializer.GetRealNdArray(new[] { hiddenCount, hiddenCount });
            Real[,] lateralInit     = (Real[, ])Initializer.GetRealNdArray(new[] { hiddenCount, hiddenCount });
            Real[,,] biasInit       = (Real[, , ])Initializer.GetRealNdArray(new[] { 1, hiddenCount, 1 });
            Real[,,] forgetBiasInit = (Real[, , ])Initializer.GetRealNdArray(new[] { 1, hiddenCount, 1 });

            //Chainer
            Real[,] w1 = (Real[, ])Initializer.GetRealNdArray(new[] { hiddenCount, inputCount });
            Real[] b1 = Initializer.GetRealArray(hiddenCount);

            //Chainer
            NChainer.Linear <Real> cLinear1 = new NChainer.Linear <Real>(inputCount, hiddenCount, false, Real.ToBaseNdArray(w1), Real.ToBaseArray(b1));
            NChainer.LSTM <Real>   cLstm    = new NChainer.LSTM <Real>(hiddenCount, hiddenCount, Real.ToBaseNdArray(lateralInit), Real.ToBaseNdArray(upwardInit), Real.ToBaseNdArray(biasInit), Real.ToBaseNdArray(forgetBiasInit));

            Real[,] w2 = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, hiddenCount });
            Real[] b2 = Initializer.GetRealArray(outputCount);
            NChainer.Linear <Real> cLinear2 = new NChainer.Linear <Real>(hiddenCount, outputCount, false, Real.ToBaseNdArray(w2), Real.ToBaseArray(b2));

            Variable <Real> cX1  = new Variable <Real>(Real.ToBaseNdArray(input));
            Variable <Real> cY11 = cLinear1.Forward(cX1);
            Variable <Real> cY12 = cLstm.Forward(cY11);
            Variable <Real> cY13 = cLinear2.Forward(cY12);
            Variable <Real> cT   = new Variable <Real>(Real.ToBaseNdArray(teach));

            Variable <Real> cLoss = new NChainer.MeanSquaredError <Real>().Forward(cY13, cT);

            cLoss.Backward();


            //KelpNet
            KelpNet.CL.Linear linear1 = new KelpNet.CL.Linear(inputCount, hiddenCount, false, w1, b1);
            KelpNet.LSTM      lstm    = new KelpNet.LSTM(hiddenCount, hiddenCount, lateralInit, upwardInit, biasInit, forgetBiasInit);
            KelpNet.CL.Linear linear2 = new KelpNet.CL.Linear(hiddenCount, outputCount, false, w2, b2);

            NdArray x1  = new NdArray(Real.ToRealArray(input), new[] { 1 }, 5);
            NdArray y11 = linear1.Forward(x1)[0];
            NdArray y12 = lstm.Forward(y11)[0];
            NdArray y13 = linear2.Forward(y12)[0];
            NdArray t   = new NdArray(Real.ToRealArray(teach), new[] { 1 }, 5);

            NdArray loss = new KelpNet.MeanSquaredError().Evaluate(y13, t);

            y13.Backward();

            Real[] cY11data = Real.ToRealArray((Real[, ])cY11.Data);
            Real[] cY12data = Real.ToRealArray((Real[, ])cY12.Data);
            Real[] cY13data = Real.ToRealArray((Real[, ])cY13.Data);
            Real[] cXgrad   = Real.ToRealArray((Real[, ])cX1.Grad);

            Real[] cupwardWGrad = Real.ToRealArray((Real[, ])cLstm.upward.W.Grad);
            Real[] cupwardbGrad = (Real[])cLstm.upward.b.Grad;


            //Set the allowable tolerance
            double delta = 0.00001;

            //y11
            Assert.AreEqual(cY11data.Length, y11.Data.Length);
            for (int i = 0; i < cY11data.Length; i++)
            {
                Assert.AreEqual(cY11data[i], y11.Data[i], delta);
            }

            //y12
            Assert.AreEqual(cY12data.Length, y12.Data.Length);
            for (int i = 0; i < cY12data.Length; i++)
            {
                Assert.AreEqual(cY12data[i], y12.Data[i], delta);
            }

            //y13
            Assert.AreEqual(cY13data.Length, y13.Data.Length);
            for (int i = 0; i < cY13data.Length; i++)
            {
                Assert.AreEqual(cY13data[i], y13.Data[i], delta);
            }

            //Set the allowable tolerance
            delta = 0.0001;

            //loss
            Assert.AreEqual(cLoss.Data[0], loss.Data[0], delta);

            //x.Grad
            Assert.AreEqual(cXgrad.Length, x1.Grad.Length);
            for (int i = 0; i < cXgrad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x1.Grad[i], delta);
            }

            Real[] cWgrad11 = Real.ToRealArray((Real[, ])cLinear1.W.Grad);
            Real[] cbgrad11 = (Real[])cLinear1.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad11.Length, linear1.Weight.Grad.Length);
            for (int i = 0; i < linear1.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad11[i], linear1.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad11.Length, linear1.Bias.Grad.Length);
            for (int i = 0; i < linear1.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad11[i], linear1.Bias.Grad[i], delta);
            }


            Real[] cWgrad12 = Real.ToRealArray((Real[, ])cLinear2.W.Grad);
            Real[] cbgrad12 = (Real[])cLinear2.b.Grad;


            //W.grad
            Assert.AreEqual(cWgrad12.Length, linear2.Weight.Grad.Length);
            for (int i = 0; i < linear2.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad12[i], linear2.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad12.Length, linear2.Bias.Grad.Length);
            for (int i = 0; i < linear2.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad12[i], linear2.Bias.Grad[i], delta);
            }

            //W.grad
            int wLen = lstm.upward.Weight.Grad.Length;

            Assert.AreEqual(cupwardWGrad.Length, lstm.upward.Weight.Grad.Length);
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            int bLen = lstm.upward.Bias.Length;

            Assert.AreEqual(cupwardbGrad.Length, lstm.upward.Bias.Grad.Length);
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }


            //Second pass
            Variable <Real> cX2  = new Variable <Real>(Real.ToBaseNdArray(input2));
            Variable <Real> cY21 = cLinear1.Forward(cX2);
            Variable <Real> cY22 = cLstm.Forward(cY21);
            Variable <Real> cY23 = cLinear2.Forward(cY22);
            Variable <Real> cT2  = new Variable <Real>(Real.ToBaseNdArray(teach2));

            Variable <Real> cLoss2 = new NChainer.MeanSquaredError <Real>().Forward(cY23, cT2);

            //KelpNet
            NdArray x2  = new NdArray(Real.ToRealArray(input2), new[] { 1 }, 5);
            NdArray y21 = linear1.Forward(x2)[0];
            NdArray y22 = lstm.Forward(y21)[0];
            NdArray y23 = linear2.Forward(y22)[0];
            NdArray t2  = new NdArray(Real.ToRealArray(teach2), new[] { 1 }, 5);

            NdArray loss2 = new KelpNet.MeanSquaredError().Evaluate(y23, t2);

            Assert.AreEqual(cLoss2.Data[0], loss2.Data[0], delta);

            //Run Backward
            cLoss2.Backward();
            y23.Backward();


            Real[] cYdata21 = Real.ToRealArray((Real[, ])cY21.Data);
            Real[] cYdata22 = Real.ToRealArray((Real[, ])cY22.Data);
            Real[] cYdata23 = Real.ToRealArray((Real[, ])cY23.Data);
            Real[] cXgrad2  = Real.ToRealArray((Real[, ])cX2.Grad);

            Real[] cupwardWGrad2 = Real.ToRealArray((Real[, ])cLstm.upward.W.Grad);
            Real[] cupwardbGrad2 = (Real[])cLstm.upward.b.Grad;
            Real[] clateralWGrad = Real.ToRealArray((Real[, ])cLstm.lateral.W.Grad);

            //y21
            Assert.AreEqual(cYdata21.Length, y21.Data.Length);
            for (int i = 0; i < cYdata21.Length; i++)
            {
                Assert.AreEqual(cYdata21[i], y21.Data[i], delta);
            }

            //y22
            Assert.AreEqual(cYdata22.Length, y22.Data.Length);
            for (int i = 0; i < cYdata22.Length; i++)
            {
                Assert.AreEqual(cYdata22[i], y22.Data[i], delta);
            }

            //y23
            Assert.AreEqual(cYdata23.Length, y23.Data.Length);
            for (int i = 0; i < cYdata23.Length; i++)
            {
                Assert.AreEqual(cYdata23[i], y23.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad2.Length, x2.Grad.Length);
            for (int i = 0; i < cXgrad2.Length; i++)
            {
                Assert.AreEqual(cXgrad2[i], x2.Grad[i], delta);
            }

            //The error is considerably larger because of the many intermediate steps
            delta = 1.0;

            Real[] cWgrad22 = Real.ToRealArray((Real[, ])cLinear2.W.Grad);
            Real[] cbgrad22 = (Real[])cLinear2.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad22.Length, linear2.Weight.Grad.Length);
            for (int i = 0; i < linear2.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad22[i], linear2.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad22.Length, linear2.Bias.Grad.Length);
            for (int i = 0; i < linear2.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad22[i], linear2.Bias.Grad[i], delta);
            }


            delta = 2.0;

            //W.grad
            Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            for (int i = 0; i < clateralWGrad.Length; i++)
            {
                Assert.AreEqual(clateralWGrad[i + wLen * 0], lstm.lateral.Weight.Grad[i], delta);
            }

            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad2[i + wLen * 0], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad2[i + wLen * 0], lstm.upward.Bias.Grad[i], delta);
            }


            delta = 20.0;

            Real[] cWgrad21 = Real.ToRealArray((Real[, ])cLinear1.W.Grad);
            Real[] cbgrad21 = (Real[])cLinear1.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad21.Length, linear1.Weight.Grad.Length);
            for (int i = 0; i < linear1.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad21[i], linear1.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad21.Length, linear1.Bias.Grad.Length);
            for (int i = 0; i < linear1.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad21[i], linear1.Bias.Grad[i], delta);
            }
        }
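The three links in this test are composed by hand: each Forward returns an array of output NdArrays, [0] selects the single output, and that output feeds the next link. The recurring pattern, condensed from the test above:

        NdArray h1 = linear1.Forward(x1)[0]; //input -> hidden
        NdArray h2 = lstm.Forward(h1)[0];    //hidden -> hidden (stateful)
        NdArray o  = linear2.Forward(h2)[0]; //hidden -> output
        NdArray mse = new KelpNet.MeanSquaredError().Evaluate(o, t);
        o.Backward();                        //gradients flow back through all three links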
Example #13
        public void LRNRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int   n = Mother.Dice.Next(2, 7);
            float k = (float)Mother.Dice.NextDouble() * 3;

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] input   = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);
            Real[,,,] dummyGy = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);


            //chainer
            LocalResponseNormalization <Real> cLocalResponseNormalization = new LocalResponseNormalization <Real>(n, k);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cLocalResponseNormalization.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //kelpnet
            LRN <Real> lrn = new LRN <Real>(n, k);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = lrn.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, , , ])cY.Data).Flatten();
            Real[] cXgrad = ((Real[, , , ])cX.Grad).Flatten();

            //Calculate the allowable tolerance
            Real delta = 0.00001f;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #14
        public void SwishRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int ioCount    = Mother.Dice.Next(1, 50);
            int batchCount = Mother.Dice.Next(1, 5);

            Real[,] input = Initializer.GetRandomValues <Real[, ]>(batchCount, ioCount);

            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, ioCount);

            Real beta = (Real)Mother.Dice.NextDouble();

            //Chainer
            NChainer.Swish <Real> cSwish = new NChainer.Swish <Real>(new[] { ioCount }, beta);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cSwish.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //KelpNet
            Swish <Real> swish = new Swish <Real>(new[] { ioCount }, beta);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = swish.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, ])cY.Data).Flatten();
            Real[] cXgrad = ((Real[, ])cX.Grad).Flatten();

            Real[] cbgrad = (Real[])cSwish.beta.Grad;

            //Calculate the allowable tolerance
            Real delta = 0.00001f;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad.Length, swish.Beta.Grad.Length);
            for (int i = 0; i < swish.Beta.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], swish.Beta.Grad[i], delta);
            }
        }
Example #15
        public void TrainTest(bool isTrain, bool finetune)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 50);
            int ioCount    = Mother.Dice.Next(1, 50);

            Real[,] input = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

            Real[,] dummyGy = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

            Real[] gamma = Initializer.GetRealArray(ioCount);
            Real[] beta  = Initializer.GetRealArray(ioCount);

            Real[] avgMean = Initializer.GetRealArray(ioCount);
            Real[] avgVar  = Initializer.GetRealArray(ioCount);

            //Chainer
            Chainer.Config["train"] = isTtrain;

            NChainer.BatchNormalization <Real> cBatchNormalization = new NChainer.BatchNormalization <Real>(ioCount, dtype: Real.Type);

            cBatchNormalization.gamma = new Variable <Real>(Real.ToBaseNdArray(gamma));
            cBatchNormalization.beta  = new Variable <Real>(Real.ToBaseNdArray(beta));

            cBatchNormalization.avgMean = Real.ToBaseNdArray(avgMean);
            cBatchNormalization.avgVar  = Real.ToBaseNdArray(avgVar);

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cBatchNormalization.Forward(cX, finetune);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            //KelpNet
            KelpNet.BatchNormalization batchNormalization = new BatchNormalization(ioCount, train: isTrain, finetune: finetune);

            batchNormalization.Gamma.Data = Real.ToRealArray(gamma);
            batchNormalization.Beta.Data  = Real.ToRealArray(beta);

            batchNormalization.AvgMean.Data = Real.ToRealArray(avgMean);
            batchNormalization.AvgVar.Data  = Real.ToRealArray(avgVar);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { ioCount }, batchCount);

            NdArray y = batchNormalization.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[, ])cY.Data);
            Real[] cXgrad = Real.ToRealArray((Real[, ])cX.Grad);

            Real[] cGammaGrad = Real.ToRealArray((Real[])cBatchNormalization.gamma.Grad);
            Real[] cBetaGrad  = Real.ToRealArray((Real[])cBatchNormalization.beta.Grad);

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //gamma.grad
            Assert.AreEqual(cGammaGrad.Length, batchNormalization.Gamma.Grad.Length);
            for (int i = 0; i < batchNormalization.Gamma.Grad.Length; i++)
            {
                Assert.AreEqual(cGammaGrad[i], batchNormalization.Gamma.Grad[i], delta);
            }

            //beta.grad
            Assert.AreEqual(cBetaGrad.Length, batchNormalization.Beta.Grad.Length);
            for (int i = 0; i < batchNormalization.Beta.Grad.Length; i++)
            {
                Assert.AreEqual(cBetaGrad[i], batchNormalization.Beta.Grad[i], delta);
            }
        }
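For orientation: batch normalization computes gamma * (x - mean) / sqrt(var + eps) + beta, where train mode uses the batch statistics and inference mode uses the tracked AvgMean/AvgVar. A scalar reference sketch (textbook BN, not KelpNet internals; the eps value is an assumption):

        static double BatchNormRef(double x, double mean, double var, double gamma, double beta)
        {
            const double eps = 1e-5; //assumed; the framework's own epsilon may differ
            return gamma * (x - mean) / Math.Sqrt(var + eps) + beta;
        }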
Example #16
        public void ConcatRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int widthA     = Mother.Dice.Next(1, 16);
            int widthB     = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);
            int axis       = 3;

            Real[,,,] inputA  = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthA });
            Real[,,,] inputB  = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthB });
            Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthA + widthB });

            //chainer
            NChainer.Concat <Real> cConcat = new NChainer.Concat <Real>(axis);

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(inputA));
            Variable <Real> cY = new Variable <Real>(Real.ToBaseNdArray(inputB));

            Variable <Real> cZ = cConcat.Forward(cX, cY);

            cZ.Grad = Real.ToBaseNdArray(dummyGy);

            cZ.Backward();


            //KelpNet
            KelpNet.Concat concat = new Concat(axis - 1);//Unlike Chainer, the first dimension is not implicitly treated as the batch dimension

            NdArray x = new NdArray(Real.ToRealArray(inputA), new[] { ch, height, widthA }, batchCount);
            NdArray y = new NdArray(Real.ToRealArray(inputB), new[] { ch, height, widthB }, batchCount);

            NdArray z = concat.Forward(x, y)[0];

            z.Grad = Real.ToRealArray(dummyGy);

            z.Backward();


            Real[] cZdata = Real.ToRealArray((Real[, , , ])cZ.Data);

            //A copy is required
            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad.Copy());
            Real[] cYgrad = Real.ToRealArray((Real[, , , ])cY.Grad.Copy());

            //Calculate the allowable tolerance
            double delta = 0.00001;

            //z
            Assert.AreEqual(cZdata.Length, z.Data.Length);
            for (int i = 0; i < z.Data.Length; i++)
            {
                Assert.AreEqual(cZdata[i], z.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //y.grad
            Assert.AreEqual(cYgrad.Length, y.Grad.Length);
            for (int i = 0; i < y.Grad.Length; i++)
            {
                Assert.AreEqual(cYgrad[i], y.Grad[i], delta);
            }
        }
Example #17
        public void AVGPoolingRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int chCount    = Mother.Dice.Next(1, 5);
            int wideSize   = Mother.Dice.Next(8, 32);
            int heightSize = Mother.Dice.Next(8, 32);

            int kWidth  = Mother.Dice.Next(1, 5);
            int kHeight = Mother.Dice.Next(1, 5);

            int strideX = Mother.Dice.Next(1, 5);
            int strideY = Mother.Dice.Next(1, 5);

            int padX = Mother.Dice.Next(0, 5);
            int padY = Mother.Dice.Next(0, 5);


            int outputHeight = (int)Math.Floor((heightSize - kHeight + padY * 2.0f) / strideY) + 1;

            int outputWidth = (int)Math.Floor((wideSize - kWidth + padX * 2.0f) / strideX) + 1;
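            //Same output-size formula as in the convolution test above, e.g.:
            //wideSize = 10, kWidth = 3, padX = 1, strideX = 2 -> floor(4.5) + 1 = 5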

            Real[,,,] input = Initializer.GetRandomValues <Real[, , , ]>(batchCount, chCount, heightSize, wideSize);

            Real[,,,] dummyGy = Initializer.GetRandomValues <Real[, , , ]>(batchCount, chCount, outputHeight, outputWidth);

            //Chainer
            NChainer.AveragePooling2D <Real> cAveragePooling2D = new NChainer.AveragePooling2D <Real>(
                new[] { kHeight, kWidth },
                new[] { strideY, strideX },
                new[] { padY, padX }
                );

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cAveragePooling2D.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //KelpNet
            KelpNet.AveragePooling2D <Real> averagePooling2D = new KelpNet.AveragePooling2D <Real>(
                new[] { kWidth, kHeight },
                new[] { strideX, strideY },
                new[] { padX, padY });

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = averagePooling2D.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();

            Real[] cYdata = ((Real[, , , ])cY.Data.Copy()).Flatten();
            Real[] cXgrad = ((Real[, , , ])cX.Grad.Copy()).Flatten();

            //Calculate the tolerance
            Real delta = 0.00001f;

            Assert.AreEqual(cYdata.Length, y.Data.Length);
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);

            //y
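            //Note: the guard below only compares entries whose Chainer output is (near) zero; all other entries are skipped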
            for (int i = 0; i < y.Data.Length; i++)
            {
                if (cYdata[i] < float.Epsilon && cYdata[i] > -float.Epsilon)
                {
                    Assert.AreEqual(cYdata[i], y.Data[i], delta);
                }
            }

            //x.grad
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #18
        public void AdamRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input   = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);
            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);
            Real[,] w       = Initializer.GetRandomValues <Real[, ]>(outputCount, inputCount);
            Real[] b = Initializer.GetRandomValues <Real[]>(outputCount);


            float alpha = (float)Mother.Dice.NextDouble(); //0.001f
            float beta1 = (float)Mother.Dice.NextDouble(); //0.9f;
            float beta2 = (float)Mother.Dice.NextDouble(); //0.999f;
            float eps   = (float)Mother.Dice.NextDouble(); //1e-08f;
            float eta   = (float)Mother.Dice.NextDouble(); //1.0f;
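            //For reference, the usual Adam update is:
            //  m ← β1·m + (1−β1)·g,  v ← β2·v + (1−β2)·g²
            //  θ ← θ − η·α·m̂/(√v̂ + ε)  (m̂, v̂ bias-corrected)
            //The test assumes both wrappers implement this same formulation, since it asserts elementwise equality.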

            //Chainer
            NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, w, b);
            NChainer.Adam <Real>   cAdam   = new NChainer.Adam <Real>(alpha, beta1, beta2, eps, eta);
            cAdam.Setup(cLinear);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();

            cAdam.Update();

            //KelpNet
            KelpNet.CL.Linear <Real> linear = new KelpNet.CL.Linear <Real>(inputCount, outputCount, false, w, b);
            KelpNet.Adam <Real>      adam   = new Adam <Real>(alpha, beta1, beta2, eps, eta);
            adam.SetUp(linear);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = linear.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();

            adam.Update();


            Real[] cW = ((Real[, ])cLinear.W.Data).Flatten();
            Real[] cb = (Real[])cLinear.b.Data;

            //Calculate the tolerance
            Real delta = 0.00001f;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
Example #19
        public void SGDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);

            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);
            Real[,] w       = Initializer.GetRandomValues <Real[, ]>(outputCount, inputCount);

            Real[] b = Initializer.GetRandomValues <Real[]>(outputCount);
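            //Plain SGD updates θ ← θ − lr·g. No learning rate is passed to either wrapper,
            //so this comparison assumes both share the same default lr.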

            //Chainer
            Linear <Real> cLinear = new Linear <Real>(inputCount, outputCount, false, w, b);

            NChainer.SGD <Real> cSgd = new NChainer.SGD <Real>();
            cSgd.Setup(cLinear);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();

            cSgd.Update();

            //KelpNet
            CL.Linear <Real>   linear = new CL.Linear <Real>(inputCount, outputCount, false, w, b);
            KelpNet.SGD <Real> sgd    = new SGD <Real>();
            sgd.SetUp(linear);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = linear.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();

            sgd.Update();


            Real[] cW = ((Real[, ])cLinear.W.Data).Flatten();
            Real[] cb = (Real[])cLinear.b.Data;

            //Calculate the tolerance
            Real delta = 0.00001f;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
Example #20
        public void MeanSquaredRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] inputA = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });
            Real[,,,] inputB = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });

            for (int i = 0; i < inputB.GetLength(0); i++)
            {
                for (int j = 0; j < inputB.GetLength(1); j++)
                {
                    for (int k = 0; k < inputB.GetLength(2); k++)
                    {
                        for (int l = 0; l < inputB.GetLength(3); l++)
                        {
                            inputB[i, j, k, l] *= (Real)3.1415;
                        }
                    }
                }
            }
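            //Scaling inputB by π makes the two inputs differ, so the loss and gradients are nonzero.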

            //Chainer
            NChainer.MeanSquaredError <Real> cMeanSquaredError = new NChainer.MeanSquaredError <Real>();

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(inputA));
            Variable <Real> cY = new Variable <Real>(Real.ToBaseNdArray(inputB));

            Variable <Real> cZ = cMeanSquaredError.Forward(cX, cY);

            cZ.Backward();

            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad);

            //KelpNet
            KelpNet.MeanSquaredError meanSquaredError = new KelpNet.MeanSquaredError();

            NdArray x = new NdArray(Real.ToRealArray(inputA), new[] { ch, height, width }, batchCount);
            NdArray y = new NdArray(Real.ToRealArray(inputB), new[] { ch, height, width }, batchCount);

            //In KelpNet, Evaluate computes the loss and also seeds the gradients (no separate Backward call is needed on the loss)
            NdArray z = meanSquaredError.Evaluate(x, y);


            //Calculate the tolerance (the error is larger because the internal order of operations differs)
            double delta = 0.001;

            //Loss
            Assert.AreEqual(cZ.Data[0], z.Data[0], delta);

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #21
        public void LRNRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int   n = Mother.Dice.Next(2, 7);
            float k = (float)Mother.Dice.NextDouble() * 3;

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] input   = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });
            Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });


            //Chainer
            NChainer.LocalResponseNormalization <Real> cLocalResponseNormalization = new NChainer.LocalResponseNormalization <Real>(n, k);

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cLocalResponseNormalization.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.LRN lrn = new LRN(n, k);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { ch, height, width }, batchCount);

            NdArray y = lrn.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();


            Real[] cYdata = Real.ToRealArray((Real[, , , ])cY.Data);
            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad);

            //Calculate the tolerance
            double delta = 0.00001;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #22
        public void SplitAxisRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int widthA     = Mother.Dice.Next(1, 16);
            int widthB     = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);
            int axis       = 3;

            Real[,,,] input = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthA + widthB);

            Real[,,,] dummyGyA = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthA);
            Real[,,,] dummyGyB = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, widthB);

            //Chainer
            NChainer.SplitAxis <Real> cSplitAxis = new NChainer.SplitAxis <Real>();

            Variable <Real> cX = new Variable <Real>(input);

            PyObject[] cY = cSplitAxis.Forward(cX, new[] { widthA }, axis);
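            //Splitting at index widthA along axis 3 yields two chunks of width widthA and widthB.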

            Variable <Real> cY0 = cY[0];
            Variable <Real> cY1 = cY[1];

            cY0.Grad = dummyGyA;
            cY1.Grad = dummyGyB;

            //In Chainer, calling Backward on either output runs the backward pass for both
            cY0.Backward();
            //cY1.Backward();


            //KelpNet
            SplitAxis <Real> splitAxis = new SplitAxis <Real>(new[] { widthA }, axis - 1); //Unlike Chainer, KelpNet does not implicitly treat the first dimension as the batch dimension

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real>[] y = splitAxis.Forward(x);
            y[0].Grad = dummyGyA.Flatten();
            y[1].Grad = dummyGyB.Flatten();

            //In KelpNet, the backward pass does not run until Backward has been called on both outputs
            y[0].Backward();
            y[1].Backward();

            //Copy is required
            Real[] cY0data = ((Real[, , , ])cY0.Data.Copy()).Flatten();
            Real[] cY1data = ((Real[, , , ])cY1.Data.Copy()).Flatten();

            Real[] cXgrad = ((Real[, , , ])cX.Grad).Flatten();

            //Calculate the tolerance
            Real delta = 0.00001f;

            //y0
            Assert.AreEqual(cY0data.Length, y[0].Data.Length);
            for (int i = 0; i < y[0].Data.Length; i++)
            {
                Assert.AreEqual(cY0data[i], y[0].Data[i], delta);
            }

            //y1
            Assert.AreEqual(cY1data.Length, y[1].Data.Length);
            for (int i = 0; i < y[1].Data.Length; i++)
            {
                Assert.AreEqual(cY1data[i], y[1].Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #23
        public void RandomTest(bool gpuEnable)
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(1, 50);
            int outputCount = Mother.Dice.Next(1, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);

            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);
            Real[,] w       = Initializer.GetRandomValues <Real[, ]>(outputCount, inputCount);

            Real[] b = Initializer.GetRandomValues <Real[]>(outputCount);
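            //Linear computes y = x·Wᵀ + b, which is why w is shaped (outputCount, inputCount).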

            //Chainer
            NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, w, b);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //KelpNet
            CL.Linear <Real> linear = new CL.Linear <Real>(inputCount, outputCount, false, w, b, gpuEnable: gpuEnable);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = linear.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, ])cY.Data).Flatten();
            Real[] cXgrad = ((Real[, ])cX.Grad).Flatten();

            Real[] cWgrad = ((Real[, ])cLinear.W.Grad).Flatten();
            Real[] cbgrad = (Real[])cLinear.b.Grad;

            //Calculate the tolerance
            Real delta = 0.00001f;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //W.grad
            Assert.AreEqual(cWgrad.Length, linear.Weight.Grad.Length);
            for (int i = 0; i < linear.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], linear.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad.Length, linear.Bias.Grad.Length);
            for (int i = 0; i < linear.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], linear.Bias.Grad[i], delta);
            }
        }
Example #24
        public void RandomTest(bool gpuEnable)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int inChCount  = Mother.Dice.Next(1, 5);
            int outChCount = Mother.Dice.Next(1, 5);
            int wideSize   = Mother.Dice.Next(8, 32);
            int heightSize = Mother.Dice.Next(8, 32);

            int kWidth  = Mother.Dice.Next(1, 5);
            int kHeight = Mother.Dice.Next(1, 5);

            int strideX = Mother.Dice.Next(1, 5);
            int strideY = Mother.Dice.Next(1, 5);

            int padX = Mother.Dice.Next(0, 5);
            int padY = Mother.Dice.Next(0, 5);

            int outputHeight = (int)Math.Floor((heightSize - kHeight + padY * 2.0f) / strideY) + 1;
            int outputWidth  = (int)Math.Floor((wideSize - kWidth + padX * 2.0f) / strideX) + 1;

            Real[,,,] input = Initializer.GetRandomValues <Real[, , , ]>(batchCount, inChCount, heightSize, wideSize);

            Real[,,,] dummyGy = Initializer.GetRandomValues <Real[, , , ]>(batchCount, outChCount, outputHeight, outputWidth);
            Real[,,,] w       = Initializer.GetRandomValues <Real[, , , ]>(outChCount, inChCount, kHeight, kWidth);

            Real[] b = Initializer.GetRandomValues <Real[]>(outChCount);
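            //Note the argument order below: the Chainer wrapper takes {height, width} pairs,
            //while the KelpNet wrapper takes {width, height}.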

            //Chainer
            NChainer.Convolution2D <Real> cConvolution2D = new NChainer.Convolution2D <Real>(
                inChCount, outChCount,
                new[] { kHeight, kWidth },
                new[] { strideY, strideX },
                new[] { padY, padX },
                false,
                w,
                b);

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cConvolution2D.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();


            //KelpNet
            CL.Convolution2D <Real> convolution2D = new CL.Convolution2D <Real>(
                inChCount, outChCount,
                new[] { kWidth, kHeight },
                new[] { strideX, strideY },
                new[] { padX, padY },
                false, w, b, gpuEnable: gpuEnable);

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = convolution2D.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, , , ])cY.Data.Copy()).Flatten();
            Real[] cXgrad = ((Real[, , , ])cX.Grad.Copy()).Flatten();

            Real[] cWgrad = ((Real[, , , ])cConvolution2D.W.Grad).Flatten();
            Real[] cbgrad = (Real[])cConvolution2D.b.Grad;

            //Calculate the tolerance
            Real delta = 0.00001f;

            Assert.AreEqual(cYdata.Length, y.Data.Length);
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            Assert.AreEqual(cWgrad.Length, convolution2D.Weight.Grad.Length);
            Assert.AreEqual(cbgrad.Length, convolution2D.Bias.Grad.Length);

            //y
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            delta = 0.1f;
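            //A looser tolerance for the parameter gradients, which accumulate error across many multiply-adds.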
            //W.grad
            for (int i = 0; i < convolution2D.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad[i], convolution2D.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < convolution2D.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad[i], convolution2D.Bias.Grad[i], delta);
            }
        }
Example #25
        public void RnnLSTMRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            Real[,] input = { { 1.0f }, { 3.0f }, { 5.0f }, { 7.0f }, { 9.0f } };
            Real[,] teach = { { 3.0f }, { 5.0f }, { 7.0f }, { 9.0f }, { 11.0f } };

            Real[,] input2 = { { 3.0f }, { 5.0f }, { 7.0f }, { 9.0f }, { 11.0f } };
            Real[,] teach2 = { { 5.0f }, { 7.0f }, { 9.0f }, { 11.0f }, { 13.0f } };

            int outputCount = 1;
            int inputCount  = 1;
            int hiddenCount = 2;
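            //The network is Linear(1→2) → LSTM(2→2) → Linear(2→1), fed a sequence where teach = input + 2.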

            Real[,] upwardInit      = Initializer.GetRandomValues <Real[, ]>(hiddenCount, hiddenCount);
            Real[,] lateralInit     = Initializer.GetRandomValues <Real[, ]>(hiddenCount, hiddenCount);
            Real[,,] biasInit       = Initializer.GetRandomValues <Real[, , ]>(1, hiddenCount, 1);
            Real[,,] forgetBiasInit = Initializer.GetRandomValues <Real[, , ]>(1, hiddenCount, 1);

            //Chainer
            Real[,] w1 = Initializer.GetRandomValues <Real[, ]>(hiddenCount, inputCount);
            Real[] b1 = Initializer.GetRandomValues <Real[]>(hiddenCount);

            //Chainer
            Linear <Real> cLinear1 = new Linear <Real>(inputCount, hiddenCount, false, w1, b1);

            NChainer.LSTM <Real> cLstm = new NChainer.LSTM <Real>(hiddenCount, hiddenCount, lateralInit, upwardInit, biasInit, forgetBiasInit);

            Real[,] w2 = Initializer.GetRandomValues <Real[, ]>(outputCount, hiddenCount);
            Real[]        b2       = Initializer.GetRandomValues <Real[]>(outputCount);
            Linear <Real> cLinear2 = new Linear <Real>(hiddenCount, outputCount, false, w2, b2);

            Variable <Real> cX1  = new Variable <Real>(input);
            Variable <Real> cY11 = cLinear1.Forward(cX1);
            Variable <Real> cY12 = cLstm.Forward(cY11);
            Variable <Real> cY13 = cLinear2.Forward(cY12);
            Variable <Real> cT   = new Variable <Real>(teach);

            Variable <Real> cLoss = new NChainer.MeanSquaredError <Real>().Forward(cY13, cT);

            cLoss.Backward();


            //KelpNet
            CL.Linear <Real> linear1 = new CL.Linear <Real>(inputCount, hiddenCount, false, w1, b1);
            LSTM <Real>      lstm    = new LSTM <Real>(hiddenCount, hiddenCount, lateralInit, upwardInit, biasInit, forgetBiasInit);

            CL.Linear <Real> linear2 = new CL.Linear <Real>(hiddenCount, outputCount, false, w2, b2);

            NdArray <Real> x1  = new NdArray <Real>(input, asBatch: true);
            NdArray <Real> y11 = linear1.Forward(x1)[0];
            NdArray <Real> y12 = lstm.Forward(y11)[0];
            NdArray <Real> y13 = linear2.Forward(y12)[0];
            NdArray <Real> t   = new NdArray <Real>(teach, asBatch: true);

            NdArray <Real> loss = new MeanSquaredError <Real>().Evaluate(y13, t);

            y13.Backward();

            Real[] cY11data = ((Real[, ])cY11.Data).Flatten();
            Real[] cY12data = ((Real[, ])cY12.Data).Flatten();
            Real[] cY13data = ((Real[, ])cY13.Data).Flatten();
            Real[] cXgrad   = ((Real[, ])cX1.Grad).Flatten();

            Real[] cupwardWGrad = ((Real[, ])cLstm.upward.W.Grad).Flatten();
            Real[] cupwardbGrad = (Real[])cLstm.upward.b.Grad;


            //Set the tolerance
            Real delta = 0.00001f;

            //y11
            Assert.AreEqual(cY11data.Length, y11.Data.Length);
            for (int i = 0; i < cY11data.Length; i++)
            {
                Assert.AreEqual(cY11data[i], y11.Data[i], delta);
            }

            //y12
            Assert.AreEqual(cY12data.Length, y12.Data.Length);
            for (int i = 0; i < cY12data.Length; i++)
            {
                Assert.AreEqual(cY12data[i], y12.Data[i], delta);
            }

            //y13
            Assert.AreEqual(cY13data.Length, y13.Data.Length);
            for (int i = 0; i < cY13data.Length; i++)
            {
                Assert.AreEqual(cY13data[i], y13.Data[i], delta);
            }

            //Set the tolerance
            delta = 0.0001f;

            //loss
            Assert.AreEqual(cLoss.Data[0], loss.Data[0], delta);

            //x.Grad
            Assert.AreEqual(cXgrad.Length, x1.Grad.Length);
            for (int i = 0; i < cXgrad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x1.Grad[i], delta);
            }

            Real[] cWgrad11 = ((Real[, ])cLinear1.W.Grad).Flatten();
            Real[] cbgrad11 = (Real[])cLinear1.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad11.Length, linear1.Weight.Grad.Length);
            for (int i = 0; i < linear1.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad11[i], linear1.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad11.Length, linear1.Bias.Grad.Length);
            for (int i = 0; i < linear1.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad11[i], linear1.Bias.Grad[i], delta);
            }


            Real[] cWgrad12 = ((Real[, ])cLinear2.W.Grad).Flatten();
            Real[] cbgrad12 = (Real[])cLinear2.b.Grad;


            //W.grad
            Assert.AreEqual(cWgrad12.Length, linear2.Weight.Grad.Length);
            for (int i = 0; i < linear2.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad12[i], linear2.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad12.Length, linear2.Bias.Grad.Length);
            for (int i = 0; i < linear2.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad12[i], linear2.Bias.Grad[i], delta);
            }

            //W.grad
            int wLen = lstm.upward.Weight.Grad.Length;

            Assert.AreEqual(cupwardWGrad.Length, lstm.upward.Weight.Grad.Length);
            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad[i], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            int bLen = lstm.upward.Bias.Length;

            Assert.AreEqual(cupwardbGrad.Length, lstm.upward.Bias.Grad.Length);
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad[i], lstm.upward.Bias.Grad[i], delta);
            }


            //Second pass (the LSTM carries its hidden state over from the first pass)
            Variable <Real> cX2  = new Variable <Real>(input2);
            Variable <Real> cY21 = cLinear1.Forward(cX2);
            Variable <Real> cY22 = cLstm.Forward(cY21);
            Variable <Real> cY23 = cLinear2.Forward(cY22);
            Variable <Real> cT2  = new Variable <Real>(teach2);

            Variable <Real> cLoss2 = new NChainer.MeanSquaredError <Real>().Forward(cY23, cT2);

            //KelpNet
            NdArray <Real> x2  = new NdArray <Real>(input2, asBatch: true);
            NdArray <Real> y21 = linear1.Forward(x2)[0];
            NdArray <Real> y22 = lstm.Forward(y21)[0];
            NdArray <Real> y23 = linear2.Forward(y22)[0];
            NdArray <Real> t2  = new NdArray <Real>(teach2, asBatch: true);

            NdArray <Real> loss2 = new MeanSquaredError <Real>().Evaluate(y23, t2);

            Assert.AreEqual(cLoss2.Data[0], loss2.Data[0], delta);

            //Run Backward
            cLoss2.Backward();
            y23.Backward();


            Real[] cYdata21 = ((Real[, ])cY21.Data).Flatten();
            Real[] cYdata22 = ((Real[, ])cY22.Data).Flatten();
            Real[] cYdata23 = ((Real[, ])cY23.Data).Flatten();
            Real[] cXgrad2  = ((Real[, ])cX2.Grad).Flatten();

            Real[] cupwardWGrad2 = ((Real[, ])cLstm.upward.W.Grad).Flatten();
            Real[] cupwardbGrad2 = (Real[])cLstm.upward.b.Grad;
            Real[] clateralWGrad = ((Real[, ])cLstm.lateral.W.Grad).Flatten();

            //y21
            Assert.AreEqual(cYdata21.Length, y21.Data.Length);
            for (int i = 0; i < cYdata21.Length; i++)
            {
                Assert.AreEqual(cYdata21[i], y21.Data[i], delta);
            }

            //y22
            Assert.AreEqual(cYdata22.Length, y22.Data.Length);
            for (int i = 0; i < cYdata22.Length; i++)
            {
                Assert.AreEqual(cYdata22[i], y22.Data[i], delta);
            }

            //y23
            Assert.AreEqual(cYdata23.Length, y23.Data.Length);
            for (int i = 0; i < cYdata23.Length; i++)
            {
                Assert.AreEqual(cYdata23[i], y23.Data[i], delta);
            }

            //x.Grad
            Assert.AreEqual(cXgrad2.Length, x2.Grad.Length);
            for (int i = 0; i < cXgrad2.Length; i++)
            {
                Assert.AreEqual(cXgrad2[i], x2.Grad[i], delta);
            }

            //The error is considerably larger here because the gradients pass through many operations
            delta = 1.0f;

            Real[] cWgrad22 = ((Real[, ])cLinear2.W.Grad).Flatten();
            Real[] cbgrad22 = (Real[])cLinear2.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad22.Length, linear2.Weight.Grad.Length);
            for (int i = 0; i < linear2.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad22[i], linear2.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad22.Length, linear2.Bias.Grad.Length);
            for (int i = 0; i < linear2.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad22[i], linear2.Bias.Grad[i], delta);
            }


            delta = 2.0f;

            //W.grad
            Assert.AreEqual(clateralWGrad.Length, lstm.lateral.Weight.Grad.Length);
            for (int i = 0; i < clateralWGrad.Length; i++)
            {
                Assert.AreEqual(clateralWGrad[i], lstm.lateral.Weight.Grad[i], delta);
            }

            for (int i = 0; i < wLen; i++)
            {
                Assert.AreEqual(cupwardWGrad2[i], lstm.upward.Weight.Grad[i], delta);
            }

            //b.grad
            for (int i = 0; i < bLen; i++)
            {
                Assert.AreEqual(cupwardbGrad2[i], lstm.upward.Bias.Grad[i], delta);
            }


            delta = 20.0f;

            Real[] cWgrad21 = ((Real[, ])cLinear1.W.Grad).Flatten();
            Real[] cbgrad21 = (Real[])cLinear1.b.Grad;

            //W.grad
            Assert.AreEqual(cWgrad21.Length, linear1.Weight.Grad.Length);
            for (int i = 0; i < linear1.Weight.Grad.Length; i++)
            {
                Assert.AreEqual(cWgrad21[i], linear1.Weight.Grad[i], delta);
            }

            //b.grad
            Assert.AreEqual(cbgrad21.Length, linear1.Bias.Grad.Length);
            for (int i = 0; i < linear1.Bias.Grad.Length; i++)
            {
                Assert.AreEqual(cbgrad21[i], linear1.Bias.Grad[i], delta);
            }
        }
Example #26
        public void RandomTest(bool gpuEnable)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int chCount    = Mother.Dice.Next(1, 5);
            int wideSize   = Mother.Dice.Next(8, 32);
            int heightSize = Mother.Dice.Next(8, 32);

            int kWidth  = Mother.Dice.Next(1, 5);
            int kHeight = Mother.Dice.Next(1, 5);

            int strideX = Mother.Dice.Next(1, 5);
            int strideY = Mother.Dice.Next(1, 5);

            int padX = Mother.Dice.Next(0, 5);
            int padY = Mother.Dice.Next(0, 5);

            bool coverAll = Mother.Dice.Next(1) == 0;
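            //Mother.Dice.Next(1) always returns 0, so coverAll is always true here,
            //matching Chainer's default cover_all=True for max pooling.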

            int outputHeight = coverAll ?
                               (int)Math.Floor((heightSize - kHeight + padY * 2.0 + strideY - 1) / strideY) + 1 :
                               (int)Math.Floor((heightSize - kHeight + padY * 2.0) / strideY) + 1;

            int outputWidth = coverAll ?
                              (int)Math.Floor((wideSize - kWidth + padX * 2.0 + strideX - 1) / strideX) + 1 :
                              (int)Math.Floor((wideSize - kWidth + padX * 2.0) / strideX) + 1;

            Real[,,,] input = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, chCount, heightSize, wideSize });

            Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, chCount, outputHeight, outputWidth });

            //Chainer
            NChainer.MaxPooling2D <Real> cMaxPooling2D = new NChainer.MaxPooling2D <Real>(
                new[] { kHeight, kWidth },
                new[] { strideY, strideX },
                new[] { padY, padX }
                );

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cMaxPooling2D.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.CL.MaxPooling2D maxPooling2D = new KelpNet.CL.MaxPooling2D(
                new[] { kWidth, kHeight },
                new[] { strideX, strideY },
                new[] { padX, padY },
                gpuEnable: gpuEnable);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { chCount, heightSize, wideSize }, batchCount);

            NdArray y = maxPooling2D.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            Real[] cYdata = Real.ToRealArray((Real[, , , ])cY.Data.Copy());
            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad.Copy());

            //Calculate the tolerance
            double delta = 0.00001;

            Assert.AreEqual(cYdata.Length, y.Data.Length);
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);

            //y
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #27
        public void SGDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, inputCount });

            Real[,] dummyGy = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
            Real[,] w       = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, inputCount });

            Real[] b = Initializer.GetRealArray(outputCount);

            //Chainer
            NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));
            NChainer.SGD <Real>    cSgd    = new NChainer.SGD <Real>();
            cSgd.Setup(cLinear);

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            cSgd.Update();

            //KelpNet
            KelpNet.Linear linear = new KelpNet.Linear(inputCount, outputCount, false, w, b);
            KelpNet.SGD    sgd    = new SGD();
            sgd.SetUp(linear);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = linear.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            sgd.Update();


            Real[] cW = Real.ToRealArray((Real[, ])cLinear.W.Data);
            Real[] cb = (Real[])cLinear.b.Data;

            //Calculate the tolerance
            double delta = 0.00001;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
Example #28
        public void MeanSquaredRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] inputA = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);
            Real[,,,] inputB = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);

            for (int i = 0; i < inputB.GetLength(0); i++)
            {
                for (int j = 0; j < inputB.GetLength(1); j++)
                {
                    for (int k = 0; k < inputB.GetLength(2); k++)
                    {
                        for (int l = 0; l < inputB.GetLength(3); l++)
                        {
                            inputB[i, j, k, l] *= (Real)3.1415f;
                        }
                    }
                }
            }

            //Chainer
            NChainer.MeanSquaredError <Real> cMeanSquaredError = new NChainer.MeanSquaredError <Real>();

            Variable <Real> cX = new Variable <Real>(inputA);
            Variable <Real> cY = new Variable <Real>(inputB);

            Variable <Real> cZ = cMeanSquaredError.Forward(cX, cY);

            cZ.Backward();

            Real[] cXgrad = ((Real[, , , ])cX.Grad).Flatten();

            //KelpNet
            KelpNet.MeanSquaredError <Real> meanSquaredError = new KelpNet.MeanSquaredError <Real>();

            NdArray <Real> x = new NdArray <Real>(inputA, asBatch: true);
            NdArray <Real> y = new NdArray <Real>(inputB, asBatch: true);

            //In KelpNet, Evaluate computes the loss and also seeds the gradients (no separate Backward call is needed on the loss)
            NdArray <Real> z = meanSquaredError.Evaluate(x, y);


            //Calculate the tolerance (the error is larger because the internal order of operations differs)
            Real delta = 0.001f;

            //Loss
            Assert.AreEqual(cZ.Data[0], z.Data[0], delta);

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Example #29
        public void TrainTest(bool isTrain, bool finetune)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 50);
            int ioCount    = Mother.Dice.Next(1, 50);

            Real[,] input = Initializer.GetRandomValues <Real[, ]>(batchCount, ioCount);

            Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, ioCount);

            Real[] gamma = Initializer.GetRandomValues <Real[]>(ioCount);
            Real[] beta  = Initializer.GetRandomValues <Real[]>(ioCount);

            Real[] avgMean = Initializer.GetRandomValues <Real[]>(ioCount);
            Real[] avgVar  = Initializer.GetRandomValues <Real[]>(ioCount);
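            //Batch normalization computes y = γ·(x − μ)/√(σ² + ε) + β. In training mode the batch
            //statistics are used (updating the running averages); otherwise the stored avgMean/avgVar
            //are used, which is why both are seeded here.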

            //Chainer
            Chainer.Config["train"] = isTrain;

            NChainer.BatchNormalization <Real> cBatchNormalization = new NChainer.BatchNormalization <Real>(ioCount, dtype: typeof(Real));

            cBatchNormalization.gamma = new Variable <Real>(gamma);
            cBatchNormalization.beta  = new Variable <Real>(beta);

            cBatchNormalization.avgMean = avgMean;
            cBatchNormalization.avgVar  = avgVar;

            Variable <Real> cX = new Variable <Real>(input);

            Variable <Real> cY = cBatchNormalization.Forward(cX, finetune);

            cY.Grad = dummyGy;

            cY.Backward();

            //KelpNet
            KelpNet.BatchNormalization <Real> batchNormalization = new BatchNormalization <Real>(ioCount, train: isTrain, finetune: finetune);

            batchNormalization.Gamma.Data = gamma;
            batchNormalization.Beta.Data  = beta;

            batchNormalization.AvgMean.Data = avgMean;
            batchNormalization.AvgVar.Data  = avgVar;

            NdArray <Real> x = new NdArray <Real>(input, asBatch: true);

            NdArray <Real> y = batchNormalization.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();


            Real[] cYdata = ((Real[, ])cY.Data).Flatten();
            Real[] cXgrad = ((Real[, ])cX.Grad).Flatten();

            Real[] cGammaGrad = (Real[])cBatchNormalization.gamma.Grad;
            Real[] cBetaGrad  = (Real[])cBatchNormalization.beta.Grad;

            //Calculate the tolerance
            Real delta = 0.00005f;

            //y
            Assert.AreEqual(cYdata.Length, y.Data.Length);
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }

            //gamma.grad
            Assert.AreEqual(cGammaGrad.Length, batchNormalization.Gamma.Grad.Length);
            for (int i = 0; i < batchNormalization.Gamma.Grad.Length; i++)
            {
                Assert.AreEqual(cGammaGrad[i], batchNormalization.Gamma.Grad[i], delta);
            }

            //beta.grad
            Assert.AreEqual(cBetaGrad.Length, batchNormalization.Beta.Grad.Length);
            for (int i = 0; i < batchNormalization.Beta.Grad.Length; i++)
            {
                Assert.AreEqual(cBetaGrad[i], batchNormalization.Beta.Grad[i], delta);
            }
        }