Example #1
        public Sample13WinForm()
        {
            this.InitializeComponent();

            ClientSize = new Size(128 * 4, 128 * 4);

            // Create the target filter (in a real case this would be an unknown value)
            this.decon_core = new Deconvolution2D<Real>(1, 1, 15, 1, 7, gpuEnable: true)
            {
                Weight = { Data = MakeOneCore() }
            };

            this.model = new Deconvolution2D<Real>(1, 1, 15, 1, 7, gpuEnable: true);

            this.optimizer = new SGD<Real>(learningRate: 0.01f);
            optimizer.SetUp(this.model);
        }
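
        // The constructor above only prepares the experiment: decon_core holds the
        // "unknown" target filter and model is the network to be trained toward it.
        // The training step itself is not shown; below is a minimal sketch using the
        // same Forward/Evaluate/Backward/Update pattern as the later examples
        // (MakeRandomInput is a hypothetical input generator, not part of the sample):
        private void TrainStep()
        {
            NdArray<Real> input = MakeRandomInput();

            // Teacher signal produced by the fixed target filter
            NdArray<Real> teacher = this.decon_core.Forward(input)[0];

            NdArray<Real> result = this.model.Forward(input)[0];
            new MeanSquaredError<Real>().Evaluate(result, teacher); // sets result.Grad
            this.model.Backward(result);
            this.optimizer.Update(); // SGD step pulling model toward decon_core
        }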
Example #2
        public static void Run()
        {
            // List the structure of the network to load in a FunctionStack, matching each Function's parameters
            // Be sure every name here matches the corresponding Chainer variable name

            FunctionStack<Real> nn = new FunctionStack<Real>(
                new Convolution2D<Real>(1, 2, 3, name: "conv1", gpuEnable: true), // don't forget the GPU flag if needed
                new ReLU<Real>(),
                new MaxPooling2D<Real>(2, 2),
                new Convolution2D<Real>(2, 2, 2, name: "conv2", gpuEnable: true),
                new ReLU<Real>(),
                new MaxPooling2D<Real>(2, 2),
                new Linear<Real>(8, 2, name: "fl3"),
                new ReLU<Real>(),
                new Linear<Real>(2, 2, name: "fl4")
            );

            /* Declaration on the Chainer side:
             * class NN(chainer.Chain):
             *  def __init__(self):
             *      super(NN, self).__init__(
             *          conv1 = L.Convolution2D(1,2,3),
             *          conv2 = L.Convolution2D(2,2,2),
             *          fl3 = L.Linear(8,2),
             *          fl4 = L.Linear(2,2)
             *      )
             *
             *  def __call__(self, x):
             *      h_conv1 = F.relu(self.conv1(x))
             *      h_pool1 = F.max_pooling_2d(h_conv1, 2)
             *      h_conv2 = F.relu(self.conv2(h_pool1))
             *      h_pool2 = F.max_pooling_2d(h_conv2, 2)
             *      h_fc1 = F.relu(self.fl3(h_pool2))
             *      y = self.fl4(h_fc1)
             *      return y
             */


            // Load the parameters
            ChainerModelDataLoader.ModelLoad(MODEL_FILE_PATH, nn);

            // From here on, use the network as usual
            SGD<Real> sgd = new SGD<Real>(0.1f);

            sgd.SetUp(nn);

            // Input data
            NdArray<Real> x = new NdArray<Real>(new Real[,,] {
                {
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.2f, 0.9f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.2f, 0.8f, 0.9f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.1f, 0.8f, 0.5f, 0.8f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.3f, 0.3f, 0.1f, 0.7f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.1f, 0.7f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.7f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.4f, 0.8f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.8f, 0.4f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.2f, 0.8f, 0.3f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.8f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.7f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f },
                    { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.3f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }
                }
            });

            // Teacher signal
            NdArray<Real> t = new NdArray<Real>(new Real[] { 0.0f, 1.0f });

            // Run the training
            Trainer.Train(nn, x, t, new MeanSquaredError<Real>());

            // Keep a reference for displaying the results
            Convolution2D<Real> l2 = (Convolution2D<Real>)nn.Functions[0];


            // Output the grad values first, because running Update consumes them
            Console.WriteLine("gw1");
            Console.WriteLine(l2.Weight.ToString("Grad"));

            Console.WriteLine("gb1");
            Console.WriteLine(l2.Bias.ToString("Grad"));

            // Update
            sgd.Update();

            Console.WriteLine("w1");
            Console.WriteLine(l2.Weight);

            Console.WriteLine("b1");
            Console.WriteLine(l2.Bias);
        }
Example #3
        public void SGDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, inputCount });

            Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
            Real[,] w       = (Real[,])Initializer.GetRealNdArray(new[] { outputCount, inputCount });

            Real[] b = Initializer.GetRealArray(outputCount);

            //Chainer
            NChainer.Linear<Real> cLinear = new NChainer.Linear<Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));
            NChainer.SGD<Real>    cSgd    = new NChainer.SGD<Real>();
            cSgd.Setup(cLinear);

            Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

            Variable<Real> cY = cLinear.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            cSgd.Update();

            //KelpNet
            KelpNet.Linear linear = new KelpNet.Linear(inputCount, outputCount, false, w, b);
            KelpNet.SGD    sgd    = new SGD();
            sgd.SetUp(linear);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = linear.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            sgd.Update();


            Real[] cW = Real.ToRealArray((Real[,])cLinear.W.Data);
            Real[] cb = (Real[])cLinear.b.Data;

            // Allowable tolerance
            double delta = 0.00001;

            // W (data after the update)
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            // b (data after the update)
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
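
        // Both parity tests (this one and Example #5) exercise only sgd.Update();
        // what is verified against Chainer is the plain SGD rule w ← w − η·∇w.
        // A conceptual sketch of that per-parameter step (not KelpNet's actual code):
        static void SgdStep(Real[] data, Real[] grad, Real learningRate)
        {
            for (int i = 0; i < data.Length; i++)
            {
                data[i] -= learningRate * grad[i]; // w ← w − η·∇w
            }
        }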
Example #4
        public static void Run()
        {
            // Use the same Weight values before and after the split
            Real[,] testWeightValues = new Real[,] {
                { -0.02690255f, 0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                { -0.06963444f, -0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                { -0.04966073f, -0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                { 0.06563969f, 0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                { 0.05012317f, -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f },
                { -0.05200623f, -0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                { -0.0254951f, 0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                { -0.0732494f, 0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                { -0.06818873f, -0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                { -0.06429326f, -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f },
                { -0.0454098f, 0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                { -0.00866754f, -0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                { -0.01789357f, -0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                { -0.02421552f, 0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                { 0.02328842f, -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f },
                { -0.07128382f, 0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                { 0.09431138f, 0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                { 0.01844446f, -0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                { -0.0261334f, -0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
            };

            Real[][,] testJaggWeightValues =
            {
                new Real[,] {
                    { -0.02690255f, 0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                    { -0.06963444f, -0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                    { -0.04966073f, -0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                    { 0.06563969f, 0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                    { 0.05012317f, -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f }
                },
                new Real[,] {
                    { -0.05200623f, -0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                    { -0.0254951f, 0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                    { -0.0732494f, 0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                    { -0.06818873f, -0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                    { -0.06429326f, -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f }
                },
                new Real[,] {
                    { -0.0454098f, 0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                    { -0.00866754f, -0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                    { -0.01789357f, -0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                    { -0.02421552f, 0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                    { 0.02328842f, -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f }
                },
                new Real[,] {
                    { -0.07128382f, 0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                    { 0.09431138f, 0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                    { 0.01844446f, -0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                    { -0.0261334f, -0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                    { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
                }
            };

            Linear<Real> l0 = new Linear<Real>(5, 20, initialW: testWeightValues, name: "l0");

            Linear<Real> l1 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[0], name: "l1");
            Linear<Real> l2 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[1], name: "l2");
            Linear<Real> l3 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[2], name: "l3");
            Linear<Real> l4 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[3], name: "l4");

            // Set the Optimizer on the combined Function
            SGD<Real> sgd = new SGD<Real>();

            sgd.SetUp(l0);

            // Register each split Function with its Optimizer
            SGD<Real> sgdSplit = new SGD<Real>();

            sgdSplit.SetUp(l1);
            sgdSplit.SetUp(l2);
            sgdSplit.SetUp(l3);
            sgdSplit.SetUp(l4);


            // The input values are identical, but separate arrays are used because the Grads would otherwise accumulate
            Real[]         testValue        = new Real[] { 0.01618112f, -0.08296648f, -0.05545357f, 0.00389254f, -0.05727582f };
            NdArray<Real>  testInputValuesA = new NdArray<Real>(testValue);
            NdArray<Real>  testInputValuesB = new NdArray<Real>(testValue);

            Console.WriteLine("l0 for");
            NdArray <Real> l0Result = l0.Forward(testInputValuesA)[0];

            Console.WriteLine(l0Result);

            Console.WriteLine("\nl1 for");
            NdArray <Real> l1Result = l1.Forward(testInputValuesB)[0];

            Console.WriteLine(l1Result);

            Console.WriteLine("\nl2 for");
            NdArray <Real> l2Result = l2.Forward(testInputValuesB)[0];

            Console.WriteLine(l2Result);

            Console.WriteLine("\nl3 for");
            NdArray <Real> l3Result = l3.Forward(testInputValuesB)[0];

            Console.WriteLine(l3Result);

            Console.WriteLine("\nl4 for");
            NdArray <Real> l4Result = l4.Forward(testInputValuesB)[0];

            Console.WriteLine(l4Result);

            Console.WriteLine();

            // Make up arbitrary Grad values
            l0Result.Grad = new Real[]
            {
                -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f,
                1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f,
                -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f,
                -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f
            };

            l1Result.Grad = new Real[] { -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f };
            l2Result.Grad = new Real[] { 1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f };
            l3Result.Grad = new Real[] { -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f };
            l4Result.Grad = new Real[] { -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f };


            // Run Backward
            l0.Backward(l0Result);

            l1.Backward(l1Result);
            l2.Backward(l2Result);
            l3.Backward(l3Result);
            l4.Backward(l4Result);

            Console.WriteLine("\nl0 back");
            Console.WriteLine(testInputValuesA.ToString("Grad"));

            Console.WriteLine("\nl1-l4 sum back");
            Console.WriteLine(testInputValuesB.ToString("Grad"));
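
            // (Sketch) Since l0's 20x5 weight is just the four 5x5 blocks stacked,
            // l0's output is the concatenation of l1..l4's outputs, and Backward
            // accumulates each split layer's contribution into the shared input, so
            // the two Grad printouts above should match element-wise; likewise, after
            // the Updates below, l1's weights should equal the first 5 rows of l0's.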

            sgd.Update();
            sgdSplit.Update();

            Console.WriteLine("\nl0 Weight");
            Console.WriteLine(l0.Weight);

            Console.WriteLine("\nl1 Weight");
            Console.WriteLine(l1.Weight);

            Console.WriteLine("\nl0 Bias");
            Console.WriteLine(l0.Bias);

            Console.WriteLine("\nl1 Bias");
            Console.WriteLine(l1.Bias);
        }
Example #5
        public void SGDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = Initializer.GetRandomValues<Real[,]>(batchCount, inputCount);

            Real[,] dummyGy = Initializer.GetRandomValues<Real[,]>(batchCount, outputCount);
            Real[,] w       = Initializer.GetRandomValues<Real[,]>(outputCount, inputCount);

            Real[] b = Initializer.GetRandomValues<Real[]>(outputCount);

            //Chainer
            Linear<Real> cLinear = new Linear<Real>(inputCount, outputCount, false, w, b);

            NChainer.SGD<Real> cSgd = new NChainer.SGD<Real>();
            cSgd.Setup(cLinear);

            Variable<Real> cX = new Variable<Real>(input);

            Variable<Real> cY = cLinear.Forward(cX);

            cY.Grad = dummyGy;

            cY.Backward();

            cSgd.Update();

            //KelpNet
            CL.Linear<Real>   linear = new CL.Linear<Real>(inputCount, outputCount, false, w, b);
            KelpNet.SGD<Real> sgd    = new SGD<Real>();
            sgd.SetUp(linear);

            NdArray<Real> x = new NdArray<Real>(input, asBatch: true);

            NdArray<Real> y = linear.Forward(x)[0];

            y.Grad = dummyGy.Flatten();

            y.Backward();

            sgd.Update();


            Real[] cW = ((Real[,])cLinear.W.Data).Flatten();
            Real[] cb = (Real[])cLinear.b.Data;

            // Allowable tolerance
            Real delta = 0.00001f;

            // W (data after the update)
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            // b (data after the update)
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
Example #6
        public static void Run()
        {
            Console.WriteLine("Build Vocabulary.");

            Vocabulary vocabulary = new Vocabulary();

            string trainPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE, TRAIN_FILE_HASH);
            string validPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + VALID_FILE, VALID_FILE, VALID_FILE_HASH);
            string testPath  = InternetFileDownloader.Donwload(DOWNLOAD_URL + TEST_FILE, TEST_FILE, TEST_FILE_HASH);

            int[] trainData = vocabulary.LoadData(trainPath);
            int[] validData = vocabulary.LoadData(validPath);
            int[] testData  = vocabulary.LoadData(testPath);

            int nVocab = vocabulary.Length;

            Console.WriteLine("Network Initilizing.");
            FunctionStack <Real> model = new FunctionStack <Real>(
                new EmbedID <Real>(nVocab, N_UNITS, name: "l1 EmbedID"),
                new Dropout <Real>(),
                new LSTM <Real>(N_UNITS, N_UNITS, name: "l2 LSTM"),
                new Dropout <Real>(),
                new LSTM <Real>(N_UNITS, N_UNITS, name: "l3 LSTM"),
                new Dropout <Real>(),
                new Linear <Real>(N_UNITS, nVocab, name: "l4 Linear")
                );

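            // Initialize every parameter uniformly in (-0.1, 0.1)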
            for (int i = 0; i < model.Functions.Length; i++)
            {
                for (int j = 0; j < model.Functions[i].Parameters.Length; j++)
                {
                    for (int k = 0; k < model.Functions[i].Parameters[j].Data.Length; k++)
                    {
                        model.Functions[i].Parameters[j].Data[k] = ((Real)Mother.Dice.NextDouble() * 2.0f - 1.0f) / 10.0f;
                    }
                }
            }

            // Rather than hard-capping at the given threshold, a rate is computed from the L2 norm over all parameters' gradients and used to rescale them
            GradientClipping<Real> gradientClipping = new GradientClipping<Real>(threshold: GRAD_CLIP);
            SGD<Real> sgd = new SGD<Real>(learningRate: 0.1f);
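
            // (Sketch of that rule, not the library's exact code:)
            //   norm = sqrt(sum of g * g over every parameter gradient g in the model)
            //   rate = threshold / norm
            //   if (rate < 1) multiply every gradient by rate   // clip by global L2 norm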

            gradientClipping.SetUp(model);
            sgd.SetUp(model);

            Real wholeLen = trainData.Length;
            int  jump     = (int)Math.Floor(wholeLen / BATCH_SIZE);
            int  epoch    = 0;

            Console.WriteLine("Train Start.");

            for (int i = 0; i < jump * N_EPOCH; i++)
            {
                NdArray<Real> x = new NdArray<Real>(new[] { 1 }, BATCH_SIZE);
                NdArray<int>  t = new NdArray<int>(new[] { 1 }, BATCH_SIZE);

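                // Each batch lane j reads its own stride of the corpus; the target is the following word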
                for (int j = 0; j < BATCH_SIZE; j++)
                {
                    x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
                    t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
                }

                NdArray<Real> result  = model.Forward(x)[0];
                Real          sumLoss = new SoftmaxCrossEntropy<Real>().Evaluate(result, t);
                Console.WriteLine("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);
                model.Backward(result);

                //Run truncated BPTT
                if ((i + 1) % BPROP_LEN == 0)
                {
                    gradientClipping.Update();
                    sgd.Update();
                    model.ResetState();
                }

                if ((i + 1) % jump == 0)
                {
                    epoch++;
                    Console.WriteLine("evaluate");
                    Console.WriteLine("validation perplexity: {0}", Evaluate(model, validData));

                    if (epoch >= 6)
                    {
                        sgd.LearningRate /= 1.2f;
                        Console.WriteLine("learning rate =" + sgd.LearningRate);
                    }
                }
            }

            Console.WriteLine("test start");
            Console.WriteLine("test perplexity:" + Evaluate(model, testData));
        }
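
        // Evaluate is not shown in this excerpt. PTB perplexity is conventionally
        // exp of the mean cross-entropy over the dataset; here is a minimal sketch
        // in the style of the training loop above (the Clone() call is an assumption,
        // made so evaluation does not disturb the LSTM state being trained):
        static double Evaluate(FunctionStack<Real> model, int[] dataset)
        {
            FunctionStack<Real> predictor = (FunctionStack<Real>)model.Clone();
            predictor.ResetState();

            Real sumLoss = 0;

            for (int i = 0; i < dataset.Length - 1; i++)
            {
                NdArray<Real> x = new NdArray<Real>(new[] { 1 }, 1);
                NdArray<int>  t = new NdArray<int>(new[] { 1 }, 1);

                x.Data[0] = dataset[i];     // current word
                t.Data[0] = dataset[i + 1]; // the next word is the target

                NdArray<Real> result = predictor.Forward(x)[0];
                sumLoss += new SoftmaxCrossEntropy<Real>().Evaluate(result, t);
            }

            // perplexity = exp(average cross-entropy)
            return Math.Exp(sumLoss / (dataset.Length - 1));
        }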