Example #1
        static void predict(NdArray seq, FunctionStack model, int pre_length)
        {
            Ensure.Argument(model).NotNull();
            Ensure.Argument(seq).NotNull();
            Ensure.Argument(pre_length).GreaterThanOrEqualTo(0);


            Real[] pre_input_seq = new Real[seq.Data.Length / 4];
            if (pre_input_seq.Length < 1)
            {
                pre_input_seq = new Real[1];
            }
            Array.Copy(seq.Data, pre_input_seq, pre_input_seq.Length);

            List<Real> input_seq = new List<Real>();

            input_seq.AddRange(pre_input_seq);

            List<Real> output_seq = new List<Real> {
                input_seq[input_seq.Count - 1]
            };

            for (int i = 0; i < pre_length; i++)
            {
                Real future = predict_sequence(model, input_seq);
                input_seq.RemoveAt(0);
                input_seq.Add(future);
                output_seq.Add(future);
            }

            foreach (var t in output_seq)
            {
                RILogManager.Default?.SendDebug(t.ToString());
            }

            RILogManager.Default?.SendDebug(seq.ToString());
        }
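The helper predict_sequence is not shown in this example. A minimal sketch of what it could look like, assuming FunctionStack exposes a Predict method returning NdArray[] and that the last element of the returned Data is the next predicted value (both are assumptions, not confirmed by the sample above):

        //Hypothetical helper, not part of the sample above: wrap the current
        //window in an NdArray, run the model forward, and return the most
        //recently predicted value.
        static Real predict_sequence(FunctionStack model, List<Real> input_seq)
        {
            NdArray result = model.Predict(new NdArray(input_seq.ToArray()))[0];
            return result.Data[result.Data.Length - 1];
        }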
Example #2
        public static void Run()
        {
            //Align the Weight values before and after splitting
            Real[,] testWeightValues =
            {
                { -0.02690255,  0.08830735, -0.02041466,  -0.0431439, -0.07749002 },
                { -0.06963444, -0.03971611,   0.0597842,  0.08824182, -0.06649109 },
                { -0.04966073, -0.04697048, -0.02235234, -0.09396666,    0.073189 },
                {  0.06563969,  0.04446745, -0.07192299,  0.06784364,  0.09575776 },
                {  0.05012317, -0.08874852, -0.05977172, -0.05910181, -0.06009106 },
                { -0.05200623, -0.09679124,  0.02159978, -0.08058041, -0.01340541 },
                {  -0.0254951,  0.09963084,  0.00936683, -0.08179696,  0.09604459 },
                {  -0.0732494,  0.07253634,  0.05981455, -0.01007657, -0.02992892 },
                { -0.06818873, -0.02579817,  0.06767359, -0.03379837, -0.04880046 },
                { -0.06429326, -0.08964688,  -0.0960066, -0.00286683, -0.05761427 },
                {  -0.0454098,  0.07809167, -0.05030088, -0.02533244, -0.02322736 },
                { -0.00866754, -0.03614252,  0.05237325,  0.06478979, -0.03599609 },
                { -0.01789357, -0.04479434, -0.05765592,  0.03237658, -0.06403019 },
                { -0.02421552,  0.05533903, -0.08627617,    0.094624,  0.03319318 },
                {  0.02328842, -0.08234859, -0.07979888,  0.01439688, -0.03267198 },
                { -0.07128382,  0.08531934,  0.07180037,  0.04772871, -0.08938966 },
                {  0.09431138,  0.02094762,  0.04443646,  0.07653841,  0.02028433 },
                {  0.01844446, -0.08441339,  0.01957355,  0.04430714, -0.03080243 },
                {  -0.0261334, -0.03794889, -0.00638074,  0.07278767, -0.02165155 },
                {  0.08390063, -0.03253863,   0.0311571,  0.08088892, -0.07267931 }
            };

            Real[][,] testJaggWeightValues =
            {
                new Real[, ] {
                    { -0.02690255,0.08830735, -0.02041466, -0.0431439, -0.07749002 },
                    { -0.06963444,-0.03971611, 0.0597842, 0.08824182, -0.06649109 },
                    { -0.04966073,-0.04697048, -0.02235234, -0.09396666, 0.073189 },
                    { 0.06563969,0.04446745, -0.07192299, 0.06784364, 0.09575776 },
                    { 0.05012317,         -0.08874852, -0.05977172, -0.05910181, -0.06009106 }
                },
                new Real[, ] {
                    { -0.05200623,-0.09679124, 0.02159978, -0.08058041, -0.01340541 },
                    { -0.0254951,0.09963084, 0.00936683, -0.08179696, 0.09604459 },
                    { -0.0732494,0.07253634, 0.05981455, -0.01007657, -0.02992892 },
                    { -0.06818873,-0.02579817, 0.06767359, -0.03379837, -0.04880046 },
                    { -0.06429326,         -0.08964688, -0.0960066, -0.00286683, -0.05761427 }
                },
                new Real[, ] {
                    { -0.0454098,0.07809167, -0.05030088, -0.02533244, -0.02322736 },
                    { -0.00866754,-0.03614252, 0.05237325, 0.06478979, -0.03599609 },
                    { -0.01789357,-0.04479434, -0.05765592, 0.03237658, -0.06403019 },
                    { -0.02421552,0.05533903, -0.08627617, 0.094624, 0.03319318 },
                    { 0.02328842,         -0.08234859, -0.07979888, 0.01439688, -0.03267198 }
                },
                new Real[, ] {
                    { -0.07128382,0.08531934, 0.07180037, 0.04772871, -0.08938966 },
                    { 0.09431138,0.02094762, 0.04443646, 0.07653841, 0.02028433 },
                    { 0.01844446,-0.08441339, 0.01957355, 0.04430714, -0.03080243 },
                    { -0.0261334,-0.03794889, -0.00638074, 0.07278767, -0.02165155 },
                    { 0.08390063, -0.03253863, 0.0311571, 0.08088892, -0.07267931 }
                }
            };

            Linear l0 = new Linear(true, 5, 20, initialW: testWeightValues, name: "l0");
            Linear l1 = new Linear(true, 5, 5, initialW: testJaggWeightValues[0], name: "l1");
            Linear l2 = new Linear(true, 5, 5, initialW: testJaggWeightValues[1], name: "l2");
            Linear l3 = new Linear(true, 5, 5, initialW: testJaggWeightValues[2], name: "l3");
            Linear l4 = new Linear(true, 5, 5, initialW: testJaggWeightValues[3], name: "l4");

            l0.SetOptimizer(new SGD());

            SGD sgd = new SGD();

            l1.SetOptimizer(sgd);
            l2.SetOptimizer(sgd);
            l3.SetOptimizer(sgd);
            l4.SetOptimizer(sgd);


            //The inputs are identical, but Grad would be accumulated, so use separate NdArrays
            Real[]  testValue        = { 0.01618112, -0.08296648, -0.05545357, 0.00389254, -0.05727582 };
            NdArray testInputValuesA = new NdArray(testValue);
            NdArray testInputValuesB = new NdArray(testValue);

            RILogManager.Default?.SendDebug("l0 for");
            NdArray[] l0Result = l0.Forward(true, testInputValuesA);
            RILogManager.Default?.SendDebug(l0Result[0].ToString());

            RILogManager.Default?.SendDebug("l1 for");
            NdArray[] l1Result = l1.Forward(true, testInputValuesB);
            RILogManager.Default?.SendDebug(l1Result[0].ToString());

            RILogManager.Default?.SendDebug("l2 for");
            NdArray[] l2Result = l2.Forward(true, testInputValuesB);
            RILogManager.Default?.SendDebug(l2Result[0].ToString());

            RILogManager.Default?.SendDebug("l3 for");
            NdArray[] l3Result = l3.Forward(true, testInputValuesB);
            RILogManager.Default?.SendDebug(l3Result[0].ToString());

            RILogManager.Default?.SendDebug("l4 for");
            NdArray[] l4Result = l4.Forward(true, testInputValuesB);
            RILogManager.Default?.SendDebug(l4Result[0].ToString());


            //Create an appropriate Grad value
            l0Result[0].Grad = new Real[]
            {
                -2.42022760e-02, 5.02482988e-04, 2.52015481e-04, 8.08797951e-04, -7.19293347e-03,
                1.40045900e-04, 7.09874439e-05, 2.07651625e-04, 3.80124636e-02, -8.87162634e-04,
                -4.64874669e-04, -1.40792923e-03, -4.12280299e-02, -3.36557830e-04, -1.50323089e-04,
                -4.70047118e-04, 3.61101292e-02, -7.12957408e-04, -3.63163825e-04, -1.12809543e-03
            };

            l1Result[0].Grad = new Real[] { -2.42022760e-02, 5.02482988e-04, 2.52015481e-04, 8.08797951e-04, -7.19293347e-03 };
            l2Result[0].Grad = new Real[] { 1.40045900e-04, 7.09874439e-05, 2.07651625e-04, 3.80124636e-02, -8.87162634e-04 };
            l3Result[0].Grad = new Real[] { -4.64874669e-04, -1.40792923e-03, -4.12280299e-02, -3.36557830e-04, -1.50323089e-04 };
            l4Result[0].Grad = new Real[] { -4.70047118e-04, 3.61101292e-02, -7.12957408e-04, -3.63163825e-04, -1.12809543e-03 };


            //Backward
            l0.Backward(true, l0Result);
            l1.Backward(true, l1Result);
            l2.Backward(true, l2Result);
            l3.Backward(true, l3Result);
            l4.Backward(true, l4Result);

            RILogManager.Default?.SendDebug("l0 back");
            RILogManager.Default?.SendDebug(testInputValuesA.ToString("Grad"));
            RILogManager.Default?.SendDebug("l1-l4 sum back");
            RILogManager.Default?.SendDebug(testInputValuesB.ToString("Grad"));

            l0.Update();  //The form is irregular, but l0 contains its own SGD
            sgd.Update(); //l1-l4 were registered with this shared SGD optimizer

            RILogManager.Default?.SendDebug("l0 Weight");
            RILogManager.Default?.SendDebug(l0.Weight.ToString());
            RILogManager.Default?.SendDebug("l1 Weight");
            RILogManager.Default?.SendDebug(l1.Weight.ToString());
            RILogManager.Default?.SendDebug("l0 Bias");
            RILogManager.Default?.SendDebug(l0.Bias.ToString());
            RILogManager.Default?.SendDebug("l1 Bias");
            RILogManager.Default?.SendDebug(l1.Bias.ToString());
        }
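The point of this example is that one 5-input/20-output Linear layer and four 5-input/5-output Linear layers holding the same weight rows behave identically: the forward outputs line up block by block, and the summed backward gradients on testInputValuesB match those on testInputValuesA. A library-free sketch of the forward half of that equivalence (illustrative values only, not the library's implementation):

        //Illustrative only: y = W * x computed with the full 20x5 matrix equals
        //the four 5x5 row blocks applied to the same input, concatenated.
        static void SplitEquivalenceSketch()
        {
            var rnd = new Random(0);
            double[,] w = new double[20, 5];
            double[] x = new double[5];
            for (int i = 0; i < 20; i++) for (int j = 0; j < 5; j++) w[i, j] = rnd.NextDouble();
            for (int j = 0; j < 5; j++) x[j] = rnd.NextDouble();

            //Full layer: one 20-element output
            double[] full = new double[20];
            for (int i = 0; i < 20; i++)
                for (int j = 0; j < 5; j++)
                    full[i] += w[i, j] * x[j];

            //Split layers: four 5-element outputs taken from consecutive row blocks
            double[] split = new double[20];
            for (int block = 0; block < 4; block++)
                for (int i = 0; i < 5; i++)
                    for (int j = 0; j < 5; j++)
                        split[block * 5 + i] += w[block * 5 + i, j] * x[j];

            for (int i = 0; i < 20; i++)
                Console.WriteLine($"{full[i]:F6} == {split[i]:F6}");
        }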
        public void Shape(NdArray<int> array, string expSubstr)
        {
            Assert.Contains(expSubstr, array.ToString("S"));
        }
        public void ElementFormat(NdArray<int> array, string expected)
        {
            var actual = array.ToString("E(0000)");

            Assert.Equal(expected, actual);
        }
        public void Basic(NdArray<int> array, string expected)
        {
            var actual = array.ToString();

            Assert.Equal(expected, actual);
        }
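These three helpers read like xUnit theory bodies exercising the NdArray<int> ToString formats ("S" for shape, "E(0000)" for per-element formatting, and the parameterless default). A hedged sketch of how they might be driven, assuming the usual Xunit and System.Collections.Generic usings and that NdArray<int> can be constructed from an int[] as the generic samples below construct NdArray<Real> from a Real[]; the array contents and expected string are placeholders, not the library's real output:

        //Hypothetical wiring: the test data below is a placeholder, not the
        //library's actual ToString output.
        public static IEnumerable<object[]> BasicCases()
        {
            yield return new object[] { new NdArray<int>(new[] { 1, 2, 3 }), "placeholder expected output" };
        }

        [Theory]
        [MemberData(nameof(BasicCases))]
        public void Basic_Theory(NdArray<int> array, string expected)
        {
            Basic(array, expected);
        }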
Example #6
        public static void Run()
        {
            //Align the Weight values before and after splitting
            Real[,] testWeightValues =
            {
                { -0.02690255,  0.08830735, -0.02041466,  -0.0431439, -0.07749002 },
                { -0.06963444, -0.03971611,   0.0597842,  0.08824182, -0.06649109 },
                { -0.04966073, -0.04697048, -0.02235234, -0.09396666,    0.073189 },
                {  0.06563969,  0.04446745, -0.07192299,  0.06784364,  0.09575776 },
                {  0.05012317, -0.08874852, -0.05977172, -0.05910181, -0.06009106 },
                { -0.05200623, -0.09679124,  0.02159978, -0.08058041, -0.01340541 },
                {  -0.0254951,  0.09963084,  0.00936683, -0.08179696,  0.09604459 },
                {  -0.0732494,  0.07253634,  0.05981455, -0.01007657, -0.02992892 },
                { -0.06818873, -0.02579817,  0.06767359, -0.03379837, -0.04880046 },
                { -0.06429326, -0.08964688,  -0.0960066, -0.00286683, -0.05761427 },
                {  -0.0454098,  0.07809167, -0.05030088, -0.02533244, -0.02322736 },
                { -0.00866754, -0.03614252,  0.05237325,  0.06478979, -0.03599609 },
                { -0.01789357, -0.04479434, -0.05765592,  0.03237658, -0.06403019 },
                { -0.02421552,  0.05533903, -0.08627617,    0.094624,  0.03319318 },
                {  0.02328842, -0.08234859, -0.07979888,  0.01439688, -0.03267198 },
                { -0.07128382,  0.08531934,  0.07180037,  0.04772871, -0.08938966 },
                {  0.09431138,  0.02094762,  0.04443646,  0.07653841,  0.02028433 },
                {  0.01844446, -0.08441339,  0.01957355,  0.04430714, -0.03080243 },
                {  -0.0261334, -0.03794889, -0.00638074,  0.07278767, -0.02165155 },
                {  0.08390063, -0.03253863,   0.0311571,  0.08088892, -0.07267931 }
            };

            Real[][,] testJaggWeightValues =
            {
                new Real[, ] {
                    { -0.02690255,0.08830735, -0.02041466, -0.0431439, -0.07749002 },
                    { -0.06963444,-0.03971611, 0.0597842, 0.08824182, -0.06649109 },
                    { -0.04966073,-0.04697048, -0.02235234, -0.09396666, 0.073189 },
                    { 0.06563969,0.04446745, -0.07192299, 0.06784364, 0.09575776 },
                    { 0.05012317,         -0.08874852, -0.05977172, -0.05910181, -0.06009106 }
                },
                new Real[, ] {
                    { -0.05200623,-0.09679124, 0.02159978, -0.08058041, -0.01340541 },
                    { -0.0254951,0.09963084, 0.00936683, -0.08179696, 0.09604459 },
                    { -0.0732494,0.07253634, 0.05981455, -0.01007657, -0.02992892 },
                    { -0.06818873,-0.02579817, 0.06767359, -0.03379837, -0.04880046 },
                    { -0.06429326,         -0.08964688, -0.0960066, -0.00286683, -0.05761427 }
                },
                new Real[, ] {
                    { -0.0454098,0.07809167, -0.05030088, -0.02533244, -0.02322736 },
                    { -0.00866754,-0.03614252, 0.05237325, 0.06478979, -0.03599609 },
                    { -0.01789357,-0.04479434, -0.05765592, 0.03237658, -0.06403019 },
                    { -0.02421552,0.05533903, -0.08627617, 0.094624, 0.03319318 },
                    { 0.02328842,         -0.08234859, -0.07979888, 0.01439688, -0.03267198 }
                },
                new Real[, ] {
                    { -0.07128382,0.08531934, 0.07180037, 0.04772871, -0.08938966 },
                    { 0.09431138,0.02094762, 0.04443646, 0.07653841, 0.02028433 },
                    { 0.01844446,-0.08441339, 0.01957355, 0.04430714, -0.03080243 },
                    { -0.0261334,-0.03794889, -0.00638074, 0.07278767, -0.02165155 },
                    { 0.08390063, -0.03253863, 0.0311571, 0.08088892, -0.07267931 }
                }
            };

            Linear l0 = new Linear(5, 20, initialW: testWeightValues, name: "l0");

            Linear l1 = new Linear(5, 5, initialW: testJaggWeightValues[0], name: "l1");
            Linear l2 = new Linear(5, 5, initialW: testJaggWeightValues[1], name: "l2");
            Linear l3 = new Linear(5, 5, initialW: testJaggWeightValues[2], name: "l3");
            Linear l4 = new Linear(5, 5, initialW: testJaggWeightValues[3], name: "l4");

            //Set the Optimizer on the Function
            l0.SetOptimizer(new SGD());

            //Register the Functions with the Optimizer
            SGD sgd = new SGD();

            l1.SetOptimizer(sgd);
            l2.SetOptimizer(sgd);
            l3.SetOptimizer(sgd);
            l4.SetOptimizer(sgd);


            //The inputs are identical, but Grad would be accumulated, so keep them separate
            Real[]  testValue        = { 0.01618112, -0.08296648, -0.05545357, 0.00389254, -0.05727582 };
            NdArray testInputValuesA = new NdArray(testValue);
            NdArray testInputValuesB = new NdArray(testValue);

            Console.WriteLine("l0 for");
            NdArray l0Result = l0.Forward(testInputValuesA)[0];

            Console.WriteLine(l0Result);

            Console.WriteLine("\nl1 for");
            NdArray l1Result = l1.Forward(testInputValuesB)[0];

            Console.WriteLine(l1Result);

            Console.WriteLine("\nl2 for");
            NdArray l2Result = l2.Forward(testInputValuesB)[0];

            Console.WriteLine(l2Result);

            Console.WriteLine("\nl3 for");
            NdArray l3Result = l3.Forward(testInputValuesB)[0];

            Console.WriteLine(l3Result);

            Console.WriteLine("\nl4 for");
            NdArray l4Result = l4.Forward(testInputValuesB)[0];

            Console.WriteLine(l4Result);

            Console.WriteLine();

            //Fabricate suitable Grad values
            l0Result.Grad = new Real[]
            {
                -2.42022760e-02, 5.02482988e-04, 2.52015481e-04, 8.08797951e-04, -7.19293347e-03,
                1.40045900e-04, 7.09874439e-05, 2.07651625e-04, 3.80124636e-02, -8.87162634e-04,
                -4.64874669e-04, -1.40792923e-03, -4.12280299e-02, -3.36557830e-04, -1.50323089e-04,
                -4.70047118e-04, 3.61101292e-02, -7.12957408e-04, -3.63163825e-04, -1.12809543e-03
            };

            l1Result.Grad = new Real[] { -2.42022760e-02, 5.02482988e-04, 2.52015481e-04, 8.08797951e-04, -7.19293347e-03 };
            l2Result.Grad = new Real[] { 1.40045900e-04, 7.09874439e-05, 2.07651625e-04, 3.80124636e-02, -8.87162634e-04 };
            l3Result.Grad = new Real[] { -4.64874669e-04, -1.40792923e-03, -4.12280299e-02, -3.36557830e-04, -1.50323089e-04 };
            l4Result.Grad = new Real[] { -4.70047118e-04, 3.61101292e-02, -7.12957408e-04, -3.63163825e-04, -1.12809543e-03 };


            //Run Backward
            l0.Backward(l0Result);

            l1.Backward(l1Result);
            l2.Backward(l2Result);
            l3.Backward(l3Result);
            l4.Backward(l4Result);

            Console.WriteLine("\nl0 back");
            Console.WriteLine(testInputValuesA.ToString("Grad"));

            Console.WriteLine("\nl1-l4 sum back");
            Console.WriteLine(testInputValuesB.ToString("Grad"));

            l0.Update();  //The form is irregular, but l0 contains its own SGD
            sgd.Update(); //Here the functions were registered with the Optimizer instead

            Console.WriteLine("\nl0 Weight");
            Console.WriteLine(l0.Weight);

            Console.WriteLine("\nl1 Weight");
            Console.WriteLine(l1.Weight);

            Console.WriteLine("\nl0 Bias");
            Console.WriteLine(l0.Bias);

            Console.WriteLine("\nl1 Bias");
            Console.WriteLine(l1.Bias);
        }
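Both l0.Update() and sgd.Update() ultimately apply the same plain stochastic-gradient-descent step to every parameter they manage. A minimal, library-free sketch of that step (the learning-rate value is illustrative):

        //Illustrative SGD step: move each weight against its gradient, scaled by
        //the learning rate, then clear the gradient for the next accumulation.
        static void SgdStepSketch(double[] weight, double[] grad, double learningRate = 0.1)
        {
            for (int i = 0; i < weight.Length; i++)
            {
                weight[i] -= learningRate * grad[i];
                grad[i] = 0;
            }
        }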
Example #7
        public static void Run()
        {
            //Align the Weight values before and after splitting
            Real[,] testWeightValues = new Real[, ] {
                { -0.02690255f, 0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                { -0.06963444f, -0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                { -0.04966073f, -0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                { 0.06563969f, 0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                { 0.05012317f, -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f },
                { -0.05200623f, -0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                { -0.0254951f, 0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                { -0.0732494f, 0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                { -0.06818873f, -0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                { -0.06429326f, -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f },
                { -0.0454098f, 0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                { -0.00866754f, -0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                { -0.01789357f, -0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                { -0.02421552f, 0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                { 0.02328842f, -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f },
                { -0.07128382f, 0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                { 0.09431138f, 0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                { 0.01844446f, -0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                { -0.0261334f, -0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
            };

            Real[][,] testJaggWeightValues =
            {
                new Real[, ] {
                    { -0.02690255f,0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                    { -0.06963444f,-0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                    { -0.04966073f,-0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                    { 0.06563969f,0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                    { 0.05012317f,         -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f }
                },
                new Real[, ] {
                    { -0.05200623f,-0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                    { -0.0254951f,0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                    { -0.0732494f,0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                    { -0.06818873f,-0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                    { -0.06429326f,         -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f }
                },
                new Real[, ] {
                    { -0.0454098f,0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                    { -0.00866754f,-0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                    { -0.01789357f,-0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                    { -0.02421552f,0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                    { 0.02328842f,         -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f }
                },
                new Real[, ] {
                    { -0.07128382f,0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                    { 0.09431138f,0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                    { 0.01844446f,-0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                    { -0.0261334f,-0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                    { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
                }
            };

            Linear<Real> l0 = new Linear<Real>(5, 20, initialW: testWeightValues, name: "l0");

            Linear<Real> l1 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[0], name: "l1");
            Linear<Real> l2 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[1], name: "l2");
            Linear<Real> l3 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[2], name: "l3");
            Linear<Real> l4 = new Linear<Real>(5, 5, initialW: testJaggWeightValues[3], name: "l4");

            //Set the Optimizer on the Function
            SGD<Real> sgd = new SGD<Real>();

            sgd.SetUp(l0);

            //Register the Functions with the Optimizer
            SGD<Real> sgdSplit = new SGD<Real>();

            sgdSplit.SetUp(l1);
            sgdSplit.SetUp(l2);
            sgdSplit.SetUp(l3);
            sgdSplit.SetUp(l4);


            //The inputs are identical, but Grad would be accumulated, so keep them separate
            Real[]         testValue        = new Real[] { 0.01618112f, -0.08296648f, -0.05545357f, 0.00389254f, -0.05727582f };
            NdArray<Real>  testInputValuesA = new NdArray<Real>(testValue);
            NdArray<Real>  testInputValuesB = new NdArray<Real>(testValue);

            Console.WriteLine("l0 for");
            NdArray<Real> l0Result = l0.Forward(testInputValuesA)[0];

            Console.WriteLine(l0Result);

            Console.WriteLine("\nl1 for");
            NdArray<Real> l1Result = l1.Forward(testInputValuesB)[0];

            Console.WriteLine(l1Result);

            Console.WriteLine("\nl2 for");
            NdArray<Real> l2Result = l2.Forward(testInputValuesB)[0];

            Console.WriteLine(l2Result);

            Console.WriteLine("\nl3 for");
            NdArray<Real> l3Result = l3.Forward(testInputValuesB)[0];

            Console.WriteLine(l3Result);

            Console.WriteLine("\nl4 for");
            NdArray<Real> l4Result = l4.Forward(testInputValuesB)[0];

            Console.WriteLine(l4Result);

            Console.WriteLine();

            //Fabricate suitable Grad values
            l0Result.Grad = new Real[]
            {
                -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f,
                1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f,
                -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f,
                -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f
            };

            l1Result.Grad = new Real[] { -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f };
            l2Result.Grad = new Real[] { 1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f };
            l3Result.Grad = new Real[] { -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f };
            l4Result.Grad = new Real[] { -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f };


            //Run Backward
            l0.Backward(l0Result);

            l1.Backward(l1Result);
            l2.Backward(l2Result);
            l3.Backward(l3Result);
            l4.Backward(l4Result);

            Console.WriteLine("\nl0 back");
            Console.WriteLine(testInputValuesA.ToString("Grad"));

            Console.WriteLine("\nl1-l4 sum back");
            Console.WriteLine(testInputValuesB.ToString("Grad"));

            sgd.Update();
            sgdSplit.Update();

            Console.WriteLine("\nl0 Weight");
            Console.WriteLine(l0.Weight);

            Console.WriteLine("\nl1 Weight");
            Console.WriteLine(l1.Weight);

            Console.WriteLine("\nl0 Bias");
            Console.WriteLine(l0.Bias);

            Console.WriteLine("\nl1 Bias");
            Console.WriteLine(l1.Bias);
        }
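The "l1-l4 sum back" comparison works because a linear layer's backward pass adds the transposed weight matrix times the output gradient onto the input's Grad. Each of the four split layers contributes its own 5x5 block, and those contributions accumulate on the shared testInputValuesB to the same values the single 20-output layer writes onto testInputValuesA. A plain-C# illustration of that accumulation (not the library's implementation):

        //Illustrative only: input gradient = W^T * output gradient, accumulated
        //into a shared buffer. Calling this once with the full 20x5 matrix, or
        //four times with its 5x5 row blocks, yields the same five numbers.
        static void AccumulateInputGrad(double[,] w, double[] outGrad, double[] inGrad)
        {
            for (int i = 0; i < w.GetLength(0); i++)
                for (int j = 0; j < w.GetLength(1); j++)
                    inGrad[j] += w[i, j] * outGrad[i];
        }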