Code example #1
File: BlackScholesSolver.cs Project: omegahm/bohrium
        private static NdArray BlackScholes(bool callputflag, NdArray S, DATA X, DATA T, DATA r, DATA v)
        {
            var d1 = ((S / X).Log() + (r + v * v / 2.0f) * T) / (v * (DATA)Math.Sqrt(T));
            var d2 = d1 - v * (DATA)Math.Sqrt(T);

            if (callputflag)
                return S * CND(d1) - X * (DATA)Math.Exp(-r * T) * CND(d2);
            else
                return X * (DATA)Math.Exp(-r * T) * CND(-d2) - S * CND(-d1);
        }
Code example #2
File: kNNSolver.cs Project: omegahm/bohrium
        private static NdArray ComputeTargets(NdArray src, NdArray targets)
        {
            var @base = src[R.All, R.NewAxis];
            var target = src[R.All, R.All, R.NewAxis];

            var tmp = (@base - target).Pow(2);
            tmp = Add.Reduce(tmp);
            Sqrt.Apply(tmp, tmp);

            return Max.Reduce(tmp);
        }
Code example #3
File: BlackScholesSolver.cs Project: omegahm/bohrium
        private static NdArray CND(NdArray X)
        {
            DATA a1 = 0.31938153f, a2 = -0.356563782f, a3 = 1.781477937f, a4 = -1.821255978f, a5 = 1.330274429f;
            var L = X.Abs();
            var K = 1.0f / (1.0f + 0.2316419f * L);
            var w = 1.0f - 1.0f / ((DATA)Math.Sqrt(2 * Math.PI)) * (-L * L / 2.0f).Exp() * (a1 * K + a2 * (K.Pow(2)) + a3 * (K.Pow(3)) + a4 * (K.Pow(4)) + a5 * (K.Pow(5)));

            var mask1 = (NdArray)(X < 0);
            var mask2 = (NdArray)(X >= 0);

            w = w * mask2 + (1.0f - w) * mask1;
            return w;
        }
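The mask blend on the last lines of CND implements the symmetry N(x) = 1 - N(-x) of the standard normal CDF. For reference, the same Abramowitz-Stegun approximation as a plain scalar function (CndScalar is an illustrative helper using only System.Math, not part of the Bohrium example):

        private static double CndScalar(double x)
        {
            const double a1 = 0.31938153, a2 = -0.356563782, a3 = 1.781477937,
                         a4 = -1.821255978, a5 = 1.330274429;

            double l = Math.Abs(x);
            double k = 1.0 / (1.0 + 0.2316419 * l);
            double w = 1.0 - 1.0 / Math.Sqrt(2 * Math.PI) * Math.Exp(-l * l / 2.0)
                       * (a1 * k + a2 * Math.Pow(k, 2) + a3 * Math.Pow(k, 3)
                          + a4 * Math.Pow(k, 4) + a5 * Math.Pow(k, 5));

            //Same effect as the mask blend above: flip the result for negative x
            return x < 0 ? 1.0 - w : w;
        }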
Code example #4
        public static void FillCountTrueAxis(NdArray <int> target, int axis, NdArray <bool> source)
        {
            var(preparedSource, _) = NdArray <int> .PrepareAxisReduceSources(target, axis, source, null);

            target.Backend.CountTrueLastAxis(target, preparedSource);
        }
Code example #5
        /// <summary>
        /// Checks if any element of the NdArray is true, returning the result as a NdArray.
        /// </summary>
        /// <param name="source">The NdArray containing the source values.</param>
        /// <returns>A new NdArray containing the result of this operation.</returns>
        public static NdArray <bool> AnyNdArray(NdArray <bool> source)
        {
            var flattendArray = NdArray <bool> .Flattern(source);

            return(AnyAxis(0, flattendArray));
        }
Code example #6
        public static void FillXor(NdArray <bool> target, NdArray <bool> lhs, NdArray <bool> rhs)
        {
            var(preparedLhs, preparedRhs) = NdArray <bool> .PrepareElemwiseSources(target, lhs, rhs);

            target.Backend.Xor(target, preparedLhs, preparedRhs);
        }
Code example #7
 public override NdArray SingleInputForward(NdArray x)
 {
     return(IsParallel ? this.NeedPreviousForwardGpu(x) : base.SingleInputForward(x));
 }
Code example #8
        public void SGDRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount  = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount  = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, inputCount });

            Real[,] dummyGy = (Real[, ])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
            Real[,] w       = (Real[, ])Initializer.GetRealNdArray(new[] { outputCount, inputCount });

            Real[] b = Initializer.GetRealArray(outputCount);

            //Chainer
            NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));
            NChainer.SGD <Real>    cSgd    = new NChainer.SGD <Real>();
            cSgd.Setup(cLinear);

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cLinear.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            cSgd.Update();

            //KelpNet
            KelpNet.Linear linear = new KelpNet.Linear(inputCount, outputCount, false, w, b);
            KelpNet.SGD    sgd    = new SGD();
            sgd.SetUp(linear);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = linear.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            sgd.Update();


            Real[] cW = Real.ToRealArray((Real[, ])cLinear.W.Data);
            Real[] cb = (Real[])cLinear.b.Data;

            //Compute the tolerance
            double delta = 0.00001;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
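The test above only checks that KelpNet and Chainer reach the same weights after one optimizer step; the update itself is plain SGD, parameter -= learningRate * gradient. A stand-alone sketch (SgdUpdate is illustrative and belongs to neither framework):

        private static void SgdUpdate(double[] parameter, double[] gradient, double learningRate)
        {
            //Vanilla SGD: move each parameter against its gradient
            for (int i = 0; i < parameter.Length; i++)
            {
                parameter[i] -= learningRate * gradient[i];
            }
        }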
Code example #9
        public void Slice()
        {
            void core(NdArray <int> input, IndexOrRange[] indices, NdArray <int> ans)
            => Assert.Equal(ans, input[indices]);

            core(NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 }),
                 new IndexOrRange[] { Range.Create(0, 8) },
                 NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 })
                 );
            core(NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 }),
                 new IndexOrRange[] { Range.Create(0, 4) },
                 NdArray.Create(new int[] { 0, 1, 2, 3 })
                 );
            core(NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 }),
                 new IndexOrRange[] { Range.Create(0, -4) },
                 NdArray.Create(new int[] { 0, 1, 2, 3 })
                 );
            core(NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 }),
                 new IndexOrRange[] { Range.Create(0, 8, 2) },
                 NdArray.Create(new int[] { 0, 2, 4, 6 })
                 );

            core(NdArray.Create(new int[, ] {
                { 0, 1, 2, 3, 4 }, { 5, 6, 7, 8, 9 }, { 10, 11, 12, 13, 14 }, { 15, 16, 17, 18, 19 }
            }),
                 new IndexOrRange[] { Range.Create(0, 4), Range.Create(0, 5) },
                 NdArray.Create(new int[, ]
            {
                { 0, 1, 2, 3, 4 }, { 5, 6, 7, 8, 9 }, { 10, 11, 12, 13, 14 },
                { 15, 16, 17, 18, 19 }
            })
                 );
            core(NdArray.Create(new int[, ] {
                { 0, 1, 2, 3, 4 }, { 5, 6, 7, 8, 9 }, { 10, 11, 12, 13, 14 }, { 15, 16, 17, 18, 19 }
            }),
                 new IndexOrRange[] { Range.Create(1, 3), Range.Create(1, 4) },
                 NdArray.Create(new int[, ] {
                { 6, 7, 8 }, { 11, 12, 13 }
            })
                 );
            core(NdArray.Create(new int[, ] {
                { 0, 1, 2, 3, 4 }, { 5, 6, 7, 8, 9 }, { 10, 11, 12, 13, 14 }, { 15, 16, 17, 18, 19 }
            }),
                 new IndexOrRange[] { Range.Create(1, -1), Range.Create(1, -1) },
                 NdArray.Create(new int[, ] {
                { 6, 7, 8 }, { 11, 12, 13 }
            })
                 );
            core(NdArray.Create(new int[, ] {
                { 0, 1, 2, 3, 4 }, { 5, 6, 7, 8, 9 }, { 10, 11, 12, 13, 14 }, { 15, 16, 17, 18, 19 }
            }),
                 new IndexOrRange[] { Range.Create(1, 4, 2), Range.Create(1, 4, 2) },
                 NdArray.Create(new int[, ] {
                { 6, 8 }, { 16, 18 }
            })
                 );

            core(NdArray.Create(new int[, , ]
            {
                {
                    { 0, 1, 2, 3, 4, 5 }, { 10, 11, 12, 13, 14, 15 },
                    { 20, 21, 22, 23, 24, 25 }, { 30, 31, 32, 33, 34, 35 },
                    { 40, 41, 42, 43, 44, 45 }
                },
                {
                    { 100, 101, 102, 103, 104, 105 }, { 110, 111, 112, 113, 114, 115 },
                    { 120, 121, 122, 123, 124, 125 }, { 130, 131, 132, 133, 134, 135 },
                    { 140, 141, 142, 143, 144, 145 }
                },
                {
                    { 200, 201, 202, 203, 204, 205 }, { 210, 211, 212, 213, 214, 215 },
                    { 220, 221, 222, 223, 224, 225 }, { 230, 231, 232, 233, 234, 235 },
                    { 240, 241, 242, 243, 244, 245 }
                },
                {
                    { 300, 301, 302, 303, 304, 305 }, { 310, 311, 312, 313, 314, 315 },
                    { 320, 321, 322, 323, 324, 325 }, { 330, 331, 332, 333, 334, 335 },
                    { 340, 341, 342, 343, 344, 345 }
                },
            }),
                 new IndexOrRange[]
                 { Range.Create(0, 4), Range.Create(0, 5), Range.Create(0, 6) },
                 NdArray.Create(new int[, , ]
            {
                {
                    { 0, 1, 2, 3, 4, 5 }, { 10, 11, 12, 13, 14, 15 },
                    { 20, 21, 22, 23, 24, 25 }, { 30, 31, 32, 33, 34, 35 },
                    { 40, 41, 42, 43, 44, 45 }
                },
                {
                    { 100, 101, 102, 103, 104, 105 }, { 110, 111, 112, 113, 114, 115 },
                    { 120, 121, 122, 123, 124, 125 }, { 130, 131, 132, 133, 134, 135 },
                    { 140, 141, 142, 143, 144, 145 }
                },
                {
                    { 200, 201, 202, 203, 204, 205 }, { 210, 211, 212, 213, 214, 215 },
                    { 220, 221, 222, 223, 224, 225 }, { 230, 231, 232, 233, 234, 235 },
                    { 240, 241, 242, 243, 244, 245 }
                },
                {
                    { 300, 301, 302, 303, 304, 305 }, { 310, 311, 312, 313, 314, 315 },
                    { 320, 321, 322, 323, 324, 325 }, { 330, 331, 332, 333, 334, 335 },
                    { 340, 341, 342, 343, 344, 345 }
                },
            })
                 );
            core(NdArray.Create(new int[, , ]
            {
                {
                    { 0, 1, 2, 3, 4, 5 }, { 10, 11, 12, 13, 14, 15 },
                    { 20, 21, 22, 23, 24, 25 }, { 30, 31, 32, 33, 34, 35 },
                    { 40, 41, 42, 43, 44, 45 }
                },
                {
                    { 100, 101, 102, 103, 104, 105 }, { 110, 111, 112, 113, 114, 115 },
                    { 120, 121, 122, 123, 124, 125 }, { 130, 131, 132, 133, 134, 135 },
                    { 140, 141, 142, 143, 144, 145 }
                },
                {
                    { 200, 201, 202, 203, 204, 205 }, { 210, 211, 212, 213, 214, 215 },
                    { 220, 221, 222, 223, 224, 225 }, { 230, 231, 232, 233, 234, 235 },
                    { 240, 241, 242, 243, 244, 245 }
                },
                {
                    { 300, 301, 302, 303, 304, 305 }, { 310, 311, 312, 313, 314, 315 },
                    { 320, 321, 322, 323, 324, 325 }, { 330, 331, 332, 333, 334, 335 },
                    { 340, 341, 342, 343, 344, 345 }
                },
            }),
                 new IndexOrRange[]
                 { Range.Create(1, 3), Range.Create(1, 4), Range.Create(1, 5) },
                 NdArray.Create(new int[, , ]
            {
                { { 111, 112, 113, 114 }, { 121, 122, 123, 124 }, { 131, 132, 133, 134 } },
                { { 211, 212, 213, 214 }, { 221, 222, 223, 224 }, { 231, 232, 233, 234 } },
            })
                 );
            core(NdArray.Create(new int[, , ]
            {
                {
                    { 0, 1, 2, 3, 4, 5 }, { 10, 11, 12, 13, 14, 15 },
                    { 20, 21, 22, 23, 24, 25 }, { 30, 31, 32, 33, 34, 35 },
                    { 40, 41, 42, 43, 44, 45 }
                },
                {
                    { 100, 101, 102, 103, 104, 105 }, { 110, 111, 112, 113, 114, 115 },
                    { 120, 121, 122, 123, 124, 125 }, { 130, 131, 132, 133, 134, 135 },
                    { 140, 141, 142, 143, 144, 145 }
                },
                {
                    { 200, 201, 202, 203, 204, 205 }, { 210, 211, 212, 213, 214, 215 },
                    { 220, 221, 222, 223, 224, 225 }, { 230, 231, 232, 233, 234, 235 },
                    { 240, 241, 242, 243, 244, 245 }
                },
                {
                    { 300, 301, 302, 303, 304, 305 }, { 310, 311, 312, 313, 314, 315 },
                    { 320, 321, 322, 323, 324, 325 }, { 330, 331, 332, 333, 334, 335 },
                    { 340, 341, 342, 343, 344, 345 }
                },
            }),
                 new IndexOrRange[]
                 { Range.Create(1, -1), Range.Create(1, -1), Range.Create(1, -1) },
                 NdArray.Create(new int[, , ]
            {
                { { 111, 112, 113, 114 }, { 121, 122, 123, 124 }, { 131, 132, 133, 134 } },
                { { 211, 212, 213, 214 }, { 221, 222, 223, 224 }, { 231, 232, 233, 234 } },
            })
                 );
            core(NdArray.Create(new int[, , ]
            {
                {
                    { 0, 1, 2, 3, 4, 5 }, { 10, 11, 12, 13, 14, 15 },
                    { 20, 21, 22, 23, 24, 25 }, { 30, 31, 32, 33, 34, 35 },
                    { 40, 41, 42, 43, 44, 45 }
                },
                {
                    { 100, 101, 102, 103, 104, 105 }, { 110, 111, 112, 113, 114, 115 },
                    { 120, 121, 122, 123, 124, 125 }, { 130, 131, 132, 133, 134, 135 },
                    { 140, 141, 142, 143, 144, 145 }
                },
                {
                    { 200, 201, 202, 203, 204, 205 }, { 210, 211, 212, 213, 214, 215 },
                    { 220, 221, 222, 223, 224, 225 }, { 230, 231, 232, 233, 234, 235 },
                    { 240, 241, 242, 243, 244, 245 }
                },
                {
                    { 300, 301, 302, 303, 304, 305 }, { 310, 311, 312, 313, 314, 315 },
                    { 320, 321, 322, 323, 324, 325 }, { 330, 331, 332, 333, 334, 335 },
                    { 340, 341, 342, 343, 344, 345 }
                },
            }),
                 new IndexOrRange[]
                 { Range.Create(0, 4, 3), Range.Create(0, 5, 3), Range.Create(0, 6, 3) },
                 NdArray.Create(new int[, , ]
            {
                { { 0, 3, }, { 30, 33 } },
                { { 300, 303, }, { 330, 333 } },
            })
                 );
        }
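The cases above establish the slicing rules: ranges are half-open, a negative bound counts back from the end of the axis, and an optional third argument is the step. A short usage sketch with the same API (the specific ranges below are chosen to match the semantics demonstrated by the test):

        var a = NdArray.Create(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 });

        //Range.Create(2, -2) selects indices 2..5 on a length-8 axis (end resolves to 8 - 2, exclusive)
        var middle = a[new IndexOrRange[] { Range.Create(2, -2) }];   // { 2, 3, 4, 5 }

        //Range.Create(0, 8, 2) takes every second element
        var evens = a[new IndexOrRange[] { Range.Create(0, 8, 2) }];  // { 0, 2, 4, 6 }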
Code example #10
File: kNNSolver.cs Project: omegahm/bohrium
 private static NdArray kNN(NdArray input)
 {
     return ComputeTargets(input, input.Clone());
 }
Code example #11
        protected override void NeedPreviousBackwardGpu(NdArray y, NdArray x)
        {
            Real[] gx          = new Real[x.Data.Length];
            Real[] activatedgy = this.Activator != null ? GetActivatedgy(y) : y.Grad;

            if (!NoBias)
            {
                CalcBiasGrad(activatedgy, y.Shape, y.BatchCount);
            }

            //gy is shared by both kernels below
            using (ComputeBuffer <Real> gpugY = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, activatedgy))
            {
                using (ComputeBuffer <Real> gpugW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.CopyHostPointer, this.Weight.Grad))
                    using (ComputeBuffer <Real> gpuX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, x.Data))
                    {
                        this.BackwardgWKernel.SetMemoryArgument(0, gpugY);
                        this.BackwardgWKernel.SetMemoryArgument(1, gpuX);
                        this.BackwardgWKernel.SetMemoryArgument(2, gpugW);
                        this.BackwardgWKernel.SetValueArgument(3, y.BatchCount);
                        this.BackwardgWKernel.SetValueArgument(4, this.InputCount);
                        this.BackwardgWKernel.SetValueArgument(5, y.Shape[0]);
                        this.BackwardgWKernel.SetValueArgument(6, y.Shape[1]);
                        this.BackwardgWKernel.SetValueArgument(7, y.Shape[2]);
                        this.BackwardgWKernel.SetValueArgument(8, x.Shape[1]);
                        this.BackwardgWKernel.SetValueArgument(9, x.Shape[2]);
                        this.BackwardgWKernel.SetValueArgument(10, x.Length);
                        this.BackwardgWKernel.SetValueArgument(11, this._strideX);
                        this.BackwardgWKernel.SetValueArgument(12, this._strideY);
                        this.BackwardgWKernel.SetValueArgument(13, this._padX);
                        this.BackwardgWKernel.SetValueArgument(14, this._padY);
                        this.BackwardgWKernel.SetValueArgument(15, this._kHeight);
                        this.BackwardgWKernel.SetValueArgument(16, this._kWidth);

                        Weaver.CommandQueue.Execute
                        (
                            this.BackwardgWKernel,
                            null,
                            new long[] { OutputCount * InputCount, this._kHeight, this._kWidth },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugW, ref this.Weight.Grad, true, null);
                    }

                using (ComputeBuffer <Real> gpugX = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.WriteOnly | ComputeMemoryFlags.AllocateHostPointer, gx.Length))
                    using (ComputeBuffer <Real> gpuW = new ComputeBuffer <Real>(Weaver.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.CopyHostPointer, this.Weight.Data))
                    {
                        this.BackwardgXKernel.SetMemoryArgument(0, gpugY);
                        this.BackwardgXKernel.SetMemoryArgument(1, gpuW);
                        this.BackwardgXKernel.SetMemoryArgument(2, gpugX);
                        this.BackwardgXKernel.SetValueArgument(3, this.OutputCount);
                        this.BackwardgXKernel.SetValueArgument(4, this.InputCount);
                        this.BackwardgXKernel.SetValueArgument(5, y.Shape[0]);
                        this.BackwardgXKernel.SetValueArgument(6, y.Shape[1]);
                        this.BackwardgXKernel.SetValueArgument(7, y.Shape[2]);
                        this.BackwardgXKernel.SetValueArgument(8, x.Shape[1]);
                        this.BackwardgXKernel.SetValueArgument(9, x.Shape[2]);
                        this.BackwardgXKernel.SetValueArgument(10, x.Length);
                        this.BackwardgXKernel.SetValueArgument(11, this._strideX);
                        this.BackwardgXKernel.SetValueArgument(12, this._strideY);
                        this.BackwardgXKernel.SetValueArgument(13, this._padX);
                        this.BackwardgXKernel.SetValueArgument(14, this._padY);
                        this.BackwardgXKernel.SetValueArgument(15, this._kHeight);
                        this.BackwardgXKernel.SetValueArgument(16, this._kWidth);

                        Weaver.CommandQueue.Execute
                        (
                            this.BackwardgXKernel,
                            null,
                            new long[] { y.BatchCount * x.Shape[0], x.Shape[1], x.Shape[2] },
                            null,
                            null
                        );

                        Weaver.CommandQueue.Finish();
                        Weaver.CommandQueue.ReadFromBuffer(gpugX, ref gx, true, null);
                    }
            }

            for (int i = 0; i < x.Grad.Length; i++)
            {
                x.Grad[i] += gx[i];
            }
        }
Code example #12
        protected override void NeedPreviousBackwardCpu(NdArray y, NdArray x)
        {
            Real[] activatedgy = this.Activator != null ? GetActivatedgy(y) : y.Grad;

            if (!NoBias)
            {
                CalcBiasGrad(activatedgy, y.Shape, y.BatchCount);
            }

            for (int batchCounter = 0; batchCounter < y.BatchCount; batchCounter++)
            {
                for (int och = 0; och < y.Shape[0]; och++)
                {
                    //For indexing into gW
                    int outChOffset = och * this.InputCount * this._kHeight * this._kWidth;

                    for (int oy = 0; oy < y.Shape[1] * this._strideY; oy += this._strideY)
                    {
                        //Jump ahead to skip unnecessary computation
                        int kyStartIndex = this._padY - oy < 0 ? 0 : this._padY - oy;
                        int kyLimit      = this._kHeight < x.Shape[1] - oy + this._padY ? this._kHeight : x.Shape[1] - oy + this._padY;

                        for (int ox = 0; ox < y.Shape[2] * this._strideX; ox += this._strideX)
                        {
                            //Jump ahead to skip unnecessary computation
                            int kxStartIndex = this._padX - ox < 0 ? 0 : this._padX - ox;
                            int kxLimit      = this._kWidth < x.Shape[2] - ox + this._padX ? this._kWidth : x.Shape[2] - ox + this._padX;

                            int gyIndex = batchCounter * y.Length + och * y.Shape[1] * y.Shape[2] + oy * y.Shape[2] + ox;

                            Real gyData = activatedgy[gyIndex];

                            for (int ich = 0; ich < x.Shape[0]; ich++)
                            {
                                //For indexing into gW
                                int inChOffset = ich * this._kHeight * this._kWidth;

                                //For indexing into input
                                int inputOffset = ich * x.Shape[1] * x.Shape[2] + batchCounter * x.Length;

                                for (int ky = kyStartIndex; ky < kyLimit; ky++)
                                {
                                    for (int kx = kxStartIndex; kx < kxLimit; kx++)
                                    {
                                        //W and gW have the same shape
                                        int wIndex = outChOffset + inChOffset + ky * this._kWidth + kx;

                                        //x and gx have the same shape
                                        int inputIndex = inputOffset + (ky + oy - this._padY) * x.Shape[2] + kx + ox - this._padX;

                                        this.Weight.Grad[wIndex] += x.Data[inputIndex] * gyData;

                                        x.Grad[inputIndex] += this.Weight.Data[wIndex] * gyData;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
Code example #13
        protected override NdArray NeedPreviousForwardCpu(NdArray input)
        {
            int outputHeight = (int)Math.Floor((input.Shape[1] - this._kHeight + this._padY * 2.0) / this._strideY) + 1;
            int outputWidth  = (int)Math.Floor((input.Shape[2] - this._kWidth + this._padX * 2.0) / this._strideX) + 1;

            Real[] result = new Real[this.OutputCount * outputHeight * outputWidth * input.BatchCount];

            for (int batchCounter = 0; batchCounter < input.BatchCount; batchCounter++)
            {
                int resultIndex = batchCounter * this.OutputCount * outputHeight * outputWidth;

                for (int och = 0; och < this.OutputCount; och++)
                {
                    //For indexing into W
                    int outChOffset = och * this.InputCount * this._kHeight * this._kWidth;

                    for (int oy = 0; oy < outputHeight * this._strideY; oy += this._strideY)
                    {
                        int kyStartIndex = oy - this._padY < 0 ? 0 : oy - this._padY;
                        int kyLimit      = this._kHeight + oy - this._padY < input.Shape[1] ? this._kHeight + oy - this._padY : input.Shape[1];

                        for (int ox = 0; ox < outputWidth * this._strideX; ox += this._strideX)
                        {
                            int kxStartIndex = ox - this._padX < 0 ? 0 : ox - this._padX;
                            int kxLimit      = this._kWidth + ox - this._padX < input.Shape[2] ? this._kWidth + ox - this._padX : input.Shape[2];

                            for (int ich = 0; ich < this.InputCount; ich++)
                            {
                                //For indexing into W
                                int inChOffset = ich * this._kHeight * this._kWidth;

                                //For indexing into input
                                int inputOffset = ich * input.Shape[1] * input.Shape[2];

                                for (int ky = kyStartIndex; ky < kyLimit; ky++)
                                {
                                    for (int kx = kxStartIndex; kx < kxLimit; kx++)
                                    {
                                        int wIndex     = outChOffset + inChOffset + (ky - oy + this._padY) * this._kWidth + kx - ox + this._padX;
                                        int inputIndex = inputOffset + ky * input.Shape[2] + kx + batchCounter * input.Length;

                                        result[resultIndex] += input.Data[inputIndex] * this.Weight.Data[wIndex];
                                    }
                                }
                            }

                            resultIndex++;
                        }
                    }
                }
            }

            if (this.Activator != null && !NoBias)
            {
                for (int batchCounter = 0; batchCounter < input.BatchCount; batchCounter++)
                {
                    int resultIndex = batchCounter * this.OutputCount * outputHeight * outputWidth;

                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int location = 0; location < outputHeight * outputWidth; location++)
                        {
                            result[resultIndex] += this.Bias.Data[och];
                            result[resultIndex]  = this.Activator.ForwardActivate(result[resultIndex]);

                            resultIndex++;
                        }
                    }
                }
            }
            else if (!NoBias)
            {
                for (int batchCounter = 0; batchCounter < input.BatchCount; batchCounter++)
                {
                    int resultIndex = batchCounter * this.OutputCount * outputHeight * outputWidth;

                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int location = 0; location < outputHeight * outputWidth; location++)
                        {
                            result[resultIndex] += this.Bias.Data[och];
                            resultIndex++;
                        }
                    }
                }
            }
            else if (this.Activator != null)
            {
                for (int batchCounter = 0; batchCounter < input.BatchCount; batchCounter++)
                {
                    int resultIndex = batchCounter * this.OutputCount * outputHeight * outputWidth;

                    for (int och = 0; och < this.OutputCount; och++)
                    {
                        for (int location = 0; location < outputHeight * outputWidth; location++)
                        {
                            result[resultIndex] = this.Activator.ForwardActivate(result[resultIndex]);
                            resultIndex++;
                        }
                    }
                }
            }

            return(NdArray.Convert(result, new[] { this.OutputCount, outputHeight, outputWidth }, input.BatchCount, this));
        }
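The output dimensions computed at the top of NeedPreviousForwardCpu follow the usual convolution arithmetic: outDim = floor((inDim - kernel + 2 * pad) / stride) + 1. Pulled out as a small helper for reference (ConvOutputSize is illustrative, not KelpNet code):

        private static int ConvOutputSize(int inDim, int kernel, int stride, int pad)
        {
            //e.g. ConvOutputSize(12, 3, 1, 0) == 10, matching the 12x12 input in code example #30
            return (int)Math.Floor((inDim - kernel + pad * 2.0) / stride) + 1;
        }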
Code example #14
 /// <summary>
 /// Returns a view of the diagonal along the given axes.
 /// </summary>
 /// <param name="ax1">The first dimension of the diagonal.</param>
 /// <param name="ax2">The second dimension of the diagonal.</param>
 /// <param name="source">The NdArray to operate on.</param>
 /// <returns>A NdArray where dimension <paramref name="ax1"/> is the diagonal and dimension <paramref name="ax2"/> is removed.</returns>
 public static NdArray <T> DiagAxis(int ax1, int ax2, NdArray <T> source)
 {
     return(source.Relayout(Layout.DiagAxis(ax1, ax2, source.Layout)));
 }
Code example #15
File: nBodySolver.cs Project: omegahm/bohrium
 private static void FillDiagonal(NdArray a, DATA val)
 {
     long d  = a.Shape.Dimensions[0].Length;
     a.Reshape(new NumCIL.Shape(new long[] { d }, 0, new long[] { d+1 })).Set(val);
 }
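The Reshape call above creates a length-d view with offset 0 and stride d + 1, which walks exactly the main diagonal of the d x d matrix: element (i, i) of a row-major matrix sits at flat offset i * (d + 1). In isolation (DiagonalFlatIndex is an illustrative helper, independent of NumCIL):

 //Flat offset of the i-th diagonal element of a row-major d x d matrix
 private static long DiagonalFlatIndex(long i, long d)
 {
     return i * (d + 1);
 }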
Code example #16
 /// <summary>Counts the elements being true.</summary>
 /// <param name="source">The NdArray containing the source values.</param>
 /// <returns>A scalar containing the result of this operation.</returns>
 public static int CountTrue(NdArray <bool> source)
 {
     return(CountTrueNdArray(source).Value);
 }
Code example #17
        public static void FillNegate(NdArray <bool> target, NdArray <bool> source)
        {
            var preparedSource = NdArray <bool> .PrepareElemwiseSources(target, source);

            target.Backend.Negate(target, preparedSource);
        }
Code example #18
        public void RandomTest(bool gpuEnable)
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int chCount    = Mother.Dice.Next(1, 5);
            int wideSize   = Mother.Dice.Next(8, 32);
            int heightSize = Mother.Dice.Next(8, 32);

            int kWidth  = Mother.Dice.Next(1, 5);
            int kHeight = Mother.Dice.Next(1, 5);

            int strideX = Mother.Dice.Next(1, 5);
            int strideY = Mother.Dice.Next(1, 5);

            int padX = Mother.Dice.Next(0, 5);
            int padY = Mother.Dice.Next(0, 5);

            bool coverAll = Mother.Dice.Next(1) == 0;

            int outputHeight = coverAll ?
                               (int)Math.Floor((heightSize - kHeight + padY * 2.0 + strideY - 1) / strideY) + 1 :
                               (int)Math.Floor((heightSize - kHeight + padY * 2.0) / strideY) + 1;

            int outputWidth = coverAll ?
                              (int)Math.Floor((wideSize - kWidth + padX * 2.0 + strideX - 1) / strideX) + 1 :
                              (int)Math.Floor((wideSize - kWidth + padX * 2.0) / strideX) + 1;

            Real[,,,] input = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, chCount, heightSize, wideSize });

            Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, chCount, outputHeight, outputWidth });

            //Chainer
            NChainer.MaxPooling2D <Real> cMaxPooling2D = new NChainer.MaxPooling2D <Real>(
                new[] { kHeight, kWidth },
                new[] { strideY, strideX },
                new[] { padY, padX }
                );

            Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));

            Variable <Real> cY = cMaxPooling2D.Forward(cX);

            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();


            //KelpNet
            KelpNet.MaxPooling2D maxPooling2D = new KelpNet.MaxPooling2D(
                new[] { kWidth, kHeight },
                new[] { strideX, strideY },
                new[] { padX, padY },
                gpuEnable: gpuEnable);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { chCount, heightSize, wideSize }, batchCount);

            NdArray y = maxPooling2D.Forward(x)[0];

            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            Real[] cYdata = Real.ToRealArray((Real[, , , ])cY.Data.Copy());
            Real[] cXgrad = Real.ToRealArray((Real[, , , ])cX.Grad.Copy());

            //Compute the tolerance
            double delta = 0.00001;

            Assert.AreEqual(cYdata.Length, y.Data.Length);
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);

            //y
            for (int i = 0; i < y.Data.Length; i++)
            {
                Assert.AreEqual(cYdata[i], y.Data[i], delta);
            }

            //x.grad
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
Code example #19
        public static void SingleOutputBackward(NdArray <Real> y, NdArray <Real> x, NdArray <Real> weight, NdArray <Real> bias, ComputeKernel backwardgWKernel, ComputeKernel backwardgXKernel, ICompressibleActivation <Real> activation)
        {
            int outputCount = weight.Shape[0];
            int inputCount  = weight.Shape[1];

            Real[] gx          = new Real[x.Data.Length];
            Real[] activatedgy = activation != null ? activation.GetActivatedgy(y, x) : y.Grad;

            if (bias != null)
            {
                LinearFunc.CalcBiasGrad(activatedgy, y.BatchCount, outputCount, bias.Grad);
            }

            using (ComputeBuffer <Real> gpugY = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, activatedgy))
            {
                using (ComputeBuffer <Real> gpugW = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.UseHostPointer, weight.Grad))
                    using (ComputeBuffer <Real> gpuX = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, x.Data))
                    {
                        backwardgWKernel.SetMemoryArgument(0, gpugY);
                        backwardgWKernel.SetMemoryArgument(1, gpuX);
                        backwardgWKernel.SetMemoryArgument(2, gpugW);
                        backwardgWKernel.SetValueArgument(3, y.BatchCount);
                        backwardgWKernel.SetValueArgument(4, outputCount);
                        backwardgWKernel.SetValueArgument(5, inputCount);

                        OpenCL.CommandQueue.Execute
                        (
                            backwardgWKernel,
                            null,
                            new long[] { inputCount, outputCount },
                            null,
                            null
                        );

                        OpenCL.CommandQueue.Finish();
                        OpenCL.CommandQueue.ReadFromBuffer(gpugW, ref weight.Grad, true, null);
                    }

                using (ComputeBuffer <Real> gpugX = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.WriteOnly | ComputeMemoryFlags.AllocateHostPointer, gx.Length))
                    using (ComputeBuffer <Real> gpuW = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, weight.Data))
                    {
                        backwardgXKernel.SetMemoryArgument(0, gpugY);
                        backwardgXKernel.SetMemoryArgument(1, gpuW);
                        backwardgXKernel.SetMemoryArgument(2, gpugX);
                        backwardgXKernel.SetValueArgument(3, y.BatchCount);
                        backwardgXKernel.SetValueArgument(4, outputCount);
                        backwardgXKernel.SetValueArgument(5, inputCount);

                        OpenCL.CommandQueue.Execute
                        (
                            backwardgXKernel,
                            null,
                            new long[] { inputCount, y.BatchCount },
                            null,
                            null
                        );

                        OpenCL.CommandQueue.Finish();
                        OpenCL.CommandQueue.ReadFromBuffer(gpugX, ref gx, true, null);
                    }
            }

            for (int i = 0; i < x.Grad.Length; i++)
            {
                x.Grad[i] += gx[i];
            }
        }
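The two kernels above compute the standard fully connected backward pass. A CPU reference sketch of the values they accumulate (illustrative only; Real is the project's numeric alias, and LinearBackwardCpu is not part of KelpNet):

        private static void LinearBackwardCpu(Real[] gy, Real[] x, Real[] w, Real[] gW, Real[] gx,
                                              int batchCount, int outputCount, int inputCount)
        {
            for (int b = 0; b < batchCount; b++)
            {
                for (int o = 0; o < outputCount; o++)
                {
                    Real g = gy[b * outputCount + o];

                    for (int i = 0; i < inputCount; i++)
                    {
                        //Weight gradient: outer product of gy and x, summed over the batch
                        gW[o * inputCount + i] += g * x[b * inputCount + i];

                        //Input gradient: gy projected back through the weights
                        gx[b * inputCount + i] += g * w[o * inputCount + i];
                    }
                }
            }
        }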
Code example #20
 public static Series <double> Random(int count, Randoms.IRandomGenerator gen = null)
 {
     return(NdArray.Random(count, gen).ToSeries());
 }
Code example #21
        public void Create()
        {
            void doNothing <T>(T value)
            {
            }

            Assert.Equal(new IndexArray(0),
                         NdArray.Create(new int[0]).Shape);
            Assert.Equal(new IndexArray(0, 0),
                         NdArray.Create(new int[0, 0]).Shape);
            Assert.Equal(new IndexArray(0, 0, 0),
                         NdArray.Create(new int[0, 0, 0]).Shape);
            Assert.Equal(new IndexArray(0, 0, 0, 0),
                         NdArray.Create(new int[0, 0, 0, 0]).Shape);
            Assert.Equal(new IndexArray(0, 0, 0, 0, 0),
                         NdArray.Create(new int[0, 0, 0, 0, 0]).Shape);
            Assert.Equal(new IndexArray(0, 0, 0, 0, 0, 0),
                         NdArray.Create(new int[0, 0, 0, 0, 0, 0]).Shape);

            {
                var ndarray = NdArray.Create(new int[] { 0, 1, 2, 3 });
                Assert.Equal(new IndexArray(4), ndarray.Shape);
                Assert.Equal(0, ndarray[0]);
                Assert.Equal(1, ndarray[1]);
                Assert.Equal(2, ndarray[2]);
                Assert.Equal(3, ndarray[3]);
                Assert.Equal(0, ndarray[-4]);
                Assert.Equal(1, ndarray[-3]);
                Assert.Equal(2, ndarray[-2]);
                Assert.Equal(3, ndarray[-1]);
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[4]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 0]));
            }
            {
                var ndarray = NdArray.Create(new int[, ] {
                    { 0, 1, 2, 3 }, { 4, 5, 6, 7 }
                });
                Assert.Equal(new IndexArray(2, 4), ndarray.Shape);
                Assert.Equal(0, ndarray[0, 0]);
                Assert.Equal(1, ndarray[0, 1]);
                Assert.Equal(2, ndarray[0, 2]);
                Assert.Equal(3, ndarray[0, 3]);
                Assert.Equal(4, ndarray[1, 0]);
                Assert.Equal(5, ndarray[1, 1]);
                Assert.Equal(6, ndarray[1, 2]);
                Assert.Equal(7, ndarray[1, 3]);
                Assert.Equal(0, ndarray[0, -4]);
                Assert.Equal(1, ndarray[0, -3]);
                Assert.Equal(2, ndarray[0, -2]);
                Assert.Equal(3, ndarray[0, -1]);
                Assert.Equal(4, ndarray[1, -4]);
                Assert.Equal(5, ndarray[1, -3]);
                Assert.Equal(6, ndarray[1, -2]);
                Assert.Equal(7, ndarray[1, -1]);
                Assert.Equal(0, ndarray[-2, 0]);
                Assert.Equal(1, ndarray[-2, 1]);
                Assert.Equal(2, ndarray[-2, 2]);
                Assert.Equal(3, ndarray[-2, 3]);
                Assert.Equal(4, ndarray[-1, 0]);
                Assert.Equal(5, ndarray[-1, 1]);
                Assert.Equal(6, ndarray[-1, 2]);
                Assert.Equal(7, ndarray[-1, 3]);
                Assert.Equal(0, ndarray[-2, -4]);
                Assert.Equal(1, ndarray[-2, -3]);
                Assert.Equal(2, ndarray[-2, -2]);
                Assert.Equal(3, ndarray[-2, -1]);
                Assert.Equal(4, ndarray[-1, -4]);
                Assert.Equal(5, ndarray[-1, -3]);
                Assert.Equal(6, ndarray[-1, -2]);
                Assert.Equal(7, ndarray[-1, -1]);
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[2, 0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 4]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 0, 0]));
            }
            {
                var ndarray = NdArray.Create(new int[, , ]
                {
                    { { 0, 1, 2, 3 }, { 4, 5, 6, 7 }, { 8, 9, 10, 11 } },
                    { { 12, 13, 14, 15 }, { 16, 17, 18, 19 }, { 20, 21, 22, 23 } }
                });
                Assert.Equal(new IndexArray(2, 3, 4), ndarray.Shape);
                Assert.Equal(0, ndarray[0, 0, 0]);
                Assert.Equal(1, ndarray[0, 0, 1]);
                Assert.Equal(2, ndarray[0, 0, 2]);
                Assert.Equal(3, ndarray[0, 0, 3]);
                Assert.Equal(4, ndarray[0, 1, 0]);
                Assert.Equal(5, ndarray[0, 1, 1]);
                Assert.Equal(6, ndarray[0, 1, 2]);
                Assert.Equal(7, ndarray[0, 1, 3]);
                Assert.Equal(8, ndarray[0, 2, 0]);
                Assert.Equal(9, ndarray[0, 2, 1]);
                Assert.Equal(10, ndarray[0, 2, 2]);
                Assert.Equal(11, ndarray[0, 2, 3]);
                Assert.Equal(12, ndarray[1, 0, 0]);
                Assert.Equal(13, ndarray[1, 0, 1]);
                Assert.Equal(14, ndarray[1, 0, 2]);
                Assert.Equal(15, ndarray[1, 0, 3]);
                Assert.Equal(16, ndarray[1, 1, 0]);
                Assert.Equal(17, ndarray[1, 1, 1]);
                Assert.Equal(18, ndarray[1, 1, 2]);
                Assert.Equal(19, ndarray[1, 1, 3]);
                Assert.Equal(20, ndarray[1, 2, 0]);
                Assert.Equal(21, ndarray[1, 2, 1]);
                Assert.Equal(22, ndarray[1, 2, 2]);
                Assert.Equal(23, ndarray[1, 2, 3]);
                Assert.Equal(0, ndarray[-2, -3, -4]);
                Assert.Equal(1, ndarray[-2, -3, -3]);
                Assert.Equal(2, ndarray[-2, -3, -2]);
                Assert.Equal(3, ndarray[-2, -3, -1]);
                Assert.Equal(4, ndarray[-2, -2, -4]);
                Assert.Equal(5, ndarray[-2, -2, -3]);
                Assert.Equal(6, ndarray[-2, -2, -2]);
                Assert.Equal(7, ndarray[-2, -2, -1]);
                Assert.Equal(8, ndarray[-2, -1, -4]);
                Assert.Equal(9, ndarray[-2, -1, -3]);
                Assert.Equal(10, ndarray[-2, -1, -2]);
                Assert.Equal(11, ndarray[-2, -1, -1]);
                Assert.Equal(12, ndarray[-1, -3, -4]);
                Assert.Equal(13, ndarray[-1, -3, -3]);
                Assert.Equal(14, ndarray[-1, -3, -2]);
                Assert.Equal(15, ndarray[-1, -3, -1]);
                Assert.Equal(16, ndarray[-1, -2, -4]);
                Assert.Equal(17, ndarray[-1, -2, -3]);
                Assert.Equal(18, ndarray[-1, -2, -2]);
                Assert.Equal(19, ndarray[-1, -2, -1]);
                Assert.Equal(20, ndarray[-1, -1, -4]);
                Assert.Equal(21, ndarray[-1, -1, -3]);
                Assert.Equal(22, ndarray[-1, -1, -2]);
                Assert.Equal(23, ndarray[-1, -1, -1]);
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[2, 0, 0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 3, 0]));
                Assert.Throws <ArgumentOutOfRangeException>(() => doNothing(ndarray[0, 0, 4]));
                Assert.Throws <ArgumentOutOfRangeException>(() =>
                                                            doNothing(ndarray[0, 0, 0, 0]));
            }
        }
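As the assertions show, a negative index addresses an axis from its end: index -k resolves to length - k, and anything outside the valid range throws ArgumentOutOfRangeException. The resolution rule in isolation (ResolveIndex is an illustrative helper, not the library's implementation):

        private static int ResolveIndex(int index, int axisLength)
        {
            //Negative indices count back from the end of the axis
            int resolved = index < 0 ? axisLength + index : index;

            if (resolved < 0 || resolved >= axisLength)
            {
                throw new ArgumentOutOfRangeException(nameof(index));
            }

            return resolved;
        }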
Code example #22
File: Program.cs Project: play3577/mxnet.csharp
        static void Main(string[] args)
        {
            OpWrapperGenerator op_wrapper_generator = new OpWrapperGenerator();

            var(Symbol, NdArray, Enums) = op_wrapper_generator.ParseAllOps();

            Symbol  = Symbol.Replace("\n", "\r\n");
            NdArray = NdArray.Replace("\n", "\r\n");
            Enums   = Enums.Replace("\n", "\r\n");

            string strSymbol = @"using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
// ReSharper disable UnusedMember.Global

namespace mxnet.csharp
{
    public partial class Symbol
    {
" + Symbol +
                               @"}
}
";

            File.WriteAllText(@"..\..\..\..\mxnet.csharp\OperatorWarpSymbol.cs", strSymbol);


            string strNdArray = @"using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
// ReSharper disable UnusedMember.Global

namespace mxnet.csharp
{
    public partial class NdArray
    {
" + NdArray +
                                @"}
}
";

            File.WriteAllText(@"..\..\..\..\mxnet.csharp\OperatorWarpNdArray.cs", strNdArray);


            string strEnum = @"using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
// ReSharper disable UnusedMember.Global

namespace mxnet.csharp
{
" + Enums + @"
                              
}
";

            File.WriteAllText(@"..\..\..\..\mxnet.csharp\OperatorWarpEnum.cs", strEnum);
        }
Code example #23
        public void MeanSquaredRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int batchCount = Mother.Dice.Next(1, 5);
            int ch         = Mother.Dice.Next(1, 5);
            int width      = Mother.Dice.Next(1, 16);
            int height     = Mother.Dice.Next(1, 16);

            Real[,,,] inputA = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);
            Real[,,,] inputB = Initializer.GetRandomValues <Real[, , , ]>(batchCount, ch, height, width);

            for (int i = 0; i < inputB.GetLength(0); i++)
            {
                for (int j = 0; j < inputB.GetLength(1); j++)
                {
                    for (int k = 0; k < inputB.GetLength(2); k++)
                    {
                        for (int l = 0; l < inputB.GetLength(3); l++)
                        {
                            inputB[i, j, k, l] *= (Real)3.1415f;
                        }
                    }
                }
            }

            //chainer
            NChainer.MeanSquaredError <Real> cMeanSquaredError = new NChainer.MeanSquaredError <Real>();

            Variable <Real> cX = new Variable <Real>(inputA);
            Variable <Real> cY = new Variable <Real>(inputB);

            Variable <Real> cZ = cMeanSquaredError.Forward(cX, cY);

            cZ.Backward();

            Real[] cXgrad = ((Real[, , , ])cX.Grad).Flatten();

            //KelpNet
            KelpNet.MeanSquaredError <Real> meanSquaredError = new KelpNet.MeanSquaredError <Real>();

            NdArray <Real> x = new NdArray <Real>(inputA, asBatch: true);
            NdArray <Real> y = new NdArray <Real>(inputB, asBatch: true);

            //In KelpNet, the backward side is run only through Evaluate
            NdArray <Real> z = meanSquaredError.Evaluate(x, y);


            //Compute the tolerance (the error is larger because the internal order of operations differs)
            Real delta = 0.001f;

            //Loss
            Assert.AreEqual(cZ.Data[0], z.Data[0], delta);

            //x.grad
            Assert.AreEqual(cXgrad.Length, x.Grad.Length);
            for (int i = 0; i < x.Grad.Length; i++)
            {
                Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
            }
        }
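What the test compares, written out in plain form: the loss is the mean of the squared element-wise differences, and its gradient with respect to x is 2 * (x - y) / N. A minimal sketch (MeanSquaredErrorSketch is illustrative, not the KelpNet or Chainer implementation):

        private static double MeanSquaredErrorSketch(double[] x, double[] y, double[] gradX)
        {
            double sum = 0;

            for (int i = 0; i < x.Length; i++)
            {
                double diff = x[i] - y[i];
                sum += diff * diff;

                //d(mean((x - y)^2)) / dx_i = 2 * (x_i - y_i) / N
                gradX[i] = 2.0 * diff / x.Length;
            }

            return sum / x.Length;
        }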
Code example #24
File: Optimizer.cs Project: ziichuan/KelpNet-master
        ////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Initializes a new instance of the KelpNet.Common.Optimizers.OptimizerParameter class.
        /// </summary>
        ///
        /// <param name="functionParameter">    The function parameter. </param>
        ////////////////////////////////////////////////////////////////////////////////////////////////////

        protected OptimizerParameter([CanBeNull] NdArray functionParameter)
        {
            FunctionParameter = functionParameter;
        }
Code example #25
File: TestAdam.cs Project: ShTair/ConvolutionChecker
        public void AdamRandomTest()
        {
            Python.Initialize();
            Chainer.Initialize();

            int inputCount = Mother.Dice.Next(2, 50);
            int outputCount = Mother.Dice.Next(2, 50);
            int batchCount = Mother.Dice.Next(1, 5);

            Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, inputCount });
            Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
            Real[,] w = (Real[,])Initializer.GetRealNdArray(new[] { outputCount, inputCount });
            Real[] b = Initializer.GetRealArray(outputCount);


            float alpha = (float)Mother.Dice.NextDouble(); //0.001f
            float beta1 = (float)Mother.Dice.NextDouble(); //0.9f;
            float beta2 = (float)Mother.Dice.NextDouble(); //0.999f;
            float eps = (float)Mother.Dice.NextDouble(); //1e-08f;
            float eta = (float)Mother.Dice.NextDouble(); //1.0f;

            //Chainer
            NChainer.Linear<Real> cLinear = new NChainer.Linear<Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));
            NChainer.Adam<Real> cAdam = new NChainer.Adam<Real>(alpha, beta1, beta2, eps, eta);
            cAdam.Setup(cLinear);

            Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

            Variable<Real> cY = cLinear.Forward(cX);
            cY.Grad = Real.ToBaseNdArray(dummyGy);

            cY.Backward();

            cAdam.Update();

            //KelpNet
            KelpNet.CL.Linear linear = new KelpNet.CL.Linear(inputCount, outputCount, false, w, b);
            KelpNet.Adam adam = new Adam(alpha, beta1, beta2, eps, eta);
            adam.SetUp(linear);

            NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

            NdArray y = linear.Forward(x)[0];
            y.Grad = Real.ToRealArray(dummyGy);

            y.Backward();

            adam.Update();


            Real[] cW = Real.ToRealArray((Real[,])cLinear.W.Data);
            Real[] cb = (Real[])cLinear.b.Data;

            //Compute the tolerance
            double delta = 0.00001;

            //W.grad
            Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
            for (int i = 0; i < linear.Weight.Data.Length; i++)
            {
                Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
            }

            //b.grad
            Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
            for (int i = 0; i < linear.Bias.Data.Length; i++)
            {
                Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
            }
        }
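For reference, the per-parameter Adam update (Kingma and Ba) that the test exercises, with bias correction. AdamStep is a textbook sketch only; it ignores the eta argument used above and is not KelpNet's or Chainer's implementation:

        private static void AdamStep(double[] w, double[] g, double[] m, double[] v, int t,
                                     double alpha, double beta1, double beta2, double eps)
        {
            for (int i = 0; i < w.Length; i++)
            {
                //Exponential moving averages of the gradient and its square
                m[i] = beta1 * m[i] + (1 - beta1) * g[i];
                v[i] = beta2 * v[i] + (1 - beta2) * g[i] * g[i];

                //Bias-corrected estimates (t is the 1-based step count)
                double mHat = m[i] / (1 - Math.Pow(beta1, t));
                double vHat = v[i] / (1 - Math.Pow(beta2, t));

                w[i] -= alpha * mHat / (Math.Sqrt(vHat) + eps);
            }
        }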
Code example #26
        ////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>   Backward CPU. </summary>
        ///
        /// <param name="y">    A NdArray to process. </param>
        /// <param name="x">    A NdArray to process. </param>
        ////////////////////////////////////////////////////////////////////////////////////////////////////

        void BackwardCpu([NotNull] NdArray y, [NotNull] NdArray x)
        {
            y.Grad = x.Grad.ToArray();
        }
Code example #27
 /// <summary>
 /// Checks if all elements of the NdArray are true.
 /// </summary>
 /// <param name="source">The NdArray containing the source values.</param>
 /// <returns>A scalar containing the result of this operation.</returns>
 public static bool All(NdArray <bool> source)
 {
     return(AllNdArray(source).Value);
 }
Code example #28
        public static void Run()
        {
            //Number of training iterations
            const int learningCount = 10000;

            //Training data
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data labels
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //Write out the network configuration in a FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            //Declare the optimizer
            nn.SetOptimizer(new MomentumSGD());

            //Training loop
            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Pass the loss function when running a training step
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Display the training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save the trained network
            ModelIO.Save(nn, "test.nn");

            //Load the trained network
            FunctionStack testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
Code example #29
 /// <summary>
 /// Checks if any elements of the NdArray are true.
 /// </summary>
 /// <param name="source">The NdArray containing the source values.</param>
 /// <returns>A scalar containing the result of this operation.</returns>
 public static bool Any(NdArray <bool> source)
 {
     return(AnyNdArray(source).Value);
 }
Code example #30
File: Test5.cs Project: Kawaian/KelpNet
        public static void Run()
        {
            //Describe each initial value
            Real[,,,] initial_W1 =
            {
                { { { 1.0,  0.5, 0.0 }, {  0.5, 0.0, -0.5 }, { 0.0, -0.5, -1.0 } } },
                { { { 0.0, -0.1, 0.1 }, { -0.3, 0.4,  0.7 }, { 0.5, -0.2,  0.2 } } }
            };
            Real[] initial_b1 = { 0.5, 1.0 };

            Real[,,,] initial_W2 =
            {
                { { { -0.1,  0.6 }, { 0.3, -0.9 } }, { {  0.7, 0.9 }, { -0.2, -0.3 } } },
                { { { -0.6, -0.1 }, { 0.3,  0.3 } }, { { -0.5, 0.8 }, {  0.9,  0.1 } } }
            };
            Real[] initial_b2 = { 0.1, 0.9 };

            Real[,] initial_W3 =
            {
                { 0.5, 0.3, 0.4, 0.2, 0.6, 0.1, 0.4, 0.3 },
                { 0.6, 0.4, 0.9, 0.1, 0.5, 0.2, 0.3, 0.4 }
            };
            Real[] initial_b3 = { 0.01, 0.02 };

            Real[,] initial_W4 = { { 0.8, 0.2 }, { 0.4, 0.6 } };
            Real[] initial_b4 = { 0.02, 0.01 };


            //Input data
            NdArray x = new NdArray(new Real[, , ] {
                {
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.2, 0.9, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.2, 0.8, 0.9, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.1, 0.8, 0.5, 0.8, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.3, 0.3, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.1, 0.0, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.7, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.4, 0.8, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.8, 0.4, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.2, 0.8, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.1, 0.8, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }
                }
            });

            //Teacher signal
            Real[] t = { 0.0, 1.0 };


            //If you want to check the contents of a layer, have an instance as a single layer
            Convolution2D l2 = new Convolution2D(1, 2, 3, initialW: initial_W1, initialb: initial_b1, name: "l2 Conv2D");

            //Writing the network configuration in FunctionStack
            FunctionStack nn = new FunctionStack(
                l2, //new Convolution2D(1, 2, 3, initialW: initial_W1, initialb: initial_b1),
                new ReLU(name: "l2 ReLU"),
                //new AveragePooling(2, 2, name: "l2 AVGPooling"),
                new MaxPooling(2, 2, name: "l2 MaxPooling"),
                new Convolution2D(2, 2, 2, initialW: initial_W2, initialb: initial_b2, name: "l3 Conv2D"),
                new ReLU(name: "l3 ReLU"),
                //new AveragePooling(2, 2, name: "l3 AVGPooling"),
                new MaxPooling(2, 2, name: "l3 MaxPooling"),
                new Linear(8, 2, initialW: initial_W3, initialb: initial_b3, name: "l4 Linear"),
                new ReLU(name: "l4 ReLU"),
                new Linear(2, 2, initialW: initial_W4, initialb: initial_b4, name: "l5 Linear")
                );

            //If you omit the optimizer declaration, the default SGD (0.1) is used
            nn.SetOptimizer(new SGD());

            //Run training
            Trainer.Train(nn, x, t, new MeanSquaredError(), false);

            //Update() consumes the gradients, so print them before updating
            Console.WriteLine("gw1");
            Console.WriteLine(l2.Weight.ToString("Grad"));

            Console.WriteLine("gb1");
            Console.WriteLine(l2.Bias.ToString("Grad"));

            //update
            nn.Update();

            Console.WriteLine("w1");
            Console.WriteLine(l2.Weight);

            Console.WriteLine("b1");
            Console.WriteLine(l2.Bias);
        }
コード例 #31
 /// <summary>
 /// Counts the number of true elements, returning the result as a scalar NdArray.
 /// </summary>
 /// <param name="source">The NdArray containing the source values.</param>
 /// <returns>A new scalar NdArray containing the result of this operation.</returns>
 public static NdArray <int> CountTrueNdArray(NdArray <bool> source)
 {
     return(CountTrueAxis(0, NdArray <bool> .Flatten(source)));
 }
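
 For reference only, CountTrue sums 1 for every true element. A minimal plain-C# sketch of the same semantics over a flat bool[] buffer (an illustration, not the library's implementation):

 static int CountTrueReference(bool[] values)
 {
     // Count every element that is true.
     int count = 0;
     foreach (bool v in values)
     {
         if (v)
         {
             count++;
         }
     }

     return count;
 }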
コード例 #32
 public AdaDeltaParameter(NdArray functionParameter, AdaDelta optimizer) : base(functionParameter)
 {
     this.msg       = new Real[functionParameter.Data.Length];
     this.msdx      = new Real[functionParameter.Data.Length];
     this.optimizer = optimizer;
 }
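
 The msg and msdx buffers hold the per-element running averages of squared gradients and squared updates that the AdaDelta rule needs. As a hedged illustration only (the optimizer's rho and epsilon hyperparameters are not shown in this snippet, and KelpNet's actual update code may differ), the textbook per-element AdaDelta step looks like this:

 // Sketch of the standard AdaDelta step for a single element, using double for clarity.
 // rho and eps are assumed hyperparameters; they are not part of the constructor above.
 static void AdaDeltaStep(ref double w, double grad, ref double msg, ref double msdx,
                          double rho = 0.95, double eps = 1e-6)
 {
     msg = rho * msg + (1 - rho) * grad * grad;                        // running E[g^2]
     double dx = -System.Math.Sqrt((msdx + eps) / (msg + eps)) * grad; // scaled step
     msdx = rho * msdx + (1 - rho) * dx * dx;                          // running E[dx^2]
     w += dx;
 }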
コード例 #33
        /// <summary>
        /// Element-wise choice between two sources depending on a condition.
        /// </summary>
        /// <param name="cond">The condition NdArray.</param>
        /// <param name="ifTrue">The NdArray containing the values to use for when an element of the condition is true.</param>
        /// <param name="ifFalse">The NdArray containing the values to use for when an element of the condition is false.</param>
        /// <returns>A new NdArray containing the result of this operation.</returns>
        public static NdArray <T> IfThenElse(NdArray <bool> condition, NdArray <T> ifTrue, NdArray <T> ifFalse)
        {
            var(target, cond, ift, iff) = NdArray <T> .PrepareElemwise <T, bool, T, T>(condition, ifTrue, ifFalse);

            var(cond2, ift2, iff2) = NdArray <T> .PrepareElemwiseSources(target, cond, ift, iff);

            target.Backend.IfThenElse(target, cond2, ift2, iff2);

            return(target);
        }
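
        As a reference for the semantics only (broadcasting and backend dispatch omitted), the element-wise choice picks from ifTrue where the condition is true and from ifFalse otherwise:

        // result[i] = cond[i] ? ifTrue[i] : ifFalse[i], over plain arrays of equal length.
        static T[] IfThenElseReference<T>(bool[] cond, T[] ifTrue, T[] ifFalse)
        {
            var result = new T[cond.Length];
            for (int i = 0; i < cond.Length; i++)
            {
                result[i] = cond[i] ? ifTrue[i] : ifFalse[i];
            }

            return result;
        }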
コード例 #34
ファイル: SampleX.cs プロジェクト: ziichuan/KelpNet
        public static void Run()
        {
            //Use the same weights before and after splitting the layer
            Real[,] testWeightValues = new Real[, ] {
                { -0.02690255f, 0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                { -0.06963444f, -0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                { -0.04966073f, -0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                { 0.06563969f, 0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                { 0.05012317f, -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f },
                { -0.05200623f, -0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                { -0.0254951f, 0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                { -0.0732494f, 0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                { -0.06818873f, -0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                { -0.06429326f, -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f },
                { -0.0454098f, 0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                { -0.00866754f, -0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                { -0.01789357f, -0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                { -0.02421552f, 0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                { 0.02328842f, -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f },
                { -0.07128382f, 0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                { 0.09431138f, 0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                { 0.01844446f, -0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                { -0.0261334f, -0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
            };

            Real[][,] testJaggWeightValues =
            {
                new Real[, ] {
                    { -0.02690255f, 0.08830735f, -0.02041466f, -0.0431439f, -0.07749002f },
                    { -0.06963444f, -0.03971611f, 0.0597842f, 0.08824182f, -0.06649109f },
                    { -0.04966073f, -0.04697048f, -0.02235234f, -0.09396666f, 0.073189f },
                    { 0.06563969f, 0.04446745f, -0.07192299f, 0.06784364f, 0.09575776f },
                    { 0.05012317f, -0.08874852f, -0.05977172f, -0.05910181f, -0.06009106f }
                },
                new Real[, ] {
                    { -0.05200623f, -0.09679124f, 0.02159978f, -0.08058041f, -0.01340541f },
                    { -0.0254951f, 0.09963084f, 0.00936683f, -0.08179696f, 0.09604459f },
                    { -0.0732494f, 0.07253634f, 0.05981455f, -0.01007657f, -0.02992892f },
                    { -0.06818873f, -0.02579817f, 0.06767359f, -0.03379837f, -0.04880046f },
                    { -0.06429326f, -0.08964688f, -0.0960066f, -0.00286683f, -0.05761427f }
                },
                new Real[, ] {
                    { -0.0454098f, 0.07809167f, -0.05030088f, -0.02533244f, -0.02322736f },
                    { -0.00866754f, -0.03614252f, 0.05237325f, 0.06478979f, -0.03599609f },
                    { -0.01789357f, -0.04479434f, -0.05765592f, 0.03237658f, -0.06403019f },
                    { -0.02421552f, 0.05533903f, -0.08627617f, 0.094624f, 0.03319318f },
                    { 0.02328842f, -0.08234859f, -0.07979888f, 0.01439688f, -0.03267198f }
                },
                new Real[, ] {
                    { -0.07128382f, 0.08531934f, 0.07180037f, 0.04772871f, -0.08938966f },
                    { 0.09431138f, 0.02094762f, 0.04443646f, 0.07653841f, 0.02028433f },
                    { 0.01844446f, -0.08441339f, 0.01957355f, 0.04430714f, -0.03080243f },
                    { -0.0261334f, -0.03794889f, -0.00638074f, 0.07278767f, -0.02165155f },
                    { 0.08390063f, -0.03253863f, 0.0311571f, 0.08088892f, -0.07267931f }
                }
            };

            Linear <Real> l0 = new Linear <Real>(5, 20, initialW: testWeightValues, name: "l0");

            Linear <Real> l1 = new Linear <Real>(5, 5, initialW: testJaggWeightValues[0], name: "l1");
            Linear <Real> l2 = new Linear <Real>(5, 5, initialW: testJaggWeightValues[1], name: "l2");
            Linear <Real> l3 = new Linear <Real>(5, 5, initialW: testJaggWeightValues[2], name: "l3");
            Linear <Real> l4 = new Linear <Real>(5, 5, initialW: testJaggWeightValues[3], name: "l4");

            //Set the optimizer on the Function
            SGD <Real> sgd = new SGD <Real>();

            sgd.SetUp(l0);

            //Register the Functions with the optimizer
            SGD <Real> sgdSplit = new SGD <Real>();

            sgdSplit.SetUp(l1);
            sgdSplit.SetUp(l2);
            sgdSplit.SetUp(l3);
            sgdSplit.SetUp(l4);


            //The inputs hold the same values, but grads accumulate on the NdArray, so use separate instances
            Real[]         testValue        = new Real[] { 0.01618112f, -0.08296648f, -0.05545357f, 0.00389254f, -0.05727582f };
            NdArray <Real> testInputValuesA = new NdArray <Real>(testValue);
            NdArray <Real> testInputValuesB = new NdArray <Real>(testValue);

            Console.WriteLine("l0 for");
            NdArray <Real> l0Result = l0.Forward(testInputValuesA)[0];

            Console.WriteLine(l0Result);

            Console.WriteLine("\nl1 for");
            NdArray <Real> l1Result = l1.Forward(testInputValuesB)[0];

            Console.WriteLine(l1Result);

            Console.WriteLine("\nl2 for");
            NdArray <Real> l2Result = l2.Forward(testInputValuesB)[0];

            Console.WriteLine(l2Result);

            Console.WriteLine("\nl3 for");
            NdArray <Real> l3Result = l3.Forward(testInputValuesB)[0];

            Console.WriteLine(l3Result);

            Console.WriteLine("\nl4 for");
            NdArray <Real> l4Result = l4.Forward(testInputValuesB)[0];

            Console.WriteLine(l4Result);

            Console.WriteLine();

            //Make up arbitrary grad values for the test
            l0Result.Grad = new Real[]
            {
                -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f,
                1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f,
                -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f,
                -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f
            };

            l1Result.Grad = new Real[] { -2.42022760e-02f, 5.02482988e-04f, 2.52015481e-04f, 8.08797951e-04f, -7.19293347e-03f };
            l2Result.Grad = new Real[] { 1.40045900e-04f, 7.09874439e-05f, 2.07651625e-04f, 3.80124636e-02f, -8.87162634e-04f };
            l3Result.Grad = new Real[] { -4.64874669e-04f, -1.40792923e-03f, -4.12280299e-02f, -3.36557830e-04f, -1.50323089e-04f };
            l4Result.Grad = new Real[] { -4.70047118e-04f, 3.61101292e-02f, -7.12957408e-04f, -3.63163825e-04f, -1.12809543e-03f };


            //Run backward
            l0.Backward(l0Result);

            l1.Backward(l1Result);
            l2.Backward(l2Result);
            l3.Backward(l3Result);
            l4.Backward(l4Result);

            Console.WriteLine("\nl0 back");
            Console.WriteLine(testInputValuesA.ToString("Grad"));

            Console.WriteLine("\nl1-l4 sum back");
            Console.WriteLine(testInputValuesB.ToString("Grad"));

            sgd.Update();
            sgdSplit.Update();

            Console.WriteLine("\nl0 Weight");
            Console.WriteLine(l0.Weight);

            Console.WriteLine("\nl1 Weight");
            Console.WriteLine(l1.Weight);

            Console.WriteLine("\nl0 Bias");
            Console.WriteLine(l0.Bias);

            Console.WriteLine("\nl1 Bias");
            Console.WriteLine(l1.Bias);
        }
コード例 #35
ファイル: RepeatTests.cs プロジェクト: omegahm/bohrium
        public static void RunTests()
        {
            var a = Generate.Range(2, 3, 4);
            var b = a.Transpose();

            if (b.Value[0, 0, 1] != 12)
                throw new Exception("Something failed in transpose");

            var c = Generate.Zeroes(100);
            c[new Range(0, 0, (long)Math.Sqrt(c.Shape.Length) + 1)] = 1;

            if (c.Value[99] != 1 || c.Value[22] != 1 || c.Value[10] != 0 || c.Value[12] != 0 || Add.Reduce(c).Value[0] != 10)
                throw new Exception(string.Format("Something failed in stride tricks: {0}, {1}, {2}, {3}, {4}", c.Value[99], c.Value[22], c.Value[10], c.Value[12], Add.Reduce(c).Value[0]));

            var d = Generate.Range(8) + 1;
            var e = d.Repeat(2);
            if (e.AsArray().LongLength != 16 || Add.Reduce(e).Value[0] != 72)
                throw new Exception("Something failed in simple repeat");
            var f = d.Repeat(3);
            if (f.AsArray().LongLength != 24 || Add.Reduce(f).Value[0] != 108)
                throw new Exception("Something failed in simple repeat");

            var g = d.Repeat(new long[] { 1, 2, 1, 3, 4, 1, 1, 2 });
            if (g.AsArray().LongLength != 15 || Add.Reduce(g).Value[0] != 69)
                throw new Exception("Something failed in extended repeat");

            var h = d.Reshape(new Shape(new long[] { 2, 2, 2 }));
            var i = h.Repeat(4, 0);
            if (i.Shape.Dimensions[0].Length != 8 || i.Shape.Elements != 8 * 4 || Add.Reduce(Add.Reduce(Add.Reduce(i))).Value[0] != 144 || i.Value[4, 0, 0] != 5 || i.Value[4, 1, 1] != 8 || i.Value[3, 0, 0] != 1)
                throw new Exception("Something failed in axis repeat");

            var j = h.Repeat(4, 1);
            if (j.Shape.Dimensions[1].Length != 8 || j.Shape.Elements != 8 * 4 || Add.Reduce(Add.Reduce(Add.Reduce(j))).Value[0] != 144 || j.Value[0, 4, 0] != 3 || j.Value[1, 4, 1] != 8 || j.Value[0, 3, 0] != 1)
                throw new Exception("Something failed in axis repeat");

            var k = h.Repeat(4, 2);
            if (k.Shape.Dimensions[2].Length != 8 || k.Shape.Elements != 8 * 4 || Add.Reduce(Add.Reduce(Add.Reduce(k))).Value[0] != 144 || k.Value[0, 0, 4] != 2 || k.Value[1, 1, 4] != 8 || k.Value[0, 0, 3] != 1)
                throw new Exception("Something failed in axis repeat");

            var l = h.Repeat(new long[] {4, 2}, 0);
            if (l.Shape.Dimensions[0].Length != 6 || l.Shape.Elements != 24 || Add.Reduce(Add.Reduce(Add.Reduce(l))).Value[0] != 92)
                throw new Exception("Something failed in axis repeat");

            var m = h.Repeat(new long[] { 4, 2 }, 1);
            if (m.Shape.Dimensions[1].Length != 6 || m.Shape.Elements != 24 || Add.Reduce(Add.Reduce(Add.Reduce(m))).Value[0] != 100)
                throw new Exception("Something failed in axis repeat");

            var n = h.Repeat(new long[] { 4, 2 }, 2);
            if (n.Shape.Dimensions[2].Length != 6 || n.Shape.Elements != 24 || Add.Reduce(Add.Reduce(Add.Reduce(n))).Value[0] != 104)
                throw new Exception("Something failed in axis repeat");

            var o = n.Flatten();
            if (o.AsArray().LongLength != 24)
                throw new Exception("Something failed in flatten");

            if (o.Sum() != 104 || o.Max() != 8)
                throw new Exception("Something failed in aggregate");

            var p = new NdArray(new float[] { 1, 2, 3, 4 }, new long[] { 2, 2 });
            var q = new NdArray(new float[] { 5, 6 }, new long[] { 1, 2 });
            var r = p.Concatenate(q);
            var s = p.Concatenate(q.Transposed, 1);

            if (r.Value[2, 1] != 6)
                throw new Exception("Something failed in Concatenate");
            if (s.Value[1, 2] != 6)
                throw new Exception("Something failed in Concatenate");

            var t = new NdArray(42).Subview(Range.NewAxis, 0);
            var u = o[Range.NewAxis, Range.All].Concatenate(t, 1);
            if (u.Reduce<Add>().Reduce<Add>().Value[0] != 104 + 42)
                throw new Exception("Something failed in Concatenate");

            var v = o.Concatenate(new NdArray(42), 1);
            if (v.Shape.Dimensions.LongLength != 1 || v.Reduce<Add>().Value[0] != 104 + 42)
                throw new Exception("Something failed in broadcast extended Concatenate");

            var w = Generate.Range(4, 2);
            var x = Generate.Range(2, 3);
            var y = w.MatrixMultiply(x);
            if (y.Sum() != 228)
                throw new Exception("Failure in matrix multiply");

            var z = w.MatrixMultiply(new NdArray(new float[] {1,2}).Reshape(new Shape(new long[] {2})));
            if (z.Sum() != 44)
                throw new Exception("Failure in matrix multiply");

            var x0 = Generate.Range(20).Repeat(new long[] { 10 }, 0);
            var x1 = Generate.Range(20).Repeat(10, 0);
            var s1 = x1.Sum();
            var s0 = x0.Sum();
            if (s0 != 1900 || s1 != 1900)
                throw new Exception(string.Format("Failure in repeat: {0}, {1}", s0, s1));

            var t0 = Generate.Range(new long[] { 101, 100 });
            var t1 = t0.Reduce<Add>(0);
            var t2 = t0.Reduce<Add>(1);
            if (t1.Shape.Dimensions[0].Length != 100 || t2.Shape.Dimensions[0].Length != 101 || t1.Value[0] != 505000 || t2.Value[0] != 4950)
                throw new Exception("Irregular reduction failed?");
        }
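
        The extended-repeat checks above rely on per-element counts: Repeat with a counts array emits source[i] exactly counts[i] times, which for 1..8 with counts { 1, 2, 1, 3, 4, 1, 1, 2 } gives 15 elements summing to 69. A reference sketch of the 1-D case only (not NumCIL's implementation):

        // Reference semantics for 1-D per-element repeat.
        static long[] RepeatReference(long[] source, long[] counts)
        {
            var result = new System.Collections.Generic.List<long>();
            for (int i = 0; i < source.Length; i++)
            {
                for (long r = 0; r < counts[i]; r++)
                {
                    result.Add(source[i]);
                }
            }

            return result.ToArray();
        }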
コード例 #36
        public static NdArray <Real> SingleInputForward(NdArray <Real> x, NdArray <Real> weight, NdArray <Real> bias, ComputeKernel forwardKernel, IFunction <Real> linear)
        {
            int outputCount = weight.Shape[0];
            int inputCount  = weight.Shape[1];

            Real[] y = bias == null ? new Real[outputCount * x.BatchCount] : LinearFunc.GetBiasedValue(x.BatchCount, outputCount, bias.Data);

            using (ComputeBuffer <Real> gpuX = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, x.Data))
                using (ComputeBuffer <Real> gpuW = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadOnly | ComputeMemoryFlags.UseHostPointer, weight.Data))
                    using (ComputeBuffer <Real> gpuY = new ComputeBuffer <Real>(OpenCL.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.UseHostPointer, y))
                    {
                        forwardKernel.SetMemoryArgument(0, gpuX);
                        forwardKernel.SetMemoryArgument(1, gpuW);
                        forwardKernel.SetMemoryArgument(2, gpuY);
                        forwardKernel.SetValueArgument(3, outputCount);
                        forwardKernel.SetValueArgument(4, inputCount);

                        OpenCL.CommandQueue.Execute
                        (
                            forwardKernel,
                            null,
                            new long[] { outputCount, x.BatchCount },
                            null,
                            null
                        );

                        OpenCL.CommandQueue.Finish();
                        OpenCL.CommandQueue.ReadFromBuffer(gpuY, ref y, true, null);
                    }

            return(NdArray.Convert(y, new[] { outputCount }, x.BatchCount, linear));
        }
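
        The kernel source itself is not shown above, so the following is only a plain C# sketch of the result the call is expected to produce, assuming row-major buffers with weight shaped [outputCount, inputCount] and x shaped [batchCount, inputCount], and using float instead of Real for clarity:

        // y[b, o] = bias[o] + sum_i weight[o, i] * x[b, i]; reference computation only.
        static float[] LinearForwardReference(float[] x, float[] weight, float[] bias,
                                              int batchCount, int inputCount, int outputCount)
        {
            float[] y = new float[batchCount * outputCount];

            for (int b = 0; b < batchCount; b++)
            {
                for (int o = 0; o < outputCount; o++)
                {
                    float sum = bias == null ? 0f : bias[o];

                    for (int i = 0; i < inputCount; i++)
                    {
                        sum += weight[o * inputCount + i] * x[b * inputCount + i];
                    }

                    y[b * outputCount + o] = sum;
                }
            }

            return y;
        }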