示例#1
0
 /// <summary>
 /// Copies the selected columns of a DataTable into a 2-D tensor, one row per
 /// DataTable row and one tensor column per requested column name.
 /// </summary>
 /// <param name="table">source table; every cell in the selected columns must be numeric</param>
 /// <param name="result">destination tensor, pre-sized to at least [Rows.Count, columns.Length]</param>
 /// <param name="columns">names of the columns to copy, in output order</param>
 public static void DataTableToTensor(DataTable table, TensorOld result, params string[] columns)
 {
     for (int i = 0; i < table.Rows.Count; i++)
     {
         for (int j = 0; j < columns.Length; j++)
         {
             // Convert directly with the invariant culture. The previous
             // double.Parse(value.ToString()) round-trip was culture-sensitive
             // and broke on locales that use ',' as the decimal separator.
             result[i, j] = Convert.ToDouble(table.Rows[i][columns[j]],
                                             System.Globalization.CultureInfo.InvariantCulture);
         }
     }
 }
示例#2
0
        /// <summary>
        /// Allocates the forward-output buffer used at prediction time.
        /// </summary>
        /// <param name="input">2-D input tensor (samples x features)</param>
        /// <returns>the freshly allocated forward-output tensor</returns>
        public TensorOld PreparePredict(TensorOld input)
        {
            if (input.Rank == 2)
            {
                // One output row per sample, one column per unit of this layer.
                ForwardOutput = new TensorOld(input.shape[0], UnitCount);
                return ForwardOutput;
            }

            throw new TensorShapeException("input tensor must have Rank=2");
        }
示例#3
0
        public void OptimizeTest()
        {
            // Gradient descent with learning rate 0.1: target -= 0.1 * gradient,
            // so every element should shrink by 10% of itself here.
            var optimizer = new GradientDescent(0.1);
            var target    = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 3, 3);
            var gradient  = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 3, 3);

            optimizer.Optimize(target, gradient);

            var expected = new TensorOld(new double[] { 0.9, 1.8, 2.7, 3.6, 4.5, 5.4, 6.3, 7.2, 8.1 }, 3, 3);
            Assert.Equal(expected, target);
        }
示例#4
0
        /// <summary>
        /// Runs the input through every prediction layer in order and returns
        /// the final layer's output.
        /// </summary>
        /// <param name="X">input tensor; its shape is validated first</param>
        /// <returns>output of the last prediction layer</returns>
        public TensorOld Predict(TensorOld X)
        {
            CheckPredictShape(X);

            var output = X;
            foreach (var layer in PredictLayers)
            {
                output = layer.Forward(output);
            }
            return output;
        }
示例#5
0
        /// <summary>
        /// Re-allocates the training buffers (via PrepareTrain) whenever the
        /// incoming shapes differ from the ones the network was last prepared for.
        /// </summary>
        private void CheckTrainShape(TensorOld input, TensorOld y)
        {
            var shapesUnchanged = CheckShape(input.shape, trainInputShape) &&
                                  CheckShape(y.shape, trainYShape);

            if (!shapesUnchanged)
            {
                PrepareTrain(input, y);
            }
        }
示例#6
0
        /// <summary>
        /// Propagates the error from the last training layer back to the first.
        /// </summary>
        /// <param name="error">error coming from the loss function</param>
        /// <returns>error after passing through every layer in reverse order</returns>
        public TensorOld Backward(TensorOld error)
        {
            for (int layer = TrainingLayers.Count - 1; layer >= 0; layer--)
            {
                error = TrainingLayers[layer].Backward(error);
            }
            return error;
        }
示例#7
0
        public void ForwardBackwardTest()
        {
            var input = new TensorOld(new double[]
            {
                4, 6, 1, 4,
                8, 4, 5, 1,
                5, 3, 5, 7,
                1, 7, 2, 8,
            }, 1, 1, 4, 4);

            var conv = new ConvLayer(4, 2, 1, 1);

            conv.SetFilters(new TensorOld(new double[]
            {
                1, 1, 0, 0,
                0, 0, 1, 1,
                1, 0, 1, 0,
                0, 1, 0, 1,
            }, 4, 1, 2, 2));

            var expected = new TensorOld(new double[]
            {
                0, 0, 0, 0, 0,
                4, 10, 7, 5, 4,
                8, 12, 9, 6, 1,
                5, 8, 8, 12, 7,
                1, 8, 9, 10, 8,

                4, 10, 7, 5, 4,
                8, 12, 9, 6, 1,
                5, 8, 8, 12, 7,
                1, 8, 9, 10, 8,
                0, 0, 0, 0, 0,

                0, 4, 6, 1, 4,
                0, 12, 10, 6, 5,
                0, 13, 7, 10, 8,
                0, 6, 10, 7, 15,
                0, 1, 7, 2, 8,

                4, 6, 1, 4, 0,
                12, 10, 6, 5, 0,
                13, 7, 10, 8, 0,
                6, 10, 7, 15, 0,
                1, 7, 2, 8, 0,
            }, 1, 4, 5, 5);

            conv.PrepareTrain(input);
            var actual = conv.Forward(input);

            // xUnit's Assert.Equal takes the expected value FIRST; the original
            // call had the arguments swapped (and misspelled "acutal"), which
            // produces misleading failure messages.
            Assert.Equal(expected, actual);

            // Smoke-check the backward pass: it must run and yield a tensor.
            var error = expected / 10;
            var back  = conv.Backward(error);

            Assert.NotNull(back);
        }
示例#8
0
        /// <summary>
        /// Fits a min-max normalizer to the given tensor, caching the minimum,
        /// maximum and their difference (the normalization denominator).
        /// </summary>
        /// <param name="tensor">sample data used to fit the normalizer</param>
        /// <exception cref="ArgumentException">
        /// thrown when every value is identical (max == min), since the
        /// denominator would be zero and normalization undefined
        /// </exception>
        public MinMaxNorm(TensorOld tensor)
        {
            Min   = tensor.Min();
            Max   = tensor.Max();
            Denom = Max - Min;

            // A zero denominator would make every later Normalize divide by zero.
            // ArgumentException is more specific than the bare Exception thrown
            // before, and remains catchable by existing catch (Exception) blocks.
            if (Denom == 0)
            {
                throw new ArgumentException("max=min");
            }
        }
示例#9
0
 /// <summary>
 /// Back propagation: converts the error received from the next layer into
 /// the error handed to the previous layer.
 /// </summary>
 /// <param name="error">error passed back from the next layer</param>
 /// <returns>error to propagate to the previous layer</returns>
 public override TensorOld Backward(TensorOld error)
 {
     // Derivatives must be refreshed before the error can be propagated.
     ComputeDerivative();

     // ErrorBP writes its result into BackwardOutput, which is returned below.
     ErrorBP(error);
     return BackwardOutput;
 }
示例#10
0
 /// <summary>
 /// Forward pass: pools every channel of every sample, parallelized over
 /// both the sample and the channel dimension.
 /// </summary>
 /// <param name="input">input tensor to pool</param>
 /// <returns>the pre-allocated forward-output tensor</returns>
 public override TensorOld Forward(TensorOld input)
 {
     var sampleCount  = ForwardOutput.shape[0];
     var channelCount = ForwardOutput.shape[1];

     Parallel.For(0, sampleCount, s =>
     {
         Parallel.For(0, channelCount, c => PoolingChannel(input, s, c));
     });
     return ForwardOutput;
 }
示例#11
0
        /// <summary>
        /// Applies a binary operation to every cell of the tensor paired with a
        /// single scalar, producing a new tensor of the same shape.
        /// </summary>
        /// <param name="t1">source tensor</param>
        /// <param name="val2">scalar passed as the second operand to every call</param>
        /// <param name="operation">cell-wise operation to apply</param>
        /// <returns>new tensor holding the per-cell results</returns>
        public static TensorOld <TOut> DoScalarCellByCell <TOut, TIn1, TIn2>(this TensorOld <TIn1> t1, TIn2 val2, Func <TIn1, TIn2, TOut> operation)
        {
            var output = new TensorOld <TOut>(t1.ShapeDimensions.ToArray());
            var cells  = t1.TheSpan.NumberSubArrays;

            for (int index = 0; index < cells; index++)
            {
                output.TheSpan[index] = operation(t1.TheSpan[index], val2);
            }

            return output;
        }
示例#12
0
 /// <summary>
 /// Replaces the layer's filter values with those of the supplied tensor.
 /// </summary>
 /// <param name="filters">new filter values; must match the existing filter shape</param>
 public void SetFilters(TensorOld filters)
 {
     if (Filters == null)
     {
         // First call: share the incoming tensor with every mirror layer.
         // NOTE(review): there is no early return after this branch, so CheckShape
         // below still executes — presumably this layer is itself in mirrorList,
         // making the loop also assign this.Filters; confirm, otherwise CheckShape
         // receives a null Filters. Also, when Filters ends up being the same
         // reference as 'filters', the Array.Copy below is a self-copy no-op.
         foreach (var item in mirrorList)
         {
             item.Filters = filters;
         }
     }
     TensorOld.CheckShape(filters, Filters);
     Array.Copy(filters.values, 0, Filters.values, 0, Filters.ElementCount);
 }
示例#13
0
        public void TransposeTest()
        {
            // A 2x3 matrix transposed must become 3x2 with rows and columns swapped.
            var source = new TensorOld(new double[, ] {
                { 1, 2, 3 }, { 4, 5, 6 }
            });
            var expected = new TensorOld(new double[, ] {
                { 1, 4 }, { 2, 5 }, { 3, 6 }
            });

            Assert.Equal(expected, source.Transpose());
        }
示例#14
0
        /// <summary>
        /// Asserts that two tensors share the same shape and that every pair of
        /// elements differs by less than the given absolute tolerance.
        /// </summary>
        /// <param name="a">first tensor</param>
        /// <param name="b">second tensor</param>
        /// <param name="allowError">maximum allowed absolute element difference</param>
        public static void ApproximatelyEqual(TensorOld a, TensorOld b, double allowError = 0.00001)
        {
            Assert.True(TensorOld.CheckShapeBool(a, b));

            var left   = a.GetRawValues();
            var right  = b.GetRawValues();
            var length = a.ElementCount;

            for (int index = 0; index < length; index++)
            {
                Assert.True(Math.Abs(left[index] - right[index]) < allowError);
            }
        }
示例#15
0
        /// <summary>
        /// Recomputes the filter gradients from the incoming error,
        /// parallelized over samples and filters.
        /// </summary>
        /// <param name="error">error tensor from the next layer</param>
        private void ComputeGradient(TensorOld error)
        {
            // Gradients are accumulated per sample, so reset them first.
            FiltersGradient.Clear();

            Parallel.For(0, samples, s =>
            {
                Parallel.For(0, FilterCount, f => ComputeGradient(error, s, f));
            });
        }
示例#16
0
        /// <summary>
        /// Computes the error to pass to the previous layer, parallelized over
        /// samples and filters; results accumulate into BackwardOutput.
        /// </summary>
        /// <param name="error">error tensor from the next layer</param>
        private void ErrorBackward(TensorOld error)
        {
            // Contributions accumulate into BackwardOutput, so reset it first.
            BackwardOutput.Clear();

            Parallel.For(0, samples, s =>
            {
                Parallel.For(0, FilterCount, f => ErrorBackward(error, s, f));
            });
        }
示例#17
0
 /// <summary>
 /// Replaces the layer's bias values with those of the supplied tensor.
 /// </summary>
 /// <param name="bias">new bias values; must match the existing bias shape</param>
 public void SetBias(TensorOld bias)
 {
     if (Bias == null)
     {
         // First call: share the incoming tensor with every mirror layer.
         // NOTE(review): there is no early return after this branch, so CheckShape
         // below still executes — presumably this layer is itself in mirrorList,
         // making the loop also assign this.Bias; confirm, otherwise CheckShape
         // receives a null Bias. Also note the argument order here is
         // (existing, incoming), while SetFilters calls CheckShape(incoming,
         // existing) — worth verifying CheckShape is order-insensitive.
         foreach (var item in mirrorList)
         {
             item.Bias = bias;
         }
     }
     TensorOld.CheckShape(Bias, bias);
     Array.Copy(bias.GetRawValues(), 0, Bias.GetRawValues(), 0, Bias.ElementCount);
 }
示例#18
0
        /// <summary>
        /// Map decoding: converts each encoded value in the tensor's first
        /// dimension back to its category label.
        /// </summary>
        /// <param name="t">tensor of encoded values to decode</param>
        /// <returns>decoded category labels, one per row of <paramref name="t"/></returns>
        public override List <string> Decode(TensorOld t)
        {
            // Only shape[0] entries are produced, so size the list accordingly
            // (the old code reserved ElementCount slots, over-allocating for
            // any tensor with more than one column). Hoist GetRawValues() out
            // of the loop instead of re-fetching it every iteration.
            var rows   = t.shape[0];
            var raw    = t.GetRawValues();
            var result = new List <string>(rows);

            for (int i = 0; i < rows; i++)
            {
                var index = Map2Index((int)raw[i]);
                result.Add(Categories[index]);
            }

            return result;
        }
示例#19
0
        /// <summary>
        /// Allocates training-time buffers for every layer and the loss function,
        /// remembering the input/label shapes so later shape changes can be detected.
        /// </summary>
        /// <param name="X">training input tensor</param>
        /// <param name="y">training label tensor</param>
        public void PrepareTrain(TensorOld X, TensorOld y)
        {
            trainInputShape = X.shape;
            trainYShape     = y.shape;

            // Each layer's prepared output becomes the next layer's input shape.
            var output = X;
            foreach (var layer in TrainingLayers)
            {
                output = layer.PrepareTrain(output);
            }

            LossFunction.PrepareTrain(output, y);
        }
示例#20
0
        public void ZScoreTest()
        {
            var norm       = new ZScoreNorm(new TensorOld(new double[] { 1, 2, 3 }));
            var normalized = norm.Normalize(new TensorOld(new double[] { 1.5, 2.5 }));

            // Mean of {1,2,3} is 2; the population standard deviation is sqrt(2/3).
            Assert.Equal(2, norm.Mean);
            Assert.Equal(Math.Sqrt(2d / 3d), norm.Delta);

            // 1.5 lies below the mean, 2.5 above it.
            Assert.True(normalized[0] < 0);
            Assert.True(normalized[1] > 0);
        }
示例#21
0
 /// <summary>
 /// Manually sets the layer's weights from the supplied tensor.
 /// </summary>
 /// <param name="weights">new weight values; must match the existing weight shape</param>
 public void SetWeights(TensorOld weights)
 {
     if (Weights == null)
     {
         // First call: share the incoming tensor with every mirror layer.
         // NOTE(review): there is no early return after this branch, so CheckShape
         // below still executes — presumably this layer is itself in mirrorList,
         // making the loop also assign this.Weights; confirm, otherwise CheckShape
         // receives a null Weights. When Weights is the same reference as
         // 'weights', the Array.Copy below is a self-copy no-op.
         foreach (var item in mirrorList)
         {
             item.Weights = weights;
         }
     }
     TensorOld.CheckShape(weights, Weights);
     Array.Copy(weights.GetRawValues(), 0, Weights.GetRawValues(), 0, Weights.ElementCount);
 }
        /// <summary>
        /// Forward pass: applies softmax independently to each sample (row)
        /// of the input tensor.
        /// </summary>
        /// <param name="input">input values, one sample per row</param>
        /// <returns>softmax output with the same layout as the input</returns>
        public override TensorOld Forward(TensorOld input)
        {
            for (int sample = 0; sample < sampleNumber; sample++)
            {
                var offset = categoryNumber * sample;

                // Copy one row into the scratch buffer, softmax it in place,
                // then write the result back into the output tensor.
                Array.Copy(input.values, offset, sampleBuff, 0, categoryNumber);
                Functions.Softmax(sampleBuff, sampleBuff);
                Array.Copy(sampleBuff, 0, ForwardOutput.values, offset, categoryNumber);
            }
            return ForwardOutput;
        }
示例#23
0
        /// <summary>
        /// Ad-hoc benchmark: times one forward pass of a 500x200 random batch
        /// through a 100-unit fully connected layer and prints the duration.
        /// </summary>
        public void Play()
        {
            var data   = DataEmulator.Instance.RandomArray(100000);
            var tensor = new TensorOld(data, 500, 200);
            var full   = new FullLayer(100);

            full.PrepareTrain(tensor);

            // Stopwatch is monotonic and high-resolution; the previous
            // DateTime.Now subtraction had ~15ms resolution and could jump
            // with system clock adjustments.
            var watch  = System.Diagnostics.Stopwatch.StartNew();
            var output = full.Forward(tensor);
            watch.Stop();

            Console.WriteLine($"Forward Time:{watch.Elapsed.TotalMilliseconds}");
        }
        /// <summary>
        /// Allocates prediction-time buffers: the output tensor plus a
        /// per-sample scratch buffer sized to the number of categories.
        /// </summary>
        /// <param name="input">2-D input tensor (samples x categories)</param>
        /// <returns>the pre-allocated forward-output tensor</returns>
        public override TensorOld PreparePredict(TensorOld input)
        {
            if (input.Rank != 2)
            {
                throw new TensorShapeException("input.Rank must be 2");
            }

            sampleNumber   = input.Shape[0];
            categoryNumber = input.Shape[1];
            sampleBuff     = new double[categoryNumber];
            ForwardOutput  = input.GetSameShape();
            return ForwardOutput;
        }
示例#25
0
        /// <summary>
        /// Convolution forward pass: pads the input, then convolves every
        /// filter with every sample in parallel.
        /// </summary>
        /// <param name="input">input tensor to convolve</param>
        /// <returns>the pre-allocated forward-output tensor</returns>
        public TensorOld Forward(TensorOld input)
        {
            SetPaddingInput(input);

            Parallel.For(0, samples, s =>
            {
                Parallel.For(0, FilterCount, f => Forward(input, s, f));
            });
            return ForwardOutput;
        }
示例#26
0
        /// <summary>
        /// Converts every element of a tensor to another type, using the
        /// supplied converter or the default numeric conversion when none is given.
        /// </summary>
        /// <param name="t">source tensor</param>
        /// <param name="converter">optional element converter; defaults to the numeric converter</param>
        /// <returns>new tensor with converted elements and the same shape</returns>
        public static TensorOld <TOut> Cast <TIn, TOut>(this TensorOld <TIn> t, Func <TIn, TOut> converter = null)
        {
            var cast   = converter ?? NumericUtils.GetNumericConverter <TIn, TOut>();
            var result = new TensorOld <TOut>(t.ShapeDimensions.ToArray());

            result.SetSpan(t.TheSpan.Select(item => cast(item)));

            return result;
        }
示例#27
0
        /// <summary>
        /// Pooling back propagation: clears the output buffer, then routes the
        /// error back per sample and channel in parallel.
        /// </summary>
        /// <param name="error">error from the next layer</param>
        /// <returns>error to hand to the previous layer (BackwardOutput)</returns>
        public override TensorOld Backward(TensorOld error)
        {
            // Contributions accumulate into BackwardOutput, so reset it first.
            BackwardOutput.Clear();

            var sampleCount  = ForwardOutput.shape[0];
            var channelCount = ForwardOutput.shape[1];

            Parallel.For(0, sampleCount, s =>
            {
                Parallel.For(0, channelCount, c => ErrorBP(error, s, c));
            });
            return BackwardOutput;
        }
示例#28
0
        /// <summary>
        /// Combines two equally shaped tensors element by element with the
        /// given operation, returning a new tensor of the same shape.
        /// </summary>
        /// <param name="t1">first operand tensor</param>
        /// <param name="t2">second operand tensor; must match t1's dimensions</param>
        /// <param name="operation">cell-wise operation to apply</param>
        /// <returns>new tensor holding the per-cell results</returns>
        public static TensorOld <TOut> DoCellByCell <TOut, TIn1, TIn2>(this TensorOld <TIn1> t1, TensorOld <TIn2> t2, Func <TIn1, TIn2, TOut> operation)
        {
            t1.CheckEqualDimensions(t2);

            var output = new TensorOld <TOut>(t1.ShapeDimensions.ToArray());
            var cells  = t1.TheSpan.NumberSubArrays;

            for (int index = 0; index < cells; index++)
            {
                output.TheSpan[index] = operation(t1.TheSpan[index], t2.TheSpan[index]);
            }

            return output;
        }
示例#29
0
        public void ApplyTest()
        {
            var source  = TensorOld.Rand(20, 30);
            var squared = TensorOld.Apply(source, v => v * v);

            // Every cell of the result must equal the square of the source cell.
            for (int row = 0; row < 20; row++)
            {
                for (int col = 0; col < 30; col++)
                {
                    Assert.Equal(source[row, col] * source[row, col], squared[row, col]);
                }
            }
        }
示例#30
0
        public void MeanSquareErrorTest()
        {
            var y    = new TensorOld(new double[] { 1, 3, 2, 4, 5, 6 });
            var yHat = new TensorOld(new double[] { 1.5, 2.6, 2.1, 3.9, 5.3, 6.7 });

            var loss     = Functions.MeanSquareError(y, yHat);
            var gradient = Derivatives.MeanSquareError(y, yHat);

            // Numerical check: nudge the first prediction by a small delta and
            // compare the finite-difference slope with the analytic gradient.
            const double delta = 0.00001;
            yHat[0] += delta;
            var numerical = (Functions.MeanSquareError(y, yHat) - loss) / delta;

            MyAssert.ApproximatelyEqual(numerical, gradient[0]);
        }