Example 1
        private void LoadNextBatchSet(int offset)
        {
            // Stop loading once the offset runs past the last training row.
            if (offset < layers[0].matrix.GetLength(0))
            {
                // Slice the next mini-batch of inputs, add the bias column,
                // and slice the matching batch of labels.
                X = layers[0].matrix.GetBatch(_batchSize, offset);
                X = X.AddFeatureBias(1);
                Y = AllY.GetBatch(_batchSize, offset);
            }
        }
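
LoadNextBatchSet steps through the training matrix in strides of _batchSize, refreshing the X and Y fields the trainer works on. A minimal sketch of an epoch loop that could drive it (the TrainOnCurrentBatch call is hypothetical and not part of the example):

        // Hypothetical driver; assumes the same fields as the example above.
        int rowCount = layers[0].matrix.GetLength(0);
        for (int offset = 0; offset < rowCount; offset += _batchSize)
        {
            LoadNextBatchSet(offset);  // refreshes X (bias-augmented) and Y
            TrainOnCurrentBatch();     // hypothetical training step
        }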
Example 2
        public double[,] CheckAnswer(double[,] TX)
        {
            double[][,] At    = new double[layers.Length - 1][,];
            double[][,] Zt    = new double[layers.Length - 1][,];
            double[,] TestedX = TX.AddFeatureBias(1);

            // Pre-size the activation matrices. (Each At[i] is reassigned by the
            // Mul results below, so this loop only documents the expected shapes.)
            for (int i = 1; i < layers.Length; i++)
            {
                if (i == 1)
                {
                    At[i - 1] = new double[TestedX.GetLength(0), layers[i].neurons];
                }
                else if (i == layers.Length - 1)
                {
                    At[i - 1] = new double[layers[i - 1].neurons, layers[i].matrix.GetLength(1)];
                }
                else
                {
                    At[i - 1] = new double[layers[i - 1].neurons, layers[i].neurons];
                }
            }

            // Forward pass: the first layer consumes the bias-augmented input;
            // every later layer re-adds the bias column before applying its weights.
            for (int i = 0; i < layers.Length - 1; i++)
            {
                if (i == 0)
                {
                    Zt[i] = TestedX.Mul(W[i]);
                    At[i] = Zt[i].Func(activationFunc[i]);
                }
                else if (i == layers.Length - 2) // output layer: identity activation
                {
                    Zt[i] = At[i - 1].AddFeatureBias(1).Mul(W[i]);
                    At[i] = Zt[i].Func(x => x);
                }
                else
                {
                    Zt[i] = At[i - 1].AddFeatureBias(1).Mul(W[i]);
                    At[i] = Zt[i].Func(activationFunc[i]);
                }
            }
            return At[layers.Length - 2];
        }
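
CheckAnswer runs a forward pass only, so it serves as the inference entry point. A hedged usage sketch, assuming one-hot style outputs and an instance named net (both assumptions, not shown above):

        // Hypothetical: score test rows and report the strongest output of each.
        double[,] scores = net.CheckAnswer(testInputs);
        for (int row = 0; row < scores.GetLength(0); row++)
        {
            int best = 0;
            for (int col = 1; col < scores.GetLength(1); col++)
            {
                if (scores[row, col] > scores[row, best])
                {
                    best = col;
                }
            }
            System.Console.WriteLine($"sample {row} -> class {best}");
        }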
Example 3
        private void Initialize()
        {
            // First mini-batch of inputs (bias-augmented) and matching labels.
            X        = layers[0].matrix.GetBatch(_batchSize, 0);
            X        = X.AddFeatureBias(1);
            Y        = AllY.GetBatch(_batchSize, 0);
            W        = new double[layers.Length - 1][,];
            Gradient = new double[layers.Length - 1][,];
            Sigma    = new double[layers.Length - 1][,];
            A        = new double[layers.Length - 1][,];
            Z        = new double[layers.Length - 1][,];

            activationFunc      = new Func<double, double>[layers.Length - 1];
            activationFuncPrime = new Func<double, double>[layers.Length - 1];

            // Size the activation and weight matrices between consecutive layers.
            for (int i = 1; i < layers.Length; i++)
            {
                if (i == 1)
                {
                    // Input -> first hidden layer. X already contains the bias
                    // column; AddWeightBias below supplies the matching weights.
                    A[i - 1] = new double[X.GetLength(0), layers[i].neurons];
                    W[i - 1] = new double[X.GetLength(1) - 1, layers[i].neurons];
                }
                else if (i == layers.Length - 1)
                {
                    // Last hidden layer -> output layer.
                    A[i - 1] = new double[layers[i - 1].neurons, layers[i].matrix.GetLength(1)];
                    W[i - 1] = new double[layers[i - 1].neurons, layers[i].matrix.GetLength(1)];
                }
                else
                {
                    // Hidden -> hidden.
                    A[i - 1] = new double[layers[i - 1].neurons, layers[i].neurons];
                    W[i - 1] = new double[layers[i - 1].neurons, layers[i].neurons];
                }

                // Look up the activation function and its derivative for this layer.
                ActivationFunctionFactory.SetFunctions(ref activationFunc[i - 1], ref activationFuncPrime[i - 1], layers[i].Type);

                // Randomize the weights and add the bias weights (0.1).
                W[i - 1] = W[i - 1].Rand().AddWeightBias(0.1);
            }
        }
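
All three examples lean on matrix extension methods (GetBatch, AddFeatureBias, Mul, Func, Rand, AddWeightBias) whose implementations are not shown. The sketch below reconstructs three of them from how they are called; the signatures and the bias-column position are assumptions, not the library's actual code:

        using System;

        // Hypothetical reconstructions inferred from the call sites above.
        public static class MatrixExtensionsSketch
        {
            // Plain matrix product: (n x m) * (m x p) -> (n x p).
            public static double[,] Mul(this double[,] a, double[,] b)
            {
                int n = a.GetLength(0), m = a.GetLength(1), p = b.GetLength(1);
                var r = new double[n, p];
                for (int i = 0; i < n; i++)
                    for (int k = 0; k < m; k++)
                        for (int j = 0; j < p; j++)
                            r[i, j] += a[i, k] * b[k, j];
                return r;
            }

            // Add a constant bias column (the "AddFeatureBias(1)" calls above);
            // placing it first rather than last is an assumption.
            public static double[,] AddFeatureBias(this double[,] a, double bias)
            {
                int n = a.GetLength(0), m = a.GetLength(1);
                var r = new double[n, m + 1];
                for (int i = 0; i < n; i++)
                {
                    r[i, 0] = bias;
                    for (int j = 0; j < m; j++)
                    {
                        r[i, j + 1] = a[i, j];
                    }
                }
                return r;
            }

            // Apply a scalar function elementwise (used for the activations).
            public static double[,] Func(this double[,] a, Func<double, double> f)
            {
                int n = a.GetLength(0), m = a.GetLength(1);
                var r = new double[n, m];
                for (int i = 0; i < n; i++)
                    for (int j = 0; j < m; j++)
                        r[i, j] = f(a[i, j]);
                return r;
            }
        }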