Example No. 1
        int Predict(Vector <double> x)
        {
            if (x.Count != Layers[0])
            {
                throw new ArgumentException("The input layer requires " + Layers[0] + " values. You gave " + x.Count + ".", nameof(x));
            }
            // A single sample as a column matrix is already in the (features x samples)
            // orientation the forward pass expects, so no transpose is needed.
            Matrix<double> prev_activation = x.ToColumnMatrix(); //0th (Input) Layer's activation

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;

            prev_activation = MathNetHelpers.Sigmoid(_z);
            int max_index = 0;

            for (int i = 0; i < prev_activation.Column(0).Count; i++)
            {
                if (prev_activation[i, 0] > prev_activation[max_index, 0])
                {
                    max_index = i;
                }
            }
            return max_index;
        }
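
The MathNetHelpers routines used above are not shown in these examples. Below is a minimal sketch of Sigmoid and AddRowOfOnes consistent with how they are called here, under the assumption that the bias row of ones is prepended at index 0 (so column 0 of each Theta holds the bias weights); the real helpers may differ.

        using System;
        using MathNet.Numerics.LinearAlgebra;

        static class MathNetHelpersSketch
        {
            // Element-wise logistic function: 1 / (1 + e^(-z))
            public static Matrix<double> Sigmoid(Matrix<double> z)
            {
                return z.Map(v => 1.0 / (1.0 + Math.Exp(-v)));
            }

            // Prepend a row of ones (assumed convention: bias row at index 0)
            public static Matrix<double> AddRowOfOnes(Matrix<double> m)
            {
                return m.InsertRow(0, Vector<double>.Build.Dense(m.ColumnCount, 1.0));
            }
        }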
Example No. 2
 //NOTE(sasha): Layers[i] is the number of nodes in layer i. These counts do NOT include the bias nodes, but DO include the output layer.
 public NeuralNet(int[] Layers, double Regularization = 0.1)
 {
     this.lambda = Regularization;
     this.Layers = Layers;
     num_labels  = Layers[Layers.Length - 1];
     Theta       = new Matrix <double> [Layers.Length - 1];
     for (int i = 0; i < Layers.Length - 1; i++)
     {
         Theta[i] = MathNetHelpers.RandomMatrix(Layers[i + 1], Layers[i] + 1, epsilon); // +1 column for the bias weights; epsilon bounds the random initialization
     }
 }
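
A usage sketch of the constructor; the layer sizes are illustrative (400 input features, one 25-unit hidden layer, 10 output classes), and epsilon is assumed to be a small initialization bound defined on the class.

        var net = new NeuralNet(new[] { 400, 25, 10 }, Regularization: 0.1);
        // Theta[0] is 25 x 401 and Theta[1] is 10 x 26:
        // one row per unit in the next layer, one column per input plus the bias column.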
Example No. 3
        Matrix <double> Y_Matrix()
        {
            int m = X.RowCount; // number of training examples
            Matrix<double> y_matrix = MathNetHelpers.Zeros(m, num_labels);

            for (int i = 0; i < m; i++)
            {
                y_matrix[i, (int)(Y[i] + 0.5)] = 1.0; // +0.5 rounds a label stored as a double to the nearest column index
            }
            return y_matrix;
        }
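
Y_Matrix is a one-hot encoder: row i gets a 1 in the column indexed by example i's label. A self-contained equivalent using only stock MathNet.Numerics calls (the labels array stands in for the Y vector):

        using MathNet.Numerics.LinearAlgebra;

        static Matrix<double> OneHot(double[] labels, int numLabels)
        {
            var y = Matrix<double>.Build.Dense(labels.Length, numLabels); // all zeros
            for (int i = 0; i < labels.Length; i++)
            {
                y[i, (int)(labels[i] + 0.5)] = 1.0; // +0.5 rounds labels stored as doubles
            }
            return y;
        }

        // OneHot(new[] { 2.0, 0.0, 1.0 }, 3) yields:
        // 0 0 1
        // 1 0 0
        // 0 1 0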
Example No. 4
        Matrix <double>[] Gradients()
        {
            int m = X.RowCount; // number of training examples

            Matrix <double>[] grad = new Matrix <double> [Theta.Length];
            for (int i = 0; i < grad.Length; i++)
            {
                grad[i] = MathNetHelpers.Zeros(Theta[i].RowCount, Theta[i].ColumnCount);
            }

            Matrix <double>[] activations     = new Matrix <double> [Layers.Length];
            Matrix<double>   prev_activation = X.Transpose(); //0th (Input) Layer's activation

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            activations[0]  = prev_activation;
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
                activations[i]  = prev_activation;
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;

            prev_activation = MathNetHelpers.Sigmoid(_z);
            activations[activations.Length - 1] = prev_activation;

            Matrix <double>[] delta = new Matrix <double> [Layers.Length];
            delta[delta.Length - 1] = (prev_activation - Y_Matrix().Transpose()); //The delta of the output layer
            for (int i = delta.Length - 2; i > 0; i--)
            {
                // Hidden-layer delta: (Theta_i^T * delta_{i+1}) .* a_i .* (1 - a_i);
                // activations[i] already includes its bias row, so it is used directly.
                delta[i] = (Theta[i].Transpose() * delta[i + 1]).PointwiseMultiply(activations[i]).PointwiseMultiply(1 - activations[i]);
                delta[i] = MathNetHelpers.RemoveRow(delta[i]); // drop the bias row's delta
            }
            Matrix <double>[] Delta = new Matrix <double> [Theta.Length];
            for (int i = 0; i < Delta.Length; i++)
            {
                Delta[i] = (delta[i + 1] * activations[i].Transpose()); // accumulate dJ/dTheta[i] over all examples
            }
            for (int i = 0; i < grad.Length; i++)
            {
                // Exclude the bias weights from regularization by zeroing their column
                // (with the ones row prepended, the bias weights sit in column 0 of Theta[i]).
                var z = Theta[i].Clone();
                z.ClearColumn(0);
                grad[i] = (Delta[i] + lambda * z) / m;
            }
            return grad;
        }
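
Gradients returns full-batch gradients, so a caller has to supply the optimizer. A hypothetical training loop using plain gradient descent; the Train method and the alpha learning rate are not part of the original class.

        // Hypothetical addition, not in the original code.
        public void Train(int iterations, double alpha = 1.0)
        {
            for (int iter = 0; iter < iterations; iter++)
            {
                var grad = Gradients();
                for (int i = 0; i < Theta.Length; i++)
                {
                    Theta[i] -= alpha * grad[i]; // step each weight matrix against its gradient
                }
            }
        }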
Example No. 5
        Matrix <double> Hypothesis()
        {
            Matrix <double> prev_activation = X.Transpose(); //0th (Input) Layer's activations

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;

            prev_activation = MathNetHelpers.Sigmoid(_z);
            return prev_activation;
        }
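
Hypothesis runs the same forward pass as Predict, but over all training examples at once, so the result has one column per example. A sketch of reducing it to per-example predicted labels (Column and MaximumIndex are stock MathNet.Numerics calls):

        var h = Hypothesis(); // num_labels x m
        var predictions = new int[h.ColumnCount];
        for (int j = 0; j < h.ColumnCount; j++)
        {
            predictions[j] = h.Column(j).MaximumIndex(); // argmax over the output units
        }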
Example No. 6
        double J()
        {
            int m = X.RowCount; // number of training examples
            Matrix<double> y_matrix = Y_Matrix().Transpose();

            var h = Hypothesis();

            double regularization = 0;

            for (int i = 0; i < Theta.Length; i++)
            {
                // Squared-weight penalty; the bias column (column 0) is excluded from regularization
                regularization += MathNetHelpers.ColumnRange(Theta[i].PointwisePower(2), 1, Theta[i].ColumnCount - 1).ColumnSums().Sum();
            }
            regularization *= (lambda / (2 * m));

            // Cross-entropy: -y .* log(h) - (1 - y) .* log(1 - h), averaged over the m examples
            double j = (-y_matrix.PointwiseMultiply(h.PointwiseLog()) - (1 - y_matrix).PointwiseMultiply((1 - h).PointwiseLog())).ColumnSums().Sum() / m;

            return j + regularization;
        }
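
For reference, J computes the regularized cross-entropy cost; in the notation of the code, with h the hypothesis, y the one-hot label matrix, and K = num_labels:

        J(\Theta) = \frac{1}{m}\sum_{t=1}^{m}\sum_{k=1}^{K}\Big[-y^{(t)}_{k}\log h^{(t)}_{k}-\big(1-y^{(t)}_{k}\big)\log\big(1-h^{(t)}_{k}\big)\Big]+\frac{\lambda}{2m}\sum_{l}\sum_{i,j}\big(\Theta^{(l)}_{ij}\big)^{2}

where, by the usual convention, the bias weights are excluded from the second sum.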