Example #1
        int Predict(Vector<double> x)
        {
            if (x.Count != Layers[0])
            {
                throw new ArgumentException("The input layer requires " + Layers[0] + " values. You gave " + x.Count + ".", nameof(x));
            }
            var _x = x.ToRowMatrix();
            Matrix<double> prev_activation = _x.Transpose(); // 0th (input) layer's activation

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            // Forward-propagate through the hidden layers, prepending a bias row at each step.
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;
            prev_activation = MathNetHelpers.Sigmoid(_z); // output layer activation (no bias row)
            // Return the index of the output unit with the highest activation.
            int max_index = 0;
            for (int i = 0; i < prev_activation.RowCount; i++)
            {
                if (prev_activation[i, 0] > prev_activation[max_index, 0])
                {
                    max_index = i;
                }
            }
            return max_index;
        }
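
A minimal usage sketch (hypothetical: it assumes the surrounding class is named NeuralNetwork, takes its layer sizes in the constructor, and exposes Predict publicly; none of that is shown in the example):

        using MathNet.Numerics.LinearAlgebra;

        var network = new NeuralNetwork(new[] { 4, 8, 3 });                     // 4 inputs, 8 hidden units, 3 classes
        var sample  = Vector<double>.Build.Dense(new[] { 5.1, 3.5, 1.4, 0.2 });
        int label   = network.Predict(sample);                                  // index of the strongest output unit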
Example #2
        Matrix<double>[] Gradients()
        {
            int m = X.RowCount; // number of training examples

            // One gradient accumulator per weight matrix, initialized to zero.
            Matrix<double>[] grad = new Matrix<double>[Theta.Length];
            for (int i = 0; i < grad.Length; i++)
            {
                grad[i] = MathNetHelpers.Zeros(Theta[i].RowCount, Theta[i].ColumnCount);
            }

            Matrix<double>[] activations     = new Matrix<double>[Layers.Length];
            Matrix<double>   prev_activation = X.Transpose(); // 0th (input) layer's activation

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            activations[0]  = prev_activation;
            // Forward pass, caching every layer's activation for backpropagation.
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
                activations[i]  = prev_activation;
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;

            prev_activation = MathNetHelpers.Sigmoid(_z);
            activations[activations.Length - 1] = prev_activation;

            Matrix<double>[] delta = new Matrix<double>[Layers.Length];
            delta[delta.Length - 1] = prev_activation - Y_Matrix().Transpose(); // delta of the output layer
            // Backpropagate: delta_i = (Theta_i^T * delta_{i+1}) .* a_i .* (1 - a_i), then drop the bias row.
            for (int i = delta.Length - 2; i > 0; i--)
            {
                var act = activations[i];
                delta[i] = (Theta[i].Transpose() * delta[i + 1]).PointwiseMultiply(act).PointwiseMultiply(1 - act);
                delta[i] = MathNetHelpers.RemoveRow(delta[i]);
            }
            // Accumulate gradient contributions: Delta_i = delta_{i+1} * a_i^T.
            Matrix<double>[] Delta = new Matrix<double>[Theta.Length];
            for (int i = 0; i < Delta.Length; i++)
            {
                Delta[i] = delta[i + 1] * activations[i].Transpose();
            }
            for (int i = 0; i < grad.Length; i++)
            {
                // Regularize every weight except the bias column of Theta[i].
                var reg = Theta[i].Clone();
                reg.ClearColumn(0);
                grad[i] = (Delta[i] + lambda * reg) / m;
            }
            return grad;
        }
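
Gradients() only computes the regularized gradient; a minimal full-batch gradient-descent loop on top of it could look like the sketch below (Train, learningRate, and epochs are hypothetical and not part of the example):

        void Train(double learningRate, int epochs)
        {
            // Full-batch gradient descent: step each weight matrix against its gradient.
            for (int epoch = 0; epoch < epochs; epoch++)
            {
                Matrix<double>[] grad = Gradients();
                for (int i = 0; i < Theta.Length; i++)
                {
                    Theta[i] -= learningRate * grad[i];
                }
            }
        }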
Example #3
        Matrix<double> Hypothesis()
        {
            Matrix<double> prev_activation = X.Transpose(); // 0th (input) layer's activation

            prev_activation = MathNetHelpers.AddRowOfOnes(prev_activation);
            // Forward-propagate through the hidden layers, prepending a bias row at each step.
            for (int i = 1; i < Layers.Length - 1; i++)
            {
                var z = Theta[i - 1] * prev_activation;
                prev_activation = MathNetHelpers.AddRowOfOnes(MathNetHelpers.Sigmoid(z));
            }
            var _z = Theta[Theta.Length - 1] * prev_activation;

            prev_activation = MathNetHelpers.Sigmoid(_z); // output layer activation
            return prev_activation;
        }
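
Hypothesis() pairs naturally with a regularized cross-entropy cost. Below is a sketch using the same assumed members (X, Theta, Y_Matrix(), lambda); the Cost method itself is hypothetical and not part of the example:

        double Cost()
        {
            var h = Hypothesis();           // (output units) x m
            var y = Y_Matrix().Transpose(); // same shape as h
            int m = X.RowCount;

            // Cross-entropy term, summed over all output units and examples.
            double cost = (-y.PointwiseMultiply(h.PointwiseLog())
                           - (1 - y).PointwiseMultiply((1 - h).PointwiseLog()))
                          .RowSums().Sum() / m;

            // L2 penalty over every weight except each bias column.
            double reg = 0;
            foreach (var theta in Theta)
            {
                var t = theta.Clone();
                t.ClearColumn(0);
                reg += t.PointwisePower(2).RowSums().Sum();
            }
            return cost + lambda * reg / (2.0 * m);
        }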