public ManagedArray Predict(ManagedArray test, NeuralNetworkOptions opts)
        {
            Forward(test);

            var prediction = new ManagedArray(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    // multi-class: report the largest output activation for this item
                    double maxval = double.MinValue;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        double val = Yk[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                        }
                    }

                    prediction[y] = maxval;
                }
                else
                {
                    prediction[y] = Yk[y];
                }
            }

            // cleanup of arrays allocated in Forward
            ManagedOps.Free(A2, Yk, Z2);

            return(prediction);
        }
Example #2
        public void ApplyGradients(NeuralNetworkOptions opts)
        {
            // gradient descent update: W[layer] = W[layer] - Alpha * Deltas[layer]
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedMatrix.Add(Weights[layer], Deltas[layer], -opts.Alpha);
            }
        }
        public void Optimize(ManagedArray input, ManagedArray output, NeuralNetworkOptions opts)
        {
            SetupOptimizer(input, output, opts);

            // iterate until the optimizer reports convergence or the epoch limit is reached
            while (!StepOptimizer(input, opts))
            {
            }
        }
        public void Train(ManagedArray input, ManagedArray output, NeuralNetworkOptions opts)
        {
            Setup(output, opts);

            // run gradient descent steps until Step() signals completion
            while (!Step(input, opts))
            {
            }
        }
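For orientation, here is a minimal sketch of how the entry points above might be wired together. The option names (Inputs, Nodes, Categories, Items, Epochs, Alpha, Tolerance) and the ManagedArray constructors are inferred from their usage in these snippets, and the NeuralNetwork class name is assumed, so treat this as an illustration rather than the library's documented API.

        // hypothetical usage; names inferred from the snippets above
        var opts = new NeuralNetworkOptions
        {
            Inputs     = 2,      // features per sample
            Nodes      = 8,      // hidden layer width
            Categories = 3,      // number of output classes
            Items      = 100,    // number of training samples
            Epochs     = 1000,   // iteration budget
            Alpha      = 0.1,    // learning rate
            Tolerance  = 1e-4    // stop once the error falls below this
        };

        var network = new NeuralNetwork();                        // assumed class name
        var input   = new ManagedArray(opts.Inputs, opts.Items);  // training features
        var output  = new ManagedArray(opts.Items);               // labels in 1..Categories

        // ... fill input and output here ...

        network.Train(input, output, opts);            // plain gradient descent
        var labels = network.Classify(input, opts);    // 1-based class per sample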
        public void ApplyGradients(NeuralNetworkOptions opts)
        {
            // dWji = learning_rate * dWji
            // dWkj = learning_rate * dWkj
            // w_ji = w_ji - dWji
            // w_kj = w_kj - dWkj
            ManagedMatrix.Add(Wkj, DeltaWkj, -opts.Alpha);
            ManagedMatrix.Add(Wji, DeltaWji, -opts.Alpha);
        }
        public bool Step(ManagedArray input, NeuralNetworkOptions opts)
        {
            Forward(input);
            BackPropagation(input);
            ApplyGradients(opts);

            Iterations = Iterations + 1;

            // stop when the cost diverges (NaN), the epoch budget is spent, or the tolerance is met
            return(double.IsNaN(Cost) || Iterations >= opts.Epochs || Cost < opts.Tolerance);
        }
        public void SetupOptimizer(ManagedArray input, ManagedArray output, NeuralNetworkOptions opts, bool Reset = true)
        {
            Setup(output, opts, Reset);

            Optimizer.MaxIterations = opts.Epochs;

            var X = ReshapeWeights(Wji, Wkj);

            OptimizerInput = input;

            Optimizer.Setup(OptimizerCost, X);
        }
        public void ApplyGradients(NeuralNetworkOptions opts)
        {
            // dWji = learning_rate * dWji
            // dWkj = learning_rate * dWkj
            // w_ji = w_ji - dWji
            // w_kj = w_kj - dWkj
            ManagedMatrix.Add(Wkj, DeltaWkj, -opts.Alpha);
            ManagedMatrix.Add(Wji, DeltaWji, -opts.Alpha);

            // cleanup of arrays allocated in BackPropagation
            ManagedOps.Free(DeltaWji, DeltaWkj);
        }
        public bool StepOptimizer(ManagedArray input, NeuralNetworkOptions opts)
        {
            OptimizerInput = input;

            var X = ReshapeWeights(Wji, Wkj);

            Optimizer.Step(OptimizerCost, X);

            Iterations = Optimizer.Iterations;

            Cost = Optimizer.f1;

            OptimizerInput = null;

            return(double.IsNaN(Cost) || Iterations >= opts.Epochs || Cost < opts.Tolerance);
        }
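The Optimize wrapper earlier hides this loop; when progress reporting is useful, the same pair of calls can be driven by hand. A minimal sketch, assuming a network and options set up as in the earlier example and that Iterations and Cost are readable from outside the class:

        // hypothetical manual training loop over the optimizer path
        network.SetupOptimizer(input, output, opts);

        while (!network.StepOptimizer(input, opts))
        {
            Console.WriteLine("iteration {0}: cost = {1}", network.Iterations, network.Cost);
        }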
        public void Setup(ManagedArray output, NeuralNetworkOptions opts)
        {
            // weight matrices include an extra weight for the bias term (hence the +1)
            Wji = new ManagedArray(opts.Inputs + 1, opts.Nodes);
            Wkj = new ManagedArray(opts.Nodes + 1, opts.Categories);

            Y_output = Labels(output, opts);

            var random = new Random(Guid.NewGuid().GetHashCode());

            Rand(Wji, random);
            Rand(Wkj, random);

            Cost = 1.0;
            L2   = 1.0;

            Iterations = 0;
        }
        public bool Step(ManagedArray input, NeuralNetworkOptions opts)
        {
            Forward(input);
            BackPropagation(input);

            var optimized = (double.IsNaN(Cost) || Cost < opts.Tolerance);

            if (!optimized)
            {
                ApplyGradients(opts);
            }

            ClearDeltas();

            Iterations = Iterations + 1;

            return(optimized || Iterations >= opts.Epochs);
        }
Example #12
        public bool Step(ManagedArray input, NeuralNetworkOptions opts)
        {
            Forward(input);
            BackPropagation(input);

            // when UseL2 is set, test convergence against the L2 error instead of the cost
            var error = opts.UseL2 ? L2 : Cost;

            var optimized = (double.IsNaN(error) || error < opts.Tolerance);

            // Apply gradients only if the error is still high
            if (!optimized)
            {
                ApplyGradients(opts);
            }

            ClearDeltas();

            Iterations = Iterations + 1;

            return(optimized || Iterations >= opts.Epochs);
        }
Example #13
        public ManagedIntList Classify(ManagedArray test, NeuralNetworkOptions opts, double threshold = 0.5)
        {
            Forward(test);

            var classification = new ManagedIntList(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    var maxval = double.MinValue;
                    var maxind = 0;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        var val = Y[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                            maxind = x;
                        }
                    }

                    // class labels are 1-based, matching the encoding in Labels()
                    classification[y] = maxind + 1;
                }
                else
                {
                    classification[y] = Y[y] > threshold ? 1 : 0;
                }
            }

            // cleanup of arrays allocated in Forward propagation
            ManagedOps.Free(Y);

            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(X[layer], Z[layer]);
            }

            return(classification);
        }
Example #14
        public ManagedArray Predict(ManagedArray test, NeuralNetworkOptions opts)
        {
            Forward(test);

            var prediction = new ManagedArray(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    double maxval = double.MinValue;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        double val = Y[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                        }
                    }

                    prediction[y] = maxval;
                }
                else
                {
                    prediction[y] = Y[y];
                }
            }

            // cleanup of arrays allocated in Forward propagation
            ManagedOps.Free(Y);

            // Cleanup
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(X[layer], Z[layer]);
            }

            return(prediction);
        }
        public ManagedIntList Classify(ManagedArray test, NeuralNetworkOptions opts, double threshold = 0.5)
        {
            Forward(test);

            var classification = new ManagedIntList(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    var maxval = double.MinValue;
                    var maxind = 0;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        var val = Yk[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                            maxind = x;
                        }
                    }

                    classification[y] = maxind + 1;
                }
                else
                {
                    classification[y] = Yk[y] > threshold ? 1 : 0;
                }
            }

            // cleanup of arrays allocated in Forward
            ManagedOps.Free(A2, Yk, Z2);

            return(classification);
        }
        ManagedArray Labels(ManagedArray output, NeuralNetworkOptions opts)
        {
            // expand integer class labels into one-hot columns using an identity matrix
            var result     = new ManagedArray(opts.Categories, opts.Items);
            var eye_matrix = ManagedMatrix.Diag(opts.Categories);

            for (var y = 0; y < opts.Items; y++)
            {
                if (opts.Categories > 1)
                {
                    for (var x = 0; x < opts.Categories; x++)
                    {
                        result[x, y] = eye_matrix[x, (int)output[y] - 1];
                    }
                }
                else
                {
                    result[y] = output[y];
                }
            }

            ManagedOps.Free(eye_matrix);

            return(result);
        }
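To make the encoding concrete, here is what Labels produces for a small case. This is only a worked illustration of the loop above; it assumes the labels in output are 1-based, as implied by the (int)output[y] - 1 lookup:

        // With opts.Categories = 3 and output = { 2, 1, 3 }:
        //
        //   item y:      0    1    2
        //   class 1:     0    1    0
        //   class 2:     1    0    0
        //   class 3:     0    0    1
        //
        // i.e. result[x, y] = 1 exactly when output[y] == x + 1, and 0 otherwise.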
        public void SetupLabels(ManagedArray output, NeuralNetworkOptions opts)
        {
            Y_output = Labels(output, opts);
        }
Example #18
        public void Setup(ManagedArray output, NeuralNetworkOptions opts, bool Reset = true)
        {
            if (Reset)
            {
                if (Activations != null && Activations.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < Activations.GetLength(0); layer++)
                    {
                        ManagedOps.Free(Activations[layer]);
                    }
                }

                if (D != null && D.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < D.GetLength(0); layer++)
                    {
                        ManagedOps.Free(D[layer]);
                    }
                }

                if (Deltas != null && Deltas.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < Deltas.GetLength(0); layer++)
                    {
                        ManagedOps.Free(Deltas[layer]);
                    }
                }

                if (X != null && X.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < X.GetLength(0); layer++)
                    {
                        ManagedOps.Free(X[layer]);
                    }
                }

                if (Z != null && Z.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < Z.GetLength(0); layer++)
                    {
                        ManagedOps.Free(Z[layer]);
                    }
                }

                if (Weights != null && Weights.GetLength(0) > 0)
                {
                    for (var layer = 0; layer < Weights.GetLength(0); layer++)
                    {
                        ManagedOps.Free(Weights[layer]);
                    }
                }

                if (Layers.Count > 0)
                {
                    Weights = new ManagedArray[Layers.Count];

                    for (var layer = 0; layer < Layers.Count; layer++)
                    {
                        Weights[layer] = new ManagedArray(Layers[layer].Inputs + 1, Layers[layer].Outputs);
                    }
                }
                else
                {
                    Weights = new ManagedArray[opts.HiddenLayers + 1];

                    Weights[0] = new ManagedArray(opts.Inputs + 1, opts.Nodes);

                    Layers.Add(new HiddenLayer(opts.Inputs, opts.Nodes));

                    for (var layer = 1; layer < opts.HiddenLayers; layer++)
                    {
                        Weights[layer] = new ManagedArray(opts.Nodes + 1, opts.Nodes);

                        Layers.Add(new HiddenLayer(opts.Nodes, opts.Nodes));
                    }

                    Weights[opts.HiddenLayers] = new ManagedArray(opts.Nodes + 1, opts.Categories);

                    Layers.Add(new HiddenLayer(opts.Nodes, opts.Categories));
                }
            }

            Activations = new ManagedArray[opts.HiddenLayers];
            Deltas      = new ManagedArray[opts.HiddenLayers + 1];
            X           = new ManagedArray[opts.HiddenLayers + 1];
            D           = new ManagedArray[opts.HiddenLayers + 1];
            Z           = new ManagedArray[opts.HiddenLayers + 1];

            SetupLabels(output, opts);

            var random = new Random(Guid.NewGuid().GetHashCode());

            if (Reset && Weights != null)
            {
                // randomize every allocated weight matrix (bounded by the actual array length)
                for (var layer = 0; layer < Weights.GetLength(0); layer++)
                {
                    Rand(Weights[layer], random);
                }
            }

            Cost = 1.0;
            L2   = 1.0;

            Iterations = 0;
        }
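As a sanity check on what the default branch above allocates, here is the resulting layout for one concrete (hypothetical) configuration; the shapes follow directly from the new ManagedArray(...) calls in Setup:

        // For opts.Inputs = 4, opts.Nodes = 8, opts.HiddenLayers = 2, opts.Categories = 3,
        // Setup allocates HiddenLayers + 1 = 3 weight matrices, each with an extra
        // weight for the bias term (the +1 in the first dimension):
        //
        //   Weights[0] : (4 + 1) x 8    input    -> hidden 1
        //   Weights[1] : (8 + 1) x 8    hidden 1 -> hidden 2
        //   Weights[2] : (8 + 1) x 3    hidden 2 -> output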