Example #1
public void thresholds()
        {
            // Example for https://github.com/accord-net/framework/issues/737

            // Let's say we have a dataset of US birds:
            string[] names = { "State", "Bird", "Percentage" };

            object[][] inputData =
            {
                new object[] { "Kansas", "Crow",    0.1 },
                new object[] { "Ohio",   "Pardal",  0.5 },
                new object[] { "Hawaii", "Penguim", 0.7 }
            };

            // Discretize the continuous data from doubles to a string representation
            var discretization = new Discretization<double, string>(names, inputData);

            discretization["Percentage"].Mapping[x => x >= 0.00 && x < 0.25] = x => "Q1";
            discretization["Percentage"].Mapping[x => x >= 0.25 && x < 0.50] = x => "Q2";
            discretization["Percentage"].Mapping[x => x >= 0.50 && x < 0.75] = x => "Q3";
            discretization["Percentage"].Mapping[x => x >= 0.75 && x < 1.09] = x => "Q4";

            // Transform the data into discrete categories
            string[][] discreteData = discretization.Transform(inputData);

            // Codify the discrete data from strings to integers
            var codebook = new Codification<string>(names, discreteData);

            // Transform the data into integer symbols
            int[][] values = codebook.Transform(discreteData);

            // Transform the symbols into 1-of-K vectors
            double[][] states      = Jagged.OneHot(values.GetColumn(0));
            double[][] birds       = Jagged.OneHot(values.GetColumn(1));
            double[][] percentages = Jagged.OneHot(values.GetColumn(2));

            // Normalize each variable separately if needed
            states      = states.Divide(codebook["State"].NumberOfSymbols);
            birds       = birds.Divide(codebook["Bird"].NumberOfSymbols);
            percentages = percentages.Divide(codebook["Percentage"].NumberOfSymbols);

            // Create final feature vectors
            double[][] features = Matrix.Concatenate(states, birds, percentages);

            Assert.AreEqual(new[] { 3, 3 }, states.GetLength());
            Assert.AreEqual(new[] { 3, 3 }, birds.GetLength());
            Assert.AreEqual(new[] { 3, 2 }, percentages.GetLength());
            Assert.AreEqual(new[] { 3, 8 }, features.GetLength());

            // string t = features.ToCSharp();
            var expected = new double[][]
            {
                new double[] { 0.333333333333333, 0, 0, 0.333333333333333, 0, 0, 0.5, 0 },
                new double[] { 0, 0.333333333333333, 0, 0, 0.333333333333333, 0, 0, 0.5 },
                new double[] { 0, 0, 0.333333333333333, 0, 0, 0.333333333333333, 0, 0.5 }
            };

            Assert.IsTrue(features.IsEqual(expected, rtol: 1e-10));
        }
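For reference, Jagged.OneHot is what turns each integer symbol above into a 1-of-K row vector. A minimal sketch of the conversion on its own (label values chosen for illustration):

int[] labels = { 0, 2, 1 };

// Each label becomes a unit row; the number of columns is taken
// from the labels (it can also be passed explicitly, as in
// Jagged.OneHot(labels, classes))
double[][] oneHot = Jagged.OneHot(labels);

// oneHot now contains:
// { 1, 0, 0 }
// { 0, 0, 1 }
// { 0, 1, 0 }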
Example #2
        /// <summary>
        ///   Initializes a new instance of the <see cref="HingeLoss"/> class.
        /// </summary>
        ///
        /// <param name="expected">The expected outputs (ground truth).</param>
        ///

        public HingeLoss(int[] expected)
        {
            if (Classes.IsMinusOnePlusOne(expected))
            {
                expected = expected.ToZeroOne();
            }

            this.expected = Jagged.OneHot<bool>(expected);
        }
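The constructor above first normalizes bipolar labels to 0/1 before one-hot encoding. A minimal sketch of that conversion using the same helpers (label values chosen for illustration):

int[] labels = { -1, 1, 1, -1 };

// True when every label is either -1 or +1
bool bipolar = Classes.IsMinusOnePlusOne(labels);   // true

// Maps -1 to 0 and +1 to 1
int[] zeroOne = labels.ToZeroOne();                 // { 0, 1, 1, 0 }

// Each 0/1 label then becomes a two-column boolean indicator row
bool[][] oneHot = Jagged.OneHot<bool>(zeroOne);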
Example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="LogisticLoss"/> class.
        /// </summary>
        ///
        /// <param name="expected">The expected outputs (ground truth).</param>
        ///
        public LogisticLoss(int[] expected)
        {
            if (Classes.IsMinusOnePlusOne(expected))
            {
                expected = expected.ToZeroOne();
            }

            var oneHot = Jagged.OneHot<bool>(expected);

            this.expected = Classes.ToMinusOnePlusOne(oneHot);
        }
Example #4
        public override void Initialize()
        {
            var history = History("SPY", TimeSpan.FromDays(1000), Resolution.Daily);

            var highestClose  = history.Max(h => h.Close);
            var lowestClose   = history.Min(h => h.Close);
            var highestVolume = history.Max(h => h.Volume);
            var lowestVolume  = history.Min(h => h.Volume);

            var inputs = history.Select(h =>
                                        new[]
            {
                (double)((h.Close - lowestClose) / (highestClose - lowestClose)),
                (double)(h.Volume - lowestVolume) / (highestVolume - lowestVolume)
            }).ToArray();

            // Label each day by the next day's normalized close:
            // 0 = down, 2 = up, 1 = unchanged
            var classes = inputs.Take(inputs.Length - 1).Zip(inputs.Skip(1), (a, b) => b[0] < a[0] ? 0 : b[0] > a[0] ? 2 : 1).ToArray();

            var outputs = Jagged.OneHot(classes);

            // Drop the last day, which has no next-day label to learn from
            var trainingInputs = inputs.Take(classes.Length).ToArray();

            // 2 inputs, 3 hidden neurons, and 3 outputs (one per one-hot class)
            var network = new ActivationNetwork(new SigmoidFunction(), 2, 3, 3);

            // Initialize the weights using the Nguyen-Widrow method
            new NguyenWidrow(network).Randomize();

            var teacher2 = new ResilientBackpropagationLearning(network);
            // Run supervised learning until the epoch error stops decreasing.
            var previousError = double.MaxValue;
            var error = teacher2.RunEpoch(trainingInputs, outputs);

            while (error < previousError)
            {
                previousError = error;
                error = teacher2.RunEpoch(trainingInputs, outputs);
            }

            // Checks if the network has learned
            for (var i = 0; i < trainingInputs.Length; i++)
            {
                var answer = network.Compute(trainingInputs[i]);

                var expected = classes[i];
                int actual;
                answer.Max(out actual);
                // actual should be equal to expected
            }
        }
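The answer.Max(out actual) call above is how the predicted class index is read back from the network's output vector. A minimal sketch (output values made up for illustration):

double[] answer = { 0.1, 0.7, 0.2 };

// Max returns the largest value and reports its position
// through the out parameter
int actual;
double best = answer.Max(out actual);   // best == 0.7, actual == 1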
Example #5
        public void StringApplyTest3()
        {
            // Example for https://github.com/accord-net/framework/issues/581

            // Let's say we have a dataset of US birds:
            string[] names = { "State", "Bird", "Color" };

            string[][] data =
            {
                new[] { "Kansas", "Crow",    "Black"  },
                new[] { "Ohio",   "Pardal",  "Yellow" },
                new[] { "Hawaii", "Penguim", "Black"  }
            };

            // Create a codebook for the dataset
            var codebook = new Codification(names, data);

            // Transform the data into integer symbols
            int[][] values = codebook.Transform(data);

            // Transform the symbols into 1-of-K vectors
            double[][] states = Jagged.OneHot(values.GetColumn(0));
            double[][] birds  = Jagged.OneHot(values.GetColumn(1));
            double[][] colors = Jagged.OneHot(values.GetColumn(2));

            // Normalize each variable separately if needed
            states = states.Divide(codebook["State"].NumberOfSymbols);
            birds  = birds.Divide(codebook["Bird"].NumberOfSymbols);
            colors = colors.Divide(codebook["Color"].NumberOfSymbols);

            // Create final feature vectors
            double[][] features = Matrix.Concatenate(states, birds, colors);

            Assert.AreEqual(new[] { 3, 3 }, states.GetLength());
            Assert.AreEqual(new[] { 3, 3 }, birds.GetLength());
            Assert.AreEqual(new[] { 3, 2 }, colors.GetLength());
            Assert.AreEqual(new[] { 3, 8 }, features.GetLength());

            // string t = features.ToCSharp();
            var expected = new double[][]
            {
                new double[] { 0.333333333333333, 0, 0, 0.333333333333333, 0, 0, 0.5, 0 },
                new double[] { 0, 0.333333333333333, 0, 0, 0.333333333333333, 0, 0, 0.5 },
                new double[] { 0, 0, 0.333333333333333, 0, 0, 0.333333333333333, 0.5, 0 }
            };

            Assert.IsTrue(features.IsEqual(expected, rtol: 1e-10));
        }
Example #6
        public MultinomialLogisticRegressionAnalysis(double[][] inputs, int[] outputs)
        {
            // Initial argument checking
            if (inputs == null)
            {
                throw new ArgumentNullException("inputs");
            }

            if (outputs == null)
            {
                throw new ArgumentNullException("outputs");
            }

            if (inputs.Length != outputs.Length)
            {
                throw new ArgumentException("The number of rows in the input array must match the number of given outputs.");
            }

            init(inputs, Jagged.OneHot(outputs));
        }
Example #7
File: SVMSignal.cs Project: godtopus/Lean
        public void TrainNN(List<double[]> inputs, List<int> outputs, List<double> weights = null)
        {
            var tempInputs = _inputs.Concat(inputs).ToArray();

            tempInputs = Accord.Statistics.Tools.ZScores(tempInputs);
            var trainingInputs  = tempInputs.Skip(_inputs.Count).Take(inputs.Count).ToArray();
            var trainingOutputs = Jagged.OneHot(outputs.ToArray());

            var network = new ActivationNetwork(new GaussianFunction(), trainingInputs.First().Length, 5, 2);

            _dbn = network;

            // Initialize the network with Gaussian weights
            new GaussianWeights(network, 0.1).Randomize();

            // Setup the learning algorithm.
            var teacher = new ParallelResilientBackpropagationLearning(network);

            double error = Double.MaxValue;

            for (int i = 0; i < 5000; i++)
            {
                error = teacher.RunEpoch(trainingInputs, trainingOutputs);
            }

            // Test the resulting accuracy.
            int correct = 0;

            for (int i = 0; i < trainingInputs.Length; i++)
            {
                double[] outputValues = network.Compute(trainingInputs[i]);
                double   outputResult = outputValues.First() >= 0.5 ? 1 : 0;

                if (outputResult == trainingOutputs[i].First())
                {
                    correct++;
                }
            }

            Console.WriteLine("DBN Correct: {0} Total: {1} Accuracy: {2}, Training Error: {3}", correct, trainingOutputs.Length, (double)correct / (double)trainingOutputs.Length, error);
        }
Example #8
        public void Ucz() // training algorithm for the neural network
        {
            int numberOfInputs        = 6;
            int numberOfClasses       = 11;
            int numberOfHiddenNeurons = 6;

            network = new ActivationNetwork(new BipolarSigmoidFunction(2), numberOfInputs, numberOfHiddenNeurons, numberOfClasses);
            var    teacher = new BackPropagationLearning(network);
            double error   = 1;

            double[][] input = odczytDanychIN();  // read the input data
            int[]      label = odczytDanychOUT(); // read the output labels

            double[][] output = Jagged.OneHot(label, numberOfClasses);

            for (int i = 0; i < 10000; i++)
            {
                error = teacher.RunEpoch(input, output);
            }
            label2.Text = error.ToString();
        }
Example #9
 /// <summary>
 /// Initializes a new instance of the <see cref="BinaryCrossEntropyLoss"/> class.
 /// </summary>
 /// <param name="expected">The expected outputs (ground truth).</param>
 public BinaryCrossEntropyLoss(int[] expected)
     : this(Jagged.OneHot(expected))
 {
 }
Example #10
 /// <summary>
 /// Computes the loss between the expected values (ground truth)
 /// and the given actual values that have been predicted.
 /// </summary>
 /// <param name="actual">The actual values that have been predicted.</param>
 /// <returns>
 /// The loss value between the expected values and
 /// the actual predicted values.
 /// </returns>
 public double Loss(int[] actual)
 {
     return Loss(Jagged.OneHot(actual));
 }
Example #11
 /// <summary>
 /// Initializes a new instance of the <see cref="HammingLoss"/> class.
 /// </summary>
 /// <param name="expected">The expected outputs (ground truth).</param>
 public HammingLoss(int[] expected)
     : this(Jagged.OneHot(expected))
 {
 }
Example #12
 /// <summary>
 /// Learns a model that can map the given inputs to the given outputs.
 /// </summary>
 /// <param name="x">The model inputs.</param>
 /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
 /// <param name="weights">The weight of importance for each input-output pair.</param>
 /// <returns>
 /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
 /// </returns>
 public MultinomialLogisticRegression Learn(double[][] x, int[] y, double[] weights = null)
 {
     return Learn(x, Jagged.OneHot(y), weights);
 }
Example #13
 double[][] IClassifier<TInput, double[]>.Decide(TInput[] input, double[][] result)
 {
     return Jagged.OneHot<double>(Decide(input), result);
 }
Example #14
 bool[][] ICovariantTransform<int[], bool[]>.Transform(int[][] input)
 {
     return Jagged.OneHot<bool>(Decide(input), NumberOfOutputs);
 }
Example #15
 /// <summary>
 /// Applies the transformation to an input, producing an associated output.
 /// </summary>
 /// <param name="input">The input data to which the transformation should be applied.</param>
 /// <param name="result">A location to store the output, avoiding unnecessary memory allocations.</param>
 /// <returns>
 /// The output generated by applying this transformation to the given input.
 /// </returns>
 public int[][] Transform(float[][] input, int[][] result)
 {
     return Jagged.OneHot<int>(Decide(input), result);
 }
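The result parameter in these Transform overloads lets a caller reuse one preallocated buffer across many calls instead of allocating a fresh jagged array each time. A minimal sketch, where classifier, batch, and numberOfClasses are hypothetical placeholders standing in for any transform shown here:

// Assumed for illustration
int numberOfClasses = 3;

// Preallocate once: one row per sample, one column per class
int[][] result = new int[batch.Length][];
for (int i = 0; i < result.Length; i++)
    result[i] = new int[numberOfClasses];

// Reuse the same buffer on every call
classifier.Transform(batch, result);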
Example #16
 public static double[][] Expand(int[] labels, int classes)
 {
     return Jagged.OneHot(labels, classes);
 }
Example #17
 public static double[][] Expand(int[] labels)
 {
     return Jagged.OneHot(labels, labels.DistinctCount());
 }
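Both Expand overloads are thin wrappers over Jagged.OneHot; the only difference is whether the number of classes is passed in or inferred with DistinctCount. For illustration:

int[] labels = { 0, 1, 0, 2 };

double[][] a = Expand(labels);      // 3 columns, inferred from the labels
double[][] b = Expand(labels, 4);   // 4 columns; the last column stays zero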
Example #18
 /// <summary>
 /// Applies the transformation to an input, producing an associated output.
 /// </summary>
 /// <param name="input">The input data to which the transformation should be applied.</param>
 /// <returns>
 /// The output generated by applying this transformation to the given input.
 /// </returns>
 bool[][] ITransform<float[], bool[]>.Transform(float[][] input)
 {
     return Jagged.OneHot<bool>(Decide(input), NumberOfOutputs);
 }
Example #19
 /// <summary>
 /// Applies the transformation to an input, producing an associated output.
 /// </summary>
 /// <param name="input">The input data to which the transformation should be applied.</param>
 /// <returns>
 /// The output generated by applying this transformation to the given input.
 /// </returns>
 int[][] ITransform<int[], int[]>.Transform(int[][] input)
 {
     return Jagged.OneHot<int>(Decide(input), NumberOfOutputs);
 }
Example #20
 /// <summary>
 ///   Gets the Log-Likelihood for the model.
 /// </summary>
 ///
 /// <param name="inputs">A set of input data.</param>
 /// <param name="classes">A set of output data.</param>
 /// <returns>
 ///   The log-likelihood (a measure of performance) of the model
 ///   calculated over the given data sets.
 /// </returns>
 ///
 public double GetLogLikelihood(double[][] inputs, int[] classes)
 {
     return GetLogLikelihood(inputs, Jagged.OneHot(classes));
 }
Example #21
 /// <summary>
 ///   The likelihood ratio test of the overall model, also called the model chi-square test.
 /// </summary>
 ///
 /// <remarks>
 ///   <para>
 ///   The Chi-square test, also called the likelihood ratio test or the log-likelihood test
 ///   is based on the deviance of the model (-2*log-likelihood). The log-likelihood ratio test
 ///   indicates whether there is evidence of the need to move from a simpler model to a more
 ///   complicated one (where the simpler model is nested within the complicated one).</para>
 ///   <para>
 ///   The difference between the log-likelihood ratios for the researcher's model and a
 ///   simpler model is often called the "model chi-square".</para>
 /// </remarks>
 ///
 public ChiSquareTest ChiSquare(double[][] input, int[] classes)
 {
     return ChiSquare(input, Jagged.OneHot(classes));
 }
Example #22
 /// <summary>
 /// Applies the transformation to an input, producing an associated output.
 /// </summary>
 /// <param name="input">The input data to which the transformation should be applied.</param>
 /// <param name="result">A location to store the output, avoiding unnecessary memory allocations.</param>
 /// <returns>
 /// The output generated by applying this transformation to the given input.
 /// </returns>
 public double[][] Transform(float[][] input, double[][] result)
 {
     return Jagged.OneHot(Decide(input), result);
 }
Example #23
 /// <summary>
 /// Applies the transformation to an input, producing an associated output.
 /// </summary>
 /// <param name="input">The input data to which the transformation should be applied.</param>
 /// <param name="result">A location to store the output, avoiding unnecessary memory allocations.</param>
 /// <returns>
 /// The output generated by applying this transformation to the given input.
 /// </returns>
 public bool[][] Transform(float[][] input, bool[][] result)
 {
     return Jagged.OneHot<bool>(Decide(input), result);
 }
Example #24
 public MultilabelSupportVectorLearning(MultilabelSupportVectorMachine model, double[][] input, int[] output)
 {
     this.Model  = model;
     this.input  = input;
     this.output = Jagged.OneHot<int>(output);
 }
Example #25
 int[][] ICovariantTransform<float[], int[]>.Transform(float[][] input)
 {
     return Jagged.OneHot<int>(Decide(input), NumberOfOutputs);
 }
Example #26
 /// <summary>
 /// Initializes a new instance of the <see cref="CategoryCrossEntropyLoss"/> class.
 /// </summary>
 /// <param name="expected">The expected outputs (ground truth).</param>
 public CategoryCrossEntropyLoss(int[] expected)
 {
     this.Expected = Jagged.OneHot<bool>(expected);
 }
Example #27
 int[][] IClassifier<TInput, int[]>.Decide(TInput[] input, int[][] result)
 {
     return Jagged.OneHot<int>(Decide(input), result);
 }
Example #28
 /// <summary>
 /// Initializes a new instance of the <see cref="CategoryCrossEntropyLoss"/> class.
 /// </summary>
 /// <param name="expected">The expected outputs (ground truth).</param>
 public CategoryCrossEntropyLoss(int[] expected)
     : base(Jagged.OneHot(expected))
 {
 }
Example #29
 public static double[][] Expand(int[] labels, int classes, double negative, double positive)
 {
     return Jagged.OneHot(labels, classes).Replace(0, negative).Replace(1, positive);
 }
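This overload remaps the one-hot 0/1 entries to arbitrary values, which is handy when targets must be bipolar (for example, -1/+1 for a bipolar sigmoid network). For illustration:

int[] labels = { 0, 1 };

// Standard one-hot, then 0 -> -1 and 1 -> +1
double[][] targets = Expand(labels, 2, -1, +1);

// targets now contains:
// { +1, -1 }
// { -1, +1 }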
Example #30
 /// <summary>
 /// Applies the transformation to a set of input vectors,
 /// producing an associated set of output vectors.
 /// </summary>
 /// <param name="input">The input data to which
 /// the transformation should be applied.</param>
 /// <param name="result">The location to where to store the
 /// result of this transformation.</param>
 /// <returns>The output generated by applying this
 /// transformation to the given input.</returns>
 public double[][] Transform(T[] input, double[][] result)
 {
     return Jagged.OneHot(Transform(input), result: result);
 }