/// <summary>
/// Gets all zodiac signs that are associated with either Yin or Yang.
/// This is a fixed yin-yang-to-zodiac-sign association.
/// </summary>
/// <param name="yinOrYang">Supply either Yin or Yang.</param>
/// <returns>A list of Chinese zodiac signs that match the YinYang filter.</returns>
public static List<ChineseZodiacModel> GetAllZodiacSignsForYinYang(YinYang yinOrYang)
{
    return ChineseZodiacHelper.InitializeAndGetAllZodiacSigns().Values
        .Where(z => z.ZodiacYinYang == yinOrYang)
        .ToList();
}
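// A minimal usage sketch for the helper above (not from the original
// source). It assumes the method lives on a static helper class such as
// ChineseZodiacHelper and that the YinYang enum exposes Yin and Yang
// members; the ZodiacYinYang and ZodiacPersonality properties appear in
// the model's constructor further down.
public static void PrintYangSigns()
{
    List<ChineseZodiacModel> yangSigns =
        ChineseZodiacHelper.GetAllZodiacSignsForYinYang(YinYang.Yang);

    foreach (ChineseZodiacModel sign in yangSigns)
    {
        // Every returned model carries the filter value, since
        // ZodiacYinYang is fixed when the model is constructed.
        Console.WriteLine("{0}: {1}", sign.ZodiacYinYang, sign.ZodiacPersonality);
    }
}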
public void yinyang_test()
{
    // https://github.com/accord-net/framework/issues/451
    double[][] observations = new YinYang().Instances;

    Accord.Math.Random.Generator.Seed = 0;

    var kmeans = new BalancedKMeans(2)
    {
        // If a limit is not set, the following Learn call does not return.
        MaxIterations = 1000
    };

    var clusters = kmeans.Learn(observations);

    int[] labels = kmeans.Labels;

    int[] expected = new int[]
    {
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 1, 1, 1, 1, 1, 1
    };

    // Debug helper: renders the labels as a C# array literal.
    string str = labels.ToCSharp();

    Assert.IsTrue(labels.IsEqual(expected));

    // The clustering should be balanced: 50 points in each of the 2 clusters.
    int[] hist = Accord.Math.Vector.Histogram(labels);
    for (int i = 0; i < hist.Length; i++)
        Assert.AreEqual(50, hist[i]);
}
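// A follow-on sketch (not part of the original test): BalancedKMeans
// derives from KMeans, so Learn returns a KMeansClusterCollection that
// can assign previously unseen points to their nearest centroid. The
// sample coordinates below are made up for illustration.
public static int AssignNewPoint(BalancedKMeans kmeans, double[][] observations)
{
    KMeansClusterCollection clusters = kmeans.Learn(observations);

    // Decide returns the zero-based index of the closest cluster.
    return clusters.Decide(new double[] { 0.5, -0.2 });
}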
internal ChineseZodiacModel(string name, string english, string zodiacPersonality,
    ChineseZodiacElements chineseZodiacFixedElement, YinYang yinYang)
    : base(name, english)
{
    ZodiacPersonality = zodiacPersonality;
    ZodiacFixedElement = chineseZodiacFixedElement;
    ZodiacYinYang = yinYang;
}
public void LearnTest2()
{
    var dataset = new YinYang();
    double[][] inputs = dataset.Instances;
    int[] outputs = dataset.ClassLabels.Apply(x => x ? 1 : -1);

    // Create a Kernel Support Vector Machine with a 3rd-degree Polynomial kernel
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
        new Polynomial(3), inputs[0].Length);

    // Create the Least Squares Support Vector Machine teacher
    LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, outputs);
    learn.Complexity = 1 / 0.1;

    // Run the learning algorithm
    learn.Run();

    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(outputs[i]), System.Math.Sign(output[i]));
}
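// A single-point prediction sketch (not from the original test): with
// the {-1, +1} label encoding used above, the sign of the machine's
// raw output gives the predicted class.
public static int PredictClass(KernelSupportVectorMachine machine, double[] point)
{
    return Math.Sign(machine.Compute(point));
}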
public void learn_logistic_regression()
{
    #region doc_learn_lr
    // This example shows how to use AdaBoost to train more complex
    // models than a simple DecisionStump. For example, we will use
    // it to train a boosted Logistic Regression classifier.

    // Let's use some synthetic data for that: the Yin-Yang dataset is
    // a simple 2D binary non-linear decision problem where the points
    // belonging to each of the classes intertwine in a Yin-Yang shape:
    var dataset = new YinYang();
    double[][] inputs = dataset.Instances;
    int[] outputs = Classes.ToZeroOne(dataset.ClassLabels);

    // Create an AdaBoost for Logistic Regression as:
    var teacher = new AdaBoost<LogisticRegression>()
    {
        // Here we can specify how each regression should be learned:
        Learner = (param) => new IterativeReweightedLeastSquares<LogisticRegression>()
        {
            ComputeStandardErrors = false,
            MaxIterations = 50,
            Tolerance = 0
        },

        // Train until:
        MaxIterations = 50,
        Tolerance = 1e-5,
    };

    // Now, we can use the Learn method to learn a boosted classifier
    Boost<LogisticRegression> classifier = teacher.Learn(inputs, outputs);

    // And we can test its performance using (error should be 0.11):
    ConfusionMatrix cm = ConfusionMatrix.Estimate(classifier, inputs, outputs);

    double error = cm.Error;  // should be 0.11
    double acc = cm.Accuracy; // should be 0.89
    double kappa = cm.Kappa;  // should be 0.78

    // And compute a decision for a single data point using:
    bool y = classifier.Decide(inputs[0]); // result should be false
    #endregion

    Assert.AreEqual(false, y);
    Assert.AreEqual(0.11, error);
    Assert.AreEqual(0.89, acc);
    Assert.AreEqual(0.78, kappa);

    Assert.AreEqual(2, classifier.Models.Count);
    Assert.AreEqual(0.63576818449825168, classifier.Models[0].Weight);
    Assert.AreEqual(0.36423181550174832, classifier.Models[1].Weight);

    int[] actual = new int[outputs.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = classifier.Compute(inputs[i]);
}
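// An inspection sketch (not from the original test): a Boost<T> is a
// weighted vote over its weak learners, so the trained ensemble can be
// examined model by model. Only the Models collection and the Weight
// property exercised by the asserts above are assumed here.
public static void InspectEnsemble(Boost<LogisticRegression> classifier)
{
    for (int i = 0; i < classifier.Models.Count; i++)
        Console.WriteLine("model {0}: weight = {1}", i, classifier.Models[i].Weight);
}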
public void learn_decision_trees()
{
    #region doc_learn_dt
    // This example shows how to use AdaBoost to train more complex
    // models than a simple DecisionStump. For example, we will use
    // it to train boosted Decision Trees.

    // Let's use some synthetic data for that: the Yin-Yang dataset is
    // a simple 2D binary non-linear decision problem where the points
    // belonging to each of the classes intertwine in a Yin-Yang shape:
    var dataset = new YinYang();
    double[][] inputs = dataset.Instances;
    int[] outputs = Classes.ToZeroOne(dataset.ClassLabels);

    // Create an AdaBoost for Decision Trees as:
    var teacher = new AdaBoost<DecisionTree>()
    {
        // Here we can specify how each tree should be learned:
        Learner = (param) => new C45Learning()
        {
            // i.e.
            // MaxHeight =
            // MaxVariables =
        },

        // Train until:
        MaxIterations = 50,
        Tolerance = 1e-5,
    };

    // Now, we can use the Learn method to learn a boosted classifier
    Boost<DecisionTree> classifier = teacher.Learn(inputs, outputs);

    // And we can test its performance using (error should be 0):
    double error = ConfusionMatrix.Estimate(classifier, inputs, outputs).Error;

    // And compute a decision for a single data point using:
    bool y = classifier.Decide(inputs[0]); // result should be false
    #endregion

    Assert.AreEqual(false, y);
    Assert.AreEqual(0, error);

    Assert.AreEqual(22, classifier.Models.Count);
    Assert.AreEqual(0.063497989403001331, classifier.Models[0].Weight);
    Assert.AreEqual(0.081129615464770655, classifier.Models[1].Weight);
    Assert.AreEqual(0.083062765085567689, classifier.Models[2].Weight);
    Assert.AreEqual(0.050307480220333232, classifier.Models[3].Weight);
    Assert.AreEqual(0.044287142080877882, classifier.Models[4].Weight);
    Assert.AreEqual(0.042772219812778081, classifier.Models[5].Weight);

    int[] actual = new int[outputs.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = classifier.Compute(inputs[i]);
}
internal ZhouyiTrigram(int index, string name, string nature,
    YinYang yinYang, FiveElement fiveElement)
{
    this.Index = index;
    this.Name = name;
    this.Nature = nature;
    this.YinYang = yinYang;
    this.FiveElement = fiveElement;
}
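// A construction sketch (not from the original source): the constructor
// is internal, so this only compiles inside the defining assembly; the
// index, names, and enum members below are illustrative assumptions,
// not data taken from the library.
internal static ZhouyiTrigram CreateSampleTrigram()
{
    return new ZhouyiTrigram(
        index: 1,
        name: "Qian",
        nature: "Heaven",
        yinYang: YinYang.Yang,
        fiveElement: FiveElement.Metal);
}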
public void ConstructorTest2()
{
    var dataset = new YinYang();
    double[][] inputs = dataset.Instances;
    bool[] outputs2 = dataset.ClassLabels;
    int[] outputs = outputs2.Apply(x => x ? 1 : 0);

    var classifier = new Boost<Weak<LogisticRegression>>();

    var teacher = new AdaBoost<Weak<LogisticRegression>>(classifier)
    {
        Creation = (weights) =>
        {
            LogisticRegression reg = new LogisticRegression(2, intercept: 1);

            IterativeReweightedLeastSquares irls = new IterativeReweightedLeastSquares(reg)
            {
                ComputeStandardErrors = false
            };

            for (int i = 0; i < 50; i++)
                irls.Run(inputs, outputs, weights);

            return new Weak<LogisticRegression>(reg,
                (s, x) => Math.Sign(s.Compute(x) - 0.5));
        },

        Iterations = 50,
        Tolerance = 1e-5,
    };

    double error = teacher.Run(inputs, outputs);
    Assert.AreEqual(0.11, error);

    Assert.AreEqual(2, classifier.Models.Count);
    Assert.AreEqual(0.63576818449825168, classifier.Models[0].Weight);
    Assert.AreEqual(0.36423181550174832, classifier.Models[1].Weight);

    int[] actual = new int[outputs.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = classifier.Compute(inputs[i]);

    //for (int i = 0; i < actual.Length; i++)
    //    Assert.AreEqual(outputs[i], actual[i]);
}
// Passes all values through to the ChineseZodiacModel base constructor.
internal HorseChineseZodiacModel(string name, string english, string personality,
    ChineseZodiacElements chineseZodiacFixedElement, YinYang yinYang)
    : base(name, english, personality, chineseZodiacFixedElement, yinYang)
{
}
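// A construction sketch for the derived model (not from the original
// source): the constructor is internal, so this only compiles inside
// the defining assembly, and every argument value, including the
// ChineseZodiacElements.Fire member, is an illustrative assumption.
internal static HorseChineseZodiacModel CreateSampleHorse()
{
    return new HorseChineseZodiacModel(
        name: "马",
        english: "Horse",
        personality: "Energetic and independent.",
        chineseZodiacFixedElement: ChineseZodiacElements.Fire,
        yinYang: YinYang.Yang);
}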
public string GetYinYang()
{
    // Reads the human-readable description attached to the current
    // YinYang enum value.
    var attr = YinYang.GetAttribute<EnumDescriptionAttribute>();
    return attr.GetDescription();
}
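// GetYinYang depends on an EnumDescriptionAttribute and a GetAttribute
// extension that are not shown in this snippet. A minimal sketch of
// what they might look like, assuming the attribute simply wraps a
// description string:
[AttributeUsage(AttributeTargets.Field)]
public sealed class EnumDescriptionAttribute : Attribute
{
    private readonly string description;

    public EnumDescriptionAttribute(string description)
    {
        this.description = description;
    }

    public string GetDescription()
    {
        return description;
    }
}

public static class EnumAttributeExtensions
{
    // Looks up an attribute on the enum member's field via reflection;
    // returns null when the member carries no such attribute.
    public static T GetAttribute<T>(this Enum value) where T : Attribute
    {
        var field = value.GetType().GetField(value.ToString());
        return (T)Attribute.GetCustomAttribute(field, typeof(T));
    }
}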