/// <summary>
/// Stores the random variable, its parents and the abbreviation map, resolves which
/// distribution set should be displayed, and refreshes the UI.
/// </summary>
public void SetDistribution( RandomVariable variable, IDictionary<string, string> variableAbbreviations, IEnumerable<RandomVariable> variableParents, DiscreteDistribution distribution)
{
    _variable = variable;
    _variableParents = variableParents;
    _variableAbbreviations = variableAbbreviations;

    // Precedence: no variable → empty set; explicit distribution → single-entry set;
    // otherwise fall back to the variable's own distributions.
    if (variable == null)
    {
        _distributions = new DistributionSet();
    }
    else if (distribution != null)
    {
        _distributions = new DistributionSet(distribution);
    }
    else
    {
        _distributions = variable.Distributions;
    }

    RefreshUI();
}
/// <summary>
/// Creates the day-of-week, month and hour distributions and seeds each one
/// via its corresponding Init* routine.
/// </summary>
public DateDraw()
{
    _dayOfWeek = new DiscreteDistribution<DayOfWeek>();
    _months = new DiscreteDistribution<int>();
    _hours = new DiscreteDistribution<int>();

    InitDays();
    InitMonths();
    InitHours();
}
/// <summary>
/// Wraps a discrete and a continuous statistic describing the same test outcome,
/// together with the sidedness of the test.
/// </summary>
internal TestResult(
    string discreteName, int discreteValue, DiscreteDistribution discreteDistribution,
    string continuousName, double continuousValue, ContinuousDistribution continuousDistribution,
    TestType type)
{
    discreteStatistic = new DiscreteTestStatistic(discreteName, discreteValue, discreteDistribution);
    continuousStatistic = new ContinuousTestStatistic(continuousName, continuousValue, continuousDistribution);
    this.type = type;
}
/**
 * <summary> Predicts the class label of the given instance by letting every tree in the
 * forest vote, then returning the label with the highest vote count.</summary>
 *
 * <param name="instance">Instance to make prediction for.</param>
 * <returns>The most frequently predicted label across the forest.</returns>
 */
public override string Predict(Instance.Instance instance)
{
    var votes = new DiscreteDistribution();
    foreach (var decisionTree in _forest)
    {
        votes.AddItem(decisionTree.Predict(instance));
    }

    return votes.GetMaxItem();
}
/// <summary>
/// Selects the mutation-type distribution appropriate for the given parent genome.
/// </summary>
private DiscreteDistribution GetMutationTypeDistribution(NeatGenome <T> parent)
{
    // A genome with fewer than two connections must not undergo destructive mutations,
    // otherwise we could produce a genome with no connections at all.
    if (parent.ConnectionGenes.Length < 2)
    {
        return _mutationTypeDistributionsCurrent.MutationTypeDistributionNonDestructive;
    }

    return _mutationTypeDistributionsCurrent.MutationTypeDistribution;
}
/**
 * <summary> Builds the frequency distribution of the class labels over all instances
 * in this list.</summary>
 *
 * <returns>Distribution of the class labels.</returns>
 */
public DiscreteDistribution ClassDistribution()
{
    var labelCounts = new DiscreteDistribution();
    foreach (var item in _list)
    {
        labelCounts.AddItem(item.GetClassLabel());
    }

    return labelCounts;
}
/// <summary>
/// Builds the shared fixture distribution: "item1" x3, "item2" x2, "item3" x1
/// (inserted in the same order as the original hand-written calls).
/// </summary>
public void Setup()
{
    smallDistribution = new DiscreteDistribution();
    foreach (var item in new[] { "item1", "item2", "item3", "item1", "item2", "item1" })
    {
        smallDistribution.AddItem(item);
    }
}
/// <summary>
/// Collects one vote per tree in the forest for the given instance, then returns the
/// votes normalised into a label → probability map.
/// </summary>
public override Dictionary <string, double> PredictProbability(Instance.Instance instance)
{
    var votes = new DiscreteDistribution();
    foreach (var decisionTree in _forest)
    {
        votes.AddItem(decisionTree.Predict(instance));
    }

    return votes.GetProbabilityDistribution();
}
/// <summary>
/// Trains a 2-state HMM with Baum-Welch (non-normalized likelihood) on the observation
/// sequence A B B A and pins the expected trained parameters as regression values.
/// </summary>
public void Run_DefaultModelAndObservagtions_TrainedModel()
{
    // Initial state distribution. s = 0, t = 1
    var startDistribution = new[] { 0.85, 0.15 };

    // State transition probability matrix.
    var tpm = new double[2][];
    tpm[0] = new[] { 0.3, 0.7 };
    tpm[1] = new[] { 0.1, 0.9 };

    // Training sequence: A B B A.
    var observations = new List <IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 0 }, "A")
    };

    // Per-state discrete emission distributions over the symbol values {0, 1}.
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

    // Symbol alphabet used by the trainer.
    var symbols = new List <IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B")
    };

    var model = HiddenMarkovModelFactory.GetModel(new ModelCreationParameters <DiscreteDistribution>()
    {
        Pi = startDistribution,
        TransitionProbabilityMatrix = tpm,
        Emissions = emissions
    }); //new HiddenMarkovModel(startDistribution, tpm, emissions) { LogNormalized = false };
    model.Normalized = false;

    var algo = new BaumWelch(observations, model, symbols);
    var res = algo.Run(100, LikelihoodTolerance);

    // Expected trained parameters (regression values for this exact input).
    Assert.AreEqual(0.8258482510939813, res.Pi[0]);
    Assert.AreEqual(0.17415174890601867, res.Pi[1]);
    Assert.AreEqual(0.330050127737348, res.TransitionProbabilityMatrix[0][0]);
    Assert.AreEqual(0.669949872262652, res.TransitionProbabilityMatrix[0][1]);
    Assert.AreEqual(0.098712289730350428, res.TransitionProbabilityMatrix[1][0]);
    Assert.AreEqual(0.90128771026964949, res.TransitionProbabilityMatrix[1][1]);
    Assert.AreEqual(0.65845050146912831, res.Emission[0].ProbabilityMassFunction(0));
    Assert.AreEqual(0.34154949853087169, res.Emission[0].ProbabilityMassFunction(1));
    Assert.AreEqual(0.4122484947955643, res.Emission[1].ProbabilityMassFunction(0));
    Assert.AreEqual(0.58775150520443575, res.Emission[1].ProbabilityMassFunction(1));
    Assert.AreEqual(0.0770055392812707, res.Likelihood);

    // Sanity checks: every trained distribution must still sum to one.
    Assert.AreEqual(1d, res.Pi.Sum());
    Assert.AreEqual(1d, res.TransitionProbabilityMatrix[0].Sum());
    Assert.AreEqual(1d, Math.Round(res.TransitionProbabilityMatrix[1].Sum(), 5));
    Assert.AreEqual(1d, res.Emission[0].ProbabilityMassFunction(0) + res.Emission[0].ProbabilityMassFunction(1));
    Assert.AreEqual(1d, res.Emission[1].ProbabilityMassFunction(0) + res.Emission[1].ProbabilityMassFunction(1));
}
/// <summary>
/// Same setup as the non-normalized variant, but trains with log-normalized likelihood;
/// the trained parameters should match to 5 decimal places and the likelihood is the
/// log value instead of the raw probability.
/// </summary>
public void Run_DefaultModelAndObservagtionsAndNormalized_TrainedMode()
{
    // Initial state distribution.
    var startDistribution = new[] { 0.85, 0.15 };

    // State transition probability matrix.
    var tpm = new double[2][];
    tpm[0] = new[] { 0.3, 0.7 };
    tpm[1] = new[] { 0.1, 0.9 };

    // Training sequence: A B B A.
    var observations = new List <IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 0 }, "A")
    };

    // Per-state discrete emission distributions over the symbol values {0, 1}.
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

    // Symbol alphabet used by the trainer.
    var symbols = new List <IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B")
    };

    var model = HiddenMarkovModelFactory.GetModel(new ModelCreationParameters <DiscreteDistribution>()
    {
        Pi = startDistribution,
        TransitionProbabilityMatrix = tpm,
        Emissions = emissions
    }); //new HiddenMarkovModel(startDistribution, tpm, emissions) { LogNormalized = true};
    model.Normalized = true;

    var algo = new BaumWelch(observations, model, symbols);
    var res = algo.Run(100, LikelihoodTolerance);

    // Expected trained parameters, rounded to 5 places (regression values).
    Assert.AreEqual(0.82585, Math.Round(res.Pi[0], 5));
    Assert.AreEqual(0.17415, Math.Round(res.Pi[1], 5));
    Assert.AreEqual(0.33005, Math.Round(res.TransitionProbabilityMatrix[0][0], 5));
    Assert.AreEqual(0.66995, Math.Round(res.TransitionProbabilityMatrix[0][1], 5));
    Assert.AreEqual(0.09871, Math.Round(res.TransitionProbabilityMatrix[1][0], 5));
    Assert.AreEqual(0.90129, Math.Round(res.TransitionProbabilityMatrix[1][1], 5));
    Assert.AreEqual(0.65845, Math.Round(res.Emission[0].ProbabilityMassFunction(0), 5));
    Assert.AreEqual(0.34155, Math.Round(res.Emission[0].ProbabilityMassFunction(1), 5));
    Assert.AreEqual(0.41225, Math.Round(res.Emission[1].ProbabilityMassFunction(0), 5));
    Assert.AreEqual(0.58775, Math.Round(res.Emission[1].ProbabilityMassFunction(1), 5));
    // Log-likelihood in the normalized mode.
    Assert.AreEqual(-2.5638779209981162, res.Likelihood);

    // Sanity checks: every trained distribution must still sum to one.
    Assert.AreEqual(1d, Math.Round(res.Pi.Sum(), 5));
    Assert.AreEqual(1d, Math.Round(res.TransitionProbabilityMatrix[0].Sum(), 5));
    Assert.AreEqual(1d, Math.Round(res.TransitionProbabilityMatrix[1].Sum(), 5));
    Assert.AreEqual(1d, Math.Round(res.Emission[0].ProbabilityMassFunction(0) + res.Emission[0].ProbabilityMassFunction(1), 5));
    Assert.AreEqual(1d, Math.Round(res.Emission[1].ProbabilityMassFunction(0) + res.Emission[1].ProbabilityMassFunction(1), 5));
}
/// <summary>
/// Verifies that the exact (discrete) null distributions claimed for the Spearman ρ and
/// Kendall τ association statistics agree with the empirical statistic counts, using a
/// χ² goodness-of-fit check over many simulated small samples.
/// </summary>
public void BivariateAssociationDiscreteNullDistribution()
{
    Random rng = new Random(1);

    // Pick very non-normal distributions for our non-parameteric tests
    ContinuousDistribution xd = new FrechetDistribution(1.0);
    ContinuousDistribution yd = new CauchyDistribution();

    // Pick small sample sizes to get exact distributions
    foreach (int n in TestUtilities.GenerateIntegerValues(4, 24, 4))
    {
        // Do a bunch of test runs, recording reported statistic for each.
        List <int> spearmanStatistics = new List <int>();
        List <int> kendallStatistics = new List <int>();
        DiscreteDistribution spearmanDistribution = null;
        DiscreteDistribution kendallDistribution = null;

        for (int i = 0; i < 512; i++)
        {
            // Generate an n-point bivariate sample with independent x and y.
            List <double> x = new List <double>();
            List <double> y = new List <double>();
            for (int j = 0; j < n; j++)
            {
                x.Add(xd.GetRandomValue(rng));
                y.Add(yd.GetRandomValue(rng));
            }

            // UnderlyingStatistic is non-null only when the exact discrete form is reported.
            DiscreteTestStatistic spearman = Bivariate.SpearmanRhoTest(x, y).UnderlyingStatistic;
            if (spearman != null)
            {
                spearmanStatistics.Add(spearman.Value);
                spearmanDistribution = spearman.Distribution;
            }
            DiscreteTestStatistic kendall = Bivariate.KendallTauTest(x, y).UnderlyingStatistic;
            if (kendall != null)
            {
                kendallStatistics.Add(kendall.Value);
                kendallDistribution = kendall.Distribution;
            }
        }

        // Test whether statistics are actually distributed as claimed
        if (spearmanDistribution != null)
        {
            TestResult spearmanChiSquared = spearmanStatistics.ChiSquaredTest(spearmanDistribution);
            Assert.IsTrue(spearmanChiSquared.Probability > 0.01);
        }
        if (kendallDistribution != null)
        {
            TestResult kendallChiSquared = kendallStatistics.ChiSquaredTest(kendallDistribution);
            Assert.IsTrue(kendallChiSquared.Probability > 0.01);
        }
    }
}
/// <summary>
/// Select a subset of items from a superset of a given size.
/// </summary>
/// <param name="supersetCount">The size of the superset to select from.</param>
/// <param name="rng">Random source.</param>
/// <returns>An array of indexes that are the selected items.</returns>
public int[] SelectSubset(int supersetCount, IRandomSource rng)
{
    // Note. Ideally we'd return a sorted list of indexes to improve performance of the code that consumes them,
    // however, the sampling process inherently produces samples in randomized order, thus the decision of whether
    // to sort or not depends on the cost to the code using the samples. I.e. don't sort here!
    int sampleCount = (int)NumericsUtils.StochasticRound(supersetCount * _selectionProportion, rng);
    var selectedIdxs = new int[sampleCount];
    DiscreteDistribution.SampleUniformWithoutReplacement(rng, supersetCount, selectedIdxs);
    return selectedIdxs;
}
/// <summary>
/// Creates the offspring for a new generation: for each species, produces its allotted
/// asexual and sexual offspring, sampling genomes (and, for inter-species mating, other
/// species) from the supplied selection distributions.
/// </summary>
/// <param name="speciesArr">Species array.</param>
/// <param name="speciesDist">Distribution for sampling a species.</param>
/// <param name="genomeDistArr">Per-species distributions for sampling genomes.</param>
/// <param name="interspeciesMatingProportion">Proportion of sexual reproduction that mates across species.</param>
/// <param name="rng">Random source.</param>
/// <returns>The list of all offspring genomes for the new generation.</returns>
private List <NeatGenome <T> > CreateOffspring(
    Species <T>[] speciesArr,
    DiscreteDistribution speciesDist,
    DiscreteDistribution[] genomeDistArr,
    double interspeciesMatingProportion,
    IRandomSource rng)
{
    // Calc total number of offspring to produce for the population as a whole.
    int offspringCount = speciesArr.Sum(x => x.Stats.OffspringCount);

    // Create an empty list to add the offspring to (with preallocated storage).
    var offspringList = new List <NeatGenome <T> >(offspringCount);

    // Loop the species.
    for (int speciesIdx = 0; speciesIdx < speciesArr.Length; speciesIdx++)
    {
        // Get the current species.
        Species <T> species = speciesArr[speciesIdx];

        // Get the DiscreteDistribution for genome selection within the current species.
        DiscreteDistribution genomeDist = genomeDistArr[speciesIdx];

        // Determine how many offspring to create through asexual and sexual reproduction.
        SpeciesStats stats = species.Stats;
        int offspringCountAsexual = stats.OffspringAsexualCount;
        int offspringCountSexual = stats.OffspringSexualCount;

        // Special case: A species with a single genome marked for selection, cannot perform intra-species sexual reproduction.
        if (species.Stats.SelectionSizeInt == 1)
        {
            // Note. here we assign all the sexual reproduction allocation to asexual reproduction. In principle
            // we could still perform inter species sexual reproduction, but that complicates the code further
            // for minimal gain.
            offspringCountAsexual += offspringCountSexual;
            offspringCountSexual = 0;
        }

        // Create a copy of speciesDist with the current species removed from the set of possibilities.
        // Note. The remaining probabilities are normalised to sum to one.
        DiscreteDistribution speciesDistUpdated = speciesDist.RemoveOutcome(speciesIdx);

        // Create offspring from the current species.
        CreateSpeciesOffspringAsexual(
            species, genomeDist, offspringCountAsexual, offspringList, rng);

        CreateSpeciesOffspringSexual(
            speciesArr, species, speciesDistUpdated,
            genomeDistArr, genomeDist,
            offspringCountSexual, offspringList,
            interspeciesMatingProportion, rng);
    }

    return offspringList;
}
/// <summary>
/// Serialises a discrete distribution into a JSON array of {value, mass} objects,
/// one per entry in the distribution's mass table.
/// </summary>
public static JArray ToJObject(this DiscreteDistribution d)
{
    var entries = d.Masses.Select(
        pair => new JObject(
            new JProperty("value", pair.Key),
            new JProperty("mass", pair.Value)));

    return new JArray(entries);
}
/// <summary>
/// Attempts one mutation of a sampled type on the parent genome. Returns the child
/// genome, or null when the sampled mutation type could not produce one; in the null
/// case the failed type is removed from <paramref name="mutationTypeDist"/> so the
/// caller can retry with the remaining types.
/// </summary>
private NeatGenome <T> Create(
    NeatGenome <T> parent,
    IRandomSource rng,
    ref DiscreteDistribution mutationTypeDist)
{
    // Determine the type of mutation to attempt.
    MutationType mutationTypeId = (MutationType)DiscreteDistribution.Sample(rng, mutationTypeDist);

    // Attempt to create a child genome using the selected mutation type.
    NeatGenome <T> childGenome = null;

    switch (mutationTypeId)
    {
        // Note. These subroutines will return null if they cannot produce a child genome,
        // e.g. 'delete connection' will not succeed if there is only one connection.
        case MutationType.ConnectionWeight:
            childGenome = _mutateWeightsStrategy.CreateChildGenome(parent, rng);
            break;
        case MutationType.AddNode:
            // FIXME: Reinstate.
            childGenome = _addNodeStrategy.CreateChildGenome(parent, rng);
            break;
        case MutationType.AddConnection:
            childGenome = _addConnectionStrategy.CreateChildGenome(parent, rng);
            break;
        case MutationType.DeleteConnection:
            childGenome = _deleteConnectionStrategy.CreateChildGenome(parent, rng);
            break;
        default:
            throw new Exception($"Unexpected mutationTypeId [{mutationTypeId}].");
    }

    if (null != childGenome)
    {
        return childGenome;
    }

    // The chosen mutation type was not possible; remove that type from the set of possible types.
    mutationTypeDist = mutationTypeDist.RemoveOutcome((int)mutationTypeId);

    // Sanity test.
    if (0 == mutationTypeDist.Probabilities.Length)
    {
        // This shouldn't be possible, hence this is an exceptional circumstance.
        // Note. Connection weight and 'add node' mutations should always be possible, because there should
        // always be at least one connection.
        throw new Exception("All types of genome mutation failed.");
    }
    return null;
}
/// <summary>
/// With 1000 distinct items added once each, every item's probability must be
/// exactly 1/1000 = 0.001.
/// </summary>
public void TestGetProbability1()
{
    var rng = new Random();
    var distribution = new DiscreteDistribution();
    for (var i = 0; i < 1000; i++)
    {
        distribution.AddItem("" + i);
    }

    Assert.AreEqual(0.001, distribution.GetProbability("" + rng.Next(1000)), 0.0);
}
/// <summary>
/// Create instances of <see cref="DiscreteDistribution"/> for sampling species, and for genomes within each given species.
/// </summary>
/// <param name="speciesArr">Species array.</param>
/// <param name="speciesDist">Returns a new instance of <see cref="DiscreteDistribution"/> for sampling from the species array.</param>
/// <param name="genomeDistArr">Returns an array of <see cref="DiscreteDistribution"/>, for sampling from genomes within each species.</param>
/// <param name="nonEmptySpeciesCount">Returns the number of species that contain at least one genome.</param>
public static void CreateSelectionDistributions(
    Species <T>[] speciesArr,
    out DiscreteDistribution speciesDist,
    out DiscreteDistribution?[] genomeDistArr,
    out int nonEmptySpeciesCount)
{
    // Distribution over the species array (also reports the non-empty species count).
    speciesDist = CreateSpeciesSelectionDistribution(speciesArr, out nonEmptySpeciesCount);

    // One genome-selection distribution per species.
    genomeDistArr = CreateIntraSpeciesGenomeSelectionDistributions(speciesArr);
}
/// <summary>
/// After adding 1000 items (with arbitrary repetition), the distribution's total
/// count must be exactly 1000.
/// </summary>
public void TestGetSum2()
{
    var rng = new Random();
    var distribution = new DiscreteDistribution();
    for (var i = 0; i < 1000; i++)
    {
        distribution.AddItem("" + rng.Next(1000));
    }

    Assert.AreEqual(1000, distribution.GetSum(), 0.0);
}
/**
 * <summary> Training algorithm for Naive Bayes algorithm with a discrete data set.</summary>
 * <param name="priorDistribution">Probability distribution of classes P(C_i)</param>
 * <param name="classLists">Instances are divided into K lists, where each list contains only instances from a single class</param>
 */
private void TrainDiscreteVersion(DiscreteDistribution priorDistribution, Partition classLists)
{
    // Map each class label to the per-attribute value distributions of its instances.
    var attributeDistributions = new Dictionary <string, List <DiscreteDistribution> >();
    for (var k = 0; k < classLists.Size(); k++)
    {
        var label = ((InstanceListOfSameClass)classLists.Get(k)).GetClassLabel();
        attributeDistributions[label] = classLists.Get(k).AllAttributesDistribution();
    }

    model = new NaiveBayesModel(priorDistribution, attributeDistributions);
}
/// <summary>
/// Draws <paramref name="count"/> samples from the discrete distribution defined by
/// <paramref name="probs"/>, mapping each sampled bin index into [0, 1) by dividing
/// by the number of bins.
/// </summary>
public static float[] randoms(float[] probs, int count)
{
    int binCount = probs.Length;
    var distribution = new DiscreteDistribution(probs);

    var samples = new float[count];
    for (int k = 0; k < count; k++)
    {
        samples[k] = _engine.Random(distribution) / (float)binCount;
    }

    return samples;
}
/// <summary>
/// Subtracting a distribution of {item1 x2, item2 x1} from the fixture
/// ({item1 x3, item2 x2, item3 x1}) must leave exactly one of each item.
/// </summary>
public void TestRemoveDistribution()
{
    var removed = new DiscreteDistribution();
    foreach (var item in new[] { "item1", "item1", "item2" })
    {
        removed.AddItem(item);
    }

    smallDistribution.RemoveDistribution(removed);
    Assert.AreEqual(1, smallDistribution.GetCount("item1"));
    Assert.AreEqual(1, smallDistribution.GetCount("item2"));
    Assert.AreEqual(1, smallDistribution.GetCount("item3"));

    // Restore the shared fixture so subsequent tests see the original counts.
    smallDistribution.AddDistribution(removed);
}
/**
 * <summary> Builds the class-label distribution restricted to instances whose discrete
 * indexed attribute at the given index has the given value.</summary>
 *
 * <param name="attributeIndex">Index of the attribute.</param>
 * <param name="attributeValue">Value of the attribute.</param>
 * <returns>Distribution of the class labels.</returns>
 */
public DiscreteDistribution DiscreteIndexedAttributeClassDistribution(int attributeIndex, int attributeValue)
{
    var labelCounts = new DiscreteDistribution();
    foreach (var item in _list)
    {
        var attribute = (DiscreteIndexedAttribute)item.GetAttribute(attributeIndex);
        if (attribute.GetIndex() == attributeValue)
        {
            labelCounts.AddItem(item.GetClassLabel());
        }
    }

    return labelCounts;
}
/// <summary>
/// Verifies that the Wilcoxon signed-rank statistic follows its claimed null
/// distribution, covering both the small-n exact discrete form and the large-n
/// continuous approximation.
/// </summary>
public void WilcoxonNullDistribution()
{
    // Pick a very non-normal distribution
    ContinuousDistribution d = new ExponentialDistribution();
    Random rng = new Random(271828);
    foreach (int n in TestUtilities.GenerateIntegerValues(4, 64, 4))
    {
        Sample wContinuousSample = new Sample();
        ContinuousDistribution wContinuousDistribution = null;

        List <int> wDiscreteSample = new List <int>();
        DiscreteDistribution wDiscreteDistribution = null;

        for (int i = 0; i < 256; i++)
        {
            // Generate an n-point paired sample with no true difference between x and y.
            BivariateSample sample = new BivariateSample();
            for (int j = 0; j < n; j++)
            {
                double x = d.GetRandomValue(rng);
                double y = d.GetRandomValue(rng);
                sample.Add(x, y);
            }
            TestResult wilcoxon = sample.WilcoxonSignedRankTest();
            // UnderlyingStatistic is non-null when the exact discrete form is reported;
            // otherwise the test falls back to the continuous approximation.
            if (wilcoxon.UnderlyingStatistic != null)
            {
                wDiscreteSample.Add(wilcoxon.UnderlyingStatistic.Value);
                wDiscreteDistribution = wilcoxon.UnderlyingStatistic.Distribution;
            }
            else
            {
                wContinuousSample.Add(wilcoxon.Statistic.Value);
                wContinuousDistribution = wilcoxon.Statistic.Distribution;
            }
        }

        if (wDiscreteDistribution != null)
        {
            // Exact case: compare statistic counts to the claimed discrete distribution.
            TestResult chi2 = wDiscreteSample.ChiSquaredTest(wDiscreteDistribution);
            Assert.IsTrue(chi2.Probability > 0.01);
        }
        else
        {
            // Approximate case: KS test plus population mean/stddev confidence-interval checks.
            TestResult ks = wContinuousSample.KolmogorovSmirnovTest(wContinuousDistribution);
            Assert.IsTrue(ks.Probability > 0.01);
            Assert.IsTrue(wContinuousSample.PopulationMean.ConfidenceInterval(0.99).ClosedContains(wContinuousDistribution.Mean));
            Assert.IsTrue(wContinuousSample.PopulationStandardDeviation.ConfidenceInterval(0.99).ClosedContains(wContinuousDistribution.StandardDeviation));
        }
    }
}
/**
 * <summary> The attributeDistribution method takes an index as an input and if the attribute of the instance at given index is
 * discrete, it returns the distribution of the attributes of that instance. For an empty list, or a non-discrete
 * attribute, an empty distribution is returned.</summary>
 *
 * <param name="index">Index of the attribute.</param>
 * <returns>Distribution of the attribute.</returns>
 */
public DiscreteDistribution AttributeDistribution(int index)
{
    var distribution = new DiscreteDistribution();
    // Guard: an empty list has no first instance to probe for the attribute type,
    // so indexing _list[0] unguarded would throw.
    if (_list.Count > 0 && _list[0].GetAttribute(index) is DiscreteAttribute)
    {
        foreach (var instance in _list)
        {
            distribution.AddItem(((DiscreteAttribute)instance.GetAttribute(index)).GetValue());
        }
    }

    return distribution;
}
/// <summary>
/// With 1000 distinct items added once each, Laplace smoothing gives probability
/// (1+1)/(1000+1001) = 2/2001 for a seen item and 1/2001 for an unseen one.
/// </summary>
public void TestGetProbabilityLaplaceSmoothing1()
{
    var rng = new Random();
    var distribution = new DiscreteDistribution();
    for (var i = 0; i < 1000; i++)
    {
        distribution.AddItem("" + i);
    }

    Assert.AreEqual(2.0 / 2001, distribution.GetProbabilityLaplaceSmoothing("" + rng.Next(1000)), 0.0);
    Assert.AreEqual(1.0 / 2001, distribution.GetProbabilityLaplaceSmoothing("item0"), 0.0);
}
/// <summary>
/// Performs a χ<sup>2</sup> test comparing the histogram to the given distribution.
/// </summary>
/// <param name="distribution">The distribution against which to test the histogram.</param>
/// <returns>The test result.</returns>
public TestResult ChiSquaredTest(DiscreteDistribution distribution)
{
    if (distribution == null) throw new ArgumentNullException(nameof(distribution));

    double chi2 = 0.0;
    int dof = 0;

    // Merged-bin accumulators: consecutive bins are pooled until the pooled expected
    // count exceeds 4, so the χ² approximation stays valid for sparse bins. The pooled
    // bin is only folded into the sum when the NEXT pool is ready, so that any trailing
    // partial pool can be merged into the last complete one below.
    int lastObservedCounts = 0;
    double lastExpectedCounts = 0.0;

    int observedCounts = 0;
    double expectedCounts = 0.0;
    for (int i = 0; i < storage.Count; i++)
    {
        observedCounts += storage.GetCounts(i + 1);
        expectedCounts += distribution.ProbabilityMass(i) * storage.Total;
        if (expectedCounts > 4.0)
        {
            // Flush the previous pooled bin into the χ² sum; one degree of freedom per bin.
            if (lastExpectedCounts > 0.0)
            {
                chi2 += MoreMath.Sqr(lastObservedCounts - lastExpectedCounts) / lastExpectedCounts;
                dof++;
            }
            lastObservedCounts = observedCounts;
            lastExpectedCounts = expectedCounts;
            observedCounts = 0;
            expectedCounts = 0.0;
        }
    }
    // Fold any remaining partial pool into the last complete bin, then flush it.
    lastObservedCounts += observedCounts;
    lastExpectedCounts += expectedCounts;
    if (lastExpectedCounts > 0.0)
    {
        chi2 += MoreMath.Sqr(lastObservedCounts - lastExpectedCounts) / lastExpectedCounts;
    }

    ContinuousDistribution nullDistribution = new ChiSquaredDistribution(dof);
    return (new TestResult("χ²", chi2, nullDistribution, TestType.RightTailed));
}
/// <summary>
/// Unity lifecycle hook: on boss levels spawns the boss; otherwise spawns a level-scaled
/// wave of randomly positioned, randomly typed enemies, then places the level exit.
/// </summary>
private void Awake()
{
    // Boss levels replace the normal enemy wave entirely.
    if ((GameInfos.level + 1) % m_bossLevel == 0)
    {
        spawnBoss();
        return;
    }

    var rand = new StaticRandomGenerator <DefaultRandomGenerator>();

    // Wave size: base count plus a uniform draw between the per-level min/max bounds.
    int nb = m_enemyCountBase + new UniformIntDistribution((int)(m_minEnemyCountPerLevel * GameInfos.level), (int)(m_maxEnemyCountPerLevel * GameInfos.level) + 1).Next(rand);
    for (int i = 0; i < nb; i++)
    {
        // Rejection-sample (up to 10 attempts) a spawn position outside the
        // "don't spawn" radius around the origin.
        Vector2 pos = Vector2.zero;
        for (int j = 0; j < 10; j++)
        {
            pos = new UniformVector2SquareDistribution(-m_spawnRadius, m_spawnRadius, -m_spawnRadius, m_spawnRadius).Next(rand);
            if (pos.sqrMagnitude < m_dontSpawnRadius * m_dontSpawnRadius)
            {
                continue;
            }
            break;
        }

        // Pick an enemy type; each type's weight grows with the level by its levelWeight.
        List <float> weights = new List <float>();
        foreach (var en in m_enemy)
        {
            weights.Add(en.baseWeight + en.levelWeight * GameInfos.level);
        }
        var index = new DiscreteDistribution(weights).Next(rand);
        var e = m_enemy[index];

        var mob = Instantiate(e.enemyPrefab);
        mob.transform.position = new Vector3(pos.x, pos.y, -1);

        // Apply level-scaled stat modifiers to the spawned ship.
        Modifier m = new Modifier();
        m.life = (int)(e.levelLife * GameInfos.level);
        m.speed = (int)(e.levelSpeed * GameInfos.level);
        m.fireRate = (int)(e.levelFireRate * GameInfos.level);
        m.power = (int)(e.levelPower * GameInfos.level);
        var s = mob.GetComponent <ShipLogic>();
        s.modifiers.Add(m);
        s.updateModifierStats();
    }

    spawnExit();
}
/// <summary>
/// Builds a roulette-wheel distribution over the configured weight mutations, where
/// each mutation's probability is proportional to its RouletteWheelShare and its label
/// is its index in the mutation list.
/// </summary>
internal WeightMutationDistribution(WeightMutations weightMutations, IRandomSource rng)
{
    _rng = rng;
    _weightMutations = weightMutations.Mutations;

    int count = _weightMutations.Count;
    var probabilities = new double[count];
    var labels = new int[count];
    for (int idx = 0; idx < count; idx++)
    {
        probabilities[idx] = _weightMutations[idx].RouletteWheelShare;
        labels[idx] = idx;
    }

    _distribution = new DiscreteDistribution(probabilities, labels);
}
/// <summary>
/// Create a new child genome from a given parent genome.
/// </summary>
/// <param name="parent">The parent genome.</param>
/// <param name="rng">Random source.</param>
/// <returns>A new child genome.</returns>
public NeatGenome <T> CreateChildGenome(NeatGenome <T> parent, IRandomSource rng)
{
    // Distribution over the mutation types applicable to this parent.
    DiscreteDistribution mutationTypeDist = GetMutationTypeDistribution(parent);

    // Create() returns null when the sampled mutation type cannot be applied, and in
    // that case removes the failed type from the distribution — so retrying here
    // eventually yields a child.
    while (true)
    {
        NeatGenome <T>?child = Create(parent, rng, ref mutationTypeDist);
        if (child is not null)
        {
            return child;
        }
    }
}
/// <summary>
/// Deserialises a JSON array of {value, mass} objects back into a discrete distribution.
/// </summary>
public static DiscreteDistribution ToDiscreteDistribution(this JToken json0)
{
    var array = (JArray)json0;

    var masses = new List<Tuple<float, double>>();
    foreach (JObject entry in array)
    {
        var value = entry.Property("value").Value.Value<float>();
        var mass = entry.Property("mass").Value.Value<double>();
        masses.Add(Tuple.Create(value, mass));
    }

    return new DiscreteDistribution(masses);
}
/// <summary>
/// Builds one genome-selection distribution per species; within each species, fitter
/// genomes get a higher probability of selection.
/// </summary>
private static DiscreteDistribution[] CreateIntraSpeciesGenomeSelectionDistributions(
    Species <T>[] speciesArr)
{
    var distArr = new DiscreteDistribution[speciesArr.Length];
    for (int idx = 0; idx < distArr.Length; idx++)
    {
        distArr[idx] = CreateIntraSpeciesGenomeSelectionDistribution(speciesArr[idx]);
    }

    return distArr;
}
/**
 * <summary> Training algorithm for Naive Bayes algorithm with a continuous data set.</summary>
 *
 * <param name="priorDistribution">Probability distribution of classes P(C_i)</param>
 * <param name="classLists"> Instances are divided into K lists, where each list contains only instances from a single class</param>
 */
private void TrainContinuousVersion(DiscreteDistribution priorDistribution, Partition classLists)
{
    // Per-class mean and standard-deviation vectors over the attributes.
    var classMeans = new Dictionary <string, Vector>();
    var classDeviations = new Dictionary <string, Vector>();
    for (var k = 0; k < classLists.Size(); k++)
    {
        var label = ((InstanceListOfSameClass)classLists.Get(k)).GetClassLabel();
        classMeans[label] = classLists.Get(k).Average().ToVector();
        classDeviations[label] = classLists.Get(k).StandardDeviation().ToVector();
    }

    model = new NaiveBayesModel(priorDistribution, classMeans, classDeviations);
}
// Dissimilarity between two distributions measured as their error sum.
protected double MeasureDissimilarityES(DiscreteDistribution d1, DiscreteDistribution d2)
    => d1.ErrorSum(d2);
/// <summary>
/// Initializes a new instance of the <see cref="iohmma.MarkovProcessBase"/> class with the given number of hidden states.
/// </summary>
/// <param name="nhidden">The number of hidden states for the initialized hidden Markov process.</param>
public MarkovProcessBase(int nhidden)
{
    // Initial hidden-state distribution π over the nhidden states.
    this.Pi = new DiscreteDistribution(nhidden);
}
/// <summary>
/// Initializes a new shim that represents a discrete distribution as a continuous distribution.
/// </summary>
/// <param name="distribution">The discrete distribution to represent.</param>
/// <exception cref="ArgumentNullException"><paramref name="distribution"/> is null.</exception>
public DiscreteAsContinuousDistribution(DiscreteDistribution distribution)
{
    // nameof keeps the parameter name refactor-safe (matches usage elsewhere in this library).
    if (distribution == null) throw new ArgumentNullException(nameof(distribution));
    this.d = distribution;
    // Default continuous support: span the discrete distribution's own support.
    this.xSupport = Interval.FromEndpoints(d.Minimum, d.Maximum);
}
/// <summary>
/// Initializes a new shim that represents a discrete distribution as a continuous distribution.
/// </summary>
/// <param name="distribution">The discrete distribution to represent.</param>
/// <param name="support">The continuous support interval into which the discrete support interval is to be mapped.</param>
/// <exception cref="ArgumentNullException"><paramref name="distribution"/> is null.</exception>
public DiscreteAsContinuousDistribution(DiscreteDistribution distribution, Interval support)
{
    // nameof keeps the parameter name refactor-safe (matches usage elsewhere in this library).
    if (distribution == null) throw new ArgumentNullException(nameof(distribution));
    this.d = distribution;
    this.xSupport = support;
}
// Dissimilarity as symmetrised KL divergence: KL(d1‖d2) + KL(d2‖d1).
protected double MeasureDissimilarityKL(DiscreteDistribution d1, DiscreteDistribution d2)
    => d1.KLDivergence(d2) + d2.KLDivergence(d1);
/// <summary>
/// Round-trips moment descriptions (cumulants → central/raw moments, central moments →
/// cumulants/raw moments) for a set of discrete distributions, checking the MomentMath
/// conversions against each distribution's directly computed values.
/// </summary>
public void TestCumulant2()
{
    // Number of moment orders (0..n-1) to check.
    int n = 8;
    //Distribution[] set = new Distribution[] { new UniformDistribution(), new ExponentialDistribution(2.0), new GammaDistribution(3.0), new LogisticDistribution(-4.0, 3.0), new NormalDistribution(-1.0, 2.0), new WaldDistribution(1.0, 2.0) };
    DiscreteDistribution[] set = new DiscreteDistribution[] { new PoissonDistribution(0.25), new DiscreteUniformDistribution(0, 10) };
    foreach (DiscreteDistribution d in set)
    {
        //foreach (Distribution d in set) {
        Console.WriteLine(d.GetType().Name);

        // From cumulants to central and raw moments
        double[] inK = new double[n];
        for (int r = 0; r < n; r++) inK[r] = d.Cumulant(r);

        double[] outC = MomentMath.CumulantToCentral(inK);
        for (int r = 0; r < n; r++) Console.WriteLine("r={0} K={1} -> C={2} v C={3}", r, inK[r], d.MomentAboutMean(r), outC[r]);
        // Converted central moments must match the directly computed ones to ~1 ulp scale.
        for (int r = 0; r < n; r++) Assert.IsTrue(Math.Abs(outC[r] - d.MomentAboutMean(r)) <= 1.0E-14 * Math.Abs(outC[r]));

        double[] outM = MomentMath.CumulantToRaw(inK);
        for (int r = 0; r < n; r++) Assert.IsTrue(Math.Abs(outM[r] - d.Moment(r)) <= 1.0E-14 * Math.Abs(outM[r]));

        // From central moments to cumulants and raw moments
        double[] inC = new double[n];
        for (int r = 0; r < n; r++) inC[r] = d.MomentAboutMean(r);

        double[] outK = MomentMath.CentralToCumulant(d.Mean, inC);
        for (int r = 0; r < n; r++) Console.WriteLine("r={0} C={1:R} -> K={2:R} v K={3:R}", r, inC[r], outK[r], d.Cumulant(r));
        // moved to 10E-13 due to K_4 of Logistic; why is there even that much disagreement?
        for (int r = 0; r < n; r++) Assert.IsTrue(Math.Abs(outK[r] - d.Cumulant(r)) <= 1.0E-13 * Math.Abs(outK[r]));

        double[] outM2 = MomentMath.CentralToRaw(d.Mean, inC);
        for (int r = 0; r < n; r++) Assert.IsTrue(Math.Abs(outM2[r] - d.Moment(r)) <= 1.0E-14 * Math.Abs(outM2[r]));

        // From raw moments to central moments and cumulants
        // This is unstable.
    }
}