public void CloneTest()
{
    var source = new SystemRandomSource(new Random());
    var clone = source.Clone();

    // Expect the clone to be a usable instance rather than null.
    Assert.IsNotNull(clone);
}
public void TestRandomTupleArraySorting()
{
    const int len = 0x1 << 10;
    SystemRandomSource random = new SystemRandomSource();

    int[] keys = new int[len];
    int[] items = new int[len];
    int[] keysCopy = new int[len];

    for (int i = 0; i < keys.Length; i++)
    {
        keys[i] = random.Next();
        keysCopy[i] = keys[i];
        items[i] = -keys[i];
    }

    Sorting.Sort(keys, items);

    for (int i = 1; i < keys.Length; i++)
    {
        Assert.That(keys[i] >= keys[i - 1], "Sort Order - " + i.ToString());
        Assert.That(items[i], Is.EqualTo(-keys[i]), "Items Permutation - " + i.ToString());
    }

    for (int i = 0; i < keysCopy.Length; i++)
    {
        Assert.That(Array.IndexOf(keys, keysCopy[i]) >= 0, "All keys still there - " + i.ToString());
    }
}
public void TestRandomTripleArraySorting()
{
    const int len = 0x1 << 10;
    SystemRandomSource random = new SystemRandomSource();

    int[] keys = new int[len];
    int[] items1 = new int[len];
    int[] items2 = new int[len];
    int[] keysCopy = new int[len];

    for (int i = 0; i < keys.Length; i++)
    {
        keys[i] = random.Next();
        keysCopy[i] = keys[i];
        items1[i] = -keys[i];
        items2[i] = keys[i] >> 2;
    }

    Sorting.Sort(keys, items1, items2);

    for (int i = 1; i < keys.Length; i++)
    {
        Assert.IsTrue(keys[i] >= keys[i - 1], "Sort Order - " + i.ToString());
        Assert.AreEqual(-keys[i], items1[i], "Items1 Permutation - " + i.ToString());
        Assert.AreEqual(keys[i] >> 2, items2[i], "Items2 Permutation - " + i.ToString());
    }

    for (int i = 0; i < keysCopy.Length; i++)
    {
        Assert.IsTrue(Array.IndexOf(keys, keysCopy[i]) >= 0, "All keys still there - " + i.ToString());
    }
}
/// <summary>
/// Sets this instance's AmiabilityLevel
/// </summary>
/// <param name="seed">Seed based on player and slice</param>
private void SetLevel(int seed)
{
    uniformRandSeed = new SystemRandomSource(seed);
    binomialRandSeed = new SystemRandomSource(seed); // Doesn't matter if it's the same seed

    uniformDist = new DiscreteUniform(0, Enum.GetNames(typeof(AmiabilityLevel)).Length - 1, uniformRandSeed);
    AmiabilityLevel = (AmiabilityLevel)uniformDist.Sample();
}
void Awake()
{
    signalControl = new ControlSignal();
    path = "Save/DroneSession/" + "Task-" + task + "/Seed-" + fromSeed + "/";
    rndGenerator = new SystemRandomSource(fromSeed);
    taskObject = (DroneTask)Activator.CreateInstance(Type.GetType("Lexmou.MachineLearning.Drone" + task), rndGenerator, task);
    mlp = new MultiLayerMathsNet(fromSeed, null, taskObject.shapes, 1, 0);

    float[] floatArr = new float[taskObject.individualSize];
    //float[] floatArr = new float[] {0,-0.33f,0,0,-0.33f,0,0,0,0,0,0.5f,0,0,-1,0,0,-1,0,0,0.5f,0,1,0,0,0,0,0,0,0,0,0,-1,0,0,0,0,0,0,0,0.5f};
    Genetic.LoadBest(path, fromGeneration, floatArr);
    BuildCustomWeights(mlp.weights, taskObject.shapes, Vector<float>.Build.DenseOfArray(floatArr));
    //Debug.Log(mlp.weights[0]);

    /*if (stabilizationGeneration != 0)
     * {
     *     Debug.Log("Gene Move");
     *     gene = new Genetic(stabilizationSeed, null, 100, 40, 1.0f, 0.1f, 0.1f, 0.1f, 0.1f, "Save/DroneSession/Task-stabilization/", false);
     * }
     * if (moveGeneration != 0)
     * {
     *     Debug.Log("Gene Move");
     *     gene = new Genetic(moveSeed, null, 100, 52, 1.0f, 0.1f, 0.1f, 0.1f, 0.1f, "Save/DroneSession/Task-move/", false);
     * }
     * deltaDistribution = new ContinuousUniform(-2, 2);
     * tmpBuildCustomWeights = new List<Matrix<float>>();*/

    Restart();
}
public void TestRandomTupleListSorting()
{
    const int len = 0x1 << 10;
    SystemRandomSource random = new SystemRandomSource();

    List<int> keys = new List<int>(len);
    List<int> items = new List<int>(len);
    int[] keysCopy = new int[len];

    for (int i = 0; i < len; i++)
    {
        int value = random.Next();
        keys.Add(value);
        keysCopy[i] = value;
        items.Add(-value);
    }

    Sorting.Sort(keys, items);

    for (int i = 1; i < len; i++)
    {
        Assert.That(keys[i] >= keys[i - 1], "Sort Order - " + i.ToString());
        Assert.That(items[i], Is.EqualTo(-keys[i]), "Items Permutation - " + i.ToString());
    }

    for (int i = 0; i < keysCopy.Length; i++)
    {
        Assert.That(keys.IndexOf(keysCopy[i]) >= 0, "All keys still there - " + i.ToString());
    }
}
/// <summary>
/// Sets a Region's wealth based on tech level and base region's wealth. Tech level is fed to
/// a switch which determines alpha/beta of the Beta distribution. A sample is then drawn, which
/// is between 0 and 1. That value is then multiplied by maxWealth to determine the region's
/// actual wealth.
/// </summary>
/// <param name="seed">Seed based on player and slice</param>
/// <param name="techLevel">Region's tech level</param>
/// <param name="maxWealth">Max wealth for this Region</param>
public Wealth(int seed, TechLevel techLevel, UInt64 maxWealth)
{
    randSeed = new SystemRandomSource(seed);
    FillDistribution(techLevel);

    Weight = betaDist.Sample();
    RegionWealth = (UInt64)(Weight * maxWealth);
}
internal override void GenerateCurve()
{
    // Create the sensCurve, add the starting point, then populate it with random values.
    List<SensitivityPoint> sensCurve = new List<SensitivityPoint>();
    SensitivityPoint firstPoint = new SensitivityPoint(0, sensMean);
    sensCurve.Add(firstPoint);

    var seededRandom = new SystemRandomSource(42); // random source seeded with 42

    for (double timecode = curveTimestep; timecode < this.lenght; timecode += curveTimestep)
    {
        double sensDirection = seededRandom.NextDouble(); // random double deciding whether sens will be faster or slower
        if (sensDirection >= 0.5) // sens will be faster
        {
            double randomSens = sensMean + seededRandom.NextDouble() * (sensMax - sensMean); // random value in the range (base sens, max sens)
            SensitivityPoint sensPoint = new SensitivityPoint(timecode, randomSens);
            sensCurve.Add(sensPoint);
        }
        else // sensDirection < 0.5 -> sens will be slower
        {
            double randomSens = sensMin + seededRandom.NextDouble() * (sensMean - sensMin);
            SensitivityPoint sensPoint = new SensitivityPoint(timecode, randomSens);
            sensCurve.Add(sensPoint);
        }
    }

    SensitivityPoint finalSensPoint = new SensitivityPoint(this.lenght, 1); // make sure the curve ends at base sens
    sensCurve.Add(finalSensPoint);
    base.sensCurve = sensCurve;
}
public DroneMove(SystemRandomSource rndGenerator, string fromTask) : base(rndGenerator)
{
    angleRandomRotation = new float[7] { 2, 10, 20, 30, 40, 50, 60 };
    medianThreshold = new float[7] { 70, 70, 70, 70, 70, 70, 70 };
    bestThreshold = new float[7] { 170, 170, 170, 170, 170, 170, 170 };

    if (fromTask == "Stabilization")
    {
        rowIndex = 12;
    }
    else if (fromTask == "Move")
    {
        rowIndex = 0;
    }

    this.fromTask = fromTask;
    deltaDistribution = new ContinuousUniform(-5, 5, rndGenerator);
    shapes = new List<int>() { 12, 4 };
    signal = new UCSignal(shapes[0]);

    individualSize = 0;
    for (int i = 0; i < shapes.Count - 1; i++)
    {
        individualSize += (shapes[i] + 1) * shapes[i + 1];
    }
}
public void CombineUnweighted()
{
    var rnd = new SystemRandomSource(10);
    var a = Generate.Random(200, new Erlang(2, 0.2, rnd)).Select(datum => System.Tuple.Create(1.0, datum)).ToArray();
    var b = Generate.Random(100, new Beta(1.2, 1.4, rnd)).Select(datum => System.Tuple.Create(1.0, datum)).ToArray();
    var c = Generate.Random(150, new Rayleigh(0.8, rnd)).Select(datum => System.Tuple.Create(1.0, datum)).ToArray();
    var d = a.Concat(b).Concat(c);

    var direct = d.Select(datum => datum.Item2).ToArray();

    var x = new RunningWeightedStatistics(d);

    var y = new RunningWeightedStatistics(a);
    y.PushRange(b);
    y.PushRange(c);

    var za = new RunningWeightedStatistics(a);
    var zb = new RunningWeightedStatistics(b);
    var zc = new RunningWeightedStatistics(c);
    var z = za + zb + zc;

    Assert.That(x.Mean, Is.EqualTo(direct.Mean()).Within(1e-12), "Mean Reference");
    Assert.That(y.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean y");
    Assert.That(z.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean z");

    Assert.That(x.Variance, Is.EqualTo(direct.Variance()).Within(1e-12), "Variance Reference");
    Assert.That(y.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance y");
    Assert.That(z.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance z");

    Assert.That(x.PopulationVariance, Is.EqualTo(direct.PopulationVariance()).Within(1e-12), "PopulationVariance Reference");
    Assert.That(y.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance y");
    Assert.That(z.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance z");

    Assert.That(x.StandardDeviation, Is.EqualTo(direct.StandardDeviation()).Within(1e-12), "StandardDeviation Reference");
    Assert.That(y.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation y");
    Assert.That(z.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation z");

    Assert.That(x.PopulationStandardDeviation, Is.EqualTo(direct.PopulationStandardDeviation()).Within(1e-12), "PopulationStandardDeviation Reference");
    Assert.That(y.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation y");
    Assert.That(z.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation z");

    Assert.That(x.Skewness, Is.EqualTo(direct.Skewness()).Within(1e-12), "Skewness Reference (not independent!)");
    Assert.That(y.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness y");
    Assert.That(z.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness z");

    Assert.That(x.PopulationSkewness, Is.EqualTo(direct.PopulationSkewness()).Within(1e-12), "PopulationSkewness Reference (not independent!)");
    Assert.That(y.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness y");
    Assert.That(z.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness z");

    Assert.That(x.Kurtosis, Is.EqualTo(direct.Kurtosis()).Within(1e-12), "Kurtosis Reference (not independent!)");
    Assert.That(y.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis y");
    Assert.That(z.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis z");

    Assert.That(x.PopulationKurtosis, Is.EqualTo(direct.PopulationKurtosis()).Within(1e-12), "PopulationKurtosis Reference (not independent!)");
    Assert.That(y.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis y");
    Assert.That(z.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis z");
}
public void CustomResetSpeedTest()
{
    int iter = 10000;
    float initialValueWeights = 1.0f;

    mlp.Reset(initialValueWeights, true);
    mlpMN.Reset(true);

    SystemRandomSource rndGenerator = new SystemRandomSource(seed);
    List<Matrix<float>> weightsMN = new List<Matrix<float>>();
    List<float[,]> weights = new List<float[,]>();

    for (int i = 0; i < shapes.Count - 1; i++)
    {
        weightsMN.Add(Matrix<float>.Build.Random(mlpMN.layers[i + 1].RowCount, mlpMN.layers[i].RowCount,
            new ContinuousUniform(-initialValueWeights, initialValueWeights, rndGenerator)));
    }

    for (int i = 0; i < shapes.Count - 1; i++)
    {
        weights.Add(new float[mlp.layers[i].GetLength(1), mlp.layers[i + 1].GetLength(1)]);
    }

    for (int i = 0; i < shapes.Count - 1; i++)
    {
        for (int j = 0; j < weights[i].GetLength(0); j++)
        {
            for (int k = 0; k < weights[i].GetLength(1); k++)
            {
                weights[i][j, k] = (float)ContinuousUniform.Sample(rndGenerator, -initialValueWeights, initialValueWeights);
            }
        }
    }

    var watch = System.Diagnostics.Stopwatch.StartNew();
    for (int i = 0; i < iter; i++)
    {
        mlpMN.Reset(false, weightsMN);
    }
    watch.Stop();
    var elapsedMs = watch.ElapsedMilliseconds;
    Debug.Log("Reset Time MathNet: " + elapsedMs);

    watch = System.Diagnostics.Stopwatch.StartNew();
    for (int i = 0; i < iter; i++)
    {
        mlp.Reset(initialValueWeights, false, weights);
    }
    watch.Stop();
    elapsedMs = watch.ElapsedMilliseconds;
    Debug.Log("Reset Time Array: " + elapsedMs);

    Assert.AreEqual(mlp.weights, weights);
    Assert.AreEqual(mlpMN.weights, weightsMN);
}
/// <summary>
/// Create random samples, uniform between 0 and 1.
/// Faster than other methods but with reduced guarantees on randomness.
/// </summary>
public static double[] Uniform(int length)
{
    if (length < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(length));
    }

    return SystemRandomSource.FastDoubles(length);
}
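// Usage sketch (illustrative, assuming this is MathNet.Numerics' Generate.Uniform; Average needs System.Linq):
double[] samples = Generate.Uniform(1000); // 1000 values in [0, 1)
double mean = samples.Average();           // expected to land close to 0.5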
private static int CalNumber()
{
    Random random = new SystemRandomSource();
    decimal MahjongNumber = (random.NextDecimal() * 42) + 1;

    // Round to the nearest whole tile number before converting to int.
    MahjongNumber = Math.Round(MahjongNumber);
    return (int)MahjongNumber;
}
public Genetic(int seed, SystemRandomSource rndGenerator, int populationSize, int individualSize, float initialValueWeights,
               float mutationRate = 0.1f, float randomIndividualsRate = 0.05f, float bestIndividualsRate = 0.05f,
               float emptyRate = 0.0f, string path = "", bool save = true)
{
    this.save = save;
    tmp = Matrix<float>.Build.Dense(individualSize, populationSize);
    this.path = path;
    this.bestScore = 0.0f;
    this.generation = 1;
    this.seed = seed;
    this.initialValueWeights = initialValueWeights;

    if (rndGenerator != null)
    {
        this.rndGenerator = rndGenerator;
    }
    else
    {
        this.rndGenerator = new SystemRandomSource(seed);
    }

    // Note: the distributions below use the rndGenerator parameter directly, so when it is null
    // they are not driven by the seeded generator created above.
    distribution = new ContinuousUniform(-initialValueWeights, initialValueWeights, rndGenerator);
    subDistribution = new ContinuousUniform(-0.01, 0.01, rndGenerator);
    drawDistribution = new ContinuousUniform(0.0f, 1.0f, rndGenerator);

    this.populationSize = populationSize;
    this.individualSize = individualSize;
    this.population = Matrix<float>.Build.Random(individualSize, populationSize, distribution);

    for (int i = 0; i < populationSize; i++)
    {
        distEmpty = Combinatorics.GenerateCombination(individualSize, Mathf.RoundToInt(emptyRate * individualSize), rndGenerator);
        for (int j = 0; j < individualSize; j++)
        {
            if (distEmpty[j])
            {
                population[j, i] = 0.000f;
            }
        }
    }

    this.evaluations = Vector<float>.Build.Dense(populationSize);
    this.sumEvaluations = 0.0f;
    this.mutationRate = mutationRate;
    this.randomIndividualsRate = randomIndividualsRate;
    this.bestIndividualsRate = bestIndividualsRate;
    this.emptyRate = emptyRate;

    if (save)
    {
        writer = UIO.CreateStreamWriter(GeneratePath(), "GeneticResults.csv", false);
        UIO.WriteLine(writer, "Generation;Best;Mean;Std Deviation;Median");
    }

    indexPermutation = Combinatorics.GeneratePermutation(populationSize);
}
// Use this for initialization
public NoiseSensor(float mean, float stdDevWhite, float stdDevBrown, float deltaT, SystemRandomSource rndGenerator)
{
    this.rndGenerator = rndGenerator;
    this.mean = mean;
    this.stdDevWhite = stdDevWhite;
    this.stdDevBrown = stdDevBrown;
    this.deltaT = deltaT;

    normal = new Normal(mean, stdDevWhite, rndGenerator);
    stdNormal = new Normal(0.0f, 1.0f, rndGenerator);
}
private void ApplyAmiablePop()
{
    amiableRandSeed = new SystemRandomSource(Region.Seed);
    Tuple<double, double> alphaBeta = Amiability.Amiability.GetDistributionAlphaBeta(Region.Amiability.AmiabilityLevel);
    amiableDist = new Beta(alphaBeta.Item1, alphaBeta.Item2, amiableRandSeed);

    double amiableWeight = amiableDist.Sample();
    Pop += (UInt64)(amiableWeight * amiableMultiplier * maxPop);
}
private void ApplyTechPop()
{
    techRandSeed = new SystemRandomSource(Region.Seed);
    Tuple<double, double> alphaBeta = BetaTechDist.GetDistributionAlphaBeta(Region.TechLevel);
    techDist = new Beta(alphaBeta.Item1, alphaBeta.Item2, techRandSeed);

    double techWeight = techDist.Sample();
    Pop += (UInt64)(techWeight * techMultiplier * maxPop);
}
public void MetropolisConstructor()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisSampler<double>(0.2, normal.Density, x => Normal.Sample(rnd, x, 0.1), 10);
    Assert.IsNotNull(ms.RandomSource);

    ms.RandomSource = rnd;
    Assert.IsNotNull(ms.RandomSource);
}
/// <summary>
/// Generate samples by sampling a function at samples from a probability distribution, uniform between 0 and 1.
/// Faster than other methods but with reduced guarantees on randomness.
/// </summary>
public static T[] UniformMap<T>(int length, Func<double, T> map)
{
    if (length < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(length));
    }

    var samples = SystemRandomSource.FastDoubles(length);
    return Map(samples, map);
}
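// Usage sketch (illustrative, assuming this is MathNet.Numerics' Generate.UniformMap):
// map each uniform sample in [0, 1) to a boolean flag that is true about 25% of the time.
bool[] flags = Generate.UniformMap(10, u => u < 0.25);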
public void SetUp()
{
    seed = 65;
    initialValueWeights = 1.0f;

    geneSeed = new Genetic(seed, null, 2, 10, initialValueWeights, 0.1f, 0.05f, 0.05f, 0, "", false);
    //Debug.Log(geneSeed.ToString());
    //System.Threading.Thread.Sleep(10000);

    SystemRandomSource rndGenerator = new SystemRandomSource(seed);
    geneRndGenerator = new Genetic(seed, rndGenerator, 2, 10, initialValueWeights, 0.1f, 0.05f, 0.05f, 0, "", false);
}
/// <summary>
/// Generate samples by sampling a function at samples from a probability distribution, uniform between 0 and 1.
/// Faster than other methods but with reduced guarantees on randomness.
/// </summary>
public static T[] UniformMap<T>(int length, Func<double, T> map)
{
    var samples = SystemRandomSource.Doubles(length);

    var data = new T[length];
    for (int i = 0; i < data.Length; i++)
    {
        data[i] = map(samples[i]);
    }

    return data;
}
public void SampleArrayTest()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisSampler<double>(0.2, normal.Density, x => Normal.Sample(rnd, x, 0.1), 10)
    {
        RandomSource = rnd
    };

    ms.Sample(5);
}
private int CalculateProcessDuration(int to)
{
    SystemRandomSource randomNumberGenerator = SystemRandomSource.Default;

    //double rand = randomNumberGenerator.NextDouble();
    //if ((rand = Math.Round(rand, 5)) == 1) rand = .99999;
    //double randomNumber = rand;
    //double x = -Math.Log(1.0 - randomNumber) * S[to];
    //int ret = (int)Math.Ceiling(x);
    //return ret;

    return (int)Math.Ceiling(-Math.Log(1.0 - randomNumberGenerator.NextDouble()) * S[to]);
}
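// Sketch (an assumption, not the project's code): the expression above is inverse-transform
// sampling of an exponential waiting time with mean S[to]. With MathNet.Numerics the same draw
// could be expressed through the Exponential distribution, whose rate parameter is 1 / mean;
// SampleDuration is a hypothetical helper shown only for illustration.
int SampleDuration(double meanDuration)
{
    double sample = Exponential.Sample(SystemRandomSource.Default, 1.0 / meanDuration);
    return (int)Math.Ceiling(sample);
}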
public void SampleTest()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisSampler<double>(0.2, normal.Density, x => Normal.Sample(rnd, x, 0.1), 10)
    {
        RandomSource = rnd
    };

    ms.Sample();
}
/// <summary>
/// Gets a normal-like distribution from the 2D array
/// </summary>
/// <param name="rand">Random source component</param>
/// <returns>Array of float percentage weights summing to 100</returns>
public static float[] GetNormalDistribution(SystemRandomSource rand)
{
    float[][] ret_val =
    {
        new float[] { 75.0f, 17.0f, 5.0f, 2.0f, 1.0f },
        new float[] { 68.2f, 27.2f, 4.2f, 0.2f, 0.2f },
        new float[] { 52.6875f, 25.0f, 12.75f, 6.375f, 3.1875f },
        new float[] { 39.4f, 30.0f, 18.4f, 8.8f, 3.4f }
    };

    return ret_val[rand.Next(ret_val.Length)];
}
public void SampleTest()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisHastingsSampler<double>(0.2, normal.Density, (x, y) => Normal.PDF(x, 0.1, y), x => Normal.Sample(rnd, x, 0.1), 10)
    {
        RandomSource = rnd
    };

    ms.Sample();
}
/// <summary>
/// Generate samples by sampling a function at sample pairs from a probability distribution, uniform between 0 and 1.
/// Faster than other methods but with reduced guarantees on randomness.
/// </summary>
public static T[] UniformMap2<T>(int length, Func<double, double, T> map)
{
    if (length < 0)
    {
        throw new ArgumentOutOfRangeException("length");
    }

    var samples1 = SystemRandomSource.FastDoubles(length);
    var samples2 = SystemRandomSource.FastDoubles(length);
    return Map2(samples1, samples2, map);
}
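// Usage sketch (illustrative, assuming this is MathNet.Numerics' Generate.UniformMap2):
// pair two uniform samples per element, e.g. to scatter random 2D points over the unit square.
var points = Generate.UniformMap2(100, (x, y) => System.Tuple.Create(x, y));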
public void SampleArrayTest()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisHastingsSampler<double>(0.2, normal.Density, (x, y) => Normal.PDF(x, 0.1, y), x => Normal.Sample(rnd, x, 0.1), 10)
    {
        RandomSource = rnd
    };

    ms.Sample(5);
}
/// <summary>
/// Samples a sequence of Poisson distributed random variables.
/// </summary>
/// <param name="lambda">The lambda (λ) parameter of the Poisson distribution. Range: λ > 0.</param>
/// <returns>a sequence of samples from the distribution.</returns>
public static IEnumerable<int> Samples(double lambda)
{
    if (!(lambda > 0.0))
    {
        throw new ArgumentException(Resources.InvalidDistributionParameters);
    }

    SystemRandomSource rnd = SystemRandomSource.Default;
    while (true)
    {
        yield return SampleUnchecked(rnd, lambda);
    }
}
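// Usage sketch (illustrative, assuming MathNet.Numerics' Poisson and System.Linq):
// the sequence above is infinite, so callers should bound it explicitly, e.g. with Take.
int[] eventCounts = Poisson.Samples(3.5).Take(20).ToArray();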
/// <summary>
/// Samples a sequence of uniformly distributed random variables.
/// </summary>
/// <param name="lower">Lower bound. Range: lower ≤ upper.</param>
/// <param name="upper">Upper bound. Range: lower ≤ upper.</param>
/// <returns>a sequence of samples from the discrete uniform distribution.</returns>
public static IEnumerable<int> Samples(int lower, int upper)
{
    if (!(lower <= upper))
    {
        throw new ArgumentException(Resources.InvalidDistributionParameters);
    }

    SystemRandomSource rnd = SystemRandomSource.Default;
    while (true)
    {
        yield return SampleUnchecked(rnd, lower, upper);
    }
}
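// Usage sketch (illustrative, assuming MathNet.Numerics' DiscreteUniform and System.Linq):
// ten rolls of a fair six-sided die drawn from the infinite sequence above.
int[] rolls = DiscreteUniform.Samples(1, 6).Take(10).ToArray();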
void Awake()
{
    SetParametersFromCommandLine();
    Debug.Log("Awake Session");

    Build();
    Time.timeScale = timeScale;
    BuildHUD();
    BuildSessionWriter();

    nextUpdate = intervalUpdate;
    rndGenerator = new SystemRandomSource(seed);

    Reset();
    CloseSessionWriter();
}
/// <summary>
/// Samples a sequence of binomially distributed random variables.
/// </summary>
/// <param name="p">The success probability (p) in each trial. Range: 0 ≤ p ≤ 1.</param>
/// <param name="n">The number of trials (n). Range: n ≥ 0.</param>
/// <returns>a sequence of successes in <paramref name="n"/> trials.</returns>
public static IEnumerable<int> Samples(double p, int n)
{
    if (!(p >= 0.0 && p <= 1.0 && n >= 0))
    {
        throw new ArgumentException(Resources.InvalidDistributionParameters);
    }

    SystemRandomSource rnd = SystemRandomSource.Default;
    while (true)
    {
        yield return SampleUnchecked(rnd, p, n);
    }
}
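// Usage sketch (illustrative, assuming MathNet.Numerics' Binomial and System.Linq):
// five draws of the number of heads observed in 100 fair coin flips.
int[] headCounts = Binomial.Samples(0.5, 100).Take(5).ToArray();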
public void MetropolisHastingsConstructor()
{
    var normal = new Normal(0.0, 1.0);
    var rnd = new SystemRandomSource(1);

    var ms = new MetropolisHastingsSampler<double>(0.2, normal.Density, (x, y) => Normal.PDF(x, 0.1, y), x => Normal.Sample(rnd, x, 0.1), 10)
    {
        RandomSource = rnd
    };
    Assert.IsNotNull(ms.RandomSource);

    ms.RandomSource = new System.Random(0);
    Assert.IsNotNull(ms.RandomSource);
}
public void SamplesFollowsCorrectDistribution()
{
    Random rnd = new SystemRandomSource(1);

    foreach (var dd in _discreteDistributions)
    {
        dd.RandomSource = rnd;
        VapnikChervonenkisTest(Error, ErrorProbability, dd.Samples().Select(x => (double)x).Take(NumberOfTestSamples), dd);
    }

    foreach (var cd in _continuousDistributions)
    {
        cd.RandomSource = rnd;
        VapnikChervonenkisTest(Error, ErrorProbability, cd.Samples().Take(NumberOfTestSamples), cd);
    }
}
public void SampleFollowsCorrectDistribution()
{
    Random rnd = new SystemRandomSource(1);

    foreach (var dd in _discreteDistributions)
    {
        dd.RandomSource = rnd;
        var samples = new double[NumberOfTestSamples];
        for (var i = 0; i < NumberOfTestSamples; i++)
        {
            samples[i] = dd.Sample();
        }

        VapnikChervonenkisTest(Error, ErrorProbability, samples, dd);
    }

    foreach (var cd in _continuousDistributions)
    {
        cd.RandomSource = rnd;
        var samples = new double[NumberOfTestSamples];
        for (var i = 0; i < NumberOfTestSamples; i++)
        {
            samples[i] = cd.Sample();
        }

        VapnikChervonenkisTest(Error, ErrorProbability, samples, cd);
    }
}
public void Combine()
{
    var rnd = new SystemRandomSource(10);
    var a = Generate.Random(200, new Erlang(2, 0.2, rnd));
    var b = Generate.Random(100, new Beta(1.2, 1.4, rnd));
    var c = Generate.Random(150, new Rayleigh(0.8, rnd));
    var d = a.Concat(b).Concat(c).ToArray();

    var x = new RunningStatistics(d);

    var y = new RunningStatistics(a);
    y.PushRange(b);
    y.PushRange(c);

    var za = new RunningStatistics(a);
    var zb = new RunningStatistics(b);
    var zc = new RunningStatistics(c);
    var z = za + zb + zc;

    Assert.That(x.Mean, Is.EqualTo(d.Mean()).Within(1e-12), "Mean Reference");
    Assert.That(y.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean y");
    Assert.That(z.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean z");

    Assert.That(x.Variance, Is.EqualTo(d.Variance()).Within(1e-12), "Variance Reference");
    Assert.That(y.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance y");
    Assert.That(z.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance z");

    Assert.That(x.PopulationVariance, Is.EqualTo(d.PopulationVariance()).Within(1e-12), "PopulationVariance Reference");
    Assert.That(y.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance y");
    Assert.That(z.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance z");

    Assert.That(x.StandardDeviation, Is.EqualTo(d.StandardDeviation()).Within(1e-12), "StandardDeviation Reference");
    Assert.That(y.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation y");
    Assert.That(z.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation z");

    Assert.That(x.PopulationStandardDeviation, Is.EqualTo(d.PopulationStandardDeviation()).Within(1e-12), "PopulationStandardDeviation Reference");
    Assert.That(y.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation y");
    Assert.That(z.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation z");

    Assert.That(x.Skewness, Is.EqualTo(d.Skewness()).Within(1e-12), "Skewness Reference (not independent!)");
    Assert.That(y.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness y");
    Assert.That(z.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness z");

    Assert.That(x.PopulationSkewness, Is.EqualTo(d.PopulationSkewness()).Within(1e-12), "PopulationSkewness Reference (not independent!)");
    Assert.That(y.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness y");
    Assert.That(z.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness z");

    Assert.That(x.Kurtosis, Is.EqualTo(d.Kurtosis()).Within(1e-12), "Kurtosis Reference (not independent!)");
    Assert.That(y.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis y");
    Assert.That(z.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis z");

    Assert.That(x.PopulationKurtosis, Is.EqualTo(d.PopulationKurtosis()).Within(1e-12), "PopulationKurtosis Reference (not independent!)");
    Assert.That(y.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis y");
    Assert.That(z.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis z");
}