private static Chromosome SelectParent(SortedSet<Chromosome> tournament, double sumFitness, Random random) { /* fitness-proportionate (roulette-wheel) selection: each chromosome owns a segment of [0, 1] sized by its share of the total fitness */ UniformDistribution uniform = new UniformDistribution(Interval.FromEndpoints(0, 1)); double randomNumber = uniform.GetRandomValue(random); double sumLength = 0; foreach (var chromosome in tournament) { sumLength += chromosome.Fitness / sumFitness; if (randomNumber < sumLength) return chromosome; } return tournament.Last(); /* guard against floating-point round-off */ }
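// Hedged usage sketch: the snippet above implements fitness-proportionate (roulette-wheel)
// selection despite its 'tournament' parameter name. A self-contained restatement using only
// System.Random; the Individual type and its Fitness property are hypothetical stand-ins.
private static Individual SelectProportional(IReadOnlyList<Individual> pool, Random random)
{
    double sumFitness = 0;
    foreach (Individual individual in pool) sumFitness += individual.Fitness;
    double threshold = random.NextDouble() * sumFitness; // uniform point on [0, sumFitness)
    double cumulative = 0;
    foreach (Individual individual in pool)
    {
        cumulative += individual.Fitness;
        if (threshold < cumulative) return individual;   // first segment covering the point
    }
    return pool[pool.Count - 1];                         // guard against floating-point round-off
}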
public static Chromosome Cross(Tuple<Chromosome, Chromosome> pair, Random random) { IList<double> values; UniformDistribution uniform = new UniformDistribution(Interval.FromEndpoints(0, 1)); DiscreteUniformDistribution discrete = new DiscreteUniformDistribution(0, 1); double randomNumber = uniform.GetRandomValue(random); if (randomNumber <= SELECTION_1_PROBABILITY) /* whole arithmetic recombination */ values = pair.Item1.Values.Zip(pair.Item2.Values, (x, y) => (x + y) / 2).ToList(); else if (randomNumber <= SELECTION_1_PROBABILITY + SELECTION_2_PROBABILITY) /* discrete recombination */ values = pair.Item1.Values.Zip(pair.Item2.Values, (x, y) => discrete.GetRandomValue(random) == 0 ? x : y).ToList(); else /* simple arithmetic recombination; indexing by position replaces the original IndexOf(x) lookup, which returned the wrong position whenever a value occurred more than once */ values = pair.Item1.Values.Select((x, i) => i < pair.Item1.Values.Count / 2 ? x : (x + pair.Item2.Values[i]) / 2).ToList(); return new Chromosome(values); }
public static void Mutate(Chromosome chromosome, Random random) { NormalDistribution normal; UniformDistribution uniform = new UniformDistribution(Interval.FromEndpoints(0, 1)); for (int i = 0; i < chromosome.Values.Count; i++) { if (uniform.GetRandomValue(random) <= MUTATION_PROBABILITY) { if (uniform.GetRandomValue(random) <= SELECTION_PROBABILITY) normal = new NormalDistribution(MEAN, SIGMA_1); else normal = new NormalDistribution(MEAN, SIGMA_2); chromosome.Values[i] += normal.GetRandomValue(random); } } }
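// A hedged sketch of how SelectParent, Cross and Mutate above compose into one GA generation;
// populationSize is a hypothetical input, and fitness recomputation for the offspring is
// assumed to happen elsewhere in the surrounding program, which is not shown here.
private static List<Chromosome> EvolveOneGeneration(SortedSet<Chromosome> population, double sumFitness, int populationSize, Random random)
{
    var offspring = new List<Chromosome>();
    while (offspring.Count < populationSize)
    {
        Chromosome p1 = SelectParent(population, sumFitness, random); // roulette-wheel pick
        Chromosome p2 = SelectParent(population, sumFitness, random);
        Chromosome child = Cross(Tuple.Create(p1, p2), random);       // one of three recombinations
        Mutate(child, random);                                        // per-gene Gaussian noise
        offspring.Add(child);
    }
    return offspring;
}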
public void BuildGenerators(List<FloorData> floors) { Distribution floorDistr = new UniformDistribution(1, floors.Count); int min, max; foreach (FloorData data in floors) { // Add generator Generators.Add(new TenantGenerator(data.ID, floorDistr)); // Add distribution min = data.Period - data.Spread; max = data.Period + data.Spread; GeneratorsDistr.Add(data.ID, new UniformDistribution(min, max)); } NumFloors = floors.Count; }
public void GenerateUniformVariableAndCheckCharacteristics(double a, double b) { var uniformDistributedVariable = new UniformDistribution(a, b); var mean = (a + b) / 2; // expected value var variance = Math.Pow(b - a, 2) / 12; // variance var standardDeviation = Math.Sqrt(variance); // standard deviation var skewness = 0; // skewness var kurtosis = -6.0 / 5; // excess kurtosis var delta = Math.Pow(10, -3); Assert.AreEqual(mean, uniformDistributedVariable.Mean, delta); Assert.AreEqual(variance, uniformDistributedVariable.Variance, delta); Assert.AreEqual(standardDeviation, uniformDistributedVariable.StandardDeviation, delta); Assert.AreEqual(skewness, uniformDistributedVariable.Skewness, delta); Assert.AreEqual(kurtosis, uniformDistributedVariable.Kurtosis, delta); }
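// The constants asserted above follow from the standard moments of U(a, b):
// mean = (a + b) / 2, variance = (b - a)^2 / 12, skewness = 0, excess kurtosis = -6/5.
// A minimal simulation check of the first two, using only System and System.Linq:
static void CheckUniformMomentsBySimulation(double a, double b, int n = 1_000_000)
{
    var rng = new Random(42);
    double[] xs = Enumerable.Range(0, n).Select(_ => a + (b - a) * rng.NextDouble()).ToArray();
    double mean = xs.Average();
    double variance = xs.Select(x => (x - mean) * (x - mean)).Average();
    Console.WriteLine($"mean ~ {mean}, expected {(a + b) / 2}");
    Console.WriteLine($"variance ~ {variance}, expected {(b - a) * (b - a) / 12}");
}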
public void SumOfSeveralUniformsChiSquareTest(double a, double b, int count) { var distr1 = new UniformDistribution(a, b); var distr2 = new UniformDistribution(a, b); var sum = distr1 + distr2; if (count > 2) { for (var i = 0; i < count - 2; i++) { var distr = new UniformDistribution(a, b); sum += distr; } } var test = ChiSquareTest.Test(sum); Assert.IsTrue(test); }
public void QuotientOfSeveralUniformsChiSquareTest(double a, double b, int count) { var distr1 = new UniformDistribution(a, b); var distr2 = new UniformDistribution(a, b); var quotient = distr1 / distr2; if (count > 2) { for (var i = 0; i < count - 2; i++) { var distr = new UniformDistribution(a, b); quotient /= distr; } } var test = ChiSquareTest.Test(quotient); Assert.IsTrue(test); }
public void ProductOfSeveralUniformsChiSquareTest(double a, double b, int count) { var distr1 = new UniformDistribution(a, b); var distr2 = new UniformDistribution(a, b); var product = distr1 * distr2; if (count > 2) { for (var i = 0; i < count - 2; i++) { var distr = new UniformDistribution(a, b); product *= distr; } } var test = ChiSquareTest.Test(product); Assert.IsTrue(test); }
public void DifferenceOfSeveralUniformsChiSquareTest(double a, double b, int count) { var distr1 = new UniformDistribution(a, b); var distr2 = new UniformDistribution(a, b); var diff = distr1 - distr2; if (count > 2) { for (var i = 0; i < count - 2; i++) { var distr = new UniformDistribution(a, b); diff -= distr; } } var test = ChiSquareTest.Test(diff); Assert.IsTrue(test); }
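// The four chi-square tests above (sum, quotient, product, difference) share one shape and
// differ only in the operator folded over the distributions. A hedged sketch of the common
// pattern; 'dynamic' stands in for the library's operator return types, which are not shown:
public void CombinedUniformsChiSquareTest(double a, double b, int count, Func<dynamic, UniformDistribution, dynamic> combine)
{
    dynamic result = combine(new UniformDistribution(a, b), new UniformDistribution(a, b));
    for (var i = 0; i < count - 2; i++)
    {
        result = combine(result, new UniformDistribution(a, b)); // fold one more distribution in
    }
    Assert.IsTrue(ChiSquareTest.Test(result));
}
// Usage, mirroring SumOfSeveralUniformsChiSquareTest:
//     CombinedUniformsChiSquareTest(a, b, count, (acc, d) => acc + d);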
public void TestDistributionUniform() { IDoubleDistribution dist = new UniformDistribution(m_model, "UniformDistribution", Guid.NewGuid(), 3.5, 7.0); Assert.IsTrue(dist.GetValueWithCumulativeProbability(0.50) == 5.25); dist.SetCDFInterval(0.5, 0.5); Assert.IsTrue(dist.GetNext() == 5.25); dist.SetCDFInterval(0.0, 1.0); System.IO.StreamWriter tw = new System.IO.StreamWriter(Environment.GetEnvironmentVariable("TEMP") + "\\DistributionUniform.csv"); Debug.WriteLine("Generating raw data."); int DATASETSIZE = 1500000; double[] rawData = new double[DATASETSIZE]; for (int x = 0; x < DATASETSIZE; x++) { rawData[x] = dist.GetNext(); //tw.WriteLine(rawData[x]); } Debug.WriteLine("Performing histogram analysis."); Histogram1D_Double hist = new Histogram1D_Double(rawData, 0, 7.5, 100, "distribution"); hist.LabelProvider = new LabelProvider(((Histogram1D_Double)hist).DefaultLabelProvider); hist.Recalculate(); Debug.WriteLine("Writing data dump file."); int[] bins = (int[])hist.Bins; for (int i = 0; i < bins.Length; i++) { //Debug.WriteLine(hist.GetLabel(new int[]{i}) + ", " + bins[i]); tw.WriteLine(hist.GetLabel(new int[] { i }) + ", " + bins[i]); } tw.Flush(); tw.Close(); if (m_visuallyVerify) { System.Diagnostics.Process.Start("excel.exe", Environment.GetEnvironmentVariable("TEMP") + "\\DistributionUniform.csv"); } }
public void UniformOrderStatistics() { // Check that the order statistics of the uniform distribution are distributed as expected. Random rng = new Random(1); UniformDistribution u = new UniformDistribution(); Sample maxima = new Sample(); Sample minima = new Sample(); for (int i = 0; i < 100; i++) { double maximum = 0.0; double minimum = 1.0; for (int j = 0; j < 4; j++) { double value = u.GetRandomValue(rng); if (value > maximum) { maximum = value; } if (value < minimum) { minimum = value; } } maxima.Add(maximum); minima.Add(minimum); } // maxima should be distributed according to Beta(n,1) TestResult maxTest = maxima.KolmogorovSmirnovTest(new BetaDistribution(4, 1)); Assert.IsTrue(maxTest.Probability > 0.05); // minima should be distributed according to Beta(1,n) TestResult minTest = minima.KolmogorovSmirnovTest(new BetaDistribution(1, 4)); Assert.IsTrue(minTest.Probability > 0.05); }
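// Why Beta(n, 1) for the maximum: P(max <= x) = P(every one of n draws <= x) = x^n on [0, 1],
// which is exactly the Beta(n, 1) CDF; symmetrically the minimum has CDF 1 - (1 - x)^n, i.e.
// Beta(1, n). A self-contained Monte Carlo check of the maximum's CDF at a single point:
static void CheckMaxOrderStatistic(int n = 4, double x = 0.7, int trials = 200_000)
{
    var rng = new Random(1);
    int hits = 0;
    for (int t = 0; t < trials; t++)
    {
        double max = 0.0;
        for (int j = 0; j < n; j++) max = Math.Max(max, rng.NextDouble());
        if (max <= x) hits++; // event {max <= x}
    }
    Console.WriteLine($"empirical P(max <= {x}) = {hits / (double)trials}, exact x^n = {Math.Pow(x, n)}");
}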
public void TestSample() { int sampleCount = 10_000_000; UniformDistribution dist = new UniformDistribution(); double[] sampleArr = new double[sampleCount]; for(int i=0; i<sampleCount; i++){ sampleArr[i] = dist.Sample(); } UniformDistributionTest(sampleArr, 0.0, 1.0); // Configure a scale and a signed flag. dist = new UniformDistribution(100.0, true); for(int i=0; i<sampleCount; i++){ sampleArr[i] = dist.Sample(); } UniformDistributionTest(sampleArr, -100.0, 100.0); }
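// UniformDistributionTest is not shown above. A hedged sketch of what such a validation helper
// might check (range containment plus a loose mean test); this is an assumption about the
// helper's intent, not the library's actual implementation (assumes System and System.Linq):
static void UniformDistributionTestSketch(double[] samples, double min, double max)
{
    foreach (double s in samples)
    {
        if (s < min || s >= max) throw new InvalidOperationException($"sample {s} outside [{min}, {max})");
    }
    double mean = samples.Average();       // should approach the midpoint for large samples
    double expectedMean = (min + max) / 2.0;
    if (Math.Abs(mean - expectedMean) > 0.01 * (max - min))
        throw new InvalidOperationException($"mean {mean} too far from {expectedMean}");
}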
public void TestMultivariateRegression() { // Collect r^2 values from multivariate linear regressions. double cz = 1.0; double cx = 0.0; double cy = 0.0; Random rng = new Random(1001110000); ContinuousDistribution xDistribution = new UniformDistribution(Interval.FromEndpoints(-4.0, 8.0)); ContinuousDistribution yDistribution = new UniformDistribution(Interval.FromEndpoints(-8.0, 4.0)); ContinuousDistribution eDistribution = new NormalDistribution(); List <double> r2Sample = new List <double>(); for (int i = 0; i < 500; i++) { MultivariateSample xyzSample = new MultivariateSample(3); for (int k = 0; k < 12; k++) { double x = xDistribution.GetRandomValue(rng); double y = yDistribution.GetRandomValue(rng); double z = cx * x + cy * y + cz + eDistribution.GetRandomValue(rng); xyzSample.Add(x, y, z); } MultiLinearRegressionResult fit = xyzSample.LinearRegression(2); double fcx = fit.Parameters.ValuesVector[0]; double fcy = fit.Parameters.ValuesVector[1]; double fcz = fit.Parameters.ValuesVector[2]; r2Sample.Add(fit.RSquared); } // r^2 values should be distributed as expected. ContinuousDistribution r2Distribution = new BetaDistribution((3 - 1) / 2.0, (12 - 3) / 2.0); TestResult ks = r2Sample.KolmogorovSmirnovTest(r2Distribution); Assert.IsTrue(ks.Probability > 0.05); }
/* R file: random.c * R function name: ProbSampleNoReplace * Draws from 0..weights.Length-1 a sequence of distinct integers whose cardinality is 'sampleSize' */ public int[] GetSample() { int[] perm = new int[weights.Length]; int[] ans = new int[sampleSize]; double rT, mass, totalMass; int i, j, k, n1; double[] normalizedWeights = new double[weights.Length]; Array.Copy(weights, normalizedWeights, weights.Length); FixUpProb(normalizedWeights, ans.Length, false); for (i = 0; i < weights.Length; i++) { perm[i] = i; } Array.Sort(normalizedWeights, perm, Zygotine.Util.DescendingComparer.Desc); totalMass = 1.0; for (i = 0, n1 = weights.Length - 1; i < ans.Length; i++, n1--) { rT = totalMass * UniformDistribution.RUnif(); mass = 0.0; for (j = 0; j < n1; j++) { mass += normalizedWeights[j]; if (rT <= mass) { break; } } ans[i] = perm[j]; totalMass -= normalizedWeights[j]; for (k = j; k < n1; k++) { normalizedWeights[k] = normalizedWeights[k + 1]; perm[k] = perm[k + 1]; } } return(ans); }
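// A self-contained restatement of the weighted sampling-without-replacement loop above, with
// System.Random in place of UniformDistribution.RUnif and without the descending pre-sort
// (which is an efficiency device in the R original, not a correctness requirement):
static int[] SampleWithoutReplacement(double[] weights, int sampleSize, Random rng)
{
    int n = weights.Length;
    int[] perm = Enumerable.Range(0, n).ToArray();
    double[] w = (double[])weights.Clone();
    double totalMass = w.Sum();
    int[] ans = new int[sampleSize];
    for (int i = 0; i < sampleSize; i++)
    {
        double rT = rng.NextDouble() * totalMass; // uniform point on the remaining mass
        double mass = 0.0;
        int j = 0;
        for (; j < n - i - 1; j++) { mass += w[j]; if (rT <= mass) break; }
        ans[i] = perm[j];
        totalMass -= w[j];
        for (int k = j; k < n - i - 1; k++) { w[k] = w[k + 1]; perm[k] = perm[k + 1]; } // compact the live region
    }
    return ans;
}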
} //# end of rnorm.censored internal static double RUnifLogP1(double[] logPLim, bool lowerTail = true, double u = double.NaN) { // Assumes logPLim has length 2 and that logPLim[1] > logPLim[0] if (double.IsNaN(u)) { u = UniformDistribution.RUnif(); } //# To sample 'size' values uniformly in c(exp(logp.lim[1]), exp(logp.lim[2])) double w = logPLim[1] - logPLim[0]; if (!lowerTail) { w = Math.Abs(w); } double logP = Math.Log(u) + ExponentialDistribution.PExp(w, logP: true); double x = ExponentialDistribution.QExp(logP, logP: true); x = logPLim.Max() - x; return(x); }
private UncertainMeasurementSample CreateDataSet(Interval r, Func <double, double> fv, Func <double, double> fu, int n, int seed) { UncertainMeasurementSample set = new UncertainMeasurementSample(); UniformDistribution xd = new UniformDistribution(r); Random rng = new Random(seed); for (int i = 0; i < n; i++) { double x = xd.InverseLeftProbability(rng.NextDouble()); double ym = fv(x); double ys = fu(x); NormalDistribution yd = new NormalDistribution(ym, ys); double y = yd.InverseLeftProbability(rng.NextDouble()); //Console.WriteLine("{0}, {1}", x, new UncertainValue(y, ys)); UncertainMeasurement <double> point = new UncertainMeasurement <double>(x, y, ys); set.Add(point); } return(set); }
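// The x draw above is inverse-transform sampling: pushing a uniform [0, 1) variate through a
// distribution's inverse CDF (here InverseLeftProbability) yields a draw from that distribution.
// A self-contained example for the exponential distribution, whose inverse CDF is -ln(1 - u) / lambda:
static double SampleExponential(double lambda, Random rng)
{
    double u = rng.NextDouble();        // u ~ U[0, 1)
    return -Math.Log(1.0 - u) / lambda; // quantile function of Exp(lambda)
}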
public void TestHistogramUniformDistTimeSpan() { IDoubleDistribution dist = new UniformDistribution(m_model, "UniformDistribution", Guid.NewGuid(), (double)TimeSpan.FromMinutes(10).Ticks, (double)TimeSpan.FromMinutes(25).Ticks); _TestTimeSpanHistogram(dist, 1500, TimeSpan.FromMinutes(12).Ticks, TimeSpan.FromMinutes(24).Ticks, 10); }
public void BivariateLogisticRegression() { double[] c = new double[] { -0.1, 1.0 }; Random rng = new Random(1); UniformDistribution pointDistribution = new UniformDistribution(Interval.FromEndpoints(-4.0, 4.0)); BivariateSample sample1 = new BivariateSample(); MultivariateSample sample2 = new MultivariateSample(2); for (int k = 0; k < 1000; k++) { double x = pointDistribution.GetRandomValue(rng); double z = c[0] * x + c[1]; double ez = Math.Exp(z); double p = ez / (1.0 + ez); double y = (rng.NextDouble() < p) ? 1.0 : 0.0; sample1.Add(x, y); sample2.Add(x, y); } Console.WriteLine(sample1.Covariance / sample1.X.Variance / sample1.Y.Mean / (1.0 - sample1.Y.Mean)); Console.WriteLine(sample1.Covariance / sample1.X.Variance / sample1.Y.Variance); FitResult result1 = sample1.LinearLogisticRegression(); FitResult result2 = sample2.TwoColumns(0, 1).LinearLogisticRegression(); FitResult result3 = sample2.LogisticLinearRegression(1); for (int i = 0; i < result1.Dimension; i++) { Console.WriteLine("{0} {1} {2}", i, result1.Parameter(i), result3.Parameter(i) ); } }
public Dictionary <string, Distribution> GenerateNeighbour(Dictionary <string, Distribution> currentPar, double temp) { EPTDistribution dist = (EPTDistribution)currentPar.First().Value; WIPDepDistParameters x = dist.Par; WIPDepDistParameters par = new WIPDepDistParameters { WorkCenter = wc }; UniformDistribution parDist = new UniformDistribution(0, Parameter.TotalWeight); double u = parDist.Next(); // x is the original parameter set (input), par is a neighbouring parameter set // Change one parameter based on a probability if (isInRange("LBWIP", u)) { par.LBWIP = (int)Math.Max(1, newValue("LBWIP", x.LBWIP)); } else { par.LBWIP = x.LBWIP; } if (isInRange("UBWIP", u)) { par.UBWIP = (int)Math.Max(1, newValue("UBWIP", x.UBWIP)); } else { par.UBWIP = x.UBWIP; } if (isInRange("Tmin", u)) { par.Tmin = newValue("Tmin", x.Tmin); } else { par.Tmin = x.Tmin; } if (isInRange("Tmax", u)) { par.Tmax = newValue("Tmax", x.Tmax); } else { par.Tmax = x.Tmax; } if (isInRange("Tdecay", u)) { par.Tdecay = newValue("Tdecay", x.Tdecay); } else { par.Tdecay = x.Tdecay; } if (isInRange("Cmin", u)) { par.Cmin = newValue("Cmin", x.Cmin); } else { par.Cmin = x.Cmin; } if (isInRange("Cmax", u)) { par.Cmax = newValue("Cmax", x.Cmax); } else { par.Cmax = x.Cmax; } if (isInRange("Cdecay", u)) { par.Cdecay = newValue("Cdecay", x.Cdecay); } else { par.Cdecay = x.Cdecay; } Dictionary <string, Distribution> neighbour = new Dictionary <string, Distribution> { { wc, new EPTDistribution(par) } }; return(neighbour); bool isInRange(string parName, double u) { Parameter parameter = ParConfig[parName]; double pLower = parameter.CumulativeWeight - parameter.Weight; double pUpper = parameter.CumulativeWeight; if (u > pLower && u <= pUpper) { return(true); } else { return(false); } } double newValue(string parName, double value) { // Use Min-max feature scaling to determine the half width size of the new value // Large range at high temps (50%), small range at low temps (10%) double halfWidth = (0.5 - 0.1) * temp / maxTemp + 0.1; Parameter parameter = ParConfig[parName]; double lowerBound = Math.Max(parameter.LowerBound, value - value * halfWidth); double upperBound = Math.Min(parameter.UpperBound, value + value * halfWidth); UniformDistribution valueDist = new UniformDistribution(lowerBound, upperBound); return(valueDist.Next()); } }
public void TestHistogramUniformDistDouble() { IDoubleDistribution dist = new UniformDistribution(m_model, "UniformDistribution", Guid.NewGuid(), 5, 35); _TestDoubleHistogram(dist, 1500, 7, 33, (33 - 7)); }
public static void MetropolisHastings (ref decimal result, ref decimal numerator, ref decimal denominator , ref decimal partition_function , uint calculation_count_epoch , decimal[] initial_x, ref decimal[] final_x , IAction iaction, decimal[] step_half_width, uint[] seeds_for_step , uint seed_for_judge , IScalarFunction iscalar ) { //Check that the jump widths and their random seeds have the same dimension if (step_half_width.Length != seeds_for_step.Length) { throw new FormatException("Length of " + nameof(step_half_width) + "(" + step_half_width.Length + ")" + " with that of " + nameof(seeds_for_step) + "(" + seeds_for_step.Length + ")"); } //Check that the initial position and the random seeds have the same dimension if (initial_x.Length != seeds_for_step.Length) { throw new FormatException("Length of " + nameof(initial_x) + "(" + initial_x.Length + ")" + " with that of " + nameof(seeds_for_step) + "(" + seeds_for_step.Length + ")"); } //Make the jump widths positive decimal[] abs_step_half_width = new decimal[step_half_width.Length]; for (int j = 0; j < step_half_width.Length; j++) { abs_step_half_width[j] = Math.Abs(step_half_width[j]); } //Create the uniform random number generators List <UniformDistribution> list_ud = new List <UniformDistribution>(); for (int j = 0; j < step_half_width.Length; j++) { list_ud.Add(new UniformDistribution(seeds_for_step[j])); } UniformDistribution judge = new UniformDistribution(seed_for_judge); //Initial setup decimal[] xs = new decimal[step_half_width.Length]; decimal[] xs_candidate = new decimal[step_half_width.Length]; for (int k = 0; k < step_half_width.Length; k++) { xs[k] = initial_x[k]; xs_candidate[k] = xs[k]; } decimal action = 0m; decimal action_candidate = 0m; action = iaction.Calculate_f_u(xs); action_candidate = action; //Run the calculation for (int j = 0; j < calculation_count_epoch; j++) { //Compute a new candidate for x for (int k = 0; k < step_half_width.Length; k++) { //Perturb the k-th component of x xs_candidate[k] = xs[k] + list_ud[k].NextDecimal(abs_step_half_width[k], -abs_step_half_width[k]); } action_candidate = iaction.Calculate_f_u(xs_candidate); //If the candidate is accepted if (judge.NextDecimal() < TaylorSeriesDecimal.Exponential(action - action_candidate)) { //Add the value of the integrand numerator += iscalar.Calculate_f_u(xs_candidate); //Add the probability weight to the partition function partition_function += TaylorSeriesDecimal.Exponential(-action_candidate); for (int k = 0; k < step_half_width.Length; k++) { //Update x xs[k] = xs_candidate[k]; } //Update the action action = action_candidate; } else { } denominator++; } result = numerator / denominator; for (int j = 0; j < initial_x.Length; j++) { final_x[j] = xs[j]; } }
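// A minimal double-precision sketch of the accept/reject core above, targeting the density
// proportional to exp(-action(x)) in one dimension; System.Random replaces the decimal-based
// UniformDistribution machinery. Note one deliberate difference: standard practice records the
// current state every iteration, whereas the original accumulates only on acceptance.
static double[] Metropolis(Func<double, double> action, double x0, double stepHalfWidth, int iterations, Random rng)
{
    var samples = new double[iterations];
    double x = x0, a = action(x);
    for (int i = 0; i < iterations; i++)
    {
        double xCand = x + (2.0 * rng.NextDouble() - 1.0) * stepHalfWidth; // symmetric proposal
        double aCand = action(xCand);
        if (rng.NextDouble() < Math.Exp(a - aCand)) { x = xCand; a = aCand; } // accept with min(1, e^(a - aCand))
        samples[i] = x;
    }
    return samples;
}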
public UniformRandomFunction() { FitnessDistribution = new UniformDistribution(new ConstantControlParameter(0), new ConstantControlParameter(1)); }
public ParamChanger() { m_uniformDistribution = new UniformDistribution(); //UnityEngine.Random.seed = (int)Time.time; }
public void UniformOrderStatistics() { // Check that the order statistics of the uniform distribution are distributed as expected. Random rng = new Random(1); UniformDistribution u = new UniformDistribution(); Sample maxima = new Sample(); Sample minima = new Sample(); for (int i = 0; i < 100; i++) { double maximum = 0.0; double minimum = 1.0; for (int j = 0; j < 4; j++) { double value = u.GetRandomValue(rng); if (value > maximum) maximum = value; if (value < minimum) minimum = value; } maxima.Add(maximum); minima.Add(minimum); } // maxima should be distributed according to Beta(n,1) TestResult maxTest = maxima.KolmogorovSmirnovTest(new BetaDistribution(4, 1)); Assert.IsTrue(maxTest.LeftProbability < 0.95); // minima should be distributed according to Beta(1,n) TestResult minTest = minima.KolmogorovSmirnovTest(new BetaDistribution(1, 4)); Assert.IsTrue(minTest.LeftProbability < 0.95); }
public void SpearmanNullDistributionTest() { // pick independent distributions for x and y, which needn't be normal and needn't be related Distribution xDistribution = new UniformDistribution(); Distribution yDistribution = new CauchyDistribution(); Random rng = new Random(1); // generate bivariate samples of various sizes foreach (int n in TestUtilities.GenerateIntegerValues(4, 64, 8)) { Sample testStatistics = new Sample(); Distribution testDistribution = null; for (int i = 0; i < 128; i++) { BivariateSample sample = new BivariateSample(); for (int j = 0; j < n; j++) { sample.Add(xDistribution.GetRandomValue(rng), yDistribution.GetRandomValue(rng)); } TestResult result = sample.SpearmanRhoTest(); testStatistics.Add(result.Statistic); testDistribution = result.Distribution; } TestResult r2 = testStatistics.KuiperTest(testDistribution); Console.WriteLine("n={0} P={1}", n, r2.LeftProbability); Assert.IsTrue(r2.RightProbability > 0.05); Assert.IsTrue(testStatistics.PopulationMean.ConfidenceInterval(0.99).ClosedContains(testDistribution.Mean)); Assert.IsTrue(testStatistics.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(testDistribution.Variance)); } }
private void btnGenerate_Click(object sender, EventArgs e) { string error; if (!ValidateInput(out error)) { MessageBox.Show(error); return; } Distribution distribution = null; var generator = new Generator(); switch (distributionType) { case DistributionType.Uniform: var a = double.Parse(textBoxA.Text); var b = double.Parse(textBoxB.Text); distribution = new UniformDistribution(generator, a, b); break; case DistributionType.Gaussian: var mean = double.Parse(textBoxMParameter.Text); var standardDeviation = double.Parse(textBoxQParameter.Text); distribution = new GaussianDistribution(generator, mean, standardDeviation); break; case DistributionType.Exponential: var lambda = double.Parse(textBoxLambda.Text); distribution = new ExponentialDistribution(generator, lambda); break; case DistributionType.Gamma: var eta = Convert.ToInt32(numericUpDownEta.Text); var lambdaG = double.Parse(textBoxLambdaG.Text); distribution = new GammaDistribution(generator, eta, lambdaG); break; case DistributionType.Triangular: var aT = double.Parse(textBoxAT.Text); var bT = double.Parse(textBoxBT.Text); var isFirstVariant = radioBtnVariant1.Checked; distribution = new TriangularDistribution(generator, aT, bT, isFirstVariant); break; case DistributionType.Simpsons: var aS = double.Parse(textBoxAS.Text); var bS = double.Parse(textBoxBS.Text); distribution = new SimpsonsDistribution(generator, aS, bS); break; default: distribution = null; break; } if (ReferenceEquals(distribution, null)) { MessageBox.Show("Error: distribution is undefined"); return; } var n = Convert.ToInt32(numericUpDownLength.Text); generatedValues = new List <double>(); listBoxGeneratedValues.Items.Clear(); for (int i = 0; i < n; i++) { var value = distribution.GetNext(); generatedValues.Add(value); listBoxGeneratedValues.Items.Add(value); } DrawChart(); var m = CalcM(generatedValues); var d = CalcD(generatedValues, m); var q = Math.Sqrt(d); textBoxMActual.Text = m.ToString(CultureInfo.InvariantCulture); textBoxDActual.Text = d.ToString(CultureInfo.InvariantCulture); textBoxQActual.Text = q.ToString(CultureInfo.InvariantCulture); }
public ParamChanger() { m_uniformDistribution = new UniformDistribution(); UnityEngine.Random.seed = (int)Time.time; }
static void Main(string[] args) { string inputDirectory = @"C:\CSSLWaferFab\Input\WSC2021paper"; string outputDirectory = @"C:\CSSLWaferFab\Output\WaferAreaOptimiser"; ReaderWriter readerWriter = new ReaderWriter(inputDirectory, outputDirectory); Dictionary <string, Tuple <double, double> > realQueueLengths = readerWriter.GetRealQueueLengths(); List <string> workCenters = realQueueLengths.Keys.ToList(); // All work centers workCenters = new List <string>() // Remove to evaluate all work centers { "PHOTOLITH", "DRY ETCH" }; foreach (string workCenter in workCenters) { #region Parameters // Model parameters string wc = workCenter; DateTime initialDateTime = new DateTime(2019, 6, 1); string eptParameterFile = @"FittedEPTParameters - 2019-06-01.csv"; bool useInitialLots = true; Settings.WriteOutput = false; // Simulated annealing parameters double temp = 25; double cooldown = 0.993; //0.995 = 1102 solutions, 0.996 = 1378 solutions, 0.997 = 1834 solutions double meanObj = realQueueLengths[wc].Item1; double stdObj = realQueueLengths[wc].Item2; // Dictionary with parameters to optimise and optional weights Dictionary <string, Parameter> parameterConfiguration = new Dictionary <string, Parameter>() { { "LBWIP", new Parameter("LBWIP", false) }, { "UBWIP", new Parameter("UBWIP", false) }, { "Tmin", new Parameter("Tmin", false) }, { "Tmax", new Parameter("Tmax", true) }, { "Tdecay", new Parameter("Tdecay", true) }, { "Cmin", new Parameter("Cmin", true) }, { "Cmax", new Parameter("Cmax", true) }, { "Cdecay", new Parameter("Cdecay", true) } }; #endregion #region Variables and instances Optimiser optimiser = new Optimiser(wc, temp, parameterConfiguration); optimiser.SetBounds(inputDirectory); WaferAreaSim waferAreaSim = new WaferAreaSim(wc, eptParameterFile, inputDirectory, outputDirectory, initialDateTime, optimiser, useInitialLots); Dictionary <string, Distribution> currentPar, nextPar, bestPar; Tuple <double, double> currentRes, nextRes, bestRes; double currentCost, nextCost, bestCost, deltaCost; Dictionary <WIPDepDistParameters, Tuple <double, double> > results = new Dictionary <WIPDepDistParameters, Tuple <double, double> >(); // Save all solutions UniformDistribution uDist = new UniformDistribution(0, 1); #endregion #region Simulated annealing algorithm // Initial model parameters and results currentPar = waferAreaSim.InitialParameters; bestPar = optimiser.CopyParameters(currentPar); currentRes = waferAreaSim.RunSim(currentPar); bestRes = optimiser.CopyResults(currentRes); currentCost = Math.Abs(currentRes.Item1 - meanObj) + 0.5 * Math.Abs(currentRes.Item2 - stdObj); bestCost = currentCost; optimiser.AddResult(results, currentPar, currentRes); // Iterate and evaluate solutions until sufficiently cooled down int i = 0; while (temp > 0.1 && currentCost > Math.Min(1, 0.1 * (meanObj + stdObj))) // If a good solution is found, stop searching { nextPar = optimiser.GenerateNeighbour(currentPar, temp); nextRes = waferAreaSim.RunSim(nextPar); nextCost = Math.Abs(nextRes.Item1 - meanObj) + 0.5 * Math.Abs(nextRes.Item2 - stdObj); optimiser.AddResult(results, nextPar, nextRes); if (nextCost < currentCost) // New solution is better than current, accept new solution { currentPar = optimiser.CopyParameters(nextPar); currentRes = optimiser.CopyResults(nextRes); currentCost = nextCost; if (nextCost < bestCost) // New solution is the best so far, accept new best solution { bestPar = optimiser.CopyParameters(nextPar); bestRes = optimiser.CopyResults(nextRes); bestCost = nextCost; } } else { deltaCost = nextCost - currentCost; if (uDist.Next() < Math.Pow(Math.E, -deltaCost / temp)) // Accept solution if u ~ U[0,1] < e^-(dC/T) { currentPar = optimiser.CopyParameters(nextPar); currentRes = optimiser.CopyResults(nextRes); currentCost = nextCost; } } temp = temp * cooldown; // Reduce temperature i++; Console.WriteLine("\nResults for area {0}.", wc); Console.WriteLine("Iteration: {0}. Temperature {1}", i, temp); Console.WriteLine("Evaluated solution: {0}, {1}", nextRes.Item1, nextRes.Item2); Console.WriteLine("Current solution: {0}, {1}", currentRes.Item1, currentRes.Item2); Console.WriteLine("Best solution: {0}, {1}\n", bestRes.Item1, bestRes.Item2); } #endregion #region Write results to file // Write all results to a text file readerWriter.WriteAllSolutions(results, wc); // Write the best and current solution to a text file readerWriter.WriteFinalSolutions(currentPar, currentRes, bestPar, bestRes, wc); #endregion } }
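// The acceptance rule buried in the loop above, in isolation: a worse solution (deltaCost > 0)
// is accepted with probability exp(-deltaCost / temp), so uphill moves become rarer as the
// temperature cools. Hedged helper, not part of the original program:
static bool AcceptWorseSolution(double deltaCost, double temp, Random rng)
{
    return rng.NextDouble() < Math.Exp(-deltaCost / temp); // Metropolis criterion
}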
/// <summary> /// Shows a new chart in a default form. /// </summary> /// <param name="dist">The distribution.</param> /// <param name="function">The distribution function to plot.</param> /// <param name="numInterpolatedValues">The number of interpolated values.</param> /// <remarks> /// Equivalent to: /// <code> /// NMathStatsChart.Show( ToChart( dist, function, numInterpolatedValues ) ); /// </code> /// </remarks> public static void Show( UniformDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100 ) { Show( ToChart( dist, function, numInterpolatedValues ) ); }
public void MultivariateLinearRegression() { int outputIndex = 2; double[] c = new double[] { -1.0, 2.0, -3.0, 4.0 }; Random rng = new Random(1001110000); UniformDistribution pointDistribution = new UniformDistribution(Interval.FromEndpoints(-4.0, 4.0)); MultivariateSample sample = new MultivariateSample(c.Length); for (int k = 0; k < 1000; k++) { double[] row = new double[sample.Dimension]; double z = 0.0; for (int i = 0; i < row.Length; i++) { if (i == outputIndex) { z += c[i]; } else { row[i] = pointDistribution.GetRandomValue(rng); z += row[i] * c[i]; } } double ez = Math.Exp(z); double p = ez / (1.0 + ez); row[outputIndex] = (rng.NextDouble() < p) ? 1.0 : 0.0; sample.Add(row); } FitResult result = sample.LogisticLinearRegression(outputIndex); for (int i = 0; i < result.Dimension; i++) { Console.WriteLine(result.Parameter(i)); Assert.IsTrue(result.Parameter(i).ConfidenceInterval(0.99).ClosedContains(c[i])); } }
public void MultivariateMoments() { // create a random sample MultivariateSample M = new MultivariateSample(3); Distribution d0 = new NormalDistribution(); Distribution d1 = new ExponentialDistribution(); Distribution d2 = new UniformDistribution(); Random rng = new Random(1); int n = 10; for (int i = 0; i < n; i++) { M.Add(d0.GetRandomValue(rng), d1.GetRandomValue(rng), d2.GetRandomValue(rng)); } // test that moments agree for (int i = 0; i < 3; i++) { int[] p = new int[3]; p[i] = 1; Assert.IsTrue(TestUtilities.IsNearlyEqual(M.Column(i).Mean, M.Moment(p))); p[i] = 2; Assert.IsTrue(TestUtilities.IsNearlyEqual(M.Column(i).Variance, M.MomentAboutMean(p))); for (int j = 0; j < i; j++) { int[] q = new int[3]; q[i] = 1; q[j] = 1; Assert.IsTrue(TestUtilities.IsNearlyEqual(M.TwoColumns(i, j).Covariance, M.MomentAboutMean(q))); } } }
public void SampleKolmogorovSmirnovTest() { // this test has a whiff of meta-statistics about it // we want to make sure that the KS test statistic D is distributed according to the Kolmogorov // distribution; to do this, we create a sample of D statistics and do KS/Kuiper tests // comparing it to the claimed Kolmogorov distribution // start with any ol' underlying distribution Distribution distribution = new UniformDistribution(Interval.FromEndpoints(-2.0, 4.0)); // generate some samples from it, and for each one get a D statistic from a KS test Sample DSample = new Sample(); Distribution DDistribution = null; for (int i = 0; i < 25; i++) { // the sample size must be large enough that the asymptotic assumptions are satisfied // at the moment this test fails if we make the sample size much smaller; we should // be able to shrink this number when we expose the finite-sample distributions Sample sample = CreateSample(distribution, 250, i); TestResult ks = sample.KolmogorovSmirnovTest(distribution); double D = ks.Statistic; Console.WriteLine("D = {0}", D); DSample.Add(D); DDistribution = ks.Distribution; } // check on the mean Console.WriteLine("m = {0} vs. {1}", DSample.PopulationMean, DDistribution.Mean); Assert.IsTrue(DSample.PopulationMean.ConfidenceInterval(0.95).ClosedContains(DDistribution.Mean), String.Format("{0} vs. {1}", DSample.PopulationMean, DDistribution.Mean)); // check on the standard deviation Console.WriteLine("s = {0} vs. {1}", DSample.PopulationStandardDeviation, DDistribution.StandardDeviation); Assert.IsTrue(DSample.PopulationStandardDeviation.ConfidenceInterval(0.95).ClosedContains(DDistribution.StandardDeviation)); // do a KS test comparing the sample to the expected distribution TestResult kst = DSample.KolmogorovSmirnovTest(DDistribution); Console.WriteLine("D = {0}, P = {1}", kst.Statistic, kst.LeftProbability); Assert.IsTrue(kst.LeftProbability < 0.95); // do a Kuiper test comparing the sample to the expected distribution TestResult kut = DSample.KuiperTest(DDistribution); Console.WriteLine("V = {0}, P = {1}", kut.Statistic, kut.LeftProbability); Assert.IsTrue(kut.LeftProbability < 0.95); }
}// end constructor /* * Since the method is internal, it cannot be invoked from a program external to the library. * The only method that can invoke Run is Model's Compute method, which does so only if the model is * judged valid. */ internal override void Run() { SGNFnAParam localA = null; GenObject oTV = null; GenObject oMu = null; GenObject oSigma = null; GenObject oME = null; YGen genY = YGen.EmptyInstance; TrueValuesGen genTV = null; double[] burninMu; double[] burninSigma; double[] burninCV = null; double[] sampleMu; double[] sampleSigma; double[] sampleCV = null; double mu; double sigma; int iter = -1, savedIter; double muCondMean; double yBar; double muCondSD; double[] pLim = new double[2]; double p; double[] muLim = new double[] { this.MuLower, this.MuUpper }; double logSigmaSD; try { logSigmaSD = 1 / Math.Sqrt(this.LogSigmaPrec); if (ME.Any) { if (ME.ThroughCV) { if (OutcomeIsLogNormallyDistributed) { oTV = new TrueValue_CV_LogN_GenObject(); } else { oTV = new TrueValue_CV_Norm_GenObject(); } } else { //oTV = new TrueValue_SD_GenObject(); } } //# modif_0.12 int combinedN = this.Data.N + (this.PastData.Defined ? PastData.N : 0); if (ME.ThroughCV && !OutcomeIsLogNormallyDistributed) { oMu = new MuTruncatedData_GenObject(combinedN); //# modif_0.12 oSigma = GenObject.GetSigmaTruncatedDataLNormGenObject(combinedN, this.LogSigmaMu, logSigmaSD); //# modif_0.12 } else { oSigma = GenObject.GetSigmaGenObject(combinedN, this.LogSigmaMu, logSigmaSD); //# modif_0.12 } localA = oSigma.A.Clone(); if (ME.Any && !ME.Known) { oME = GenObject.GetMeGenObject(this.ME, this.OutcomeIsLogNormallyDistributed, this.Data.N); } int nIterations = NBurnin + NIter * NThin; //arrays for the chains sampleMu = Result.Chains.GetChain("muSample"); sampleSigma = Result.Chains.GetChain("sdSample"); burninMu = Result.Chains.GetChain("muBurnin"); burninSigma = Result.Chains.GetChain("sdBurnin"); if (ME.ThroughCV) { sampleCV = Result.Chains.GetChain("cvSample"); burninCV = Result.Chains.GetChain("cvBurnin"); } bool inestimableLowerLimit = false; //Initial values for mu and sigma mu = InitMu; sigma = InitSigma; savedIter = 0; // for the samples if (this.Data.AnyCensored) { genY = YGen.Inits(this.Data, mu, sigma, meThroughCV: this.ME.ThroughCV, logNormalDistrn: OutcomeIsLogNormallyDistributed); } if (ME.Any) { ME.Parm = ME.InitialValue; } //Main loop for (iter = 0; iter < nIterations; iter++) { if (ME.Any) { genTV = TrueValuesGen.GetInstance(genY, this.Data, mu, sigma, this.ME, logNormalDistrn: OutcomeIsLogNormallyDistributed, o: oTV); } if (this.Data.AnyCensored) { //y.gen(true.values, data, sigma, me, outcome.is.logNormally.distributed, mu=mu) //true.values and me are not taken into account here ... genY = YGen.GetInstance(this.ME, genTV, this.Data, mu, sigma, OutcomeIsLogNormallyDistributed); } OutLogoutMoments moments = OutLogoutMoments.Get(this.ME.Any, this.OutcomeIsLogNormallyDistributed, this.Data, genY, genTV); double sigmaBeta = (moments.Sum2 - 2 * mu * moments.Sum + this.Data.N * mu * mu) / 2.0; if (PastData.Defined) { sigmaBeta = sigmaBeta + PastData.N / 2.0 * Math.Pow(PastData.Mean - mu, 2) + PastData.NS2 / 2.0; } double[] start = new double[0]; if (this.ME.ThroughCV && !OutcomeIsLogNormallyDistributed) { //here // A <- c(o.sigma$A, list(b=sigma.beta, mu=mu)) localA = oSigma.A.Clone(); localA.B = sigmaBeta; localA.Mu = mu; start = Tools.Combine(sigma); inestimableLowerLimit = false; } else { localA.B = sigmaBeta; start = oSigma.Start(localA); inestimableLowerLimit = true; } Icdf icdf = new Icdf(oSigma, localA, Tools.Combine(0, double.PositiveInfinity)); sigma = icdf.Bidon(start, inestimableLowerLimit); yBar = moments.Sum / this.Data.N; muCondMean = this.PastData.Defined ? (moments.Sum + PastData.N * PastData.Mean) / combinedN : yBar; // # new_0.12 if (this.ME.ThroughCV && !this.OutcomeIsLogNormallyDistributed) { mu = MuTruncatedGen.GetInstance(oMu, muLim, muCondMean, sigma).Mu; } else { muCondSD = sigma / Math.Sqrt(combinedN); pLim = NormalDistribution.PNorm(muLim.Substract(muCondMean).Divide(muCondSD)); p = UniformDistribution.RUnif(1, pLim[0], pLim[1])[0]; mu = NormalDistribution.QNorm(p, mu: muCondMean, sigma: muCondSD); } //# Sample Measurement Error from its posterior density if (this.ME.Any && !this.ME.Known) { this.ME.Parm = MEParmGen.GetInstance(oME, this.ME, this.Data, genY, genTV).Parm; } if (iter < NBurnin) { if (MonitorBurnin) { burninMu[iter] = mu; burninSigma[iter] = sigma; if (this.ME.Any && !this.ME.Known) { burninCV[iter] = ME.Parm; } } } else if ((iter - NBurnin) % NThin == 0) { sampleMu[savedIter] = mu; sampleSigma[savedIter] = sigma; if (this.ME.Any && !this.ME.Known) { sampleCV[savedIter] = ME.Parm; } savedIter++; } }// end of main iteration loop } catch (Exception ex) { this.Result.Messages.AddError(WEException.GetStandardMessage(ex, iter, Result.PRNGSeed), this.ClassName); return; } } //end Run
/// <summary> /// Returns a new line chart plotting the specified function of the given distribution for 0.0001 <= p <= 0.9999. /// </summary> /// <param name="dist">The distribution.</param> /// <param name="function">The distribution function to plot.</param> /// <param name="numInterpolatedValues">The number of interpolated values.</param> /// <returns>A new chart.</returns> public static ChartControl ToChart( UniformDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100 ) { ChartControl chart = GetDefaultChart(); Update( ref chart, dist, function, numInterpolatedValues ); return chart; }
public void MultivariateLinearRegressionSimple() { // define model y = a + b0 * x0 + b1 * x1 + noise double a = 1.0; double b0 = -2.0; double b1 = 3.0; ContinuousDistribution x0distribution = new CauchyDistribution(10.0, 5.0); ContinuousDistribution x1distribution = new UniformDistribution(Interval.FromEndpoints(-10.0, 20.0)); ContinuousDistribution noise = new NormalDistribution(0.0, 10.0); // draw a sample from the model Random rng = new Random(1); MultivariateSample sample = new MultivariateSample("x0", "x1", "y"); FrameTable table = new FrameTable(); table.AddColumns <double>("x0", "x1", "y"); for (int i = 0; i < 100; i++) { double x0 = x0distribution.GetRandomValue(rng); double x1 = x1distribution.GetRandomValue(rng); double eps = noise.GetRandomValue(rng); double y = a + b0 * x0 + b1 * x1 + eps; sample.Add(x0, x1, y); table.AddRow(x0, x1, y); } // do a linear regression fit on the model ParameterCollection oldResult = sample.LinearRegression(2).Parameters; MultiLinearRegressionResult newResult = table["y"].As <double>().MultiLinearRegression( table["x0"].As <double>(), table["x1"].As <double>() ); // the result should have the appropriate dimension Assert.IsTrue(oldResult.Count == 3); Assert.IsTrue(newResult.Parameters.Count == 3); // The parameters should match the model Assert.IsTrue(oldResult[0].Estimate.ConfidenceInterval(0.90).ClosedContains(b0)); Assert.IsTrue(oldResult[1].Estimate.ConfidenceInterval(0.90).ClosedContains(b1)); Assert.IsTrue(oldResult[2].Estimate.ConfidenceInterval(0.90).ClosedContains(a)); Assert.IsTrue(newResult.CoefficientOf(0).ConfidenceInterval(0.99).ClosedContains(b0)); Assert.IsTrue(newResult.CoefficientOf("x1").ConfidenceInterval(0.99).ClosedContains(b1)); Assert.IsTrue(newResult.Intercept.ConfidenceInterval(0.99).ClosedContains(a)); // The residuals should be compatible with the model predictions double ssr = 0.0; for (int i = 0; i < table.Rows.Count; i++) { FrameRow row = table.Rows[i]; double x0 = (double)row["x0"]; double x1 = (double)row["x1"]; double yp = newResult.Predict(x0, x1).Value; double y = (double)row["y"]; double z = y - yp; Assert.IsTrue(TestUtilities.IsNearlyEqual(newResult.Residuals[i], z)); ssr += z * z; } Assert.IsTrue(TestUtilities.IsNearlyEqual(newResult.SumOfSquaredResiduals, ssr)); }
public void GetStatistic() { Random rand = new Random(); double[] y = new double[N]; for (int i = 0; i < N; i++) { double ksi = rand.NextDouble(); double x = ksi * (b - a) + a; y[i] = 1 / (x + 1); } double[] v = y.OrderBy((x) => x).ToArray(); int _n; if (N <= 100) { _n = (int)Math.Sqrt(N); } else { _n = (int)(4 * Math.Log(N)); } double[] F = new double[N]; for (int i = 0; i < N; i++) { int w = 0; for (int j = 0; j < N; j++) { if (v[i] == v[j]) { w++; } } F[i] = (double)w / N; if (i > 0) { F[i] += F[i - 1]; } } Histogram = new LineSeries() { Title = "Empirical distribution function", Color = OxyColors.Green }; for (int i = 0; i < N; i++) { Histogram.Points.Add(new DataPoint(v[i], (i > 0) ? Math.Round(F[i - 1], 1) : 0)); Histogram.Points.Add(new DataPoint(v[i], Math.Round(F[i], 1))); } NormalDistributon _norm = new NormalDistributon(v); Func <double, double> tf = (arg) => { if (arg < -1) { return(0); } else if (arg >= -1.0 / 5) /* -1.0 / 5 here and below: the original -1 / 5 is integer division and evaluates to 0 */ { return(1); } else { return(_norm.Function(arg)); } }; NormalFunction = new FunctionSeries(tf, -1, -1.0 / 5, dx) { Title = "Normal distribution", Color = OxyColors.Orange }; ExponentialDistribution _exp = new ExponentialDistribution(v); Func <double, double> tf1 = (arg) => { if (arg < -1) { return(0); } else if (arg >= -1.0 / 5) { return(1); } else { return(_exp.Function(arg)); } }; ExponentialFunction = new FunctionSeries(tf1, -1, -1.0 / 5, dx) { Title = "Exponential distribution", Color = OxyColors.Plum }; MyDistribution _myfun = new MyDistribution(); Func <double, double> tf3 = (arg) => { if (arg < -1) { return(0); } else if (arg >= -1.0 / 5) { return(1); } else { return(_myfun.Function(arg)); } }; MyFunction = new FunctionSeries(tf3, -1, -1.0 / 5, dx) { Title = "Theoretical distribution function", Color = OxyColors.Red }; UniformDistribution _uni = new UniformDistribution(v); Func <double, double> tf2 = (arg) => { if (arg < -1) { return(0); } else if (arg >= -1.0 / 5) { return(1); } else { return(_uni.Function(arg)); } }; UniformFunction = new FunctionSeries(tf2, -1, -1.0 / 5, dx) { Title = "Uniform distribution", Color = OxyColors.Blue }; //////////////////////////////////////////////////////////////////////////// double[] _pirsonAnswers = new double[4]; Pirson _pirson = new Pirson(); _pirsonAnswers[0] = _pirson._Pirson(v, _norm); _pirsonAnswers[1] = _pirson._Pirson(v, _exp); _pirsonAnswers[2] = _pirson._Pirson(v, _uni); _pirsonAnswers[3] = _pirson._Pirson(v, _myfun); //////////////////////////////////////// int _N1 = 30; double[] y1 = new double[_N1]; for (int i = 0; i < 30; i++) { double ksi = rand.NextDouble(); double x = ksi * (b - a) + a; y1[i] = 1 / (x + 1); } v = y1.OrderBy((x) => x).ToArray(); double[] _colmoAnswers = new double[4]; Colmogorov _colmo = new Colmogorov(); _colmoAnswers[0] = _colmo._Colmogorov(v, _norm); _colmoAnswers[1] = _colmo._Colmogorov(v, _exp); _colmoAnswers[2] = _colmo._Colmogorov(v, _uni); _colmoAnswers[3] = _colmo._Colmogorov(v, _myfun); //////////////////////////////////////////////////////////////////////// int _N2 = 50; double[] y2 = new double[_N2]; for (int i = 0; i < 50; i++) { double ksi = rand.NextDouble(); double x = ksi * (b - a) + a; y2[i] = 1 / (x + 1); } v = y2.OrderBy((x) => x).ToArray(); double[] _mizesAnswers = new double[4]; Mizes _mizes = new Mizes(); _mizesAnswers[0] = _mizes._Mizes(v, _norm); _mizesAnswers[1] = _mizes._Mizes(v, _exp); _mizesAnswers[2] = _mizes._Mizes(v, _uni); _mizesAnswers[3] = _mizes._Mizes(v, _myfun); string[] answerString = { "-Normal distribution ", "-Exponential distribution ", "-Uniform distribution ", "-Theoretical distribution function " }; for (int i = 0; i < 4; i++) { bool _yes = false; if (_pirsonAnswers[i] != 0) { answerString[i] += "the Pearson test does not reject with probability " + _pirsonAnswers[i].ToString() + ' '; _yes = true; } if (_colmoAnswers[i] != 0) { if (_yes) { answerString[i] += ",\n"; } answerString[i] += "the Kolmogorov test does not reject with probability " + _colmoAnswers[i].ToString() + ' '; _yes = true; } if (_mizesAnswers[i] != 0) { if (_yes) { answerString[i] += ",\n"; } answerString[i] += "the von Mises test does not reject with probability " + _mizesAnswers[i].ToString(); } if ((_mizesAnswers[i] == 0) && (_colmoAnswers[i] == 0) && (_pirsonAnswers[i] == 0)) { answerString[i] += "none of the tests confirm it"; } answerString[i] += ";\n"; } this.Result = ""; this.Result = answerString.Aggregate((working, next) => next + working); }
/// <summary> /// Updates the given chart with the specified distribution. /// </summary> /// <param name="chart">A chart.</param> /// <param name="dist">The distribution.</param> /// <param name="function">The distribution function to plot.</param> /// <param name="numInterpolatedValues">The number of interpolated values.</param> /// <returns>A new chart.</returns> /// <remarks> /// Plots the specified function of the given distribution for 0.0001 <= p <= 0.9999. /// <br/> /// Titles are added only if chart does not currently contain any titles. /// <br/> /// chart.Series[0] is replaced, or added if necessary. /// </remarks> public static void Update( ref ChartControl chart, UniformDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100 ) { List<string> titles = new List<string>() { "UniformDistribution", String.Format("lower={0}, upper={1}", dist.LowerLimit, dist.UpperLimit) }; UpdateContinuousDistribution( ref chart, dist, titles, function, numInterpolatedValues ); }
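// Hedged usage sketch for the charting helpers above (Show, ToChart, Update), assuming an NMath
// UniformDistribution constructed from lower and upper limits, as dist.LowerLimit and
// dist.UpperLimit in the chart titles suggest:
//     var dist = new UniformDistribution(0.0, 10.0);
//     NMathStatsChart.Show(dist, DistributionFunction.CDF);           // show CDF in a default form
//     ChartControl chart = NMathStatsChart.ToChart(dist);             // PDF with 100 interpolated values
//     NMathStatsChart.Update(ref chart, dist, DistributionFunction.PDF, 200);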