private void SetNextOrderAmount()
{
    switch (OptionA)
    {
        case EnumTypes.BuyerAOptions.Static:
            NextOrderAmount = this.Amount;
            break;

        case EnumTypes.BuyerAOptions.Random:
            Random rnd = new Random();
            int rand = rnd.Next(Convert.ToInt32(this.MinAmount), Convert.ToInt32(this.MaxAmount) + 1);
            NextOrderAmount = rand;
            break;

        case EnumTypes.BuyerAOptions.Poisson:
            MathNet.Numerics.Distributions.Poisson poisson = new MathNet.Numerics.Distributions.Poisson(this.Lambda);
            NextOrderAmount = poisson.Sample();
            break;

        case EnumTypes.BuyerAOptions.Gauss:
            MathNet.Numerics.Distributions.Normal normal = new MathNet.Numerics.Distributions.Normal(this.MeanOptionA, this.DeviationOptionA);
            NextOrderAmount = normal.Sample();
            break;
    }
}
public List<double> GetRandomAccumulatedOfNormal(ref List<double> normalAccumulated)
{
    var normalD = new MathNet.Numerics.Distributions.Normal();
    int testCnt = 0;
    double pValue = 0.0;
    double[] temp = new double[ScenCnt];
    List<double> normal = null;

    while (pValue < 0.95)
    {
        normalAccumulated.CopyTo(temp);
        normal = GetRandomOfNormal();

        for (int scenSeq = 0; scenSeq < normalAccumulated.Count(); scenSeq++)
        {
            temp[scenSeq] += normal[scenSeq];
        }

        var sw = new Accord.Statistics.Testing.ShapiroWilkTest(temp.ToArray());
        pValue = sw.PValue;
        testCnt++;
    }

    for (int scenSeq = 0; scenSeq < normalAccumulated.Count(); scenSeq++)
    {
        normalAccumulated[scenSeq] += normal[scenSeq];
    }

    return normal;
}
// 1,000 random values
public List<double> GetRandomOfNormal()
{
    List<double> rdSet = new List<double>();
    var normalD = new MathNet.Numerics.Distributions.Normal();
    double pValue = 0.0;
    int testCnt = 0;

    while (pValue < 0.9)
    {
        rdSet.Clear();
        for (int i = 0; i < ScenCnt; i++)
        {
            rdSet.Add(normalD.InverseCumulativeDistribution(RD.NextDouble()));
        }

        // Create a new Shapiro-Wilk test:
        var sw = new Accord.Statistics.Testing.ShapiroWilkTest(rdSet.ToArray());
        pValue = sw.PValue;
        testCnt++;
    }

    return rdSet;
}
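// A minimal, self-contained sketch of the same pattern as GetRandomOfNormal above:
// inverse-transform sampling of standard-normal draws, re-drawn until a Shapiro-Wilk
// test no longer rejects normality. ScenCnt and RD above are assumed to be a scenario
// count and a shared RNG; they are replaced by local equivalents here for illustration only.
// Requires: System.Linq, MathNet.Numerics, Accord.Statistics.
public static double[] SampleUntilNormalLooking(int count = 1000, double minPValue = 0.9)
{
    var rng = new MathNet.Numerics.Random.MersenneTwister();
    var stdNormal = new MathNet.Numerics.Distributions.Normal();
    double[] draws;
    do
    {
        // U ~ Uniform(0,1) mapped through the inverse CDF gives Z ~ N(0,1).
        draws = Enumerable.Range(0, count)
            .Select(_ => stdNormal.InverseCumulativeDistribution(rng.NextDouble()))
            .ToArray();
    }
    while (new Accord.Statistics.Testing.ShapiroWilkTest(draws).PValue < minPValue);
    return draws;
}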
public static double GetLambda(double[] sample)
{
    List<double> arr = new List<double>(sample);
    var normal2 = new MathNet.Numerics.Distributions.Normal();
    arr.Sort();

    double dMax = 0.0;
    double dp, dm;
    double MX = StatisticData.CalculateMathExpectation(sample);
    double D = StatisticData.CalculateDispersoin(sample, MX);

    for (int i = 0; i < arr.Count; i++)
    {
        double v = (arr[i] - MX) / Math.Sqrt(D);
        dp = Math.Abs((double)(i + 1) / arr.Count - normal2.CumulativeDistribution(v));
        dm = Math.Abs(normal2.CumulativeDistribution(v) - (double)i / arr.Count);
        if (dp > dMax)
        {
            dMax = dp;
        }
        if (dm > dMax)
        {
            dMax = dm;
        }
    }

    return dMax * Math.Sqrt(arr.Count);
}
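// For context: GetLambda computes the Kolmogorov statistic lambda = D * sqrt(n), where D is
// the largest gap between the empirical CDF of the standardized sample and the standard
// normal CDF. StatisticData.CalculateMathExpectation / CalculateDispersoin are assumed to
// return the sample mean and variance. A hedged usage sketch (requires System.Linq):
double[] sample = new MathNet.Numerics.Distributions.Normal(10.0, 2.0).Samples().Take(500).ToArray();
double lambda = GetLambda(sample);
// For data that really are normal, lambda should typically stay below ~1.36,
// the asymptotic 5% critical value of the Kolmogorov distribution.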
private Matrix<double> RandomizeGaussianMatrixMaker(double damagedSD)
{
    MathNet.Numerics.Distributions.Normal normalDist = new MathNet.Numerics.Distributions.Normal(0, damagedSD);
    Matrix<double> newMatrix = DenseMatrix.CreateRandom(SendLayer.UnitCount, ReceiveLayer.UnitCount, normalDist);
    return newMatrix;
}
// sensor reading
public void get_corrupted_accelerations(ref Vector<float> xdotdot_bf_corrupted, Vector<float> xdotdot_ideal, Vector<float> attitude_ideal)
{
    // The accelerations in the earth frame are perfectly known from the differential equation.
    // But the accelerometer returns the accelerations in the body frame, therefore a transformation is needed.
    // R is a transformation matrix from the body to the earth frame, so R.Inverse() transforms from earth to body frame.
    Matrix<float> R = Matrix<float>.Build.Dense(3, 3, 0);
    rotation(ref R, attitude_ideal);
    xdotdot_bf_corrupted = R.Inverse() * xdotdot_ideal;

    // If there is no acceleration in the earth frame (xdotdot_ideal = 0), the accelerometer would still measure the gravity acceleration.
    // Therefore the gravity vector has to be added.
    float[] gravity = { 0, 0, GRAVITY };
    xdotdot_bf_corrupted = xdotdot_bf_corrupted + R.Inverse() * Vector<float>.Build.DenseOfArray(gravity);

    // add chip tilt
    rotation(ref R, chip_tilt);
    xdotdot_bf_corrupted = R.Inverse() * xdotdot_bf_corrupted;

    MathNet.Numerics.Distributions.Normal distribution = new MathNet.Numerics.Distributions.Normal(0.0, 1.0); // using standard normal distribution

    // add noise and an offset
    xdotdot_bf_corrupted[0] += (float)distribution.Sample() * ACCELEROMETER_STANDEV_X + ACCELEROMETER_OFFSET_X;
    xdotdot_bf_corrupted[1] += (float)distribution.Sample() * ACCELEROMETER_STANDEV_Y + ACCELEROMETER_OFFSET_Y;
    xdotdot_bf_corrupted[2] += (float)distribution.Sample() * ACCELEROMETER_STANDEV_Z + ACCELEROMETER_OFFSET_Z;

    // remove the offset from the calibration
    xdotdot_bf_corrupted -= calibrated_offsets;
}
/// <summary>
/// Produce a vector of curves where the element at index i is a realization of a simulation at
/// simulationDates i. If you require the rates directly use <see cref="GetSimulatedRates(Date[])"/>
/// </summary>
/// <param name="simulationDates">Dates on which the simulation is run. Must all be greater than the
/// anchor date.</param>
/// <returns></returns>
public ICurve[] GetSimulatedCurves(Date[] simulationDates, Currency curveCcy = null)
{
    if (curveCcy == null)
    {
        curveCcy = Currency.ANY;
    }
    var results = new ICurve[simulationDates.Length];
    var dist = new Normal();
    var previousDate = anchorDate;
    var previousRates = initialRates.Clone() as double[];
    var currentRates = new double[initialRates.Length];

    // Iterate through the simulation dates
    for (var simCounter = 0; simCounter < simulationDates.Length; simCounter++)
    {
        var currentDate = simulationDates[simCounter];
        var dt = (currentDate - previousDate) / 365.0;
        var sdt = Math.Sqrt(dt);
        var curveDates = new Date[initialRates.Length];

        // Random realizations to be used in simulation.
        var eps1 = dist.Sample();
        var eps2 = dist.Sample();
        var eps3 = dist.Sample();

        // Iterate through the dates on the curve
        for (var i = 0; i < initialRates.Length; i++)
        {
            curveDates[i] = simulationDates[simCounter].AddTenor(tenors[i]);
            if (useRelative)
            {
                //TODO: add mean correction.
                var exponent = components[0, i] * vols[0] * sdt * eps1 +
                               components[1, i] * vols[1] * sdt * eps2 +
                               components[2, i] * vols[2] * sdt * eps3;
                currentRates[i] = previousRates[i] * Math.Exp(exponent);
            }
            else
            {
                var change = components[0, i] * vols[0] * sdt * eps1 +
                             components[1, i] * vols[1] * sdt * eps2 +
                             components[2, i] * vols[2] * sdt * eps3;
                currentRates[i] = previousRates[i] + change;
                if (floorAtZero)
                {
                    currentRates[i] = Math.Max(0.0, currentRates[i]);
                }
            }
        }

        currentRates = currentRates.Multiply(multiplier);
        results[simCounter] = new DatesAndRates(curveCcy, simulationDates[simCounter], curveDates, currentRates,
            simulationDates[simCounter].AddMonths(360));
        previousRates = currentRates.Clone() as double[];
        previousDate = new Date(currentDate);
    }

    return results;
}
public String get_z()
{
    double z;
    MathNet.Numerics.Distributions.Normal result = new MathNet.Numerics.Distributions.Normal();
    z = result.InverseCumulativeDistribution(probabilidad_cumplir);
    return Convert.ToString(z);
}
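// For reference, get_z above returns the standard-normal quantile (z-score) of the field
// probabilidad_cumplir ("probability of fulfilment"). A minimal standalone check,
// assuming only MathNet.Numerics:
var stdNormal = new MathNet.Numerics.Distributions.Normal();
double z95  = stdNormal.InverseCumulativeDistribution(0.95);  // ~1.6449
double z975 = stdNormal.InverseCumulativeDistribution(0.975); // ~1.9600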
public Random(int mean, int min, int max)
{
    _mean = mean;
    _min = min;
    _max = max;
    var stddev = Math.Min(Math.Abs(mean - min), Math.Abs(max - mean)) / 3.0;
    _dist = new MathNet.Numerics.Distributions.Normal(mean, stddev);
}
public void get_corrupted_height(ref float height_corrupted, float height_ideal)
{
    MathNet.Numerics.Distributions.Normal distribution = new MathNet.Numerics.Distributions.Normal(0.0, 1.0);

    // add noise and an offset
    height_corrupted = height_ideal + (float)distribution.Sample() * BAROMETER_STANDEV + BAROMETER_OFFSET;

    // remove the offset from the calibration
    height_corrupted -= calibrated_offset;
}
private static double CumulativeProbabilityNormalDistributionFunction(double x)
{
    /*
     * Related links
     * https://stackoverflow.com/questions/1662943/standard-normal-distribution-z-value-function-in-c-sharp
     * https://numerics.mathdotnet.com/
     * https://en.wikipedia.org/wiki/Normal_distribution
     * https://en.wikipedia.org/wiki/Error_function
     */
    MathNet.Numerics.Distributions.Normal result = new MathNet.Numerics.Distributions.Normal();
    return result.CumulativeDistribution(x);
}
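// Quick sanity checks for the wrapper above (standard normal CDF, Phi(x)):
// Phi(0) = 0.5 and Phi(1.96) ~ 0.975. Phi can also be written via the error function as
// Phi(x) = 0.5 * (1 + erf(x / sqrt(2))), which MathNet exposes directly:
double p0 = CumulativeProbabilityNormalDistributionFunction(0.0);   // 0.5
double p1 = CumulativeProbabilityNormalDistributionFunction(1.96);  // ~0.975
double p1Alt = 0.5 * (1.0 + MathNet.Numerics.SpecialFunctions.Erf(1.96 / Math.Sqrt(2.0)));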
public void GetRandom(out double[] rdIR, out double[] rdEQ)
{
    var normalD = new MathNet.Numerics.Distributions.Normal();
    rdIR = new double[EndMth];
    rdEQ = new double[EndMth];
    for (int i = 0; i < EndMth; i++)
    {
        rdIR[i] = normalD.InverseCumulativeDistribution(RD.NextDouble());
        rdEQ[i] = normalD.InverseCumulativeDistribution(RD.NextDouble());
    }
}
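// The loop above is inverse-transform sampling driven by a shared RNG (RD).
// An equivalent sketch that lets MathNet draw the samples itself, assuming the goal is
// simply N(0,1) noise per month; EndMth is replaced by an illustrative 12 here:
var rd = new MathNet.Numerics.Random.MersenneTwister();
var normal = new MathNet.Numerics.Distributions.Normal(0.0, 1.0, rd);
double[] rdIR = new double[12];
double[] rdEQ = new double[12];
normal.Samples(rdIR); // fills the array in place
normal.Samples(rdEQ);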
public static IEnumerable<Z2<float>> TestData()
{
    var g = new MathNet.Numerics.Distributions.Normal(0.0, 1.0);
    var randy = new MathNet.Numerics.Random.MersenneTwister();
    g.RandomSource = randy;

    var dblsX = new double[100000];
    var dblsY = new double[100000];
    g.Samples(dblsX);
    g.Samples(dblsY);

    return dblsX.Select((d, i) => new Z2<float>((float)d, (float)dblsY[i]));
}
public Carnivore(Random rng, Position pos = null, IAnimalParams customParameters = null) : base(rng, pos)
{
    if (customParameters is null)
    {
        Params = new CarnivoreParams();
    }
    else
    {
        Params = customParameters;
    }

    var norm = new MathNet.Numerics.Distributions.Normal(Params.BirthWeight, Params.BirthSigma);
    Weight = norm.Sample();
}
public void CurtainsLeadTime()
{
    var mean = 17.54F;
    var std = 15.16F;
    var dist = new MathNet.Numerics.Distributions.Normal(mean, std);
    List<double> results = new List<double>();

    // Tabulate lead-time quantiles in 5% steps. Note that the inverse CDF is -Infinity at
    // d = 0 (and would be +Infinity if d ever reached exactly 1.0).
    for (double d = 0; d <= 1.0; d += 0.05)
    {
        results.Add(dist.InverseCumulativeDistribution(d));
    }
}
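// A hedged companion sketch: picking a single lead time at a target service level instead of
// tabulating the whole quantile grid. With these parameters the low quantiles are negative
// (std > mean), which is one reason lead times are often modelled as lognormal instead.
var leadTime = new MathNet.Numerics.Distributions.Normal(17.54, 15.16);
double p95LeadTimeDays = leadTime.InverseCumulativeDistribution(0.95); // ~42.5 days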
public static void TestRandom()
{
    var rd = new MathNet.Numerics.Random.MersenneTwister();
    var normalD = new MathNet.Numerics.Distributions.Normal();
    List<double> vals = new List<double>();
    for (int i = 0; i < 100000; i++)
    {
        vals.Add(normalD.InverseCumulativeDistribution(rd.NextDouble()));
    }

    // Create a new Shapiro-Wilk test:
    var sw = new Accord.Statistics.Testing.ShapiroWilkTest(vals.ToArray());
}
public void GenerateWeights(int weightCount, Random randomWeight)
{
    Weights = new double[weightCount];
    WeightErrorDistribution = new double[weightCount];
    for (int i = 0; i < weightCount; i++)
    {
        double mean = 0;
        double stdDev = 0.3;
        MathNet.Numerics.Distributions.Normal normalDist = new MathNet.Numerics.Distributions.Normal(mean, stdDev);
        double randomGaussianValue = normalDist.Sample();
        //randomGaussianValue = randomWeight.NextDouble() - 0.5;
        //randomGaussianValue = 1;
        Weights[i] = randomGaussianValue;
        bias = randomGaussianValue;
    }
}
/// <summary>
/// Removes any values in the list that are outside of a two-tailed p-value range.
/// Does not remove any outliers if the number of values is less than or equal to 3.
/// </summary>
/// <param name="values"></param>
/// <param name="pVal"></param>
/// <returns>list with no outliers</returns>
public static IEnumerable<double> removeOutliers_NormalDist(this IEnumerable<double> values, double pVal = 0.05)
{
    List<double> outliers = new List<double>();
    if (values.Count() > 3)
    {
        MathNet.Numerics.Distributions.Normal distribution = MathNet.Numerics.Distributions.Normal.Estimate(values);
        foreach (double val in values)
        {
            double cur_p = distribution.CumulativeDistribution(val);
            if (cur_p < (pVal / 2) || cur_p > (1 - (pVal / 2)))
            {
                outliers.Add(val);
            }
        }
    }
    return values.Except(outliers);
}
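// Hedged usage sketch for the extension method above. Normal.Estimate fits the mean and
// standard deviation from the data itself, so a single extreme value both inflates the
// fitted sigma and is judged against it. The values below are illustrative only:
var readings = new List<double> { 9.8, 10.1, 10.0, 9.9, 10.2, 25.0 };
var cleaned = readings.removeOutliers_NormalDist(pVal: 0.05).ToList();
// 25.0 should land in the outer 5% two-tailed region of the fitted normal and be dropped.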
public void SetNextOrderIfNeeded(DateTime currentTime)
{
    if (NextOrderTime == new DateTime() || NextOrderTime <= currentTime)
    {
        switch (OptionB)
        {
            case EnumTypes.BuyerBOptions.Static:
                NextOrderTime = currentTime.AddMinutes(this.Minutes);
                break;

            case EnumTypes.BuyerBOptions.Gauss:
                MathNet.Numerics.Distributions.Normal normal = new MathNet.Numerics.Distributions.Normal(this.MeanOptionB, this.DeviationOptionB);
                NextOrderTime = currentTime.AddMinutes(normal.Sample());
                break;
        }
        SetNextOrderAmount();
    }
}
public void get_corrupted_MagneticVectorBodyFrame(ref Vector<float> magField_bf_corrupted, Vector<float> attitude_ideal)
{
    Matrix<float> R = Matrix<float>.Build.Dense(3, 3, 0);
    rotation(ref R, attitude_ideal);

    // from earth to body frame
    magField_bf_corrupted = R.Inverse() * magneticEarthFieldVector;

    // add chip tilt
    rotation(ref R, chip_tilt);
    magField_bf_corrupted = R.Inverse() * magField_bf_corrupted;

    MathNet.Numerics.Distributions.Normal distribution = new MathNet.Numerics.Distributions.Normal(0.0, 1.0);

    // add noise and an offset (noise scaled by the standard deviation and the offset added,
    // matching the other sensor models)
    magField_bf_corrupted[0] += (float)distribution.Sample() * MAGNETOMETER_STANDEV_X + MAGNETOMETER_OFFSET_X;
    magField_bf_corrupted[1] += (float)distribution.Sample() * MAGNETOMETER_STANDEV_Y + MAGNETOMETER_OFFSET_Y;
    magField_bf_corrupted[2] += (float)distribution.Sample() * MAGNETOMETER_STANDEV_Z + MAGNETOMETER_OFFSET_Z;
}
public Func<double> CreateRandomFunc()
{
    var random = this.Seed == 0 ? DefaultRandom.Value : new Random(this.Seed);
    switch (this.Distribution)
    {
        case Distribution.Triangular:
            return () =>
            {
                double p = random.NextDouble();
                return p < (this.Average - this.Minimum) / (this.Maximum - this.Minimum)
                    ? this.Minimum + Math.Sqrt(p * (this.Maximum - this.Minimum) * (this.Average - this.Minimum))
                    : this.Maximum - Math.Sqrt((1 - p) * (this.Maximum - this.Minimum) * (this.Maximum - this.Average));
            };

        case Distribution.Normal:
        {
            var normal = new MathNet.Numerics.Distributions.Normal(
                this.Average, this.StandardDeviation, random);
            return () => normal.Sample();
        }

        case Distribution.LogNormal:
        {
            var logNormal = new MathNet.Numerics.Distributions.LogNormal(
                this.Average, this.StandardDeviation, random);
            return () => logNormal.Sample();
        }

        case Distribution.Uniform:
            return () => random.NextDouble() * (this.Maximum - this.Minimum) + this.Minimum;
    }

    return () => double.NaN;
}
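// One caveat worth noting on the factory above: MathNet's LogNormal(mu, sigma) takes the mean
// and standard deviation of the *logarithm* of the variable, so passing Average and
// StandardDeviation directly gives a distribution whose actual mean is exp(mu + sigma^2 / 2),
// not Average. If the intent is "a log-normal with this mean and standard deviation",
// a sketch of the conversion (illustrative values):
double average = 10.0, stdDev = 3.0;
double sigma2 = Math.Log(1.0 + (stdDev * stdDev) / (average * average));
double mu = Math.Log(average) - 0.5 * sigma2;
var logNormalMatched = new MathNet.Numerics.Distributions.LogNormal(mu, Math.Sqrt(sigma2));
// logNormalMatched.Mean ~ 10.0, logNormalMatched.StdDev ~ 3.0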
public void get_corrupted_angveloc(ref Vector<float> thetadot_bf_corrupted, Vector<float> thetadot_ideal, Vector<float> attitude_ideal)
{
    // The angular velocities in the earth frame are perfectly known from the differential equation.
    // But the gyroscope returns angular velocities in the body frame, therefore a transformation is needed.
    // The easiest way is to use the functions for the differential equations.
    thetadot2omega(ref thetadot_bf_corrupted, thetadot_ideal, attitude_ideal);

    // add chip tilt
    thetadot2omega(ref thetadot_bf_corrupted, thetadot_bf_corrupted, chip_tilt);

    MathNet.Numerics.Distributions.Normal distribution = new MathNet.Numerics.Distributions.Normal(0.0, 1.0);

    // add noise and an offset
    thetadot_bf_corrupted[0] += (float)distribution.Sample() * DEG2RAD(GYROSCOPE_STANDEV_R) + DEG2RAD(GYROSCOPE_OFFSET_R);
    thetadot_bf_corrupted[1] += (float)distribution.Sample() * DEG2RAD(GYROSCOPE_STANDEV_P) + DEG2RAD(GYROSCOPE_OFFSET_P);
    thetadot_bf_corrupted[2] += (float)distribution.Sample() * DEG2RAD(GYROSCOPE_STANDEV_Y) + DEG2RAD(GYROSCOPE_OFFSET_Y);

    // remove the offset from the calibration
    thetadot_bf_corrupted -= calibrated_offsets;
}
// generate a random number, either uniform or gaussian depending on the flag
static double generateRandom(int numMems, Random rnd)
{
    double random = rnd.Next(0, numMems);
    if (USING_GUASSIAN)
    {
        double stdDev;
        if (numMems > 2)
        {
            // 3.0 forces floating-point division; with integer division this expression is always 0
            stdDev = numMems / (numMems * 3.0);
        }
        else
        {
            stdDev = 0;
        }
        var normalDist = new MathNet.Numerics.Distributions.Normal(random, stdDev);
        random = Math.Abs(normalDist.Sample());
    }
    return random;
}
private Tuple<Genome, Genome> Crossover(Genome genome1, Genome genome2)
{
    MathNet.Numerics.Distributions.Normal normal = new MathNet.Numerics.Distributions.Normal(0, GASettings.MutStdDev);
    double[] one = new double[genome1.ann.Weights.Length], two = new double[genome2.ann.Weights.Length];

    for (int i = 0; i < one.Length; i++)
    {
        if (Rand.NextDouble() < GASettings.CrossProb)
        {
            one[i] = genome1.ann.Weights[i];
            two[i] = genome2.ann.Weights[i];
        }
        else
        {
            one[i] = genome2.ann.Weights[i];
            two[i] = genome1.ann.Weights[i];
        }

        if (Rand.NextDouble() < GASettings.MutProb)
        {
            one[i] += normal.Sample();
            one[i] = one[i] > ANNSettings.MaxBoundary ? ANNSettings.MaxBoundary : one[i];
            one[i] = one[i] < ANNSettings.MinBoundary ? ANNSettings.MinBoundary : one[i];
        }

        if (Rand.NextDouble() < GASettings.MutProb)
        {
            two[i] += normal.Sample();
            two[i] = two[i] > ANNSettings.MaxBoundary ? ANNSettings.MaxBoundary : two[i];
            two[i] = two[i] < ANNSettings.MinBoundary ? ANNSettings.MinBoundary : two[i];
        }
    }

    Genome genome3 = null, genome4 = null;
    Parallel.Invoke(() => { genome3 = new Genome(one); }, () => { genome4 = new Genome(two); });
    return Tuple.Create(genome3, genome4);
}
public IΦHatResultElement Calculate(
    INormalFactory normalFactory,
    IΦHatResultElementFactory ΦHatResultElementFactory,
    IlIndexElement lIndexElement,
    IsIndexElement sIndexElement,
    IΛIndexElement ΛIndexElement,
    IExpectedValueΦ expectedValueΦ,
    IVarianceΦ varianceΦ,
    IVHat VHat,
    Iυ2 υ2)
{
    // https://stackoverflow.com/questions/1662943/standard-normal-distribution-z-value-function-in-c-sharp
    MathNet.Numerics.Distributions.Normal normal = (MathNet.Numerics.Distributions.Normal)normalFactory.Create();

    return ΦHatResultElementFactory.Create(
        sIndexElement,
        lIndexElement,
        ΛIndexElement,
        (decimal)expectedValueΦ.GetElementAtAsdecimal(
            sIndexElement,
            lIndexElement,
            ΛIndexElement)
        +
        (decimal)normal.CumulativeDistribution(
            (double)(1 - υ2.GetElementAtAsdecimal(
                ΛIndexElement)))
        *
        (decimal)Math.Pow(
            Math.Sqrt(
                (double)VHat.GetElementAtAsdecimal(
                    ΛIndexElement)),
            -1)
        *
        (decimal)varianceΦ.GetElementAtAsdecimal(
            sIndexElement,
            lIndexElement,
            ΛIndexElement));
}
public decimal Calculate(
    INormalFactory normalFactory,
    ItIndexElement tIndexElement,
    IΛIndexElement ΛIndexElement,
    IExpectedValueI expectedValueI,
    IVarianceI varianceI,
    decimal υ2)
{
    // https://stackoverflow.com/questions/1662943/standard-normal-distribution-z-value-function-in-c-sharp
    MathNet.Numerics.Distributions.Normal normal = (MathNet.Numerics.Distributions.Normal)normalFactory.Create();

    return (decimal)expectedValueI.GetElementAtAsdecimal(
               tIndexElement,
               ΛIndexElement)
           +
           (decimal)normal.CumulativeDistribution((double)(1 - υ2))
           *
           (decimal)Math.Sqrt(
               (double)varianceI.GetElementAtAsdecimal(
                   tIndexElement,
                   ΛIndexElement));
}
public double TakeSamples()
{
    var dateTimeElapsed = 0.0;
    var dateTime = DateTime.Now;

    if (this.DistributionName == "Binomial")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}-Trials:{TrialsNumber}";
        var binomial = new MathNet.Numerics.Distributions.Binomial(0.5, this.TrialsNumber);
        var generatedsamples = binomial.Samples().Take(SamplesNumber).ToArray();
    }
    else if (this.DistributionName == "Geometric")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var geometric = new MathNet.Numerics.Distributions.Geometric(0.5);
        var generatedsamples = geometric.Samples().Take(SamplesNumber).ToArray();
    }
    else if (this.DistributionName == "Poisson")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var poisson = new MathNet.Numerics.Distributions.Poisson(0.5);
        var generatedsamples = poisson.Samples().Take(SamplesNumber).ToArray();
    }
    else if (this.DistributionName == "Normal")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var normal = new MathNet.Numerics.Distributions.Normal(0.5, 2);
        var generatedsamples = normal.Samples().Take(SamplesNumber).ToArray();
    }

    dateTimeElapsed = (DateTime.Now - dateTime).TotalMilliseconds;
    return dateTimeElapsed;
}
// Check how many random-number sets it takes for the 50-year cumulative value to stabilize
public static void TestRandom2()
{
    var rd = new MathNet.Numerics.Random.MersenneTwister();
    var normalD = new MathNet.Numerics.Distributions.Normal();
    double val = 0.0;
    double avg = 0.0;
    List<double> avgVal = new List<double>();

    for (int i = 1; i <= 1000000; i++)
    {
        double cumVal = 1.0;
        for (int j = 0; j < 50; j++)
        {
            val = normalD.InverseCumulativeDistribution(rd.NextDouble());
            cumVal *= 1.0 + (val / 10.0);
        }

        if (i == 1)
        {
            avg = cumVal;
        }
        else
        {
            avg = avg * (Convert.ToDouble(i - 1) / Convert.ToDouble(i)) + cumVal * (1 / Convert.ToDouble(i));
        }

        // Store the running average every 1,000 iterations
        if (i % 1000 == 0)
        {
            avgVal.Add(avg);
        }
    }
}
/// <summary>
/// Gets a new Decision Vector, based on the PCX logic.
/// </summary>
/// <param name="parents">A list of parent <see cref="DecisionVector"/>s.</param>
/// <returns>A new <see cref="DecisionVector"/>.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown if:
/// - there are less than two parents; or
/// - the parents have different length or zero length decision vectors; or
/// - any of the parents have non-continuous Decision Vector elements.
/// </exception>
public DecisionVector Operate(params DecisionVector[] parents)
{
    if (parents.Length < 2)
    {
        throw new ArgumentOutOfRangeException(nameof(parents),
            "There must be at least two parents.");
    }

    // TODO: These calls to .Any() are slow - can we remove the error checking?
    if (parents.Any(p => p.GetContinuousElements().Count == 0))
    {
        throw new ArgumentOutOfRangeException(nameof(parents),
            "Parents must have non-zero length decision vectors.");
    }

    if (parents.Any(p => p.GetContinuousElements().Count != parents.First().Count))
    {
        throw new ArgumentOutOfRangeException(nameof(parents),
            "Parents must have the same length and fully continuous decision vectors.");
    }

    // 1: Pre-process
    var parentDVs = Matrix<double>.Build.DenseOfColumns(parents.Select(dv => dv.Select(d => (double)d)));
    var motherDV = Vector<double>.Build.DenseOfArray(parents.ElementAt(0).Select(d => (double)d).ToArray());

    // 1a: centroid of all parents
    var centroid = parentDVs.RowSums().Divide(parents.Count());

    // 1b: vector distance from centroid to mother (following Deb's C code, not paper)
    var motherCentroidVectorDistance = centroid - motherDV;
    var motherCentroidAbsoluteDistance = motherCentroidVectorDistance.L2Norm();
    if (motherCentroidAbsoluteDistance < 1e-20)
    {
        return DecisionVector.CreateForEmpty();
    }

    // 1c: vector distance from other parents to mother
    var otherParentDVs = parentDVs.RemoveColumn(0);
    var parentMotherVectorDistances = otherParentDVs.EnumerateColumns()
        .Select(v => v - motherDV).ToArray();
    var parentMotherAbsoluteDistances = parentMotherVectorDistances.Select(v => v.L2Norm()).ToArray();
    if (parentMotherAbsoluteDistances.Any(d => d < 1e-20))
    {
        return DecisionVector.CreateForEmpty();
    }

    // 1d: perpendicular distances from other parents to centroid-mother vector
    var orthogonalDistances = parentMotherVectorDistances
        .Select((v, i) => parentMotherAbsoluteDistances.ElementAt(i) *
                          Math.Sqrt(1.0 - Math.Pow(
                              v.DotProduct(motherCentroidVectorDistance) /
                              (parentMotherAbsoluteDistances.ElementAt(i) * motherCentroidAbsoluteDistance),
                              2.0)));
    var meanOrthogonalDistance = orthogonalDistances.Mean();

    // 2: Now create a new individual
    var normRnd = new MathNet.Numerics.Distributions.Normal(rngManager.Rng);
    var samplesEta = new double[motherDV.Count];
    normRnd.Samples(samplesEta);

    var newRandomDv = Vector<double>.Build.DenseOfArray(samplesEta)
        .Multiply(sigmaEta * meanOrthogonalDistance);

    // Remove the component of the randomness that lies along the centroid-mother direction
    var offset1 = motherCentroidVectorDistance
        .Multiply(newRandomDv.DotProduct(motherCentroidVectorDistance))
        .Divide(Math.Pow(motherCentroidAbsoluteDistance, 2.0));
    newRandomDv -= offset1;

    var offset2 = motherCentroidVectorDistance
        .Multiply(sigmaZeta * normRnd.Sample());
    newRandomDv += offset2;

    // Modification of Deb2002 which should maintain stability.
    var finalDv = motherDV + newRandomDv.Divide(Math.Sqrt(motherDV.Count));

    return DecisionVector.CreateFromArray(parents.First().GetDecisionSpace(), finalDv.ToArray());
}
public override void UpdateDistribution()
{
    MathNet.Numerics.Random.RandomSource rand;
    if (RandomSeed)
    {
        rand = new MathNet.Numerics.Random.MersenneTwister();
    }
    else
    {
        rand = new MathNet.Numerics.Random.MersenneTwister(Seed);
    }
    _gauss = new MathNet.Numerics.Distributions.Normal(_mean, _deviation, rand);
}
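// A hedged note on the pattern above: passing a seeded MersenneTwister into the Normal
// constructor is what makes runs reproducible; two distributions built from the same seed
// produce the same sample stream. Minimal illustration:
var a = new MathNet.Numerics.Distributions.Normal(0.0, 1.0, new MathNet.Numerics.Random.MersenneTwister(42));
var b = new MathNet.Numerics.Distributions.Normal(0.0, 1.0, new MathNet.Numerics.Random.MersenneTwister(42));
bool identical = a.Sample() == b.Sample(); // true: identical seed, identical first draw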
private static AggregationResult RunAggregation(ClusterNetwork net, double bias)
{
    Dictionary<Vertex, double> _attributes = new Dictionary<Vertex, double>();
    Dictionary<Vertex, double> _aggregates = new Dictionary<Vertex, double>();

    MathNet.Numerics.Distributions.Normal normal = new MathNet.Numerics.Distributions.Normal(0d, 5d);

    AggregationResult result = new AggregationResult();
    result.Modularity = net.NewmanModularity;

    double average = 0d;
    foreach (Vertex v in net.Vertices)
    {
        _attributes[v] = normal.Sample();
        _aggregates[v] = _attributes[v];
        average += _attributes[v];
    }
    average /= (double)net.VertexCount;

    double avgEstimate = double.MaxValue;
    result.FinalVariance = double.MaxValue;
    result.FinalOffset = 0d;

    for (int k = 0; k < Properties.Settings.Default.ConsensusRounds; k++)
    {
        foreach (Vertex v in net.Vertices.ToArray())
        {
            Vertex w = v.RandomNeighbor;

            List<Vertex> intraNeighbors = new List<Vertex>();
            List<Vertex> interNeighbors = new List<Vertex>();
            ClassifyNeighbors(net, v, intraNeighbors, interNeighbors);

            double r = net.NextRandomDouble();
            if (r <= bias && interNeighbors.Count > 0)
                w = interNeighbors.ElementAt(net.NextRandom(interNeighbors.Count));

            _aggregates[v] = aggregate(_aggregates[v], _aggregates[w]);
            _aggregates[w] = aggregate(_aggregates[v], _aggregates[w]);
        }

        avgEstimate = 0d;
        foreach (Vertex v in net.Vertices.ToArray())
            avgEstimate += _aggregates[v];
        avgEstimate /= (double)net.VertexCount;

        result.FinalVariance = 0d;
        foreach (Vertex v in net.Vertices.ToArray())
            result.FinalVariance += Math.Pow(_aggregates[v] - avgEstimate, 2d);
        result.FinalVariance /= (double)net.VertexCount;

        double intraVar = 0d;
        foreach (int c in net.ClusterIDs)
        {
            double localavg = 0d;
            double localvar = 0d;

            foreach (Vertex v in net.GetNodesInCluster(c))
                localavg += _aggregates[v];
            localavg /= net.GetClusterSize(c);

            foreach (Vertex v in net.GetNodesInCluster(c))
                localvar += Math.Pow(_aggregates[v] - localavg, 2d);
            localvar /= net.GetClusterSize(c);

            intraVar += localvar;
        }
        intraVar /= 50d;

        //Console.WriteLine("i = {0:0000}, Avg = {1:0.000}, Estimate = {2:0.000}, Intra-Var = {3:0.000}, Total Var = {4:0.000}", result.iterations, average, avgEstimate, intraVar, totalVar);
    }
    result.FinalOffset = average - avgEstimate;

    return result;
}
public double TakeSamples()
{
    var dateTimeElapsed = 0.0;
    var dateTime = DateTime.Now;
    //IEnumerable<int> generatedSamplesEnumerable = Enumerable.Empty<int>();
    //IEnumerable<double> generatedSamplesDoubleEnumerable = Enumerable.Empty<double>();

    if (this.DistributionName == "Binomial")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}-Trials:{TrialsNumber}";
        var binomial = new MathNet.Numerics.Distributions.Binomial(0.5, this.TrialsNumber);
        var generatedsamples = binomial.Samples().Take(SamplesNumber).ToArray();
        //generatedSamplesEnumerable = binomial.Samples().Take(SamplesNumber);
        //foreach (var item in generatedSamplesEnumerable)
        //{
        //    var test = item;
        //}
    }
    else if (this.DistributionName == "Geometric")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var geometric = new MathNet.Numerics.Distributions.Geometric(0.5);
        var generatedsamples = geometric.Samples().Take(SamplesNumber).ToArray();
        //generatedSamplesEnumerable = geometric.Samples().Take(SamplesNumber);
        //foreach (var item in generatedSamplesEnumerable)
        //{
        //    var test = item;
        //}
    }
    else if (this.DistributionName == "Poisson")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var poisson = new MathNet.Numerics.Distributions.Poisson(0.5);
        var generatedsamples = poisson.Samples().Take(SamplesNumber).ToArray();
        //generatedSamplesEnumerable = poisson.Samples().Take(SamplesNumber);
        //foreach (var item in generatedSamplesEnumerable)
        //{
        //    var test = item;
        //}
    }
    else if (this.DistributionName == "Normal")
    {
        fullName = $"{DistributionName}-Samples:{SamplesNumber}";
        var normal = new MathNet.Numerics.Distributions.Normal(0.5, 2);
        var generatedsamples = normal.Samples().Take(SamplesNumber).ToArray();
        //generatedSamplesDoubleEnumerable = normal.Samples().Take(SamplesNumber);
        //foreach (var item in generatedSamplesDoubleEnumerable)
        //{
        //    var test = item;
        //}
    }

    dateTimeElapsed = (DateTime.Now - dateTime).TotalMilliseconds;
    return dateTimeElapsed;
}
private Matrix<double> RandomizeGaussianMatrixMaker(double damagedSD, int rowCount)
{
    MathNet.Numerics.Distributions.Normal normalDist = new MathNet.Numerics.Distributions.Normal(0, damagedSD);
    Matrix<double> newMatrix = DenseMatrix.CreateRandom(rowCount, UnitCount, normalDist);
    return newMatrix;
}
/// <summary>
/// Estimates the static properties as a function of core depth
/// </summary>
public void Estimar()
{
    // take the segmentation information vs. the areas of interest and estimate the static petrophysical properties
    // prepare the random CT generators for each phantom
    var phantom1High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom1.mediaHigh, padre.actual.phantom1.desvHigh);
    var phantom1Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom1.mediaLow, padre.actual.phantom1.desvLow);
    var phantom2High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom2.mediaHigh, padre.actual.phantom2.desvHigh);
    var phantom2Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom2.mediaLow, padre.actual.phantom2.desvLow);
    var phantom3High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom3.mediaHigh, padre.actual.phantom3.desvHigh);
    var phantom3Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom3.mediaLow, padre.actual.phantom3.desvLow);

    // prepare a vector of high and low CT values for each phantom;
    // this vector represents one slide, and only the slide's average value is stored
    double[] temp = new double[padre.actual.datacuboHigh.dataCube[0].segCore.Count];
    MathNet.Numerics.Statistics.DescriptiveStatistics stats;

    // prepare the vectors for density and zeff
    this.Dfm = new double[padre.actual.datacuboHigh.dataCube.Count];
    this.Zfme = new double[padre.actual.datacuboHigh.dataCube.Count];
    this.Pefm = new double[padre.actual.datacuboHigh.dataCube.Count];

    double ctP1High, ctP2High, ctP3High, ctP1Low, ctP2Low, ctP3Low;
    double A, B, C, D, E, F;
    List<double> Df, Zf, Zeff, Pef;
    int iarea;

    // iterate over every slide that falls inside the areas of interest; for each slide,
    // generate a collection of random data simulating each phantom and estimate each phantom's mean on that slide
    bool slide = false;
    for (int i = 0; i < padre.actual.datacuboHigh.dataCube.Count; i++)
    {
        slide = false;
        iarea = -1;
        for (int j = 0; j < padre.actual.areasCore.Count; j++)
        {
            // check whether this slide lies inside at least one area of interest
            if ((i >= padre.actual.areasCore[j].ini) & (i <= padre.actual.areasCore[j].fin))
            {
                slide = true;
                iarea = j;
            }
        }

        if (slide)
        {
            // the slide belongs to at least one area of interest, so compute density and zeff for this slide
            // generate the CT values for each phantom and take their mean
            phantom1High.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP1High = stats.Mean;
            phantom2High.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP2High = stats.Mean;
            phantom3High.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP3High = stats.Mean;
            phantom1Low.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP1Low = stats.Mean;
            phantom2Low.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP2Low = stats.Mean;
            phantom3Low.Samples(temp);
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
            ctP3Low = stats.Mean;

            // solve the linear system to obtain the constants A, B, C, D, E, F
            var matriz = MathNet.Numerics.LinearAlgebra.Matrix<double>.Build.DenseOfArray(new double[,] {
                { ctP1Low, ctP1High, 1 },
                { ctP2Low, ctP2High, 1 },
                { ctP3Low, ctP3High, 1 } });
            var sol = MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(new double[] {
                padre.actual.phantom1.densidad, padre.actual.phantom2.densidad, padre.actual.phantom3.densidad });
            var x = matriz.Solve(sol);
            A = x[0];
            B = x[1];
            C = x[2];
            sol = MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(new double[] {
                padre.actual.phantom1.zeff, padre.actual.phantom2.zeff, padre.actual.phantom3.zeff });
            x = matriz.Solve(sol);
            D = x[0];
            E = x[1];
            F = x[2];

            // iterate over every voxel in the segmentation of the current i-th slide, checking that it lies inside the area of interest
            Df = new List<double>();
            Zf = new List<double>();
            Zeff = new List<double>();
            Pef = new List<double>();
            int jkindex = 0;
            double dx;
            double dy;
            double tDf, tZf, tZeff, tPef;

            // since traversal is row by row, index j corresponds to the Y axis and index k to the X axis
            for (int j = 0; j < padre.actual.datacuboHigh.widthSeg; j++)
            {
                for (int k = 0; k < padre.actual.datacuboHigh.widthSeg; k++)
                {
                    // compute the distance from position (j,k) to the center of the area of interest;
                    // if the distance is less than the radius, include the voxel in the calculation, otherwise skip it
                    dx = k - padre.actual.areasCore[iarea].x;
                    dx = dx * dx;
                    dy = j - padre.actual.areasCore[iarea].y;
                    dy = dy * dy;
                    if (Math.Sqrt(dx + dy) <= padre.actual.datacuboHigh.widthSeg)
                    {
                        // coordinate (j,k) is inside the area of interest; compute the static properties
                        tDf = A * padre.actual.datacuboLow.dataCube[i].segCore[jkindex] + B * padre.actual.datacuboHigh.dataCube[i].segCore[jkindex] + C;
                        Df.Add(tDf);
                        tZf = D * padre.actual.datacuboLow.dataCube[i].segCore[jkindex] + E * padre.actual.datacuboHigh.dataCube[i].segCore[jkindex] + F;
                        Zf.Add(tZf);
                        //tZeff = Math.Pow(Math.Pow((tZf / (0.9342 * tDf + 0.1759)), 10), 1 / 36);
                        tZeff = Math.Pow((tZf / (0.9342 * tDf + 0.1759)), 1 / 3.6);
                        Zeff.Add(tZeff);
                        tPef = Math.Pow(Math.Pow((tZeff / 10), 36), 0.1);
                        Pef.Add(tPef);
                    }
                    jkindex++;
                }
            }

            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Df);
            Dfm[i] = stats.Mean;
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Zeff);
            Zfme[i] = stats.Mean;
            stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Pef);
            Pefm[i] = stats.Mean;
        }
        else
        {
            // fill the density and zeff vectors with -1; if the value is -1 it is not plotted
            this.Dfm[i] = -1;
            this.Zfme[i] = -1;
            this.Pefm[i] = -1;
        }
    }

    DateTime fin = DateTime.Now;
}
internal static double probability(double x, double mean, double stdev, bool cumulative)
{
    var normalDistribution = new MathNet.Numerics.Distributions.Normal(mean, stdev);
    return cumulative
        ? normalDistribution.CumulativeDistribution(x)
        : normalDistribution.Density(x);
}
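// The helper above mirrors the spreadsheet-style signature NORM.DIST(x, mean, standard_dev,
// cumulative): cumulative = true returns the CDF, false returns the PDF. Quick usage check
// with a standard normal:
double cdfAtZero = probability(0.0, 0.0, 1.0, cumulative: true);   // 0.5
double pdfAtZero = probability(0.0, 0.0, 1.0, cumulative: false);  // ~0.3989 (1 / sqrt(2*pi))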
static void Main(string[] args)
{
    double bias;
    try
    {
        // The neighbor selection bias is given as command line argument
        bias1 = double.Parse(args[0]);
        bias2 = double.Parse(args[1]);
    }
    catch (Exception)
    {
        Console.WriteLine("Usage: mono ./DemoSimulation.exe [initial_bias] [secondary_bias]");
        return;
    }

    // The number of clusters (c) and the nodes within a cluster (Nc)
    int c = 20;
    int Nc = 20;

    // The number of desired edges
    int m = 6 * c * Nc;

    // In order to yield a connected network, at least ...
    double inter_thresh = 3d * ((c * Math.Log(c)) / 2d);
    // ... edges between communities are required

    // So the maximum number of edges within communities we can create is ...
    double intra_edges = m - inter_thresh;

    Console.WriteLine("Number of intra_edge pairs = " + c * Combinatorics.Combinations(Nc, 2));
    Console.WriteLine("Number of inter_edge pairs = " + (Combinatorics.Combinations(c * Nc, 2) - (c * Combinatorics.Combinations(Nc, 2))));

    // Calculate the p_i necessary to yield the desired number of intra_edges
    double pi = intra_edges / (c * Combinatorics.Combinations(Nc, 2));

    // From this we can compute p_e ...
    double p_e = (m - c * MathNet.Numerics.Combinatorics.Combinations(Nc, 2) * pi) /
                 (Combinatorics.Combinations(c * Nc, 2) - c * MathNet.Numerics.Combinatorics.Combinations(Nc, 2));
    Console.WriteLine("Generating cluster network with p_i = {0:0.0000}, p_e = {1:0.0000}", pi, p_e);

    // Create the network ...
    network = new NETGen.NetworkModels.Cluster.ClusterNetwork(c, Nc, pi, p_e);

    // ... and reduce it to the GCC
    network.ReduceToLargestConnectedComponent();

    Console.WriteLine("Created network has {0} vertices and {1} edges. Modularity = {2:0.00}", network.VertexCount, network.EdgeCount, network.NewmanModularity);

    // Run the OpenGL visualization
    NetworkColorizer colorizer = new NetworkColorizer();
    NetworkVisualizer.Start(network, new FruchtermanReingoldLayout(15), colorizer);

    currentBias = bias1;

    // Setup the synchronization simulation, passing the bias strategy as a lambda expression
    sync = new EpidemicSynchronization(
        network,
        colorizer,
        v =>
        {
            Vertex neighbor = v.RandomNeighbor;
            double r = network.NextRandomDouble();

            // classify neighbors
            List<Vertex> intraNeighbors = new List<Vertex>();
            List<Vertex> interNeighbors = new List<Vertex>();
            ClassifyNeighbors(network, v, intraNeighbors, interNeighbors);

            neighbor = intraNeighbors.ElementAt(network.NextRandom(intraNeighbors.Count));

            // biasing strategy ...
            if (r <= currentBias && interNeighbors.Count > 0)
                neighbor = interNeighbors.ElementAt(network.NextRandom(interNeighbors.Count));

            return neighbor;
        },
        0.9d);

    Dictionary<int, double> _groupMus = new Dictionary<int, double>();
    Dictionary<int, double> _groupSigmas = new Dictionary<int, double>();

    MathNet.Numerics.Distributions.Normal avgs_normal = new MathNet.Numerics.Distributions.Normal(300d, 50d);
    MathNet.Numerics.Distributions.Normal devs_normal = new MathNet.Numerics.Distributions.Normal(20d, 5d);

    for (int i = 0; i < c; i++)
    {
        double groupAvg = avgs_normal.Sample();
        double groupStdDev = devs_normal.Sample();

        foreach (Vertex v in network.GetNodesInCluster(i))
        {
            sync._MuPeriods[v] = groupAvg;
            sync._SigmaPeriods[v] = groupStdDev;
        }
    }

    sync.OnStep += new EpidemicSynchronization.StepHandler(collectLocalOrder);

    // Run the simulation synchronously
    sync.Run();
    Console.ReadKey();

    // Collect and print the results
    SyncResults res = sync.Collect();
    Console.WriteLine("Order {0:0.00} reached after {1} rounds", res.order, res.time);
}