/// <summary>
/// Sets up the blocking model with its fault and corrective-action distributions,
/// starting in the non-faulted state at time zero.
/// </summary>
public Blocking(Distributions fault, Distributions corrective)
{
    _fault = fault;
    _corrective = corrective;
    Isfault = false;
    time0 = 0;
}
public void NextNormal_SingleValue_Sigma1Mu0()
{
    const double sigma = 1;
    const double mu = 0;
    const int count = 100000;
    // Fixed seed (originally captured from DateTime.Now.Ticks) keeps this
    // statistical check reproducible.
    const int seed = 1395601201;

    var rnd = new Random(seed);
    var samples = new double[count];
    for (var n = 0; n < count; n++)
    {
        samples[n] = rnd.NextNormal(sigma, mu);
    }

    // Pearson chi-squared goodness-of-fit against the expected normal law.
    var criteria = Distributions.GetPirsonsCriteria(
        samples,
        Distributions.NormalGauss(sigma, mu),
        out var freedom_degree);
    var quantile = MathCore.SpecialFunctions.Distribution.Student.QuantileHi2Approximation(0.80, freedom_degree);

    Assert.That.Value(criteria).LessThan(quantile, $"seed:{seed}");
}
/// <summary>
/// Returns the sampler for a specified rng, data type and distribution spec
/// </summary>
/// <param name="rng">The random stream</param>
/// <param name="spec">The distribution specifier</param>
/// <typeparam name="T">The sample point type</typeparam>
public static IRngSampler<T> Sampler<T>(this MklRng rng, IDistributionSpec<T> spec)
    where T : unmanaged
{
    // Dispatch on the declared distribution kind; each arm builds the
    // matching typed sampler from the spec.
    switch (spec.DistKind)
    {
        case DistributionKind.Uniform:
            return rng.UniformSampler<T>(Distributions.uniform(spec));
        case DistributionKind.UniformBits:
            return rng.UniformBitsSampler<T>(Distributions.uniformbits(spec));
        case DistributionKind.Bernoulli:
            return rng.BernoulliSampler<T>(Distributions.bernoulli(spec));
        case DistributionKind.Gaussian:
            return rng.GaussianSampler<T>(Distributions.gaussian(spec));
        default:
            throw Unsupported.define<T>();
    }
}
public void GetPirsonsCriteriaTest()
{
    // Known reference values for the shared __TestData sample.
    var criteria = Distributions.GetPirsonsCriteria(__TestData, Distributions.NormalGauss(), out var freedomDegree);

    Assert.That.Value(criteria).IsEqual(2.5513486175786118);
    Assert.That.Value(freedomDegree).IsEqual(2);
}
private void ShowAreasOnMap()
{
    AreasMap.Clear();

    if (selectedCity != "כל הארץ")
    {
        try
        {
            // Re-center on the first package of the first distribution.
            var center = new Location
            {
                Latitude = Distributions[0].Packages[0].Recipient.Address.Lat,
                Longitude = Distributions[0].Packages[0].Recipient.Address.Lon
            };
            AreasMap.SetMapLocation(center, 9);
        }
        catch { } // best-effort: missing data simply skips the re-center
    }
    else
    {
        // Country-wide default view.
        AreasMap.SetMapLocation(new Location(32.032527, 34.8851379), 8);
    }

    // Draw one area group per distribution (snapshot copies guard against
    // the collections changing while we iterate).
    var addresses = new List<Address>();
    foreach (Distribution distribution in Distributions.ToList())
    {
        foreach (Package package in distribution.Packages.ToList())
        {
            addresses.Add(package.Recipient.Address);
        }
        AreasMap.AddAreas(addresses);
        addresses.Clear();
    }

    SelectedCity = "כל הארץ";
}
public override void Input(string pinyin)
{
    // Extend every candidate prefix by one solved character, keep only the
    // TakeSize most probable candidates, and renormalize.
    distribution = distribution
        .ExpandAndMerge(prefix => Model
            .GetDistribution(new PinyinToSolve(prefix, pinyin))
            .Take(TakeSize)
            .Select(candidate => prefix + candidate.Substring(0, 1)))
        .Take(TakeSize)
        .Norm();

    if (MakeGoodResults)
    {
        // Record high-confidence answers (prob > 0.2), least likely first.
        var confident = distribution.KeyProbDescending
            .TakeWhile(pair => pair.Value > 0.2)
            .Reverse()
            .Select(pair => pair.Key);
        goodResults.AddRange(confident);
    }

    if (TraceDistribute)
    {
        Distributions.Add(distribution);
    }

    if (PrintDistributeSize > 0)
    {
        distribution.Take(PrintDistributeSize).Print();
    }

    longestAnswer = distribution.KeyProbDescending.First().Key;
}
public void Roll()
{
    var stake = int.Parse(bet.text);
    if (!TryMakeBet(stake))
    {
        return;
    }

    var multiplier = 0;
    var recordedStreak = 0;
    if (Distributions.Current(_streak))
    {
        // Winning roll: pay out and reset the streak counter.
        multiplier = MultiplierCalculator.GetMultiplier();
        var prize = multiplier * stake;
        prizeText.text = prize.ToString();
        balance.Value += prize;
        recordedStreak = _streak;
        _streak = 0;
    }

    statistic.RecordResult(recordedStreak, multiplier, balance.Value);
    slot.PushToChangeWinningLine(multiplier);
    slot.PushToChangeEmptyLines();
    slot.ChangeLines();
}
static void Main(string[] args)
{
    // Use the sampling functions directly; the Distribution wrapper's extra
    // facilities are not needed for a simple Bernoulli count.
    var bernoulli = Distributions.Bernoulli(0.3);
    var successes = 0;
    for (var trial = 0; trial < 100000; trial++)
    {
        if (bernoulli.NextSample())
        {
            successes++;
        }
    }
    Console.WriteLine(successes);

    // Draw an army sample and report the composition.
    var elements = Army.ArmyElements().Take(20000).ToList();
    var soldiersA = elements.OfType<SoldierTypeA>().Count();
    var soldiersB = elements.OfType<SoldierTypeB>().Count();
    var tanksA = elements.OfType<TankA>().Count();
    var tanksB = elements.OfType<TankB>().Count();
    Console.WriteLine($"Soldiers/Tanks {elements.OfType<Soldier>().Count()/ (double)(elements.OfType<Tank>().Count())}");
    Console.WriteLine($"Soldiers A({soldiersA}) B({soldiersB}), Tanks A({tanksA}) B({tanksB})");
}
public override void Clear()
{
    base.Clear();

    // Reset to the empty-string singleton and drop all accumulated state.
    Distributions.Clear();
    goodResults.Clear();
    distribution = Distribution<string>.Single("");
}
/// <inheritdoc/>
public bool Equals(PackageImplementation other)
    => other != null
    && base.Equals(other)
    && Package == other.Package
    && Distributions.UnsequencedEquals(other.Distributions);
// NOTE(review): this block is truncated in the captured source — the PlotModel
// Series initializer is cut off mid-expression, so the code is preserved exactly
// as captured and only this note is added. Also note the unseeded-looking
// histogram comparison actually uses Random(5), so it is deterministic.
public static void Run() { var rnd = new Random(5); const int count = 100_000; var samples = new double[count]; for (var i = 0; i < count; i++) { var x = (rnd.NextDouble() * 2 - 1) + (rnd.NextDouble() * 2 - 1) + (rnd.NextDouble() * 2 - 1) + (rnd.NextDouble() * 2 - 1) + (rnd.NextDouble() * 2 - 1) ; samples[i] = x / 5; } const double D = 1; const double m = 0; //const int count = 1000000; //var values = rnd.NextNormal(count, D, m); var values = Enumerable.Range(0, count).ToArray(_ => (rnd.NextDouble() - 0.5) + (rnd.NextDouble() - 0.5) + (rnd.NextDouble() - 0.5) + (rnd.NextDouble() - 0.5) + (rnd.NextDouble() - 0.5)); var gauss = Distributions.NormalGauss(D, m); var gauss0 = Distributions.NormalGauss(D, m + 0.1); const int intervals_count = 60; var histogram = new Histogram(samples, intervals_count); var pirson = histogram.GetPirsonsCriteria(gauss); var pirson0 = histogram.GetPirsonsCriteria(gauss0); var q1 = SpecialFunctions.Distribution.Student.QuantileHi2Approximation(0.95, intervals_count - 2); var q2 = SpecialFunctions.Distribution.Student.QuantileHi2(0.95, intervals_count - 2); var interval = histogram.Interval; const int function_points_count = 1000; var model = new PlotModel { Background = OxyColors.White, Series = { new HistogramSeries { FillColor = OxyColors.Blue, StrokeColor = OxyColors.DarkBlue, StrokeThickness = 1, ItemsSource = histogram, Mapping = o => { var((min, max), n, value, normal_value) = (Histogram.HistogramValue)o; return(new HistogramItem(min, max, value, 0)); }, },
/// <inheritdoc/>
public override int GetHashCode()
{
    unchecked
    {
        int result = base.GetHashCode();
        // BUGFIX: `??` binds looser than `^`, so the original
        // `(result * 397) ^ Package?.GetHashCode() ?? 0` evaluated as
        // `((result * 397) ^ Package?.GetHashCode()) ?? 0`, discarding the
        // accumulated hash (yielding 0) whenever Package was null.
        // Parenthesizing restores the intended per-field coalescing.
        result = (result * 397) ^ (Package?.GetHashCode() ?? 0);
        result = (result * 397) ^ Distributions.GetUnsequencedHashCode();
        return result;
    }
}
/// <summary>
/// Create the basic distribution table for balls
/// </summary>
/// <remarks>
/// For each possible size, the distribution function returns a float that
/// indicates how likely (in a relative sense) that size is to be created.
/// These values are summed, normalized so they total one, and stored as a
/// cumulative threshold array that is later searched to pick a size.
/// </remarks>
/// <param name="sizeMin">Minimum size to include</param>
/// <param name="sizeMax">Maximum size to include</param>
/// <param name="dist">Which distribution shape to use</param>
public Distribute(int sizeMin, int sizeMax, Distributions dist)
{
    int count = sizeMax - sizeMin + 1;
    this.sizeMin = sizeMin;
    this.sizeMax = sizeMax;
    rand = new Random();

    DistFunc distFunc;
    switch (dist)
    {
        case Distributions.Linear:
            distFunc = new DistFuncLinear(count);
            break;
        case Distributions.Squared:
            distFunc = new DistFuncSquared(count);
            break;
        case Distributions.Inverse:
            distFunc = new DistFuncInverse(count);
            break;
        default:
            // BUGFIX: an unhandled enum value used to leave distFunc null and
            // crash below with a NullReferenceException; fail fast instead.
            throw new ArgumentOutOfRangeException(nameof(dist), dist, "Unsupported distribution kind.");
    }

    /*
     * Add up all the values to figure out the normalization factor.
     */
    float total = 0.0f;
    for (int index = 0; index < count; index++)
    {
        total += distFunc.Value(index);
    }

    /*
     * Go through again to get the real values, normalizing so they add to 1.0.
     */
    values = new float[count];
    float threshold = 0.0f;
    for (int index = 0; index < count; index++)
    {
        threshold += distFunc.Value(index) / total;
        values[index] = threshold;
    }
}
/// <summary>
/// Find the distribution for a given academic year, direction and background level.
/// </summary>
/// <param name="year">Academic year</param>
/// <param name="directionId">Direction identifier</param>
/// <param name="backgroundLevel">Student background level</param>
/// <returns>The matching distribution, or null when none exists</returns>
public virtual async Task<Distribution> FindByYearDirectionBackgroundLevelAsync(int year, long directionId, StudentBackgroundLevel backgroundLevel)
{
    // SingleOrDefaultAsync: at most one distribution may match; more than one
    // is a data error and throws.
    return await Distributions.SingleOrDefaultAsync(d =>
        d.AcademicYear == year
        && d.Department.Directions.Any(dir => dir.Id == directionId)
        && d.BackgroundLevel == backgroundLevel);
}
/// <inheritdoc/>
public override int GetHashCode()
{
    unchecked
    {
        int result = base.GetHashCode();
        // BUGFIX: `??` binds looser than `^`, so the original
        // `(result * 397) ^ InterfaceUri?.GetHashCode() ?? 0` evaluated as
        // `((result * 397) ^ InterfaceUri?.GetHashCode()) ?? 0`, zeroing the
        // accumulated hash whenever the field was null (same for Versions).
        // Parenthesizing restores the intended per-field coalescing.
        result = (result * 397) ^ (InterfaceUri?.GetHashCode() ?? 0);
        result = (result * 397) ^ (int)OS;
        result = (result * 397) ^ (Versions?.GetHashCode() ?? 0);
        result = (result * 397) ^ Constraints.GetUnsequencedHashCode();
        result = (result * 397) ^ Distributions.GetUnsequencedHashCode();
        return result;
    }
}
public static IEnumerator<IArmyElement> ArmySamplingFunction()
{
    // 20% of the stream are tanks, the rest soldiers.
    var isTank = Distributions.Bernoulli(0.2);
    var tanks = new Distribution<Tank>(TankSamplingFunction());
    var soldiers = new Distribution<Soldier>(SoldierSamplingFunction());

    while (true)
    {
        if (isTank.NextSample())
        {
            yield return tanks.NextSample();
        }
        else
        {
            yield return soldiers.NextSample();
        }
    }
}
/// <inheritdoc/>
public override int GetHashCode()
{
    unchecked
    {
        // 397-prime xor-fold; null fields are skipped entirely (no multiply),
        // matching the original accumulation exactly.
        int hash = base.GetHashCode();
        if (InterfaceUri != null)
        {
            hash = (hash * 397) ^ InterfaceUri.GetHashCode();
        }
        hash = (hash * 397) ^ (int)OS;
        if (Versions != null)
        {
            hash = (hash * 397) ^ Versions.GetHashCode();
        }
        hash = (hash * 397) ^ Constraints.GetUnsequencedHashCode();
        hash = (hash * 397) ^ Distributions.GetUnsequencedHashCode();
        return hash;
    }
}
public static IRngSampler<T> UniformBitsSampler<T>(this MklRng rng, UniformBitsSpec<T>? spec = null)
    where T : unmanaged
{
    // Fall back to the default uniform-bits spec when none is supplied.
    var effective = spec ?? Distributions.uniformbits<T>();

    if (typeof(T) == typeof(uint))
    {
        return samplers.bits(rng, effective.ToUInt32()) as IRngSampler<T>;
    }

    if (typeof(T) == typeof(ulong))
    {
        return samplers.bits(rng, effective.ToUInt64()) as IRngSampler<T>;
    }

    throw Unsupported.define<T>();
}
public static Army Generate(int size)
{
    var army = new Army();
    // 20% of generated elements are tanks, the rest soldiers.
    var isTank = Distributions.Bernoulli(0.2);
    var tanks = new Distribution<Tank>(TankSamplingFunction());
    var soldiers = new Distribution<Soldier>(SoldierSamplingFunction());

    for (var n = 0; n < size; n++)
    {
        if (isTank.NextSample())
        {
            army.Tanks.Add(tanks.NextSample());
        }
        else
        {
            army.Soldiers.Add(soldiers.NextSample());
        }
    }

    return army;
}
public Order(Distributions ad, Distributions td)
{
    ammountDistribution = ad;
    timeDistribution = td;

    // Draw the order size from the configured distribution
    // (-1 for an unknown kind; NonNegativeValue handles clamping).
    double rawAmount;
    switch (ammountDistribution)
    {
        case Distributions.Normal:
            rawAmount = NormalDistribution.OrderSize();
            break;
        case Distributions.Poisson:
            rawAmount = PoissonDistribution.OrderSize();
            break;
        case Distributions.Exponential:
            rawAmount = ExponentialDistribution.OrderSize();
            break;
        case Distributions.UniformCont:
            rawAmount = UniformContDistribution.OrderSize();
            break;
        case Distributions.UniformDisc:
            rawAmount = UniformDiscDistribution.OrderSize();
            break;
        default:
            rawAmount = -1;
            break;
    }
    Ammount = NonNegativeValue(rawAmount);

    // Same pattern for the inter-order time.
    double rawTime;
    switch (timeDistribution)
    {
        case Distributions.Normal:
            rawTime = NormalDistribution.TimeBetweenOrder();
            break;
        case Distributions.Poisson:
            rawTime = PoissonDistribution.TimeBetweenOrder();
            break;
        case Distributions.Exponential:
            rawTime = ExponentialDistribution.TimeBetweenOrder();
            break;
        case Distributions.UniformCont:
            rawTime = UniformContDistribution.TimeBetweenOrder();
            break;
        case Distributions.UniformDisc:
            rawTime = UniformDiscDistribution.TimeBetweenOrder();
            break;
        default:
            rawTime = -1;
            break;
    }
    Time = NonNegativeValue(rawTime);
}
/// <summary>
/// Updates a Distribution row with the values carried by <paramref name="d"/>.
/// </summary>
/// <param name="d">The distribution record to persist</param>
/// <returns>"Updated Successfully" on success, otherwise the exception message</returns>
public string Put(Distributions d)
{
    try
    {
        // SECURITY FIX: the original concatenated user-supplied fields straight
        // into the SQL string (SQL injection). Use parameters instead; the
        // unused DataTable/SqlDataAdapter are dropped in favor of ExecuteNonQuery.
        const string query =
            @"update Distribution set name=@name, categoryId=@categoryId, cityId=@cityId,
              distributorName=@distributorName, distributorEmail=@distributorEmail,
              distributorCnic=@distributorCnic, distributorPhone=@distributorPhone
              where distributionId=@distributionId";
        using (var con = new SqlConnection(ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString))
        using (var command = new SqlCommand(query, con))
        {
            command.CommandType = CommandType.Text;
            command.Parameters.AddWithValue("@name", d.name);
            command.Parameters.AddWithValue("@categoryId", d.categoryId);
            command.Parameters.AddWithValue("@cityId", d.cityId);
            command.Parameters.AddWithValue("@distributorName", d.distributorName);
            command.Parameters.AddWithValue("@distributorEmail", d.distributorEmail);
            command.Parameters.AddWithValue("@distributorCnic", d.distributorCnic);
            command.Parameters.AddWithValue("@distributorPhone", d.distributorPhone);
            command.Parameters.AddWithValue("@distributionId", d.distributionId);
            con.Open();
            command.ExecuteNonQuery();
        }
        return "Updated Successfully";
    }
    catch (Exception ex)
    {
        return ex.Message;
    }
}
private void AddDistributions(List<Package>[] DividedPackages)
{
    try
    {
        // One Distribution per package group, all dated and owned by the
        // currently logged-in admin.
        foreach (var packageGroup in DividedPackages)
        {
            Distributions.Add(new Distribution()
            {
                Date = DistributionDate,
                Packages = packageGroup,
                AdminId = (((App)Application.Current).Currents.LoggedUser as Admin).AdminId
            });
        }
        AssignVolunteers();
    }
    catch (Exception e)
    {
        Message = new Message("משהו השתבש.", e.Message, false, true);
    }
}
/// <summary>
/// Inserts a new Distribution row from the values carried by <paramref name="d"/>.
/// </summary>
/// <param name="d">The distribution record to persist</param>
/// <returns>"Added Successfully" on success, otherwise the exception message</returns>
public string Post(Distributions d)
{
    try
    {
        // SECURITY FIX: the original concatenated user-supplied fields straight
        // into the SQL string (SQL injection). Use parameters instead; the value
        // order matches the original column-less INSERT. The unused
        // DataTable/SqlDataAdapter are dropped in favor of ExecuteNonQuery.
        const string query =
            @"INSERT INTO dbo.Distribution
              VALUES(@name, @categoryId, @distributorName, @distributorEmail, @distributorCnic, @distributorPhone, @cityId)";
        using (var con = new SqlConnection(ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString))
        using (var command = new SqlCommand(query, con))
        {
            command.CommandType = CommandType.Text;
            command.Parameters.AddWithValue("@name", d.name);
            command.Parameters.AddWithValue("@categoryId", d.categoryId);
            command.Parameters.AddWithValue("@distributorName", d.distributorName);
            command.Parameters.AddWithValue("@distributorEmail", d.distributorEmail);
            command.Parameters.AddWithValue("@distributorCnic", d.distributorCnic);
            command.Parameters.AddWithValue("@distributorPhone", d.distributorPhone);
            command.Parameters.AddWithValue("@cityId", d.cityId);
            con.Open();
            command.ExecuteNonQuery();
        }
        return "Added Successfully";
    }
    catch (Exception ex)
    {
        return ex.Message;
    }
}
public static IRngSampler<T> UniformSampler<T>(this MklRng rng, UniformSpec<T>? spec = null)
    where T : unmanaged
{
    // Default spec spans the full numeric range of T when none is supplied.
    var effective = spec ?? Distributions.uniform<T>(Numeric.minval<T>(), Numeric.maxval<T>());

    if (typeof(T) == typeof(int))
    {
        return samplers.uniform(rng, effective.ToInt32()) as IRngSampler<T>;
    }

    if (typeof(T) == typeof(float))
    {
        return samplers.uniform(rng, effective.ToFloat32()) as IRngSampler<T>;
    }

    if (typeof(T) == typeof(double))
    {
        return samplers.uniform(rng, effective.ToFloat64()) as IRngSampler<T>;
    }

    throw no<T>();
}
public void NextNormal_SingleValue_Sigma3Mu5()
{
    const double sigma = 3;
    const double mu = 5;
    const int count = 10000;
    // BUGFIX: the test used an unseeded `new Random()`, making this statistical
    // check nondeterministic — at the 0.95 chi-squared quantile it is expected
    // to fail roughly 5% of runs. Use a fixed seed (and report it on failure),
    // matching the sibling test NextNormal_SingleValue_Sigma1Mu0.
    const int seed = 1395601201;
    var rnd = new Random(seed);

    var values = new double[count];
    for (var i = 0; i < count; i++)
    {
        values[i] = rnd.NextNormal(sigma, mu);
    }

    var pirsons_criteria = Distributions.GetPirsonsCriteria(
        values,
        Distributions.NormalGauss(sigma, mu),
        out var freedom_degree);
    var quantile = MathCore.SpecialFunctions.Distribution.Student.QuantileHi2Approximation(0.95, freedom_degree);

    Assert.That.Value(pirsons_criteria).LessThan(quantile, $"seed:{seed}");
}
/// <summary>
/// Captures the identity of a detected OS: its distribution, version and display name.
/// </summary>
public OSVersionInfo(Distributions distribution, Version version, string name)
{
    Distribution = distribution;
    Version = version;
    Name = name;
}
// Runs a 3-state Kalman filter (A-growth, savings rate, L-growth) over the
// selected country's annual data and plots the filtered series against the
// raw series. Logic is order-dependent; comments only added in this review.
private void KalmanFilterButton_Click(object sender, RoutedEventArgs e)
{
    if (CountriesListView.SelectedItem == null)
    {
        return;
    }
    PWTCountry country = (CountriesListView.SelectedItem as CountryVM).CountryObject;
    // Savings-rate series drives the year range for the whole run.
    SortedDictionary<int, double> kalmanDataSet = country.SavingsRateHT;
    int minYear = kalmanDataSet.Keys.Min();
    int maxYear = kalmanDataSet.Keys.Max();
    if (!double.TryParse(TimeStepTerm.Text, out double dt) || dt < 0)
    {
        MessageBox.Show("invalid time step");
        return;
    }
    // NOTE(review): dt == 0 passes this guard and would divide by zero below — confirm intended.
    int numXVals = (int)((maxYear - minYear) / dt) + 1;
    // time steps
    MathMatrix t = Sequences.SteppedSequence(minYear, maxYear, dt);
    // state matrix
    MathMatrix xt = MathMatrix.CreateMatrix(3, numXVals, 0);
    double[] timeArray = t.RowVectorArray(0);
    for (int colidx = 0; colidx < t.ColCount; ++colidx)
    {
        // Integer time steps that have data get the observed values; fractional
        // or missing steps hold the previous column (zero-order hold).
        if (Math.Floor(timeArray[colidx]) == Math.Ceiling(timeArray[colidx]) && country.AGrowthRateHT.ContainsKey((int)timeArray[colidx]))
        {
            int key = (int)timeArray[colidx];
            xt[0, colidx] = country.AGrowthRateHT[key];
            xt[1, colidx] = country.SavingsRateHT[key];
            xt[2, colidx] = country.LGrowthRateHT[key];
        }
        else if (colidx > 0)
        {
            xt[0, colidx] = xt[0, colidx - 1];
            xt[1, colidx] = xt[1, colidx - 1];
            xt[2, colidx] = xt[2, colidx - 1];
        }
    }
    // state matrix
    // x(k) = Fx(k-1) + Gu(k-1) which can be seen below in Kalman filter loop.
    MathMatrix x = MathMatrix.CreateMatrix(3, numXVals, 0);
    // process matrix moves state matrix from state k to k + 1
    MathMatrix F = MatrixOperations.Identity(3);
    MathMatrix FT = F;
    // control matrix
    MathMatrix u = MathMatrix.CreateMatrix(1, 1, 0);
    MathMatrix G = MathMatrix.CreateMatrix(3, 1, new double[] { 0, 0, 0 });
    // state error covariance matrix
    if (!double.TryParse(CovarianceTerm.Text, out double covterm))
    {
        MessageBox.Show("Covariance term needs to be a double value.");
        return;
    }
    //MathMatrix P = MathMatrix.CreateMatrix(3, 3, new double[] { -0.1, 0.05, -0.1, 0.001, 0.01, -0.005, -0.005, 0.15, -0.05 });
    MathMatrix P = MathMatrix.CreateMatrix(3, 3, new double[] { covterm, 0, 0, 0, covterm, 0, 0, 0, covterm });
    // observation matrix
    MathMatrix H = MathMatrix.CreateMatrix(1, 3, new double[] { 1, 1, 1 });
    MathMatrix HT = MatrixOperations.Transpose(H);
    // process noise covariance matrix
    MathMatrix Q = MathMatrix.CreateMatrix(3, 3, new double[] { -0.01, 0.05, -0.1, 0.001, -0.01, -0.005, -0.005, 0.15, -0.05 });
    MathMatrix I = MatrixOperations.Identity(3);
    // measurement noise covariance matrix
    MathMatrix R = MathMatrix.CreateMatrix(1, 1, 3);
    MathMatrix sqrtR = MatrixOperations.Sqrt_Elmtwise(R);
    // measurement noise
    MathMatrix v = sqrtR * Distributions.Normal(numXVals);
    // observation / measurement
    // y(k) = Hxt(k) + v(k);
    MathMatrix y = H * xt + v;
    // Kalman filter
    for (int k = 0; k < numXVals; ++k)
    {
        // NOTE(review): the predict step uses the TRUE state xt(k) rather than
        // the previous estimate x(k-1); looks unconventional — confirm intended.
        x.AssignColumn(F * xt.ColumnVector(k) + G * u, k);
        P = F * P * FT + Q;
        // HACK HERE SINCE WE DO NOT YET HAVE MATRIX INVERSION.
        MathMatrix Knumerator = P * HT;
        MathMatrix Kdenominator = (H * P * HT + R);
        Kdenominator[0, 0] = 1 / Kdenominator[0, 0];
        MathMatrix K = Knumerator * Kdenominator;
        x.AssignColumn(x.ColumnVector(k) + K * (y.ColumnVector(k) - H * x.ColumnVector(k)), k);
        P = (I - K * H) * P;
    }
    ResultsPlot.ClearPlotArea(clearPlotData: true);
    // Map the selected plot type to a state row: "n" -> L-growth (2),
    // "s" -> savings (1), "g" -> A-growth (0), anything else -> invalid.
    int kalmanrowidx = KalmanFilterPlotType == "n" ? 2 : KalmanFilterPlotType == "s" ? 1 : KalmanFilterPlotType == "g" ?
                       0 : -1;
    if (kalmanrowidx == -1)
    {
        MessageBox.Show("A filter quantity must be selected.");
        return;
    }
    // GO THROUGH THE ARRAYS AND DROP CORRESPONDING NAN OR INFINITY ENTRIES FROM X AND XT.
    double[] numsOnlyVec = x.RowVectorArray(kalmanrowidx).Where(p => !double.IsNaN(p) && !double.IsInfinity(p)).ToArray();
    double minY = new double[] { numsOnlyVec.Min(), xt.RowVectorArray(kalmanrowidx).Min() }.Min();
    double maxY = new double[] { numsOnlyVec.Max(), xt.RowVectorArray(kalmanrowidx).Max() }.Max();
    ap.YLabel = YAxisLabel.NewAxisLabel(KalmanFilterPlotQtyDict[KalmanFilterPlotType], 0.5, 15, ylp);
    ap.XLabel = XAxisLabel.NewAxisLabel("Year", minY < 0 ? 0.05 : 0.5, 15, minY < 0 ? xlp2 : xlp);
    ResultsPlot.SetAxes(minYear, maxYear, minY, maxY, ap, drawHorAxisAtY0: minY < 0);
    ResultsPlot.SetPlotGridLines(20, 20);
    // Filtered estimate as points, raw series as a curve.
    PointCollection pc = new PointCollection();
    PointCollection pc2 = new PointCollection();
    for (int idx = 0; idx < numsOnlyVec.Length; ++idx)
    {
        pc.Add(new Point(t[0, idx], numsOnlyVec[idx]));
        pc2.Add(new Point(t[0, idx], xt[kalmanrowidx, idx]));
    }
    ResultsPlot.PlotPoints2D($"KalmanFiltered_{country.CountryCode}_{kalmanrowidx}_Points", pc, dpp);
    ResultsPlot.PlotCurve2D($"KalmanFiltered_{country.CountryCode}_{kalmanrowidx}_Curve", pc2, cp2);
}
/// <summary>
/// Creates a float dense vector based on a string. The string can be in the following formats (without the
/// quotes): 'n', 'n,n,..', '(n,n,..)', '[n,n,...]', where n is a float.
/// </summary>
/// <returns>
/// A float dense vector containing the values specified by the given string.
/// </returns>
/// <param name="value">
/// the string to parse.
/// </param>
/// <param name="formatProvider">
/// An <see cref="IFormatProvider"/> that supplies culture-specific formatting information.
/// </param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="value"/> is null.</exception>
/// <exception cref="FormatException">Thrown when the string is empty, has unbalanced brackets, or is not a separator-delimited list of floats.</exception>
public static DenseVector Parse(string value, IFormatProvider formatProvider)
{
    if (value == null)
    {
        throw new ArgumentNullException("value");
    }

    value = value.Trim();
    if (value.Length == 0)
    {
        throw new FormatException();
    }

    // strip out parens
    if (value.StartsWith("(", StringComparison.Ordinal))
    {
        if (!value.EndsWith(")", StringComparison.Ordinal))
        {
            throw new FormatException();
        }

        value = value.Substring(1, value.Length - 2).Trim();
    }

    if (value.StartsWith("[", StringComparison.Ordinal))
    {
        if (!value.EndsWith("]", StringComparison.Ordinal))
        {
            throw new FormatException();
        }

        value = value.Substring(1, value.Length - 2).Trim();
    }

    // keywords: the culture's list separator delimits the numbers
    var textInfo = formatProvider.GetTextInfo();
    var keywords = new[] { textInfo.ListSeparator };

    // lexing: the tokenizer splits into alternating number/separator tokens,
    // so a well-formed list always has an odd token count.
    var tokens = new LinkedList<string>();
    GlobalizationHelper.Tokenize(tokens.AddFirst(value), keywords, 0);
    var token = tokens.First;

    if (token == null || tokens.Count.IsEven())
    {
        throw new FormatException();
    }

    // parsing: number tokens sit at even positions, so there are
    // (tokens.Count + 1) / 2 values.
    var data = new float[(tokens.Count + 1) >> 1];
    for (var i = 0; i < data.Length; i++)
    {
        // A separator where a number belongs (e.g. "1,,2") is malformed.
        if (token == null || token.Value == textInfo.ListSeparator)
        {
            throw new FormatException();
        }

        data[i] = float.Parse(token.Value, NumberStyles.Any, formatProvider);

        // Advance two links: past this number and the following separator.
        token = token.Next;
        if (token != null)
        {
            token = token.Next;
        }
    }

    return new DenseVector(data);
}
public void CheckDistributionTest()
{
    // The shared sample must pass the fit check against a standard normal law.
    Assert.IsTrue(__TestData.CheckDistribution(Distributions.NormalGauss()));
}
// Uniform law centered at Mu with total width Sigma.
public override double Distribution(double x)
    => Distributions.Uniform(x, Mu - Sigma / 2, Mu + Sigma / 2);
/// <inheritdoc/>
public bool Equals(Restriction other)
    => other != null
    && base.Equals(other)
    && InterfaceUri == other.InterfaceUri
    && OS == other.OS
    && Versions == other.Versions
    && Constraints.UnsequencedEquals(other.Constraints)
    && Distributions.UnsequencedEquals(other.Distributions);