/// <summary>
/// Constructor: initializes the distribution matching <paramref name="DistributionName"/>
/// (case-insensitive). Unrecognized names are silently ignored; only the name is stored.
/// NOTE(review): vs is not length-checked — too few parameters throw IndexOutOfRangeException.
/// </summary>
/// <param name="DistributionName">Distribution name, e.g. "Normal", "Triangular", "Bernoulli".</param>
/// <param name="vs">Distribution parameters; meaning depends on the case (see per-case comments).</param>
public Distribution(String DistributionName, params double[] vs)
{
    switch (DistributionName.ToUpper())
    {
        case "NORMAL":
            normalDis = new Normal(vs[0], vs[1]); // double mean, double stddev
            break;
        case "CONTINUOUS":
            continuousUniformDis = new ContinuousUniform(vs[0], vs[1]); // double lower, double upper
            break;
        case "TRIANGULAR":
            triangularDis = new Triangular(vs[0], vs[1], vs[2]); // double lower, double upper, double mode (lower <= mode <= upper)
            break;
        case "STUDENTT":
            studentTDis = new StudentT(vs[0], vs[1], vs[2]); // double location, double scale, double freedom
            break;
        case "BERNOULLI":
            bernoulliDis = new Bernoulli(vs[0]); // double p (success probability)
            break;
        case "DISCRETEUNIFORM":
            discreteUniform = new DiscreteUniform((int)vs[0], (int)vs[1]); // int lower, int upper (doubles truncated)
            break;
    }
    this.DistributionName = DistributionName;
}
/// <summary>
/// ToString must render "DiscreteUniform(Lower = 0, Upper = 10)" under the invariant culture.
/// </summary>
public void ValidateToString()
{
    // Pin the culture so numeric formatting is deterministic across machines.
    System.Threading.Thread.CurrentThread.CurrentCulture = System.Globalization.CultureInfo.InvariantCulture;

    var distribution = new DiscreteUniform(0, 10);

    Assert.AreEqual("DiscreteUniform(Lower = 0, Upper = 10)", distribution.ToString());
}
/// <summary>
/// Sets this instance's AmiabilityLevel
/// </summary>
/// <param name="seed">Seed based on player and slice</param>
private void SetLevel(int seed)
{
    // Both sources may share the seed; independence is not needed here.
    uniformRandSeed = new SystemRandomSource(seed);
    binomialRandSeed = new SystemRandomSource(seed);

    // Sample uniformly over every defined AmiabilityLevel value.
    var levelCount = Enum.GetNames(typeof(AmiabilityLevel)).Length;
    uniformDist = new DiscreteUniform(0, levelCount - 1, uniformRandSeed);
    AmiabilityLevel = (AmiabilityLevel)uniformDist.Sample();
}
/// <summary>
/// Builds a mixed batch of cancelled and filled orders; the cancelled portion collectively
/// meets the cancelled-trade value threshold.
/// </summary>
private Order[] CancellationOrdersByValue(EquityInstrumentIntraDayTimeBar security, int totalOrders)
{
    if (security == null || totalOrders == 0)
    {
        return new Order[0];
    }

    var cancelledCount = DiscreteUniform.Sample(1, totalOrders);
    var fulfilledCount = totalOrders - cancelledCount;

    // Spread the threshold evenly across cancelled orders; +1 rounds each share up.
    // ReSharper disable RedundantCast
    var minimumPerOrderValue = (int)((decimal)this._valueOfCancelledTradeThreshold * (decimal)(1m / cancelledCount) + 1);
    // ReSharper restore RedundantCast

    var orders = new List<Order>();

    for (var i = 0; i < cancelledCount; i++)
    {
        orders.Add(this.OrderForValue(OrderStatus.Cancelled, minimumPerOrderValue, security, this._lastFrame.Exchange));
    }

    for (var i = 0; i < fulfilledCount; i++)
    {
        // Filled orders get a random value up to 3x the per-order minimum.
        var fulfilledOrderValue = DiscreteUniform.Sample(0, minimumPerOrderValue * 3);
        orders.Add(this.OrderForValue(OrderStatus.Filled, fulfilledOrderValue, security, this._lastFrame.Exchange));
    }

    return orders.ToArray();
}
/// <summary>
/// BeInfluenced a beliefBit by doing.
/// A random draw perturbs (uniform) or replaces (binary) the bit; the result is
/// clamped into [RangeMin, RangeMax].
/// </summary>
/// <param name="model"></param>
/// <param name="beliefBit"></param>
public void Learn(RandomGenerator model, byte beliefBit)
{
    var bit = BeliefBits.GetBit(beliefBit);

    switch (model)
    {
        case RandomGenerator.RandomUniform:
            // Additive continuous perturbation, clamped to the valid range.
            bit += ContinuousUniform.Sample(RangeMin, RangeMax);
            bit = Math.Max(RangeMin, Math.Min(RangeMax, bit));
            break;

        case RandomGenerator.RandomBinary:
            // Discrete replacement: the previous value is discarded.
            bit = DiscreteUniform.Sample(RangeMin, RangeMax);
            break;

        default:
            throw new ArgumentOutOfRangeException(nameof(model), model, null);
    }

    BeliefBits.SetBit(beliefBit, bit);
}
/// <summary>
/// Picks a protocol count uniformly from the configured [min, max] range and
/// returns that many randomly chosen distinct protocols.
/// </summary>
public static IEnumerable<TSLProtocol> GetDefaultNumberOfDistinctProtocols(this TSLGeneratorContext context)
{
    var count = DiscreteUniform.Sample(
        context.MasterRandom,
        CommunicationInstanceSettings.MinProtocolNumber,
        CommunicationInstanceSettings.MaxProtocolNumber);

    return context.GetRandomDistinctProtocols(count);
}
/// <summary>
/// Builds an AtomType whose value provider renders a uniform integer in [lower, upper]
/// followed by an optional literal suffix (e.g. "L", "u").
/// </summary>
private static AtomType IntegralType(string name, int lower, int upper, string suffix = "")
{
    Func<Random, string> provider =
        random => $"{DiscreteUniform.Sample(random, lower, upper)}{suffix}";

    return new AtomType(name, false, provider);
}
/// Generates samples with weightings that are integral and compares that to the unweighted statistics result. Doesn't correspond with the
/// higher order sample statistics because our weightings represent reliability weights, *not* frequency weights, and the Bessel correction is
/// calculated appropriately - so don't let the construction of the test mislead you.
public void ConsistentWithUnweighted(string dataSet)
{
    var data = _data[dataSet].Data.ToArray();

    // Integer weights in [1, 5], one per data point.
    var gen = new DiscreteUniform(1, 5);
    var weights = new int[data.Length];
    gen.Samples(weights);

    // Weighted accumulator: (weight, value) pairs.
    var stats = new RunningWeightedStatistics(data.Select((x, i) => System.Tuple.Create((double)weights[i], x)));

    // Unweighted accumulator: each value pushed weights[i] times to simulate frequency weighting.
    var stats2 = new RunningStatistics();
    for (int i = 0; i < data.Length; ++i)
    {
        for (int j = 0; j < weights[i]; ++j)
        {
            stats2.Push(data[i]);
        }
    }

    var sumWeights = weights.Sum();
    Assert.That(stats.TotalWeight, Is.EqualTo(sumWeights), "TotalWeight");
    Assert.That(stats.Count, Is.EqualTo(weights.Length), "Count");
    Assert.That(stats2.Minimum, Is.EqualTo(stats.Minimum), "Minimum");
    Assert.That(stats2.Maximum, Is.EqualTo(stats.Maximum), "Maximum");
    Assert.That(stats2.Mean, Is.EqualTo(stats.Mean).Within(1e-8), "Mean");
    // Population moments agree to within floating-point tolerance.
    Assert.That(stats2.PopulationVariance, Is.EqualTo(stats.PopulationVariance).Within(1e-9), "PopulationVariance");
    Assert.That(stats2.PopulationStandardDeviation, Is.EqualTo(stats.PopulationStandardDeviation).Within(1e-9), "PopulationStandardDeviation");
    Assert.That(stats2.PopulationSkewness, Is.EqualTo(stats.PopulationSkewness).Within(1e-8), "PopulationSkewness");
    Assert.That(stats2.PopulationKurtosis, Is.EqualTo(stats.PopulationKurtosis).Within(1e-8), "PopulationKurtosis");
}
/// <summary>
/// Draws a random order status: a uniform sample in [1, 5] cast to the enum.
/// </summary>
private OrderStatus CalculateOrderStatus()
{
    return (OrderStatus)DiscreteUniform.Sample(1, 5);
}
/// <summary>
/// A DiscreteUniform built from valid bounds exposes those bounds unchanged.
/// </summary>
public void CanCreateDiscreteUniform([Values(-10, 0, 10, 20)] int l, [Values(10, 4, 20, 20)] int u)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(l, dist.LowerBound);
    Assert.AreEqual(u, dist.UpperBound);
}
/// <summary>
/// Given a random model
/// set the weights : an array fill of random float ranging [-1; 1]
/// representing the detailed Belief of an agent
/// </summary>
/// <param name="model"></param>
/// <param name="length"></param>
/// <param name="beliefWeightLevel"></param>
/// <returns></returns>
public void InitializeWeights(RandomGenerator model, byte length, BeliefWeightLevel beliefWeightLevel)
{
    // NOTE(review): results are assigned to float[]; these Samples overloads appear to be
    // project wrappers (not the MathNet int-based API) — confirm against the library in use.
    float[] beliefBits;
    switch (beliefWeightLevel)
    {
        case BeliefWeightLevel.NoWeight:
            // Degenerate distribution: every weight is 0.
            beliefBits = DiscreteUniform.Samples(length, 0, 0);
            break;
        case BeliefWeightLevel.RandomWeight:
            // Continuous or discrete uniform in [0, RangeMax], per the requested generator.
            beliefBits = model == RandomGenerator.RandomUniform
                ? ContinuousUniform.Samples(length, 0, RangeMax)
                : DiscreteUniform.Samples(length, 0, RangeMax);
            break;
        case BeliefWeightLevel.FullWeight:
            // Degenerate distribution: every weight is 1.
            beliefBits = DiscreteUniform.Samples(length, 1, 1);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(beliefWeightLevel), beliefWeightLevel, null);
    }

    Weights = new Bits(beliefBits, 0);
}
/// <summary>
/// Draws a random trade direction: a uniform sample in [1, 2] cast to the enum.
/// </summary>
private OrderDirections CalculateTradeDirection()
{
    return (OrderDirections)DiscreteUniform.Sample(1, 2);
}
/// <summary>
/// The lazy Samples() sequence can be enumerated without error.
/// </summary>
public void CanSampleSequence()
{
    var dist = new DiscreteUniform(0, 10);
    var sequence = dist.Samples();

    // Materialize a handful of draws; KeepAlive stops the result being optimized away.
    GC.KeepAlive(sequence.Take(5).ToArray());
}
/// <summary>
/// A DiscreteUniform built from valid bounds exposes those bounds unchanged.
/// </summary>
public void CanCreateDiscreteUniform(int l, int u)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(l, dist.LowerBound);
    Assert.AreEqual(u, dist.UpperBound);
}
/// <summary>
/// Assigns one knowledge item to the actor, chosen per the organization's strategy:
/// 0 = everyone shares knowledgeIds[0], 1 = per group (index i), 2 = random per agent.
/// </summary>
private void SetAgentKnowledge(CognitiveAgent actor, IReadOnlyList<IAgentId> knowledgeIds, int i)
{
    var index = 0;
    if (ExampleMainOrganization.Knowledge == 1)
    {
        // Knowledge is assigned by group.
        index = i;
    }
    else if (ExampleMainOrganization.Knowledge == 2)
    {
        // Knowledge is picked at random for this agent.
        index = DiscreteUniform.Sample(0, ExampleMainOrganization.GroupsCount - 1);
    }
    // Knowledge == 0 (or any other value): keep index 0 — same knowledge for all.

    actor.KnowledgeModel.AddKnowledge(
        knowledgeIds[index],
        ExampleMainOrganization.KnowledgeLevel,
        actor.Cognitive.InternalCharacteristics.MinimumRemainingKnowledge,
        actor.Cognitive.InternalCharacteristics.TimeToLive);
}
// Heartbeat handler: picks a random security from the last frame, emits a burst of
// spoofed orders (oldest event first), then a single counter trade. All stream writes
// happen under the lock to keep the burst atomic with respect to frame updates.
private void TradeOnHeartbeat(object sender, EventArgs e)
{
    lock (this._lock)
    {
        // Nothing to spoof without a frame containing at least one security.
        if (this._lastFrame == null || !this._lastFrame.Securities.Any())
        {
            return;
        }

        // Uniformly select one security from the frame.
        var selectSecurityToSpoof = DiscreteUniform.Sample(0, this._lastFrame.Securities.Count - 1);
        var spoofSecurity = this._lastFrame.Securities.Skip(selectSecurityToSpoof).FirstOrDefault();

        // limited to six as recursion > 8 deep tends to get tough on the stack and raise the risk of a SO error
        // if you want to increase this beyond 20 update spoofed order code for volume as well.
        var spoofSize = DiscreteUniform.Sample(1, 6);
        var spoofedOrders = this.SpoofedOrder(spoofSecurity, spoofSize, spoofSize)
            .OrderBy(x => x.MostRecentDateEvent());
        var counterTrade = this.CounterTrade(spoofSecurity);

        // Spoofed orders go out first (chronological), the counter trade last.
        foreach (var item in spoofedOrders)
        {
            this.TradeStream.Add(item);
        }

        this.TradeStream.Add(counterTrade);
    }
}
// Recursively builds a chain of spoofed LIMIT BUY orders against the bid, one per
// remaining count. Each level prices slightly above the bid (offset shrinks as the
// chain unwinds) and back-dates the order by up to 10 minutes so the chain appears
// chronological. Recursion depth is bounded by the caller (see TradeOnHeartbeat).
private Order[] SpoofedOrder(
    EquityInstrumentIntraDayTimeBar security,
    int remainingSpoofedOrders,
    int totalSpoofedOrders)
{
    if (security == null || remainingSpoofedOrders <= 0)
    {
        return (new Order[0]);
    }

    // Price offset in [1.01, 1.0x]: earlier (deeper) orders sit further above the bid.
    var priceOffset = (100 + remainingSpoofedOrders) / 100m;
    var limitPriceValue = security.SpreadTimeBar.Bid.Value * priceOffset;
    var limitPrice = new Money(limitPriceValue, security.SpreadTimeBar.Bid.Currency);

    // Cap each order's share of traded volume so the whole chain stays near 100%.
    var individualTradeVolumeLimit = 100 / totalSpoofedOrders;
    var volumeTarget = (100 + DiscreteUniform.Sample(0, individualTradeVolumeLimit)) / 100m;
    var volume = (int)(security.SpreadTimeBar.Volume.Traded * volumeTarget);

    // Back-date so order N appears N minutes before "now" minus 10.
    var statusChangedOn = DateTime.UtcNow.AddMinutes(-10 + remainingSpoofedOrders);
    var tradePlacedOn = statusChangedOn;

    // NOTE(review): positional Order constructor — argument order must match the
    // project's Order signature exactly; do not reorder.
    var spoofedTrade = new Order(
        security.Security,
        security.Market,
        null,
        Guid.NewGuid().ToString(),
        DateTime.UtcNow,
        "order-v1",
        "order-v1",
        "order-group-1",
        tradePlacedOn,
        tradePlacedOn,
        null,
        null,
        statusChangedOn,
        null,
        OrderTypes.LIMIT,
        OrderDirections.BUY,
        security.SpreadTimeBar.Price.Currency,
        security.SpreadTimeBar.Price.Currency,
        OrderCleanDirty.NONE,
        null,
        limitPrice,
        limitPrice,
        volume,
        volume,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        OptionEuropeanAmerican.NONE,
        new DealerOrder[0]);

    // Prepend this order to the rest of the (shorter) chain.
    return (new[] { spoofedTrade }
        .Concat(this.SpoofedOrder(security, remainingSpoofedOrders - 1, totalSpoofedOrders)).ToArray());
}
/// <summary>
/// Produces a collection-initializer literal for this list type containing a random
/// number (up to the configured maximum) of randomly generated elements.
/// </summary>
private string GetRandomListValue(Random random)
{
    var elementCount = DiscreteUniform.Sample(random, 0, ContainerProbabilities.List.MaxRandomElementCount);

    var values = Enumerable
        .Range(0, elementCount)
        .Select(_ => ElementType.GetRandomValue(random));

    return $"new {Name}{{ {string.Join(", ", values)} }}";
}
/// <summary>
/// Draws a random order type: a uniform sample in [0, 1] cast to the enum.
/// </summary>
private OrderTypes CalculateTradeOrderType()
{
    return (OrderTypes)DiscreteUniform.Sample(0, 1);
}
/// <summary>
/// Schedules the next transmission attempt using binary exponential backoff.
/// </summary>
/// <param name="failed">True when the previous attempt failed (window doubles, capped at CW_MAX).</param>
protected void WaitForTransmit(bool failed)
{
    // Double the contention window on failure; reset to the minimum on success.
    backoff = failed ? Math.Min(backoff * 2 + 1, Constants.CW_MAX) : Constants.CW_MIN;

    // Wait SIFS plus a uniformly chosen number of slots within the window.
    var slots = DiscreteUniform.Sample(0, backoff);
    timeToWait = Constants.SIFS_TIME + slots * Constants.SLOT_TIME;

    isWaitingAck = false;
}
/// <summary>
/// Picks a random trade volume in [1, sqrt(traded volume)] for the given bar.
/// </summary>
private int CalculateVolume(EquityInstrumentIntraDayTimeBar tick)
{
    // Clamp to at least 1 so sqrt never yields a zero upper bound.
    var traded = Math.Max(tick.SpreadTimeBar.Volume.Traded, 1);
    var upperBound = (int)Math.Sqrt(traded);

    return DiscreteUniform.Sample(1, upperBound);
}
/// <summary>
/// Draws a Poisson-distributed number of event times, each uniform within one second,
/// and stores the distinct times in <c>samples</c>.
/// </summary>
protected void Generate()
{
    var count = poisson.Sample();

    var times = new int[count];
    DiscreteUniform.Samples(times, 0, Constants.ONE_SECOND_TIME - 1);

    // HashSet collapses colliding times.
    samples = new HashSet<int>(times);
}
/// <summary>
/// Builds one cancelled order whose value falls between the singular-cancellation
/// threshold and 10,000,000.
/// </summary>
private Order[] SingularCancelledOrder(EquityInstrumentIntraDayTimeBar security, Market exchange)
{
    var orderValue = DiscreteUniform.Sample(this._valueOfSingularCancelledTradeThreshold, 10000000);

    return new[]
    {
        this.OrderForValue(OrderStatus.Cancelled, orderValue, security, exchange)
    };
}
/// <summary>
/// Generates a cell struct with a random number of random fields and registers it
/// on the context's cell list.
/// </summary>
public static ITSLTopLevelElement GenerateCell(this TSLGeneratorContext context)
{
    var cellName = $"CellStruct_{context.TopLevelElementCount + 1}";

    var fieldCount = DiscreteUniform.Sample(
        context.MasterRandom,
        StructSettings.MinFieldNumber,
        StructSettings.MaxFieldNumber);
    var cellFields = context.RandomFields().Take(fieldCount).ToArray();

    var cell = new TSLCell(cellName, cellFields);
    context.Cells.Add(cell);
    return cell;
}
/// <summary>
/// CumulativeDistribution(x) matches the expected CDF value for each bound/point tuple,
/// including degenerate distributions (l == u) and points outside the support.
/// </summary>
public void ValidateCumulativeDistribution(
    [Values(-10, -10, -10, -10, -10, -10)] int l,
    [Values(10, 10, 10, -10, -10, -10)] int u,
    [Values(-5, 1, 10, 0, -10, -11)] double x,
    [Values(6.0 / 21.0, 12.0 / 21.0, 1.0, 1.0, 1.0, 0.0)] double cdf)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(cdf, dist.CumulativeDistribution(x));
}
/// <summary>
/// ProbabilityLn(x) equals ln(1/(u-l+1)) inside the support, -inf outside,
/// and 0 for the degenerate single-point distribution.
/// </summary>
public void ValidateProbabilityLn(
    [Values(-10, -10, -10, -10, -10)] int l,
    [Values(10, 10, 10, -10, -10)] int u,
    [Values(-5, 1, 10, 0, -10)] int x,
    [Values(-3.0445224377234229965005979803657054342845752874046093, -3.0445224377234229965005979803657054342845752874046093, -3.0445224377234229965005979803657054342845752874046093, Double.NegativeInfinity, 0.0)] double dln)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(dln, dist.ProbabilityLn(x));
}
/// <summary>
/// Generates a struct with a random number of random fields and registers it on the context.
/// </summary>
public static ITSLTopLevelElement GenerateStruct(this TSLGeneratorContext context)
{
    var structName = $"Struct_{context.TopLevelElementCount + 1}";

    var fieldCount = DiscreteUniform.Sample(
        context.MasterRandom,
        StructSettings.MinFieldNumber,
        StructSettings.MaxFieldNumber);
    // Fields stay lazy; TSLStruct enumerates them itself.
    var structFields = context.RandomFields().Take(fieldCount);

    // TODO(leasunhy): generate attributes
    var result = new TSLStruct(structName, structFields);
    context.AddStruct(result);
    return result;
}
/// <summary>
/// Determine the number of arrivals in a time window and their arrival times.
/// The number of arrivals is Poisson-distributed; each arrival time is uniform
/// over the window, in whole seconds.
/// </summary>
/// <param name="mean">Average number of arrivals in the window.</param>
/// <param name="windowSeconds">Window length in seconds; defaults to 900 (15 minutes),
/// preserving the original behavior for existing callers.</param>
/// <returns>One arrival time in [0, windowSeconds - 1] per arrival.</returns>
public static int[] arrivingPassengers(double mean, int windowSeconds = 900)
{
    var arrivals = Poisson.Sample(mean);

    var times = new int[arrivals];
    for (int i = 0; i < arrivals; i++)
    {
        times[i] = DiscreteUniform.Sample(0, windowSeconds - 1);
    }

    return times;
}
// Mutates the chromosome's network topology: possibly resizes the input count and the
// hidden layer. Resizing inputs rebuilds every hidden neuron's weight array; resizing
// the hidden layer adds/removes neurons and then rebuilds every output neuron's weights.
// NOTE(review): distributions are constructed before the ShouldMutate() gate — wasted
// work (and a throw on invalid parameter bounds) even when no mutation occurs.
private void MutateNetworkTopology(Chromosome chromosome)
{
    // +1 presumably accounts for a bias input — TODO confirm against Chromosome.
    var inputCountDistribution = new DiscreteUniform(1 + _parameters.MinSensorCount, 1 + _parameters.MaxSensorCount);
    var hiddenNeuronCountDistribution = new DiscreteUniform(_parameters.MinHiddenLayerNeuronCount, _parameters.MaxHiddenLayerNeuronCount);

    if (!ShouldMutate())
    {
        return;
    }

    // Resample the input count; if it changed, resize every hidden neuron's weight
    // vector (+1 for the bias weight), preserving existing weights where possible.
    int oldInputCount = chromosome.InputCount;
    chromosome.InputCount = inputCountDistribution.Sample();
    if (oldInputCount != chromosome.InputCount)
    {
        for (int i = 0; i < chromosome.HiddenLayerNeurons.Count; i++)
        {
            var newNeuronData = new float[chromosome.InputCount + 1];
            CopyAndFill(chromosome.HiddenLayerNeurons[i], newNeuronData);
            chromosome.HiddenLayerNeurons[i] = newNeuronData;
        }
    }

    // Resample the hidden-layer size and reconcile the neuron list with it.
    int oldHiddenLayerNeuronCount = chromosome.HiddenLayerNeuronCount;
    chromosome.HiddenLayerNeuronCount = hiddenNeuronCountDistribution.Sample();
    if (oldHiddenLayerNeuronCount != chromosome.HiddenLayerNeuronCount)
    {
        int diff = oldHiddenLayerNeuronCount - chromosome.HiddenLayerNeuronCount;
        if (diff > 0)
        {
            // Shrinking: remove randomly chosen neurons one at a time.
            for (int i = 0; i < diff; i++)
            {
                int index = new DiscreteUniform(0, chromosome.HiddenLayerNeurons.Count - 1).Sample();
                chromosome.HiddenLayerNeurons.RemoveAt(index);
            }
        }
        else
        {
            // Growing: append freshly initialized neurons sized to the new input count.
            for (int i = 0; i < Math.Abs(diff); i++)
            {
                chromosome.HiddenLayerNeurons.Add(NewNeuron(chromosome.InputCount));
            }
        }

        // The hidden layer changed size, so every output neuron's weight vector
        // must be resized to match (+1 for the bias weight).
        for (int i = 0; i < chromosome.OutputLayerNeurons.Count; i++)
        {
            var newNeuronData = new float[chromosome.HiddenLayerNeuronCount + 1];
            CopyAndFill(chromosome.OutputLayerNeurons[i], newNeuronData);
            chromosome.OutputLayerNeurons[i] = newNeuronData;
        }
    }
}
/// <summary>
/// (Re)creates the uniform generator selected by <paramref name="g"/> over [lower, upper].
/// Unknown generator kinds leave both distributions untouched.
/// </summary>
public static void Initialize(int lower, int upper, Generator g)
{
    if (g == Generator.OrderSize)
    {
        distributionOrderSize = new DiscreteUniform(lower, upper);
    }
    else if (g == Generator.TimeBetweenOrders)
    {
        distributionTimeBetweenOrder = new DiscreteUniform(lower, upper);
    }
}
/// <summary>
/// Constructing a DiscreteUniform from invalid bounds must throw
/// (the expected exception is declared by the test's attribute).
/// </summary>
public void DiscreteUniformCreateFailsWithBadParameters(int l, int u)
{
    var unused = new DiscreteUniform(l, u);
}
/// <summary>
/// Assigning an invalid upper bound must throw
/// (the expected exception is declared by the test's attribute).
/// </summary>
public void SetUpperBoundFails(int p)
{
    var dist = new DiscreteUniform(0, 10);
    dist.UpperBound = p;
}
/// <summary>
/// Minimum mirrors the lower bound.
/// </summary>
public void ValidateMinimum()
{
    var dist = new DiscreteUniform(-10, 10);

    Assert.AreEqual<double>(-10, dist.Minimum);
}
/// <summary>
/// Entropy equals ln(u - l + 1) for each bound pair (0 for the degenerate case).
/// </summary>
public void ValidateEntropy([Values(-10, 0, 10, 20)] int l, [Values(10, 4, 20, 20)] int u, [Values(3.0445224377234229965005979803657054342845752874046093, 1.6094379124341003746007593332261876395256013542685181, 2.3978952727983705440619435779651292998217068539374197, 0.0)] double e)
{
    var dist = new DiscreteUniform(l, u);

    AssertHelpers.AlmostEqual(e, dist.Entropy, 14);
}
/// <summary>
/// ProbabilityLn(x) matches the expected log-pmf for the given bounds and point.
/// </summary>
public void ValidateProbabilityLn(int l, int u, int x, double dln)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(dln, dist.ProbabilityLn(x));
}
/// <summary>
/// Mean equals the expected midpoint for the given bounds.
/// </summary>
public void ValidateMean(int l, int u, int m)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(m, dist.Mean);
}
/// <summary>
/// Entropy matches the expected value to 14 relative decimal places.
/// </summary>
public void ValidateEntropy(int l, int u, double e)
{
    var dist = new DiscreteUniform(l, u);

    AssertHelpers.AlmostEqualRelative(e, dist.Entropy, 14);
}
/// <summary>
/// The lazy Samples() sequence can be partially materialized without error.
/// </summary>
public void CanSampleSequence()
{
    var dist = new DiscreteUniform(0, 10);
    var sequence = dist.Samples();

    sequence.Take(5).ToArray();
}
/// <summary>
/// Assigning a lower bound above the upper bound throws ArgumentOutOfRangeException.
/// </summary>
public void SetLowerBoundFails([Values(11, 20)] int p)
{
    var dist = new DiscreteUniform(0, 10);

    Assert.Throws<ArgumentOutOfRangeException>(() => dist.LowerBound = p);
}
/// <summary>
/// A discrete uniform distribution is symmetric, so skewness is always zero.
/// </summary>
public void ValidateSkewness([Values(-10, 0, 10, 20)] int l, [Values(10, 4, 20, 20)] int u)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(0.0, dist.Skewness);
}
/// <summary>
/// Assigning an upper bound below the lower bound throws ArgumentOutOfRangeException.
/// </summary>
public void SetUpperBoundFails(int p)
{
    var dist = new DiscreteUniform(0, 10);

    Assert.Throws<ArgumentOutOfRangeException>(() => dist.UpperBound = p);
}
/// <summary>
/// Mode equals the expected value for the given bounds.
/// </summary>
public void ValidateMode(int l, int u, int m)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual<double>(m, dist.Mode);
}
/// <summary>
/// A discrete uniform distribution is symmetric, so skewness is always zero.
/// </summary>
public void ValidateSkewness(int l, int u)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(0.0, dist.Skewness);
}
/// <summary>
/// Mean equals the midpoint of the bounds for each parameter tuple.
/// </summary>
public void ValidateMean([Values(-10, 0, 10, 20)] int l, [Values(10, 4, 20, 20)] int u, [Values(0, 2, 15, 20)] int m)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(m, dist.Mean);
}
/// <summary>
/// Maximum mirrors the upper bound.
/// </summary>
public void ValidateMaximum()
{
    var dist = new DiscreteUniform(-10, 10);

    Assert.AreEqual(10, dist.Maximum);
}
/// <summary>
/// A single Sample() call succeeds without error.
/// </summary>
public void CanSample()
{
    var dist = new DiscreteUniform(0, 10);
    dist.Sample();
}
/// <summary>
/// Assigning an invalid upper bound throws an ArgumentException (or subclass).
/// </summary>
public void SetUpperBoundFails(int p)
{
    var dist = new DiscreteUniform(0, 10);

    Assert.That(() => dist.UpperBound = p, Throws.ArgumentException);
}
/// <summary>
/// CumulativeDistribution(x) matches the expected CDF value for the given bounds and point.
/// </summary>
public void ValidateCumulativeDistribution(int l, int u, double x, double cdf)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(cdf, dist.CumulativeDistribution(x));
}
/// <summary>
/// A valid upper bound can be assigned after construction.
/// </summary>
public void CanSetUpperBound(int p)
{
    var dist = new DiscreteUniform(0, 10);
    dist.UpperBound = p;
}
/// <summary>
/// ToString renders "DiscreteUniform(Lower = 0, Upper = 10)".
/// </summary>
public void ValidateToString()
{
    var dist = new DiscreteUniform(0, 10);

    Assert.AreEqual("DiscreteUniform(Lower = 0, Upper = 10)", dist.ToString());
}
/// <summary>
/// Probability(x) equals 1/(u-l+1) inside the support, 0 outside,
/// and 1 for the degenerate single-point distribution.
/// </summary>
public void ValidateProbability([Values(-10, -10, -10, -10, -10)] int l, [Values(10, 10, 10, -10, -10)] int u, [Values(-5, 1, 10, 0, -10)] int x, [Values(1 / 21.0, 1 / 21.0, 1 / 21.0, 0.0, 1.0)] double p)
{
    var dist = new DiscreteUniform(l, u);

    Assert.AreEqual(p, dist.Probability(x));
}