// Generate synthetic randomized-response data.
// "1" (true)  - cheating
// "0" (false) - not cheating
static bool[] GetSampleData(int n, double p)
{
    var results = new bool[n];
    var truth = new Bernoulli(p);
    // One fair coin serves as both coin #1 and coin #2, since they are indistinguishable.
    var coin = new Bernoulli(0.5);

    for (int k = 0; k < n; k++)
    {
        bool actual = truth.Sample();
        // Coin #1 heads ("1"): report the true state.
        // Coin #1 tails ("0"): report the outcome of a second coin toss instead.
        results[k] = coin.Sample() ? actual : coin.Sample();
    }

    return results;
}
/// <summary>
/// Lazily creates the dropout filter matrix sized to match the input matrix.
/// Each cell is drawn from the probability distribution and scaled by the
/// inverted multiplier.
/// </summary>
/// <param name="matrix">Matrix whose dimensions the filter must match.</param>
protected void _CreateFilter(IMatrix matrix)
{
    Debug.Assert(_filter == null); // the filter must only be created once

    // Create a filter against the dropout probability.
    // (A block of commented-out dead code was removed here.)
    _filter = _lap.Create(matrix.RowCount, matrix.ColumnCount,
        (x, y) => _probabilityDistribution.Sample() / _invertedMultiplier);
}
// Generate synthetic data in which the treatment has no effect:
// treated and control groups share the same recovery probability.
public static void GenerateNoEffectData(int n, double probRecovery, out bool[] treatedData, out bool[] controlData)
{
    var recoveryDist = new Bernoulli(probRecovery);
    treatedData = new bool[n];
    controlData = new bool[n];

    for (int idx = 0; idx < n; idx++)
    {
        treatedData[idx] = recoveryDist.Sample();
        controlData[idx] = recoveryDist.Sample();
    }
}
/// <summary>
/// Simulates a single season outcome (playoffs, first-round bye, championship)
/// for a team in the given league/division.
/// </summary>
/// <param name="sportsLeague">"football" selects football rules; anything else selects basketball rules.</param>
/// <param name="division">Division used when building the basketball season rules.</param>
/// <param name="seasonStartYear">Season start year for the rules and the outcome.</param>
/// <returns>The populated simulation outcome.</returns>
public static SimulationOutcome GetSportsLeagueSimulationOutcome(string sportsLeague, string division, int seasonStartYear)
{
    var randomSeed = SystemRandomSource.Default;

    // Initialize the simulation
    var simulationOutCome = new SimulationOutcome
    {
        SimulationID = Guid.NewGuid().ToString(),
        SeasonStartYear = seasonStartYear
    };

    // Get the league season structure. (The original allocated a throwaway
    // SportsLeagueSeason and then overwrote it; assign once instead.)
    SportsLeagueSeason sportsLeagueRules = (sportsLeague == "football")
        ? Seasons.SportsSeasonRules.GetFootballSeasonRules(seasonStartYear)
        : Seasons.SportsSeasonRules.GetBasketballSeasonRules(seasonStartYear, division);

    // Simulate whether the team made the playoffs.
    var probabilityOfPlayOffsDistribution = new Bernoulli(sportsLeagueRules.ProbabilityOfPlayoffs, randomSeed);
    simulationOutCome.IsPlayoffTeam = Convert.ToBoolean(probabilityOfPlayOffsDistribution.Sample());

    if (simulationOutCome.IsPlayoffTeam)
    {
        // Simulate whether the playoff team earned a bye.
        // (An unused `var test = ...Sample()` call that consumed an extra
        // random draw was removed here.)
        var probabilityOfPlayOffsByeDistribution = new Bernoulli(sportsLeagueRules.ProbabilityOfPlayoffsBye, randomSeed);
        var isPlayoffTeamWithBye = Convert.ToBoolean(probabilityOfPlayOffsByeDistribution.Sample());
        simulationOutCome.IsPlayoffTeamWithBye = isPlayoffTeamWithBye;

        // Simulate winning the championship: the team must win every remaining
        // playoff round (one fewer round with a bye), each modeled as a 50/50 game.
        var numberOfPlayoffRoundWinsForChampionship =
            sportsLeagueRules.NumberOfPlayoffRoundsWithChampionship - (isPlayoffTeamWithBye ? 1 : 0);
        var probabilityOfChampionship = new Binomial(0.5, numberOfPlayoffRoundWinsForChampionship, randomSeed);
        var numberOfSimulatedPlayoffWins = probabilityOfChampionship.Sample();
        simulationOutCome.IsChampionshipWinningTeam =
            numberOfSimulatedPlayoffWins == numberOfPlayoffRoundWinsForChampionship;
    }

    return simulationOutCome;
}
/// <summary>
/// Generates complete data for a two-coin mixture experiment: each experiment
/// selects coin A or coin B via a Bernoulli selector, then records the count
/// of successes from a binomial draw with that coin's bias.
/// </summary>
public static void GenerateComplete(int numExperiments, int numTrials, double selectProb, double biasA, double biasB, out int[] data, out int[] assignment)
{
    data = new int[numExperiments];
    assignment = new int[numExperiments];

    var selector = new Bernoulli(selectProb);
    var coinA = new Binomial(numTrials, biasA);
    var coinB = new Binomial(numTrials, biasB);

    for (int exp = 0; exp < numExperiments; exp++)
    {
        // Selector true -> coin A (assignment 0); false -> coin B (assignment 1).
        bool pickA = selector.Sample();
        assignment[exp] = pickA ? 0 : 1;
        data[exp] = pickA ? coinA.Sample() : coinB.Sample();
    }
}
/// <summary>
/// This method runs the sampler for a number of iterations without returning a sample
/// </summary>
private void Burn(int n)
{
    for (int iter = 0; iter < n; iter++)
    {
        // Propose a candidate from the current state.
        T candidate = mProposal(mCurrent);

        // Log-density at the candidate.
        double logDensity = mPdfLnP(candidate);

        // Forward and backward transition log-probabilities (Hastings correction).
        double forward = mKrnlQ(candidate, mCurrent);
        double backward = mKrnlQ(mCurrent, candidate);

        mSamples++;

        // Log acceptance ratio, capped at 0 (acceptance probability capped at 1).
        double logAccept = System.Math.Min(0.0, logDensity + backward - mCurrentDensityLn - forward);

        // Accept outright at ratio 1, otherwise with probability exp(logAccept);
        // short-circuiting keeps the Bernoulli draw conditional, exactly as before.
        if (logAccept == 0.0 || Bernoulli.Sample(RandomSource, System.Math.Exp(logAccept)) == 1)
        {
            mCurrent = candidate;
            mCurrentDensityLn = logDensity;
            mAccepts++;
        }
    }
}
/// <summary>
/// Generates a data set from a particular true model: a two-component
/// mixture of 2-D Gaussians with mixing weight 0.6 on the first component.
/// </summary>
public Vector[] GenerateData(int nData)
{
    // True component means.
    Vector trueM1 = Vector.FromArray(2.0, 3.0);
    Vector trueM2 = Vector.FromArray(7.0, 5.0);

    // True component precisions.
    var trueP1 = new PositiveDefiniteMatrix(new double[,] { { 3.0, 0.2 }, { 0.2, 2.0 } });
    var trueP2 = new PositiveDefiniteMatrix(new double[,] { { 2.0, 0.4 }, { 0.4, 4.0 } });

    VectorGaussian trueVG1 = VectorGaussian.FromMeanAndPrecision(trueM1, trueP1);
    VectorGaussian trueVG2 = VectorGaussian.FromMeanAndPrecision(trueM2, trueP2);

    // Mixing coin: true (p = 0.6) selects component 1.
    var trueB = new Bernoulli(0.6);

    // Restart the Infer.NET random number generator for reproducibility.
    Rand.Restart(12347);

    var data = new Vector[nData];
    for (int idx = 0; idx < nData; idx++)
    {
        data[idx] = trueB.Sample() ? trueVG1.Sample() : trueVG2.Sample();
    }

    return data;
}
/// <summary>
/// Generates data from the true model: B causes A
/// </summary>
/// <param name="N">Number of data points to generate</param>
/// <param name="q">Noise (flip) probability</param>
/// <param name="doB">Whether to intervene or not</param>
/// <param name="probBIntervention">Prob of choosing B=true when intervening</param>
/// <returns></returns>
private static Data GenerateFromBcausesA(int N, double q, bool doB, double probBIntervention)
{
    var d = new Data
    {
        A = new bool[N],
        B = new bool[N],
        doB = new bool[N]
    };

    // Uniform prior over B.
    var Bprior = new Bernoulli(0.5);

    // Noise distribution: probability of flipping B when producing A.
    var flipDist = new Bernoulli(q);

    // Distribution over the values of B when we intervene.
    var interventionDist = new Bernoulli(probBIntervention);

    for (int i = 0; i < N; i++)
    {
        // B comes from the intervention distribution when intervening, else from its prior.
        d.B[i] = doB ? interventionDist.Sample() : Bprior.Sample();

        // A is a noisy copy of B: flipped with probability q.
        d.A[i] = d.B[i] != flipDist.Sample();

        // Record whether we intervened on B (currently the same for all data points,
        // but could easily be modified).
        d.doB[i] = doB;
    }

    return d;
}
/// <summary>
/// This method runs the sampler for a number of iterations without returning a sample
/// </summary>
private void Burn(int n)
{
    for (int iter = 0; iter < n; iter++)
    {
        // Propose a candidate state.
        T candidate = _proposal(_current);

        // Log-density at the candidate.
        double logDensity = _pdfLnP(candidate);

        // Forward/backward transition log-probabilities for the Hastings correction.
        double forward = _krnlQ(candidate, _current);
        double backward = _krnlQ(_current, candidate);

        Samples++;

        // Log acceptance ratio, capped at 0.
        double logAccept = Math.Min(0.0, logDensity + backward - _currentDensityLn - forward);

        // Accept outright at ratio 1, otherwise with probability exp(logAccept);
        // short-circuiting keeps the Bernoulli draw conditional, exactly as before.
        if (logAccept == 0.0 || Bernoulli.Sample(RandomSource, Math.Exp(logAccept)) == 1)
        {
            _current = candidate;
            _currentDensityLn = logDensity;
            Accepts++;
        }
    }
}
/// <summary>
/// This method runs the sampler for a number of iterations without returning a sample
/// </summary>
private void Burn(int n)
{
    for (int iter = 0; iter < n; iter++)
    {
        // Propose a candidate and evaluate its log-density.
        T candidate = _proposal(_current);
        double logDensity = _pdfLnP(candidate);

        Samples++;

        // Metropolis log acceptance ratio, capped at 0.
        double logAccept = Math.Min(0.0, logDensity - _currentDensityLn);

        // Accept outright at ratio 1, otherwise with probability exp(logAccept);
        // short-circuiting keeps the Bernoulli draw conditional, exactly as before.
        if (logAccept == 0.0 || Bernoulli.Sample(RandomSource, Math.Exp(logAccept)) == 1)
        {
            _current = candidate;
            _currentDensityLn = logDensity;
            Accepts++;
        }
    }
}
/// <summary>
/// Generates logistic-regression data: X[i] ~ N(0, I) and
/// Y[i] ~ Bernoulli(logistic(w·X[i] + b)).
/// </summary>
/// <param name="n">Number of samples to generate.</param>
/// <param name="w">Coefficient vector; must not be null.</param>
/// <param name="b">Bias term.</param>
/// <param name="X">Output feature vectors.</param>
/// <param name="Y">Output binary labels.</param>
/// <param name="seed">Seed for the Infer.NET random number generator.</param>
public static void GenData(int n, Vector w, double b, out Vector[] X, out bool[] Y, int seed = 1)
{
    // Validate before dereferencing: the original read w.Count first, which
    // would throw NullReferenceException instead of the intended ArgumentException.
    if (w == null)
    {
        throw new ArgumentException("coefficient vector w cannot be null");
    }

    Rand.Restart(seed);
    int d = w.Count;
    X = new Vector[n];
    Y = new bool[n];

    for (int i = 0; i < n; i++)
    {
        // Samples are drawn from a standard multivariate normal.
        X[i] = Vector.Zero(d);
        Rand.Normal(Vector.Zero(d), PositiveDefiniteMatrix.IdentityScaledBy(d, 1), X[i]);

        // Label probability through the logistic link.
        double p = MMath.Logistic(w.Inner(X[i]) + b);
        Y[i] = Bernoulli.Sample(p);
    }
}
/// <summary>
/// This method runs the sampler for a number of iterations without returning a sample
/// </summary>
private void Burn(int n)
{
    for (int iter = 0; iter < n; iter++)
    {
        // Propose a candidate and evaluate its log-density.
        T candidate = mProposal(mCurrent);
        double logDensity = mPdfLnP(candidate);

        mSamples++;

        // Metropolis log acceptance ratio, capped at 0.
        double logAccept = System.Math.Min(0.0, logDensity - mCurrentDensityLn);

        // Accept outright at ratio 1, otherwise with probability exp(logAccept);
        // short-circuiting keeps the Bernoulli draw conditional, exactly as before.
        if (logAccept == 0.0 || Bernoulli.Sample(RandomSource, System.Math.Exp(logAccept)) == 1)
        {
            mCurrent = candidate;
            mCurrentDensityLn = logDensity;
            mAccepts++;
        }
    }
}
/// <summary>
/// Generates the next room, anchored to the previous room's exit point,
/// and randomly populates it with characters and item chests.
/// </summary>
public Room GenerateRoom()
{
    // Random room dimensions and a random vertical offset for the pivot.
    var roomWidth = sizeGen.Sample();
    var roomHeight = sizeGen.Sample();
    var verticalOffset = new DiscreteUniform(1, roomHeight - 2).Sample();

    var pivot = lastRoom.ExitPoint + new Point(1, -verticalOffset);
    var room = GenerateEmpty(roomWidth, roomHeight, pivot, lastRoom.ExitPoint, !first);

    var characterGen = new CharacterGenerator();
    var itemGen = new ItemGenerator();

    // Up to 6 characters, each placed on a coin flip.
    for (int attempt = 0; attempt < 6; attempt++)
    {
        if (boolGen.Sample() != 1)
        {
            continue;
        }
        var character = characterGen.GenerateCharacter();
        character.Transform.Position = findRandomPosition(room);
        room.Entities.Add(character);
    }

    // Up to 6 chests, each placed on a coin flip and filled with random items.
    for (int attempt = 0; attempt < 6; attempt++)
    {
        if (boolGen.Sample() != 1)
        {
            continue;
        }
        var chest = new Chest();
        chest.Transform.Position = findRandomPosition(room);
        var itemCount = randomItemCount();
        chest.Items = new List<Item>();
        for (int j = 0; j < itemCount; j++)
        {
            chest.Items.Add(itemGen.GenerateItem());
        }
        room.Entities.Add(chest);
    }

    lastRoom = room;
    first = false;
    return room;
}
/// <summary>
/// Method used to update the sample location. Used in the end of the loop.
/// </summary>
/// <param name="E">The old energy.</param>
/// <param name="Gradient">The old gradient/derivative of the energy.</param>
/// <param name="mNew">The new sample.</param>
/// <param name="gNew">The new gradient/derivative of the energy.</param>
/// <param name="Enew">The new energy.</param>
/// <param name="DH">The difference between the old Hamiltonian and new Hamiltonian. Use to determine
/// if an update should take place. </param>
protected void Update(ref double E, ref T Gradient, T mNew, T gNew, double Enew, double DH)
{
    // Accept unconditionally when the Hamiltonian did not increase; otherwise
    // accept with probability exp(-DH). Short-circuiting ensures the Bernoulli
    // draw only happens when DH > 0, exactly as before.
    if (DH <= 0 || Bernoulli.Sample(RandomSource, System.Math.Exp(-DH)) == 1)
    {
        mCurrent = mNew;
        Gradient = gNew;
        E = Enew;
        Accepts++;
    }
}
/// <summary>
/// Method used to update the sample location. Used in the end of the loop.
/// </summary>
/// <param name="e">The old energy.</param>
/// <param name="gradient">The old gradient/derivative of the energy.</param>
/// <param name="mNew">The new sample.</param>
/// <param name="gNew">The new gradient/derivative of the energy.</param>
/// <param name="enew">The new energy.</param>
/// <param name="dh">The difference between the old Hamiltonian and new Hamiltonian. Use to determine
/// if an update should take place. </param>
protected void Update(ref double e, ref T gradient, T mNew, T gNew, double enew, double dh)
{
    // Accept unconditionally when the Hamiltonian did not increase; otherwise
    // accept with probability exp(-dh). Short-circuiting ensures the Bernoulli
    // draw only happens when dh > 0, exactly as before.
    if (dh <= 0 || Bernoulli.Sample(RandomSource, System.Math.Exp(-dh)) == 1)
    {
        Current = mNew;
        gradient = gNew;
        e = enew;
        Accepts++;
    }
}
/// <summary>
/// Generates a random character: a monster or an NPC, chosen by sampling
/// the monster-vs-NPC distribution (a sample of 1 means "monster").
/// </summary>
public Character GenerateCharacter()
{
    return monsterOrNPCDistribution.Sample() == 1
        ? GenerateMonster()
        : GenerateNPC();
}
/// <summary>
/// Synthetic data: Samples person skill data
/// </summary>
/// <param name="numSkills">total number of skills</param>
/// <param name="numPersons">total number of persons</param>
/// <param name="personSkills">output array of arrays</param>
public static void SamplePersonSkillsData(int numSkills, int numPersons, out bool[][] personSkills)
{
    // Each person has each skill independently with probability 0.5.
    var fairCoin = new Bernoulli(0.5);
    personSkills = new bool[numPersons][];

    for (int person = 0; person < numPersons; person++)
    {
        var skills = new bool[numSkills];
        for (int skill = 0; skill < numSkills; skill++)
        {
            skills[skill] = fairCoin.Sample();
        }
        personSkills[person] = skills;
    }
}
/// <summary>
/// Randomly rolls bonus attributes onto a piece of equipment: each known
/// attribute is applied with a fixed probability, with a Poisson-distributed
/// modifier clamped to a minimum of 1.
/// </summary>
private void generateAttributes(Equipment equipment)
{
    foreach (KeyValuePair<int, Attribute> entry in this.attributes)
    {
        // Roll whether this attribute is applied at all.
        if (Bernoulli.Sample(StoreConfig.ATTRIBUTE_PROB) != 1)
        {
            continue;
        }

        // Poisson-distributed modifier shifted down by one, clamped in [1, inf).
        int modifier = Poisson.Sample(StoreConfig.MODIFIER_LAMBDA) - 1;
        equipment.setAttributeBonus(entry.Value, modifier > 0 ? modifier : 1);
    }
}
// Generate synthetic data in which the treatment has an effect: treated and
// control groups recover with different probabilities.
public static void GenerateHasEffectData(int n, double probTreated, double probControl, out bool[] treatedData, out bool[] controlData)
{
    var treatedDist = new Bernoulli(probTreated);
    var controlDist = new Bernoulli(probControl);

    treatedData = new bool[n];
    controlData = new bool[n];

    for (int idx = 0; idx < n; idx++)
    {
        treatedData[idx] = treatedDist.Sample();
        controlData[idx] = controlDist.Sample();
    }
}
/// <summary>
/// Samples a list of indices: index i is included with probability probsTrue[i].
/// </summary>
/// <param name="probsTrue">Per-index inclusion probabilities.</param>
/// <returns>The sampled indices, in increasing order.</returns>
public static IList<int> Sample(SparseVector probsTrue)
{
    // NOTE: an unused bool[probsTrue.Count] allocation was removed here.
    var list = new List<int>();
    int i = 0;
    foreach (double d in probsTrue)
    {
        if (Bernoulli.Sample(d))
        {
            list.Add(i);
        }
        i++;
    }
    return list;
}
/// <summary>
/// Samples a list of ints from this distribution
/// </summary>
/// <param name="result">Where to put the resulting sample</param>
/// <returns>The same list instance, filled with the sampled indices.</returns>
public IList<int> Sample(IList<int> result)
{
    // TODO: efficient sparse implementation
    result.Clear();

    int index = 0;
    foreach (double logOdds in LogOddsVector)
    {
        // Convert log-odds to a probability and flip a coin for this index.
        if (Bernoulli.Sample(MMath.Logistic(logOdds)))
        {
            result.Add(index);
        }
        index++;
    }

    return result;
}
/// <summary>
/// Exercises the ROC curve computation: generates random binary ground truth
/// and random [0,1] prediction scores, computes a 20-point ROC curve, and
/// prints each point plus the area under the curve.
/// </summary>
public static void Test()
{
    // Random binary ground-truth labels and random prediction scores.
    var realData = Util.ArrayInit(20, d => Bernoulli.Sample(0.5) ? 1.0 : 0.0);
    var testData = Util.ArrayInit(20, d => Beta.Sample(1, 1));

    // Create the ROC curve for the predictions against the ground truth.
    // BUG FIX: the original passed realData twice, leaving testData unused
    // and producing a trivially perfect curve.
    var rocCurve = new ReceiverOperatingCharacteristic(realData, testData);

    // Compute the ROC curve with 20 points.
    rocCurve.Compute(20);

    for (int i = 0; i < rocCurve.Points.Count; i++)
    {
        // BUG FIX: the true positive rate is the Sensitivity; the original
        // printed Specificity for it.
        Console.WriteLine(
            "ROC curve at point {0}: false positive rate {1:0.000}, true positive rate {2:0.000}, accuracy {3:0.000}",
            i,
            1 - rocCurve.Points[i].Specificity,
            rocCurve.Points[i].Sensitivity,
            rocCurve.Points[i].Accuracy);
    }

    Console.WriteLine("Area under the ROC curve: {0:0.000}", rocCurve.Area);
}
/// <summary>
/// Forward pass for dropout: while training, randomly zeroes activations and
/// rescales the survivors; otherwise passes the input through untouched.
/// </summary>
public override void ExecuteForward(IContext context)
{
    if (!context.IsTraining)
    {
        _AddNextGraphAction(context, context.Data, null);
        return;
    }

    // Drop out random neurons during training.
    var lap = context.LinearAlgebraProvider;
    var matrix = context.Data.GetMatrix();
    var filter = lap.CreateMatrix(matrix.RowCount, matrix.ColumnCount, (i, j) =>
        BoundMath.IsZero(_dropOutPercentage)
            ? 1f
            : _probabilityToDrop.Sample() == 1 ? 0f : 1f / _dropOutPercentage);
    var output = matrix.PointwiseMultiply(filter);
    _AddNextGraphAction(context, context.Data.ReplaceWith(output), () => new Backpropagation(this, filter));
}
/// <summary>
/// Forward pass for drop-connect: while training, randomly zeroes weights
/// (rescaling the survivors) before the feed-forward step; otherwise defers
/// to the base implementation.
/// </summary>
public override void ExecuteForward(IContext context)
{
    if (!context.IsTraining)
    {
        base.ExecuteForward(context);
        return;
    }

    var lap = context.LinearAlgebraProvider;
    var input = context.Data;
    var inputMatrix = input.GetMatrix();

    // Build a random mask over the weight matrix and apply it.
    var filter = lap.CreateMatrix(Weight.RowCount, Weight.ColumnCount, (i, j) =>
        BoundMath.IsZero(_dropOutPercentage)
            ? 1f
            : _probabilityToDrop.Sample() == 1 ? 0f : 1f / _dropOutPercentage);
    var filteredWeights = Weight.PointwiseMultiply(filter);

    var output = _FeedForward(inputMatrix, filteredWeights);
    _AddNextGraphAction(context, input.ReplaceWith(output), () => new Backpropagation(this, inputMatrix, filter, filteredWeights));
}
/// <summary>
/// Samples N points from a zero-inflated Poisson: with probability pi the
/// value is 0, otherwise it is drawn from Poisson(lambda).
/// </summary>
public static int[] SampleData(int N, double pi, double lambda)
{
    var coin = new Bernoulli(pi);
    var poisson = new Poisson(lambda);

    var data = new int[N];
    for (int idx = 0; idx < N; idx++)
    {
        // The coin selects the zero component; the Poisson is sampled only otherwise.
        data[idx] = coin.Sample() ? 0 : poisson.Sample();
    }
    return data;
}
/// <summary>
/// Check if Agent is randomly Isolated
/// </summary>
/// <returns>true if agent is isolated, false otherwise</returns>
private bool IsRandomlyIsolated()
{
    // Translate the configured frequency into an isolation probability,
    // then draw a Bernoulli sample against it.
    switch (AgentCanBeIsolated)
    {
        case Frequency.Never:
            return Bernoulli.Sample(0F);
        case Frequency.VeryRarely:
            return Bernoulli.Sample(0.1F);
        case Frequency.Rarely:
            return Bernoulli.Sample(0.3F);
        case Frequency.Medium:
            return Bernoulli.Sample(0.5F);
        case Frequency.Often:
            return Bernoulli.Sample(0.7F);
        case Frequency.VeryOften:
            return Bernoulli.Sample(0.9F);
        case Frequency.Always:
            return Bernoulli.Sample(1F);
        default:
            throw new ArgumentOutOfRangeException();
    }
}
/// <summary>
/// CPU forward pass for dropout: during training, builds a Bernoulli keep-mask
/// scaled by 1/(1-ratio) and multiplies it into the input; otherwise copies
/// the input straight through.
/// </summary>
internal override double ForwardCpu(CpuTensorScopeCollection bottom, CpuTensorScopeCollection top)
{
    var bottomData = bottom[0].Data;
    var topData = top[0].Data;

    if (Phase != PhaseType.Train)
    {
        // Inference: identity pass-through.
        bottomData.CopyTo(topData);
        return 0;
    }

    var ratio = this.Parameters.Ratio;
    var scale = 1f / (1f - ratio);

    // Keep each unit with probability (1 - ratio); kept units are scaled up by
    // 1/(1 - ratio) (inverted dropout, per the code above).
    var bernoulli = new Bernoulli(1 - ratio);
    mask = Vector<double>.Build.SameAs(bottomData, () => scale * bernoulli.Sample());
    bottomData.PointwiseMultiply(mask, result: topData);

    return 0;
}
/// <summary>
/// Synthetic data: Samples answer data
/// </summary>
/// <param name="numPersons">total number of persons</param>
/// <param name="numQuestions">total number of questions</param>
/// <param name="numSkills">total number of skills</param>
/// <param name="skillsNeeded">question skills array of arrays</param>
/// <param name="personSkills">person skills array of arrays</param>
/// <param name="isCorrect">output array of arrays</param>
public static void SampleIsCorrectData(int numPersons, int numQuestions, int numSkills, int[][] skillsNeeded, bool[][] personSkills, out bool[][] isCorrect)
{
    // A person with all the required skills answers correctly with p = 0.9,
    // otherwise only with p = 0.1.
    Bernoulli isCorrectCoin = new Bernoulli(0.9);
    Bernoulli isIncorrectCoin = new Bernoulli(0.1);

    isCorrect = new bool[numPersons][];
    for (int p = 0; p < numPersons; p++)
    {
        var pSkills = personSkills[p]; // skills of p-th person
        isCorrect[p] = new bool[numQuestions];
        for (int q = 0; q < numQuestions; q++)
        {
            var qSkills = skillsNeeded[q]; // skills needed to answer q-th question

            // The person "has the skills" iff they hold every required skill.
            // A question with no required skills counts as NOT having the skills,
            // matching the original behavior; the original's redundant special
            // case for the first iteration is folded into the initializer.
            bool hasSkills = qSkills.Length > 0;
            for (int s = 0; s < qSkills.Length; s++)
            {
                hasSkills = hasSkills & pSkills[qSkills[s]];
            }

            isCorrect[p][q] = hasSkills ? isCorrectCoin.Sample() : isIncorrectCoin.Sample();
        }
    }
}
/// <summary>
/// Generates data from the true model: A causes B
/// </summary>
/// <param name="N">Number of data points to generate</param>
/// <param name="q">Noise (flip) probability</param>
/// <param name="doB">Whether to intervene or not</param>
/// <param name="probBIntervention">Prob of choosing B=true when intervening</param>
/// <returns></returns>
private static Data GenerateFromAcausesB(int N, double q, bool doB, double probBIntervention)
{
    var d = new Data
    {
        A = new bool[N],
        B = new bool[N],
        doB = new bool[N]
    };

    // Uniform prior on A.
    var Aprior = new Bernoulli(0.5);

    // Noise distribution: probability of flipping A when producing B.
    var flipDist = new Bernoulli(q);

    // Distribution over the values of B when we intervene.
    var interventionDist = new Bernoulli(probBIntervention);

    for (int i = 0; i < N; i++)
    {
        // Draw A from its prior.
        d.A[i] = Aprior.Sample();

        // Whether we intervened on B (currently the same for all data points,
        // but could easily be modified).
        d.doB[i] = doB;

        // Without intervention, B follows the causal model: a noisy copy of A,
        // flipped with probability q. With intervention, B is an independent flip.
        d.B[i] = d.doB[i]
            ? interventionDist.Sample()
            : d.A[i] != flipDist.Sample();
    }

    return d;
}
/// <summary>
/// Synthetic data: Samples question skill data
/// </summary>
/// <param name="numSkills">total number of skills</param>
/// <param name="numQuestions">total number of questions</param>
/// <param name="skillsNeeded">output array of arrays</param>
public static void SampleSkillsNeededData(int numSkills, int numQuestions, out int[][] skillsNeeded)
{
    skillsNeeded = new int[numQuestions][];
    var coin = new Bernoulli(0.5);

    for (int q = 0; q < numQuestions; q++)
    {
        // Each skill is required by this question with probability 0.5.
        List<int> skillsList = new List<int>();
        for (int s = 0; s < numSkills; s++)
        {
            if (coin.Sample())
            {
                skillsList.Add(s);
            }
        }

        // ToArray replaces the original's manual element-by-element copy loop.
        skillsNeeded[q] = skillsList.ToArray();
    }
}
/// <summary>
/// Smoke test: sampling from a Bernoulli(0.3) completes without throwing.
/// </summary>
public void CanSample()
{
    var dist = new Bernoulli(0.3);
    dist.Sample();
}
/// <summary>
/// Generates a data set from a particular true model: a two-component
/// mixture of 2-D Gaussians with mixing weight 0.6 on the first component.
/// </summary>
public Vector[] GenerateData(int nData)
{
    // True component means.
    Vector mean1 = Vector.FromArray(2.0, 3.0);
    Vector mean2 = Vector.FromArray(7.0, 5.0);

    // True component precisions.
    var prec1 = new PositiveDefiniteMatrix(new double[,] { { 3.0, 0.2 }, { 0.2, 2.0 } });
    var prec2 = new PositiveDefiniteMatrix(new double[,] { { 2.0, 0.4 }, { 0.4, 4.0 } });

    VectorGaussian component1 = VectorGaussian.FromMeanAndPrecision(mean1, prec1);
    VectorGaussian component2 = VectorGaussian.FromMeanAndPrecision(mean2, prec2);

    // Mixing coin: true (p = 0.6) selects component 1.
    double truePi = 0.6;
    var mixCoin = new Bernoulli(truePi);

    // Restart the Infer.NET random number generator for reproducibility.
    Rand.Restart(12347);

    var samples = new Vector[nData];
    for (int j = 0; j < nData; j++)
    {
        samples[j] = mixCoin.Sample() ? component1.Sample() : component2.Sample();
    }

    return samples;
}