public void Test1()
    {
        // SplitMix64
        var seed = 1234567ul;

        Xoshiro256StarStar.SplitMix64(ref seed).Is(6457827717110365317ul);
        Xoshiro256StarStar.SplitMix64(ref seed).Is(3203168211198807973ul);
        Xoshiro256StarStar.SplitMix64(ref seed).Is(9817491932198370423ul);
        Xoshiro256StarStar.SplitMix64(ref seed).Is(4593380528125082431ul);
        Xoshiro256StarStar.SplitMix64(ref seed).Is(16408922859458223821ul);

        var xo = new Xoshiro256StarStar(42);

        DoubleToString(xo.NextDouble()).Is("0.0838629710598822");
        DoubleToString(xo.NextDouble()).Is("0.3789802506626686");
        DoubleToString(xo.NextDouble()).Is("0.6800434110281394");
        DoubleToString(xo.NextDouble()).Is("0.9246929453253876");
        DoubleToString(xo.NextDouble()).Is("0.9918039142821028");

        string DoubleToString(double d) => d.ToString("F16");

        xo.NextUInt64().Is(14199186830065750584ul);
        xo.NextUInt64().Is(13267978908934200754ul);
        xo.NextUInt64().Is(15679888225317814407ul);
        xo.NextUInt64().Is(14044878350692344958ul);
        xo.NextUInt64().Is(10760895422300929085ul);
    }
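For reference, SplitMix64 is the standard mixer used to seed xoshiro-family generators. A sketch of the canonical step (Steele, Lea & Flood; Vigna's reference code), which the library's Xoshiro256StarStar.SplitMix64 presumably matches, given the test vectors above:

    // Canonical SplitMix64: advances the 64-bit state and returns a mixed output.
    static ulong SplitMix64(ref ulong state)
    {
        ulong z = state += 0x9E3779B97F4A7C15ul; // Golden-ratio increment
        z = (z ^ (z >> 30)) * 0xBF58476D1CE4E5B9ul;
        z = (z ^ (z >> 27)) * 0x94D049BB133111EBul;
        return z ^ (z >> 31);
    }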
Example #2
    public static void QuickStart_Xoshiro256StarStar()
    {
        // xoshiro256** is a pseudo-random number generator.
        var xo    = new Xoshiro256StarStar(42);
        var ul    = xo.NextUInt64(); // [0, 2^64-1]
        var d     = xo.NextDouble(); // [0,1)
        var bytes = new byte[10];

        xo.NextBytes(bytes);
    }
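NextDouble is conventionally built from the top 53 bits of a 64-bit output. A sketch of the usual mapping (an assumption here, not verified against this library's implementation):

    // Standard 53-bit conversion: drop the low 11 bits, scale by 2^-53 to land in [0, 1).
    static double ToDouble(ulong x) => (x >> 11) * (1.0 / (1ul << 53));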
Example #3
        public static void TestQuantileTrap()
        {
            Xoshiro256StarStar rand          = new Xoshiro256StarStar(8675309);
            const int          numberOfDists = 10;

            //const double meanOfNormals = 50;
            //const double stdDevOfNormals = 0.002;
            double Shape() => 2 * rand.NextDouble() - 1;
            double Scale() => 0.002 * rand.NextDouble();
            double Location() => 180 * rand.NextDouble();

            IDistributionWrapper[] dists = new IDistributionWrapper[numberOfDists];
            for (int i = 0; i < dists.Length; i++)
            {
                //dists[i] = new WrappedDistribution(new Normal(rand.NextDouble() * meanOfNormals * 2, 0 + Math.Abs(rand.NextDouble() * stdDevOfNormals)), -100, 200); // Bounds are unused here
                dists[i] = new WrappedDistribution(new GEV(Location(), Scale(), Shape(), rand), -100, 100);
            }
            double[] exact = DiscardProbabilityComputation.ComplementsTrapezoid(dists, 50000);
            Console.WriteLine("Exact:");
            for (int i = 0; i < exact.Length; i++)
            {
                Console.WriteLine($"{i}: {dists[i].GetWrappedDistribution()} 1-P(D_i) = {exact[i]}");
            }
            exact = DiscardProbabilityComputation.ComplementsMonteCarloMaximizing(dists);
            Console.WriteLine("MC:");
            for (int i = 0; i < exact.Length; i++)
            {
                Console.WriteLine($"{i}: {dists[i].GetWrappedDistribution()} 1-P(D_i) = {exact[i]}");
            }
            double[] est;
            int[]    its = new int[] { 10, 20, 30, 40, 50, 75, 100, 200, 500, 1000, 2000, 20000 };
            for (int i = 0; i < its.Length; i++)
            {
                int size = its[i];
                est = DiscardProbabilityComputation.ComplementsQuantileTrapRule(dists, size);
                Console.WriteLine($"Size {size}:");
                for (int j = 0; j < est.Length; j++)
                {
                    Console.WriteLine($"{j}: {dists[j].GetWrappedDistribution()} 1-P(D_i) = {est[j]}");
                }
            }
        }
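For context: the quantity 1-P(D_i) printed above is the probability that distribution i is not discarded, i.e. that it attains the best value of the group. A minimal sketch of the trapezoid-rule computation under the maximizing convention suggested by ComplementsMonteCarloMaximizing (the signature and integrand are illustrative assumptions, not the library's actual code):

        // P(X_i = max_j X_j) = ∫ pdf_i(x) * Π_{j≠i} cdf_j(x) dx, on a uniform grid.
        static double[] ComplementsTrapezoidSketch(
            Func<double, double>[] pdfs, Func<double, double>[] cdfs,
            double lower, double upper, int steps)
        {
            int n = pdfs.Length;
            double h = (upper - lower) / steps;
            double[] result = new double[n];
            for (int i = 0; i < n; i++)
            {
                double sum = 0;
                for (int s = 0; s <= steps; s++)
                {
                    double x = lower + s * h;
                    double f = pdfs[i](x);
                    for (int j = 0; j < n; j++)
                    {
                        if (j != i) { f *= cdfs[j](x); }
                    }
                    sum += (s == 0 || s == steps) ? 0.5 * f : f; // Trapezoid endpoint weights
                }
                result[i] = h * sum;
            }
            return result;
        }

Under independence these complements sum to 1 across the distributions, which is a useful sanity check on any of the three methods compared above.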
Example #4
        public static void TestGEVComplementComputations()
        {
            double ep           = Math.Pow(2, -50);
            double complementEp = 1.0 - ep;

            int testSize = 20;

            //GEV[] dists = new GEV[] { new GEV(0,200,-1), new GEV(0,100,-1) };

            GEV[]  dists = new GEV[testSize];
            Random rand  = new Xoshiro256StarStar(8675309);

            for (int i = 0; i < dists.Length; i++)
            {
                dists[i] = new GEV(rand.NextDouble(), rand.NextDouble(), -rand.NextDouble(), rand);
            }

            IDistributionWrapper[] wrappedDists = new IDistributionWrapper[dists.Length];
            for (int i = 0; i < dists.Length; i++)
            {
                wrappedDists[i] = new WrappedDistribution(dists[i], dists[i].InverseCumulativeDistribution(ep), dists[i].InverseCumulativeDistribution(complementEp));
            }

            double[] complements     = DiscardProbabilityComputation.ComplementsClenshawCurtisAutomatic(wrappedDists);
            double[] complementsTrap = DiscardProbabilityComputation.ComplementsTrapezoid(wrappedDists, 10000);
            double[] mcComplements   = DiscardProbabilityComputation.ComplementsMonteCarlo(wrappedDists, iterations: 10000000);
            double   totalc          = 0;
            double   totalmc         = 0;
            double   totalTrap       = 0;

            for (int i = 0; i < complements.Length; i++)
            {
                GEV dist = dists[i];
                Program.logger.WriteLine($"Distribution Scale: {dist.scale} Loc {dist.location} Shape {dist.shape} " +
                                         $"1-P(D) {complements[i]} MC {mcComplements[i]} Trap {complemetnsTrap[i]}");
                totalc    += complements[i];
                totalmc   += mcComplements[i];
                totalTrap += complementsTrap[i];
            }

            Program.logger.WriteLine($"Total probability: {totalc} Total by MC: {totalmc} Total by Trap 10k: {totalTrap}");
        }
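For reference, the GEV quantile function that InverseCumulativeDistribution evaluates has a closed form. A standalone sketch of the standard formula, assuming the library's (location, scale, shape) parameterization:

        // GEV quantile for 0 < p < 1.
        // Shape ξ != 0:  Q(p) = μ + (σ/ξ) * ((-ln p)^(-ξ) - 1)
        // Shape ξ == 0 (Gumbel limit):  Q(p) = μ - σ * ln(-ln p)
        static double GevQuantile(double location, double scale, double shape, double p)
        {
            if (shape == 0.0)
            {
                return location - scale * Math.Log(-Math.Log(p));
            }
            return location + scale / shape * (Math.Pow(-Math.Log(p), -shape) - 1.0);
        }

With ep = 2^-50, the wrapped bounds above are therefore the extreme quantiles Q(ep) and Q(1 - ep) of each distribution.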
Example #5
        public void RunAllIterations(IScenario scenario, int noOfIterations, int numberOfThreads, int seed,
                                     int maxSimulationQueueSize, bool addDatesToOutputFileNames = false)
        {
            SaveFilesWithDates = addDatesToOutputFileNames;

            _cancelSignal  = new CancellationTokenSource();
            _numberOfDays  = scenario.DaysToProject;
            _seedGenerator = new Xoshiro256StarStar(seed, true);

            var initializationInfo = PrepareInitializationInfo(scenario);

            SaveScenario(scenario);
            PrepareOutputFiles();

            // this is to ensure that the memory requirements don't run away as we generate simulations faster than they can be processed
            _simulations = new BlockingCollection <TSimulation>(maxSimulationQueueSize);

            var processingTasks = new Task[numberOfThreads + 1];

            processingTasks[0] = ProduceSimulations(noOfIterations, initializationInfo, _cancelSignal.Token);

            for (var t = 0; t < numberOfThreads; t++)
            {
                processingTasks[t + 1] = Task.Factory.StartNew(ConsumeSimulations, _cancelSignal.Token);
            }

            try
            {
                Task.WaitAll(processingTasks);
                Log.Information("All processing threads have finished");
            }
            catch (AggregateException a)
            {
                Log.Error(a, "Failed to run all iterations");
                throw;
            }
        }
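A minimal, self-contained sketch of the bounded producer/consumer pattern used above (requires System, System.Collections.Concurrent, and System.Threading.Tasks; the int payload and constants stand in for the class's TSimulation and members):

        // The bounded capacity makes Add block once the queue is full, which is what
        // keeps memory in check when production outpaces consumption.
        static void ProducerConsumerSketch()
        {
            var queue = new BlockingCollection<int>(boundedCapacity: 8);

            var producer = Task.Run(() =>
            {
                for (var i = 0; i < 100; i++)
                {
                    queue.Add(i); // Blocks while the queue already holds 8 items
                }
                queue.CompleteAdding(); // Lets consumers drain the queue and exit
            });

            var consumers = new Task[4];
            for (var t = 0; t < consumers.Length; t++)
            {
                consumers[t] = Task.Run(() =>
                {
                    // Ends once adding is complete and the queue is empty
                    foreach (var item in queue.GetConsumingEnumerable())
                    {
                        Console.WriteLine($"Processed {item}");
                    }
                });
            }

            Task.WaitAll(producer);
            Task.WaitAll(consumers);
        }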
Example #6
        /// <summary> Runs the algorithm in an attempt to narrow down the solution space to a set of regions
        /// which can then be checked exhaustively. </summary>
        /// <param name="layers"> The number of iterations before terminating </param>
        /// <param name="confidenceLevel"> A number between 0 and 1 describing the confidence level to be used in statistical comparisons </param>
        /// <param name="Conservative"> Whether or not to always keep all regions for which it is not possible to rule out that they contain optimal values with 95% confidence.
        /// Results can be higher quality with this on, but it may take significantly longer to run </param>
        /// <param name="CullDuplicates"> If true, duplicate regions will be removed after at the end of each layer. Requires the solution space to implement <see cref="IEquatable{T}"/> </param>
        /// <returns> An array of regions that are likely to contain optimal solutions </returns>
        public Region[] RunBranchAndBound(int sampleSize, int layers, double confidenceLevel, bool Conservative = true, bool CullDuplicates = false)
        {
            // Sanity check the initial sample size
            if (sampleSize < 30)
            {
                throw new ArgumentOutOfRangeException(nameof(sampleSize), "Sample size must be at least 30.");
            }

            double branchingFactor   = 0; // Keeps track of average branches per active region per layer, which may not be constant if there is overlap between regions
            double BestObservedValue = double.PositiveInfinity;

            Region[] activeRegions = new Region[1] {
                SolutionSpace
            };
            List <Region> discardedRegions = new List <Region>(512);

            // Determine how many stdevs to use when computing the threshold for keeping otherwise discardable regions because they might contain minima
            double k = Math.Sqrt(1.0 / (1.0 - confidenceLevel) - 1.0); // Using Chebyshev's inequality (one-sided Cantelli form), standardized to tighten the bound
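            // Derivation: Cantelli's inequality gives P(X - mu >= k*sigma) <= 1 / (1 + k^2).
            // Setting 1 / (1 + k^2) = 1 - confidenceLevel and solving yields
            // k = sqrt(1 / (1 - confidenceLevel) - 1) = sqrt(confidenceLevel / (1 - confidenceLevel)).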

            // Main loop
            for (int layer = 0; layer < layers; layer++)
            {
                // --- Branch and Sample Active Regions ---
                // Branch the active regions
                var newActiveRegions = new List <Region>();
                foreach (Region reg in activeRegions)
                {
                    if (reg != null)
                    {
                        newActiveRegions.AddRange(reg.Branch());
                    }
                }

                Program.logger.WriteLine($"Branched to produce {newActiveRegions.Count} new regions.");
                // Cull duplicates if desired
                if (CullDuplicates)
                {
                    for (int i = 1; i < newActiveRegions.Count; i++)
                    {
                        for (int j = 0; j < i; j++)
                        {
                            if (newActiveRegions[i].Equals(newActiveRegions[j]))
                            {
                                newActiveRegions.RemoveAt(i);
                                i--;
                                break;
                            }
                        }
                    }
                    Program.logger.WriteLine($"Duplicates culled. {newActiveRegions.Count} regions remain.");
                }

                // Update branching factor
                branchingFactor = (branchingFactor * layer + (double)newActiveRegions.Count / activeRegions.Length) / (layer + 1); // Cast avoids truncating integer division
                Program.logger.WriteLine($"Branching factor revised to {branchingFactor}");
                activeRegions = newActiveRegions.ToArray();

                // Set up random number generators for each task in a deterministic way
                Random[] rands = new Random[activeRegions.Length];
                for (int i = 0; i < rands.Length; i++)
                {
                    rands[i] = new Xoshiro256StarStar(SolutionSpace.GetRNG().Next());
                }

                // Sample the regions (now with 300% more threads!)
                Task[] tasks = new Task[activeRegions.Length];
                for (int i = 0; i < activeRegions.Length; i++)
                {
                    int j = i; // Copy the loop variable so each lambda closure captures its own value
                    tasks[j] = Task.Run(() => { activeRegions[j].Sample(sampleSize, rands[j]); });
                }
                Task.WaitAll(tasks);
                //foreach (Region reg in activeRegions) { reg.Sample(sampleSize); }

                // Compute discard probabilities and number of discardable regions
                Normal[] samplingDistributions = new Normal[activeRegions.Length + discardedRegions.Count];
                for (int i = 0; i < activeRegions.Length; i++)
                {
                    samplingDistributions[i] = activeRegions[i].SamplingDistribution;
                }
                for (int i = 0; i < discardedRegions.Count; i++)
                {
                    samplingDistributions[i + activeRegions.Length] = discardedRegions[i].SamplingDistribution;
                }
                double[] discardComplements = NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariantAutomatic(samplingDistributions);

                // If discard can likely be increased cheaply, increase sample size repeatedly until it can't
                int  currentDiscardCount = EnumerateDiscardableRegions(discardComplements);
                int  currentSampleSize   = sampleSize;
                bool increasing          = true;
                while (increasing)
                {
                    int newSampleSize = currentSampleSize + (int)(currentSampleSize * branchingFactor / activeRegions.Length);
                    // Construct a new set of normal distributions estimating the scenario where a larger sample size was used on each
                    var hypotheticals = new Normal[samplingDistributions.Length];
                    for (int i = 0; i < activeRegions.Length; i++)
                    {
                        hypotheticals[i] = activeRegions[i].EstimateDistributionWithDifferentSampleSize(newSampleSize);
                    }
                    for (int i = 0; i < discardedRegions.Count; i++)
                    {
                        hypotheticals[activeRegions.Length + i] = discardedRegions[i].SamplingDistribution;
                    }
                    double[] hypoDiscardProbs = NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariantAutomatic(hypotheticals);
                    int      discardable      = EnumerateDiscardableRegions(hypoDiscardProbs);
                    if (discardable <= currentDiscardCount)
                    {
                        increasing = false;
                    }
                    else
                    {
                        currentSampleSize = newSampleSize;
                        Program.logger.WriteLine($"Revision expected to increase discard count from {currentDiscardCount} to {discardable}");
                        currentDiscardCount = discardable;
                    }
                }
                // If sample size was increased, resample active regions up to that size
                if (currentSampleSize > sampleSize)
                {
                    for (int i = 0; i < activeRegions.Length; i++)
                    {
                        activeRegions[i].Sample(currentSampleSize);
                        samplingDistributions[i] = activeRegions[i].SamplingDistribution;
                    }
                    discardComplements = NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariantAutomatic(samplingDistributions);
                    Program.logger.WriteLine($"Layer sample size increased to {currentSampleSize}");
                }

                // Update the best observed value
                foreach (Region reg in activeRegions)
                {
                    BestObservedValue = Math.Min(reg.BestObservation, BestObservedValue);
                }
                Program.logger.WriteLine($"Best value observed in layer: {BestObservedValue}");

                // --- Discard Regions ---
                // Note: The repeated linear scans below are inefficient, but they are conceptually clearer than sorting, and this is not a performance bottleneck
                double certainty = 1;
                // Handle the discarded regions' contributions first
                for (int i = activeRegions.Length; i < discardComplements.Length; i++)
                {
                    certainty -= discardComplements[i];
                }
                Program.logger.WriteLine($"Uncertainty due to previously discarded regions: {1-certainty}");
                bool discarding       = true;
                int  discardCountTemp = 0;
                while (discarding)
                {
                    // Find the active region with the largest discard value
                    double min      = 1;
                    int    minIndex = 0;
                    for (int i = 0; i < activeRegions.Length; i++)
                    {
                        if (discardComplements[i] < min &&
                            activeRegions[i] != null
                            // Skip regions if they have too high of a probability of containing observations that compete with the current best
                            && ((activeRegions[i].SampleMean - k * activeRegions[i].SampleStdDev > BestObservedValue) || !Conservative))
                        {
                            min = discardComplements[i]; minIndex = i;
                        }
                    }
                    // Try to discard it
                    if (certainty - min > confidenceLevel)
                    {
                        certainty -= min;
                        if (min > 1E-18)
                        {
                            discardedRegions.Add(activeRegions[minIndex]);
                        }
                        activeRegions[minIndex] = null;
                        discardCountTemp++;
                    }
                    else
                    {
                        discarding = false;
                    }
                }

                // Discard completely uncompetitive regions with prejudice
                int discarded = 0;
                for (int i = activeRegions.Length; i < discardComplements.Length; i++)
                {
                    if (discardComplements[i] < 1E-13)
                    {
                        discardedRegions.RemoveAt(i - activeRegions.Length - discarded); discarded++;
                    }
                }
                Program.logger.WriteLine($"Forgot {discarded} unimportant discarded regions. {discardedRegions.Count} discarded regions remain.");


                Program.logger.WriteLine($"Layer {layer} complete. Analyzed {activeRegions.Length} active regions, and discarded {discardCountTemp} active regions.");
                Program.logger.WriteLine("--- Active Regions --- ");
                for (int i = 0; i < activeRegions.Length; i++)
                {
                    if (activeRegions[i] != null)
                    {
                        Program.logger.WriteLine($"Region {i}: {activeRegions[i].ToString()} \n");
                    }
                }
                Program.logger.WriteLine("--- Discarded Regions Still Affecting Probabilities --- ");
                for (int i = 0; i < discardedRegions.Count; i++)
                {
                    Program.logger.WriteLine($"Region {i}: {discardedRegions[i].ToString()} \n");
                }
            }

            // Shrinkwrap the output
            var output = new List <Region>(activeRegions);

            output.RemoveAll(r => (r == null));
            return(output.ToArray());
        }
Example #7
    public void Test1()
    {
        var xo = new Xoshiro256StarStar(42);
        var rv = new RandomVault(() => xo.NextUInt64(), x => xo.NextBytes(x));

        DoubleToString(rv.NextDouble()).Is("0.0838629710598822");
        DoubleToString(rv.NextDouble()).Is("0.3789802506626686");
        DoubleToString(rv.NextDouble()).Is("0.6800434110281394");
        DoubleToString(rv.NextDouble()).Is("0.9246929453253876");
        DoubleToString(rv.NextDouble()).Is("0.9918039142821028");

        string DoubleToString(double d) => d.ToString("F16");

        rv.NextUInt64().Is(14199186830065750584ul);
        rv.NextUInt64().Is(13267978908934200754ul);
        rv.NextUInt64().Is(15679888225317814407ul);
        rv.NextUInt64().Is(14044878350692344958ul);
        rv.NextUInt64().Is(10760895422300929085ul);

        // NextULong only
        xo = new Xoshiro256StarStar(42);
        rv = new RandomVault(() => xo.NextUInt64(), null);

        DoubleToString(rv.NextDouble()).Is("0.0838629710598822");
        DoubleToString(rv.NextDouble()).Is("0.3789802506626686");
        DoubleToString(rv.NextDouble()).Is("0.6800434110281394");
        DoubleToString(rv.NextDouble()).Is("0.9246929453253876");
        DoubleToString(rv.NextDouble()).Is("0.9918039142821028");

        rv.NextUInt64().Is(14199186830065750584ul);
        rv.NextUInt64().Is(13267978908934200754ul);
        rv.NextUInt64().Is(15679888225317814407ul);
        rv.NextUInt64().Is(14044878350692344958ul);
        rv.NextUInt64().Is(10760895422300929085ul);

        // NextBytes only
        xo = new Xoshiro256StarStar(42);
        rv = new RandomVault(null, x => xo.NextBytes(x));

        DoubleToString(rv.NextDouble()).Is("0.0838629710598822");
        DoubleToString(rv.NextDouble()).Is("0.3789802506626686");
        DoubleToString(rv.NextDouble()).Is("0.6800434110281394");
        DoubleToString(rv.NextDouble()).Is("0.9246929453253876");
        DoubleToString(rv.NextDouble()).Is("0.9918039142821028");

        rv.NextUInt64().Is(14199186830065750584ul);
        rv.NextUInt64().Is(13267978908934200754ul);
        rv.NextUInt64().Is(15679888225317814407ul);
        rv.NextUInt64().Is(14044878350692344958ul);
        rv.NextUInt64().Is(10760895422300929085ul);

        // Multi-thread
        var xo2 = new Xoshiro256StarStar(42);
        var rv2 = new RandomVault(() => xo2.NextUInt64(), x => xo2.NextBytes(x));

        const int P     = 100;
        const int N     = 1000;
        var       array = new Queue <ulong> [P];
        var       t     = Parallel.For(0, P, x =>
        {
            var queue = new Queue <ulong>();
            for (var i = 0; i < N; i++)
            {
                queue.Enqueue(rv2.NextUInt64());
            }

            array[x] = queue;
        });

        var ss = new SortedSet <ulong>();

        for (var i = 0; i < P; i++)
        {
            array[i].Count.Is(N);
            while (array[i].TryDequeue(out var u))
            {
                ss.Contains(u).IsFalse();
                ss.Add(u);
            }
        }

        ss.Count.Is(P * N);
    }
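A note on the uniqueness assertion above: the 100 × 1,000 = 100,000 draws all come from a single xoshiro256** stream, and the birthday bound puts the chance of any 64-bit collision near (10^5)^2 / 2^65 ≈ 3·10^-10, so requiring no duplicates is a probabilistic but practically safe test.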
Example #8
        public static void P3()
        {
            Xoshiro256StarStar rand = new Xoshiro256StarStar();

            int RunProcess(int time)
            {
                int popsize = 1;

                for (int t = 0; t < time; t++)
                {
                    int newpopSize = popsize;
                    for (int i = 0; i < popsize; i++)
                    {
                        double val = rand.NextDouble();
                        if (val < 0.25)
                        {
                            newpopSize--;
                        }
                        if (val > 0.5)
                        {
                            newpopSize++;
                        }
                        if (val > 0.75)
                        {
                            newpopSize++;
                        }
                    }
                    popsize = newpopSize;
                }
                return(popsize);
            }
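            // RunProcess simulates a branching process: each individual is replaced by 0, 1, 2,
            // or 3 descendants with probability 1/4 each, so the expected population grows by a
            // factor of (0 + 1 + 2 + 3) / 4 = 1.5 per time step.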

            int sum   = 0;
            int tests = 10000000;

            /*
             * for (int i = 0; i < tests; i++)
             * {
             *  sum += RunProcess(5);
             * }
             * double avgAfter5 = sum *1.0 / tests;
             *
             * Console.WriteLine($"E(5) = {avgAfter5}");
             *
             * int count = 0;
             * tests = 5000;
             * int limit = 30;
             * for (int i = 0; i < tests; i++)
             * {
             *  if (RunProcess(limit) == 0) { count++; }
             * }
             * double proportionDead = count * 1.0 / tests;
             *
             * Console.WriteLine($"Pi_0 = {proportionDead}");
             */

            tests = 100000000;
            sum   = 0;
            double lambda1 = 1.0 / 11;
            double lambda2 = 1.0 / 9;
            double lambda3 = 1.0 / 8;
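            // Sanity check (assuming Exponential.Sample takes the rate parameter, as in
            // Math.NET Numerics): P(S1 > 10, S2 > 10, S3 > 10) = exp(-10 * (λ1 + λ2 + λ3))
            //                   = exp(-10 * (1/11 + 1/9 + 1/8)) ≈ 0.0380,
            // which the Monte Carlo proportion below should approach.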

            for (int i = 0; i < tests; i++)
            {
                double s1, s2, s3;
                s1 = Exponential.Sample(rand, lambda1);
                s2 = Exponential.Sample(rand, lambda2);
                s3 = Exponential.Sample(rand, lambda3);
                if (s1 > 10 && s2 > 10 && s3 > 10)
                {
                    sum++;
                }
            }

            double proportionAbove = sum * 1.0 / tests;

            Console.WriteLine($"Proportion = {proportionAbove}");
            //Console.WriteLine($"L1 / (L1 + L2) = {lambda1 / (lambda1 + lambda2)}");

            Console.ReadLine();
        }
Example #9
 public void StaticSamplesConsistent()
 {
     Assert.That(Xoshiro256StarStar.Doubles(1000, 1), Is.EqualTo(new Xoshiro256StarStar(1).NextDoubles(1000)).Within(1e-12).AsCollection);
 }