Example #1
        public double[] getSamples(int num) // Get the specified number of samples
        {
            double[] ret     = new double[num];
            int[]    ret_int = new int[num];
            switch (DistributionName)
            {
            case "Normal":
                normalDis.Samples(ret);
                break;

            case "ContinuousUniform":
                continuousUniformDis.Samples(ret);
                break;

            case "Triangular":
                triangularDis.Samples(ret);
                break;

            case "StudentT":
                studentTDis.Samples(ret);
                break;

            case "DiscreteUniform":
                discreteUniform.Samples(ret_int);
                for (int i = 0; i < num; i++)
                {
                    ret[i] = ret_int[i];
                }
                break;
            }
            return(ret);
        }
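A minimal alternative sketch, assuming the distribution fields above implement MathNet.Numerics' IContinuousDistribution interface: the continuous cases can share one code path instead of a string switch. The class and method names below are illustrative, not from the original repository, and the DiscreteUniform case would still need the int[] conversion shown above.

        using System;
        using MathNet.Numerics.Distributions;

        public static class SamplingSketch
        {
            // Fill an array with num draws from any continuous MathNet distribution.
            public static double[] GetSamples(IContinuousDistribution distribution, int num)
            {
                var ret = new double[num];
                distribution.Samples(ret); // fills the array in place
                return ret;
            }

            public static void Demo()
            {
                // Any of the continuous distributions used in the switch above fits here.
                var fromNormal  = GetSamples(new Normal(0.0, 1.0), 10);
                var fromUniform = GetSamples(new ContinuousUniform(0.0, 1.0), 10);
                Console.WriteLine($"{fromNormal[0]} {fromUniform[0]}");
            }
        }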
Example #2
        /// <summary>
        ///     Given a random model
        ///     set the weights: an array filled with random floats in the range [-1; 1],
        ///     representing the detailed belief of an agent
        /// </summary>
        /// <param name="model"></param>
        /// <param name="length"></param>
        /// <param name="beliefWeightLevel"></param>
        /// <returns></returns>
        public void InitializeWeights(RandomGenerator model, byte length, BeliefWeightLevel beliefWeightLevel)
        {
            float[] beliefBits;
            switch (beliefWeightLevel)
            {
            case BeliefWeightLevel.NoWeight:
                beliefBits = DiscreteUniform.Samples(length, 0, 0);
                break;

            case BeliefWeightLevel.RandomWeight:
                beliefBits = model == RandomGenerator.RandomUniform
                        ? ContinuousUniform.Samples(length, 0, RangeMax)
                        : DiscreteUniform.Samples(length, 0, RangeMax);
                break;

            case BeliefWeightLevel.FullWeight:
                beliefBits = DiscreteUniform.Samples(length, 1, 1);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(beliefWeightLevel), beliefWeightLevel, null);
            }

            Weights = new Bits(beliefBits, 0);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int total)
        {
            var result              = new List <Point>(total);
            var generated           = 0;
            var uniformDistribution = new ContinuousUniform(dimensions - dimensions * d, dimensions + 2 * dimensions * d, RandomSource);

            while (generated++ < total)
            {
                var samples    = new double[dimensions];
                var isPositive = true;

                uniformDistribution.Samples(samples);

                for (var i = 1; i <= dimensions; i++)
                {
                    if (samples[i - 1] < i || samples[i - 1] > i + i * d)
                    {
                        isPositive = false;
                        break;
                    }
                }

                result.Add(new Point(samples)
                {
                    Label = isPositive
                });
            }

            return(result);
        }
Example #4
        public void CanSampleSequence()
        {
            var n   = new ContinuousUniform();
            var ied = n.Samples();

            ied.Take(5).ToArray();
        }
Example #5
        public static double[] continuousUniform(double v1, double v2, int num)
        {
            var n = new ContinuousUniform(v1, v2);

            double[] ret = new double[num];
            n.Samples(ret);
            return(ret);
        }
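A possible call site for this helper ("RandomHelpers" is an assumed containing class name, used only for illustration):

        // Hypothetical usage: 1000 draws uniformly distributed on [-0.5, 0.5].
        double[] noise = RandomHelpers.continuousUniform(-0.5, 0.5, 1000);
        Console.WriteLine($"first draw: {noise[0]}, count: {noise.Length}");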
 private Task playMatchInit()
 {
     return(Task.Run(() =>
     {
         Random rand1 = new Random((int)DateTime.Now.Ticks & 0x0000FFFF);
         Random rand2 = new Random((int)DateTime.Now.Ticks & 0x0000FFFF);
         ContinuousUniform.Samples(rand1, r1, 0.0, 1.0);
         if (d1 == 0)
         {
             //ContinuousUniform.Samples(r1, 0.0, 0.1);
         }
         else if (d1 == 1)
         {
             r1 = r1.Select(z => Math.Sqrt(z)).ToArray();
         }
         else if (d1 == 2)
         {
             r1 = r1.Select(z => z * z).ToArray();
         }
         ContinuousUniform.Samples(rand2, r2, 0.0, 1.0);
         if (d2 == 0)
         {
             //ContinuousUniform.Samples(r1, 0.0, 0.1);
         }
         else if (d2 == 1)
         {
             r2 = r2.Select(z => Math.Sqrt(z)).ToArray();
         }
         else if (d2 == 2)
         {
             r2 = r2.Select(z => z * z).ToArray();
         }
         foreach (string n in Team1[2].Split(':')[1].Split(','))
         {
             in2.bowlers[n] = new int[3];
         }
         foreach (string n in in2.bowlers.Keys)
         {
             for (int i = 0; i < 3; i++)
             {
                 in2.bowlers[n][i] = 0;
             }
         }
         foreach (string n in Team2[2].Split(':')[1].Split(','))
         {
             in1.bowlers[n] = new int[3];
         }
         foreach (string n in in1.bowlers.Keys)
         {
             for (int i = 0; i < 3; i++)
             {
                 in1.bowlers[n][i] = 0;
             }
         }
     }));
 }
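The d1/d2 branches above reshape the uniform draws: for U ~ Uniform(0, 1), sqrt(U) has density 2x on [0, 1] and is biased toward 1, while U squared is biased toward 0. A standalone sketch of that reshaping, with illustrative names and the same static Samples overload used above:

 using System;
 using System.Linq;
 using MathNet.Numerics.Distributions;

 public static class SkewedUniformSketch
 {
     // skew == 0: plain U(0,1); skew == 1: sqrt(U), biased toward 1; skew == 2: U^2, biased toward 0.
     public static double[] Draw(int count, int skew, Random rng)
     {
         var r = new double[count];
         ContinuousUniform.Samples(rng, r, 0.0, 1.0); // same static overload as above

         if (skew == 1)
         {
             r = r.Select(Math.Sqrt).ToArray();
         }
         else if (skew == 2)
         {
             r = r.Select(z => z * z).ToArray();
         }

         return r;
     }
 }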
Example #7
        public void Cudnn_UseCase_ForwardConvolution_Double()
        {
            CudnnContext.DefaultType         = CudnnType.Double;
            CudnnContext.DefaultTensorFormat = CudnnTensorFormat.MajorRow;

            using (var context = CudnnContext.Create())
            {
                // Set some options and tensor dimensions
                int nInput           = 100;
                int filtersIn        = 10;
                int filtersOut       = 8;
                int heightIn         = 20;
                int widthIn          = 20;
                int heightFilter     = 5;
                int widthFilter      = 5;
                int paddingHeight    = 4;
                int paddingWidth     = 4;
                int verticalStride   = 1;
                int horizontalStride = 1;
                int upscalex         = 1;
                int upscaley         = 1;

                // Input Tensor Data
                double[] xData = new double[nInput * filtersIn * heightIn * widthIn];
                ContinuousUniform.Samples(xData, 0, 1);

                // Filter Tensor Data
                double[] filterData = new double[filtersOut * filtersIn * heightFilter * widthFilter];
                ContinuousUniform.Samples(filterData, 0, 1);

                // Descriptor for input
                var xTensor = CudnnContext.CreateTensor(new CudnnTensorDescriptorParameters(nInput, filtersIn, heightIn, widthIn));

                // Filter descriptor
                var filter = CudnnContext.CreateFilter(new CudnnFilterDescriptorParameters(filtersOut, filtersIn, heightFilter, widthFilter));

                // Convolution descriptor
                var convolution = CudnnContext.CreateConvolution(new CudnnConvolutionDescriptorParameters(CudnnConvolutionMode.CrossCorrelation, xTensor, filter, paddingHeight, paddingWidth, verticalStride, horizontalStride, upscalex, upscaley));
                var output      = convolution.GetOutputTensor(CudnnConvolutionPath.Forward);

                // Output tensor
                var      yTensor = CudnnContext.CreateTensor(new CudnnTensorDescriptorParameters(nInput, filtersOut, output.Height, output.Width));
                double[] yData   = new double[nInput * filtersOut * output.Height * output.Width];

                // Perform convolution
                context.Forward(xTensor, xData, filter, filterData, convolution, yTensor, yData, CudnnAccumulateResult.DoNotAccumulate);

                // Clean up
                xTensor.Dispose();
                yTensor.Dispose();
                filter.Dispose();
                convolution.Dispose();
            }
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int total)
        {
            var result              = new List <Point>(total);
            var generated           = 0;
            var uniformDistribution = new ContinuousUniform(-1, 2 * k + d, RandomSource);

            while (generated++ < total)
            {
                var samples    = new double[dimensions];
                var isPositive = false;

                uniformDistribution.Samples(samples);

                for (var j = 1; j <= k; j++)
                {
                    if (samples.Sum() > d * j)
                    {
                        continue;
                    }

                    isPositive = true;

                    for (var i = 1; i <= dimensions; i++)
                    {
                        for (var l = i + 1; l <= dimensions; l++)
                        {
                            if (samples[i - 1] / Math.Tan(Math.PI / 12) - samples[l - 1] * Math.Tan(Math.PI / 12) >= 2 * j - 2 &&
                                samples[l - 1] / Math.Tan(Math.PI / 12) - samples[i - 1] * Math.Tan(Math.PI / 12) >= 2 * j - 2)
                            {
                                continue;
                            }

                            isPositive = false;
                            break;
                        }
                    }

                    if (isPositive)
                    {
                        break;
                    }
                }

                result.Add(new Point(samples)
                {
                    Label = isPositive
                });
            }

            return(result);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int positives, int negatives)
        {
            var result             = new List <Point>(positives + negatives);
            var generatedPositives = 0;
            var generatedNegatives = 0;
            var uniformDistributionForPositives = new ContinuousUniform(1 - d, d + dimensions, RandomSource);
            var uniformDistributionForNegatives = new ContinuousUniform(1 - 2 * d, dimensions + 2 * d, RandomSource);

            while (generatedPositives < positives)
            {
                var samples = new double[dimensions];

                uniformDistributionForPositives.Samples(samples);

                if (Distance.Euclidean(Enumerable.Range(1, dimensions).Select(Convert.ToDouble).ToArray(), samples) > d)
                {
                    continue;
                }

                generatedPositives++;
                result.Add(new Point(samples)
                {
                    Label = true
                });
            }

            while (generatedNegatives < negatives)
            {
                var samples = new double[dimensions];

                uniformDistributionForNegatives.Samples(samples);

                if (Distance.Euclidean(Enumerable.Range(1, dimensions).Select(Convert.ToDouble).ToArray(), samples) <= d)
                {
                    continue;
                }

                generatedNegatives++;
                result.Add(new Point(samples)
                {
                    Label = false
                });
            }

            return(result);
        }
Example #10
        /// <summary>
        ///     Initialize the forgetting knowledge process with a random Selecting mode
        /// </summary>
        /// <param name="actorKnowledge"></param>
        /// <param name="nextForgettingRate"></param>
        /// <returns></returns>
        public float[] InitializeForgettingKnowledgeRandom(ActorKnowledge actorKnowledge, float nextForgettingRate)
        {
            if (actorKnowledge is null)
            {
                throw new ArgumentNullException(nameof(actorKnowledge));
            }

            var forgettingKnowledgeBits = ContinuousUniform.Samples(actorKnowledge.Length, 0, 1);
            var threshold = NextMean();

            for (byte i = 0; i < actorKnowledge.Length; i++)
            {
                forgettingKnowledgeBits[i] = forgettingKnowledgeBits[i] < threshold ? nextForgettingRate : 0;
            }

            return(forgettingKnowledgeBits);
        }
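The loop above is effectively a Bernoulli draw per bit: each entry becomes nextForgettingRate with probability threshold, and 0 otherwise. A minimal standalone sketch of the same idea using MathNet.Numerics directly (the original uses Symu's ContinuousUniform wrapper; the names below are illustrative and threshold is assumed to lie in [0, 1]):

        using System;
        using MathNet.Numerics.Distributions;

        public static class ForgettingSketch
        {
            // Each entry of the mask is "rate" with probability "threshold", 0 otherwise.
            public static float[] ForgettingMask(int length, double threshold, float rate, Random rng)
            {
                var mask = new float[length];
                for (var i = 0; i < length; i++)
                {
                    // Bernoulli.Sample(rnd, p) returns 1 with probability p.
                    mask[i] = Bernoulli.Sample(rng, threshold) == 1 ? rate : 0f;
                }

                return mask;
            }
        }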
Example #11
        /// <summary>
        ///     Given a random model and a BeliefLevel
        ///     return the beliefBits for the agent: an array filled with random binaries
        ///     representing the detailed belief of an agent
        /// </summary>
        /// <param name="model"></param>
        /// <param name="beliefLevel"></param>
        /// <returns></returns>
        public float[] InitializeBits(RandomGenerator model, BeliefLevel beliefLevel)
        {
            float[] beliefBits;
            switch (model)
            {
            case RandomGenerator.RandomUniform:
            {
                float min;
                float max;

                if (beliefLevel == BeliefLevel.Random)
                {
                    min = RangeMin;
                    max = RangeMax;
                }
                else
                {
                    min = GetMinFromBeliefLevel(beliefLevel);
                    max = GetMaxFromBeliefLevel(beliefLevel);
                }

                beliefBits = ContinuousUniform.Samples(Length, min, max);
                break;
            }

            case RandomGenerator.RandomBinary:
            {
                if (beliefLevel == BeliefLevel.Random)
                {
                    beliefBits = ContinuousUniform.FilteredSamples(Length, RangeMin, RangeMax);
                }
                else
                {
                    var mean = 1 - GetValueFromBeliefLevel(beliefLevel);
                    beliefBits = ContinuousUniform.FilteredSamples(Length, mean);
                }

                break;
            }

            default:
                throw new ArgumentOutOfRangeException(nameof(model), model, null);
            }

            return(beliefBits);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int total)
        {
            var result              = new List <Point>(total);
            var generated           = 0;
            var uniformDistribution = new ContinuousUniform(1 - 2 * d, dimensions + 2 * d + (2 * Math.Sqrt(6) * (k - 1) * d) / Math.PI, RandomSource);

            while (generated++ < total)
            {
                var samples    = new double[dimensions];
                var isPositive = true;

                uniformDistribution.Samples(samples);

                for (var j = 1; j <= k; j++)
                {
                    isPositive = true;

                    for (var i = 1; i <= dimensions; i++)
                    {
                        if (Distance.Euclidean(Enumerable.Range(1, dimensions).Select(c => Convert.ToDouble(c) + _edgeCoefficient * d * (j - 1) / c).ToArray(), samples) <= d)
                        {
                            continue;
                        }

                        isPositive = false;
                        break;
                    }

                    if (isPositive)
                    {
                        break;
                    }
                }

                result.Add(new Point(samples)
                {
                    Label = isPositive
                });
            }

            return(result);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int total)
        {
            var result              = new List <Point>(total);
            var generated           = 0;
            var uniformDistribution = new ContinuousUniform(-1, 2 + d, RandomSource);

            while (generated++ < total)
            {
                var samples    = new double[dimensions];
                var isPositive = true;

                uniformDistribution.Samples(samples);

                if (samples.Sum() > d)
                {
                    isPositive = false;
                }
                else
                {
                    for (var i = 1; i <= dimensions; i++)
                    {
                        for (var j = i + 1; j < dimensions; j++)
                        {
                            if (samples[i - 1] / Math.Tan(Math.PI / 12) - samples[j - 1] * Math.Tan(Math.PI / 12) < 0 ||
                                samples[j - 1] / Math.Tan(Math.PI / 12) - samples[i - 1] * Math.Tan(Math.PI / 12) < 0)
                            {
                                isPositive = false;
                            }
                        }
                    }
                }

                result.Add(new Point(samples)
                {
                    Label = isPositive
                });
            }

            return(result);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int total)
        {
            var result              = new List <Point>(total);
            var generated           = 0;
            var uniformDistribution = new ContinuousUniform(1 - 2 * d, dimensions + 2 * d, RandomSource);

            while (generated++ < total)
            {
                var samples = new double[dimensions];

                uniformDistribution.Samples(samples);

                var label = Distance.Euclidean(Enumerable.Range(1, dimensions).Select(Convert.ToDouble).ToArray(), samples) <= d;

                result.Add(new Point(samples)
                {
                    Label = label
                });
            }

            return(result);
        }
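In this variant the label simply marks whether the draw falls inside the hypersphere of radius d centred at (1, 2, ..., dimensions). A self-contained sketch of that test (class and method names are illustrative):

        using System;
        using System.Linq;
        using MathNet.Numerics;

        public static class HypersphereLabelSketch
        {
            public static bool IsInside(double[] samples, double d)
            {
                // Centre of the hypersphere: (1, 2, ..., n) for n = samples.Length.
                var centre = Enumerable.Range(1, samples.Length).Select(Convert.ToDouble).ToArray();
                return Distance.Euclidean(centre, samples) <= d;
            }
        }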
Example #15
 public void FailSampleSequenceStatic()
 {
     Assert.Throws <ArgumentOutOfRangeException>(() => ContinuousUniform.Samples(new Random(), 0.0, -1.0).First());
 }
Example #16
        public void CanSampleSequenceStatic()
        {
            var ied = ContinuousUniform.Samples(new Random(), 0.0, 1.0);

            ied.Take(5).ToArray();
        }
 public void FailSampleSequenceStatic()
 {
     Assert.That(() => ContinuousUniform.Samples(new Random(0), 0.0, -1.0).First(), Throws.ArgumentException);
 }
Example #18
        /// <summary>
        ///     Given a KnowledgeModel and a KnowledgeLevel
        ///     return the knowledgeBits for the agent: an array filled with random binaries
        ///     representing the detailed knowledge of an agent
        /// </summary>
        /// <param name="length"></param>
        /// <param name="model"></param>
        /// <param name="knowledgeLevel"></param>
        /// <param name="step"></param>
        /// <returns></returns>
        public void InitializeKnowledge(byte length, RandomGenerator model, KnowledgeLevel knowledgeLevel, ushort step)
        {
            float[] knowledgeBits;
            switch (model)
            {
            case RandomGenerator.RandomUniform:
            {
                float min;
                float max;

                switch (knowledgeLevel)
                {
                case KnowledgeLevel.Random:
                    min = 0;
                    max = 1;
                    break;

                default:
                    min = Knowledge.GetMinFromKnowledgeLevel(knowledgeLevel);
                    max = Knowledge.GetMaxFromKnowledgeLevel(knowledgeLevel);
                    break;
                }

                knowledgeBits = ContinuousUniform.Samples(length, min, max);
                if (Math.Abs(min - max) < Constants.Tolerance)
                {
                    SetKnowledgeBits(knowledgeBits, step);
                    return;
                }

                for (byte i = 0; i < knowledgeBits.Length; i++)
                {
                    if (knowledgeBits[i] < min * (1 + 0.05))
                    {
                        // With RandomUniform there is almost no bit equal to 0, but in reality there are knowledge bits the agent ignores.
                        // We force the lowest knowledge bits (below min + 5%) to 0.
                        knowledgeBits[i] = 0;
                    }
                }

                break;
            }

            case RandomGenerator.RandomBinary:
            {
                switch (knowledgeLevel)
                {
                case KnowledgeLevel.Random:
                    knowledgeBits = ContinuousUniform.FilteredSamples(length, 0, 1);
                    break;

                default:
                    var mean = 1 - Knowledge.GetValueFromKnowledgeLevel(knowledgeLevel);
                    knowledgeBits = ContinuousUniform.FilteredSamples(length, mean);
                    break;
                }

                break;
            }

            default:
                throw new ArgumentOutOfRangeException(nameof(model), model, null);
            }

            SetKnowledgeBits(knowledgeBits, step);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int positives, int negatives)
        {
            var result              = new List <Point>(positives + negatives);
            var generatedPositives  = 0;
            var generatedNegatives  = 0;
            var uniformDistribution = new ContinuousUniform(-1, 2 + d, RandomSource);


            while (generatedPositives < positives)
            {
                var samples    = new double[dimensions];
                var isPositive = true;

                uniformDistribution.Samples(samples);

                if (samples.Sum() > d)
                {
                    continue;
                }

                for (var i = 1; i <= dimensions; i++)
                {
                    for (var j = i + 1; j < dimensions; j++)
                    {
                        if (samples[i - 1] / Math.Tan(Math.PI / 12) - samples[j - 1] * Math.Tan(Math.PI / 12) < 0 ||
                            samples[j - 1] / Math.Tan(Math.PI / 12) - samples[i - 1] * Math.Tan(Math.PI / 12) < 0)
                        {
                            isPositive = false;
                            break;
                        }
                    }
                }

                if (!isPositive)
                {
                    continue;
                }

                generatedPositives++;
                result.Add(new Point(samples)
                {
                    Label = true
                });
            }

            while (generatedNegatives < negatives)
            {
                var samples = new double[dimensions];

                uniformDistribution.Samples(samples);

                if (samples.Sum() < d)
                {
                    continue;
                }

                generatedNegatives++;
                result.Add(new Point(samples)
                {
                    Label = false
                });
            }

            return(result);
        }
        public override IList <Point> Generate(int dimensions, double d, int k, int positives, int negatives)
        {
            var result             = new List <Point>(positives + negatives);
            var generatedPositives = 0;
            var generatedNegatives = 0;
            var uniformDistributionForPositives = new ContinuousUniform(1, dimensions + dimensions * d, RandomSource);
            var uniformDistributionForNegatives = new ContinuousUniform(dimensions - dimensions * d, dimensions + 2 * dimensions * d, RandomSource);

            while (generatedPositives < positives)
            {
                var samples    = new double[dimensions];
                var isPositive = true;

                uniformDistributionForPositives.Samples(samples);

                for (var i = 1; i <= dimensions; i++)
                {
                    if (samples[i - 1] < i || samples[i - 1] > i + i * d)
                    {
                        isPositive = false;
                        break;
                    }
                }

                if (!isPositive)
                {
                    continue;
                }

                generatedPositives++;
                result.Add(new Point(samples)
                {
                    Label = true
                });
            }

            while (generatedNegatives < negatives)
            {
                var samples    = new double[dimensions];
                var isNegative = false;

                uniformDistributionForNegatives.Samples(samples);

                for (var i = 1; i <= dimensions; i++)
                {
                    if (samples[i - 1] < i || samples[i - 1] > i + i * d)
                    {
                        isNegative = true;
                        break;
                    }
                }

                if (!isNegative)
                {
                    continue;
                }

                generatedNegatives++;
                result.Add(new Point(samples)
                {
                    Label = false
                });
            }

            return(result);
        }
 public void CanSampleSequence()
 {
     var n = new ContinuousUniform();
     var ied = n.Samples();
     var e = ied.Take(5).ToArray();
 }
        /// <summary>
        /// Run example
        /// </summary>
        /// <a href="http://en.wikipedia.org/wiki/Uniform_distribution_%28continuous%29">ContinuousUniform distribution</a>
        public void Run()
        {
            // 1. Initialize the new instance of the ContinuousUniform distribution class with default parameters.
            var continuousUniform = new ContinuousUniform();

            Console.WriteLine(@"1. Initialize the new instance of the ContinuousUniform distribution class with parameters Lower = {0}, Upper = {1}", continuousUniform.LowerBound, continuousUniform.UpperBound);
            Console.WriteLine();

            // 2. Distribution properties:
            Console.WriteLine(@"2. {0} distribution properties:", continuousUniform);

            // Cumulative distribution function
            Console.WriteLine(@"{0} - Сumulative distribution at location '0.3'", continuousUniform.CumulativeDistribution(0.3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            Console.WriteLine(@"{0} - Probability density at location '0.3'", continuousUniform.Density(0.3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            Console.WriteLine(@"{0} - Log probability density at location '0.3'", continuousUniform.DensityLn(0.3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            Console.WriteLine(@"{0} - Entropy", continuousUniform.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            Console.WriteLine(@"{0} - Largest element in the domain", continuousUniform.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            Console.WriteLine(@"{0} - Smallest element in the domain", continuousUniform.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            Console.WriteLine(@"{0} - Mean", continuousUniform.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            Console.WriteLine(@"{0} - Median", continuousUniform.Median.ToString(" #0.00000;-#0.00000"));

            // Mode
            Console.WriteLine(@"{0} - Mode", continuousUniform.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            Console.WriteLine(@"{0} - Variance", continuousUniform.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            Console.WriteLine(@"{0} - Standard deviation", continuousUniform.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            Console.WriteLine(@"{0} - Skewness", continuousUniform.Skewness.ToString(" #0.00000;-#0.00000"));
            Console.WriteLine();

            // 3. Generate 10 samples of the ContinuousUniform distribution
            Console.WriteLine(@"3. Generate 10 samples of the ContinuousUniform distribution");
            for (var i = 0; i < 10; i++)
            {
                Console.Write(continuousUniform.Sample().ToString("N05") + @" ");
            }

            Console.WriteLine();
            Console.WriteLine();

            // 4. Generate 100000 samples of the ContinuousUniform(0, 1) distribution and display histogram
            Console.WriteLine(@"4. Generate 100000 samples of the ContinuousUniform(0, 1) distribution and display histogram");
            var data = new double[100000];

            ContinuousUniform.Samples(data, 0.0, 1.0);
            ConsoleHelper.DisplayHistogram(data);
            Console.WriteLine();

            // 5. Generate 100000 samples of the ContinuousUniform(2, 10) distribution and display histogram
            Console.WriteLine(@"5. Generate 100000 samples of the ContinuousUniform(2, 10) distribution and display histogram");
            ContinuousUniform.Samples(data, 2, 10);
            ConsoleHelper.DisplayHistogram(data);
        }
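As a quick sanity check on steps 4 and 5 (not part of the original example), the empirical mean of ContinuousUniform(a, b) samples should approach (a + b) / 2 as the sample count grows; for example:

            // Hypothetical check (requires System.Linq): the mean of 100000 draws
            // from ContinuousUniform(2, 10) should be close to (2 + 10) / 2 = 6.
            var check = new double[100000];
            ContinuousUniform.Samples(check, 2.0, 10.0);
            Console.WriteLine(@"Sample mean: {0:N05} (expected ~6)", check.Average());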
Example #23
 public void FailSampleSequenceStatic()
 {
     var ied = ContinuousUniform.Samples(new Random(), 0.0, -1.0).First();
 }
Example #24
 public static Vec MakeRandomVector(int size, double min, double max)
 {
     return(new DenseVector(ContinuousUniform.Samples(Random, min, max).Take(size).ToArray()));
 }
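A possible call site for this factory ("Random" is assumed to be a shared System.Random field in the same class, and Vec/DenseVector are assumed to come from MathNet.Numerics.LinearAlgebra.Double; both assumptions are for illustration only):

 // Hypothetical usage: a 10-element vector with entries drawn uniformly from [-1, 1].
 Vec v = MakeRandomVector(10, -1.0, 1.0);
 Console.WriteLine(v);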