/// <summary>
/// Builds deterministic linear samples y = slope*x + intercept and randomized test vectors.
/// </summary>
/// <param name="x">X sample values (output).</param>
/// <param name="y">Y sample values (output).</param>
/// <param name="xtest">X test values (output).</param>
/// <param name="ytest">Y test values (output).</param>
/// <param name="samples">Number of sample points.</param>
/// <param name="sampleOffset">Offset applied to each sample index.</param>
/// <param name="slope">Slope of the linear model.</param>
/// <param name="intercept">Intercept of the linear model.</param>
public static void Build(out double[] x, out double[] y, out double[] xtest, out double[] ytest, int samples = 3, double sampleOffset = -0.5, double slope = 2.0, double intercept = -1.0)
{
    // Fixed seed so every run exercises identical data.
    var uniform = new ContinuousUniform(0.0, 1.0, new MersenneTwister(42));

    // Deterministic sample points lying exactly on the line.
    x = new double[samples];
    y = new double[samples];
    for (var i = 0; i < samples; i++)
    {
        var xi = i + sampleOffset;
        x[i] = xi;
        y[i] = (xi * slope) + intercept;
    }

    // Test vectors fall randomly between (and just outside) the sample points.
    xtest = new double[samples + 1];
    ytest = new double[samples + 1];
    if (samples == 1)
    {
        // Degenerate case: with a single sample the fitted line is constant.
        xtest[0] = sampleOffset - uniform.Sample();
        xtest[1] = sampleOffset + uniform.Sample();
        ytest[0] = ytest[1] = (sampleOffset * slope) + intercept;
    }
    else
    {
        for (var i = 0; i < xtest.Length; i++)
        {
            xtest[i] = (i - 1) + sampleOffset + uniform.Sample();
            ytest[i] = (xtest[i] * slope) + intercept;
        }
    }
}
/// <summary>
/// The rejection sampler must expose a non-null random source both before and after reassignment.
/// </summary>
public void RejectTest()
{
    var uniform = new ContinuousUniform(0.0, 1.0, new MersenneTwister());
    var sampler = new RejectionSampler<double>(
        x => Math.Pow(x, 1.7) * Math.Pow(1.0 - x, 5.3),
        x => 0.021,
        uniform.Sample);
    Assert.IsNotNull(sampler.RandomSource);
    sampler.RandomSource = uniform.RandomSource;
    Assert.IsNotNull(sampler.RandomSource);
}
/// <summary>
/// Assigning a null random source to the rejection sampler must throw ArgumentNullException.
/// </summary>
public void NullRandomNumberGenerator()
{
    var dist = new ContinuousUniform(0.0, 1.0);
    dist.RandomSource = new MersenneTwister();
    var sampler = new RejectionSampler<double>(
        x => Math.Pow(x, 1.7) * Math.Pow(1.0 - x, 5.3),
        x => Double.NegativeInfinity,
        dist.Sample);
    Assert.Throws<ArgumentNullException>(() => sampler.RandomSource = null);
}
/// <summary>
/// Assigning a null random source should be rejected by the rejection sampler.
/// NOTE(review): the throw is not asserted here — this presumably relies on an
/// [ExpectedException] attribute outside this view; confirm before modernizing
/// to Assert.Throws like the sibling test.
/// </summary>
public void NullRandomNumberGenerator()
{
    var dist = new ContinuousUniform(0.0, 1.0);
    dist.RandomSource = new MersenneTwister();
    var sampler = new RejectionSampler<double>(
        x => System.Math.Pow(x, 1.7) * System.Math.Pow(1.0 - x, 5.3),
        x => System.Double.NegativeInfinity,
        dist.Sample);
    sampler.RandomSource = null;
}
/// <summary>
/// Sampling with an unbounded (negative-infinity) envelope must throw ArgumentOutOfRangeException.
/// </summary>
public void NoUpperBound()
{
    var dist = new ContinuousUniform(0.0, 1.0);
    dist.RandomSource = new MersenneTwister();
    var sampler = new RejectionSampler<double>(
        x => Math.Pow(x, 1.7) * Math.Pow(1.0 - x, 5.3),
        x => Double.NegativeInfinity,
        dist.Sample);
    Assert.Throws<ArgumentOutOfRangeException>(() => sampler.Sample());
}
/// <summary>
/// Sampling with an unbounded (negative-infinity) envelope should fail.
/// NOTE(review): the failure is not asserted here — presumably an [ExpectedException]
/// attribute outside this view handles it; confirm before converting to Assert.Throws.
/// </summary>
public void NoUpperBound()
{
    var dist = new ContinuousUniform(0.0, 1.0);
    dist.RandomSource = new MersenneTwister();
    var sampler = new RejectionSampler<double>(
        x => System.Math.Pow(x, 1.7) * System.Math.Pow(1.0 - x, 5.3),
        x => System.Double.NegativeInfinity,
        dist.Sample);
    double s = sampler.Sample();
}
/// <summary>
/// Drawing an array of samples from the rejection sampler completes without error.
/// </summary>
public void SampleArrayTest()
{
    var uniform = new ContinuousUniform(0.0, 1.0, new MersenneTwister());
    var sampler = new RejectionSampler<double>(
        x => Math.Pow(x, 1.7) * Math.Pow(1.0 - x, 5.3),
        x => 0.021,
        uniform.Sample);
    sampler.RandomSource = uniform.RandomSource;
    sampler.Sample(5);
}
/// <summary>
/// TransposeThisAndMultiply of diagonal matrices against dense operands: covers a scaled
/// identity, a diagonal with extra columns (result gains zero rows), and a diagonal with
/// fewer columns (result is truncated), for both a wide and a tall dense matrix.
/// </summary>
public void DiagonalDenseMatrixTransposeThisAndMultiply()
{
    var dist = new ContinuousUniform(-1.0, 1.0, new SystemRandomSource(1));
    // Sanity check: the builder really produces a DiagonalMatrix.
    Assert.IsInstanceOf<DiagonalMatrix>(Matrix<float>.Build.DiagonalIdentity(3, 3));

    var wide = Matrix<float>.Build.Random(3, 8, dist);
    // Scaled 3x3 identity: D^T * wide is just wide scaled by 2.
    Assert.IsTrue((Matrix<float>.Build.DiagonalIdentity(3).Multiply(2f).TransposeThisAndMultiply(wide)).Equals(wide.Multiply(2f)));
    // 3x5 diagonal: transpose is 5x3, so the product has two extra zero rows.
    Assert.IsTrue((Matrix<float>.Build.Diagonal(3, 5, 2f).TransposeThisAndMultiply(wide)).Equals(wide.Multiply(2f).Stack(Matrix<float>.Build.Dense(2, 8))));
    // 3x2 diagonal: transpose is 2x3, so the product keeps only the first two rows.
    Assert.IsTrue((Matrix<float>.Build.Diagonal(3, 2, 2f).TransposeThisAndMultiply(wide)).Equals(wide.Multiply(2f).SubMatrix(0, 2, 0, 8)));

    var tall = Matrix<float>.Build.Random(8, 3, dist);
    Assert.IsTrue((Matrix<float>.Build.DiagonalIdentity(8).Multiply(2f).TransposeThisAndMultiply(tall)).Equals(tall.Multiply(2f)));
    Assert.IsTrue((Matrix<float>.Build.Diagonal(8, 10, 2f).TransposeThisAndMultiply(tall)).Equals(tall.Multiply(2f).Stack(Matrix<float>.Build.Dense(2, 3))));
    Assert.IsTrue((Matrix<float>.Build.Diagonal(8, 2, 2f).TransposeThisAndMultiply(tall)).Equals(tall.Multiply(2f).SubMatrix(0, 2, 0, 3)));
}
/// <summary>
/// Multiplication of Complex32 diagonal matrices by dense operands via operator*: covers a
/// scaled identity, a diagonal with extra rows (result gains zero rows), and a diagonal with
/// fewer rows (result is truncated), for both a wide and a tall dense matrix.
/// </summary>
public void DiagonalDenseMatrixMultiply()
{
    var dist = new ContinuousUniform(-1.0, 1.0, new SystemRandomSource(1));
    // Sanity check: the builder really produces a DiagonalMatrix.
    Assert.IsInstanceOf<DiagonalMatrix>(Matrix<Complex32>.Build.DiagonalIdentity(3, 3));

    var wide = Matrix<Complex32>.Build.Random(3, 8, dist);
    // Scaled 3x3 identity: D * wide is wide scaled by 2.
    Assert.IsTrue((Matrix<Complex32>.Build.DiagonalIdentity(3).Multiply(2f) * wide).Equals(wide.Multiply(2f)));
    // 5x3 diagonal: the product has two extra zero rows appended.
    Assert.IsTrue((Matrix<Complex32>.Build.Diagonal(5, 3, 2f) * wide).Equals(wide.Multiply(2f).Stack(Matrix<Complex32>.Build.Dense(2, 8))));
    // 2x3 diagonal: the product keeps only the first two rows.
    Assert.IsTrue((Matrix<Complex32>.Build.Diagonal(2, 3, 2f) * wide).Equals(wide.Multiply(2f).SubMatrix(0, 2, 0, 8)));

    var tall = Matrix<Complex32>.Build.Random(8, 3, dist);
    Assert.IsTrue((Matrix<Complex32>.Build.DiagonalIdentity(8).Multiply(2f) * tall).Equals(tall.Multiply(2f)));
    Assert.IsTrue((Matrix<Complex32>.Build.Diagonal(10, 8, 2f) * tall).Equals(tall.Multiply(2f).Stack(Matrix<Complex32>.Build.Dense(2, 3))));
    Assert.IsTrue((Matrix<Complex32>.Build.Diagonal(2, 8, 2f) * tall).Equals(tall.Multiply(2f).SubMatrix(0, 2, 0, 3)));
}
/// <summary>
/// Multiplication of dense Complex32 matrices by diagonal operands via operator*: covers a
/// scaled identity, a diagonal with extra columns (result gains zero columns), and a diagonal
/// with fewer columns (result is truncated), for both a tall and a wide dense matrix.
/// </summary>
public void DenseDiagonalMatrixMultiply()
{
    var dist = new ContinuousUniform(-1.0, 1.0, new SystemRandomSource(1));
    // Sanity check: the builder really produces a DiagonalMatrix.
    Assert.IsInstanceOf<DiagonalMatrix>(Matrix<Complex32>.Build.DiagonalIdentity(3, 3));

    var tall = Matrix<Complex32>.Build.Random(8, 3, dist);
    // Scaled 3x3 identity: tall * D is tall scaled by 2.
    Assert.IsTrue((tall * Matrix<Complex32>.Build.DiagonalIdentity(3).Multiply(2f)).Equals(tall.Multiply(2f)));
    // 3x5 diagonal: the product gains two zero columns appended on the right.
    Assert.IsTrue((tall * Matrix<Complex32>.Build.Diagonal(3, 5, 2f)).Equals(tall.Multiply(2f).Append(Matrix<Complex32>.Build.Dense(8, 2))));
    // 3x2 diagonal: the product keeps only the first two columns.
    Assert.IsTrue((tall * Matrix<Complex32>.Build.Diagonal(3, 2, 2f)).Equals(tall.Multiply(2f).SubMatrix(0, 8, 0, 2)));

    var wide = Matrix<Complex32>.Build.Random(3, 8, dist);
    Assert.IsTrue((wide * Matrix<Complex32>.Build.DiagonalIdentity(8).Multiply(2f)).Equals(wide.Multiply(2f)));
    Assert.IsTrue((wide * Matrix<Complex32>.Build.Diagonal(8, 10, 2f)).Equals(wide.Multiply(2f).Append(Matrix<Complex32>.Build.Dense(3, 2))));
    Assert.IsTrue((wide * Matrix<Complex32>.Build.Diagonal(8, 2, 2f)).Equals(wide.Multiply(2f).SubMatrix(0, 3, 0, 2)));
}
/// <summary>
/// TransposeAndMultiply of dense float matrices against diagonal operands: covers a scaled
/// identity, a diagonal with extra rows (result gains zero columns), and a diagonal with
/// fewer rows (result is truncated), for both a tall and a wide dense matrix.
/// </summary>
public void DenseDiagonalMatrixTransposeAndMultiply()
{
    var dist = new ContinuousUniform(-1.0, 1.0, new SystemRandomSource(1));
    // Sanity check: the builder really produces a DiagonalMatrix.
    Assert.IsInstanceOf<DiagonalMatrix>(Matrix<float>.Build.DiagonalIdentity(3, 3));

    var tall = Matrix<float>.Build.Random(8, 3, dist);
    // Scaled 3x3 identity: tall * D^T is tall scaled by 2.
    Assert.IsTrue(tall.TransposeAndMultiply(Matrix<float>.Build.DiagonalIdentity(3).Multiply(2f)).Equals(tall.Multiply(2f)));
    // 5x3 diagonal: D^T is 3x5, so the product gains two zero columns.
    Assert.IsTrue(tall.TransposeAndMultiply(Matrix<float>.Build.Diagonal(5, 3, 2f)).Equals(tall.Multiply(2f).Append(Matrix<float>.Build.Dense(8, 2))));
    // 2x3 diagonal: D^T is 3x2, so the product keeps only the first two columns.
    Assert.IsTrue(tall.TransposeAndMultiply(Matrix<float>.Build.Diagonal(2, 3, 2f)).Equals(tall.Multiply(2f).SubMatrix(0, 8, 0, 2)));

    var wide = Matrix<float>.Build.Random(3, 8, dist);
    Assert.IsTrue(wide.TransposeAndMultiply(Matrix<float>.Build.DiagonalIdentity(8).Multiply(2f)).Equals(wide.Multiply(2f)));
    Assert.IsTrue(wide.TransposeAndMultiply(Matrix<float>.Build.Diagonal(10, 8, 2f)).Equals(wide.Multiply(2f).Append(Matrix<float>.Build.Dense(3, 2))));
    Assert.IsTrue(wide.TransposeAndMultiply(Matrix<float>.Build.Diagonal(2, 8, 2f)).Equals(wide.Multiply(2f).SubMatrix(0, 3, 0, 2)));
}
/// <summary>
/// Test scenario: scalar logistic model with uniform state noise.
/// (TestName is Russian for "Logistic model with uniform noise".)
/// Wires up the drift/diffusion/observation functions and the noise generators.
/// </summary>
public TestLogisticModelUniformNoiseScalar()
{
    TestName = "Логистическая модель с равномерным шумом";
    TestFileName = "LogisticModelUniform";

    // First and second moments of the state noise W, observation noise Nu,
    // and the initial state Eta.
    Vector<double> mW = Exts.Vector(1e-5);
    Matrix<double> dW = Exts.Diag(1.0 / 3.0);
    Vector<double> mNu = Exts.Vector(0);
    Matrix<double> dNu = Exts.Diag(1);
    Vector<double> mEta = Exts.Vector(0.5);
    Matrix<double> dEta = Exts.Diag(0.01); // small values are for regularization

    // Logistic drift phi1, state-dependent diffusion phi2, identity observation psi.
    Func<int, Vector<double>, Vector<double>> phi1 = (s, x) => Exts.Vector(3.0 * x[0] * (1 - x[0]));
    Func<int, Vector<double>, Matrix<double>> phi2 = (s, x) => Exts.Diag(x[0] * (1 - x[0]));
    Func<int, Vector<double>, Vector<double>> psi = (s, x) => Exts.Vector(x[0]);

    //Phi1_latex = new string[] { @"???" };
    //Psi1_latex = new string[] { @"x_t" };
    //P_W = @"\mathcal{R}\left(0,1\right)";
    //P_Nu = @"\mathcal{N}\left(" + mNu.ToLatex() + ", " + dNu.ToLatex() + @"\right)";
    //P_Eta = @"\mathcal{N}\left(" + mEta.ToLatex() + ", " + dEta.ToLatex() + @"\right)";

    // Noise generators: uniform W on (-1, 1) shifted by 1e-5; Gaussian Nu and Eta.
    ContinuousUniform[] UniformW = new ContinuousUniform[1] { new ContinuousUniform(-1 + 1e-5, 1 + 1e-5) };
    Normal[] NormalNu = new Normal[1] { new Normal(mNu[0], Math.Sqrt(dNu[0, 0])) };
    Normal[] NormalEta = new Normal[1] { new Normal(mEta[0], Math.Sqrt(dEta[0, 0])) };

    //Expression<Func<int, Vector<double>, Vector<double>>> expr = (s, x) => Vector(x[0] / (1 + x[0] * x[0]), x[1] / (1 + x[1] * x[1])); ;

    Phi1 = phi1;
    Phi2 = phi2;
    Psi1 = psi;
    // Xi/Zeta: model residual functions built from the same phi/psi and noise means.
    Xi = (s, x) => phi1(s, x) + phi2(s, x) * mW;
    Zeta = (s, x, y, k) => y - psi(s, x) - mNu;
    W = (s) => Exts.Vector(UniformW[0].Sample());
    Nu = (s) => Exts.Vector(NormalNu[0].Sample());
    DW = dW;
    DNu = dNu;
    X0 = () => Exts.Vector(NormalEta[0].Sample());
    X0Hat = mEta;
    DX0Hat = dEta;
}
/// <summary>
/// The density is 1/(upper-lower) inside the support and 0 outside,
/// for both the instance method and the static PDF helper.
/// </summary>
public void ValidateDensity(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var expected = (x >= lower && x <= upper) ? 1.0 / (upper - lower) : 0.0;
        Assert.AreEqual(expected, dist.Density(x));
        Assert.AreEqual(expected, ContinuousUniform.PDF(lower, upper, x));
    }
}
/// <summary>
/// Generates a random TSL enum: a random member count within the configured bounds,
/// with each member probabilistically assigned an explicit value in the byte range.
/// </summary>
/// <param name="context">Generator context supplying the RNG and the element registry.</param>
/// <returns>The freshly created enum, which is also registered in <c>context.Enums</c>.</returns>
public static ITSLTopLevelElement GenerateEnum(this TSLGeneratorContext context)
{
    var name = $"Enum{context.TopLevelElementCount + 1}";
    var memberNumber = DiscreteUniform.Sample(context.MasterRandom, EnumSettings.MinEnumMemberNumber, EnumSettings.MaxEnumMemberNumber);
    var members = Enumerable.Range(0, memberNumber).Select(i => $"{name}_{i}").ToList();
    // NOTE(review): the last lambda draws a uniform [0,1) sample — presumably the
    // probability source consumed by WithProbabilityThreshold; confirm its signature.
    var values = members
        .WithProbabilityThreshold(
            EnumSettings.ValueSpecifiedProbability,
            n => new KeyValuePair<string, int>(n, DiscreteUniform.Sample(context.MasterRandom, 0, byte.MaxValue)),
            _ => ContinuousUniform.Sample(context.MasterRandom, 0.0, 1.0))
        .Where(p => p.Key != null) // members without an explicit value are dropped
        .ToImmutableDictionary();
    var result = new TSLEnum(name, members, values);
    context.Enums.Add(result);
    return(result);
}
/// <summary>
/// The log-density is -ln(upper-lower) inside the support and -infinity outside,
/// for both the instance method and the static PDFLn helper.
/// </summary>
public void ValidateDensityLn(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var expected = (x >= lower && x <= upper)
            ? -Math.Log(upper - lower)
            : double.NegativeInfinity;
        Assert.AreEqual(expected, dist.DensityLn(x));
        Assert.AreEqual(expected, ContinuousUniform.PDFLn(lower, upper, x));
    }
}
/// <summary>
/// Builds nData sine-wave training sequences, each seqLength steps long with step dt,
/// starting from a random phase in [0, 2*pi).
/// </summary>
static void LoadSinSeq(out HashSet<TrainingData> data, double dt, int seqLength, int nData)
{
    data = new HashSet<TrainingData>();
    var phaseDist = new ContinuousUniform(0, 2 * Math.PI);
    for (var n = 0; n < nData; n++)
    {
        var theta = phaseDist.Sample();
        var sequence = new TrainingData(0, 1);
        for (var step = 0; step < seqLength; step++)
        {
            theta += dt;
            var pair = new TrainingData.TrainingPair
            {
                Response = DenseVector.Create(1, Math.Sin(theta))
            };
            sequence[step] = pair;
        }
        data.Add(sequence);
    }
}
/// <summary>
/// Generates <paramref name="total"/> points with coordinates drawn uniformly from
/// [dim - dim*d*k, dim + 2*dim*d*k]. A point is labeled positive when for some j in 1..k
/// every coordinate i lies inside the band [i*j, i*j + i*d].
/// </summary>
public override IList<Point> Generate(int dimensions, double d, int k, int total)
{
    var result = new List<Point>(total);
    var generated = 0;
    var uniformDistribution = new ContinuousUniform(dimensions - dimensions * d * k, dimensions + 2 * dimensions * d * k, RandomSource);
    while (generated++ < total)
    {
        var samples = new double[dimensions];
        var isPositive = true;
        uniformDistribution.Samples(samples); // fills all coordinates in one draw
        for (var j = 1; j <= k; j++)
        {
            isPositive = true; // reset for each candidate band j
            for (var i = 1; i <= dimensions; i++)
            {
                if (samples[i - 1] >= i * j && samples[i - 1] <= i * j + i * d)
                {
                    continue;
                }
                // coordinate i misses band j, so j cannot make this point positive
                isPositive = false;
                break;
            }
            if (isPositive)
            {
                break; // some band matched every coordinate
            }
        }
        result.Add(new Point(samples) { Label = isPositive });
    }
    return(result);
}
/// <summary>
/// End-to-end regression test: fits a univariate exponential hypothesis to synthetic data
/// generated from known parameters theta = (0, 13500, -1.7) using a residual-sum-of-squares
/// cost minimized by Hager-Zhang conjugate gradient, then checks the recovered coefficients.
/// </summary>
public void UnivariateExponentialRegressionWithResidualSumOfSquares()
{
    // Ground-truth parameters and a deliberately-off starting point.
    var theta = Vector<double>.Build.DenseOfArray(new[] { 0D, 13500D, -1.7D });
    var initialTheta = Vector<double>.Build.DenseOfArray(new[] { 0D, 10000D, -1D });

    // define the hypothesis
    var hypothesis = new UnivariateExponentialHypothesis();

    // define a probability distribution for the input locations
    var distribution = new ContinuousUniform(0D, 1000D);

    // obtain the test data: evaluate the true model at random inputs
    const int dataPoints = 100;
    var trainingSet = new List<DataPoint<double>>(dataPoints);
    for (int i = 0; i < dataPoints; ++i)
    {
        var inputs = Vector<double>.Build.Random(1, distribution);
        var output = hypothesis.Evaluate(theta, inputs);
        trainingSet.Add(new DataPoint<double>(inputs, output));
    }
    ;

    // cost function is sum of squared errors
    var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

    // define the optimization problem
    var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialTheta);

    // optimize!
    var gd = new HagerZhangCG();
    var result = gd.Minimize(problem);

    // assert! (tolerances differ per parameter: the scale term is much looser)
    var coefficients = result.Coefficients;
    coefficients[1].Should().BeApproximately(theta[1], 1000D, "because that's the underlying system's [a] parameter");
    coefficients[2].Should().BeApproximately(theta[2], 1E-2D, "because that's the underlying system's [b] parameter");
    coefficients[0].Should().BeApproximately(theta[0], 1E-5D, "because that's the underlying system's offset");
}
/// <summary>
/// Generates <paramref name="total"/> points with coordinates drawn uniformly from
/// [1 - 2d, dim + 2d + 2*sqrt(6)*(k-1)*d/pi]. A point is labeled positive when, for some
/// j in 1..k, it lies within distance d of the j-th shifted center (1..dim offset by
/// _edgeCoefficient*d*(j-1)/c per coordinate).
/// </summary>
public override IList<Point> Generate(int dimensions, double d, int k, int total)
{
    var result = new List<Point>(total);
    var generated = 0;
    var uniformDistribution = new ContinuousUniform(1 - 2 * d, dimensions + 2 * d + (2 * Math.Sqrt(6) * (k - 1) * d) / Math.PI, RandomSource);
    while (generated++ < total)
    {
        var samples = new double[dimensions];
        var isPositive = true;
        uniformDistribution.Samples(samples); // fills all coordinates in one draw
        for (var j = 1; j <= k; j++)
        {
            isPositive = true; // reset for each candidate center j
            for (var i = 1; i <= dimensions; i++)
            {
                // NOTE(review): the distance test does not depend on i, so this inner
                // loop evaluates the same predicate dimensions times — confirm intent.
                if (Distance.Euclidean(Enumerable.Range(1, dimensions).Select(c => Convert.ToDouble(c) + _edgeCoefficient * d * (j - 1) / c).ToArray(), samples) <= d)
                {
                    continue;
                }
                isPositive = false;
                break;
            }
            if (isPositive)
            {
                break; // within distance d of some center
            }
        }
        result.Add(new Point(samples) { Label = isPositive });
    }
    return(result);
}
private Vector<double> HiddenToOutputBiases; //Size: NumberOfOutputs;

/// <summary>
/// Initializes the network weights and biases with small uniform random values,
/// arranges the test set, normalizes the data, and writes a training header to the log.
/// </summary>
/// <param name="testSetSize">Number of examples to reserve for the test set.</param>
/// <param name="lowerBound">Lower bound of the uniform weight initialization.</param>
/// <param name="upperBound">Upper bound of the uniform weight initialization.</param>
public void Initialize(int testSetSize, double lowerBound = -0.001, double upperBound = 0.001)
{
    Random rand = new Random();
    ContinuousUniform randomDistribution = new ContinuousUniform(lowerBound, upperBound, rand);
    InputToHiddenWeights = Matrix<double>.Build.Random(NumberOfInputs, NumberOfHiddenUnits, randomDistribution);
    HiddenToOutputWeights = Matrix<double>.Build.Random(NumberOfHiddenUnits, NumberOfOutputs, randomDistribution);
    InputToHiddenBiases = Vector<double>.Build.Random(NumberOfHiddenUnits, randomDistribution);
    HiddenToOutputBiases = Vector<double>.Build.Random(NumberOfOutputs, randomDistribution);
    ArrangeTestSet(testSetSize);
    NormalizeData();
    using (StreamWriter outputFile = new StreamWriter(Logs, false))
    {
        outputFile.WriteLine("Begin neural network regression. Tendon Layout");
        outputFile.WriteLine("Goal is to predict the function of bending moment M as f(TendonLayout).\n");
        if (TestInputs == null)
        {
            outputFile.WriteLine("Data loaded: " + TrainingInputs.RowCount.ToString() + " examples in training set, 0 examples in test set.\n");
        }
        else
        {
            outputFile.WriteLine("Data loaded: " + TrainingInputs.RowCount.ToString() + " examples in training set, " + TestInputs.RowCount.ToString() + " examples in test set.\n");
        }
        outputFile.WriteLine("Creating a " + NumberOfInputs.ToString() + "-" + NumberOfHiddenUnits.ToString() + "-" + NumberOfOutputs.ToString() + " regression neural network.");
        // Fix: this literal was split across a raw newline in the source, which is
        // invalid C#; the two fragments are rejoined into one string literal.
        outputFile.WriteLine("Using tanh hidden layer activation. Function parameters normalized.\n");
        outputFile.WriteLine("Maximum number of epochs: " + MaxNumberOfEpochs.ToString());
        outputFile.WriteLine(string.Format("Learning rate: {0:0.000000}\n\n", LearnRate));
        outputFile.WriteLine("Outputs normalization:");
        for (int i = 0; i < TrainingOutputs.ColumnCount; i++)
        {
            outputFile.WriteLine(string.Format(" Output {0}: avg = {1:0.000000}, std = {2:0.000000}", i + 1, NormOutputAvg.At(i), NormOutputStd.At(i)));
        }
        outputFile.WriteLine("\nStarting training (using stochastic back-propagation):");
    }
}
/// <summary>
/// Builds one motion sequence: _numControlPoints targets at random, time-sorted positions
/// with random per-dimension values, followed by a final target at _timeRange that
/// returns every dimension to the neutral value 0.5.
/// </summary>
public MotionSequence GenerateSingleSequence(List<Guid> manipulatableIds, int dimention)
{
    var timeUniform = new ContinuousUniform(0, _timeRange, _randomGenerator);
    var times = Enumerable.Range(0, _numControlPoints)
        .Select(v => (float)timeUniform.Sample())
        .OrderBy(v => v)
        .ToList();

    var valueUniform = new ContinuousUniform(0, _valueRange, _randomGenerator);
    var sequence = new List<MotionTarget>();
    foreach (var time in times)
    {
        var values = new List<float>();
        for (var axis = 0; axis < dimention; axis++)
        {
            values.Add((float)valueUniform.Sample());
        }
        sequence.Add(new MotionTarget(time, values));
    }

    // to neutral
    var neutral = new List<float>();
    for (var axis = 0; axis < dimention; axis++)
    {
        neutral.Add(0.5f);
    }
    sequence.Add(new MotionTarget(_timeRange, neutral));

    return new MotionSequence(sequence);
}
/// <summary>
/// Layer setup: validates the size parameters, reshapes the single top tensor to the
/// pooled dimensions, and fills the random pooling indexes with uniform [0,1) draws.
/// </summary>
public override void Setup(TensorCollection bottom, TensorCollection top)
{
    CheckSizeParameters();
    this._maxTopBlobs = 1; // this layer produces exactly one top blob
    base.Setup(bottom, top);
    int channels = bottom[0].Channels;
    top[0].Reshape(bottom[0].Num, channels, Pooled.Height, Pooled.Width);
    using (var topCpu = top[0].OnCpu())
    {
        this.randomIndexes = Vector<double>.Build.SameAs(topCpu.Data);
        var distribution = new ContinuousUniform(0, 1);
        // Zeros.Include forces the map to visit zero entries too, so every index is sampled.
        randomIndexes.MapInplace(x => distribution.Sample(), Zeros.Include);
    }
}
/// <summary>
/// The CDF is 0 below the support, 1 above it, and linear in between.
/// </summary>
public void ValidateCumulativeDistribution([Values(0.0, 0.0, 0.0, 0.0, -5.0, 0.0)] double lower, [Values(0.0, 0.1, 1.0, 10.0, 100.0, Double.PositiveInfinity)] double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        double expected;
        if (x <= lower)
        {
            expected = 0.0;
        }
        else if (x >= upper)
        {
            expected = 1.0;
        }
        else
        {
            expected = (x - lower) / (upper - lower);
        }
        Assert.AreEqual(expected, dist.CumulativeDistribution(x));
    }
}
/// <summary>
/// The CDF is 0 below the support, 1 above it, and linear in between.
/// </summary>
public void ValidateCumulativeDistribution(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var expected = x <= lower
            ? 0.0
            : x >= upper
                ? 1.0
                : (x - lower) / (upper - lower);
        Assert.AreEqual(expected, dist.CumulativeDistribution(x));
    }
}
/// <summary>
/// Generates <paramref name="total"/> points with coordinates drawn uniformly from
/// [-1, 2 + d]. A point is positive only when its coordinate sum is at most d AND every
/// coordinate pair satisfies both wedge constraints built from tan(pi/12).
/// </summary>
public override IList<Point> Generate(int dimensions, double d, int k, int total)
{
    var result = new List<Point>(total);
    var generated = 0;
    var uniformDistribution = new ContinuousUniform(-1, 2 + d, RandomSource);
    while (generated++ < total)
    {
        var samples = new double[dimensions];
        var isPositive = true;
        uniformDistribution.Samples(samples); // fills all coordinates in one draw
        if (samples.Sum() > d)
        {
            // outside the simplex-like cap: immediately negative
            isPositive = false;
        }
        else
        {
            // check every unordered coordinate pair against the symmetric wedge condition
            for (var i = 1; i <= dimensions; i++)
            {
                for (var j = i + 1; j < dimensions; j++)
                {
                    if (samples[i - 1] / Math.Tan(Math.PI / 12) - samples[j - 1] * Math.Tan(Math.PI / 12) < 0 || samples[j - 1] / Math.Tan(Math.PI / 12) - samples[i - 1] * Math.Tan(Math.PI / 12) < 0)
                    {
                        isPositive = false;
                    }
                }
            }
        }
        result.Add(new Point(samples) { Label = isPositive });
    }
    return(result);
}
/// <summary>
/// Uses Infer3DExactLMS to infer a random world point from a number of different projections of the point.
/// It writes out the squared errors corresponding to different number of projections into a file.
/// </summary>
/// <param name="numProjections">the max number of projections to use.</param>
/// <param name="gaussianNoiseSigma">the standard deviation of the gaussian noise to be added to the projected points.</param>
/// <param name="fileName">path of the output file receiving one "count&lt;TAB&gt;error" line per projection count.</param>
public static void ShowErrorInfer3DExactLMS(int numProjections, double gaussianNoiseSigma, string fileName)
{
    ContinuousUniform dist = new ContinuousUniform(0, 1);
    Normal gaussianNoise = new Normal(0, gaussianNoiseSigma);

    // Random homogeneous 4x1 world point.
    DenseMatrix worldPoint = new DenseMatrix(4, 1);
    worldPoint = (DenseMatrix)worldPoint.Random(4, 1, dist);

    // Each projection: a random 3x4 camera matrix applied to the world point,
    // perturbed by additive Gaussian noise.
    ProjectedPoint[] projections = new ProjectedPoint[numProjections];
    for (int i = 0; i < projections.Length; i++)
    {
        projections[i] = new ProjectedPoint();
        projections[i].worldToImage = new DenseMatrix(3, 4);
        projections[i].worldToImage = (DenseMatrix)projections[i].worldToImage.Random(3, 4, dist);
        projections[i].projectedPoint = (projections[i].worldToImage * worldPoint);
        projections[i].projectedPoint += (DenseMatrix)projections[i].projectedPoint.Random(3, 1, gaussianNoise);
    }

    // For every prefix of 2..numProjections+1 projections, record the L2 error
    // between the true point and the inferred one.
    File.WriteAllLines(fileName, Enumerable.Range(2, numProjections)
        .Select(i => String.Format("{0}\t{1}", i, (worldPoint - Infer3DExactLMS(projections.Take(i))).L2Norm())));
}
/// <summary>
/// Generates <paramref name="total"/> points with coordinates drawn uniformly from
/// [1 - 2d, dim + 2d]; a point is positive when it lies within Euclidean distance d
/// of the fixed center (1, 2, ..., dim).
/// </summary>
public override IList<Point> Generate(int dimensions, double d, int k, int total)
{
    var result = new List<Point>(total);
    var uniformDistribution = new ContinuousUniform(1 - 2 * d, dimensions + 2 * d, RandomSource);
    // Center (1, 2, ..., dim) is loop-invariant, so build it once.
    var center = Enumerable.Range(1, dimensions).Select(Convert.ToDouble).ToArray();
    for (var generated = 0; generated < total; generated++)
    {
        var samples = new double[dimensions];
        uniformDistribution.Samples(samples);
        var label = Distance.Euclidean(center, samples) <= d;
        result.Add(new Point(samples) { Label = label });
    }
    return result;
}
/// <summary>
/// Constructs the RNN: stores dimensions and learning rate, then initializes weight
/// matrices with uniform draws scaled by 1/sqrt of the respective input dimension.
/// </summary>
/// <param name="vocab">Vocabulary used by the network.</param>
/// <param name="id">Input dimension.</param>
/// <param name="od">Output dimension.</param>
/// <param name="hd">Hidden dimension.</param>
/// <param name="learningRate">Gradient-step size.</param>
public RNNonGPU(Vocabulary vocab, int id, int od, int hd, double learningRate = 0.005)
{
    this.vocab = vocab;
    this.id = id;
    this.od = od;
    this.hd = hd;
    this.learningRate = learningRate;

    // Uniform init bounded by 1/sqrt(fan-in) for each weight group.
    IContinuousDistribution inputScale = new ContinuousUniform(-1 / Sqrt(id), 1 / Sqrt(id));
    IContinuousDistribution hiddenScale = new ContinuousUniform(-1 / Sqrt(hd), 1 / Sqrt(hd));

    h = Vector<double>.Build.Dense(hd);
    U = Matrix<double>.Build.Random(hd, id, inputScale);
    V = Matrix<double>.Build.Random(od, hd, hiddenScale);
    W = Matrix<double>.Build.Random(hd, hd, hiddenScale);
    // NOTE(review): bo is sized hd while bh is sized od; given V is od x hd this looks
    // swapped relative to the names — verify against how bo/bh are used downstream
    // before changing, since callers may compensate.
    bo = Vector<double>.Build.Dense(hd);
    bh = Vector<double>.Build.Dense(od);
}
/// <summary>
/// Populates this star system with planetoids: picks a system radius, chooses how many
/// planetoids fit (fewer for small systems), and creates each one on a random orbit.
/// </summary>
private void BuildStarSystem()
{
    // Nothing to do for systems without planets.
    if (!HasPanets)
    {
        return;
    }

    SystemRadius = (int)ContinuousUniform.Sample(500, 1800);

    // Smaller systems hold fewer planetoids.
    int numOfPlanetoids;
    if (SystemRadius < 700)
    {
        numOfPlanetoids = DiscreteUniform.Sample(1, 4);
    }
    else
    {
        numOfPlanetoids = DiscreteUniform.Sample(1, 7);
    }

    List<Planetoid> lstPlanets = new List<Planetoid>();
    for (int i = 0; i < numOfPlanetoids; i++)
    {
        int intOrbitalRadus = 0;
        int intAngleFromZero = 0;
        GetPlanetoidOrbitalRadiusAndAngle(ref lstPlanets, out intOrbitalRadus, out intAngleFromZero);

        // Fix: removed the always-true blnShouldBuildPlanet flag — it was set to true
        // and never modified, so the guard around planet construction was dead code.
        Planetoid objPlanetoid = new Planetoid(0, 0, 0, this);
        objPlanetoid.OrbitalRadius = intOrbitalRadus;
        objPlanetoid.Orbit.Radius = intOrbitalRadus;
        objPlanetoid.AngleFromZero = intAngleFromZero;
        lstPlanets.Add(objPlanetoid);
    }

    ChildOrbits = lstPlanets.ToArray();
}
/// <summary>
/// Draws nbSamples random vectors of the given dimension, rejection-sampling until each
/// vector's L2 norm is strictly below maxNorm (i.e. uniform components, trimmed to the ball).
/// </summary>
/// <param name="nbSamples">Number of vectors to produce.</param>
/// <param name="dim">Dimension of each vector.</param>
/// <param name="maxNorm">Strict upper bound on each vector's L2 norm.</param>
public static List<Vector<double>> RandomVectors(int nbSamples, int dim, double maxNorm = 1.0)
{
    var ret = new List<Vector<double>>();
    var continuousUniform = new ContinuousUniform(-maxNorm, +maxNorm);
    for (var drawn = 0; drawn < nbSamples; drawn++)
    {
        Vector<double> candidate;
        do
        {
            candidate = Vector<double>.Build.Random(dim, continuousUniform);
        }
        while (candidate.L2Norm() >= maxNorm); // reject anything on or outside the ball
        ret.Add(candidate);
    }
    return ret;
}
/// <summary>
/// The inverse CDF maps 0 to the lower bound, 1 to the upper bound, and interior
/// probabilities back to their sample positions, within 1e-12.
/// </summary>
public void ValidateInverseCumulativeDistribution(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        if (x <= lower)
        {
            Assert.AreEqual(lower, dist.InverseCumulativeDistribution(0.0), 1e-12);
            Assert.AreEqual(lower, ContinuousUniform.InvCDF(lower, upper, 0.0), 1e-12);
        }
        else if (x >= upper)
        {
            Assert.AreEqual(upper, dist.InverseCumulativeDistribution(1.0), 1e-12);
            Assert.AreEqual(upper, ContinuousUniform.InvCDF(lower, upper, 1.0), 1e-12);
        }
        else
        {
            var p = (x - lower) / (upper - lower);
            Assert.AreEqual(x, dist.InverseCumulativeDistribution(p), 1e-12);
            Assert.AreEqual(x, ContinuousUniform.InvCDF(lower, upper, p), 1e-12);
        }
    }
}
/// <summary>
/// Builds deterministic linear samples y = slope*x + intercept and randomized test vectors.
/// </summary>
/// <param name="x">X sample values (output).</param>
/// <param name="y">Y sample values (output).</param>
/// <param name="xtest">X test values (output).</param>
/// <param name="ytest">Y test values (output).</param>
/// <param name="samples">Number of sample points.</param>
/// <param name="sampleOffset">Offset applied to each sample index.</param>
/// <param name="slope">Slope of the linear model.</param>
/// <param name="intercept">Intercept of the linear model.</param>
public static void Build(out double[] x, out double[] y, out double[] xtest, out double[] ytest, int samples = 3, double sampleOffset = -0.5, double slope = 2.0, double intercept = -1.0)
{
    // Fixed seed so every run exercises identical data.
    var uniform = new ContinuousUniform { RandomSource = new MersenneTwister(42) };

    // Deterministic sample points lying exactly on the line.
    x = new double[samples];
    y = new double[samples];
    for (var i = 0; i < samples; i++)
    {
        var xi = i + sampleOffset;
        x[i] = xi;
        y[i] = (xi * slope) + intercept;
    }

    // Test vectors fall randomly between (and just outside) the sample points.
    xtest = new double[samples + 1];
    ytest = new double[samples + 1];
    if (samples == 1)
    {
        // Degenerate case: with a single sample the fitted line is constant.
        xtest[0] = sampleOffset - uniform.Sample();
        xtest[1] = sampleOffset + uniform.Sample();
        ytest[0] = ytest[1] = (sampleOffset * slope) + intercept;
    }
    else
    {
        for (var i = 0; i < xtest.Length; i++)
        {
            xtest[i] = (i - 1) + sampleOffset + uniform.Sample();
            ytest[i] = (xtest[i] * slope) + intercept;
        }
    }
}
/// <summary>
/// The static sample sequence must throw when upper &lt; lower, surfacing on first enumeration.
/// </summary>
public void FailSampleSequenceStatic()
{
    Assert.Throws<ArgumentOutOfRangeException>(
        () => ContinuousUniform.Samples(new Random(), 0.0, -1.0).First());
}
/// <summary>
/// A default-range distribution can produce and materialize a finite sample sequence.
/// </summary>
public void CanSampleSequence()
{
    var n = new ContinuousUniform();
    var ied = n.Samples();
    // Materialize a few values to force enumeration; the previously unused
    // result local was removed (consistent with the static-sequence test).
    ied.Take(5).ToArray();
}
/// <summary>
/// A default-range distribution can produce a single sample without error.
/// </summary>
public void CanSample()
{
    var n = new ContinuousUniform();
    // Unused result local removed (consistent with the sibling CanSample test).
    n.Sample();
}
/// <summary>
/// Setting Upper below the current Lower should fail.
/// NOTE(review): the failure is not asserted here — presumably an [ExpectedException]
/// attribute outside this view handles it; confirm before converting to Assert.Throws
/// like the SetBadUpperFails variant.
/// </summary>
public void SetUpperFail()
{
    var dist = new ContinuousUniform();
    dist.Upper = -1.0;
}
/// <summary>
/// The log-density is -ln(upper-lower) inside the support and -infinity outside.
/// </summary>
public void ValidateDensityLn([Values(0.0, 0.0, 0.0, 0.0, -5.0, 0.0)] double lower, [Values(0.0, 0.1, 1.0, 10.0, 100.0, Double.PositiveInfinity)] double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var inside = x >= lower && x <= upper;
        Assert.AreEqual(inside ? -Math.Log(upper - lower) : double.NegativeInfinity, dist.DensityLn(x));
    }
}
/// <summary>
/// Construction with valid bounds stores both parameters unchanged.
/// </summary>
public void CanCreateContinuousUniform([Values(0.0, 0.0, 0.0, 10.0, -5.0)] double lower, [Values(0.0, 0.1, 1.0, 10.0, 11.0, 100.0, Double.PositiveInfinity)] double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    Assert.AreEqual(upper, dist.Upper);
    Assert.AreEqual(lower, dist.Lower);
}
/// <summary>The median of a uniform distribution is the midpoint of its support.</summary>
public void ValidateMedian(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    var midpoint = (lower + upper) / 2.0;
    Assert.AreEqual(midpoint, dist.Median);
}
/// <summary>The mode of a uniform distribution is reported as the midpoint of its support.</summary>
public void ValidateMode(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    var midpoint = (lower + upper) / 2.0;
    Assert.AreEqual(midpoint, dist.Mode);
}
/// <summary>The minimum of the support equals the lower bound.</summary>
public void ValidateMinimum(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    Assert.AreEqual(lower, dist.Minimum);
}
/// <summary>The entropy of a uniform distribution is ln(upper - lower).</summary>
public void ValidateEntropy(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    var expected = Math.Log(upper - lower);
    Assert.AreEqual(expected, dist.Entropy);
}
/// <summary>
/// The log-density is -ln(upper-lower) inside the support and -infinity outside.
/// </summary>
public void ValidateDensityLn(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var expected = (x >= lower && x <= upper)
            ? -Math.Log(upper - lower)
            : double.NegativeInfinity;
        Assert.AreEqual(expected, dist.DensityLn(x));
    }
}
/// <summary>
/// The density is 1/(upper-lower) inside the support and 0 outside.
/// </summary>
public void ValidateDensity(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        var expected = (x >= lower && x <= upper) ? 1.0 / (upper - lower) : 0.0;
        Assert.AreEqual(expected, dist.Density(x));
    }
}
/// <summary>
/// The CDF is 0 below the support, 1 above it, and linear in between.
/// </summary>
public void ValidateCumulativeDistribution(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    for (var i = 0; i < 11; i++)
    {
        var x = i - 5.0;
        double expected;
        if (x <= lower)
        {
            expected = 0.0;
        }
        else if (x >= upper)
        {
            expected = 1.0;
        }
        else
        {
            expected = (x - lower) / (upper - lower);
        }
        Assert.AreEqual(expected, dist.CumulativeDistribution(x));
    }
}
/// <summary>A uniform distribution is symmetric, so its skewness is zero.</summary>
public void ValidateSkewness(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    Assert.AreEqual(0.0, dist.Skewness);
}
/// <summary>The static Sample helper works with an explicit RNG and valid bounds.</summary>
public void CanSampleStatic()
{
    var rng = new Random();
    ContinuousUniform.Sample(rng, 0.0, 1.0);
}
/// <summary>ToString reports the distribution name and its bounds.</summary>
public void ValidateToString()
{
    var dist = new ContinuousUniform(1.0, 2.0);
    var text = dist.ToString();
    Assert.AreEqual("ContinuousUniform(Lower = 1, Upper = 2)", text);
}
/// <summary>Setting Upper below the current Lower must throw ArgumentOutOfRangeException.</summary>
public void SetBadUpperFails()
{
    var dist = new ContinuousUniform();
    Assert.Throws<ArgumentOutOfRangeException>(() => dist.Upper = -1.0);
}
/// <summary>The default constructor produces the standard uniform distribution on [0, 1].</summary>
public void CanCreateContinuousUniform()
{
    var dist = new ContinuousUniform();
    Assert.AreEqual(0.0, dist.Lower);
    Assert.AreEqual(1.0, dist.Upper);
}
/// <summary>Construction with valid bounds stores both parameters unchanged.</summary>
public void CanCreateContinuousUniform(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
    Assert.AreEqual(upper, dist.Upper);
    Assert.AreEqual(lower, dist.Lower);
}
/// <summary>
/// Drawing an array of samples from the rejection sampler completes without error.
/// </summary>
public void SampleArrayTest()
{
    var uniform = new ContinuousUniform(0.0, 1.0);
    uniform.RandomSource = new MersenneTwister();
    var rs = new RejectionSampler<double>(
        x => System.Math.Pow(x, 1.7) * System.Math.Pow(1.0 - x, 5.3),
        x => 0.021,
        uniform.Sample);
    rs.RandomSource = uniform.RandomSource;
    // Unused result local removed (consistent with the sibling SampleArrayTest).
    rs.Sample(5);
}
/// <summary>
/// Setting Lower above the current Upper (default 1.0) should fail.
/// NOTE(review): the failure is not asserted here — presumably an [ExpectedException]
/// attribute outside this view handles it; confirm before converting to Assert.Throws.
/// </summary>
public void SetLowerFail()
{
    var dist = new ContinuousUniform();
    dist.Lower = 3.0;
}
/// <summary>The static sample sequence can be materialized for valid bounds.</summary>
public void CanSampleSequenceStatic()
{
    var sequence = ContinuousUniform.Samples(new Random(), 0.0, 1.0);
    sequence.Take(5).ToArray();
}
/// <summary>The Lower bound can be assigned any valid value after construction.</summary>
public void CanSetLower(double lower)
{
    var dist = new ContinuousUniform();
    dist.Lower = lower;
}
/// <summary>A default-range distribution can produce a single sample without error.</summary>
public void CanSample()
{
    var dist = new ContinuousUniform();
    dist.Sample();
}
/// <summary>The Upper bound can be assigned any valid value after construction.</summary>
public void CanSetUpper(double upper)
{
    var dist = new ContinuousUniform();
    dist.Upper = upper;
}
/// <summary>ToString reports the distribution name and its bounds.</summary>
public void ValidateToString()
{
    var dist = new ContinuousUniform(1.0, 2.0);
    var text = dist.ToString();
    Assert.AreEqual("ContinuousUniform(Lower = 1, Upper = 2)", text);
}
/// <summary>
/// Constructing with invalid bounds should fail.
/// NOTE(review): the failure is not asserted here — presumably an [ExpectedException]
/// attribute outside this view handles it; confirm the exact exception type before
/// converting to Assert.Throws.
/// </summary>
public void ContinuousUniformCreateFailsWithBadParameters(double lower, double upper)
{
    var dist = new ContinuousUniform(lower, upper);
}