/// <summary>
/// Creates a Dirichlet process over <paramref name="baseMeasure"/> with
/// concentration parameter <paramref name="alpha"/>.
/// </summary>
public DirichletProcess(IProbabilityDistribution<E> baseMeasure, double alpha)
{
    this.baseMeasure = baseMeasure;
    this.alpha = alpha;
    // The counter of previously drawn samples; the null key holds the
    // concentration mass alpha, representing "draw a fresh sample from the base measure".
    sampled = new ClassicCounter<E>();
    sampled.IncrementCount(null, alpha);
}
/// <summary>
/// Recomputes the cognate sound-correspondence probability distribution for
/// <paramref name="pair"/> and reports whether it has converged, i.e. whether
/// every correspondence probability differs from the previous iteration's by
/// at most 0.0001. When not converged, the new distribution (and the matching
/// default correspondence probability) is stored back on the pair.
/// </summary>
/// <param name="pair">The variety pair being re-estimated.</param>
/// <returns><c>true</c> if the distribution is unchanged within tolerance; otherwise <c>false</c>.</returns>
private bool M(VarietyPair pair)
{
    IWordAligner aligner = _project.WordAligners[AlignerId];
    int segmentCount = pair.Variety2.SegmentFrequencyDistribution.ObservedSamples.Count;
    // Number of possible correspondences used for Witten-Bell smoothing:
    // with expansion/compression a segment can map to up to two segments
    // (segmentCount^2 pairs) plus single segments plus the empty ngram.
    int possCorrCount = aligner.ExpansionCompressionEnabled
        ? (segmentCount * segmentCount) + segmentCount + 1
        : segmentCount + 1;
    var cpd = new ConditionalProbabilityDistribution<SoundContext, Ngram<Segment>>(
        pair.CognateSoundCorrespondenceFrequencyDistribution,
        (sc, fd) => new WittenBellProbabilityDistribution<Ngram<Segment>>(fd, possCorrCount));
    bool converged = true;
    if (pair.CognateSoundCorrespondenceProbabilityDistribution == null
        || pair.CognateSoundCorrespondenceProbabilityDistribution.Conditions.Count != cpd.Conditions.Count)
    {
        // First iteration, or the set of conditioning contexts changed size.
        converged = false;
    }
    else
    {
        foreach (SoundContext lhs in cpd.Conditions)
        {
            IProbabilityDistribution<Ngram<Segment>> probDist = cpd[lhs];
            IProbabilityDistribution<Ngram<Segment>> oldProbDist;
            // A missing context or a different sample count means the support changed.
            if (!pair.CognateSoundCorrespondenceProbabilityDistribution.TryGetProbabilityDistribution(lhs, out oldProbDist)
                || probDist.Samples.Count != oldProbDist.Samples.Count)
            {
                converged = false;
                break;
            }
            // Compare each correspondence probability against the previous iteration.
            foreach (Ngram<Segment> correspondence in probDist.Samples)
            {
                if (Math.Abs(probDist[correspondence] - oldProbDist[correspondence]) > 0.0001)
                {
                    converged = false;
                    break;
                }
            }
            if (!converged)
            {
                break;
            }
        }
    }
    if (!converged)
    {
        // Install the freshly estimated distribution for the next iteration.
        pair.CognateSoundCorrespondenceProbabilityDistribution = cpd;
        pair.DefaultSoundCorrespondenceProbability = 1.0 / possCorrCount;
    }
    return(converged);
}
/// <summary>
/// Writes a plain-text report for <paramref name="varietyPair"/> to
/// <paramref name="stream"/>: similarity scores, likely cognate and
/// non-cognate word pairs, and sound correspondences ordered by probability.
/// The stream is left open (the writer wraps it in a non-closing wrapper).
/// </summary>
public void Export(Stream stream, IWordAligner aligner, VarietyPair varietyPair)
{
    using (var writer = new StreamWriter(new NonClosingStreamWrapper(stream)))
    {
        writer.WriteLine("Similarity");
        writer.WriteLine("----------");
        writer.WriteLine("Lexical: {0:p}", varietyPair.LexicalSimilarityScore);
        writer.WriteLine("Phonetic: {0:p}", varietyPair.PhoneticSimilarityScore);
        writer.WriteLine();
        writer.WriteLine("Likely cognates");
        writer.WriteLine("--------------");
        WriteWordPairs(writer, aligner, varietyPair.WordPairs.Where(wp => wp.Cognacy));
        writer.WriteLine();
        writer.WriteLine("Likely non-cognates");
        writer.WriteLine("-------------------");
        WriteWordPairs(writer, aligner, varietyPair.WordPairs.Where(wp => !wp.Cognacy));
        writer.WriteLine();
        writer.WriteLine("Sound correspondences");
        writer.WriteLine("---------------------");
        // Blank line between context sections, but not before the first one.
        bool first = true;
        foreach (SoundContext lhs in varietyPair.CognateSoundCorrespondenceProbabilityDistribution.Conditions)
        {
            if (!first)
            {
                writer.WriteLine();
            }
            IProbabilityDistribution<Ngram<Segment>> probDist = varietyPair.CognateSoundCorrespondenceProbabilityDistribution[lhs];
            FrequencyDistribution<Ngram<Segment>> freqDist = varietyPair.CognateSoundCorrespondenceFrequencyDistribution[lhs];
            writer.WriteLine(lhs.ToString());
            // Most probable correspondences first; frequency shown alongside probability.
            foreach (var correspondence in freqDist.ObservedSamples
                .Select(corr => new { Segment = corr, Probability = probDist[corr], Frequency = freqDist[corr] })
                .OrderByDescending(corr => corr.Probability))
            {
                writer.WriteLine("{0}: {1:p}, {2}", correspondence.Segment, correspondence.Probability, correspondence.Frequency);
            }
            first = false;
        }
    }
}
/// <summary>
/// Formats the distribution's parameters as a single "Name = Value" list,
/// e.g. "Mean = 0, StdDev = 1". Returns an empty string when the
/// distribution exposes no parameters.
/// </summary>
public static string GetParameterValueString(this IProbabilityDistribution distribution) =>
    // string.Join produces the same output as the previous Aggregate-based
    // implementation (which built the string with repeated concatenation and
    // then trimmed the trailing ", ") without the O(n^2) allocation pattern.
    string.Join(", ", distribution.GetParameterValuePairs().Select(p => $"{p.Key} = {p.Value}"));
/// <summary>Returns the unqualified runtime type name of the concrete distribution.</summary>
public static string GetTypeName(this IProbabilityDistribution distribution)
{
    return distribution.GetType().Name;
}
/// <summary>Interface equality: equal only to another <see cref="CombinedDistanceDistribution"/>.</summary>
public bool Equals(IProbabilityDistribution<Position1>? other)
{
    if (other is CombinedDistanceDistribution distribution)
    {
        // Defer to the strongly-typed overload.
        return Equals(distribution);
    }
    return false;
}
/// <summary>Returns the names of the distribution's parameters.</summary>
public static string[] GetParameterNames(this IProbabilityDistribution distribution)
{
    // The parameter names are the keys of the name -> value map.
    var pairs = distribution.GetParameterValuePairs();
    return pairs.Keys.ToArray();
}
/// <summary>Interface equality: equal only to another <see cref="UniformPMF{T}"/>.</summary>
public bool Equals(IProbabilityDistribution<T>? other)
{
    if (other is UniformPMF<T> pmf)
    {
        // Defer to the strongly-typed overload.
        return Equals(pmf);
    }
    return false;
}
/// <summary>
/// Tries to get the probability distribution for <paramref name="condition"/>;
/// returns <c>false</c> (with <paramref name="probDist"/> defaulted) when the
/// condition has not been observed.
/// </summary>
public bool TryGetProbabilityDistribution(TCondition condition, out IProbabilityDistribution<TSample> probDist) =>
    _probDists.TryGetValue(condition, out probDist);
/// <summary>Creates a backlog whose sizes are drawn from <paramref name="distribution"/>.</summary>
public RandomSizeInfiniteBacklog(IProbabilityDistribution distribution) => _distribution = distribution;
/// <summary>Interface equality: equal only to another <see cref="HemisphericalDiffuse"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is HemisphericalDiffuse diffuse)
    {
        // Defer to the strongly-typed overload.
        return Equals(diffuse);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="ExponentialInterval"/>.</summary>
public bool Equals(IProbabilityDistribution<Position1>? other)
{
    if (other is ExponentialInterval interval)
    {
        // Defer to the strongly-typed overload.
        return Equals(interval);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="SpecularReflection"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is SpecularReflection reflection)
    {
        // Defer to the strongly-typed overload.
        return Equals(reflection);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="SurfaceEllipsoid"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is SurfaceEllipsoid ellipsoid)
    {
        // Defer to the strongly-typed overload.
        return Equals(ellipsoid);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="DeltaDistance"/>.</summary>
public bool Equals(IProbabilityDistribution<Position1>? other)
{
    if (other is DeltaDistance delta)
    {
        // Defer to the strongly-typed overload.
        return Equals(delta);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="RecursiveDistanceDistribution"/>.</summary>
public bool Equals(IProbabilityDistribution<Position1>? other)
{
    if (other is RecursiveDistanceDistribution recursive)
    {
        // Defer to the strongly-typed overload.
        return Equals(recursive);
    }
    return false;
}
/// <summary>Returns how many parameters the distribution exposes.</summary>
public static int GetNParameters(this IProbabilityDistribution distribution)
{
    return distribution.GetParameterValuePairs().Count;
}
/// <summary>Interface equality: equal only to another <see cref="HemisphericalCosPower"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is HemisphericalCosPower cosPower)
    {
        // Defer to the strongly-typed overload.
        return Equals(cosPower);
    }
    return false;
}
/// <summary>
/// Wraps <paramref name="distribution"/> as a robust parameter, forwarding its
/// underlying data to the base parameter.
/// </summary>
public RobustParameter(IProbabilityDistribution distribution) : base(distribution.Data) => Distribution = distribution;
/// <summary>
/// Propagates uncertainty from <paramref name="inputDistributions"/> through
/// <paramref name="innerWorkflow"/> to <paramref name="outputDistributions"/>
/// using univariate reduced quadrature (URQ): one center-point evaluation plus
/// a forward and backward stencil evaluation per input dimension. Each output
/// distribution is updated with the estimated mean and variance (skewness 0,
/// kurtosis 3, i.e. a Gaussian assumption on the outputs).
/// </summary>
public void Propagate(List<IProbabilityDistribution> inputDistributions, List<IProbabilityDistribution> outputDistributions, WorkflowComponent innerWorkflow)
{
    filer = (createFile) ? new CSVFiler(path) : null;
    try
    {
        int NInputDistributions = inputDistributions.Count;
        int Nout = outputDistributions.Count;
        // Start all inputs at their mean (center point of the stencil).
        foreach (IProbabilityDistribution dist in inputDistributions)
        {
            dist.Data.Value = dist.Mean;
        }
        // Defining the "deltas" for the computation of the propagation stencils:
        // the stencil offsets are derived from each input's skewness and kurtosis
        // (in standard-deviation units); h_minus is negative for symmetric inputs.
        double[] h_plus = new double[NInputDistributions];
        double[] h_minus = new double[NInputDistributions];
        for (int j = 0; j < NInputDistributions; j++)
        {
            IProbabilityDistribution dist = inputDistributions[j];
            h_plus[j] = dist.Skewness / 2 + Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
            h_minus[j] = dist.Skewness / 2 - Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
        }
        // Setup of the URQ weights:
        double W0 = 1;
        double[] Wp = new double[NInputDistributions];
        double[] Wp_plus = new double[NInputDistributions];
        double[] Wp_minus = new double[NInputDistributions];
        double[] Wp_plusminus = new double[NInputDistributions];
        for (int i = 0; i < NInputDistributions; i++)
        {
            W0 += 1.0 / (h_plus[i] * h_minus[i]);
            double delta = h_plus[i] - h_minus[i];
            Wp[i] = 1.0 / delta;
            Wp_plus[i] = (Math.Pow(h_plus[i], 2) - h_plus[i] * h_minus[i] - 1) / (Math.Pow(delta, 2));
            Wp_minus[i] = (Math.Pow(h_minus[i], 2) - h_plus[i] * h_minus[i] - 1) / (Math.Pow(delta, 2));
            Wp_plusminus[i] = 2 / (Math.Pow(delta, 2));
        }
        // Center point evaluation
        ExecutePoint(innerWorkflow, Nout, out double[] output0);
        double[] means = new double[Nout];
        double[] variances = new double[Nout];
        for (int i = 0; i < Nout; i++)
        {
            means[i] = W0 * output0[i];
        }
        // Stencil evaluation:
        for (int p = 0; p < NInputDistributions; p++)
        {
            IProbabilityDistribution dist = inputDistributions[p];
            // Dimension p, forward stencil point evaluation
            dist.Data.Value = dist.Mean + h_plus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_plus);
            // Dimension p, backward stencil point evaluation
            dist.Data.Value = dist.Mean + h_minus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_minus);
            // Estimation of the mean and variance for all the model outputs:
            for (int j = 0; j < Nout; j++)
            {
                means[j] += Wp[p] * ((output_plus[j] / h_plus[p]) - (output_minus[j] / h_minus[p]));
                double deltap = (output_plus[j] - output0[j]) / h_plus[p];
                double deltam = (output_minus[j] - output0[j]) / h_minus[p];
                variances[j] += Wp_plus[p] * deltap * deltap + Wp_minus[p] * deltam * deltam + Wp_plusminus[p] * deltap * deltam;
            }
            // Recover original value
            dist.Data.Value = dist.Mean;
        }
        // Push estimated moments into the outputs: [mean, variance, skewness, kurtosis].
        for (int i = 0; i < outputDistributions.Count; i++)
        {
            outputDistributions[i].Update(new double[] { means[i], variances[i], 0, 3 });
        }
    }
    finally
    {
        // Ensure the CSV filer (if any) is released even when a stencil evaluation throws.
        filer?.Dispose();
    }
}
/// <summary>Interface equality: equal only to another <see cref="SphericalUniform"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is SphericalUniform uniform)
    {
        // Defer to the strongly-typed overload.
        return Equals(uniform);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="UniformInterval"/>.</summary>
public bool Equals(IProbabilityDistribution<Position1>? other)
{
    if (other is UniformInterval interval)
    {
        // Defer to the strongly-typed overload.
        return Equals(interval);
    }
    return false;
}
/// <summary>Interface equality: equal only to another <see cref="HemisphericalUniform"/>.</summary>
public bool Equals(IProbabilityDistribution<Normal3>? other)
{
    if (other is HemisphericalUniform uniform)
    {
        // Defer to the strongly-typed overload.
        return Equals(uniform);
    }
    return false;
}
/// <summary>
/// Builds a name -> value map of the distribution's parameters from its
/// attribute-marked properties. Throws <see cref="System.ArgumentException"/>
/// if two parameters share a name, and <see cref="System.InvalidCastException"/>
/// if a parameter property is not a <see cref="double"/>.
/// </summary>
public static Dictionary<string, double> GetParameterValuePairs(this IProbabilityDistribution distribution)
{
    var pairs = new Dictionary<string, double>();
    foreach (PropertyInfo parameter in distribution.GetParameterObjects())
    {
        pairs.Add(parameter.Name, (double)parameter.GetValue(distribution));
    }
    return pairs;
}
/// <summary>
/// Enumerates the distribution's public properties that are decorated with
/// <c>OptionAttribute</c>, i.e. the properties that represent its parameters.
/// </summary>
public static IEnumerable<PropertyInfo> GetParameterObjects(this IProbabilityDistribution distribution)
{
    return distribution.GetType()
                       .GetProperties()
                       .Where(p => Attribute.GetCustomAttribute(p, typeof(OptionAttribute)) is OptionAttribute);
}
/// <summary> Sample the <paramref name="scene"/> along <paramref name="ray"/>, returning the light gathered </summary>
/// <param name="scene">The <see cref="IScene"/> to sample</param>
/// <param name="ray">The <see cref="IRay"/> to trace through the <paramref name="scene"/></param>
/// <param name="spectrum">The throughput <see cref="ISpectrum"/> accumulated so far</param>
/// <param name="recursionDepth">The current depth of recursion</param>
/// <returns>The direct plus indirect illumination found along the ray, weighted by the Russian-roulette throughput</returns>
public ISpectrum Sample(IScene scene, IRay ray, ISpectrum spectrum, int recursionDepth)
{
    // Nothing can be gathered once the throughput has gone fully black.
    if (spectrum.Equals(ISpectrum.Black))
    {
        return(ISpectrum.Black);
    }
    // Russian Roulette: beyond the guaranteed depth, terminate the path with
    // probability RussianRouletteChance and compensate surviving paths' weight.
    float throughput = 1f;
    if (recursionDepth >= GauranteedRecursionDepth)
    {
        if (Utils.ThreadRandom.NextSingle() < RussianRouletteChance)
        {
            return(ISpectrum.Black);
        }
        else
        {
            throughput = 1f / RussianRouletteChance;
        }
    }
    // Sample Distance: trace the scene and draw a scattering distance.
    IDistanceQuery? distanceQuery = scene.Trace(ray, spectrum);
    if (distanceQuery is null)
    {
        return(ISpectrum.Black);
    }
    Position1 distance = distanceQuery.DistanceDistribution.Sample(Utils.ThreadRandom);
    if (distance == Position1.PositiveInfinity)
    {
        // The ray escaped the scene without interacting.
        return(ISpectrum.Black);
    }
    // Sample Primitive: pick which primitive was hit at the sampled distance.
    IProbabilityDistribution<IPrimitive>? primitives = distanceQuery.TryGetPrimitives(distance);
    if (primitives is null)
    {
        throw new InvalidOperationException("Distance was sampled but no primitive was found");
    }
    IPrimitive primitive = primitives.Sample(Utils.ThreadRandom);
    // Get Intersection Position
    Position3 position = primitive.Material.DensityProfile.GetPosition(ray, distance, primitive.Shape);
    // Sample Material Orientation (surface/medium normal at the interaction point).
    IProbabilityDistribution<Normal3>? orientations = primitive.Material.OrientationProfile.GetOrientations(position, ray.Direction, primitive.Shape);
    if (orientations is null)
    {
        return(ISpectrum.Black);
    }
    Normal3 orientation = orientations.Sample(Utils.ThreadRandom);
    // Get Direct Illumination: emitted light from the hit material, if any.
    ISpectrum directIllumination = RGBColors.Black;
    if (primitive.Material.EmittanceProfile.IsEmitting)
    {
        directIllumination = primitive.Material.EmittanceProfile.GetEmittance(position, orientation, -ray.Direction);
    }
    // Get Indirect Illumination: recurse unless the material absorbs everything.
    ISpectrum indirectIllumination = RGBColors.Black;
    if (!primitive.Material.AbsorptionProfile.IsBlackBody)
    {
        // Sample Direction for the continuation ray from the reflection profile.
        IProbabilityDistribution<Normal3> directions = primitive.Material.ReflectionProfile.GetDirections(ray.Direction, position, orientation, spectrum);
        Normal3 direction = directions.Sample(Utils.ThreadRandom);
        // Get Albedo
        ISpectrum albedo = primitive.Material.AbsorptionProfile.GetAlbedo(position, orientation, -direction);
        // Get Ray
        IRay raySample = primitive.Material.DensityProfile.GetRay(position, orientation, direction);
        // Sample Indirect Illumination recursively, attenuating the throughput by the albedo.
        indirectIllumination = albedo * Sample(scene, raySample, spectrum * albedo, recursionDepth + 1);
    }
    // Light Throughput Calculation: apply the Russian-roulette weight.
    return((directIllumination + indirectIllumination) * throughput);
}