// Evaluates a Torrance–Sparrow style microfacet BRDF for the outgoing (wo)
// and incoming (wi) directions around surface normal N, writing the result
// into fs.
private void EvalBrdf(ref Vector wo, ref Vector wi, ref Normal N, out RgbSpectrum fs)
{
    float cosThetaO = Vector.AbsDot(ref N, ref wo); //AbsCosTheta(wo);
    float cosThetaI = Vector.AbsDot(ref N, ref wi); //AbsCosTheta(wi);
    // Grazing directions carry no energy (and would divide by zero below).
    if (Math.Abs(cosThetaI - 0f) < Epsilon || Math.Abs(cosThetaO - 0f) < Epsilon)
    {
        fs = new RgbSpectrum(0f, 0f, 0f);
        return; //return new RgbSpectrum(1f, 0f, 0f);
    }
    // Half-vector between the incident and outgoing directions.
    Vector wh = wi + wo;
    if (wh.IsZero())
    {
        fs = new RgbSpectrum(0f);
        return;
    }
    //return new RgbSpectrum(1f, 0f, 0f);
    wh = wh.Normalize();
    float cosThetaH = Vector.Dot(ref wi, ref wh);
    var F = fresnel.Evaluate(cosThetaH);
    // D * G * F / (4 cosThetaI cosThetaO), scaled by the base reflectance R0.
    fs = (R0 * distr.D(ref wh) * G(ref N, ref wo, ref wi, ref wh) * F / (4f * cosThetaI * cosThetaO));
}
// Update is called once per frame
void Update()
{
    // Fade the material from _startColor to _endColor over Duration seconds.
    if (!_fadeInDone)
    {
        _lerp += Time.deltaTime / Duration;
        _renderer.material.color = Color.Lerp(_startColor, _endColor, _lerp);
        if (_lerp >= 1) { _fadeInDone = true; }
    }
    // Fade back out, then destroy the object.
    // NOTE(review): _lerp is shared with the fade-in above and is never reset
    // when _fadeOut becomes true; if the fade-in already finished (_lerp >= 1)
    // the fade-out destroys the object on its first frame — confirm intended.
    if (_fadeOut)
    {
        _lerp += Time.deltaTime / Duration;
        _renderer.material.color = Color.Lerp(_endColor, _startColor, _lerp);
        if (_lerp >= 1) { Destroy(gameObject); }
    }
    // Find the stored normal whose direction is closest to the local up axis.
    Vector3 up = _transform.InverseTransformDirection(Vector3.up);
    Closest = new Normal() { Value = -1, Direction = new Vector3(99, 99, 99) }; // sentinel
    float angle = 360;
    foreach (var normal in _normals)
    {
        var a = Vector3.Angle(normal.Direction, up);
        if (a < angle)
        {
            Closest = normal;
            angle = a;
        }
    }
}
// Cosine-weighted hemisphere sampling of a diffuse BRDF around the geometric
// normal N; the resulting direction is tested against the shading normal and
// rejected (pdf = 0) when it falls below the shading surface.
public override void Sample_f(ref Vector wo, ref Normal N, ref Normal shadeN, ref RgbSpectrum in_f, float u0, float u1, float u2, ref SurfaceTextureData surfaceData, out BsdfSampleData result)
{
    result.Lambda = 0f;
    Vector dir = MC.CosineSampleHemisphere(u0, u1);
    result.Pdf = dir.z * MathLab.INVPI; // cos(theta)/pi for cosine-weighted sampling
    result.Type = BrdfType.Diffuse;
    // Build an orthonormal basis around N and rotate the local sample into it.
    Vector v1, v2;
    Normal n = N;
    Vector.CoordinateSystem(ref n, out v1, out v2);
    dir = new Vector(
        v1.x * dir.x + v2.x * dir.y + n.x * dir.z,
        v1.y * dir.x + v2.y * dir.y + n.y * dir.z,
        v1.z * dir.x + v2.z * dir.y + n.z * dir.z);
    var wi = dir;
    float dp = (Normal.Dot(ref shadeN, ref wi));
    // 0.01 cutoff (rather than 0) suppresses fireflies at grazing angles.
    if (dp <= 0.01f)
    {
        result.Pdf = 0f;
        result.F = new RgbSpectrum();
    }
    else
    {
        result.Pdf /= dp;
        result.F = surfaceData.Diffuse*MathLab.INVPI;
    }
    result.Wi = wi;
}
/// <summary>
/// Evaluates the BRDF for world-space directions lwo/lwi by first moving both
/// into the local shading frame built around N.
/// </summary>
public override void f(ref Vector lwo, ref Vector lwi, ref Normal N, ref RgbSpectrum in_fs, out RgbSpectrum fs)
{
    CreateFrame(ref N);

    var localIn = WorldToLocal(ref lwi);
    var localOut = WorldToLocal(ref lwo);

    EvalBrdf(ref localOut, ref localIn, ref N, out fs);
}
// Builds the BSDF for the surface hit by pathRay: resolves the mesh from the
// triangle index, interpolates UVs and the shading normal, and delegates BSDF
// construction to the mesh material. Throws when the triangle maps to no mesh.
public BaseBxdf GetBsdf(ref RayData pathRay, ref RayHit hit, ref MediumInfo med, bool fromLight, float u0)
{
    var currentTriangleIndex = (int) hit.Index;
    bool isLight = scene.IsLight(currentTriangleIndex);
    var mesh = scene.GetMeshByTriangleIndex(currentTriangleIndex);
    if (mesh == null) //|| mesh.MeshName.Equals("COL254_01", StringComparison.InvariantCultureIgnoreCase))
    {
        //ii.Color = new RgbSpectrum(1f);
        //Debugger.Break();
        throw new ApplicationException("Invalid triangle index " + currentTriangleIndex + " Mesh not found");
    }
    UV TexCoords;
    Normal normal = new Normal(), shadeN = new Normal();
    mesh.InterpolateTriUV(currentTriangleIndex, hit.U, hit.V, out TexCoords);
    //normal = -scene.Triangles[currentTriangleIndex].ComputeNormal(scene.Vertices).Normalize();
    mesh.InterpolateTriangleNormal((int)hit.Index, hit.U, hit.V, ref normal);
    //normal = -normal;
    // Flip the shading normal so it always faces against the incoming ray.
    shadeN = (Normal.Dot(ref pathRay.Dir, ref normal) > 0f) ? -normal : normal;
    var bsdf = mesh.Material.GetBsdf(ref pathRay, ref hit, ref normal, ref shadeN, ref TexCoords, ref med, fromLight,u0);
    bsdf.SetLight(isLight);
    return bsdf;
}
// Samples shadow rays toward the scene lights at the given surface point.
// UniformOneLight draws ShadowRaysPerSample lights from the light distribution
// (reusing previously allocated samples in result); UniformAllLights evaluates
// every light and keeps only samples with a positive pdf.
public void EvaluateShadow(ref Point point, ref Normal n, float u0, float u1, float u2, ref LightSample[] result, float ltProb = 0f)
{
    switch (scene.LightSamplingStrategy)
    {
        case LightSamplingStrategy.UniformOneLight:
            if (result == null)
                result = new LightSample[scene.ShadowRaysPerSample];
            for (int i = 0; i < scene.ShadowRaysPerSample; i++)
            {
                // ltProb <= 0 means "no caller-supplied value": draw a fresh random number.
                int currentLightIndex = scene.SampleLights(ltProb <= 0f ? rnd.NextFloat() : ltProb);
                var light = scene.Lights[currentLightIndex];
                var ls = result[i]??new LightSample();
                ls.LightIndex = currentLightIndex;
                light.EvaluateShadow(ref point, ref n, u0, u1, u2, ref ls);
                //ls.Pdf *= (scene.ShadowRaysPerSample);
                result[i] = ls;
            }
            break;
        case LightSamplingStrategy.UniformAllLights:
            {
                var sm = new List<LightSample>();
                foreach (var light in scene.Lights)
                {
                    var ls = new LightSample();
                    light.EvaluateShadow(ref point, ref n, u0, u1, u2, ref ls);
                    if (ls.Pdf > 0f)
                        sm.Add(ls);
                }
                result = sm.ToArray();
            }
            break;
    }
}
/// <summary>
/// Evaluates the BRDF for world-space directions WO/WI by transforming both
/// into the local shading frame built around N.
/// </summary>
public override void f(ref Vector WO, ref Vector WI, ref Normal N, ref RgbSpectrum in_fs, out RgbSpectrum fs)
{
    CreateFrame(ref N);

    var localOut = WorldToLocal(ref WO);
    var localIn = WorldToLocal(ref WI);

    EvalBrdf(out fs, ref localOut, ref localIn);
}
/// <summary>
/// Perfect mirror sampling: reflects the incoming direction about the normal
/// with unit weight (no attenuation).
/// </summary>
public override void Sample(ref Vector wi, ref Normal n, float u0, float u1, float lambda, out Vector dir, out float f)
{
    // FIX: removed the dead local `nl` — n.ToVec() was computed but never used.
    dir = Geometry.Reflect(ref wi, ref n); //new Vector(wi - nl * 2f * nl&wi);
    f = 1f;
}
// Samples a direction from a Phong-style glossy lobe centered on the mirror
// reflection of wo about shadN, driven by the random pair (u0, u1).
public static Vector GlossyReflection(Vector wo, float exponent, Normal shadN, float u0, float u1)
{
    var shadeN = shadN;
    // Spherical coordinates of the lobe sample.
    float phi = 2f * MathLab.M_PI * u0;
    float cosTheta = (float)MathLab.Pow(1f - u1, exponent);
    float sinTheta = MathLab.Sqrt(1f - cosTheta * cosTheta);
    float x = (float)Math.Cos(phi) * sinTheta;
    float y = (float)Math.Sin(phi) * sinTheta;
    float z = cosTheta;
    // Mirror reflection of the outgoing direction: w = d - 2(N.d)N, d = -wo.
    Vector dir = -wo;
    float dp = Vector.Dot(ref shadeN, ref dir);
    Vector w = dir - (2f * dp) * shadeN.ToVec();
    // Build an orthonormal basis (u, v, w) around the reflection direction;
    // the helper-axis choice avoids a near-parallel cross product.
    Vector u;
    if (Math.Abs(shadeN.x) > .1f)
    {
        var a = new Vector(0f, 1f, 0f);
        u = Vector.Cross(ref a, ref w);
    }
    else
    {
        Vector a = new Vector(1f, 0f, 0f);
        u = Vector.Cross(ref a, ref w);
    }
    u = u.Normalize();
    Vector v = Vector.Cross(ref w, ref u);
    // Rotate the lobe sample into the reflection frame.
    return x * u + y * v + z * w;
}
// Diffuse transmission: cosine-samples the hemisphere around the flipped
// normal (-N), i.e. into the surface.
public override void Sample_f(ref Vector wo, ref Normal N, ref Normal shadeN, ref RgbSpectrum in_f, float u0, float u1, float u2, ref SurfaceTextureData surfaceData, out BsdfSampleData result)
{
    result.Lambda = 0f;
    Vector dir = MC.CosineSampleHemisphere(u0, u1);
    result.Pdf = dir.z * MathLab.INVPI;
    result.Type = BrdfType.Diffuse | BrdfType.Refractive;
    Vector v1, v2;
    Normal n = -N; // sample below the surface
    Vector.CoordinateSystem(ref n, out v1, out v2);
    dir = new Vector(
        v1.x * dir.x + v2.x * dir.y + n.x * dir.z,
        v1.y * dir.x + v2.y * dir.y + n.y * dir.z,
        v1.z * dir.x + v2.z * dir.y + n.z * dir.z);
    var wi = dir;
    float dp = (Normal.AbsDot(ref n, ref wi));
    // Using 0.01 instead of 0.0 to cut down fireflies
    // NOTE(review): the threshold actually used is 0.0001 and the pdf is
    // damped by 1000 rather than zeroed — confirm the comment and the
    // constants are both intended.
    if (dp <= 0.0001f)
    {
        result.Pdf /= 1000f;
        // return new RgbSpectrum(0f);
    }
    else
    {
        result.Pdf /= dp;
    }
    result.F= KdOverPI;
    result.Wi = wi;
}
// Glass material: Russian roulette chooses between specular reflection and
// straight-through transmission. Hits where the geometric and shading normals
// disagree ("internal") always bounce straight back.
public override void Sample_f(ref Vector wo, ref Normal N, ref Normal shadeN, ref RgbSpectrum in_f, float u0, float u1, float u2, ref SurfaceTextureData surfaceData, out BsdfSampleData result)
{
    EvalParams(ref surfaceData);
    bool into = (Normal.Dot(ref N, ref shadeN) > 0f);
    result = new BsdfSampleData();
    result.Type = this.Type;
    if (!into)
    {
        // No internal reflections
        result.Wi = (-wo);
        result.Pdf = 1f;
        result.Type = reflectionSpecularBounce ? BrdfType.Specular : BrdfType.Glossy;
        result.F = Krefl;
    }
    else
    {
        // RR to choose if reflect the ray or go trough the glass
        float comp = u0 * totFilter;
        if (comp > transFilter)
        {
            // Mirror reflection about N.
            // NOTE(review): this reflection branch returns Krefrct while the
            // branch above returns Krefl — looks like it should be Krefl here;
            // confirm against the material definition.
            Vector mwo = -wo;
            result.Wi = mwo - (2f * Vector.Dot(ref N, ref mwo)) * N.ToVec();
            result.Pdf = reflPdf;
            result.Type = reflectionSpecularBounce ? BrdfType.Specular : BrdfType.Glossy;
            result.F = Krefrct;
        }
        else
        {
            // Transmission continues straight through (no refraction bend).
            result.Wi = -wo;
            result.Pdf = transPdf;
            result.F = Krefrct;
            result.Type = transmitionSpecularBounce ? BrdfType.Specular : BrdfType.Glossy;
        }
    }
}
// Spectral diffuse sampling: cosine-weighted hemisphere around the geometric
// normal, plus a single-wavelength (lambda) diffuse response from Kd.
public override void Sample_f(ref Vector wo, ref Normal geoNormal, ref Normal shadeN, float lambda, ref SurfaceTextureData surfaceData, float u0, float u1, float u2, out BsdfSampleData result)
{
    EvalParams(ref surfaceData, lambda);
    Vector dir = MC.CosineSampleHemisphere(u0, u1);
    result = new BsdfSampleData { Pdf = dir.z*MathLab.INVPI, Type = BrdfType.Diffuse};
    // TODO Lambda weight
    // Build an orthonormal basis around the geometric normal and rotate the
    // local sample into it.
    Vector v1, v2;
    Normal n = geoNormal;
    Vector.CoordinateSystem(ref n, out v1, out v2);
    dir = new Vector(
        v1.x * dir.x + v2.x * dir.y + n.x * dir.z,
        v1.y * dir.x + v2.y * dir.y + n.y * dir.z,
        v1.z * dir.x + v2.z * dir.y + n.z * dir.z);
    var wi = dir;
    float dp = (Normal.Dot(ref shadeN, ref wi));
    // Using 0.01 instead of 0.0 to cut down fireflies
    if (dp <= 0.0001f)
    {
        result.Pdf = 0f;
        result.F = new RgbSpectrum();
        result.Lambda = 0f;
    }
    else
    {
        result.Pdf /= dp;
        result.F = surfaceData.Diffuse * MathLab.INVPI;
        result.Lambda = Kd.Sample(lambda)*MathLab.INVPI;
    }
    result.Wi = wi;
}
float mReflectCoeff; //!< Fresnel reflection coefficient (for glass)

//! Constructs a BSDF at the surface hit by aRay; all work is delegated to Setup.
Bsdf(
    Ray aRay,
    ref Normal normal,
    Material Material,
    SceneGeometryInfo aScene)
{
    Setup(aRay, ref normal, Material, aScene);
}
/// <summary>
/// Torrance–Sparrow geometric attenuation (shadowing/masking) term for the
/// microfacet BRDF: min(1, 2(N.h)(N.o)/(o.h), 2(N.h)(N.i)/(o.h)).
/// </summary>
public static float G(ref Normal N, ref Vector wo, ref Vector wi, ref Vector wh)
{
    // FIX: pass wo and wi by ref consistently with the other AbsDot call
    // sites in this file (they were passed by value here).
    float NdotWh = Vector.AbsDot(ref N, ref wh);
    float NdotWo = Vector.AbsDot(ref N, ref wo);
    float NdotWi = Vector.AbsDot(ref N, ref wi);
    float WOdotWh = Vector.AbsDot(ref wo, ref wh);
    return Math.Min(1f, Math.Min(2f * NdotWh * NdotWo / WOdotWh,
                                 2f * NdotWh * NdotWi / WOdotWh));
}
/// <summary>
/// Samples the microfacet distribution for an incoming direction; directions
/// falling in the opposite hemisphere from wo carry no energy.
/// </summary>
public override RgbSpectrum Sample_f(ref Vector wo, out Vector wi, ref Normal N, ref Normal shadeN, float u0, float u1, float u2, out float pdf, out bool specularBounce)
{
    specularBounce = false;
    distr.Sample_f(ref wo, out wi, u1, u2, out pdf);

    return SameHemisphere(ref wo, ref wi)
        ? f(ref wo, ref wi, ref N)
        : RgbSpectrum.ZeroSpectrum();
}
/// <summary>Verifies that Normal.Tarifa round-trips an assigned fee value.</summary>
public void TaxaNormalContaTest()
{
    Normal target = new Normal();
    // FIX: use a decimal literal instead of decimal.Parse("10.00") — the parse
    // is culture-sensitive (CA1305; "." is a group separator in some cultures)
    // and needlessly runs at test time.
    decimal esperado = 10.00m;
    decimal retornado;

    target.Tarifa = esperado;
    retornado = target.Tarifa;

    Assert.AreEqual(esperado, retornado);
}
/// <summary>
/// Perfect mirror: reflects the incoming direction about the shading normal
/// with unit pdf and constant reflectance Kr.
/// </summary>
public override void Sample_f(ref Vector wo, ref Normal N, ref Normal shadeN, ref RgbSpectrum in_f, float u0, float u1, float u2, ref SurfaceTextureData surfaceData, out BsdfSampleData result)
{
    Vector incoming = -wo;
    float cosine = Normal.Dot(ref shadeN, ref incoming);

    result.Lambda = 0f;
    result.Wi = incoming - (2f * cosine) * shadeN.ToVec();
    result.Pdf = 1f;
    result.F = Kr;
    result.Type = reflectionSpecularBounce ? BrdfType.Specular : BrdfType.Glossy;
}
/// <summary>
/// Diffuse term of a Fresnel-weighted layered BRDF: the Schlick approximation
/// gives the specular reflectance Re, and the diffuse lobe is scaled by the
/// remaining (1 - Re) energy normalized by the selection probability.
/// </summary>
public override void f(ref Vector wo, ref Vector wi, ref Normal N, ref RgbSpectrum in_fs, out RgbSpectrum fs)
{
    float oneMinusCos = 1f - Vector.Dot(ref wo, ref N);
    float pow5 = oneMinusCos * oneMinusCos * oneMinusCos * oneMinusCos * oneMinusCos;
    float reflectance = R0 + (1f - R0) * pow5;
    float selectProb = .25f + .5f * reflectance;

    fs = KdiffOverPI * (1f - reflectance) / (1f - selectProb);
}
/// <summary>Verifies that Normal.NumeroDaconta round-trips an assigned account number.</summary>
public void ContaNormalContaTest()
{
    Normal target = new Normal();
    // FIX: use the literal 30 instead of int.Parse("030") — parsing a constant
    // string at runtime is needless and culture-dependent (CA1305).
    int esperado = 30;
    int retornado;

    target.NumeroDaconta = esperado;
    retornado = target.NumeroDaconta;

    Assert.AreEqual(esperado, retornado);
}
// Cosine-weighted diffuse sampling (smallpt style): builds an orthonormal
// basis around the normal facing away from the incoming direction and returns
// a unit direction; the weight f is 1 because the cosine factor cancels
// against the pdf.
public override void Sample(ref Vector wi, ref Normal n, float u0, float u1, float lambda, out Vector dir, out float f)
{
    float r1 = MathLab.M_2PI * u0, r2 = u1, r2s = MathLab.Sqrt(r2);
    // Orient the frame against the incoming ray direction.
    var w = (Vector.Dot(ref n, ref wi) < 0 ? n : n * -1).ToVec();
    // '^' is the cross product; pick a helper axis not parallel to w.
    var u = ((Math.Abs(w.x) > 0.1f ? new Vector(0, 1, 0) : new Vector(1, 0, 0)) ^ w).Normalize();
    var v = w ^ u;
    dir = (u * MathLab.Cos(r1) * r2s + v * MathLab.Sin(r1) * r2s + w * MathLab.Sqrt(1 - r2)).Normalize();
    f = 1f;
}
// Samples the aggregate BSDF (pbrt-style): picks one matching BxDF component
// via u0, samples it, then accumulates the pdfs (and, for multi-component
// non-specular cases, the f values) of all the other matching components.
public void Sample(ref Vector wo, ref Normal N, ref Normal shadeN, float u0, float u1, float u2,float u3, out BsdfSample sample, BxDFType type = BxDFType.AllTypes)
{
    sample = new BsdfSample();
    var fl = type;
    int matchingComps = NumComponents(fl);
    if (matchingComps == 0)
    {
        // NOTE(review): there is no early return here, so the code below
        // dereferences a null bxdf when nothing matches — confirm this path
        // is unreachable in practice.
        sample.Pdf = 0f;
        sample.Wi = Vector.Zero;
        sample.Spectrum = new RgbSpectrum(0f).ToArray();
    }
    // Choose the which-th matching component.
    int which = (int)Math.Min((u0 * matchingComps), matchingComps - 1);
    BxDFBase bxdf = null;
    int count = which;
    for (int i = 0; i < Bxdfs.Length; ++i)
        if (Bxdfs[i].Type.HasFlag(fl))
            if (count-- == 0)
            {
                bxdf = Bxdfs[i];
                break;
            }
    Vector wi = new Vector();
    var pdf = 0f;
    bxdf.Sample(ref wo, ref N, ref shadeN, u1, u2, u3, out sample, type);
    wi = sample.Wi;
    pdf = sample.Pdf;
    var sampled = bxdf.Type;
    // Degenerate pdf: kill the sample's contribution.
    if (pdf > 0f && pdf < MathLab.Epsilon)
        sample.Spectrum = new float[3]{0f,0f,0f};
    //if (sampledTy != null) sampledType = bxdf.Type;
    //wiW = LocalToWorld(wi);
    // Accumulate pdfs from the other matching (non-specular) components.
    if ((!bxdf.Type.HasFlag(BxDFType.Specular)) && matchingComps > 1)
    {
        for (int i = 0; i < Bxdfs.Length; ++i)
        {
            if (Bxdfs[i] != bxdf && (Bxdfs[i].Type.HasFlag(fl)))
                pdf += Bxdfs[i].Pdf(ref wo, ref wi, fl);
        }
    }
    if (matchingComps > 1)
        pdf /= matchingComps;
    // Compute value of BSDF for sampled direction
    // NOTE(review): pbrt re-evaluates the other components only for
    // NON-specular samples; this test (cf. the commented-out line below)
    // looks inverted — confirm against the reference implementation.
    if (bxdf.Type.HasFlag(BxDFType.Specular))
    //if ((bxdf.Type & BxDFType.BSDF_SPECULAR) == 0)
    {
        var f = RgbSpectrum.ZeroSpectrum();
        if ((Vector.Dot(ref N,ref wi)) * Vector.Dot(ref N, ref wo) > 0f) // ignore BTDFs
        {
            fl = fl & ~BxDFType.Transmission;
        }
        else // ignore BRDFs
            fl = (fl & ~BxDFType.Reflection);
        for (int i = 0; i < Bxdfs.Length; ++i)
            if ((Bxdfs[i].Type.HasFlag(fl)))
                f += new RgbSpectrum(Bxdfs[i].Eval(ref wo, ref wi, ref N));
        sample.Spectrum = (f/pdf).ToArray();
    }
}
/// <summary>
/// Uploads the projection, modelview and normal matrices to the shader
/// program's MVPN uniform slots.
/// </summary>
public static void ApplyMVPNParameter(OpenGL gl, ExtShaderProgram esp, Projection pr, ModelView mv, Normal nrml)
{
    var program = esp.Program;
    var ids = esp.Parameters as IMVPNParameters;

    program.SetUniformMatrix4(gl, ids.ProjectionMatrixId, pr.ToArray());
    program.SetUniformMatrix4(gl, ids.ModelviewMatrixId, mv.ToArray());
    program.SetUniformMatrix3(gl, ids.NormalMatrixId, nrml.ToArray());
}
/// <summary>Smoke test: a MetropolisSampler over a standard normal target can produce a sample.</summary>
public void SampleTest()
{
    var target = new Normal(0.0, 1.0);
    var generator = new MersenneTwister();
    var sampler = new MetropolisSampler<double>(0.2, target.Density, x => Normal.Sample(generator, x, 0.1), 10);
    sampler.RandomSource = generator;

    double sample = sampler.Sample();
}
/// <summary>Verifies that statusDaConta round-trips and can be fed to setBloqueado.</summary>
public void StatusNormalContaTest()
{
    var conta = new Normal();
    const bool statusEsperado = true;

    conta.statusDaConta = statusEsperado;
    bool statusRetornado = conta.statusDaConta;
    conta.setBloqueado(statusRetornado);

    Assert.AreEqual(statusEsperado, statusRetornado);
}
/// <summary>Verifies that the Nome (client) property round-trips the assigned Cliente.</summary>
public void ClienteNormalContaTest()
{
    var conta = new Normal();
    var clienteEsperado = new Cliente { Nome = "Glebson" };

    conta.Nome = clienteEsperado;
    Cliente clienteRetornado = conta.Nome;

    Assert.AreEqual(clienteEsperado, clienteRetornado);
}
// Default BSDF sampling: cosine-samples the local hemisphere, flips the
// sample onto wo's side, evaluates pdf and f, and returns wi in world space.
public virtual void Sample_f(ref Vector wo, ref Normal N, ref Normal shadeN, ref SurfaceTextureData surfaceData, float u0, float u1, float u2, out BsdfSampleData result)
{
    result = new BsdfSampleData() {Type = this.Type};
    CreateFrame(ref N);
    var wi = MC.CosineSampleHemisphere(u1, u2);
    // Keep the sampled direction in the same hemisphere as wo.
    if (wo.z < 0f)
        wi.z *= -1f;
    this.EvalTexture(ref surfaceData);
    result.Pdf = Pdf(ref wo, ref wi, BxDFTypes.BSDF_ALL);
    f(ref wo, ref wi, ref shadeN, out result.F);
    result.Wi = LocalToWorld(ref wi);
}
/// <summary>
/// Checks that a newly constructed MetropolisSampler has a default random
/// source and still reports one after a custom source is assigned.
/// </summary>
public void MetropolisConstructor()
{
    var target = new Normal(0.0, 1.0);
    var generator = new MersenneTwister();
    var sampler = new MetropolisSampler<double>(0.2, target.Density, x => Normal.Sample(generator, x, 0.1), 10);

    Assert.IsNotNull(sampler.RandomSource);

    sampler.RandomSource = generator;
    Assert.IsNotNull(sampler.RandomSource);
}
// Base BxDF constructor: in VERBOSE builds warns if the supplied normals are
// not unit length, then forwards everything to Init.
protected BaseBxdf(ref RayHit rh, ref RayData ray, ref Normal ng, ref Normal ns, ref UV texCoord, MaterialInfo mi, SurfaceTextureInfo texData, bool fromLight)
{
#if VERBOSE
    if (ng.Length > 1f || ns.Length > 1f)
    {
        Console.WriteLine("Normals in bsdf arent normalized");
    }
#endif
    this.Init(ref rh, ref ray, ref ng, ref ns, ref texCoord, mi, texData, fromLight);
}
// Use this for initialization: cache components, seed the normals list and
// set up the fade colors.
private void Start()
{
    _transform = GetComponent<Transform>();
    _rigidbody = GetComponent<Rigidbody>();
    _renderer = GetComponent<Renderer>();

    Closest = new Normal();
    _normals = GetNormals(0);
    SetFadeColors();
}
// Pdf of a Phong lobe around the mirror direction of dir_out; also writes the
// corresponding BRDF value (which uses the (n+2) normalization) to bdf_val.
float get_phong_lobe_pdf(float exponent, ref Normal normal, ref Vector dir_out, ref Vector dir_in, out float bdf_val)
{
    var mirror = -reflect(ref dir_out, ref normal);
    float cosAngle = Math.Abs(Vector.Dot(ref mirror, ref dir_in));
    float poweredCos = (float)MathLab.Pow(cosAngle, exponent);

    bdf_val = (exponent + 2.0f) / (MathLab.M_2PI) * poweredCos;
    return (exponent + 1.0f) / (MathLab.M_2PI) * poweredCos;
}
/// <summary>
/// Creates a Formula from a string that consists of a standard infix expression composed
/// from non-negative floating-point numbers (using standard C# syntax for double/int literals),
/// variable symbols (one or more letters followed by one or more digits), left and right
/// parentheses, and the four binary operator symbols +, -, *, and /. White space is
/// permitted between tokens, but is not required.
///
/// An example of a valid parameter to this constructor is "2.5e9 + x5 / 17".
/// Examples of invalid parameters are "x", "-5.3", and "2 5 + 3";
///
/// If the formula is syntacticaly invalid, throws a FormulaFormatException with an
/// explanatory Message.
/// </summary>
public Formula(String formula, Normal Normalizer, Valid Validater)
{
    if (String.IsNullOrWhiteSpace(formula))
    {
        throw new FormulaFormatException("There must be input in order to compute the formula");
    }
    this.Normal = Normalizer;
    this.equation = formula;
    // check tracks the kind of the previous token: 0 = start, 1 = operand, 2 = operator/open-paren.
    int check = 0;   //COUNTER FOR TOKENS
    int lparen = 0;  //COUNTER FOR LEFT PARENTHESIS
    int rparen = 0;  //COUNTER FOR RIGHT PARENTHESIS
    string[] arr = { "+", "-", "*", "/" };
    Stack <string> test = new Stack <string>();
    Exception ffex = new FormulaFormatException("You made a syntax error. Any token following a number, variable, or closing parenthesis must be either an operator or a closing parenthesis");
    foreach (string s in GetTokens(formula))
    {
        test.Push(s);
        double n;
        if (double.TryParse(s, out n))
        {
            // Two operands in a row is a syntax error.
            if (check == 1) { throw ffex; } else { check = 1; }
        }
        else if (arr.Contains(s))
        {
            // An operator may not start the formula or follow another operator.
            if (check == 2 || check == 0) { throw ffex; } else { check = 2; }
        }
        else
        {
            switch (s)
            {
                case "(":
                    lparen++;
                    check = 2;
                    break;
                case ")":
                    rparen++;
                    // A closing parenthesis may never outnumber the opens seen so far.
                    if (rparen > lparen)
                    {
                        throw new FormulaFormatException("You have made a syntax error. 
Please check your Parrenthesis and try again");
                    }
                    break;
                ////VARIABLES\\\\
                default:
                    if (!Validater(Normalizer(s)))
                    {
                        throw new FormulaFormatException("There variables you entered do not follow the correct format or are undefined");
                    }
                    vars.Add(s);
                    char[] ss = s.ToCharArray();
                    // NOTE(review): `|| ss[0] == '_'` is unreachable — when ss[0] is '_'
                    // IsLetter is already false, so the first clause fires; was
                    // `&& ss[0] != '_'` intended?
                    if (!Char.IsLetter(ss[0]) || ss[0] == '_')
                    {
                        throw new FormulaFormatException("You have made a syntax error. Variables must be letters followed by numbers");
                    }
                    // NOTE(review): `check > 0 && check == 1` is just `check == 1`.
                    if (check > 0 && check == 1) { throw ffex; } else { check = 1; }
                    break;
            }
        }
    }
    if (rparen != lparen)
    {
        throw new FormulaFormatException("You have made a syntax error. Check Parenthesis and try again");
    }
    //CHECK FIRST AND LAST ELEMENTS FOR ANYTHING OTHER THE THE REQUIRED INFO
    // The stack yields tokens last-pushed first, so temp[0] is the LAST token.
    string[] temp = test.ToArray();
    int nn = temp.Length - 1; // NOTE(review): nn is never used.
    if (temp[0] == "(" || temp[0] == "+" || temp[0] == "-" || temp[0] == "*" || temp[0] == "/")
    {
        throw new FormulaFormatException("The last token must be a number, variable or opening parenthesis.");
    }
}
/// <summary>
/// Gets the complementary cumulative distribution function (ccdf, also known
/// as the survival function) of this distribution evaluated at point <c>x</c>.
/// </summary>
/// <param name="x">A single point in the distribution range.</param>
public override double ComplementaryDistributionFunction(double x)
    => Normal.Complemented((x - mean) / stdDev);
/// <summary>
/// Gets the inverse of the cumulative distribution function (icdf, the
/// quantile function) of this distribution evaluated at probability <c>p</c>.
/// </summary>
/// <param name="p">A probability value between 0 and 1.</param>
/// <returns>A sample which could original the given probability
/// value when applied in the <see cref="DistributionFunction"/>.</returns>
public override double InverseDistributionFunction(double p)
    => Normal.Inverse(1.0 - Math.Pow(1.0 - p, 1.0 / power));
/// <summary>Returns a <see cref="System.String"/> that represents this instance.</summary>
/// <returns>The string representation of the Normal component.</returns>
public override string ToString() => Normal.ToString();
// Login: returns the user id matching the given credentials, as parsed by
// Normal.ParseInt from the query result.
public int Login(string account, string password)
{
    // SECURITY FIX: the query was built by concatenating raw user input —
    // classic SQL injection. Escaping single quotes is a stop-gap; prefer a
    // parameterized query if SqlAccess supports one. (Storing/comparing
    // plaintext passwords is also unsafe — hash them.)
    string safeAccount = account == null ? null : account.Replace("'", "''");
    string safePassword = password == null ? null : password.Replace("'", "''");
    string sql = string.Format(
        "select id from UserInfo where Account='{0}' and [password]='{1}'",
        safeAccount, safePassword);
    return Normal.ParseInt(SqlAccess.QueryObj(sql));
}
// Cumulative standard normal distribution, Phi(x).
private double Z(double x) => Normal.CDF(0, 1, x);
/// <summary>
/// Gets the inverse of the cumulative distribution function (icdf, the
/// quantile function) of this distribution evaluated at probability <c>p</c>:
/// mean + stdDev * Phi^-1(p).
/// </summary>
public override double InverseDistributionFunction(double p)
    => mean + stdDev * Normal.Inverse(p);
/// <summary>The upper support bound of a normal distribution is positive infinity.</summary>
public void ValidateMaximum()
{
    var dist = new Normal();

    Assert.AreEqual(Double.PositiveInfinity, dist.Maximum);
}
/// <summary>
/// Draws a random yes/no decision by sampling N(mean, stdDev) and testing the
/// sign of the sample; with the default mean of 0 this is a fair 50/50 draw.
/// </summary>
public static bool getRandomDecisionAboutNewCustomerFromNormalDistribution(double mean = 0, double stdDev = 0.2)
{
    // BUG FIX: Convert.ToBoolean(double) is true for ANY nonzero value, and a
    // continuous normal sample is nonzero with probability 1, so the previous
    // code effectively always returned true. Thresholding at zero gives the
    // intended random decision.
    Normal normalDistribution = new Normal(mean, stdDev);
    return normalDistribution.Sample() > 0;
}
/// <summary>The median of a normal distribution equals its mean.</summary>
public void ValidateMedian(double mean)
{
    var dist = new Normal(mean, 1.0);

    Assert.AreEqual(mean, dist.Median);
}
/// <summary>The lower support bound of a normal distribution is negative infinity.</summary>
public void ValidateMinimum()
{
    var dist = new Normal();

    Assert.AreEqual(Double.NegativeInfinity, dist.Minimum);
}
/// <summary>A normal distribution is symmetric, so its skewness is zero for any sigma.</summary>
public void ValidateSkewness(double sdev)
{
    var dist = new Normal(1.0, sdev);

    Assert.AreEqual(0.0, dist.Skewness);
}
/// <summary>Entropy of N(mu, sigma) is ln(sigma) + ln(sqrt(2*pi*e)).</summary>
public void ValidateEntropy(double sdev)
{
    var dist = new Normal(1.0, sdev);

    Assert.AreEqual(Constants.LogSqrt2PiE + Math.Log(dist.StdDev), dist.Entropy);
}
/// <summary>ToString reports the distribution's mean and standard deviation.</summary>
public void ValidateToString()
{
    var dist = new Normal(1d, 2d);

    Assert.AreEqual("Normal(μ = 1, σ = 2)", dist.ToString());
}
/// <summary>
/// Gets the cumulative distribution function (cdf) for this distribution
/// evaluated at point <c>x</c>: 1 - Phi(-x)^power.
/// </summary>
/// <param name="x">A single point in the distribution range.</param>
public override double DistributionFunction(double x)
{
    // Phi(-x): standard normal cdf at the negated argument.
    double phi = Normal.Function(-x);

    return 1.0 - Math.Pow(phi, power);
}
/// <summary>A normal distribution can produce a finite sequence of samples.</summary>
public void CanSampleSequence()
{
    var n = new Normal();
    var ied = n.Samples();

    var e = ied.Take(5).ToArray();

    // FIX: the drawn samples were previously discarded without any check;
    // at minimum assert that the requested number of samples materialized.
    Assert.AreEqual(5, e.Length);
}
// Inverse of the cumulative standard normal distribution, Phi^-1(x).
private double ZInverse(double x) => Normal.InvCDF(0, 1, x);
/// <summary>A negative standard deviation must make the static Sample throw.</summary>
public void FailSampleStatic()
{
    // FIX: this "Fail" test invoked Sample without asserting the failure, so
    // it passed vacuously (or errored). Assert the expected exception, matching
    // FailSampleSequenceStatic elsewhere in this file.
    Assert.Throws<ArgumentOutOfRangeException>(() => Normal.Sample(new Random(), 1.0, -1.0));
}
// Cumulative standard normal distribution Phi(X) (Black–Scholes helper).
private static double CND(double X) => Normal.CDF(0, 1, X);
/// <summary>A negative standard deviation must make the static sample sequence throw.</summary>
public void FailSampleSequenceStatic()
{
    Assert.Throws <ArgumentOutOfRangeException>(
        () => Normal.Samples(new Random(), 1.0, -1.0).First());
}
/// <summary>
/// Calculates the reliability index from a failure probability via the
/// inverse standard-normal cdf.
/// </summary>
/// <param name="probability">The probability to convert.</param>
/// <returns>The reliability.</returns>
private static double ProbabilityToReliability(double probability)
    => Normal.InvCDF(0, 1, 1 - probability);
// Ad-hoc benchmark comparing numerical-integration schemes (Simpson 3/8,
// Gauss-Legendre, Gauss-Hermite, Clenshaw-Curtis) for computing the
// probability that each normal "fitness" distribution in a random set is the
// minimum; the per-scheme probability sums are printed and should be ~1.
public static void TestNumericalIntegration(Random rand)
{
    BogaertGLWrapper.Initialize();
    /*
     * double[] nodesAndWeightsRaw = BogaertGLWrapper.GetGLNodesAndWeights(10000000);
     *
     * // Count how many can be culled
     * int count = 0;
     * for (int i = 0; i < nodesAndWeightsRaw.Length / 2; i++)
     * {
     *     double x = nodesAndWeightsRaw[2 * i];
     *     double w = nodesAndWeightsRaw[2 * i + 1];
     *     if (x < 0 && w * Normal.CDF(0, 1.0 / 8, x) < 10E-18) { count++; }
     *     else if (x > 0 && w * Normal.PDF(0, 1.0 / 8, x) < 10E-18) { count++; }
     * }
     * Console.WriteLine($"Could cull {count} out of {nodesAndWeightsRaw.Length / 2} evaluation points.");
     *
     * Console.ReadKey();
     */
    // Parameters
    int numberOfDistributions = 20;
    double minMeanFitness = 8;
    double maxMeanFitness = 60;
    double minStDev = 6;
    double maxStDev = 25;
    // Computed values
    double fitnessRange = maxMeanFitness - minMeanFitness;
    double stDevRange = maxStDev - minStDev;
    // Set up the distributions and pick the one with the biggest mean to be i
    // (means are negated, so "biggest mean" is the minimum fitness).
    Normal[] distributions = new Normal[numberOfDistributions];
    Normal distribution_i = new Normal();
    double minMean = -1 * minMeanFitness, maxMean = -1 * maxMeanFitness; // Starting points for finding min and max in the set (not an error)
    for (int i = 0; i < distributions.Length; i++)
    {
        distributions[i] = new Normal(-1 * (minMeanFitness + fitnessRange * rand.NextDouble()), minStDev + stDevRange * rand.NextDouble());
        if (distributions[i].Mean > maxMean) { maxMean = distributions[i].Mean; }
        if (distributions[i].Mean < minMean)
        {
            minMean = distributions[i].Mean;
            distribution_i = distributions[i];
        }
        Console.WriteLine($"Dist {i}: mean {distributions[i].Mean}, stdev {distributions[i].StdDev}");
    }
    // Integrand of the "alternative form": f_i(x)/F_i(x) * prod_j F_j(x),
    // guarded against a vanishing or NaN denominator.
    Func <double, double> altForm = x =>
    {
        double cdfi = distribution_i.CumulativeDistribution(x);
        if (cdfi == 0 || double.IsNaN(cdfi)) { return(0); }
        double product = distribution_i.Density(x) / cdfi;
        for (int i = 0; i < distributions.Length; i++)
        {
            product *= distributions[i].CumulativeDistribution(x);
        }
        return(product);
    };
    /*
     * double correctResult = SimpsonsRule.Integrate(altForm, minMean - 3 * maxStDev, maxMean + 3 * maxStDev, 600);
     * Console.WriteLine($"Simp 3/8 (600): 1 - P(D_i) = {correctResult}");
     *
     * double gaussLegendreResult = MathNet.Numerics.Integration.GaussLegendreRule.Integrate(altForm, minMean - 3 * maxStDev, maxMean + 3 * maxStDev, 128);
     * Console.WriteLine($"Gauss-Legendre 128: 1 - P(D_i) = {gaussLegendreResult}");
     */
    double[] discardProbs = new double[distributions.Length];
    /*
     * for (int i = 0; i < distributions.Length; i++)
     * {
     *     distribution_i = distributions[i];
     *     discardProbs[i] = SimpsonsRule.Integrate(altForm, minMean - 8 * maxStDev, maxMean + 8 * maxStDev, 1500);
     *     Console.WriteLine($"Simp 3/8 (1500): 1 - P(D_{i}) = {discardProbs[i]}");
     * }*/
    List <double> discardProbList = new List <double>(discardProbs);
    discardProbList.Sort();
    double sum = 0;
    for (int i = 0; i < discardProbList.Count; i++)
    {
        //Console.WriteLine($"Sorted: {discardProbList[i]}");
        sum += discardProbList[i];
    }
    // Console.WriteLine($"Sum of probabilities is {sum}");
    /*
     * NormalComparison.ComputeDiscardComplementsSimpson(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussHermite(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussLegendre(distributions);
     * NormalComparison.ComputeDiscardComplementsSimpsonAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussHermiteAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     */
    List <double> output;
    // Simpson 3/8, alternative-form invariant, 450 points.
    sum = 0;
    output = new List <double>(NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 450));
    output.Sort();
    for (int i = 0; i < output.Count; i++) { sum += output[i]; }
    Console.WriteLine($"Sum of probabilities is {sum}");
    /*
     * Console.WriteLine($"Dist 1 : 1/sqrt3 & 2 : 1/sqrt3");
     * distributions = new Normal[] { new Normal(1, 1.0/Math.Sqrt(3)), new Normal(2, 1.0/ Math.Sqrt(3)) };
     * Console.WriteLine($"Exact = 
{NormalComparison.ComputeDiscardProbabilityPairwiseExact(distributions[0], distributions[1])}");
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 210);
     * Console.WriteLine($"4 Dists 1 : 1 & 2 : 1");
     * distributions = new Normal[10];
     * for (int i = 0; i < distributions.Length - 1; i++) { distributions[i] = new Normal(1, 1); }
     * distributions[distributions.Length - 1] = new Normal(2, 1);
     */
    /*
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     * output = new List<double>(NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 210));
     * output.Sort();
     * sum = 0;
     * for (int i = 0; i < output.Count; i++)
     * {
     *     sum += output[i];
     * }
     * Console.WriteLine($"Sum of probabilities is {sum}");
     */
    // Gauss-Legendre, 75-point optimized rule.
    output = new List <double>(NormalComparison.ComputeDiscardComplementsGaussLegendreAltInvariant(distributions, GaussLegendre.evalPoints75opt, GaussLegendre.weights75opt));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++) { sum += output[i]; }
    Console.WriteLine($"Sum of probabilities is {sum}");
    // Gauss-Hermite, 70-point optimized rule.
    output = new List <double>(NormalComparison.ComputeDiscardComplementsGaussHermiteAltInvariant(distributions, GaussHermite.evaluationPoints70opt, GaussHermite.weights70opt));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++) { sum += output[i]; }
    Console.WriteLine($"Sum of probabilities is {sum}");
    // Clenshaw-Curtis, 450 points.
    output = new List <double>(NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariant(distributions, 450));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++) { sum += output[i]; }
    Console.WriteLine($"Sum of probabilities is {sum}");
    // Clenshaw-Curtis with automatic order selection, timed.
    System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
    watch.Start();
    double[] bigTest = NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariantAutomatic(distributions);
    watch.Stop();
    sum = 0;
    for (int i = 0; i < bigTest.Length; i++)
    {
        //Console.WriteLine($"CCAltInvAuto[{i}]: {bigTest[i]}");
        sum += bigTest[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");
    Console.WriteLine($"Total Error lower bound: {Math.Abs(sum - 1)}");
    Console.WriteLine($"Time: {watch.Elapsed.TotalMilliseconds}ms");
    // Greedily discard the least-likely winners while keeping >= 95% certainty.
    discardProbList = new List <double>(bigTest);
    discardProbList.Sort();
    {
        double certainty = 1;
        int idx = 0;
        while (true)
        {
            double newval = certainty - discardProbList[idx];
            if (newval < 0.95) { break; }
            certainty = newval;
            idx++;
        }
        Console.WriteLine($"Can discard {idx} distributions with 95% certainty");
    }
    // Simpson 3/8 with automatic order selection, timed.
    watch.Restart();
    bigTest = NormalComparison.ComputeDiscardComplementsSimpson38AltInvariantAutomatic(distributions);
    watch.Stop();
    output = new List <double>(bigTest);
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++) { sum += output[i]; }
    Console.WriteLine($"S38 Sum of probabilities is {sum}");
    Console.WriteLine($"S38 Time: {watch.Elapsed.TotalMilliseconds}ms");
}
/// <summary>A negative standard deviation must make the static sample sequence throw.</summary>
public void FailSampleSequenceStatic()
{
    // FIX: this "Fail" test consumed the sequence without asserting the
    // failure, so it passed vacuously (or errored). Assert the expected
    // exception, matching the sibling FailSampleSequenceStatic in this file.
    Assert.Throws<ArgumentOutOfRangeException>(() => Normal.Samples(new Random(), 1.0, -1.0).First());
}
/// <summary>
/// Two planes are equivalent when their normals match within delta and their
/// distances from the origin differ by less than delta.
/// </summary>
public bool EquivalentTo(Plane other, decimal delta = 0.0001m)
    => Normal.EquivalentTo(other.Normal, delta)
       && Math.Abs(DistanceFromOrigin - other.DistanceFromOrigin) < delta;
// Editor-time wiring of cached component references.
private void OnValidate()
{
    // BUG FIX: UnityEngine.Object overloads == so destroyed objects compare
    // equal to null, but the ?? operator bypasses that overload — a destroyed
    // reference would never be re-fetched. Use explicit == null checks.
    if (animator == null) animator = GetComponentInChildren <Animator>();
    if (rigidbody2D == null) rigidbody2D = GetComponent <Rigidbody2D>();
    if (normal == null) normal = GetComponent <Normal>();
}
/// <summary>
/// Gets the log-probability density function (pdf) for this distribution
/// evaluated at point <c>x</c>:
/// log(power) + log phi(x) + (power - 1) * log Phi(-x).
/// </summary>
/// <param name="x">A single point in the distribution range.</param>
/// <returns>The logarithm of the probability of <c>x</c>
/// occurring in the current distribution.</returns>
public override double LogProbabilityDensityFunction(double x)
{
    // BUG FIX: the last term previously multiplied by Phi(-x) itself instead
    // of log Phi(-x). The pdf implied by DistributionFunction in this file
    // (1 - Phi(-x)^power) is power * phi(x) * Phi(-x)^(power-1), whose log
    // requires the logarithm of the cdf term.
    return(Math.Log(power) + Normal.LogDerivative(x) + (power - 1) * Math.Log(Normal.Function(-x)));
}
/// <summary>
/// Writes a line terminator to the normal output stream and returns this
/// buffer to allow call chaining.
/// </summary>
public PftOutputBuffer WriteLine()
{
    Normal.WriteLine();

    return this;
}
/// <summary>Combines the Normal and DistanceFromOrigin hashes (397 multiplier, unchecked).</summary>
public override int GetHashCode()
{
    unchecked
    {
        int normalHash = 0;
        if (Normal != null)
        {
            normalHash = Normal.GetHashCode();
        }
        return (normalHash * 397) ^ DistanceFromOrigin.GetHashCode();
    }
}
/// <summary>Smoke test: a default normal distribution can draw a sample without throwing.</summary>
public void CanSample()
{
    var dist = new Normal();
    dist.Sample();
}
/// <summary>
/// Gets the cumulative distribution function (cdf) for the this Normal
/// distribution evaluated at point <c>x</c>, via the standard-normal cdf of
/// the standardized value (x - mean) / stdDev.
/// </summary>
/// <param name="x">A single point in the distribution range.</param>
/// <remarks>
/// The Cumulative Distribution Function (CDF) describes the cumulative
/// probability that a given value or any value smaller than it will occur.
/// The calculation is computed through the relationship to the error function
/// as <see cref="Accord.Math.Special.Erfc">erfc</see>(-z/sqrt(2)) / 2.
/// See Weisstein, Eric W. "Normal Distribution" (MathWorld) and
/// http://en.wikipedia.org/wiki/Normal_distribution#Cumulative_distribution_function
/// </remarks>
/// <example>See <see cref="NormalDistribution"/>.</example>
public override double DistributionFunction(double x)
{
    double z = (x - mean) / stdDev;

    return Normal.Function(z);
}