/// <summary>
/// Serializes the fitted GLM to a plain-text file: one scalar field per line
/// (model type, input table, field-name lists, fit statistics), followed by
/// the space-separated numeric arrays (coefficients, standard errors, Wald
/// statistics and p-values, min/max/sum of each variable).
/// </summary>
/// <param name="outModelPath">Destination file path; also stored in outPath.</param>
/// <returns>The path the model was written to.</returns>
public string writeModel(string outModelPath)
{
    outPath = outModelPath;
    using (System.IO.StreamWriter sw = new System.IO.StreamWriter(outPath))
    {
        sw.WriteLine(modelTypes.GLM.ToString());
        sw.WriteLine(InTablePath);
        sw.WriteLine(String.Join(",", IndependentFieldNames));
        sw.WriteLine(String.Join(",", DependentFieldNames));
        sw.WriteLine(String.Join(",", ClassFieldNames));
        sw.WriteLine(SampleSize.ToString());
        sw.WriteLine(NumberOfVariables.ToString());
        sw.WriteLine(Iterations.ToString());
        sw.WriteLine(DeltaC.ToString());
        sw.WriteLine(LogLikelihood);
        sw.WriteLine(LogLikelihoodratio);
        sw.WriteLine(PValue.ToString());
        sw.WriteLine(Deviance.ToString());
        sw.WriteLine(ChiSquare.ToString());
        sw.WriteLine(linkfunction.ToString());
        // Each numeric vector is written space-separated on its own line.
        sw.WriteLine(String.Join(" ", (from double d in Coefficients select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in StdError select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in waldTestValues select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in waldTestPValues select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in minValues select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in maxValues select d.ToString()).ToArray()));
        sw.WriteLine(String.Join(" ", (from double d in sumValues select d.ToString()).ToArray()));
        // Note: the explicit sw.Close() was removed — the using block already
        // flushes and disposes the writer.
    }
    return (outPath);
}
public void CanSampleSequence()
{
    // Materializing a finite prefix of the infinite sample stream must not throw.
    var distribution = new ChiSquare(1.0);
    var stream = distribution.Samples();
    stream.Take(5).ToArray();
}
/// <summary>
/// Builds and shows a modeless dialog summarizing the fitted GLM: sample and
/// convergence info, fit statistics, and the coefficient / standard-error
/// table; optionally renders distribution charts on user confirmation.
/// </summary>
/// <param name="alpha">Significance level — NOTE(review): not referenced anywhere in this method body; confirm whether it should drive the report.</param>
public void getReport(double alpha)
{
    // Reuse the running-process dialog as a simple scrolling text report.
    Forms.RunningProcess.frmRunningProcessDialog rd = new Forms.RunningProcess.frmRunningProcessDialog(false);
    rd.Text = "GLM Results"; rd.TopLevel = true; rd.pgbProcess.Visible = false;
    rd.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Sizable;
    rd.addMessage("Dependent field = " + DependentFieldNames[0]);
    rd.addMessage("Independent fields = " + String.Join(", ", IndependentFieldNames));
    rd.addMessage("Sample size = " + SampleSize.ToString());
    rd.addMessage("Iteration = " + Iterations.ToString());
    rd.addMessage("Delta Convergence " + DeltaC.ToString());
    rd.addMessage("Chi-Sqr = " + ChiSquare.ToString() + " p-value = " + PValue.ToString());
    rd.addMessage("Deviance = " + Deviance.ToString());
    rd.addMessage("Log Likelihood = " + LogLikelihood.ToString());
    rd.addMessage("Log Likelihood Ratio = " + LogLikelihoodratio.ToString() + "\n\nCoefficents and standard errors:\n");
    rd.addMessage("Param: Intercept, " + String.Join(", ", IndependentFieldNames));
    rd.addMessage("Coef: " + string.Join(", ", (from double d in Coefficients select d.ToString()).ToArray()));
    rd.addMessage("STE: " + string.Join(", ", (from double d in StdError select d.ToString()).ToArray()) + "\n");
    // Chart creation is best-effort: failures are reported but never abort the report.
    try
    {
        if (ModelHelper.chartingAvailable() && System.Windows.Forms.MessageBox.Show("Do you want to build distribution graphs?", "Graphs", System.Windows.Forms.MessageBoxButtons.YesNo) == System.Windows.Forms.DialogResult.Yes)
        {
            createRegChart();
        }
    }
    catch { System.Windows.Forms.MessageBox.Show("Cannot create charts."); }
    rd.Show(); rd.enableClose();
}
public void SetDofFailsWithNonPositiveDoF(double dof)
{
    // Assigning an invalid degrees-of-freedom value; the expected failure is
    // presumably captured by a test-framework attribute outside this view.
    var distribution = new ChiSquare(1.0);
    distribution.DegreesOfFreedom = dof;
}
/// <summary>
/// Runs a chi-square association test for every demographic of the current
/// test (one test per functional-organization type for that demographic) and
/// collects the names of demographics showing association.
/// </summary>
/// <returns>The list of associated demographic names, or null when none are found
/// (callers rely on the null return for the empty case).</returns>
public List <string> GetDemographicsWhereThereIsAssociation()
{
    List <string> demographicsWithAssociation = new List <string>();
    // Ids of all functional-organization types belonging to the test's company.
    List <int> FOT = new FunctionalOrganizationTypesServices().GetByCompany(Test.Company_Id).Select(fot => fot.Id).ToList();
    FunctionalOrganizationTypesServices FOTService = new FunctionalOrganizationTypesServices();
    // (Removed an unused CountriesServices local and dead commented-out code.)
    ChiSquare cs;
    foreach (string demographic in DemographicNames.Keys) // iterate over the demographics
    {
        if (demographic == "FunctionalOrganizationType") // functional structures get one test per type
        {
            foreach (int type in FOT)
            {
                cs = new ChiSquare(Test, demographic, null, null, null, null, type, 0.05, country, state, region);
                GetConclusion(demographicsWithAssociation, cs, demographic, FOTService.GetById(type).Name);
            }
        }
        else
        {
            cs = new ChiSquare(Test, demographic, null, null, null, 0.05, country, state, region);
            GetConclusion(demographicsWithAssociation, cs, demographic, "");
        }
    }
    if (demographicsWithAssociation.Count > 0)
    {
        return (demographicsWithAssociation);
    }
    else
    {
        return (null);
    }
}
/// <summary>
/// Creates an instance.
/// </summary>
/// <param name="degrees">The degrees of freedom of the distribution, not less than one</param>
/// <param name="engine">A uniform random number generator, not null</param>
public ChiSquareDistribution(double degrees, RandomEngine engine)
{
    // Validate arguments before touching any state.
    ArgChecker.isTrue(degrees >= 1, "Degrees of freedom must be greater than or equal to one");
    ArgChecker.notNull(engine, "engine");

    _degrees = degrees;
    _chiSquare = new ChiSquare(degrees, engine);
}
/// <summary>
/// GOF test for one categorical variable with more than two levels.
///
/// Hypotheses are:
/// H_0 : actual distribution of each level = expected distribution of each level
/// H_1 : actual distribution of each level != expected distribution of each level
///
/// p-value = P(observed or more mismatch of expected and actual level distribution | H_0 is true)
///
/// Reject H_0 if p-value &lt; alpha
/// </summary>
/// <param name="observedCountInEachLevel">The count of each level in the sample data for the categorical variable</param>
/// <param name="expectedPercentageOfEachLevel">The expected distribution / percentage of each level in the population for the categorical variable</param>
/// <param name="pValue">p-value which is P(observed or more extreme mismatch of expected and actual level distribution | H_0 is true</param>
/// <param name="significance_level">alpha</param>
/// <returns>True if H_0 is rejected; False if H_0 is failed to be rejected</returns>
public bool RejectH0(int[] observedCountInEachLevel, double[] expectedPercentageOfEachLevel, out double pValue, double significance_level = 0.05)
{
    int countOfLevels = observedCountInEachLevel.Length;

    int sampleSize = 0;
    for (int i = 0; i < countOfLevels; ++i)
    {
        sampleSize += observedCountInEachLevel[i];
    }

    // Expected counts are kept as doubles. The previous implementation
    // truncated them to integers and dumped the rounding remainder into the
    // first level, which biases the test statistic; Pearson's chi-square is
    // defined on real-valued expected counts.
    double ChiSq = 0;
    for (int i = 0; i < countOfLevels; ++i)
    {
        double expectedCount = expectedPercentageOfEachLevel[i] * sampleSize;
        ChiSq += System.Math.Pow(observedCountInEachLevel[i] - expectedCount, 2) / expectedCount;
    }

    // df = number of levels - 1 for a goodness-of-fit test.
    pValue = 1 - ChiSquare.GetPercentile(ChiSq, countOfLevels - 1);
    return (pValue < significance_level);
}
/// <summary>
/// For each key of the given demographic, builds a row of
/// [satisfiedCount, satisfiedPct, nonSatisfiedCount, nonSatisfiedPct]
/// — extended with [average, median] when the demographic has more than 10
/// keys (table mode).
/// </summary>
/// <param name="demographic">Demographic name, e.g. "Location", "FunctionalOrganizationType", "AgeRange".</param>
/// <param name="fot">Functional-organization type id; read via fot.Value only in the FunctionalOrganizationType branch.</param>
private Dictionary <string, double[]> GetSatisfiedAndNonSatisfiedByDemographic(string demographic, int?fot = null)
{
    // Denominator for the percentage columns; overridden per-key for "Location" below.
    double EvaluationsByUbication = this.evaluationsByUbicationCount;// GetEvaluationsByUbication(country, state, region).Count();
    Dictionary <string, double[]> data = new Dictionary <string, double[]>();
    ChiSquare cs = new ChiSquare(Test, demographic, null, null, null, null, fot, 0.05, country, state, region);
    Dictionary <string, double> satisfied = (Dictionary <string, double>)cs.DataSatisfaction["Satisfied"];
    Dictionary <string, double> nonSatisfied = (Dictionary <string, double>)cs.DataSatisfaction["NoSatisfied"];
    Dictionary <string, object> AvgAndMed = new Dictionary <string, object>();
    Dictionary <string, double> average = new Dictionary <string, double>();
    Dictionary <string, double> median = new Dictionary <string, double>();
    List <string> keys = satisfied.Keys.ToList();
    bool table = false;
    // More than 10 keys: also fetch averages/medians so the caller can render a table.
    if (keys.Count > 10)
    {
        if (demographic == "Location") { AvgAndMed = Test.GetAvgAndMedByLocations(null, null, null, false, country, state, region); }
        else if (demographic == "FunctionalOrganizationType") { AvgAndMed = Test.GetAvgAndMedByFOTypes(null, null, null, fot.Value, false, country, state, region); }
        else if (demographic == "AgeRange") { AvgAndMed = Test.GetAvgAndMedByAgeRanges(null, null, null, false, country, state, region); }
        else { AvgAndMed = Test.GetCategoryAvgAndMed(false, null, country, state, region); }
        average = (Dictionary <string, double>)AvgAndMed["Average"];
        median = (Dictionary <string, double>)AvgAndMed["Median"];
        table = true;
    }
    foreach (string key in keys)
    {
        double sat = satisfied[key];
        double nonSat = nonSatisfied[key];
        // For "Location" the percentages are relative to that location's own total.
        if (demographic == "Location") { EvaluationsByUbication = sat + nonSat; }
        double pctSat = sat * 100 / EvaluationsByUbication;
        double pctNonSat = nonSat * 100 / EvaluationsByUbication;
        if (table)
        {
            double avg = average[key];
            double med = median[key];
            data.Add(key, new double[] { sat, pctSat, nonSat, pctNonSat, avg, med });
        }
        else
        {
            data.Add(key, new double[] { sat, pctSat, nonSat, pctNonSat });
        }
    }
    return (data);
}
public void ValidateDensity(
    [Values(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double dof,
    [Values(0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity)] double x)
{
    // pdf(x; k) = x^(k/2-1) * exp(-x/2) / (2^(k/2) * Gamma(k/2))
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = (Math.Pow(x, halfDof - 1.0) * Math.Exp(-x / 2.0))
                      / (Math.Pow(2.0, halfDof) * SpecialFunctions.Gamma(halfDof));
    Assert.AreEqual(expected, distribution.Density(x));
}
public void ValidateDensityLn(
    [Values(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double dof,
    [Values(0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity)] double x)
{
    // ln pdf(x; k) = -x/2 + (k/2-1) ln x - (k/2) ln 2 - ln Gamma(k/2)
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = (-x / 2.0) + ((halfDof - 1.0) * Math.Log(x))
                      - (halfDof * Math.Log(2)) - SpecialFunctions.GammaLn(halfDof);
    Assert.AreEqual(expected, distribution.DensityLn(x));
}
public void ValidateCumulativeDistribution(
    [Values(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double dof,
    [Values(0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity, 0.0, 0.1, 1.0, 5.5, 110.1, Double.PositiveInfinity)] double x)
{
    // CDF(x; k) = gamma_lower(k/2, x/2) / Gamma(k/2)
    var distribution = new ChiSquare(dof);
    double expected = SpecialFunctions.GammaLowerIncomplete(dof / 2.0, x / 2.0)
                      / SpecialFunctions.Gamma(dof / 2.0);
    Assert.AreEqual(expected, distribution.CumulativeDistribution(x));
}
/// <summary>
/// Loads the given test, runs the chi-square association analysis for the
/// demographic, and returns the populated ChiSquare object.
/// </summary>
public ChiSquare GetSatisfiedAndNoSatisfiedDictionary(string demographic, int test_id, string type, double pValue, int?FO_id)
{
    Test loadedTest = new TestsServices().GetById(test_id);
    ChiSquare analysis = new ChiSquare(loadedTest, demographic, null, null, null, null, FO_id, pValue, null, null, null);
    analysis.GetAssociation();
    return (analysis);
}
/// <summary>
/// Pushes the current reflectivity-model fit parameters and per-box
/// length/rho/sigma values (each with its covariance-derived uncertainty)
/// into the singleton ReportGenerator.
/// </summary>
public override void SaveParamsForReport()
{
    ReportGenerator g = ReportGenerator.Instance;
    g.ClearReflModelInfo();
    // Stride into CovarArray: 2 entries per box when a single shared roughness
    // is used, 3 when each box has its own roughness parameter.
    int arrayconst = 2;
    List <string> ginfo = new List <string>();
    if (HoldsigmaCB) { ginfo.Add("The reflectivity curve was fit with a single roughness parameter\n"); }
    else { ginfo.Add(String.Format("The reflectivity curve was fit with {0} roughness parameters\n", BoxCountTB + 1)); arrayconst = 3; }
    ginfo.Add("Percent Error in Q: " + QSpreadTB.ToString() + "\n");
    ginfo.Add("Normalization Factor: " + NormalizationFactor.ToString() + "\n");
    ginfo.Add("Low Q Offset: " + LowQOffset.ToString() + "\n");
    ginfo.Add("High Q Offset: " + HighQOffset.ToString() + "\n");
    // NOTE(review): the "Superphase SLD" label is printed from SubphaseSLDTB
    // while "Subphase SLD" uses SubphaseSLD — confirm the label/field pairing.
    ginfo.Add("Superphase SLD: " + SubphaseSLDTB.ToString() + "\n");
    ginfo.Add("Subphase SLD: " + SubphaseSLD.ToString() + "\n");
    ginfo.Add("Wavelength: " + WavelengthTB.ToString() + "\n");
    ginfo.Add("Chi Square for reflectivity fit: " + ChiSquare.ToString() + "\n");
    // 0x00B1 is the plus-minus sign used between value and uncertainty.
    ginfo.Add(string.Format("The subphase roughness was: {0:#.### E-0} " + (char)0x00B1 + " {1:#.### E-0}\n", SubRoughTB, CovarArray[0]));
    for (int i = 0; i < BoxCountTB; i++)
    {
        ginfo.Add((i + 1).ToString());
        ginfo.Add(LengthArray[i].ToString("#.### E-0") + " " + (char)0x00B1 + " " + CovarArray[arrayconst * i + 1].ToString("#.### E-0"));
        if (!m_bUseSLD)
        {
            ginfo.Add(RhoArray[i].ToString("#.### E-0") + " " + (char)0x00B1 + " " + CovarArray[arrayconst * i + 2].ToString("#.### E-0"));
        }
        else
        {
            // In SLD mode rho is stored normalized; scale back by the subphase SLD.
            ginfo.Add((RhoArray[i] * SubphaseSLDTB).ToString("#.### E-0") + " " + (char)0x00B1 + " " + (CovarArray[arrayconst * i + 2] * SubphaseSLDTB).ToString("#.### E-0"));
        }
        if (HoldsigmaCB)
        {
            // Shared roughness: every box reports the single covariance entry 0.
            ginfo.Add(SigmaArray[i].ToString("#.### E-0") + " " + (char)0x00B1 + " " + CovarArray[0].ToString("#.### E-0"));
        }
        else
        {
            ginfo.Add(SigmaArray[i].ToString("#.### E-0") + " " + (char)0x00B1 + " " + CovarArray[arrayconst * i + 3].ToString("#.### E-0"));
        }
    }
    g.SetReflModelInfo = ginfo;
}
/// <summary>
/// Trains the multiclass support vector machine over the precomputed
/// training features using a chi-square kernel, assigning one class index
/// per training image, and prints the training error.
/// </summary>
private static void CreateVectorMachines()
{
    var kernel = new ChiSquare();

    // Training hyper-parameters (redundant casts removed).
    double complexity = 1;
    double tolerance = 0.01;
    int cacheSize = 500;
    SelectionStrategy strategy = SelectionStrategy.Sequential;

    // Create the support vector machine learning algorithm.
    var teacher = new MulticlassSupportVectorLearning <IKernel>()
    {
        Kernel = kernel,
        Learner = (param) =>
        {
            return (new SequentialMinimalOptimization <IKernel>()
            {
                Kernel = kernel,
                Complexity = complexity,
                Tolerance = tolerance,
                CacheSize = cacheSize,
                Strategy = strategy,
            });
        }
    };

    // Build the input/output arrays: one class label per training image,
    // labels assigned in enumeration order.
    double[][] inputs;
    int[] outputs;
    var inputsList = new List <double[]>();
    var outputsList = new List <int>();
    var i = 0;
    foreach (var trainingImage in trainingImages.Keys)
    {
        var trainingFeature = trainingFeatures[trainingImage];
        inputsList.Add(trainingFeature);
        outputsList.Add(i);
        i++;
    }
    inputs = inputsList.ToArray();
    outputs = outputsList.ToArray();

    ksvm = teacher.Learn(inputs, outputs);

    // Report the resubstitution (training-set) error.
    double error = new ZeroOneLoss(outputs).Loss(ksvm.Decide(inputs));
    Console.WriteLine("Error was {0}", error);
}
/// <summary>
/// Run example
/// </summary>
public void Run()
{
    // 1. Get 10 random samples of f(x) = (x * x) / 2 using continuous uniform distribution on [-10, 10]
    var uniform = new ContinuousUniform(-10, 10);
    var result = SignalGenerator.Random(Function, uniform, 10);
    Console.WriteLine(@" 1. Get 10 random samples of f(x) = (x * x) / 2 using continuous uniform distribution on [-10, 10]");
    for (var i = 0; i < result.Length; i++) { Console.Write(result[i].ToString("N") + @" "); }
    Console.WriteLine();
    Console.WriteLine();

    // 2. Get 10 random samples of f(x) = (x * x) / 2 using Exponential(1) distribution and retrieve sample points
    var exponential = new Exponential(1);
    double[] samplePoints;
    result = SignalGenerator.Random(Function, exponential, 10, out samplePoints);
    Console.WriteLine(@"2. Get 10 random samples of f(x) = (x * x) / 2 using Exponential(1) distribution and retrieve sample points");
    // Print the x-values the generator sampled at, then the function values.
    Console.Write(@"Points: ");
    for (var i = 0; i < samplePoints.Length; i++) { Console.Write(samplePoints[i].ToString("N") + @" "); }
    Console.WriteLine();
    Console.Write(@"Values: ");
    for (var i = 0; i < result.Length; i++) { Console.Write(result[i].ToString("N") + @" "); }
    Console.WriteLine();
    Console.WriteLine();

    // 3. Get 10 random samples of f(x, y) = (x * y) / 2 using ChiSquare(10) distribution
    var chiSquare = new ChiSquare(10);
    result = SignalGenerator.Random(TwoDomainFunction, chiSquare, 10);
    Console.WriteLine(@" 3. Get 10 random samples of f(x, y) = (x * y) / 2 using ChiSquare(10) distribution");
    for (var i = 0; i < result.Length; i++) { Console.Write(result[i].ToString("N") + @" "); }
    Console.WriteLine();
}
/// <summary>
/// Returns the percentage of satisfied evaluations for the whole test
/// ("General" demographic), or 0 when no satisfaction data is available.
/// </summary>
public double GetSatisfiedCountPercentage()
{
    ChiSquare analysis = new ChiSquare(Test, "General", null, null, null, 0.05, country, state, region);
    Dictionary <string, double> satisfied = (Dictionary <string, double>)analysis.DataSatisfaction["Satisfied"];

    // Guard clause: nothing recorded means 0%.
    if (satisfied.Count <= 0)
    {
        return (0);
    }

    int sat = (int)satisfied[Test.Name];
    double pct = (double)(sat * 100) / evaluationsByUbicationCount;
    return (pct);
}
/// <summary>
/// Builds a ChiSquare analysis for the given test and demographic, runs the
/// association computation, and returns it. A missing p-value defaults to 0.05.
/// </summary>
public ChiSquare getChiSquare(int test_id, string Demographic, int?questionnaire_id, int?category_id, int?question_id, int?country_id, int?FO_id, double?pValue)
{
    // Fall back to the conventional 0.05 significance level.
    double pvalue = pValue.HasValue ? pValue.Value : 0.05;

    ChiSquare chiSquare = new ChiSquare(new TestsServices().GetById(test_id), Demographic, questionnaire_id, category_id, question_id, country_id, FO_id, pvalue, null, null, null);
    chiSquare.GetAssociation();
    return (chiSquare);
}
/// <summary>
/// Replaces the current kernel with a fresh instance matching the requested
/// type. Unrecognized values leave the existing kernel untouched.
/// </summary>
private void setKernalType(KernelType k)
{
    switch (k)
    {
        case KernelType.Linear:
            kernel = new Linear();
            break;
        case KernelType.Quadratic:
            kernel = new Quadratic();
            break;
        case KernelType.Sigmoid:
            kernel = new Sigmoid();
            break;
        case KernelType.Spline:
            kernel = new Spline();
            break;
        case KernelType.ChiSquared:
            kernel = new ChiSquare();
            break;
        case KernelType.Gaussian:
            kernel = new Gaussian();
            break;
        case KernelType.Multiquadric:
            kernel = new Multiquadric();
            break;
        case KernelType.InverseMultquadric:
            kernel = new InverseMultiquadric();
            break;
        case KernelType.Laplacian:
            kernel = new Laplacian();
            break;
        default:
            // Keep whatever kernel is already configured.
            break;
    }
}
/// <summary>
/// Runs the chi-square association for the given analysis (when it has more
/// than one satisfaction bucket) and, if association is found, appends either
/// the functional-organization name or the demographic's display name.
/// </summary>
private void GetConclusion(List <string> demographicsWithAssociation, ChiSquare cs, string demographic, string name)
{
    // Dictionary of satisfied / non-satisfied counts.
    Dictionary <string, double> sat = (Dictionary <string, double>)cs.DataSatisfaction["Satisfied"];

    // Only run the chi-square computation when there is more than one level;
    // this is where the table values and the conclusion are actually produced.
    if (sat.Count > 1)
    {
        cs.GetAssociation();
    }

    if (cs.Association)
    {
        // Functional structures are reported by their own name; other
        // demographics by their configured display name.
        if (demographic == "FunctionalOrganizationType")
        {
            demographicsWithAssociation.Add(name);
        }
        else
        {
            demographicsWithAssociation.Add(DemographicNames[demographic]);
        }
    }
}
/// <summary>
/// Runs the native constrained stochastic fit over the current box model and
/// returns the resulting chi-square formatted as "##.### E-0".
/// </summary>
/// <param name="parampercs">Per-parameter percentage bounds passed into the native init struct.</param>
/// <param name="iterations">Number of stochastic iterations to run.</param>
public override string StochFit(double[] parampercs, int iterations)
{
    double[] parameters = null;
    string chosenchisquare = string.Empty;
    MakeParameters(ref parameters, false);
    // Native output buffers sized for up to 1000 candidate models.
    double[] ParamArray = new double[1000 * parameters.Length];
    double[] ChiSquareArray = new double[1000];
    double[] CovarArray = new double[1000 * parameters.Length];
    double[] locinfo = new double[9 * 1000];
    int Size = 0;
    InfoStruct = new BoxModelSettings();
    SetInitStruct(ref InfoStruct, parampercs);
    InfoStruct.Iterations = iterations;
    NativeMethods.ConstrainedStochFit(InfoStruct, parameters, CovarArray, parameters.Length, locinfo, ParamArray, ChiSquareArray, ref Size);
    //Not ideal, will always back up regardless of whether the new model is accepted or not
    BackupArrays();
    if (ModelChooser != null)
    {
        // NOTE(review): the chooser's boolean result is currently ignored —
        // the body of this if is intentionally(?) empty; confirm.
        if (ModelChooser(ParamArray, ChiSquareArray, CovarArray, locinfo, Size, parameters.Length, InfoStruct)) { }
    }
    UpdateProfile();
    // Release native resources held by the settings struct.
    InfoStruct.Dispose();
    return (ChiSquare.ToString("##.### E-0"));
}
public void CanSample()
{
    // Drawing a single sample from a valid distribution must not throw.
    var distribution = new ChiSquare(1.0);
    distribution.Sample();
}
public void CanSampleStatic()
{
    // Static sampling with a valid degrees-of-freedom must succeed.
    ChiSquare.Sample(new Random(), 2.0);
}
public void ValidateDensity(double dof, double x)
{
    // pdf(x; k) = x^(k/2-1) * exp(-x/2) / (2^(k/2) * Gamma(k/2))
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = (Math.Pow(x, halfDof - 1.0) * Math.Exp(-x / 2.0))
                      / (Math.Pow(2.0, halfDof) * SpecialFunctions.Gamma(halfDof));
    Assert.AreEqual(expected, distribution.Density(x));
}
public void ValidateMean(double dof)
{
    // The mean of a chi-square distribution equals its degrees of freedom.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(dof, distribution.Mean);
}
public void ValidateToString()
{
    // The string representation reports the degrees of freedom.
    var distribution = new ChiSquare(1.0);
    Assert.AreEqual("ChiSquare(DoF = 1)", distribution.ToString());
}
public void ValidateVariance(double dof)
{
    // Variance of a chi-square distribution is twice the degrees of freedom.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual<double>(2 * dof, distribution.Variance);
}
public void CanSetDoF(double dof)
{
    // Assigning a valid degrees-of-freedom value must not throw.
    var distribution = new ChiSquare(1.0);
    distribution.DegreesOfFreedom = dof;
}
public void ValidateDensity(double dof, double x)
{
    // pdf(x; k) = x^(k/2-1) * exp(-x/2) / (2^(k/2) * Gamma(k/2))
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = (Math.Pow(x, halfDof - 1.0) * Math.Exp(-x / 2.0))
                      / (Math.Pow(2.0, halfDof) * SpecialFunctions.Gamma(halfDof));
    Assert.AreEqual<double>(expected, distribution.Density(x));
}
public void ValidateDensityLn(double dof, double x)
{
    // ln pdf(x; k) = -x/2 + (k/2-1) ln x - (k/2) ln 2 - ln Gamma(k/2)
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = -x / 2.0 + (halfDof - 1.0) * Math.Log(x)
                      - halfDof * Math.Log(2) - SpecialFunctions.GammaLn(halfDof);
    Assert.AreEqual<double>(expected, distribution.DensityLn(x));
}
public void ValidateMedian(double dof)
{
    // The library uses the dof - 2/3 approximation for the chi-square median.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual<double>(dof - 2.0 / 3.0, distribution.Median);
}
public void ValidateMode(double dof)
{
    // Mode of a chi-square distribution is dof - 2.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual<double>(dof - 2, distribution.Mode);
}
public void ValidateStdDev(double dof)
{
    // Standard deviation must be the square root of the variance.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual<double>(Math.Sqrt(distribution.Variance), distribution.StdDev);
}
public void ValidateCumulativeDistribution(double dof, double x)
{
    // CDF(x; k) = gamma_lower(k/2, x/2) / Gamma(k/2)
    var distribution = new ChiSquare(dof);
    double expected = SpecialFunctions.GammaLowerIncomplete(dof / 2.0, x / 2.0)
                      / SpecialFunctions.Gamma(dof / 2.0);
    Assert.AreEqual(expected, distribution.CumulativeDistribution(x));
}
public void CanCreateChiSquare(double dof)
{
    // Construction must store the degrees of freedom unchanged.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual<double>(dof, distribution.DegreesOfFreedom);
}
// NOTE(review): this variant computes the expected CDF from
// GammaUpperIncomplete, while the sibling tests (see the other
// ValidateCumulativeDistribution overloads) use GammaLowerIncomplete.
// Presumably this matches an older/different SpecialFunctions naming
// convention in the library version this test targets — confirm before
// changing either side.
public void ValidateCumulativeDistribution(double dof, double x) { var n = new ChiSquare(dof); Assert.AreEqual<double>(SpecialFunctions.GammaUpperIncomplete(dof / 2.0, x / 2.0) / SpecialFunctions.Gamma(dof / 2.0), n.CumulativeDistribution(x)); }
public void CanCreateChiSquare([Values(1.0, 3.0, Double.PositiveInfinity)] double dof)
{
    // Construction must store the degrees of freedom unchanged.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(dof, distribution.DegreesOfFreedom);
}
public void SetDofFailsWithNonPositiveDoF([Values(-1.0, -0.0, 0.0)] double dof)
{
    // Non-positive degrees of freedom must be rejected on assignment.
    var distribution = new ChiSquare(1.0);
    Assert.Throws<ArgumentOutOfRangeException>(() => distribution.DegreesOfFreedom = dof);
}
public void ValidateVariance([Values(1.0, 2.0, 2.5, 3.0, Double.PositiveInfinity)] double dof)
{
    // Variance of a chi-square distribution is twice the degrees of freedom.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(2 * dof, distribution.Variance);
}
public void ValidateMaximum()
{
    // The chi-square distribution has unbounded support above.
    var distribution = new ChiSquare(1.0);
    Assert.AreEqual(Double.PositiveInfinity, distribution.Maximum);
}
public void ValidateStdDev([Values(1.0, 2.0, 2.5, 3.0, Double.PositiveInfinity)] double dof)
{
    // Standard deviation must be the square root of the variance.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(Math.Sqrt(distribution.Variance), distribution.StdDev);
}
public void ValidateDensityLn(double dof, double x)
{
    // ln pdf(x; k) = -x/2 + (k/2-1) ln x - (k/2) ln 2 - ln Gamma(k/2)
    var distribution = new ChiSquare(dof);
    double halfDof = dof / 2.0;
    double expected = (-x / 2.0) + ((halfDof - 1.0) * Math.Log(x))
                      - (halfDof * Math.Log(2)) - SpecialFunctions.GammaLn(halfDof);
    Assert.AreEqual(expected, distribution.DensityLn(x));
}
public void ValidateMode([Values(1.0, 2.0, 2.5, 3.0, Double.PositiveInfinity)] double dof)
{
    // Mode of a chi-square distribution is dof - 2.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(dof - 2, distribution.Mode);
}
public void FailSampleStatic()
{
    // Static sampling with a negative DoF must be rejected.
    Assert.Throws <ArgumentOutOfRangeException>(() => ChiSquare.Sample(new Random(), -1.0));
}
public void ValidateMedian([Values(1.0, 2.0, 2.5, 3.0, Double.PositiveInfinity)] double dof)
{
    // The library uses the dof - 2/3 approximation for the chi-square median.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(dof - (2.0 / 3.0), distribution.Median);
}
public void ValidateMinimum()
{
    // The chi-square distribution's support starts at zero.
    var distribution = new ChiSquare(1.0);
    Assert.AreEqual(0.0, distribution.Minimum);
}
public void CanCreateChiSquare(double dof)
{
    // Construction must store the degrees of freedom unchanged.
    var distribution = new ChiSquare(dof);
    Assert.AreEqual(dof, distribution.DegreesOfFreedom);
}
/// <summary>
/// Chi^2 independence test for categorical variables, var1 and var2
///
/// The hypotheses are:
/// H_0 : variable 1 is independent of variable 2
/// H_A : variable 1 and variable 2 are dependent
///
/// p-value = P(observed or more extreme events that favors H_A | H_0)
///
/// Now assuming H_0 is true, that is, the var1 and var2 are independent,
/// This implies the distribution of each level of var1 in each level of var2 should be the same
/// In other words, the expected distribution of each level of var1 in each level of var2 is given by distributionInEachLevel_var1
/// Now we can build a new contingency table containing the expected count corresponding to each level of both var1 and var2
///
/// Reject H_0 if p-value &lt; alpha
/// </summary>
/// <param name="contingency_table">The contingency table in which each cell contains the counts of records in the sample data that matches the row (i.e. a var1 level) and col (i.e. a var2 level)</param>
/// <param name="pValue">p-value = P(observed or more extreme events that favors H_A | H_0)</param>
/// <param name="signficance_level">alpha</param>
/// <returns>True if H_0 is rejected; False if H_0 is failed to be rejected</returns>
public bool RejectH0(int[][] contingency_table, out double pValue, double signficance_level = 0.05)
{
    int countOfLevels_var1 = contingency_table.Length;
    int countOfLevels_var2 = contingency_table[0].Length;
    int sampleSize = 0;

    // Row totals (marginal counts for var1) and overall sample size.
    int[] countInEachLevel_var1 = new int[countOfLevels_var1];
    for (int row = 0; row < countOfLevels_var1; ++row)
    {
        int countInLevel = 0;
        for (int col = 0; col < countOfLevels_var2; ++col)
        {
            countInLevel += contingency_table[row][col];
        }
        countInEachLevel_var1[row] = countInLevel;
        sampleSize += countInLevel;
    }

    // Marginal distribution of var1 (row proportions).
    double[] distributionInEachLevel_var1 = new double[countOfLevels_var1];
    for (int row = 0; row < countOfLevels_var1; ++row)
    {
        distributionInEachLevel_var1[row] = (double)countInEachLevel_var1[row] / sampleSize;
    }

    // Column totals (marginal counts for var2).
    int[] countInEachLevel_var2 = new int[countOfLevels_var2];
    for (int col = 0; col < countOfLevels_var2; ++col)
    {
        int countInLevel = 0;
        for (int row = 0; row < countOfLevels_var1; ++row)
        {
            countInLevel += contingency_table[row][col];
        }
        countInEachLevel_var2[col] = countInLevel;
    }

    //Now assuming H_0 is true, that is, the var1 and var2 are independent,
    //This implies the distribution of each level of var1 in each level of var2 should be the same
    //In other words, the expected distribution of each level of var1 in each level of var2 is given by distributionInEachLevel_var1
    //Now we can build a new contingency table containing the expected count corresponding to each level of both var1 and var2
    double[][] expected_contingency_table = new double[countOfLevels_var1][];
    for (int row = 0; row < countOfLevels_var1; ++row)
    {
        expected_contingency_table[row] = new double[countOfLevels_var2];
        for (int col = 0; col < countOfLevels_var2; ++col)
        {
            // Expected cell count = column total * row proportion
            // (equivalently rowTotal * colTotal / sampleSize).
            expected_contingency_table[row][col] = countInEachLevel_var2[col] * distributionInEachLevel_var1[row];
        }
    }

    // Pearson statistic: sum over cells of (observed - expected)^2 / expected.
    double ChiSq = 0;
    for (int row = 0; row < countOfLevels_var1; ++row)
    {
        for (int col = 0; col < countOfLevels_var2; ++col)
        {
            ChiSq += System.Math.Pow(contingency_table[row][col] - expected_contingency_table[row][col], 2) / expected_contingency_table[row][col];
        }
    }

    // df = (rows - 1) * (cols - 1) for an independence test.
    int df = (countOfLevels_var1 - 1) * (countOfLevels_var2 - 1);
    pValue = 1 - ChiSquare.GetPercentile(ChiSq, df);
    return (pValue < signficance_level);
}
public void ChiSquareCreateFailsWithBadParameters(double dof)
{
    // Construction alone should trigger the failure; the expected exception is
    // presumably captured by a test-framework attribute outside this view.
    var distribution = new ChiSquare(dof);
}