/// <summary>
///   Creates a new Cox Proportional-Hazards Model.
/// </summary>
///
/// <param name="inputs">The number of input variables for the model.</param>
/// <param name="baseline">The initial baseline hazard distribution.</param>
///
public ProportionalHazards(int inputs, IUnivariateDistribution baseline)
{
    BaselineHazard = baseline;

    // One slot per input variable in each of the model's vectors.
    Offsets = new double[inputs];
    Coefficients = new double[inputs];
    StandardErrors = new double[inputs];
}
/// <summary>
///   Creates a new Anderson-Darling test.
/// </summary>
///
/// <param name="sample">The sample we would like to test as belonging to the <paramref name="hypothesizedDistribution"/>.</param>
/// <param name="hypothesizedDistribution">A fully specified distribution.</param>
///
public AndersonDarlingTest(double[] sample, IUnivariateDistribution<double> hypothesizedDistribution)
{
    this.TheoreticalDistribution = hypothesizedDistribution;

    // The statistic's null distribution is only known for the uniform and normal cases;
    // for anything else the statistic is still computed, but no p-value is available.
    if (hypothesizedDistribution is UniformContinuousDistribution)
    {
        StatisticDistribution = new AndersonDarlingDistribution(AndersonDarlingDistributionType.Uniform, sample.Length);
    }
    else if (hypothesizedDistribution is NormalDistribution)
    {
        StatisticDistribution = new AndersonDarlingDistribution(AndersonDarlingDistributionType.Normal, sample.Length);
    }
    else
    {
        // BUG FIX: the original format string only referenced {0}, so the second
        // argument (the distribution's type name) was silently discarded.
        Trace.WriteLine(String.Format(
            "Unsupported distribution in AndersonDarling: {0} ({1}). P-values will not be computed, but test statistic may be useful.",
            hypothesizedDistribution.ToString(), hypothesizedDistribution.GetType().ToString()));
    }

    // Create a copy of the samples to prevent altering the
    // constructor's original arguments in the sorting step.
    double[] sortedSamples = sample.Sorted();

    // Compute the A² statistic and, when supported, its p-value.
    // (The original code assigned TheoreticalDistribution a second time here; removed.)
    this.Statistic = GetStatistic(sortedSamples, TheoreticalDistribution);
    this.PValue = StatisticToPValue(Statistic);
}
/// <summary>
///   Builds an ordinary least-squares regression over the given data,
///   optionally prepending an intercept column to the design matrix.
/// </summary>
public Regression(Vector<double> dependent, Matrix<double> explanatory, bool addConstant, bool getBetaHatOnly)
{
    stdNormal = new Normal();
    this.dependent = dependent;

    if (!addConstant)
    {
        // No intercept requested: use the design matrix exactly as given.
        augmentedExplanatory = explanatory;
    }
    else
    {
        // Prepend a column of ones so the first coefficient acts as the intercept.
        augmentedExplanatory = Matrix<double>.Build.Dense(explanatory.RowCount, explanatory.ColumnCount + 1);
        for (int row = 0; row < explanatory.RowCount; ++row)
        {
            augmentedExplanatory[row, 0] = 1.0;
            for (int col = 0; col < explanatory.ColumnCount; ++col)
            {
                augmentedExplanatory[row, col + 1] = explanatory[row, col];
            }
        }
    }

    Recompute(getBetaHatOnly);
}
// Replaces the wrapped distribution instance and refreshes all derived
// state (support bounds, measures, PDF delegate, and property views).
private void update(IUnivariateDistribution instance, bool estimating)
{
    Instance = instance;
    SupportMax = Instance.Support.Max;
    SupportMin = Instance.Support.Min;

    Measures.Update(instance);
    DensityFunction = Measures.CreatePDF();

    foreach (var property in Properties)
        property.Update();

    if (estimating)
    {
        foreach (var parameter in Parameters)
        {
            // Detach the handler while syncing so the sync itself does not
            // trigger another round of parameter-changed updates.
            parameter.ValueChanged -= distribution_OnParameterChanged;
            parameter.Sync();
            parameter.ValueChanged += distribution_OnParameterChanged;
        }
    }

    // Only raise the event once the view-model has finished initializing.
    if (IsInitialized && Updated != null)
        Updated(this, EventArgs.Empty);
}
// Wraps a distribution in a PSObject and attaches convenience script methods:
// a common set for all distributions, plus a discrete- or continuous-specific set.
public static PSObject AddConvinienceMethods(IUnivariateDistribution dist)
{
    var wrapped = new PSObject(dist);

    // Methods shared by every distribution type.
    foreach (var alias in Aliases)
        wrapped.Methods.Add(new PSScriptMethod(alias[0], ScriptBlock.Create(alias[1])));

    if (dist is UnivariateDiscreteDistribution)
    {
        foreach (var alias in DiscreteAliases)
            wrapped.Methods.Add(new PSScriptMethod(alias[0], ScriptBlock.Create(alias[1])));
    }
    else
    {
        foreach (var alias in ContinuousAliases)
            wrapped.Methods.Add(new PSScriptMethod(alias[0], ScriptBlock.Create(alias[1])));
    }

    return wrapped;
}
/// <summary>
///   Creates a new instance of the distribution using the given arguments.
/// </summary>
///
/// <param name="arguments">The arguments to be passed to the distribution's constructor.</param>
///
public IUnivariateDistribution CreateInstance(Dictionary<DistributionParameterInfo, object> arguments)
{
    // Marshal each supplied value into the positional slot its parameter expects.
    var ctorArgs = new object[arguments.Count];

    foreach (KeyValuePair<DistributionParameterInfo, object> entry in arguments)
    {
        Type targetType = entry.Key.ParameterInfo.ParameterType;

        if (targetType.IsEnum)
        {
            // Enums cannot be converted directly: go through their integer value.
            int numeric = (int)Convert.ChangeType(entry.Value, typeof(int));
            ctorArgs[entry.Key.Position] = Enum.ToObject(targetType, numeric);
        }
        else
        {
            ctorArgs[entry.Key.Position] = Convert.ChangeType(entry.Value, targetType);
        }
    }

    return (IUnivariateDistribution)Activator.CreateInstance(this.DistributionType, ctorArgs);
}
/// <summary>
///   Creates a new Cox Proportional-Hazards Model.
/// </summary>
///
/// <param name="inputs">The number of input variables for the model.</param>
/// <param name="baseline">The initial baseline hazard distribution. Default is the <see cref="EmpiricalHazardDistribution"/>.</param>
///
public ProportionalHazards(int inputs, IUnivariateDistribution baseline)
{
    BaselineHazard = baseline;

    Coefficients = new double[inputs];
    StandardErrors = new double[inputs];

    // Offsets is marked obsolete but is still allocated for backward compatibility.
#pragma warning disable 612, 618
    Offsets = new double[inputs];
#pragma warning restore 612, 618
}
// Verifies that the Kolmogorov-Smirnov test keeps a reference to the very
// distribution instance it was constructed with.
public void TheoreticalDistributionTest()
{
    // Arrange: a small sample and a fully specified hypothesized distribution.
    double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
    UnivariateContinuousDistribution distribution = NormalDistribution.Standard;

    // Act
    var target = new KolmogorovSmirnovTest(sample, distribution);
    IUnivariateDistribution actual = target.TheoreticalDistribution;

    // Assert
    Assert.AreEqual(distribution, actual);
}
// Returns the two-tailed probability of observing a value at least as
// extreme as <paramref name="value"/> under the given distribution:
// twice the smaller of CDF(value) and its complement.
public static double TwoTailProbability(this IUnivariateDistribution nd, double value)
{
    double cdf = nd.CumulativeDistribution(value);
    return 2.0 * Math.Min(cdf, 1.0 - cdf);
}
// Defines a stellar class (with luminosity statistics) and registers it
// in the shared CLASSES lookup.
private StarClass(string edname, string name, string chromaticity, decimal percentage,
                  IUnivariateDistribution massdistribution,
                  IUnivariateDistribution radiusdistribution,
                  IUnivariateDistribution luminositydistribution)
{
    // Identity
    this.edname = edname;
    this.name = name;
    this.chromaticity = chromaticity;

    // Population statistics
    this.percentage = percentage;
    this.massdistribution = massdistribution;
    this.radiusdistribution = radiusdistribution;
    this.luminositydistribution = luminositydistribution;

    // Register this class in the global registry.
    CLASSES.Add(this);
}
/// <summary>
///   Initializes a new instance of the <see cref="DistributionAnalysis"/> class.
/// </summary>
///
/// <param name="observations">The observations to be fitted against candidate distributions.</param>
///
public DistributionAnalysis(double[] observations)
{
    this.data = observations;

    // Default candidate set: common continuous families plus Poisson.
    Distributions = new IUnivariateDistribution[]
    {
        new NormalDistribution(),
        new UniformContinuousDistribution(),
        new GammaDistribution(),
        new GumbelDistribution(),
        new PoissonDistribution(),
    };
}
// Replaces the wrapped distribution instance and rebuilds every cached
// function delegate (PDF, CDF, hazard functions, quantiles, ...) from it.
private void update(IUnivariateDistribution instance)
{
    this.instance = instance;

    // Recompute the evaluation/plotting range for the new instance.
    this.updateRange();

    // Rebuild each cached delegate so callers always see the new instance.
    this.DensityFunction = CreatePDF();
    this.DistributionFunction = CreateCDF();
    this.ComplementaryDistributionFunction = CreateCCDF();
    this.CumulativeHazardFunction = CreateCHF();
    this.HazardFunction = CreateHF();
    this.InverseDistributionFunction = CreateICDF();
    this.LogDensityFunction = CreateLPDF();
    this.QuantileDensityFunction = CreateIPDF();
}
// Defines a stellar class (with temperature and age statistics) and
// registers it in the shared CLASSES lookup.
private StarClass(string edname, string name, string chromaticity, decimal percentage,
                  IUnivariateDistribution massdistribution,
                  IUnivariateDistribution radiusdistribution,
                  IUnivariateDistribution tempdistribution,
                  IUnivariateDistribution agedistribution)
{
    // Identity
    this.edname = edname;
    this.name = name;
    this.chromaticity = chromaticity;

    // Population statistics
    this.percentage = percentage;
    this.massdistribution = massdistribution;
    this.radiusdistribution = radiusdistribution;
    this.tempdistribution = tempdistribution;
    this.agedistribution = agedistribution;

    // Register this class in the global registry.
    CLASSES.Add(this);
}
// Defines a stellar class using a strongly-typed Chromaticity and
// registers it in the shared CLASSES lookup.
private StarClass(string edname, string name, Chromaticity chromaticity, decimal percentage,
                  IUnivariateDistribution massdistribution,
                  IUnivariateDistribution radiusdistribution,
                  IUnivariateDistribution tempdistribution,
                  IUnivariateDistribution agedistribution)
{
    // Identity
    this.edname = edname;
    this.name = name;
    this.chromaticity = chromaticity;

    // Population statistics
    this.percentage = percentage;
    this.massdistribution = massdistribution;
    this.radiusdistribution = radiusdistribution;
    this.tempdistribution = tempdistribution;
    this.agedistribution = agedistribution;

    // Register this class in the global registry.
    CLASSES.Add(this);
}
// Defines a planet class together with the statistical distributions of
// its physical and orbital properties. All distributions are optional.
private PlanetClass(string edname, decimal? percentage = null,
                    IUnivariateDistribution gravitydistribution = null,
                    IUnivariateDistribution massdistribution = null,
                    IUnivariateDistribution radiusdistribution = null,
                    IUnivariateDistribution tempdistribution = null,
                    IUnivariateDistribution pressuredistribution = null,
                    IUnivariateDistribution orbitalperioddistribution = null,
                    IUnivariateDistribution semimajoraxisdistribution = null,
                    IUnivariateDistribution eccentricitydistribution = null,
                    IUnivariateDistribution inclinationdistribution = null,
                    IUnivariateDistribution periapsisdistribution = null,
                    IUnivariateDistribution rotationalperioddistribution = null,
                    IUnivariateDistribution tiltdistribution = null,
                    IUnivariateDistribution densitydistribution = null) : base(edname, edname)
{
    this.percentage = percentage;

    // Physical properties
    this.gravitydistribution = gravitydistribution;
    this.massdistribution = massdistribution;
    this.radiusdistribution = radiusdistribution;
    this.tempdistribution = tempdistribution;
    this.pressuredistribution = pressuredistribution;
    this.densitydistribution = densitydistribution;

    // Orbital properties
    this.orbitalperioddistribution = orbitalperioddistribution;
    this.semimajoraxisdistribution = semimajoraxisdistribution;
    this.eccentricitydistribution = eccentricitydistribution;
    this.inclinationdistribution = inclinationdistribution;
    this.periapsisdistribution = periapsisdistribution;
    this.rotationalperioddistribution = rotationalperioddistribution;
    this.tiltdistribution = tiltdistribution;
}
/// <summary>
///   Gets the Anderson-Darling statistic for the samples and target distribution.
/// </summary>
///
/// <param name="sortedSamples">The sorted samples.</param>
/// <param name="distribution">The target distribution.</param>
///
/// <returns>The A² test statistic.</returns>
///
public static double GetStatistic(double[] sortedSamples, IUnivariateDistribution<double> distribution)
{
    // FIX: the original declared an unused local `double N` shadowing `n`,
    // and recomputed sortedSamples.Length in the loop condition.
    int n = sortedSamples.Length;
    double S = 0;

    // A² = -n - (1/n) Σ (2i - 1) [ln F(Y_i) + ln(1 - F(Y_{n-i+1}))]
    for (int i = 0; i < n; i++)
    {
        double a = 2.0 * (i + 1) - 1;
        double b = distribution.DistributionFunction(sortedSamples[i]);
        double c = distribution.ComplementaryDistributionFunction(sortedSamples[n - i - 1]);
        S += a * (Math.Log(b) + Math.Log(c));
    }

    return -n - S / n;
}
/// <summary>
///   Creates a new Anderson-Darling test.
/// </summary>
///
/// <param name="sample">The sample we would like to test as belonging to the <paramref name="hypothesizedDistribution"/>.</param>
/// <param name="hypothesizedDistribution">A fully specified distribution.</param>
///
public AndersonDarlingTest(double[] sample, IUnivariateDistribution hypothesizedDistribution)
{
    // FIX: removed an unused local `double N` and a duplicate assignment
    // of TheoreticalDistribution present in the original.
    this.TheoreticalDistribution = hypothesizedDistribution;

    // The statistic's null distribution is only known for the uniform and
    // normal cases; otherwise only the raw statistic is computed.
    if (hypothesizedDistribution is UniformContinuousDistribution)
    {
        StatisticDistribution = new AndersonDarlingDistribution(AndersonDarlingDistributionType.Uniform, sample.Length);
    }
    else if (hypothesizedDistribution is NormalDistribution)
    {
        StatisticDistribution = new AndersonDarlingDistribution(AndersonDarlingDistributionType.Normal, sample.Length);
    }

    // Create a copy of the samples to prevent altering the
    // constructor's original arguments in the sorting step.
    double[] Y = (double[])sample.Clone();
    Array.Sort(Y);

    int n = Y.Length;
    double S = 0;

    // A² = -n - (1/n) Σ (2i - 1) [ln F(Y_i) + ln(1 - F(Y_{n-i+1}))]
    for (int i = 0; i < n; i++)
    {
        double a = 2.0 * (i + 1) - 1;
        double b = TheoreticalDistribution.DistributionFunction(Y[i]);
        double c = TheoreticalDistribution.ComplementaryDistributionFunction(Y[n - i - 1]);
        S += a * (Math.Log(b) + Math.Log(c));
    }

    this.Statistic = -n - S / n;
    this.PValue = StatisticToPValue(Statistic);
}
/// <summary>
///   Builds a weighted least-squares regression: both the response and the
///   regressors are scaled by the square root of each observation's weight,
///   after which ordinary least squares applies.
/// </summary>
public Regression(Vector<double> dependent, Matrix<double> explanatory, Vector<double> weights, bool addConstant, bool getBetaHatOnly)
{
    stdNormal = new Normal();

    // Scale the response by sqrt(weight).
    this.dependent = Vector<double>.Build.Dense(dependent.Count);
    for (var row = 0; row < dependent.Count; ++row)
    {
        this.dependent[row] = dependent[row] * Math.Sqrt(weights[row]);
    }

    var shift = addConstant ? 1 : 0;
    augmentedExplanatory = Matrix<double>.Build.Dense(explanatory.RowCount, explanatory.ColumnCount + shift);

    for (var row = 0; row < explanatory.RowCount; ++row)
    {
        // Hoisted: the same sqrt applies to every column of this row.
        var w = Math.Sqrt(weights[row]);

        if (addConstant)
        {
            // Weighted intercept column (1.0 * sqrt(weight)).
            augmentedExplanatory[row, 0] = w;
        }

        for (var col = 0; col < explanatory.ColumnCount; ++col)
        {
            augmentedExplanatory[row, col + shift] = explanatory[row, col] * w;
        }
    }

    Recompute(getBetaHatOnly);
}
/// <summary>
///   Creates a new distribution instance using the current selected
///   values for this constructor's parameters. If there is any problem
///   creating the object, this method returns null.
/// </summary>
///
public IUnivariateDistribution Activate()
{
    try
    {
        // Convert each view-model value into the type its constructor slot expects.
        var ctorArgs = new object[Parameters.Count];
        foreach (ParameterViewModel vm in Parameters)
        {
            ctorArgs[vm.Parameter.Position] = Convert.ChangeType(vm.Value, vm.Parameter.ParameterType);
        }

        return (IUnivariateDistribution)Activator.CreateInstance(Owner.Type, ctorArgs);
    }
    catch (Exception ex)
    {
        // Activation is best-effort: log the failure and signal it with null.
        System.Diagnostics.Trace.WriteLine(ex.ToString());
        return null;
    }
}
// Defines a stellar class with its full statistical profile; the
// chromaticity is resolved from its EDName. Registers the class in
// the shared CLASSES lookup.
private StarClass(string edname, string name, string chromaticityEdName, decimal? percentage = null,
                  IUnivariateDistribution absolutemagnitudedistribution = null,
                  IUnivariateDistribution massdistribution = null,
                  IUnivariateDistribution radiusdistribution = null,
                  IUnivariateDistribution tempdistribution = null,
                  IUnivariateDistribution agedistribution = null,
                  IUnivariateDistribution orbitalperioddistribution = null,
                  IUnivariateDistribution semimajoraxisdistribution = null,
                  IUnivariateDistribution eccentricitydistribution = null,
                  IUnivariateDistribution inclinationdistribution = null,
                  IUnivariateDistribution periapsisdistribution = null,
                  IUnivariateDistribution rotationalperioddistribution = null,
                  IUnivariateDistribution tiltdistribution = null,
                  IUnivariateDistribution densitydistribution = null)
{
    // Identity
    this.edname = edname;
    this.name = name;
    this.chromaticity = Chromaticity.FromEDName(chromaticityEdName);
    this.percentage = percentage;

    // Stellar properties
    this.absolutemagnitudedistribution = absolutemagnitudedistribution;
    this.massdistribution = massdistribution;
    this.radiusdistribution = radiusdistribution;
    this.tempdistribution = tempdistribution;
    this.agedistribution = agedistribution;
    this.densitydistribution = densitydistribution;

    // Orbital properties
    this.orbitalperioddistribution = orbitalperioddistribution;
    this.semimajoraxisdistribution = semimajoraxisdistribution;
    this.eccentricitydistribution = eccentricitydistribution;
    this.inclinationdistribution = inclinationdistribution;
    this.periapsisdistribution = periapsisdistribution;
    this.rotationalperioddistribution = rotationalperioddistribution;
    this.tiltdistribution = tiltdistribution;

    // Register this class in the global registry.
    CLASSES.Add(this);
}
/// <summary>
/// Vapnik Chervonenkis test.
/// </summary>
/// <param name="epsilon">The error we are willing to tolerate.</param>
/// <param name="delta">The error probability we are willing to tolerate.</param>
/// <param name="s">The samples to use for testing.</param>
/// <param name="dist">The distribution we are testing.</param>
public static void VapnikChervonenkisTest(double epsilon, double delta, IEnumerable<double> s, IUnivariateDistribution dist)
{
    // Using VC-dimension, we can bound the probability of making an error when estimating
    // empirical probability distributions. We are using Theorem 2.41 in
    // "All Of Nonparametric Statistics":
    // http://books.google.com/books?id=MRFlzQfRg7UC&lpg=PP1&dq=all%20of%20nonparametric%20statistics&pg=PA22#v=onepage&q=%22shatter%20coe%EF%AC%83cients%20do%20not%22&f=false
    // For intervals on the real line the VC-dimension is 2.
    double n = s.Count();

    // The sample must be large enough for the bound to hold at the requested (epsilon, delta).
    Assert.Greater(n, Math.Ceiling(32.0 * Math.Log(16.0 / delta) / epsilon / epsilon));

    var histogram = new Histogram(s, NumberOfBuckets);
    for (var i = 0; i < NumberOfBuckets; i++)
    {
        // Theoretical probability mass of the bucket...
        var p = dist.CumulativeDistribution(histogram[i].UpperBound) - dist.CumulativeDistribution(histogram[i].LowerBound);
        // ...versus its empirical frequency.
        var pe = histogram[i].Count / n;
        Assert.Less(Math.Abs(p - pe), epsilon, dist.ToString());
    }
}
/// <summary>
///   Creates a new One-Sample Kolmogorov test.
/// </summary>
///
/// <param name="sample">The sample we would like to test as belonging to the <paramref name="hypothesizedDistribution"/>.</param>
/// <param name="hypothesizedDistribution">A fully specified distribution (which must NOT have been estimated from the data).</param>
/// <param name="alternate">The alternative hypothesis (research hypothesis) to test.</param>
///
public KolmogorovSmirnovTest(double[] sample, IUnivariateDistribution hypothesizedDistribution,
    KolmogorovSmirnovTestHypothesis alternate = KolmogorovSmirnovTestHypothesis.SampleIsDifferent)
{
    this.Hypothesis = alternate;

    // N is kept as a double so the i / N divisions below are floating-point.
    double N = sample.Length;

    // Create the test statistic distribution with given degrees of freedom
    StatisticDistribution = new KolmogorovSmirnovDistribution(sample.Length);

    // Create a copy of the samples to prevent altering the
    // constructor's original arguments in the sorting step
    double[] Y = (double[])sample.Clone();
    double[] D = new double[sample.Length];

    // Sort sample
    Array.Sort(Y);

    // Create the theoretical and empirical distributions
    this.TheoreticalDistribution = hypothesizedDistribution;
    this.EmpiricalDistribution = new EmpiricalDistribution(Y, smoothing: 0);

    Func<double, double> F = TheoreticalDistribution.DistributionFunction;

    // Finally, compute the test statistic and perform actual testing.
    if (alternate == KolmogorovSmirnovTestHypothesis.SampleIsDifferent)
    {
        // Test if the sample's distribution is just significantly
        // "different" than the given theoretical distribution.

        // This is a correction on the common formulation found in many places
        // such as in Wikipedia. Please see the Engineering Statistics Handbook,
        // section "1.3.5.16. Kolmogorov-Smirnov Goodness-of-Fit Test" for more
        // details: http://www.itl.nist.gov/div898/handbook/eda/section3/eda35g.htm

        for (int i = 0; i < sample.Length; i++)
        {
            D[i] = Math.Max(Math.Abs(F(Y[i]) - i / N), Math.Abs((i + 1) / N - F(Y[i])));
        }

        base.Statistic = D.Max(); // This is the two-sided "Dn" statistic.
        base.PValue = StatisticDistribution.ComplementaryDistributionFunction(Statistic);
        base.Tail = Testing.DistributionTail.TwoTail;
    }
    else if (alternate == KolmogorovSmirnovTestHypothesis.SampleIsGreater)
    {
        // Test if the sample's distribution is "larger" than the
        // given theoretical distribution, in a statistical sense.

        for (int i = 0; i < sample.Length; i++)
        {
            D[i] = Math.Max(i / N - F(Y[i]), (i + 1) / N - F(Y[i]));
        }

        base.Statistic = D.Max(); // This is the one-sided "Dn+" statistic.
        base.PValue = StatisticDistribution.OneSideDistributionFunction(Statistic);
        base.Tail = Testing.DistributionTail.OneUpper;
    }
    else
    {
        // Test if the sample's distribution is "smaller" than the
        // given theoretical distribution, in a statistical sense.

        for (int i = 0; i < sample.Length; i++)
        {
            D[i] = Math.Max(F(Y[i]) - i / N, F(Y[i]) - (i + 1) / N);
        }

        base.Statistic = D.Max(); // This is the one-sided "Dn-" statistic.
        base.PValue = StatisticDistribution.OneSideDistributionFunction(Statistic);
        base.Tail = Testing.DistributionTail.OneLower;
    }
}
/// <summary> Provide the cumulative probability that a value will be equal to or lower than that supplied </summary>
public static decimal? CumulativeP(IUnivariateDistribution distribution, decimal? val)
{
    // Propagate null when either input is missing.
    if (val == null || distribution == null)
    {
        return null;
    }

    return sanitiseCumulativeP((decimal?)distribution.CumulativeDistribution((double)val));
}
/// <summary>
///   Updates the currently active distribution instance used by the
///   application, refreshing all state derived from it.
/// </summary>
///
public void Update(IUnivariateDistribution instance)
{
    // Delegate to the private updater, which rebuilds the cached state.
    update(instance);
}
// Captures the collaborators needed to evaluate the put's integral.
public PutIntegralFunction(IIntegralPoints integralPoints, IUnivariateDistribution distribution)
{
    _distribution = distribution;
    _integralPoints = integralPoints;
}
// Captures the collaborators needed for the B(t) computation.
public BtCalculator(IIntegralPoints integralPoints, IBtIntegralFunction btIntegralFunction, IUnivariateDistribution distribution)
{
    _distribution = distribution;
    _btIntegralFunction = btIntegralFunction;
    _integralPoints = integralPoints;
}
/// <summary>
/// Vapnik Chervonenkis test.
/// </summary>
/// <param name="epsilon">The error we are willing to tolerate.</param>
/// <param name="delta">The error probability we are willing to tolerate.</param>
/// <param name="s">The samples to use for testing.</param>
/// <param name="dist">The distribution we are testing.</param>
public static void VapnikChervonenkisTest(double epsilon, double delta, IEnumerable<double> s, IUnivariateDistribution dist)
{
    // VC theory (Theorem 2.41, "All Of Nonparametric Statistics") bounds the error
    // of empirical distribution estimates; for intervals on the real line the
    // VC-dimension is 2.
    double sampleCount = s.Count();

    // Require the sample to be large enough for the (epsilon, delta) bound to apply.
    Assert.Greater(sampleCount, Math.Ceiling(32.0 * Math.Log(16.0 / delta) / epsilon / epsilon));

    var histogram = new Histogram(s, NumberOfBuckets);
    for (var bucket = 0; bucket < NumberOfBuckets; bucket++)
    {
        var lower = histogram[bucket].LowerBound;
        var upper = histogram[bucket].UpperBound;

        // Theoretical mass of the bucket versus its empirical frequency.
        var expected = dist.CumulativeDistribution(upper) - dist.CumulativeDistribution(lower);
        var observed = histogram[bucket].Count / sampleCount;

        Assert.Less(Math.Abs(expected - observed), epsilon, dist.ToString());
    }
}
/// <summary>
///   Constructs a Chi-Square Test comparing the empirical distribution of
///   <paramref name="observations"/> against a hypothesized distribution.
/// </summary>
///
public ChiSquareTest(double[] observations, IUnivariateDistribution hypothesizedDistribution)
{
    int n = observations.Length;
    var E = new EmpiricalDistribution(observations);
    var F = hypothesizedDistribution;

    // Create bins with the observations: bin edges are placed at equally
    // spaced quantiles of the hypothesized distribution.
    int bins = (int)Math.Ceiling(1 + Math.Log(observations.Length));
    double[] ebins = new double[bins + 1];
    for (int i = 0; i <= bins; i++)
    {
        double p = i / (double)bins;
        ebins[i] = F.InverseDistributionFunction(p);
    }

    // Expected counts per bin; bins expecting fewer than 5 samples are merged
    // with the next bin (the classic chi-square validity requirement).
    double[] expected = new double[bins - 1];
    int size = expected.Length;
    for (int i = 0; i < expected.Length; i++)
    {
        double a = ebins[i];
        double b = ebins[i + 1];

        // Stop once the upper edge is at the distribution's infinite tail.
        if (Double.IsPositiveInfinity(b))
        {
            break;
        }

        double Fa = F.DistributionFunction(a);
        double Fb = F.DistributionFunction(b);
        double samples = Math.Abs(Fb - Fa) * n;
        expected[i] = samples;

        if (samples < 5)
        {
            // Merge this under-populated bin: shift the remaining edges left
            // and re-evaluate the same index on the next iteration (i--).
            size = i + 1;
            for (int j = i + 1; j < ebins.Length - 1; j++)
            {
                ebins[j] = ebins[j + 1];
            }
            ebins[ebins.Length - 1] = Double.PositiveInfinity;
            i--;
        }
    }

    // Trim the edge/expected arrays down to the bins that survived merging.
    ebins = ebins.Submatrix(size + 2);
    expected = expected.Submatrix(ebins.Length - 2);

    // Observed counts come from the empirical distribution over the same bins.
    double[] observed = new double[expected.Length];
    for (int i = 0; i < observed.Length; i++)
    {
        double a = ebins[i];
        double b = ebins[i + 1];
        observed[i] = E.DistributionFunction(a, b) * n;
    }

    // Chi-square statistic with (bins - 1) degrees of freedom.
    double sum = compute(expected, observed);
    Compute(sum, bins - 1);
}
// Swaps in a new distribution instance and refreshes everything derived
// from it: support bounds, measures, the PDF delegate, and property views.
private void update(IUnivariateDistribution instance, bool estimating)
{
    this.Instance = instance;

    // Cache the support bounds of the newly-assigned instance.
    this.SupportMin = Instance.Support.Min;
    this.SupportMax = Instance.Support.Max;

    this.Measures.Update(instance);
    this.DensityFunction = this.Measures.CreatePDF();

    foreach (var property in Properties)
    {
        property.Update();
    }

    if (estimating)
    {
        foreach (var param in Parameters)
        {
            // Unsubscribe during Sync() so the sync cannot re-enter this update.
            param.ValueChanged -= distribution_OnParameterChanged;
            param.Sync();
            param.ValueChanged += distribution_OnParameterChanged;
        }
    }

    // Raise the event only after initialization has completed.
    if (IsInitialized && Updated != null)
    {
        Updated(this, EventArgs.Empty);
    }
}
/// <summary>
///   Creates a new One-Sample Kolmogorov test.
/// </summary>
///
/// <param name="sample">The sample we would like to test as belonging to the <paramref name="hypothesizedDistribution"/>.</param>
/// <param name="hypothesizedDistribution">A fully specified distribution (which must NOT have been estimated from the data).</param>
///
public KolmogorovSmirnovTest(double[] sample, IUnivariateDistribution hypothesizedDistribution)
    // Defaults to the two-sided alternative: the sample differs from the hypothesized distribution.
    : this(sample, hypothesizedDistribution, KolmogorovSmirnovTestHypothesis.SampleIsDifferent)
{
}
// Wraps a raw value together with the distribution used to interpret it.
public Statistic(IUnivariateDistribution distribution, double innerValue)
{
    this.innerValue = innerValue;
    Distribution = distribution;
}
/// <summary>
///   Constructs a new potential function modeling Hidden Markov Models.
/// </summary>
///
/// <param name="classifier">A hidden Markov sequence classifier.</param>
/// <param name="includePriors">True to include class features (priors), false otherwise.</param>
///
public MarkovMultivariateFunction(HiddenMarkovClassifier<Independent> classifier, bool includePriors = true)
{
    this.Outputs = classifier.Classes;

    // Global accumulators: all classes' parameters/features are concatenated
    // into one flat weight vector; factorIndex tracks the running offset.
    int factorIndex = 0;
    var factorParams = new List<double>();
    var factorFeatures = new List<IFeature<double[]>>();

    this.Factors = new FactorPotential<double[]>[Outputs];

    // Per-class offsets and counts into the flat vectors, plus per-class
    // lookup tables mapping (state, dimension) -> position in stateParams.
    int[] classOffset = new int[classifier.Classes];
    int[] edgeOffset = new int[classifier.Classes];
    int[] stateOffset = new int[classifier.Classes];

    int[] classCount = new int[classifier.Classes];
    int[] edgeCount = new int[classifier.Classes];
    int[] stateCount = new int[classifier.Classes];

    int[][][] lookupTables = new int[classifier.Classes][][];

    // Create features for initial class probabilities
    for (int c = 0; c < classifier.Classes; c++)
    {
        var stateParams = new List<double>();
        var stateFeatures = new List<IFeature<double[]>>();

        var edgeParams = new List<double>();
        var edgeFeatures = new List<IFeature<double[]>>();

        var classParams = new List<double>();
        var classFeatures = new List<IFeature<double[]>>();

        var model = classifier[c];

        int[][] lookupTable = new int[model.States][];
        for (int i = 0; i < lookupTable.Length; i++)
        {
            lookupTable[i] = new int[model.Dimension];
        }

        if (includePriors)
        {
            // Create features for class labels
            classParams.Add(Math.Log(classifier.Priors[c]));
            classFeatures.Add(new OutputFeature<double[]>(this, c, c));
        }

        // Create features for initial state probabilities
        for (int i = 0; i < model.States; i++)
        {
            edgeParams.Add(model.Probabilities[i]);
            edgeFeatures.Add(new InitialFeature<double[]>(this, c, i));
        }

        // Create features for state transition probabilities
        for (int i = 0; i < model.States; i++)
        {
            for (int j = 0; j < model.States; j++)
            {
                edgeParams.Add(model.Transitions[i, j]);
                edgeFeatures.Add(new TransitionFeature<double[]>(this, c, i, j));
            }
        }

        // Create features for emission probabilities. `position` is the index
        // into this class's stateParams block; lookupTable records where each
        // (state, dimension) emission's parameters begin.
        int position = 0;
        for (int i = 0; i < model.States; i++)
        {
            for (int d = 0; d < model.Emissions[i].Components.Length; d++)
            {
                IUnivariateDistribution distribution = model.Emissions[i].Components[d];

                NormalDistribution normal = distribution as NormalDistribution;
                if (normal != null)
                {
                    double var = normal.Variance;
                    double mean = normal.Mean;

                    // Gaussian emissions expand into three sufficient-statistic features:
                    // Occupancy
                    stateParams.Add(-0.5 * (Math.Log(2.0 * Math.PI * var) + (mean * mean) / var));
                    stateFeatures.Add(new OccupancyFeature<double[]>(this, c, i));
                    lookupTable[i][d] = position;
                    position++;

                    // 1st Moment (x)
                    stateParams.Add(mean / var);
                    stateFeatures.Add(new MultivariateFirstMomentFeature(this, c, i, d));
                    position++;

                    // 2nd Moment (x²)
                    stateParams.Add(-1.0 / (2.0 * var));
                    stateFeatures.Add(new MultivariateSecondMomentFeature(this, c, i, d));
                    position++;

                    continue;
                }

                var discrete = distribution as GeneralDiscreteDistribution;
                if (discrete != null)
                {
                    // Discrete emissions get one log-frequency feature per symbol.
                    lookupTable[i][d] = position;
                    for (int k = 0; k < discrete.Frequencies.Length; k++)
                    {
                        stateParams.Add(Math.Log(discrete.Frequencies[k]));
                        stateFeatures.Add(new MultivariateEmissionFeature(this, c, i, k, d));
                        position++;
                    }

                    continue;
                }

                // NOTE(review): any other component type is silently skipped here;
                // its lookupTable entry stays 0 — confirm this is intended.
            }
        }

        // Record where this class's blocks land in the flat vectors.
        classOffset[c] = factorIndex;
        edgeOffset[c] = factorIndex + classParams.Count;
        stateOffset[c] = factorIndex + classParams.Count + edgeParams.Count;

        classCount[c] = classParams.Count;
        edgeCount[c] = edgeParams.Count;
        stateCount[c] = stateParams.Count;

        lookupTables[c] = lookupTable;

        // Concatenate in the fixed order: 1. classes, 2. edges, 3. states —
        // this order must match the offsets computed above.
        // 1. classes
        factorFeatures.AddRange(classFeatures);
        factorParams.AddRange(classParams);

        // 2. edges
        factorFeatures.AddRange(edgeFeatures);
        factorParams.AddRange(edgeParams);

        // 3. states
        factorFeatures.AddRange(stateFeatures);
        factorParams.AddRange(stateParams);

        factorIndex += classParams.Count + stateParams.Count + edgeParams.Count;
    }

    // Sanity: the running offset must equal the total number of weights/features.
    System.Diagnostics.Debug.Assert(factorIndex == factorParams.Count);
    System.Diagnostics.Debug.Assert(factorIndex == factorFeatures.Count);

    this.Weights = factorParams.ToArray();
    this.Features = factorFeatures.ToArray();

    // Build one factor per class, pointing at its slices of the flat vectors.
    for (int c = 0; c < classifier.Models.Length; c++)
    {
        Factors[c] = new MarkovIndependentFactor(this, classifier.Models[c].States, c, lookupTables[c],
            classIndex: classOffset[c], classCount: classCount[c],   // 1. classes
            edgeIndex: edgeOffset[c], edgeCount: edgeCount[c],       // 2. edges
            stateIndex: stateOffset[c], stateCount: stateCount[c]);  // 3. states
    }
}
// Captures the collaborators used when pricing the European put.
public EuropeanPut(IIntegralPoints integralPoints, IUnivariateDistribution distribution)
{
    _distribution = distribution;
    _integralPoints = integralPoints;
}