/// <summary>
/// Computes the EState index for each atom in the molecule.
/// </summary>
/// <returns>Array of EState indices, one per atom.</returns>
private double[] CalculateEStateIndices()
{
    var numAtoms = this.Mol.NumAtoms();
    var Is = new double[numAtoms].Zero().ToArray();

    for (var i = 1; i <= numAtoms; i++)
    {
        var atom = this.Mol.GetAtom(i);
        var degree = atom.GetValence();
        if (degree > 0)
        {
            var dv = atom.NOuterShellElectrons() - atom.TotalNumHydrogens();
            var N = atom.PrincipleQuantumNumber();
            // 4.0 forces floating-point division; if PrincipleQuantumNumber()
            // returns an integer, the original "4 / (N * N)" truncates to
            // zero for any principal quantum number of 3 or more.
            Is[i - 1] = (4.0 / (N * N) * dv + 1) / (double)degree;
        }
    }

    var distances = this.DistanceMatrix.Add(1);
    var accum = new double[numAtoms].Zero().ToArray();
    for (var i = 0; i < numAtoms; i++)
    {
        for (var j = i + 1; j < numAtoms; j++)
        {
            var p = distances.Get(i, j);
            if (p < FloydWarshall.MaxValue)
            {
                var temp = (Is[i] - Is[j]) / (p * p);
                accum[i] += temp;
                accum[j] -= temp;
            }
        }
    }

    return Is.Add(accum).ToArray();
}
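// Every snippet in this collection leans on an Add extension over arrays or
// matrices whose implementation is not shown. A minimal sketch of the
// element-wise double[] overload used above (assumed semantics, not the
// actual library code; extension methods must live in a static class):
public static double[] Add(this double[] left, double[] right)
{
    if (left.Length != right.Length)
    {
        throw new ArgumentException("Vectors must have the same length.");
    }

    var result = new double[left.Length];
    for (int i = 0; i < left.Length; i++)
    {
        result[i] = left[i] + right[i];
    }
    return result;
}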
public void Matrix_StandardMatrixAdditionTest()
{
    var matrix1 = new double[3, 2]
    {
        { 1, 1 },
        { 2, 0 },
        { 0, 3 }
    };
    var matrix2 = new double[3, 2]
    {
        { 0, 1 },
        { 1, 0 },
        { 2, 3 }
    };
    var answerMatrix = new double[3, 2]
    {
        { 1, 2 },
        { 3, 0 },
        { 2, 6 }
    };

    var resultingMatrix = matrix1.Add(matrix2);

    resultingMatrix.ShouldEqualWithinTolerance(answerMatrix);
}
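// The test above exercises a rectangular-array overload; a sketch of the
// assumed element-wise double[,] Add it verifies:
public static double[,] Add(this double[,] left, double[,] right)
{
    int rows = left.GetLength(0), cols = left.GetLength(1);
    var result = new double[rows, cols];
    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            result[i, j] = left[i, j] + right[i, j];
        }
    }
    return result;
}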
public void MatrixAdd_Add_Vectors_Different_Lengths()
{
    var v1 = new double[] { 2, 3, 5, 10 };
    var v2 = new double[] { 1, 1 };

    // Adding vectors of different lengths; the test presumably expects this
    // call to throw (via an ExpectedException-style attribute not shown here).
    v1.Add(v2);
}
public static void GenerateValues(int howMany, double meanA, double meanB, double meanC, double[,] R)
{
    TestChol();
    var distribution = new NormalDistribution(0, Math.Sqrt(1));
    var mean = new double[]
    {
        distribution.InverseDistributionFunction(meanA),
        distribution.InverseDistributionFunction(meanB),
        distribution.InverseDistributionFunction(meanC)
    };
    var randomValues = distribution.Generate(howMany * 3);

    var aSamples = new double[howMany];
    var bSamples = new double[howMany];
    var cSamples = new double[howMany];

    int randomValueCount = 0;
    for (int i = 0; i < howMany; i++)
    {
        // Generate one sample: z ~ N(0, I), then y = mean + z * R.
        var z = new[]
        {
            randomValues[randomValueCount++],
            randomValues[randomValueCount++],
            randomValues[randomValueCount++]
        };
        var product = z.Dot(R);
        var y = mean.Add(product);
        var samples = new double[] { y[0] > 0 ? 1 : 0, y[1] > 0 ? 1 : 0, y[2] > 0 ? 1 : 0 };
        aSamples[i] = samples[0];
        bSamples[i] = samples[1];
        cSamples[i] = samples[2];
        Trace.WriteLine($"{aSamples[i]}, {bSamples[i]}, {cSamples[i]}");
    }

    var resultCorrAb = CalculateCorrelation(aSamples, bSamples);
    var resultCorrAc = CalculateCorrelation(aSamples, cSamples);
    var resultCorrBc = CalculateCorrelation(bSamples, cSamples);
}
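// CalculateCorrelation is not shown in the snippet above; a minimal Pearson
// correlation sketch that matches how it is called (an assumption about the
// original helper, not its actual code; requires System.Linq for Average):
private static double CalculateCorrelation(double[] xs, double[] ys)
{
    double meanX = xs.Average(), meanY = ys.Average();
    double cov = 0, varX = 0, varY = 0;
    for (int i = 0; i < xs.Length; i++)
    {
        cov  += (xs[i] - meanX) * (ys[i] - meanY);
        varX += (xs[i] - meanX) * (xs[i] - meanX);
        varY += (ys[i] - meanY) * (ys[i] - meanY);
    }
    return cov / Math.Sqrt(varX * varY);
}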
public static double[] OpenSimplex2DOctaves(
    int seed,
    int size,
    int offsetX,
    int offsetY,
    double scale,
    double initialFrequency,
    int octaves,
    int octaveOffsetScale,
    double amplitudePerOctave,
    double freqPerOctave)
{
    var random = new Random(seed);
    var data = new double[size * size];
    var currentAmplitude = 1d;
    var currentFrequency = initialFrequency;
    var maxNoise = 0d;
    var octaveOffsets = new Vector2[octaves];

    for (var octaveIndex = 0; octaveIndex < octaves; octaveIndex++)
    {
        octaveOffsets[octaveIndex] = new Vector2(
            random.Next(-octaveOffsetScale, octaveOffsetScale) + offsetX,
            random.Next(-octaveOffsetScale, octaveOffsetScale) + offsetY);

        // Note: data is not reassigned here, so this Add overload presumably
        // accumulates into the array in place.
        data.Add(OpenSimplex2D(
            seed,
            octaveOffsets[octaveIndex].X,
            octaveOffsets[octaveIndex].Y,
            size,
            size,
            scale,
            currentFrequency,
            currentAmplitude));

        maxNoise += currentAmplitude;
        currentAmplitude *= amplitudePerOctave;
        currentFrequency *= freqPerOctave;
    }

    // Normalize by the accumulated amplitude so the result stays in range.
    return data.Mul(1d / maxNoise);
}
public static double[] StochasticGradientAscent(double[,] Features, double[,] classLabels, int numIter = 1000)
{
    int m = Features.GetLength(0);
    int n = Features.GetLength(1);

    double[,] weights = new double[n, 1];
    for (int i = 0; i < n; i++)
    {
        weights[i, 0] = 1;
    }

    for (int i = 0; i < numIter; i++)
    {
        for (int j = 0; j < m; j++)
        {
            // Learning rate decays with the iteration and sample index.
            double alpha = 4 / (1.0 + i + j) + WALKSTEP;

            double h = 0.0;
            double[,] temp = new double[n, 1];
            for (int k = 0; k < n; k++)
            {
                h += Features[j, k] * weights[k, 0];
                temp[k, 0] = Features[j, k];
            }

            double error = classLabels[j, 0] - Sigmoid(h);
            weights = weights.Add(temp.ScalarMultiply(error * alpha));
        }
    }

    double[] result = new double[n];
    for (int i = 0; i < n; i++)
    {
        result[i] = weights[i, 0];
    }
    return result;
}
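// Sigmoid is not shown in this snippet (it also appears in the two related
// gradient-ascent methods further down); the standard logistic sigmoid
// matching how it is called here:
private static double Sigmoid(double x)
{
    return 1.0 / (1.0 + Math.Exp(-x));
}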
public void TrainByPseudoInverse(int[,] testData)
{
    testData = testData.Multiply(2).Subtract(1); // map to a matrix of -1 and 1
    int patternCount = testData.Rows();          // number of patterns
    neuronCount = testData.Columns();            // number of neurons per pattern

    double[,] W = new double[neuronCount, neuronCount]; // initialize the 64x64 weight matrix
    for (int row = 0; row < patternCount; row++)
    {
        double[,] x = testData.Get(row, row + 1, 0, neuronCount).Transpose().Convert(i => (double)i);
        var x1 = W.Dot(x).Subtract(x);
        var x1t = x1.Transpose();
        var licznik = x1.Dot(x1t); // numerator
        double mianownik = x.TransposeAndDot(x).Subtract(x.Transpose().Dot(W).Dot(x))[0, 0]; // denominator
        W = W.Add(licznik.Divide(mianownik));
    }

    weights = W;
    trained = true;
}
public static double[][] InvertU(this double[][] upper)
{
    if (upper is null)
    {
        throw new ArgumentNullException(nameof(upper));
    }

    double[][] identity = new double[upper.Length][];
    double[][] diagonalInverse = new double[upper.Length][];
    double[][] strictUpper = new double[upper.Length][];

    for (int i = 0; i < identity.Length; i++)
    {
        identity[i] = new double[upper[i].Length];
        diagonalInverse[i] = new double[upper[i].Length];
        strictUpper[i] = new double[upper[i].Length];
        for (int j = 0; j < identity[i].Length; j++)
        {
            if (i == j)
            {
                identity[i][j] = 1.0;
                diagonalInverse[i][j] = 1.0 / upper[i][j];
            }
            else if (i < j)
            {
                strictUpper[i][j] = upper[i][j];
            }
        }
    }

    return identity.Add(strictUpper).InvertLu().MatrixProduct(diagonalInverse);
}
public void AddShouldAddIfMaxIsNotReached()
{
    double add = 10;
    double value = 10;

    value.Add(add).Should().Be(20);
}
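// The test name suggests an Add extension on double that clamps at some
// maximum; a minimal sketch under that assumption (the max parameter and
// its default are hypothetical, not taken from the original library):
public static double Add(this double value, double addend, double max = double.MaxValue)
{
    var sum = value + addend;
    return sum > max ? max : sum;
}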
public static double[] StochasticGradientAscent(double[,] Features, double[,] classLabels)
{
    int m = Features.GetLength(0);
    int n = Features.GetLength(1);

    double[,] weights = new double[n, 1];
    for (int i = 0; i < n; i++)
    {
        weights[i, 0] = 1;
    }

    for (int i = 0; i < m; i++)
    {
        double h = 0.0;
        double[,] temp = new double[n, 1];
        for (int j = 0; j < n; j++)
        {
            h += Features[i, j] * weights[j, 0];
            temp[j, 0] = Features[i, j];
        }

        double error = classLabels[i, 0] - Sigmoid(h);
        weights = weights.Add(temp.ScalarMultiply(error * WALKSTEP));
    }

    double[] result = new double[n];
    for (int i = 0; i < n; i++)
    {
        result[i] = weights[i, 0];
    }
    return result;
}
public static double[] GradAscent(double[,] Features, double[,] classLabels)
{
    int m = Features.GetLength(0);
    int n = Features.GetLength(1);

    double[,] weights = new double[n, 1];
    for (int i = 0; i < n; i++)
    {
        weights[i, 0] = 1;
    }

    for (int i = 0; i < REPEATCOUNT; i++)
    {
        double[,] h = Sigmoid(Features.Multiply(weights));
        double[,] error = classLabels.Substract(h);
        weights = weights.Add(Features.Transpose().ScalarMultiply(WALKSTEP).Multiply(error));
        if (i > 9900)
        {
            Console.WriteLine(weights[0, 0] + "\t" + weights[1, 0] + "\t" + weights[2, 0]);
        }
    }

    double[] result = new double[n];
    for (int i = 0; i < n; i++)
    {
        result[i] = weights[i, 0];
    }
    return result;
}
private double[] CalculatePredictionValue<TDistribution>(TDistribution emission, double[][] trainingSet)
    where TDistribution : IDistribution
{
    var result = new double[trainingSet[0].Length];
    if (typeof(TDistribution).Name == "Mixture`1")
    {
        switch (typeof(TDistribution).GetGenericArguments()[0].FullName)
        {
            case "TA.Statistics.Distributions.IMultivariateDistribution":
                var e = emission as Mixture<IMultivariateDistribution>;
                for (var i = 0; i < e.Components.Length; i++)
                {
                    result = result.Add(e.Components[i].Mean.Product(e.Coefficients[i]));
                }
                // Note: this overwrites the coefficient-weighted sum computed
                // in the loop above.
                result = e.Mean;
                break;
        }
    }
    else
    {
        switch (typeof(TDistribution).FullName)
        {
            case "TA.Statistics.Distributions.IMultivariateDistribution":
                result = ((IMultivariateDistribution)emission).Mean;
                break;
        }
    }
    return result;
}
/// <summary>
/// Generates a normal-distribution noise signal of the specified Volts RMS.
/// </summary>
/// <param name="amplitudeVrms">RMS amplitude of the noise, in volts.</param>
/// <param name="points">Number of points to generate.</param>
/// <param name="dcV">DC offset, in volts.</param>
/// <returns>double[] array</returns>
public static double[] NoiseRms(double amplitudeVrms, UInt32 points, double dcV = 0.0)
{
    // Make an n-length noise vector, then apply the DC offset.
    double[] rval = Noise(points, amplitudeVrms);
    rval = rval.Add(dcV);
    return rval;
}
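// Here Add(double) broadcasts a scalar across the array (the DC offset); a
// minimal sketch of the extension this DSP snippet appears to rely on
// (assumed semantics, not the library's actual code):
public static double[] Add(this double[] samples, double offset)
{
    var result = new double[samples.Length];
    for (int i = 0; i < samples.Length; i++)
    {
        result[i] = samples[i] + offset;
    }
    return result;
}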
public double[][] Estimate(MuEstimationParameters<TDistribution> parameters)
{
    if (_muMultivariate != null)
    {
        return _muMultivariate;
    }

    try
    {
        _muMultivariate = new double[parameters.Model.N][];
        var K = parameters.Observations[0].Dimention; // number of dimensions
        var T = parameters.Observations.Count;
        for (var n = 0; n < parameters.Model.N; n++)
        {
            // Gamma-weighted mean of the observations for state n.
            var mean = new double[K];
            var nK = 0d;
            for (var t = 0; t < T; t++)
            {
                if (parameters.Model.Normalized)
                {
                    nK += LogExtention.eExp(parameters.Gamma[t][n]);
                    mean = mean.Add(parameters.Observations[t].Value.Product(LogExtention.eExp(parameters.Gamma[t][n])));
                }
                else
                {
                    nK += parameters.Gamma[t][n];
                    mean = mean.Add(parameters.Observations[t].Value.Product(parameters.Gamma[t][n]));
                }
            }

            _muMultivariate[n] = mean.Product(1 / nK);
            Debug.WriteLine(string.Format("HMM State {0} : Mu {1}", n, new Vector(_muMultivariate[n])));
        }
    }
    catch (Exception)
    {
        for (var n = 0; n < parameters.Model.N; n++)
        {
            Debug.WriteLine(string.Format("HMM State {0} : Mu {1}", n, new Vector(_muMultivariate[n])));
        }
        throw;
    }

    return _muMultivariate;
}
public double[][,] Estimate(SigmaEstimationParameters<TDistribution, double[][]> parameters)
{
    if (_sigmaMultivariate != null)
    {
        return _sigmaMultivariate;
    }

    try
    {
        _sigmaMultivariate = new double[parameters.Model.N][,];
        var K = parameters.Observations[0].Dimention;
        var T = parameters.Observations.Count;
        for (var n = 0; n < parameters.Model.N; n++)
        {
            var covariance = new double[K, K];
            var nK = 0d;
            for (var t = 0; t < T; t++)
            {
                var x = parameters.Observations[t].Value;
                var z = x.Substruct(parameters.Mean[n]);
                var m = z.OuterProduct(z);
                if (parameters.Model.Normalized)
                {
                    nK += LogExtention.eExp(parameters.Gamma[t][n]);
                    m = m.Product(LogExtention.eExp(parameters.Gamma[t][n]));
                }
                else
                {
                    nK += parameters.Gamma[t][n];
                    m = m.Product(parameters.Gamma[t][n]);
                }

                covariance = covariance.Add(m);
            }

            _sigmaMultivariate[n] = covariance.Product(1 / nK);

            var matrix = new Matrix(_sigmaMultivariate[n]);
            if (!matrix.PositiviDefinite)
            {
                _sigmaMultivariate[n] = matrix.ConvertToPositiveDefinite();
                Debug.WriteLine("HMM State {0} Sigma is not Positive Definite. Converting.", n);
                Debug.WriteLine("{0}", matrix);
            }

            Debug.WriteLine("HMM State {0} Sigma : {1}", n, new Matrix(_sigmaMultivariate[n]));
        }
    }
    catch (Exception)
    {
        for (var n = 0; n < parameters.Model.N; n++)
        {
            Debug.WriteLine("HMM State {0} Sigma : {1}", n, new Matrix(_sigmaMultivariate[n]));
        }
        throw;
    }

    return _sigmaMultivariate;
}
public void ArrayExtentions_Add()
{
    var arr = new double[] { 3.2, 4.5, 7, 9.0 };
    var symbol = 7.67;

    var result = arr.Add(symbol);

    // Unlike the DSP overload above, this repo's Add(double) appends an
    // element rather than broadcasting a scalar.
    Assert.AreEqual(5, result.Length);
    Assert.AreEqual(symbol, result[arr.Length]);
}
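// A minimal sketch of the append-style Add the assertions above imply
// (assumed implementation, not the library's actual code):
public static double[] Add(this double[] source, double item)
{
    var result = new double[source.Length + 1];
    Array.Copy(source, result, source.Length);
    result[source.Length] = item;
    return result;
}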
/// <summary>
/// Merge a list of gaussian components into one big gaussian that
/// approximates the behaviour of the original mixture as closely as possible.
/// </summary>
/// <param name="components">List of gaussian components.</param>
/// <returns>Merged gaussian.</returns>
public static Gaussian Merge(List<Gaussian> components)
{
    if (components == null || components.Count == 0)
    {
        return null;
    }

    Gaussian first = components[0];
    double weight = 0.0;
    double[] mean = new double[first.Mean.Length];
    double[][] covariance = MatrixExtensions.Zero(first.Mean.Length);

    // The merged gaussian follows the rules
    //
    // w = sum of (wi)
    // m = sum of (wi mi) / w
    // P = sum of (wi (Pi + (mi - m) (mi - m)^T)) / w
    //
    // equivalent form:
    // foreach (Gaussian component in components) {
    //     weight += component.Weight;
    //     mean = mean.Add(component.Weight.Multiply(component.Mean));
    // }
    //
    // mean = mean.Divide(weight);
    //
    // foreach (Gaussian component in components) {
    //     double[] diff = component.Mean.Subtract(mean);
    //     covariance = covariance.Add(component.Weight.Multiply(
    //         component.Covariance.Add(diff.OuterProduct(diff).ToArray())));
    // }
    //
    // covariance = covariance.Divide(weight);

    foreach (Gaussian component in components)
    {
        double w = component.Weight;
        double[] m = component.Mean;
        double[][] cov = component.Covariance;

        weight += w;
        mean = mean.Add(w.Multiply(m));
        covariance = covariance.Add(w.Multiply(cov.Add(m.OuterProduct(m).ToArray())));
    }

    if (weight < 1e-15)
    {
        return new Gaussian(first.Mean, Util.InfiniteCovariance(first.Mean.Length), 0.0);
    }

    mean = mean.Divide(weight);
    covariance = covariance.Divide(weight).Subtract(mean.OuterProduct(mean).ToArray());

    return new Gaussian(mean, covariance, weight);
}
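// Hypothetical usage of Merge, with the Gaussian(mean, covariance, weight)
// constructor signature taken from the method above. Two unit-variance 1-D
// components at 0 and 2 with equal weight merge to mean 1 and variance
// 1 + 1 = 2 (the between-mean spread adds to the average covariance):
var a = new Gaussian(new[] { 0.0 }, new[] { new[] { 1.0 } }, 0.5);
var b = new Gaussian(new[] { 2.0 }, new[] { new[] { 1.0 } }, 0.5);
Gaussian merged = Merge(new List<Gaussian> { a, b });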
public double[,][,] Estimate(MixtureSigmaEstimationParameters<TDistribution> parameters)
{
    if (_sigma != null)
    {
        return _sigma;
    }

    try
    {
        _sigma = new double[parameters.Model.N, parameters.L][,];
        for (var i = 0; i < parameters.Model.N; i++)
        {
            for (var l = 0; l < parameters.L; l++)
            {
                var denominator = 0.0d;
                var nominator = new double[parameters.Observations[0].Dimention, parameters.Observations[0].Dimention];
                for (var t = 0; t < parameters.Observations.Count; t++)
                {
                    // TODO : weights here
                    var weight = GetWeightValue(t, parameters.ObservationWeights);
                    var gammaComponents = (parameters.Model.Normalized)
                        ? LogExtention.eExp(parameters.GammaComponents[t][i, l])
                        : parameters.GammaComponents[t][i, l];
                    var x = parameters.Observations[t].Value;
                    var z = x.Substruct(parameters.Mu[i, l]);
                    var m = z.OuterProduct(z);
                    m = m.Product(weight * gammaComponents);
                    denominator += weight * gammaComponents;
                    nominator = nominator.Add(m);
                }

                _sigma[i, l] = nominator.Product(1 / denominator);

                var matrix = new Matrix(_sigma[i, l]);
                if (!matrix.PositiviDefinite)
                {
                    _sigma[i, l] = matrix.ConvertToPositiveDefinite();
                    Debug.WriteLine("HMM State [{0},{1}] Sigma is not Positive Definite. Converting.", i, l);
                    Debug.WriteLine("{0}", matrix);
                }
            }
        }
    }
    catch (Exception)
    {
        for (var i = 0; i < parameters.Model.N; i++)
        {
            for (var l = 0; l < parameters.L; l++)
            {
                Debug.WriteLine("Mixture Sigma [{0},{1}] : {2}", i, l, new Matrix(_sigma[i, l]));
            }
        }
        throw;
    }

    return _sigma;
}
public static double GetMahalanobisDistance(this KDReading _reading, KDReading[] userReadings)
{
    int vectorSize = _reading.LetterMeasurements.Length, setSize = userReadings.Length;
    double[] v1 = Array.ConvertAll(_reading.LetterMeasurements, item => (double)item);

    var vectors = userReadings.Select(reading => reading.LetterMeasurements);
    var meanVector = vectors.Select(vector => Array.ConvertAll(vector, val => (double)val)).Mean();
    var vectorOfDistancesFromMean = v1.Zip(meanVector, (a, b) => a - b);

    double[,] matrixOfDistancesFromMean = new double[1, vectorSize],
              matrixOfDistancesFromMeanTransposed = new double[vectorSize, 1];
    for (int i = 0; i < vectorSize; i++)
    {
        matrixOfDistancesFromMean[0, i] = matrixOfDistancesFromMeanTransposed[i, 0] = vectorOfDistancesFromMean.ElementAt(i);
    }

    // Accumulate the sample covariance matrix from outer products of
    // mean-centered samples.
    var covarianceMatrix = new double[vectorSize, vectorSize];
    covarianceMatrix.Initialize();
    for (int i = 0; i < setSize; i++)
    {
        var sample = Array.ConvertAll(userReadings[i].LetterMeasurements, val => (double)val);
        var sampleDistanceFromMean = sample.Zip(meanVector, (a, b) => a - b);
        double[,] matrixSampleDistanceFromMean = new double[1, vectorSize],
                  matrixSampleDistanceFromMeanTransposed = new double[vectorSize, 1];
        for (int j = 0; j < vectorSize; j++)
        {
            matrixSampleDistanceFromMean[0, j] = matrixSampleDistanceFromMeanTransposed[j, 0] = sampleDistanceFromMean.ElementAt(j);
        }

        var product = matrixSampleDistanceFromMeanTransposed.Dot(matrixSampleDistanceFromMean);
        covarianceMatrix = covarianceMatrix.Add(product);
    }

    covarianceMatrix = covarianceMatrix.Divide(setSize);
    try
    {
        covarianceMatrix = covarianceMatrix.Inverse();
    }
    catch (Exception)
    {
        // Fall back to the pseudo-inverse if the covariance is singular.
        covarianceMatrix = covarianceMatrix.PseudoInverse();
    }

    var outputMatrix = matrixOfDistancesFromMean.Dot(covarianceMatrix).Dot(matrixOfDistancesFromMeanTransposed);
    return Math.Sqrt(outputMatrix[0, 0]);
}
private static double[] CalculateMean(double[][] gamma, double[][] observations, int k)
{
    var K = observations[0].Length;
    var N = observations.Length;
    var mean = new double[K];
    var nK = gamma[k].Sum();

    // Gamma-weighted sum of the observations, normalized by the total weight.
    for (var n = 0; n < N; n++)
    {
        mean = mean.Add(observations[n].Product(gamma[k][n]));
    }

    return mean.Product(1 / nK);
}
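// Product is the scalar-multiply extension this snippet (and the HMM
// estimators above) lean on; a minimal sketch of the assumed semantics:
public static double[] Product(this double[] vector, double scalar)
{
    var result = new double[vector.Length];
    for (int i = 0; i < vector.Length; i++)
    {
        result[i] = vector[i] * scalar;
    }
    return result;
}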
/// <summary>
/// Calculates a weighted average of the form Sum(Exp(weight) * vector) / Sum(Exp(weight))
/// in a numerically stable way.
/// </summary>
/// <param name="vectors">Array of vectors.</param>
/// <param name="weights">Log-weights for the average.</param>
/// <param name="begin">First index to consider.</param>
/// <param name="end">Last+1 index to consider.</param>
/// <returns>Tempered average of the vectors.</returns>
public static double[] TemperedAverage(this double[][] vectors, double[] weights, int begin, int end)
{
    if (vectors.Length != weights.Length)
    {
        throw new ArgumentException("There must be exactly one weight per vector");
    }

    if (begin < 0)
    {
        throw new ArgumentException("Begin index must be greater or equal to zero");
    }

    if (end > vectors.Length)
    {
        throw new ArgumentException("End index must be less or equal to the vector size");
    }

    if (vectors.Length == 0)
    {
        return new double[0];
    }

    double max = double.NegativeInfinity;
    double[] value = new double[vectors[0].Length];

    for (int i = begin; i < end; i++)
    {
        max = Math.Max(max, weights[i]);
    }

    if (double.IsNegativeInfinity(max))
    {
        return value;
    }

    // Subtract the max log-weight before exponentiating (the usual
    // log-sum-exp trick) to avoid overflow.
    for (int i = begin; i < end; i++)
    {
        weights[i] = Math.Exp(weights[i] - max);
    }

    weights = weights.Normalize();

    for (int i = begin; i < end; i++)
    {
        value = value.Add(weights[i].Multiply(vectors[i]));
    }

    return value;
}
/// <summary>
/// Calculates an incremental running average.
/// </summary>
/// <param name="data">Sample data.</param>
/// <param name="onCalculated">
/// Action callback which fires on each element addition.
/// <para>Parameters are: (index, incremental average).</para>
/// </param>
public static void RunningAverageIncremental(this IList<double[]> data, Action<int, double[]> onCalculated)
{
    var dim = data.First().Length;
    double[] avg = new double[dim];

    for (int i = 0; i < data.Count; i++)
    {
        var item = data[i];
        avg = avg.Add(UpdateAverageIncremental(avg, i, item));
        onCalculated(i, avg);
    }
}
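// UpdateAverageIncremental is not shown; since callers write
// avg.Add(UpdateAverageIncremental(avg, i, item)), it presumably returns the
// correction term (x - avg) / (n + 1) rather than the new average. A sketch
// under that assumption (the same helper is used by the two running-statistics
// methods further down):
internal static double[] UpdateAverageIncremental(double[] average, int count, double[] sample)
{
    // avg_{n+1} = avg_n + (x - avg_n) / (n + 1)
    var delta = new double[average.Length];
    for (int i = 0; i < average.Length; i++)
    {
        delta[i] = (sample[i] - average[i]) / (count + 1);
    }
    return delta;
}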
public void NoiseAnalysis()
{
    // Set up parameters to generate a noise test signal of 5 nVrms / rt-Hz.
    double amplitude = 5.0e-9;
    UInt32 length = 1000;
    double samplingRate = 2000;

    // Generate window & calculate the scale factor for NOISE!
    double[] wCoefs = mdsplib.DSP.Window.Coefficients(mdsplib.DSP.Window.Type.Hamming, length);
    double wScaleFactor = mdsplib.DSP.Window.ScaleFactor.Noise(wCoefs, samplingRate);

    // Instantiate & initialize a new DFT.
    DFT dft = new DFT();
    dft.Initialize(length);

    // Average the noise 'N' times.
    Int32 N = 1000;
    double[] noiseSum = new double[(length / 2) + 1];
    for (Int32 i = 0; i < N; i++)
    {
        // Generate the noise signal & apply the window.
        double[] inputSignal = mdsplib.DSP.Generate.NoisePsd(amplitude, samplingRate, length);
        inputSignal = inputSignal.Multiply(wCoefs);

        // DFT the noise -> convert -> sum. (The original also guarded on
        // N == 0, which can never hold inside this loop.)
        Complex[] cSpectrum = dft.Execute(inputSignal);
        double[] mag2 = cSpectrum.MagnitudeSquared();
        noiseSum = noiseSum.Add(mag2);
    }

    // Calculate the average and convert to magnitude format.
    // See text for the reasons to use Mag^2 format.
    double[] averageNoise = noiseSum.Divide(N);
    double[] lmSpectrum = averageNoise.Magnitude();

    // Properly scale the spectrum for the added window.
    lmSpectrum = lmSpectrum.Multiply(wScaleFactor);

    // For plotting on an XY scatter plot, generate the X-axis frequency span.
    double[] freqSpan = Util.FFT.FrequencySpan(samplingRate, length);

    // At this point an XY scatter plot can be generated from:
    //   X axis => freqSpan
    //   Y axis => lmSpectrum as a Power Spectral Density value

    // Extra credit - analyze plot data ignoring the first and last 20 bins.
    // The average value should be what we generated: 5.0e-9 Vrms / rt-Hz.
    double averageValue = mdsplib.DSP.Analyze.FindMean(lmSpectrum, 20, 20);
}
public void MatrixAdd_Add_Vectors()
{
    var v1 = new double[] { 2, 3, 5, 10 };
    var v2 = new double[] { 1, 1, 1, 1 };

    var actual = v1.Add(v2);
    var expected = new double[] { 3, 4, 6, 11 };

    Assert.AreEqual(expected.Length, actual.Length);
    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 0.1);
    }
}
static void Main(string[] args)
{
    "Hello World.".Print();
    "Hello World.".Excite();

    Person person = new Person();
    person.Fill().Print();

    double initialNumber = 4.00;
    double chainNumber = initialNumber.Add(4).Subtract(2).MultiplyBy(8).DivideBy(3);
    Console.WriteLine($"\nThe calculated number is {chainNumber}");
}
public static double[] InverseNoOverlap(List<Complex[]> stft, UInt32 wlen, UInt32 fftPad)
{
    int wlength = stft.First().Length;
    int totalLength = stft.Count() * (int)wlen;
    var waveout = new double[totalLength];

    // Concatenate each inverse-FFT frame at non-overlapping offsets.
    uint offset = 0;
    foreach (var spectrum in stft)
    {
        double[] slice = spectrum.iFFT().RealPart();
        waveout = waveout.Add(slice.Slice(0, wlen), offset);
        offset += wlen;
    }

    return waveout;
}
public static double[] Inverse(List<Complex[]> stft, UInt32 wlen, UInt32 fftPad)
{
    int wlength = stft.First().Length;
    int totalLength = (stft.Count() + 1) * wlength / 2;
    var waveout = new double[totalLength];

    // Overlap-add each inverse-FFT frame with a 50% hop.
    uint offset = 0;
    foreach (var spectrum in stft)
    {
        double[] slice = spectrum.iFFT().RealPart();
        waveout = waveout.Add(slice, offset);
        offset += (uint)(slice.Length / 2);
    }

    return waveout;
}
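// Both inverse-STFT methods above use an Add overload that takes an offset;
// a sketch of the assumed semantics (sum source into target starting at
// offset, clipping at the end of target):
public static double[] Add(this double[] target, double[] source, uint offset)
{
    var result = (double[])target.Clone();
    int start = (int)offset;
    for (int i = 0; i < source.Length && start + i < result.Length; i++)
    {
        result[start + i] += source[i];
    }
    return result;
}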
/// <summary>
/// Calculates incremental and decremental running averages.
/// </summary>
/// <param name="data">Sample data.</param>
/// <param name="onCalculated">
/// Action callback which fires on each element addition-removal.
/// <para>Parameters are: (index, incremental average, decremental average).</para>
/// </param>
public static void RunningAverageIncDec(this IList<double[]> data, Action<int, double[], double[]> onCalculated)
{
    var dim = data.First().Length;
    var avgInc = new double[dim];
    var avgDec = data.Average();

    for (int i = 0; i < data.Count; i++)
    {
        var item = data[i];
        avgInc = avgInc.Add(UpdateAverageIncremental(avgInc, i, item));
        avgDec = avgDec.Subtract(UpdateAverageDecremental(avgDec, data.Count - i, item));
        onCalculated(i, avgInc, avgDec);
    }
}
/// <summary>
/// Calculates an incremental running average and variance.
/// </summary>
/// <param name="data">Sample data.</param>
/// <param name="onCalculated">
/// Action callback which fires on each element addition.
/// <para>Parameters are: (index, incremental average, incremental variance).</para>
/// </param>
public static void RunningVarianceIncremental(this IList<double[]> data, Action<int, double[], double> onCalculated)
{
    var dim = data.First().Length;
    double[] avg = new double[dim];
    double varianceSum = 0d;

    for (int i = 0; i < data.Count; i++)
    {
        var item = data[i];
        var prevAvg = avg;
        avg = avg.Add(RunningAverage.UpdateAverageIncremental(avg, i, item));
        varianceSum += UpdateVarianceIncremental(prevAvg, avg, varianceSum, i, item);
        onCalculated(i, avg, varianceSum);
    }
}
public double[,][] Estimate(MixtureCoefficientEstimationParameters<TDistribution> parameters)
{
    if (_mu != null)
    {
        return _mu;
    }

    try
    {
        _mu = new double[parameters.Model.N, parameters.L][];
        for (var i = 0; i < parameters.Model.N; i++)
        {
            for (var l = 0; l < parameters.L; l++)
            {
                var denominator = 0.0d;
                var nominator = new double[parameters.Observations[0].Dimention];
                for (var t = 0; t < parameters.Observations.Count; t++)
                {
                    // TODO : weights here
                    var weight = GetWeightValue(t, parameters.ObservationWeights);
                    var x = parameters.Observations[t].Value;
                    var gamma = (parameters.Model.Normalized)
                        ? LogExtention.eExp(parameters.GammaComponents[t][i, l])
                        : parameters.GammaComponents[t][i, l];
                    denominator += weight * gamma;
                    x = x.Product(gamma * weight);
                    nominator = nominator.Add(x);
                }

                _mu[i, l] = nominator.Product(1 / denominator);
            }
        }
    }
    catch (Exception)
    {
        for (var i = 0; i < parameters.Model.N; i++)
        {
            for (var l = 0; l < parameters.L; l++)
            {
                Debug.WriteLine("Mixture Mu [{0},{1}] : {2}", i, l, new Vector(_mu[i, l]));
            }
        }
        throw;
    }

    return _mu;
}