/// <summary>
/// Evaluates the normal (Gaussian) probability density function at every sample of
/// <paramref name="x"/>, using the mean and standard deviation computed from
/// <paramref name="x"/> itself.
/// </summary>
/// <param name="x">Input samples. An empty array yields an empty result.</param>
/// <returns>
/// The PDF values sorted in ascending order (the original per-index order is not preserved,
/// matching the previous implementation's contract).
/// </returns>
public static double[] NormalDistribution(double[] x)
{
    var mean = WaveMath.Mean(x);
    var deviation = WaveMath.StandardDeviation(x);

    // NOTE: the previous implementation also rented a buffer from MemoryPool that was
    // filled but never returned nor released (a pool leak), and tracked indices that
    // were never used. Both have been removed; the observable result is identical.
    return x.Select(sample => WaveMath.ProbabilityDensityFunction(sample, mean, deviation))
            .OrderBy(value => value)
            .ToArray();
}
/// <summary>
/// Estimates the disturbances in the signal based on the normal distribution of the
/// details coefficients.
/// </summary>
/// <param name="threshold">The higher the threshold, the higher the tolerance in fluctuations on the energy of the details.</param>
/// <param name="minimunDistance">Minimum distance between disturbances to consider a new one.</param>
/// <returns>The detected disturbances (possibly empty).</returns>
public List<Disturbance> GetDisturbances(double threshold = 0.1, int minimunDistance = 3)
{
    // The caller-facing threshold is a tolerance; internally we keep only samples whose
    // scaled PDF falls at or below (1 - threshold), i.e. the statistically unlikely ones.
    threshold = 1 - threshold;
    var disturbances = new List<Disturbance>();

    var mean = WaveMath.Mean(Details);
    var deviation = WaveMath.StandardDeviation(Details);

    // Key = index into Details, Value = PDF of that coefficient. NaN PDFs (e.g. when the
    // deviation is zero) are skipped entirely, so Keys need not be contiguous.
    var samples = new List<KeyValuePair<int, double>>();
    var min = double.MaxValue;
    var max = double.MinValue;
    for (var i = 0; i < Details.Length; i++)
    {
        var sample = Details[i];
        var norm = WaveMath.ProbabilityDensityFunction(sample, mean, deviation);
        if (double.IsNaN(norm))
        {
            continue;
        }
        if (norm < min)
        {
            min = norm;
        }
        if (norm > max)
        {
            max = norm;
        }
        samples.Add(new KeyValuePair<int, double>(i, norm));
    }

    // Rescale the normal distribution to 0..1, removing the values above the threshold.
    // Iterate backwards so RemoveAt does not shift the positions still to be visited.
    for (var i = samples.Count - 1; i >= 0; i--)
    {
        var scaledNorm = WaveMath.Scale(samples[i].Value, min, max, 0, 1);
        if (scaledNorm > threshold)
        {
            samples.RemoveAt(i);
        }
        else
        {
            // BUGFIX: preserve the original Details index (Key). The previous code stored
            // the loop index `i` here, which drifts from the real Details position as soon
            // as any NaN sample was skipped in the pass above, corrupting the disturbance
            // locations reported below.
            samples[i] = new KeyValuePair<int, double>(samples[i].Key, scaledNorm);
        }
    }

    // Group consecutive surviving indices into disturbances: a gap of at least
    // minimunDistance between neighboring Keys (or reaching the final sample) closes
    // the current group.
    int? start = null;
    var startIndex = 0;
    for (var i = 0; i < samples.Count; i++)
    {
        if (start == null)
        {
            start = i;
            startIndex = samples[i].Key;
        }
        else if (samples[i].Key - samples[i - 1].Key >= minimunDistance || i == samples.Count - 1)
        {
            disturbances.Add(new Disturbance(startIndex, samples[i - 1].Key, Details.Length, Signal.Samples.Length));
            if (i < samples.Count - 1)
            {
                start = i;
                startIndex = samples[i].Key;
            }
            else
            {
                // Final sample: emit it as a single-point disturbance; the merge step
                // below folds it into the previous one when they are close enough.
                disturbances.Add(new Disturbance(samples[i].Key, samples[i].Key, Details.Length, Signal.Samples.Length));
            }
        }
    }

    // Merge the trailing disturbance into its predecessor when their finishes sit
    // closer than minimunDistance apart.
    if (disturbances.Count > 1 &&
        (disturbances[disturbances.Count - 1].Finish - disturbances[disturbances.Count - 2].Finish) < minimunDistance)
    {
        disturbances[disturbances.Count - 2] = new Disturbance(
            disturbances[disturbances.Count - 2].Start,
            disturbances[disturbances.Count - 1].Finish,
            Details.Length,
            Signal.Samples.Length);
        disturbances.RemoveAt(disturbances.Count - 1);
    }

    return disturbances;
}