//
// Summary:
//     Computes the Range of a sequence of nullable System.Double values,
//     ignoring entries that are null.
//
// Parameters:
//   source:
//     A sequence of nullable System.Double values to calculate the Range of.
//
// Returns:
//     The Range of the sequence of values, or null if the source sequence is
//     empty or contains only values that are null.
//
// Exceptions:
//   System.ArgumentNullException:
//     source is null.
public static double? Range(this IEnumerable<double?> source) {
  // The contract above promises ArgumentNullException, so validate eagerly
  // instead of relying on a deferred failure inside Coalesce()/Any().
  if (source == null) throw new ArgumentNullException(nameof(source));
  // Coalesce() drops the null entries (project extension — returns the
  // non-null doubles); delegate to the non-nullable Range overload.
  IEnumerable<double> values = source.Coalesce();
  if (values.Any()) {
    return values.Range();
  }
  // Empty or all-null input: no range is defined.
  return null;
}
// Applies a Kendo grid query (filter, sort, paging) to the joined table rows
// and packages the result as KendoData (total count + requested page).
private static KendoData GettableByKendofilter(IEnumerable<Struct_Joing_AllTable> enumerable, string kendoQuery) {
  // Deserialize the grid's query descriptor, then apply filter and sort.
  QueryDescriptor qd = JsonConvert.DeserializeObject<QueryDescriptor>(kendoQuery);
  // BUG FIX: the original kept the deferred Where/OrderBy chain and enumerated
  // it twice (once for Count(), once for Range()), re-running the filter and
  // sort each time. Materialize the filtered, sorted rows exactly once.
  var filtered = enumerable.Where(qd)
                           .OrderBy(qd)
                           .ToList();
  return new KendoData(
    filtered.Count,                 // total rows after filtering (for the grid's pager)
    filtered.Range(qd).ToList());   // the page of rows requested by the descriptor
}
/// <summary>
/// Derives class thresholds for turning a continuous estimated value into a
/// class decision: fits per-class mean/stdDev, computes pairwise Gaussian cut
/// points, then keeps only thresholds where the dominant class actually changes.
/// </summary>
/// <param name="problemData">The classification problem data (not read in this method's visible body — TODO confirm it is required by the interface).</param>
/// <param name="estimatedValues">Continuous model outputs, paired element-wise with <paramref name="targetClassValues"/>. NOTE(review): enumerated twice (Zip and Range) — assumed cheap/re-enumerable.</param>
/// <param name="targetClassValues">True class labels, one per estimated value.</param>
/// <param name="classValues">Output: class value assigned to each thresholded section.</param>
/// <param name="thresholds">Output: lower bound of each section (first entry is negative infinity).</param>
public static void CalculateThresholds(IClassificationProblemData problemData, IEnumerable<double> estimatedValues, IEnumerable<double> targetClassValues, out double[] classValues, out double[] thresholds) {
  // Pair each estimated value with its true class label.
  var estimatedTargetValues = Enumerable.Zip(estimatedValues, targetClassValues, (e, t) => new { EstimatedValue = e, TargetValue = t }).ToList();
  // Spread of the estimates; used below to nudge apart coincident cut points.
  double estimatedValuesRange = estimatedValues.Range();
  Dictionary<double, double> classMean = new Dictionary<double, double>();
  Dictionary<double, double> classStdDev = new Dictionary<double, double>();
  // calculate moments per class (classes whose moments cannot be computed are
  // silently skipped and take no part in threshold construction)
  foreach (var group in estimatedTargetValues.GroupBy(p => p.TargetValue)) {
    IEnumerable<double> estimatedClassValues = group.Select(x => x.EstimatedValue);
    double classValue = group.Key;
    double mean, variance;
    OnlineCalculatorError meanErrorState, varianceErrorState;
    OnlineMeanAndVarianceCalculator.Calculate(estimatedClassValues, out mean, out variance, out meanErrorState, out varianceErrorState);
    if (meanErrorState == OnlineCalculatorError.None && varianceErrorState == OnlineCalculatorError.None) {
      classMean[classValue] = mean;
      classStdDev[classValue] = Math.Sqrt(variance);
    }
  }
  double[] originalClasses = classMean.Keys.OrderBy(x => x).ToArray();
  int nClasses = originalClasses.Length;
  List<double> thresholdList = new List<double>();
  // For every pair of classes, collect the cut points of their (assumed
  // Gaussian) densities as candidate thresholds.
  for (int i = 0; i < nClasses - 1; i++) {
    for (int j = i + 1; j < nClasses; j++) {
      double x1, x2;
      double class0 = originalClasses[i];
      double class1 = originalClasses[j];
      // calculate all thresholds
      CalculateCutPoints(classMean[class0], classStdDev[class0], classMean[class1], classStdDev[class1], out x1, out x2);
      // if the two cut points are too close (for instance because the stdDev=0)
      // then move them by 0.1% of the range of estimated values
      if (x1.IsAlmost(x2)) {
        x1 -= 0.001 * estimatedValuesRange;
        x2 += 0.001 * estimatedValuesRange;
      }
      // keep only finite cut points not already present (within tolerance)
      if (!double.IsInfinity(x1) && !thresholdList.Any(x => x.IsAlmost(x1))) {
        thresholdList.Add(x1);
      }
      if (!double.IsInfinity(x2) && !thresholdList.Any(x => x.IsAlmost(x2))) {
        thresholdList.Add(x2);
      }
    }
  }
  thresholdList.Sort();
  // add small value and large value for the calculation of most influential class in each thresholded section
  thresholdList.Insert(0, double.NegativeInfinity);
  thresholdList.Add(double.PositiveInfinity);
  // find the most likely class for the points between thresholds
  List<double> filteredThresholds = new List<double>();
  List<double> filteredClassValues = new List<double>();
  for (int i = 0; i < thresholdList.Count - 1; i++) {
    // determine class with maximal density mass between the thresholds
    double maxDensity = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[originalClasses[0]], classStdDev[originalClasses[0]]);
    double maxDensityClassValue = originalClasses[0];
    foreach (var classValue in originalClasses.Skip(1)) {
      double density = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[classValue], classStdDev[classValue]);
      if (density > maxDensity) {
        maxDensity = density;
        maxDensityClassValue = classValue;
      }
    }
    // keep the section only if it has usable density and its winning class
    // differs from the previous section's (adjacent same-class sections merge)
    if (maxDensity > double.NegativeInfinity && (filteredClassValues.Count == 0 || !maxDensityClassValue.IsAlmost(filteredClassValues.Last()))) {
      filteredThresholds.Add(thresholdList[i]);
      filteredClassValues.Add(maxDensityClassValue);
    }
  }
  if (filteredThresholds.Count == 0 || !double.IsNegativeInfinity(filteredThresholds.First())) {
    // this happens if there are no thresholds (distributions for all classes are exactly the same)
    // or when the CDF up to the first threshold is zero
    // -> all samples should be classified as the class with the most observations
    // group observations by target class and select the class with largest count
    double mostFrequentClass = targetClassValues.GroupBy(c => c)
                                                .OrderBy(g => g.Count())
                                                .Last().Key;
    filteredThresholds.Insert(0, double.NegativeInfinity);
    filteredClassValues.Insert(0, mostFrequentClass);
  }
  thresholds = filteredThresholds.ToArray();
  classValues = filteredClassValues.ToArray();
}
/// <summary>
/// Returns a specific page of a list.
/// </summary>
/// <typeparam name="T">Element type of the sequence.</typeparam>
/// <param name="list">The source sequence to page over.</param>
/// <param name="pageIndex">1-based index of the page to return.</param>
/// <param name="pageSize">Number of elements per page.</param>
/// <returns>The elements of the requested page.</returns>
public static List<T> GetPageOfList<T>(this IEnumerable<T> list, int pageIndex, int pageSize) {
  // NOTE(review): the second argument passed to Range is pageSize * pageIndex,
  // i.e. the exclusive END index (startIndex + pageSize), not a count. This is
  // only correct if the Range extension takes (start, end) rather than
  // (start, count) — confirm against the Range extension's contract.
  int firstItemIndex = (pageIndex - 1) * pageSize;
  int upperBound = pageSize * pageIndex;
  return list.Range(firstItemIndex, upperBound);
}
/// <summary>
/// Computes class thresholds for mapping a continuous estimated value onto a
/// class: estimates per-class mean/stdDev, gathers pairwise Gaussian cut
/// points as candidate thresholds, and retains only those sections where the
/// dominant class changes.
/// </summary>
/// <param name="problemData">The classification problem data (not read in this method's visible body — TODO confirm it is required by the interface).</param>
/// <param name="estimatedValues">Continuous model outputs, paired element-wise with <paramref name="targetClassValues"/>. NOTE(review): enumerated twice (Zip and Range) — assumed cheap/re-enumerable.</param>
/// <param name="targetClassValues">True class labels, one per estimated value.</param>
/// <param name="classValues">Output: class value assigned to each thresholded section.</param>
/// <param name="thresholds">Output: lower bound of each section (first entry is negative infinity).</param>
public static void CalculateThresholds(IClassificationProblemData problemData, IEnumerable<double> estimatedValues, IEnumerable<double> targetClassValues, out double[] classValues, out double[] thresholds) {
  // Pair each estimated value with its true class label.
  var estimatedTargetValues = Enumerable.Zip(estimatedValues, targetClassValues, (e, t) => new { EstimatedValue = e, TargetValue = t }).ToList();
  // Spread of the estimates; used below to separate coincident cut points.
  double estimatedValuesRange = estimatedValues.Range();
  Dictionary<double, double> classMean = new Dictionary<double, double>();
  Dictionary<double, double> classStdDev = new Dictionary<double, double>();
  // calculate moments per class (classes whose moments cannot be computed are
  // silently skipped and take no part in threshold construction)
  foreach (var group in estimatedTargetValues.GroupBy(p => p.TargetValue)) {
    IEnumerable<double> estimatedClassValues = group.Select(x => x.EstimatedValue);
    double classValue = group.Key;
    double mean, variance;
    OnlineCalculatorError meanErrorState, varianceErrorState;
    OnlineMeanAndVarianceCalculator.Calculate(estimatedClassValues, out mean, out variance, out meanErrorState, out varianceErrorState);
    if (meanErrorState == OnlineCalculatorError.None && varianceErrorState == OnlineCalculatorError.None) {
      classMean[classValue] = mean;
      classStdDev[classValue] = Math.Sqrt(variance);
    }
  }
  double[] originalClasses = classMean.Keys.OrderBy(x => x).ToArray();
  int nClasses = originalClasses.Length;
  List<double> thresholdList = new List<double>();
  // For every pair of classes, collect the cut points of their (assumed
  // Gaussian) densities as candidate thresholds.
  for (int i = 0; i < nClasses - 1; i++) {
    for (int j = i + 1; j < nClasses; j++) {
      double x1, x2;
      double class0 = originalClasses[i];
      double class1 = originalClasses[j];
      // calculate all thresholds
      CalculateCutPoints(classMean[class0], classStdDev[class0], classMean[class1], classStdDev[class1], out x1, out x2);
      // if the two cut points are too close (for instance because the stdDev=0)
      // then move them by 0.1% of the range of estimated values
      if (x1.IsAlmost(x2)) {
        x1 -= 0.001 * estimatedValuesRange;
        x2 += 0.001 * estimatedValuesRange;
      }
      // keep only finite cut points not already present (within tolerance)
      if (!double.IsInfinity(x1) && !thresholdList.Any(x => x.IsAlmost(x1)))
        thresholdList.Add(x1);
      if (!double.IsInfinity(x2) && !thresholdList.Any(x => x.IsAlmost(x2)))
        thresholdList.Add(x2);
    }
  }
  thresholdList.Sort();
  // add small value and large value for the calculation of most influential class in each thresholded section
  thresholdList.Insert(0, double.NegativeInfinity);
  thresholdList.Add(double.PositiveInfinity);
  // find the most likely class for the points between thresholds
  List<double> filteredThresholds = new List<double>();
  List<double> filteredClassValues = new List<double>();
  for (int i = 0; i < thresholdList.Count - 1; i++) {
    // determine class with maximal density mass between the thresholds
    double maxDensity = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[originalClasses[0]], classStdDev[originalClasses[0]]);
    double maxDensityClassValue = originalClasses[0];
    foreach (var classValue in originalClasses.Skip(1)) {
      double density = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[classValue], classStdDev[classValue]);
      if (density > maxDensity) {
        maxDensity = density;
        maxDensityClassValue = classValue;
      }
    }
    // keep the section only if it has usable density and its winning class
    // differs from the previous section's (adjacent same-class sections merge)
    if (maxDensity > double.NegativeInfinity && (filteredClassValues.Count == 0 || !maxDensityClassValue.IsAlmost(filteredClassValues.Last()))) {
      filteredThresholds.Add(thresholdList[i]);
      filteredClassValues.Add(maxDensityClassValue);
    }
  }
  if (filteredThresholds.Count == 0 || !double.IsNegativeInfinity(filteredThresholds.First())) {
    // this happens if there are no thresholds (distributions for all classes are exactly the same)
    // or when the CDF up to the first threshold is zero
    // -> all samples should be classified as the class with the most observations
    // group observations by target class and select the class with largest count
    double mostFrequentClass = targetClassValues.GroupBy(c => c)
                                                .OrderBy(g => g.Count())
                                                .Last().Key;
    filteredThresholds.Insert(0, double.NegativeInfinity);
    filteredClassValues.Insert(0, mostFrequentClass);
  }
  thresholds = filteredThresholds.ToArray();
  classValues = filteredClassValues.ToArray();
}
void Start() {
  // Initialize the list with levels: scene numbers 1..MAX.
  // BUG FIX: the original "new List<int>(IEnumerable<int>.Range(1, MAX))" does
  // not compile — Range is a static method on System.Linq.Enumerable, not a
  // member of the IEnumerable<int> interface.
  scenes = new List<int>(Enumerable.Range(1, MAX));
}
// Verifies that the Range extension on testEnumerable yields the same
// elements as Enumerable.Range(3, 5).
public void RangeTest() {
  var subSet = testEnumerable.Range(3, 5).ToList();
  // BUG FIX: CollectionAssert.AreEqual takes (expected, actual); the original
  // passed them reversed, which makes failure messages report the wrong
  // "expected" collection.
  CollectionAssert.AreEqual(Enumerable.Range(3, 5).ToList(), subSet);
}
/// <summary>
/// Restricts a sequence to its leading portion, from index 0 up to
/// <paramref name="end"/> (as interpreted by the Range extension).
/// </summary>
/// <typeparam name="T">Element type of the sequence.</typeparam>
/// <param name="list">The source sequence.</param>
/// <param name="end">Upper bound passed to the Range extension.</param>
/// <returns>The limited sequence.</returns>
public static IEnumerable<T> Limit<T>(this IEnumerable<T> list, int end) => list.Range(0, end);
/// <summary>
/// Computes the statistic for one subgroup: the Range of its values, or 0
/// when the subgroup is null or empty.
/// </summary>
/// <param name="data">The subgroup's values; may be null.</param>
/// <returns>The boxed Range, or boxed 0 for a null/empty subgroup.</returns>
protected override object ProcessSubgroup(IEnumerable<double> data) {
  // Guard clause instead of the original conditional expression.
  if (data == null || !data.Any()) {
    return 0;
  }
  return data.Range();
}
// Returns the indexes of all bars that satisfy the given predicate.
private List<int> FindIndexes(Predicate<Bar> test) {
  // BUG FIX: the original "IEnumerable.Range(0, bar.Count)" does not compile —
  // Range is a static method on System.Linq.Enumerable, not on the
  // IEnumerable interface. Enumerate every index and keep those whose bar
  // passes the predicate.
  return Enumerable.Range(0, bar.Count).Where(i => test(bar[i])).ToList();
}