public static void Ctor_CultureInfo(object a, object b, int expected)
{
    // Comparer built from an explicit culture; only the sign of the raw
    // comparison result is asserted, not its magnitude.
    var comparer = new Comparer(new CultureInfo("en-US"));
    int sign = Math.Sign(comparer.Compare(a, b));
    Assert.Equal(expected, sign);
}
public static void TestCtor_CultureInfo(object a, object b, int expected)
{
    // Same as the sign-based variant, but the raw result is normalized by
    // the shared test helper before being compared to the expectation.
    var comparer = new Comparer(new CultureInfo("en-US"));
    int normalized = Helpers.NormalizeCompare(comparer.Compare(a, b));
    Assert.Equal(expected, normalized);
}
static void Main(string[] args)
{
    var demo = new SimpleDelegate();

    // First pass: sort by first name via the delegate.
    var byFirstName = new Comparer(Name.CompareFirstNames);
    Console.WriteLine("\nBefore Sort:\n");
    demo.PrintNames();
    demo.Sort(byFirstName);
    Console.WriteLine("\nAfter Sort:\n");
    demo.PrintNames();
    Console.ReadKey();

    // Second pass: same list, now sorted by last name.
    var byLastName = new Comparer(Name.CompareLastNames);
    Console.WriteLine("\nBefore Sort:\n");
    demo.PrintNames();
    demo.Sort(byLastName);
    Console.WriteLine("\nAfter Sort:\n");
    demo.PrintNames();
    Console.ReadKey();
}
public Street(Intersection node1, Intersection node2, Unit width)
{
    // Pick the comparer by dominant axis: a street whose horizontal span
    // (squared) exceeds its vertical span is treated as east-west.
    var dx = node1.Position.X - node2.Position.X;
    var dy = node1.Position.Y - node2.Position.Y;
    if (dx.Squared > dy.Squared)
    {
        this.comparer = eastWestComparer;
    }
    else
    {
        this.comparer = southNorthComparer;
    }

    this.Width = width;

    // Store the endpoints in ascending comparer order (capacity 3 leaves
    // room for an extra intersection to be inserted later).
    if (this.comparer.Compare(node1, node2) > 0)
    {
        this.intersections = new List<Intersection>(3) { node2, node1 };
    }
    else
    {
        this.intersections = new List<Intersection>(3) { node1, node2 };
    }
}
private static string GetResult(Comparer comparer)
{
    // Dispatch on the first character of the user's choice (upper-cased),
    // read two arrays of the chosen type, and return the comparison result.
    switch (Console.ReadLine().ToUpper()[0])
    {
        case 'I':
        {
            int[] left = GetIntArray();
            int[] right = GetIntArray();
            return comparer.CompareArrays(left, right).ToString();
        }
        case 'C':
        {
            char[] left = GetCharArray();
            char[] right = GetCharArray();
            return comparer.CompareArrays(left, right).ToString();
        }
        case 'D':
        {
            double[] left = GetDoubleArray();
            double[] right = GetDoubleArray();
            return comparer.CompareArrays(left, right).ToString();
        }
        default:
            return "Invalid input!";
    }
}
// Partitions array[start..end] around array[start] (the pivot) over an
// unmanaged buffer, returning the pivot's final index.
// NOTE(review): the comparisons keep elements that compare GREATER than the
// pivot on the left, so this partitions for a descending order — confirm
// against the calling QuickSort's intent.
private static unsafe int QuickSortPartion(TemplateStructType* array, int start, int end, Comparer<TemplateStructType> comparer)
{
    TemplateStructType pivot, startValue, endValue;
    pivot = array[start];
    while (start < end)
    {
        // Advance 'start' past elements already on the correct (left) side.
        startValue = array[start];
        while (start < end && comparer.Compare(startValue, pivot) > 0)
        {
            start++;
            startValue = array[start];
        }
        // Retreat 'end' past elements already on the correct (right) side.
        endValue = array[end];
        while (start < end && comparer.Compare(endValue, pivot) < 0)
        {
            end--;
            endValue = array[end];
        }
        // Both scans stopped on out-of-place elements: exchange them.
        if (start < end)
        {
            array[end] = startValue;
            array[start] = endValue;
        }
    }
    return start;
}
/// <summary>
/// Creates a component constraint.
/// </summary>
/// <param name="comparer">The comparer used to match components.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="comparer"/> is null.</exception>
public ComponentConstraint(Comparer<Component> comparer)
{
    // nameof is refactor-safe and compiles to the same "comparer" string
    // the original magic literal produced.
    if (comparer == null)
        throw new ArgumentNullException(nameof(comparer));

    this.comparer = comparer;
}
static void Main(string[] args)
{
    var demo = new Delegate();
    var byFirstName = new Comparer(Delegate.CompareFirstNames);
    var byLastName = new Comparer(Delegate.CompareLastNames);

    Console.WriteLine("\n Before Sort: \n");
    demo.PrintNames();

    // Sort twice, printing after each pass so both orderings are visible.
    demo.Sort(byFirstName);
    Console.WriteLine("\n After CompareFirstNames Sort: \n");
    demo.PrintNames();

    demo.Sort(byLastName);
    Console.WriteLine("\n After CompareLastNames Sort: \n");
    demo.PrintNames();

    Console.ReadLine();
}
public void Setup()
{
    // _first and _second share the values 5..10; _third is disjoint from both.
    _first = new[] { 1, 2, 3, 4, 5, 5, 6, 6, 6, 7, 8, 9, 10, 10 };
    _second = new[] { 5, 5, 6, 6, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
    _third = new[] { 100, 200, 300, 400, 500 };
    _comparer = Comparer<int>.Default;
}
public virtual void TransferStateTo(Comparer targetVisitor)
{
    // Nothing to copy into a missing target.
    if (targetVisitor == null)
    {
        return;
    }

    // Mirror this visitor's comparison state onto the target.
    targetVisitor.MemberMapping = MemberMapping;
    targetVisitor.MembersThatHaveChanged = MembersThatHaveChanged;
    targetVisitor.DoNotCompareBodies = DoNotCompareBodies;
    targetVisitor.OriginalModule = OriginalModule;
    targetVisitor.NewModule = NewModule;
}
private static void QuickSort(IntPtr array, int start, int end, Comparer<TemplateStructType> comparer)
{
    // Empty or single-element range: already sorted.
    if (start >= end)
    {
        return;
    }

    // Seed the work stack with the full range (start pushed last so it pops
    // first) and hand off to the iterative driver.
    var pending = new Stack<int>();
    pending.Push(end);
    pending.Push(start);
    QuickSort(array, comparer, pending);
}
internal static bool GetPassword(IDataAccess dataAccess, int userId, out byte[] password, out byte[] passwordSalt, out bool isApproved, out bool isSuspended)
{
    // Build a "UserId = userId" filter and delegate to the shared core;
    // a non-zero (and non-null) core result means success.
    var entity = new UserEntity();
    var oql = new OQL(entity);
    var comparer = new Comparer(oql).Compare(entity.UserId, "=", userId);
    return GetPasswordCore(dataAccess, comparer, entity, out password, out passwordSalt, out isApproved, out isSuspended) != 0;
}
internal static int? GetPassword(IDataAccess dataAccess, string identity, string @namespace, out byte[] password, out byte[] passwordSalt, out bool isApproved, out bool isSuspended)
{
    // Build the identity-based filter via the shared helper, then delegate
    // to the common core and return its raw result.
    var entity = new UserEntity();
    var oql = new OQL(entity);
    var baseComparer = new Comparer(oql);
    var comparer = GetUserIdentityComparer(identity, @namespace, baseComparer, entity);
    return GetPasswordCore(dataAccess, comparer, entity, out password, out passwordSalt, out isApproved, out isSuspended);
}
/// <summary>
/// Creates an attribute constraint to search for a match using a custom comparer.
/// </summary>
/// <param name="attributeName">Name of the attribute as recognised by Internet Explorer.</param>
/// <param name="comparer">The attribute value comparer</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="attributeName"/>
/// or <paramref name="comparer"/> is null</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="attributeName"/> is empty</exception>
public AttributeConstraint(string attributeName, Comparer<string> comparer)
{
    // BUG FIX: an empty attributeName previously threw ArgumentNullException,
    // contradicting the documented contract above. Null and empty now get
    // the distinct exception types the XML doc promises.
    if (attributeName == null)
        throw new ArgumentNullException(nameof(attributeName));
    if (attributeName.Length == 0)
        throw new ArgumentException("attributeName must not be empty.", nameof(attributeName));
    if (comparer == null)
        throw new ArgumentNullException(nameof(comparer));

    this.attributeName = attributeName;
    this.comparer = comparer;
}
// Exchange sort. The comparer delegate returns true when its first argument
// should precede its second; any element preferred over array[i] is swapped
// into position i.
static public void Sort(Object[] array, Comparer comparer)
{
    for (int i = 0; i < array.Length; i++)
    {
        for (int j = i + 1; j < array.Length; j++)
        {
            if (comparer(array[j], array[i]))
            {
                Object swap = array[i];
                array[i] = array[j];
                array[j] = swap;
            }
        }
    }
}
// Exchange sort over the names array; the delegate decides the ordering
// (positive result means the pair is out of order).
public void Sort(Comparer compare)
{
    for (int i = 0; i < names.Length; i++)
    {
        for (int j = i; j < names.Length; j++)
        {
            if (compare(names[i], names[j]) > 0)
            {
                object swap = names[i];
                names[i] = names[j];
                names[j] = (Name)swap;
            }
        }
    }
}
static void Main(string[] args)
{
    // Two-element source and one-element target sample lists.
    var source = new List<Foo>
    {
        new Foo { A = "A" },
        new Foo { A = "B" },
    };
    var target = new List<Foo>
    {
        new Foo { A = "A" },
    };

    // Comparers keyed on one property and on two properties respectively.
    var c1 = new Comparer<Foo>(x => x.A);
    var c2 = new Comparer<Foo>(x => x.A, x => x.B);
}
// Entry point: prompt for a data type (I/C/D), run the matching array
// comparison via GetResult, and print the outcome. Code left byte-identical;
// the verbatim menu string must not be reflowed.
static void Main(string[] args) { Comparer comparer = new Comparer(); string result; Console.WriteLine(@"Select data type: I -> Integer C -> Character D -> Double"); result = GetResult(comparer); Console.WriteLine(result); }
public void Compare()
{
    // Three known Steam IDs (owner noted beside each).
    var steamIds = new List<string>
    {
        "76561197975995523", // ice_mouton
        "76561197962208538", // dubispacebar
        "76561197965572012", // siliticx
    };

    var comparer = new Comparer(_api);
    var games = comparer.Compare(steamIds);

    // The comparison is expected to yield at least one shared game.
    Assert.IsTrue(games.Count > 0);
}
// Exchange sort: whenever the delegate prefers a later element over arr[i],
// the two are swapped so the preferred element moves forward.
public static void Sort(object[] arr, Comparer comparer)
{
    for (int i = 0; i < arr.Length; ++i)
    {
        for (int j = i + 1; j < arr.Length; ++j)
        {
            if (!comparer(arr[j], arr[i]))
            {
                continue;
            }
            object swap = arr[i];
            arr[i] = arr[j];
            arr[j] = swap;
        }
    }
}
/// <summary>
/// Bubble-sorts the rows of a jagged array in place using the supplied comparer.
/// </summary>
/// <param name="array">The jagged array whose rows are reordered.</param>
/// <param name="comparer">Delegate returning a positive value when its first
/// row should come after its second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="array"/>
/// or <paramref name="comparer"/> is null.</exception>
public static void Sort(int[][] array, Comparer comparer)
{
    if (array == null)
        throw new ArgumentNullException("array", "The array is null");
    // Robustness fix: a null comparer previously surfaced as a
    // NullReferenceException from inside the loop (cf. the sibling sorts,
    // which validate the comparer up front).
    if (comparer == null)
        throw new ArgumentNullException("comparer", "The comparer is null");

    // Classic bubble sort: each pass floats the largest remaining row to
    // the end of the unsorted region.
    for (int i = 0; i < array.Length - 1; i++)
    {
        for (int j = 0; j < array.Length - i - 1; j++)
        {
            if (comparer(array[j], array[j + 1]) > 0)
            {
                Swap(ref array[j], ref array[j + 1]);
            }
        }
    }
}
// Exchange sort over names; the injected delegate supplies the ordering.
internal void Sort(Comparer compare)
{
    for (int i = 0; i < names.Length; i++)
    {
        for (int j = i; j < names.Length; j++)
        {
            // Non-positive result: pair already in order, nothing to do.
            if (compare(names[i], names[j]) <= 0)
            {
                continue;
            }
            object swap = names[i];
            names[i] = names[j];
            names[j] = (Name)swap;
        }
    }
}
static void Main()
{
    // Sample data, then sort with the RhsIsGreater comparison and print.
    Employer[] employees =
    {
        new Employer("Pupkin V.", 2000),
        new Employer("Bubkin P.", 1500),
        new Employer("Zubkin M.", 350),
    };

    var comparison = new Comparer(Employer.RhsIsGreater);
    BubbleSortter.Sort(employees, comparison);

    foreach (Employer employee in employees)
    {
        Console.WriteLine(employee.ToString());
    }
}
// The delegate parameter supplies the ordering; Sort itself is
// ordering-agnostic and just exchanges out-of-order pairs.
public void Sort(Comparer compare)
{
    for (int i = 0; i < names.Length; i++)
    {
        for (int j = i; j < names.Length; j++)
        {
            // The delegate is invoked exactly like a regular method.
            if (compare(names[i], names[j]) > 0)
            {
                object swap = names[i];
                names[i] = names[j];
                names[j] = (Name)swap;
            }
        }
    }
}
// Iterative quicksort driver over an unmanaged buffer: the explicit stack
// holds (start, end) index pairs awaiting partitioning, avoiding recursion.
private static unsafe void QuickSort(IntPtr array, Comparer<TemplateStructType> comparer, Stack<int> stack)
{
    TemplateStructType* pointer = (TemplateStructType*)array.ToPointer();
    while (stack.Count > 0)
    {
        // Ranges are pushed start-last so they pop in (start, end) order.
        int start = stack.Pop();
        int end = stack.Pop();
        int index = QuickSortPartion(pointer, start, end, comparer);
        // Queue the left sub-range if it still has more than one element.
        if (start < index - 1)
        {
            stack.Push(index - 1);
            stack.Push(start);
        }
        // Queue the right sub-range likewise.
        if (index + 1 < end)
        {
            stack.Push(end);
            stack.Push(index + 1);
        }
    }
}
// Verifies that two XML documents that differ only by whitespace before a
// self-closing tag raise no change callbacks: the mock handler is created
// with MockBehavior.Strict and every Added/Changed/Removed event is verified
// Times.Never after the Compare call. Code left byte-identical — the verbatim
// XML strings must not be reflowed.
public void NoChange_SameOrderOfElements() { //Arrange var xml1 = @"<?xml version=""1.0"" encoding=""utf-8"" ?> <root> <elem1>This is element 1</elem1> <elem2>This is element 2</elem2> <add name=""name1"" value=""value1"" /> </root> "; var xml2 = @"<?xml version=""1.0"" encoding=""utf-8"" ?> <root> <elem1>This is element 1</elem1> <elem2>This is element 2</elem2> <add name=""name1"" value=""value1""/> </root> "; var mockHandler = new Mock<IXmlCompareHandler>(MockBehavior.Strict); //mockHandler.Setup(a => a.AttributeAdded(It.IsAny<string>(),It.IsAny<XAttribute>())); //mockHandler.Setup(a => a.AttributeChanged(It.IsAny<string>(),It.IsAny<XAttribute>(), It.IsAny<XAttribute>())); //mockHandler.Setup(a => a.AttributeRemoved(It.IsAny<string>(),It.IsAny<XAttribute>())); //mockHandler.Setup(a => a.ElementAdded(It.IsAny<string>(),It.IsAny<XElement>())); //mockHandler.Setup(a => a.ElementChanged(It.IsAny<string>(),It.IsAny<XElement>(), It.IsAny<XElement>())); //mockHandler.Setup(a => a.ElementRemoved(It.IsAny<string>(),It.IsAny<XElement>())); var comparer = new Comparer(mockHandler.Object); //act comparer.Compare(GetStream(xml1), GetStream(xml2), mockHandler.Object); //assert mockHandler.Verify(a => a.AttributeAdded(It.IsAny<AttributeAddedEventArgs>()), Times.Never); mockHandler.Verify(a => a.AttributeChanged(It.IsAny<AttributeChangedEventArgs>()), Times.Never); mockHandler.Verify(a => a.AttributeRemoved(It.IsAny<AttributeRemovedEventArgs>()), Times.Never); mockHandler.Verify(a => a.ElementAdded(It.IsAny<ElementAddedEventArgs>()), Times.Never); mockHandler.Verify(a => a.ElementChanged(It.IsAny<ElementChangedEventArgs>()), Times.Never); mockHandler.Verify(a => a.ElementRemoved(It.IsAny<ElementRemovedEventArgs>()), Times.Never); }
// Bubble-sorts the rows of a jagged array in descending comparer order.
// All validation happens up front so the sort loops can assume clean input.
public static void SortDescending(int[][] jaggedArray, Comparer comparer)
{
    if (jaggedArray == null)
        throw new ArgumentNullException("jaggedArray");
    if (comparer == null)
        throw new ArgumentNullException("comparer");
    if (jaggedArray.Length == 0)
        throw new ArgumentException("jaggedArray is empty");

    for (int row = 0; row < jaggedArray.Length; row++)
    {
        if (jaggedArray[row] == null)
            throw new ArgumentException(string.Format("inner array {0} is null", row));
        if (jaggedArray[row].Length == 0)
            throw new ArgumentException(string.Format("inner array {0} is empty", row));
    }

    // Bubble pass: swap when the left row ranks LOWER (descending order).
    for (int pass = 0; pass < jaggedArray.Length - 1; pass++)
    {
        for (int i = 0; i < jaggedArray.Length - 1; i++)
        {
            if (comparer(jaggedArray[i], jaggedArray[i + 1]) < 0)
                Swap(ref jaggedArray[i], ref jaggedArray[i + 1]);
        }
    }
}
static void Main(string[] args)
{
    try
    {
        var leftFile = @".\TestFiles\a1.config";
        var rightFile = @".\TestFiles\a2.config";
        var handler = new TestXmlCompareHandler();

        // Comparer is disposable, so scope it with a using block.
        using (var comparer = new Comparer(handler))
        {
            comparer.Compare(leftFile, rightFile, handler);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("ERROR: " + ex);
    }

    Console.Write("Press RETURN to close...");
    Console.ReadLine();
}
static void Main(string[] args)
{
    var demo = new SimpleDelegate();
    // Instantiate the delegate with the static comparison method.
    var byFirstName = new Comparer(SimpleDelegate.CompareFirstNames);

    Console.WriteLine("\nBefore Sort:\n");
    demo.PrintNames();

    // NOTE: the "After Sort" banner is printed before Sort runs — kept
    // as-is to preserve the original console output order.
    Console.WriteLine("\nAfter Sort:\n");
    demo.Sort(byFirstName);
    demo.PrintNames();

    Console.ReadLine();
}
/// <summary>
/// Bubble-sorts the rows of a jagged array in place using the supplied comparer.
/// </summary>
/// <param name="jaggedArray">The jagged array whose rows are reordered.</param>
/// <param name="comparer">Delegate returning a positive value when its first
/// row should come after its second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="jaggedArray"/>
/// or <paramref name="comparer"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the array or any row is empty/null.</exception>
public static void ArraySort(int[][] jaggedArray, Comparer comparer)
{
    if (jaggedArray == null)
        throw new ArgumentNullException("NULL");
    if (comparer == null)
        throw new ArgumentNullException("No comparer");
    if (jaggedArray.Length == 0)
        throw new ArgumentException("No elements");
    for (int i = 0; i < jaggedArray.Length; i++)
    {
        if (jaggedArray[i] == null)
            throw new ArgumentException("Null raws");
    }

    for (int i = 0; i < jaggedArray.Length - 1; i++)
    {
        for (int j = 0; j < jaggedArray.Length - 1; j++)
        {
            // BUG FIX: was "== 1", which ignored comparers that signal
            // "greater" with any other positive value; the comparison
            // contract only guarantees the sign of the result.
            if (comparer(jaggedArray[j], jaggedArray[j + 1]) > 0)
                Swap(ref jaggedArray[j], ref jaggedArray[j + 1]);
        }
    }
}
/// <summary>
/// Finds the first row whose cell text in <paramref name="inColumn"/> (a TD html
/// element) matches <paramref name="comparer"/>. Returns <c>null</c> when no match
/// is found. Rows in all <see cref="Core.TableBody"/> elements are searched, but
/// rows in nested tables are ignored.
/// </summary>
/// <param name="comparer">The comparer that the cell text must match.</param>
/// <param name="inColumn">Index of the column to find the text in.</param>
/// <returns>The searched for <see cref="TableRow"/>; otherwise <c>null</c>.</returns>
public virtual TableRow FindRowInOwnTableRows(Comparer <string> comparer, int inColumn)
{
    Logger.LogAction("Matching comparer'{0}' with text in column {1} of {2} '{3}'", comparer, inColumn, GetType().Name, Id);
    var constraint = Find.ByTextInColumn(comparer, inColumn);
    return FindInOwnTableRows(constraint);
}
/// <summary>
/// Creates a list with the given initial capacity, ordering items by their
/// own <c>CompareTo</c> implementation.
/// </summary>
/// <param name="capacity">Initial capacity forwarded to the main constructor.</param>
// NOTE(review): unlike Comparer<T>.Default, this lambda throws a
// NullReferenceException when the first argument is null — confirm nulls
// can never be inserted.
public SimpleSortedList(int capacity) : this(Comparer <T> .Create((x, y) => x.CompareTo(y)), capacity) { }
/// <summary>Equality between a plain string and a <see cref="SoftString"/>,
/// delegated to the shared <c>Comparer.Equals</c>.</summary>
public static bool operator ==(string left, SoftString right)
{
    return Comparer.Equals(left, right);
}
// Convenience overload: non-positive resultCount means "unbounded", and a
// missing comparer falls back to Comparer<T>.Default.
public void Prepare(int resultCount = int.MaxValue, Comparer <T> comparer = null)
{
    int limit = resultCount > 0 ? resultCount : int.MaxValue;
    Prepare(this.root, limit, comparer ?? Comparer<T>.Default);
}
public void Matches_With_Wildcard()
{
    // Sequence expected to satisfy AreEqualOrZero ('0' present).
    var sequence = new[] { '0', '1', '1', '1' };

    Assert.IsTrue(Comparer.AreEqualOrZero(sequence));
}
private ParameterSet[] TreeOrderedCandidatesSearch(FastForestRegressionPredictor forest, int numOfCandidates, IEnumerable <IRunResult> previousRuns) { // Step 1: Get ordered list of all leaf values. SortedList <double, Tuple <int, int> > leafValueList = new SortedList <double, Tuple <int, int> >(Comparer <double> .Create((x, y) => y.CompareTo(x))); for (int i = 0; i < forest.TrainedEnsemble.NumTrees; i++) { RegressionTree t = forest.TrainedEnsemble.GetTreeAt(i); for (int j = 0; j < t.NumLeaves; j++) { double val = t.LeafValue(j); while (leafValueList.ContainsKey(val)) { val += Double.Epsilon; } leafValueList.Add(val, Tuple.Create(i, j)); } } // Step 2: Go through, starting from best leaves. //ch.Info("Ha ha, we trained {0} trees", ensemble.NumTrees); //// This is a pretty silly example of inspecting the tree. //int count = ensemble.Trees.Sum(t => t.SplitFeatures.Take(t.NumNodes).Count(f => f == 5)); //ch.Info("Our random forest ensemble used the feature with index 5, {0} times!!", count); //double allLeavesSum = ensemble.Trees.Sum(t => t.LeafValues.Take(t.NumLeaves).Sum()); //ch.Info("Our random forest, across all leaves, summed to {0}", allLeavesSum); //int[] path = t.pathToLeaf(leafIndex); return(null); }
/// <summary>Initializes the sorter with the default <c>Comparer</c>.</summary>
public CustomSortComparer() => this._comparer = Comparer.Default;
public void Does_Not_Match_On_NonEqual_Sequence_With_Wildcard()
{
    // Sequence expected NOT to satisfy AreEqualOrZero despite containing '0'.
    var sequence = new[] { '1', '2', '1', '0' };

    Assert.IsFalse(Comparer.AreEqualOrZero(sequence));
}
// Manual CoreFX-style harness for culture-sensitive Comparer.Compare:
// - for a set of popular cultures, checks "Apple" vs "Æble" ordering
//   (expected result flips for da-DK / is-IS / nb-NO / nn-NO),
// - checks case-sensitive "abc" vs "ABC" ordering,
// - verifies that new Comparer((CultureInfo)null) throws ArgumentNullException.
// Returns true iff no check failed (iCountErrors == 0). Code left
// byte-identical (doc-only review).
public bool runTest() { //////////// Global Variables used for all tests int iCountErrors = 0; int iCountTestcases = 0; Comparer comp; string[] str1 = { "Apple", "abc", }; string[] str2 = { "Æble", "ABC" }; try { do { ///////////////////////// START TESTS //////////////////////////// /////////////////////////////////////////////////////////////////// //[] Vanilla test case - The TextInfo property of the CultureInfo is used in the CaseInsensitiveHashCodeProvider //TextInfo has GetCaseInsensitiveHashCode() methods iCountTestcases++; var somePopularCultureNames = new string[] { "cs-CZ", "da-DK", "de-DE", "el-GR", "en-US", "es-ES", "fi-FI", "fr-FR", "hu-HU", "it-IT", "ja-JP", "ko-KR", "nb-NO", "nl-NL", "pl-PL", "pt-BR", "pt-PT", "ru-RU", "sv-SE", "tr-TR", "zh-CN", "zh-HK", "zh-TW" }; foreach (string cultureName in somePopularCultureNames) { CultureInfo culture = new CultureInfo(cultureName); if (culture == null) { continue; } iCountTestcases++; comp = new Comparer(culture); //The following cultures do this the other way round //da-DK, is-IS, nb-NO, nn-NO if (culture.Name != "da-DK" && culture.Name != "is-IS" && culture.Name != "nb-NO" && culture.Name != "nn-NO") { if (comp.Compare(str1[0], str2[0]) != 1) { iCountErrors++; Console.WriteLine("Err_3245sdg, Wrong value returned, {0}, culture: {1}", comp.Compare(str1[0], str2[0]), culture); } } else { if (comp.Compare(str1[0], str2[0]) != -1) { iCountErrors++; Console.WriteLine("Err_297dg, Wrong value returned, {0}, culture: {1}", comp.Compare(str1[0], str2[0]), culture.Name); } } if (comp.Compare(str1[1], str2[1]) != -1) { iCountErrors++; Console.WriteLine("Err_3467tsg, Wrong value returned, {0}, culture: {1}", comp.Compare(str1[1], str2[1]), culture.Name); } } //[] Call ctor with null CultureInfo try { comp = new Comparer((CultureInfo)null); iCountErrors++; Console.WriteLine("Err_89743asjppn Expected ctor to throw ArgumentNullException"); } catch (ArgumentNullException) { } catch (Exception e) { iCountErrors++; 
Console.WriteLine("Err_4447abcn Unexpected exceptin thrown: {0}", e); } /////////////////////////// END TESTS ///////////////////////////// } while (false); } catch (Exception exc_general) { ++iCountErrors; Console.WriteLine(" : Error Err_8888yyy! exc_general==\n" + exc_general.ToString()); } //// Finish Diagnostics if (iCountErrors == 0) { return(true); } else { Console.WriteLine("Fail! iCountErrors==" + iCountErrors); return(false); } }
// Orders the index list by each item's render-order key.
public void Sort(Comparer <RenderOrderKey> keyComparer)
{
    _indices.Sort((a, b) => keyComparer.Compare(a.Key, b.Key));
}
/// <summary>Sorts the index list with the supplied comparer.</summary>
public void Sort(Comparer <RenderItemIndex> comparer) => _indices.Sort(comparer);
/// <summary>
/// Builds a comparer that orders elements from largest to smallest by
/// reversing the arguments of the type's own <c>CompareTo</c>.
/// </summary>
public static IComparer<T> CreateDescendingComparer<T>() where T : IComparable<T>
{
    return Comparer<T>.Create((left, right) => right.CompareTo(left));
}
/// <summary> /// <para> /// If a query encounters split up resuming using continuation, we need to regenerate the continuation tokens. /// Specifically, since after split we will have new set of ranges, we need to remove continuation token for the /// parent partition and introduce continuation token for the child partitions. /// </para> /// <para> /// This function does that. Also in that process, we also check validity of the input continuation tokens. For example, /// even after split the boundary ranges of the child partitions should match with the parent partitions. If the Min and Max /// range of a target partition in the continuation token was Min1 and Max1. Then the Min and Max range info for the two /// corresponding child partitions C1Min, C1Max, C2Min, and C2Max should follow the constrain below: /// PMax = C2Max > C2Min > C1Max > C1Min = PMin. /// </para> /// </summary> /// <param name="partitionKeyRanges">The partition key ranges to extract continuation tokens for.</param> /// <param name="suppliedContinuationTokens">The continuation token that the user supplied.</param> /// <param name="targetRangeToContinuationTokenMap">The output dictionary of partition key range to continuation token.</param> /// <typeparam name="TContinuationToken">The type of continuation token to generate.</typeparam> /// <Remarks> /// The code assumes that merge doesn't happen, and that child ranges produced by a split exactly cover their parent range. /// </Remarks> /// <returns>The index of the partition whose MinInclusive is equal to the suppliedContinuationTokens</returns> protected int FindTargetRangeAndExtractContinuationTokens <TContinuationToken>( List <PartitionKeyRange> partitionKeyRanges, IEnumerable <Tuple <TContinuationToken, Range <string> > > suppliedContinuationTokens, out Dictionary <string, TContinuationToken> targetRangeToContinuationTokenMap) { if (partitionKeyRanges == null) { throw new ArgumentNullException($"{nameof(partitionKeyRanges)} can not be null."); } if (partitionKeyRanges.Count < 1) { throw new 
ArgumentException($"{nameof(partitionKeyRanges)} must have atleast one element."); } foreach (PartitionKeyRange partitionKeyRange in partitionKeyRanges) { if (partitionKeyRange == null) { throw new ArgumentException($"{nameof(partitionKeyRanges)} can not have null elements."); } } if (suppliedContinuationTokens == null) { throw new ArgumentNullException($"{nameof(suppliedContinuationTokens)} can not be null."); } if (suppliedContinuationTokens.Count() < 1) { throw new ArgumentException($"{nameof(suppliedContinuationTokens)} must have atleast one element."); } if (suppliedContinuationTokens.Count() > partitionKeyRanges.Count) { throw new ArgumentException($"{nameof(suppliedContinuationTokens)} can not have more elements than {nameof(partitionKeyRanges)}."); } targetRangeToContinuationTokenMap = new Dictionary <string, TContinuationToken>(); // Find the minimum index. Tuple <TContinuationToken, Range <string> > firstContinuationTokenAndRange = suppliedContinuationTokens .OrderBy((tuple) => tuple.Item2.Min) .First(); TContinuationToken firstContinuationToken = firstContinuationTokenAndRange.Item1; PartitionKeyRange firstContinuationRange = new PartitionKeyRange { MinInclusive = firstContinuationTokenAndRange.Item2.Min, MaxExclusive = firstContinuationTokenAndRange.Item2.Max }; int minIndex = partitionKeyRanges.BinarySearch( firstContinuationRange, Comparer <PartitionKeyRange> .Create((range1, range2) => string.CompareOrdinal(range1.MinInclusive, range2.MinInclusive))); if (minIndex < 0) { this.TraceWarning(string.Format( CultureInfo.InvariantCulture, "Could not find continuation token: {0}", firstContinuationToken.ToString())); throw new BadRequestException(RMResources.InvalidContinuationToken); } foreach (Tuple <TContinuationToken, Range <string> > suppledContinuationToken in suppliedContinuationTokens) { // find what ranges make up the supplied continuation token TContinuationToken continuationToken = suppledContinuationToken.Item1; Range <string> range = 
suppledContinuationToken.Item2; IEnumerable <PartitionKeyRange> replacementRanges = partitionKeyRanges .Where((partitionKeyRange) => string.CompareOrdinal(range.Min, partitionKeyRange.MinInclusive) <= 0 && string.CompareOrdinal(range.Max, partitionKeyRange.MaxExclusive) >= 0) .OrderBy((partitionKeyRange) => partitionKeyRange.MinInclusive); // Could not find the child ranges if (replacementRanges.Count() == 0) { this.TraceWarning(string.Format( CultureInfo.InvariantCulture, "Could not find continuation token: {0}", continuationToken.ToString())); throw new BadRequestException(RMResources.InvalidContinuationToken); } // PMax = C2Max > C2Min > C1Max > C1Min = PMin. string parentMax = range.Max; string child2Max = replacementRanges.Last().MaxExclusive; string child2Min = replacementRanges.Last().MinInclusive; string child1Max = replacementRanges.First().MaxExclusive; string child1Min = replacementRanges.First().MinInclusive; string parentMin = range.Min; if (!(parentMax == child2Max && string.CompareOrdinal(child2Max, child2Min) >= 0 && (replacementRanges.Count() == 1 ? true : string.CompareOrdinal(child2Min, child1Max) >= 0) && string.CompareOrdinal(child1Max, child1Min) >= 0 && child1Min == parentMin)) { this.TraceWarning(string.Format( CultureInfo.InvariantCulture, "PMax = C2Max > C2Min > C1Max > C1Min = PMin: {0}", continuationToken.ToString())); throw new BadRequestException(RMResources.InvalidContinuationToken); } foreach (PartitionKeyRange partitionKeyRange in replacementRanges) { targetRangeToContinuationTokenMap.Add(partitionKeyRange.Id, continuationToken); } } return(minIndex); }
// Single-document passage highlighter. Merges the per-term postings through a
// priority queue of OffsetsEnum (plus an EMPTY sentinel for termination),
// segments the content with the BreakIterator, accumulates weighted term
// frequencies per sentence, and keeps the top-n passages in passageQueue.
// Code left byte-identical (doc-only review) — the pq/passageQueue interplay
// and the Reset/Dequeue recycling of Passage objects are order-sensitive.
// algorithm: treat sentence snippets as miniature documents // we can intersect these with the postings lists via BreakIterator.preceding(offset),s // score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq)) private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength, BreakIterator bi, int doc, TermsEnum termsEnum, DocsAndPositionsEnum[] postings, int n) { PassageScorer scorer = GetScorer(field); if (scorer == null) { throw new NullReferenceException("PassageScorer cannot be null"); } JCG.PriorityQueue <OffsetsEnum> pq = new JCG.PriorityQueue <OffsetsEnum>(); float[] weights = new float[terms.Length]; // initialize postings for (int i = 0; i < terms.Length; i++) { DocsAndPositionsEnum de = postings[i]; int pDoc; if (de == EMPTY) { continue; } else if (de == null) { postings[i] = EMPTY; // initially if (!termsEnum.SeekExact(terms[i])) { continue; // term not found } de = postings[i] = termsEnum.DocsAndPositions(null, null, DocsAndPositionsFlags.OFFSETS); if (de == null) { // no positions available throw new ArgumentException("field '" + field + "' was indexed without offsets, cannot highlight"); } pDoc = de.Advance(doc); } else { pDoc = de.DocID; if (pDoc < doc) { pDoc = de.Advance(doc); } } if (doc == pDoc) { weights[i] = scorer.Weight(contentLength, de.Freq); de.NextPosition(); pq.Add(new OffsetsEnum(de, i)); } } pq.Add(new OffsetsEnum(EMPTY, int.MaxValue)); // a sentinel for termination JCG.PriorityQueue <Passage> passageQueue = new JCG.PriorityQueue <Passage>(n, Comparer <Passage> .Create((left, right) => { if (left.score < right.score) { return(-1); } else if (left.score > right.score) { return(1); } else { return(left.startOffset - right.startOffset); } })); Passage current = new Passage(); while (pq.TryDequeue(out OffsetsEnum off)) { DocsAndPositionsEnum dp = off.dp; int start = dp.StartOffset; if (start == -1) { throw new ArgumentException("field '" + field + "' was indexed without offsets, cannot highlight"); } int end 
= dp.EndOffset; // LUCENE-5166: this hit would span the content limit... however more valid // hits may exist (they are sorted by start). so we pretend like we never // saw this term, it won't cause a passage to be added to passageQueue or anything. if (Debugging.AssertsEnabled) { Debugging.Assert(EMPTY.StartOffset == int.MaxValue); } if (start < contentLength && end > contentLength) { continue; } if (start >= current.endOffset) { if (current.startOffset >= 0) { // finalize current current.score *= scorer.Norm(current.startOffset); // new sentence: first add 'current' to queue if (passageQueue.Count == n && current.score < passageQueue.Peek().score) { current.Reset(); // can't compete, just reset it } else { passageQueue.Enqueue(current); if (passageQueue.Count > n) { current = passageQueue.Dequeue(); current.Reset(); } else { current = new Passage(); } } } // if we exceed limit, we are done if (start >= contentLength) { Passage[] passages = passageQueue.ToArray(); foreach (Passage p in passages) { p.Sort(); } // sort in ascending order ArrayUtil.TimSort(passages, Comparer <Passage> .Create((left, right) => left.startOffset - right.startOffset)); return(passages); } // advance breakiterator if (Debugging.AssertsEnabled) { Debugging.Assert(BreakIterator.Done < 0); } current.startOffset = Math.Max(bi.Preceding(start + 1), 0); current.endOffset = Math.Min(bi.Next(), contentLength); } int tf = 0; while (true) { tf++; BytesRef term = terms[off.id]; if (term == null) { // multitermquery match, pull from payload term = off.dp.GetPayload(); if (Debugging.AssertsEnabled) { Debugging.Assert(term != null); } } current.AddMatch(start, end, term); if (off.pos == dp.Freq) { break; // removed from pq } else { off.pos++; dp.NextPosition(); start = dp.StartOffset; end = dp.EndOffset; } if (start >= current.endOffset || end > contentLength) { pq.Enqueue(off); break; } } current.score += weights[off.id] * scorer.Tf(tf, current.endOffset - current.startOffset); } // Dead code but 
compiler disagrees: if (Debugging.AssertsEnabled) { Debugging.Assert(false); } return(null); }
// Indexes seven small documents (spread over two segments via the explicit Commit)
// with a randomly chosen DocValues type, then checks the distinct-value counts per
// group for three term queries: content:random, content:some, content:blob.
public virtual void TestSimple()
{
    Random random = Random;
    // DocValues types to pick from when the codec supports doc values.
    DocValuesType[] dvTypes = new DocValuesType[] {
        DocValuesType.NUMERIC,
        DocValuesType.BINARY,
        DocValuesType.SORTED,
    };
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(
        random,
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT,
            new MockAnalyzer(random)).SetMergePolicy(NewLogMergePolicy()));
    // The legacy Lucene3x codec cannot write doc values; fall back to NONE there.
    bool canUseDV = !"Lucene3x".Equals(w.IndexWriter.Config.Codec.Name, StringComparison.Ordinal);
    DocValuesType dvType = canUseDV ? dvTypes[random.nextInt(dvTypes.Length)] : DocValuesType.NONE;

    Document doc = new Document();
    AddField(doc, groupField, "1", dvType);
    AddField(doc, countField, "1", dvType);
    doc.Add(new TextField("content", "random text", Field.Store.NO));
    doc.Add(new StringField("id", "1", Field.Store.NO));
    w.AddDocument(doc);

    // 1
    doc = new Document();
    AddField(doc, groupField, "1", dvType);
    AddField(doc, countField, "1", dvType);
    doc.Add(new TextField("content", "some more random text blob", Field.Store.NO));
    doc.Add(new StringField("id", "2", Field.Store.NO));
    w.AddDocument(doc);

    // 2
    doc = new Document();
    AddField(doc, groupField, "1", dvType);
    AddField(doc, countField, "2", dvType);
    doc.Add(new TextField("content", "some more random textual data", Field.Store.NO));
    doc.Add(new StringField("id", "3", Field.Store.NO));
    w.AddDocument(doc);
    w.Commit(); // To ensure a second segment

    // 3 -- note: no countField, so its count value is expected to be null below
    doc = new Document();
    AddField(doc, groupField, "2", dvType);
    doc.Add(new TextField("content", "some random text", Field.Store.NO));
    doc.Add(new StringField("id", "4", Field.Store.NO));
    w.AddDocument(doc);

    // 4
    doc = new Document();
    AddField(doc, groupField, "3", dvType);
    AddField(doc, countField, "1", dvType);
    doc.Add(new TextField("content", "some more random text", Field.Store.NO));
    doc.Add(new StringField("id", "5", Field.Store.NO));
    w.AddDocument(doc);

    // 5
    doc = new Document();
    AddField(doc, groupField, "3", dvType);
    AddField(doc, countField, "1", dvType);
    doc.Add(new TextField("content", "random blob", Field.Store.NO));
    doc.Add(new StringField("id", "6", Field.Store.NO));
    w.AddDocument(doc);

    // 6 -- no author field
    doc = new Document();
    doc.Add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES));
    AddField(doc, countField, "1", dvType);
    doc.Add(new StringField("id", "6", Field.Store.NO));
    w.AddDocument(doc);

    IndexSearcher indexSearcher = NewSearcher(w.GetReader());
    w.Dispose();

    // Orders group counts by group value; null group values sort first.
    var cmp = Comparer<AbstractDistinctValuesCollector.IGroupCount<IComparable>>.Create((groupCount1, groupCount2) =>
    {
        if (groupCount1.GroupValue == null)
        {
            if (groupCount2.GroupValue == null)
            {
                return (0);
            }
            return (-1);
        }
        else if (groupCount2.GroupValue == null)
        {
            return (1);
        }
        else
        {
            return (groupCount1.GroupValue.CompareTo(groupCount2.GroupValue));
        }
    });

    // === Search for content:random
    IAbstractFirstPassGroupingCollector<IComparable> firstCollector = CreateRandomFirstPassCollector(dvType, new Sort(), groupField, 10);
    indexSearcher.Search(new TermQuery(new Term("content", "random")), firstCollector);
    IAbstractDistinctValuesCollector<AbstractDistinctValuesCollector.IGroupCount<IComparable>> distinctValuesCollector =
        CreateDistinctCountCollector(firstCollector, groupField, countField, dvType);
    indexSearcher.Search(new TermQuery(new Term("content", "random")), distinctValuesCollector);
    //var gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
    // LUCENENET TODO: Try to work out how to do this without an O(n) operation
    var gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
    gcs.Sort(cmp);
    // Four groups match: the null (no author) group plus groups "1", "2", "3".
    assertEquals(4, gcs.Count);
    CompareNull(gcs[0].GroupValue);
    List<IComparable> countValues = new List<IComparable>(gcs[0].UniqueValues);
    assertEquals(1, countValues.size());
    Compare("1", countValues[0]);
    Compare("1", gcs[1].GroupValue);
    countValues = new List<IComparable>(gcs[1].UniqueValues);
    countValues.Sort(nullComparer);
    assertEquals(2, countValues.size());
    Compare("1", countValues[0]);
    Compare("2", countValues[1]);
    Compare("2", gcs[2].GroupValue);
    countValues = new List<IComparable>(gcs[2].UniqueValues);
    assertEquals(1, countValues.size());
    CompareNull(countValues[0]);
    Compare("3", gcs[3].GroupValue);
    countValues = new List<IComparable>(gcs[3].UniqueValues);
    assertEquals(1, countValues.size());
    Compare("1", countValues[0]);

    // === Search for content:some
    firstCollector = CreateRandomFirstPassCollector(dvType, new Sort(), groupField, 10);
    indexSearcher.Search(new TermQuery(new Term("content", "some")), firstCollector);
    distinctValuesCollector = CreateDistinctCountCollector(firstCollector, groupField, countField, dvType);
    indexSearcher.Search(new TermQuery(new Term("content", "some")), distinctValuesCollector);
    // LUCENENET TODO: Try to work out how to do this without an O(n) operation
    //gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
    gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
    gcs.Sort(cmp);
    assertEquals(3, gcs.Count);
    Compare("1", gcs[0].GroupValue);
    countValues = new List<IComparable>(gcs[0].UniqueValues);
    assertEquals(2, countValues.size());
    countValues.Sort(nullComparer);
    Compare("1", countValues[0]);
    Compare("2", countValues[1]);
    Compare("2", gcs[1].GroupValue);
    countValues = new List<IComparable>(gcs[1].UniqueValues);
    assertEquals(1, countValues.size());
    CompareNull(countValues[0]);
    Compare("3", gcs[2].GroupValue);
    countValues = new List<IComparable>(gcs[2].UniqueValues);
    assertEquals(1, countValues.size());
    Compare("1", countValues[0]);

    // === Search for content:blob
    firstCollector = CreateRandomFirstPassCollector(dvType, new Sort(), groupField, 10);
    indexSearcher.Search(new TermQuery(new Term("content", "blob")), firstCollector);
    distinctValuesCollector = CreateDistinctCountCollector(firstCollector, groupField, countField, dvType);
    indexSearcher.Search(new TermQuery(new Term("content", "blob")), distinctValuesCollector);
    // LUCENENET TODO: Try to work out how to do this without an O(n) operation
    //gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
    gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
    gcs.Sort(cmp);
    assertEquals(2, gcs.Count);
    Compare("1", gcs[0].GroupValue);
    countValues = new List<IComparable>(gcs[0].UniqueValues);
    // B/c the only one document matched with blob inside the author 1 group
    assertEquals(1, countValues.Count);
    Compare("1", countValues[0]);
    Compare("3", gcs[1].GroupValue);
    countValues = new List<IComparable>(gcs[1].UniqueValues);
    assertEquals(1, countValues.Count);
    Compare("1", countValues[0]);
    indexSearcher.IndexReader.Dispose();
    dir.Dispose();
}
// A single non-zero character trivially satisfies the "all equal or zero" check.
public void Matches_With_One_Element()
{
    var values = new[] { '9' };

    Assert.IsTrue(Comparer.AreEqualOrZero(values));
}
// A lone wildcard ('0') with no other characters must still match.
public void Matches_With_One_Wildcard_And_Nothing_Else()
{
    var values = new[] { '0' };

    Assert.IsTrue(Comparer.AreEqualOrZero(values));
}
/// <summary>
/// Performs a binary search for <paramref name="item"/> in a sorted array,
/// ordering elements with the supplied <see cref="Comparison{T}"/> delegate.
/// </summary>
/// <typeparam name="T">Type of items.</typeparam>
/// <param name="sortedArray">Sorted array to search.</param>
/// <param name="item">Item to find.</param>
/// <param name="comparison">Comparison delegate defining the sort order.</param>
/// <returns>Index of item of type <see cref="int"/> if it was found; otherwise, null.</returns>
/// <exception cref="ArgumentNullException">Thrown when source array is null.</exception>
public static int? BinarySearch<T>(this T[] sortedArray, T item, Comparison<T> comparison)
{
    // Adapt the delegate to an IComparer<T> and defer to the comparer-based overload.
    var comparer = Comparer<T>.Create(comparison);
    return sortedArray.BinarySearch(item, comparer);
}
/// <summary>
/// Asserts that the formatting XML generated for <paramref name="typeName"/> matches
/// the expected entries for both Table and List views, including labels, widths,
/// positions, and exclusion of entries marked <see cref="View.None"/>.
/// Tuple layout (inferred from usage below — TODO confirm at the call sites):
/// Item1 = entry kind (PropertyName vs. ScriptBlock), Item2 = entry text,
/// Item3 = view flags the entry belongs to, Item4 = optional label override
/// (falls back to Item2), Item5 = optional column width, Item6 = optional
/// expected zero-based position, Item7 = presumably a "hidden/excluded" flag
/// (entries with Item7 == true are filtered out of the expectations).
/// </summary>
private static void TestXmlMarked(IList<XElement> config, IEnumerable<Tuple<EntryType, string, View, string, uint?, uint?, bool>> expectedProps, string typeName)
{
    var xmlData = GetDataFromXml(config, typeName);
    var expectedPropsOrdered = expectedProps.ToList();

    // --- Table view: headers (label + width), compared order-insensitively.
    var tableHeaders = xmlData.TableHeaders;
    var expectedTableHeaders = expectedPropsOrdered
        .Where(tuple => (tuple.Item3 & View.Table) != View.None)
        .Where(tuple => !tuple.Item7)
        .Select(tuple =>
        {
            var strWidth = tuple.Item5?.ToString();
            return (new TableHeaderFromXml
            {
                Label = tuple.Item4 ?? tuple.Item2,
                Width = strWidth,
            });
        })
        .ToList();
    var comparer = new Comparer<TableHeaderFromXml>(
        (l, r) => l.Label == r.Label && l.Width == r.Width);
    Assert.Equal(expectedTableHeaders.Count, tableHeaders.Count);
    Assert.Equal(expectedTableHeaders.OrderBy(i => i.Label), tableHeaders.OrderBy(i => i.Label), comparer);

    // Headers that carry an explicit position must appear at exactly that index.
    var expectedPosTableHeaders = expectedPropsOrdered
        .Where(tuple => (tuple.Item3 & View.Table) != View.None)
        .Where(tuple => tuple.Item6 != null)
        .Select(tuple => new { Label = tuple.Item4 ?? tuple.Item2, Position = (int)tuple.Item6 });
    foreach (var pth in expectedPosTableHeaders)
    {
        var value = tableHeaders.Skip(pth.Position).Select(i => i.Label).First();
        Assert.Equal(pth.Label, value);
    }

    // --- Table view: row entries that reference a property by name.
    var tableRowPropNameValues = xmlData.TableRowPropNameValues;
    var expectedTablePropNameValues = expectedPropsOrdered
        .Where(tuple => tuple.Item1 == EntryType.PropertyName)
        .Where(tuple => (tuple.Item3 & View.Table) != View.None)
        .Where(tuple => !tuple.Item7)
        .Select(tuple => tuple.Item2)
        .ToList();
    Assert.Equal(expectedTablePropNameValues.Count, tableRowPropNameValues.Count);
    Assert.Equal(expectedTablePropNameValues.OrderBy(i => i), tableRowPropNameValues.OrderBy(i => i));

    // --- Table view: row entries defined by a script block.
    var tableRowScriptBlockValues = xmlData.TableRowScriptBlockValues;
    var expectedTableScriptBlockValues = expectedPropsOrdered
        .Where(tuple => tuple.Item1 == EntryType.ScriptBlock)
        .Where(tuple => (tuple.Item3 & View.Table) != View.None)
        .Where(tuple => !tuple.Item7)
        .Select(tuple => tuple.Item2)
        .ToList();
    Assert.Equal(expectedTableScriptBlockValues.Count, tableRowScriptBlockValues.Count);
    Assert.Equal(expectedTableScriptBlockValues.OrderBy(i => i), tableRowScriptBlockValues.OrderBy(i => i));

    // Positioned table row entries: a row entry is either a ScriptBlock or a
    // PropertyName element, so take whichever is present.
    var expectedPosTableItems = expectedPropsOrdered
        .Where(tuple => (tuple.Item3 & View.Table) != View.None)
        .Where(tuple => tuple.Item6 != null)
        .Select(tuple => new { EntryType = tuple.Item1, Item = tuple.Item2, Position = (int)tuple.Item6 });
    foreach (var pti in expectedPosTableItems)
    {
        var value = xmlData.TableRowEntries
            .Select(e => e.Element("ScriptBlock")?.Value ?? e.Element("PropertyName")?.Value)
            .Skip(pti.Position)
            .First();
        Assert.Equal(pti.Item, value);
    }

    // --- List view: property-name entries.
    var listPropNameEntries = xmlData.ListPropNameValues;
    var expectedListPropNameValues = expectedPropsOrdered
        .Where(tuple => tuple.Item1 == EntryType.PropertyName)
        .Where(tuple => (tuple.Item3 & View.List) != View.None)
        .Where(tuple => !tuple.Item7)
        .Select(tuple => tuple.Item2)
        .ToList();
    Assert.Equal(expectedListPropNameValues.Count, listPropNameEntries.Count);
    Assert.Equal(expectedListPropNameValues.OrderBy(i => i), listPropNameEntries.OrderBy(i => i));

    // --- List view: script-block entries (compared in document order, unlike the others).
    var listScriptBlocksEntries = xmlData.ListScriptBlockValues;
    var expectedListScriptBlocksValues = expectedPropsOrdered
        .Where(tuple => tuple.Item1 == EntryType.ScriptBlock)
        .Where(tuple => (tuple.Item3 & View.List) != View.None)
        .Where(tuple => !tuple.Item7)
        .Select(tuple => tuple.Item2)
        .ToList();
    Assert.Equal(expectedListScriptBlocksValues.Count, listScriptBlocksEntries.Count);
    Assert.Equal(expectedListScriptBlocksValues, listScriptBlocksEntries);

    // Positioned list entries, same ScriptBlock-or-PropertyName resolution as above.
    var expectedPosListItems = expectedPropsOrdered
        .Where(tuple => (tuple.Item3 & View.List) != View.None)
        .Where(tuple => tuple.Item6 != null)
        .Select(tuple => new { EntryType = tuple.Item1, Item = tuple.Item2, Position = (int)tuple.Item6 });
    foreach (var pli in expectedPosListItems)
    {
        var value = xmlData.ListEntries
            .Select(e => e.Element("ScriptBlock")?.Value ?? e.Element("PropertyName")?.Value)
            .Skip(pli.Position)
            .First();
        Assert.Equal(pli.Item, value);
    }

    // --- Entries with View.None must not leak into any view output.
    var expectedExcludedValues = expectedPropsOrdered
        .Where(tuple => tuple.Item3 == View.None)
        .Select(tuple => tuple.Item2)
        .ToList();
    foreach (var expectedExcludedValue in expectedExcludedValues)
    {
        Assert.DoesNotContain(expectedExcludedValue, listPropNameEntries);
        Assert.DoesNotContain(expectedExcludedValue, listScriptBlocksEntries);
        Assert.DoesNotContain(expectedExcludedValue, tableHeaders.Select(h => h.Label));
        Assert.DoesNotContain(expectedExcludedValue, tableRowPropNameValues);
        Assert.DoesNotContain(expectedExcludedValue, tableRowScriptBlockValues);
    }
}
// Methods
// Wraps a Comparison<T2> delegate; the default comparer for T2 is cached alongside it.
public FunctorComparer(Comparison<T2> comparison)
{
    this.comparison = comparison;
    c = Comparer<T2>.Default;
}
/// <summary>Creates an empty tree whose items are ordered by <paramref name="comparer"/>.</summary>
public BinaryTree(Comparer<T> comparer)
{
    base.Comparer = comparer;
    base.Root = null;
}
/// <summary>
/// Produces EIP-1559 fee suggestions from an eth_feeHistory result, one entry per
/// time factor from 0 (most urgent) to MaxTimeFactor (most patient).
/// </summary>
/// <param name="feeHistory">Fee history: per-block base fees and gas-used ratios.</param>
/// <param name="tip">Minimum priority fee (tip) to offer.</param>
/// <returns>Suggestions ordered from smallest to largest time factor.</returns>
public Fee1559[] SuggestFees(FeeHistoryResult feeHistory, BigInteger tip)
{
    var baseFee = feeHistory.BaseFeePerGas.Select(x => x == null ? 0 : x.Value).ToArray();
    var gasUsedRatio = feeHistory.GasUsedRatio;

    // If a block is full then the baseFee of the next block is copied. The reason is that
    // in full blocks the minimal tip might not be enough to get included.
    // The last (pending) block is also assumed to end up being full in order to give some
    // upwards bias for urgent suggestions.
    // BUGFIX: multiply before dividing. The previous "*= 9 / 8" used integer division
    // (9 / 8 == 1), making the intended 12.5% upward bias a no-op.
    baseFee[baseFee.Length - 1] = baseFee[baseFee.Length - 1] * 9 / 8;
    for (var i = gasUsedRatio.Length - 1; i >= 0; i--)
    {
        if (gasUsedRatio[i] > (decimal)0.9)
        {
            baseFee[i] = baseFee[i + 1];
        }
    }

    // Sort block indices by base fee so SuggestBaseFee can pick percentile-style values.
    var order = new int[baseFee.Length];
    for (var i = 0; i < baseFee.Length; i++)
    {
        order[i] = i;
    }
#if DOTNET35
    var comparer = Comparer.Create<int>
#else
    var comparer = Comparer<int>.Create
#endif
    ((int x, int y) =>
    {
        var aa = baseFee[x];
        var bb = baseFee[y];
        if (aa < bb)
        {
            return (-1);
        }
        if (aa > bb)
        {
            return (1);
        }
        return (0);
    });
    Array.Sort(order, comparer);

    var result = new List<Fee1559>();
    BigDecimal maxBaseFee = 0;
    for (var timeFactor = MaxTimeFactor; timeFactor >= 0; timeFactor--)
    {
        var bf = SuggestBaseFee(baseFee, order, timeFactor);
        var t = new BigDecimal(tip, 0);
        if (bf > maxBaseFee)
        {
            maxBaseFee = bf;
        }
        else
        {
            // If a narrower time window yields a lower base fee suggestion than a wider window
            // then we are probably in a price dip. In this case getting included with a low tip
            // is not guaranteed; instead we use the higher base fee suggestion and also offer
            // extra tip to increase the chance of getting included in the base fee dip.
            t += (maxBaseFee - bf) * ExtraTipRatio;
            bf = maxBaseFee;
        }
        result.Add(new Fee1559()
        {
            BaseFee = bf.Floor().Mantissa,
            MaxFeePerGas = (bf + t).Floor().Mantissa,
            MaxPriorityFeePerGas = t.Floor().Mantissa
        });
    }
    // Built from most patient to most urgent; reverse so index 0 is timeFactor 0.
    result.Reverse();
    return (result.ToArray());
}
// An empty input is vacuously "all equal or zero".
public void Matches_With_Zero_Length()
{
    var values = new char[0];

    Assert.IsTrue(Comparer.AreEqualOrZero(values));
}
/// <summary>An item dominates another when the comparer ranks it less than or equal to it.</summary>
protected override bool Dominates(T x, T y) => Comparer.Compare(x, y) <= 0;
/// <summary>Creates a collection that keeps its items ordered by <paramref name="comparator"/>.</summary>
public SortedObservableCollection(Comparer<T> comparator)
{
    this._comparator = comparator;
}
/// <summary>Parse a market data update message</summary>
public void ApplyUpdate(IMarketUpdate update) // Worker thread context
{
    lock (OrderBook)
    {
        // Buffer the incoming update and keep the pending list in nonce order.
        m_pending.Add(update);
        m_pending.Sort(x => x.Nonce);

        // Remove updates older than the OrderBook nonce
        // Note, snapshots and deltas in the pending list can have the same nonce value.
        var cnt = m_pending.CountWhile(x => x.Nonce < OrderBook.Nonce || (x.Nonce == OrderBook.Nonce && x is MarketSnapshot));
        m_pending.RemoveRange(0, cnt);

        // If we're still waiting for the first snapshot, leave
        if (m_pending.Count == 0 || (!(m_pending[0] is MarketSnapshot) && OrderBook.Nonce == 0))
        {
            if (m_pending.Count < MaxBufferedUpdates)
            {
                return;
            }
            // Too many deltas buffered with no snapshot to anchor them.
            throw new Exception($"Market data snapshot never arrived for {Pair.Id}");
        }

        // If the first update is a snapshot, we can dump any previous book
        // data and repopulate it from the snapshot and subsequent deltas.
        if (m_pending[0] is MarketSnapshot snap)
        {
            OrderBook.BuyOffers.Clear();
            OrderBook.SellOffers.Clear();
            PopulateBook(OrderBook.BuyOffers, snap.Buys, -1);
            PopulateBook(OrderBook.SellOffers, snap.Sells, +1);
            OrderBook.Nonce = snap.Nonce;
            m_pending.RemoveAt(0);
        }

        // Apply the rest of the delta updates to the order book
        foreach (var upd in m_pending)
        {
            if (!(upd is MarketDelta delta))
            {
                throw new Exception("There should be a maximum of one snapshot in a group of updates");
            }
            if (!(OrderBook.Nonce <= delta.Nonce))
            {
                throw new Exception("Updates should have increasing nonce values");
            }
            UpdateBook(OrderBook.BuyOffers, delta.Buys, -1);
            UpdateBook(OrderBook.SellOffers, delta.Sells, +1);
            OrderBook.Nonce = delta.Nonce;
        }

        // All pending updates have been applied
        m_pending.Clear();
    }

    // Handlers
    // 'sign' flips the sort direction: -1 for buys (best price first), +1 for sells.
    void UpdateBook(List<OrderBook.Offer> book, List<MarketDelta.Offer> changes, int sign)
    {
        // Merge the 'changes' into 'book'
        foreach (var chg in changes)
        {
            // Find the index of 'price' in 'book'
            var order = new OrderBook.Offer(chg.Rate, chg.Amount);
            var cmp = Comparer<OrderBook.Offer>.Create((l, r) => sign * l.PriceQ2B.CompareTo(r.PriceQ2B));
            var idx = book.BinarySearch(order, cmp);
            switch (chg.Type)
            {
            default:
                throw new Exception($"Unknown change type: {chg.Type}");
            case MarketDelta.EUpdateType.Add:
                // ~idx is the insertion point when BinarySearch reports "not found".
                if (idx < 0) { book.Insert(~idx, order); } else { book[idx] = order; }
                break;
            case MarketDelta.EUpdateType.Update:
                if (idx >= 0) { book[idx] = order; }
                break;
            case MarketDelta.EUpdateType.Remove:
                if (idx >= 0) { book.RemoveAt(idx); }
                break;
            }
        }
    }
    void PopulateBook(List<OrderBook.Offer> book, List<MarketSnapshot.Offer> offers, int sign)
    {
        // Populate 'book' from 'offers'
        offers.Sort(x => sign * x.Rate);
        book.AddRange(offers.Select(x => new OrderBook.Offer(x.Rate, x.Amount)));
    }
}
/// <summary>Creates an empty min-heap; ordering is delegated to the base heap via <paramref name="comparer"/>.</summary>
public MinHeap(Comparer <T> comparer) : base(comparer) { }
/// <summary>Creates a min-heap seeded from <paramref name="collection"/>; ordering is delegated to the base heap via <paramref name="comparer"/>.</summary>
public MinHeap(IEnumerable <T> collection, Comparer <T> comparer) : base(collection, comparer) { }
/// <summary>
/// Sorts the list in place, in ascending order, using the comb sort algorithm.
/// </summary>
/// <typeparam name="T">Element type.</typeparam>
/// <param name="collection">List to sort in place.</param>
/// <param name="comparer">Element comparer; when null, <see cref="Comparer{T}.Default"/> is used.</param>
public static void CombSort<T>(this IList<T> collection, Comparer<T> comparer = null)
{
    comparer = comparer ?? Comparer<T>.Default;

    // BUGFIX: this method was named CombSort but silently delegated to
    // ShellSortAscending. It now performs an actual comb sort; the observable
    // result (ascending order per 'comparer') is unchanged.
    int gap = collection.Count;
    bool swapped = true;
    while (gap > 1 || swapped)
    {
        // Shrink the gap by the standard factor of ~1.3 (integer form: *10/13).
        gap = gap * 10 / 13;
        if (gap < 1)
        {
            gap = 1;
        }
        swapped = false;
        for (int i = 0; i + gap < collection.Count; i++)
        {
            if (comparer.Compare(collection[i], collection[i + gap]) > 0)
            {
                T tmp = collection[i];
                collection[i] = collection[i + gap];
                collection[i + gap] = tmp;
                swapped = true;
            }
        }
    }
}
/// <summary>Creates an empty heap ordered by <paramref name="comparer"/>; chains to the collection-taking constructor with an empty sequence.</summary>
protected Heap(Comparer <T> comparer) : this(Enumerable.Empty <T>(), comparer) { }