/// <summary>
/// Re-sorts the kit component's product list according to the selected
/// quick-sort option and rebinds the KitParts grid.
/// </summary>
/// <param name="sender">Event source (the QuickSort dropdown).</param>
/// <param name="e">Event arguments.</param>
protected void QuickSort_SelectedIndexChanged(object sender, EventArgs e)
{
    IList <KitProduct> parts = _KitComponent.KitProducts;
    switch (QuickSort.SelectedIndex)
    {
        case 2:
            // Name, descending
            parts.Sort("DisplayName", CommerceBuilder.Common.SortDirection.DESC);
            break;
        case 3:
            // Price, ascending
            parts.Sort("CalculatedPrice", CommerceBuilder.Common.SortDirection.ASC);
            break;
        case 4:
            // Price, descending
            parts.Sort("CalculatedPrice", CommerceBuilder.Common.SortDirection.DESC);
            break;
        default:
            // Any other index (including 0 and 1) falls back to name, ascending
            parts.Sort("DisplayName", CommerceBuilder.Common.SortDirection.ASC);
            break;
    }
    KitParts.DataSource = parts;
    KitParts.DataBind();
    // Reset the dropdown to its first item after applying the sort
    QuickSort.SelectedIndex = 0;
}
/// <summary>
/// Parses an actions string of the form "methods[:headers]" into the
/// normalized, sorted, unmodifiable Methods and RequestHeaders lists.
/// At most one ':' separator is allowed.
/// </summary>
/// <param name="actions">Actions string, e.g. "GET,POST:X-Foo,X-Bar".</param>
/// <exception cref="IllegalArgumentException">If more than one ':' is present.</exception>
private void Init(String actions)
{
    ParseURI(Name);
    int colon = actions.IndexOf(':');
    // More than one ':' is malformed
    if (actions.LastIndexOf(':') != colon)
    {
        throw new IllegalArgumentException("invalid actions string");
    }
    String methods, headers;
    if (colon == -1)
    {
        // No header section present
        methods = actions;
        headers = "";
    }
    else
    {
        methods = actions.Substring(0, colon);
        headers = actions.Substring(colon + 1);
    }
    // Normalize, sort, and freeze both lists
    IList <String> l = NormalizeMethods(methods);
    l.Sort();
    this.Methods = Collections.UnmodifiableList(l);
    l = NormalizeHeaders(headers);
    l.Sort();
    this.RequestHeaders = Collections.UnmodifiableList(l);
    // Cache the canonical actions string
    this.Actions_Renamed = Actions();
}
/// <summary>
/// Verifies WebAppUtils.GetProxyHostsAndPortsForAmFilter for each way a proxy
/// address can be configured: no config at all, explicit PROXY_ADDRESS
/// (which wins over HA addresses), a single RM webapp address, RM HA with
/// HTTP, and RM HA with HTTPS-only policy.
/// </summary>
public virtual void TestGetProxyHostsAndPortsForAmFilter()
{
    // Check no configs given
    Configuration conf = new Configuration(false);
    IList <string> proxyHosts = WebAppUtils.GetProxyHostsAndPortsForAmFilter(conf);
    NUnit.Framework.Assert.AreEqual(1, proxyHosts.Count);
    NUnit.Framework.Assert.AreEqual(WebAppUtils.GetResolvedRMWebAppURLWithoutScheme(conf
        ), proxyHosts[0]);
    // Check PROXY_ADDRESS has priority (even with HA enabled and per-RM addresses set)
    conf = new Configuration(false);
    conf.Set(YarnConfiguration.ProxyAddress, "host1:1000");
    conf.SetBoolean(YarnConfiguration.RmHaEnabled, true);
    conf.Set(YarnConfiguration.RmHaIds, "rm1,rm2,rm3");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm1", "host2:2000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm2", "host3:3000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm3", "host4:4000");
    proxyHosts = WebAppUtils.GetProxyHostsAndPortsForAmFilter(conf);
    NUnit.Framework.Assert.AreEqual(1, proxyHosts.Count);
    NUnit.Framework.Assert.AreEqual("host1:1000", proxyHosts[0]);
    // Check getting a single RM_WEBAPP_ADDRESS
    conf = new Configuration(false);
    conf.Set(YarnConfiguration.RmWebappAddress, "host2:2000");
    proxyHosts = WebAppUtils.GetProxyHostsAndPortsForAmFilter(conf);
    NUnit.Framework.Assert.AreEqual(1, proxyHosts.Count);
    proxyHosts.Sort();
    NUnit.Framework.Assert.AreEqual("host2:2000", proxyHosts[0]);
    // Check getting multiple RM_WEBAPP_ADDRESSes (RM HA); ".rm4" is not in
    // RM_HA_IDS so its "dummy" address must be ignored
    conf = new Configuration(false);
    conf.SetBoolean(YarnConfiguration.RmHaEnabled, true);
    conf.Set(YarnConfiguration.RmHaIds, "rm1,rm2,rm3");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm1", "host2:2000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm2", "host3:3000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm3", "host4:4000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm4", "dummy");
    conf.Set(YarnConfiguration.RmWebappHttpsAddress + ".rm1", "host5:5000");
    conf.Set(YarnConfiguration.RmWebappHttpsAddress + ".rm2", "host6:6000");
    proxyHosts = WebAppUtils.GetProxyHostsAndPortsForAmFilter(conf);
    NUnit.Framework.Assert.AreEqual(3, proxyHosts.Count);
    proxyHosts.Sort();
    NUnit.Framework.Assert.AreEqual("host2:2000", proxyHosts[0]);
    NUnit.Framework.Assert.AreEqual("host3:3000", proxyHosts[1]);
    NUnit.Framework.Assert.AreEqual("host4:4000", proxyHosts[2]);
    // Check getting multiple RM_WEBAPP_ADDRESSes (RM HA) with HTTPS: only the
    // HTTPS addresses (rm1, rm2) should be returned
    conf = new Configuration(false);
    conf.Set(YarnConfiguration.YarnHttpPolicyKey, HttpConfig.Policy.HttpsOnly.ToString
        ());
    conf.SetBoolean(YarnConfiguration.RmHaEnabled, true);
    conf.Set(YarnConfiguration.RmHaIds, "rm1,rm2,rm3,dummy");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm1", "host2:2000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm2", "host3:3000");
    conf.Set(YarnConfiguration.RmWebappAddress + ".rm3", "host4:4000");
    conf.Set(YarnConfiguration.RmWebappHttpsAddress + ".rm1", "host5:5000");
    conf.Set(YarnConfiguration.RmWebappHttpsAddress + ".rm2", "host6:6000");
    proxyHosts = WebAppUtils.GetProxyHostsAndPortsForAmFilter(conf);
    NUnit.Framework.Assert.AreEqual(2, proxyHosts.Count);
    proxyHosts.Sort();
    NUnit.Framework.Assert.AreEqual("host5:5000", proxyHosts[0]);
    NUnit.Framework.Assert.AreEqual("host6:6000", proxyHosts[1]);
}
/// <summary>
/// Verifies that sorting the list yields the expected string representation.
/// </summary>
/// <param name="ini">Initial list contents.</param>
/// <param name="exp">Expected ToString() output after sorting.</param>
public void Test_Sort(int[] ini, string exp)
{
    // Arrange
    list.Init(ini);

    // Act
    list.Sort();

    // Assert
    Assert.AreEqual(exp, list.ToString());
}
/// <summary>
/// Verifies that Sort(false) returns the list sorted ascending.
/// NOTE(review): "Passe" in the test name looks like a typo for "Passed";
/// left unchanged to avoid breaking test discovery/reports.
/// </summary>
/// <param name="actualArray">Unsorted input values.</param>
/// <param name="expectedArray">Values in expected ascending order.</param>
public void Sort_WhenValidSortPasse_ShouldSortingListAscending(int[] actualArray, int[] expectedArray)
{
    CreateLists(actualArray, expectedArray, null);
    // Sort(false) returns the sorted list (assigned back rather than sorting
    // purely in place) — presumably false selects ascending; confirm against
    // the list implementation under test.
    actual = actual.Sort(false);
    Assert.AreEqual(expected, actual);
}
/// <summary>
/// Verifies that Sort(false) arranges the collection in descending order.
/// NOTE(review): here Sort(false) is expected to produce descending order,
/// while a similar test elsewhere in this file treats Sort(false) as
/// ascending — these appear to target different list implementations; verify.
/// </summary>
/// <param name="inputArray">Unsorted input values.</param>
/// <param name="expectedArray">Values in expected descending order.</param>
public void SortDesc_WhenCollectionIsUnsorted_ShouldSortCollectionInDescendingOrder( int[] inputArray, int[] expectedArray)
{
    Create_Arrays(inputArray, expectedArray);
    actualList.Sort(false);
    Assert.AreEqual(expectedList, actualList);
}
/// <summary>
/// Verifies that inserting a value at a position and then sorting produces
/// the expected array contents.
/// </summary>
/// <param name="ini">Initial list contents.</param>
/// <param name="pos">Insertion position.</param>
/// <param name="val">Value to insert.</param>
/// <param name="exp">Expected contents after sorting.</param>
public void TestAddPosSort(int[] ini, int pos, int val, int[] exp)
{
    // Arrange
    lst.Init(ini);

    // Act
    lst.AddPos(pos, val);
    lst.Sort();
    var actualArray = lst.ToArray();

    // Assert
    CollectionAssert.AreEqual(exp, actualArray);
}
/// <summary>
/// Binds the search results panel: sorts the matched content nodes by the
/// user-selected sort expression ("Field DIR"), then binds the header and
/// paging controls; when there are no results, shows the appropriate
/// empty-category or no-results placeholder instead.
/// </summary>
private void BindSearchResultsPanel()
{
    Trace.Write(this.GetType().ToString(), "Begin Bind Search Results");
    if (_contentNodes.Count > 0)
    {
        //SORT THE CATEGORIES ACCORDINGLY
        string sortExpression = SortResults.SelectedValue;
        if (!string.IsNullOrEmpty(sortExpression))
        {
            string[] sortTokens = sortExpression.Split(' ');
            // BUGFIX: a sort value without a direction token (e.g. "Name"
            // instead of "Name ASC") previously threw
            // IndexOutOfRangeException; default to ascending when the
            // direction token is missing.
            System.Web.UI.WebControls.SortDirection dir =
                (sortTokens.Length < 2 || sortTokens[1] == "ASC")
                    ? System.Web.UI.WebControls.SortDirection.Ascending
                    : System.Web.UI.WebControls.SortDirection.Descending;
            switch (sortTokens[0])
            {
                case "Featured":
                    _contentNodes.Sort(new FeaturedComparer(dir));
                    break;
                case "Price":
                    _contentNodes.Sort(new PriceComparer(dir));
                    break;
                case "Name":
                    _contentNodes.Sort(new NameComparer(dir));
                    break;
                case "Manufacturer":
                    _contentNodes.Sort(new ManufacturerComparer(dir));
                    break;
            }
        }
        phCategoryContents.Visible = true;
        //INITIALIZE PAGING VARIABLES
        InitializePagingVars(false);
        //BIND THE RESULT PANE
        BindResultHeader();
        //BIND THE PAGING CONTROLS FOOTER
        BindPagingControls();
    }
    else
    {
        // No results: fill in the "0 of 0" message and choose which empty
        // placeholder to show
        ResultIndexMessage.Text = string.Format(ResultIndexMessage.Text, 0, 0, 0);
        //HIDE THE CONTENTS
        phCategoryContents.Visible = false;
        phEmptyCategory.Visible = (_category != null && _category.CatalogNodes.Count == 0);
        phNoSearchResults.Visible = !phEmptyCategory.Visible;
    }
    //UPDATE AJAX PANEL
    SearchResultsAjaxPanel.Update();
    Trace.Write(this.GetType().ToString(), "End Bind Search Results");
}
/// <summary>
/// Kills robots by age: the working copy is ordered with the age comparator
/// so death candidates come first, and a fixed fraction of the original
/// population size is culled.
/// </summary>
/// <param name="population">Population being updated.</param>
/// <param name="listCopy">Working copy of the robot list; sorted in place.</param>
/// <param name="originalList">Original list; its size determines the kill count.</param>
private void killRobotsByAge(Population population, IList <RobotInfo> listCopy, IList <RobotInfo> originalList)
{
    // Order candidates by age (comparatorByAge defines which end is "oldest")
    listCopy.Sort(RobotInfo.comparatorByAge);
    // Kill a fixed proportion of the population, rounded to the nearest integer
    int numberToKill = (int)Math.Round(settings.maximumDeathByAge * originalList.Count);
    killRobots(listCopy, originalList, numberToKill, population, settings.probabilityOfDeathByAge);
}
/// <summary>
/// Populates the list entries: sorts the given high-score list, fills the
/// entry rows starting at <paramref name="offset"/>, and resets any rows
/// beyond the number of available scores.
/// </summary>
/// <param name="offset">Index offset into the entries table for this list.</param>
/// <param name="list">High-score entries to display; sorted in place.</param>
private void PopulateLists(
    int offset,
    IList <HighScoreEntry> list)
{
    // Sort it
    list.Sort();

    // Go through the blow away the list
    for (int i = 0; i < HighScoreTable.MaximumEntries; i++)
    {
        // See if we have it
        int n = i + offset;

        if (i > list.Count - 1)
        {
            // Fewer scores than rows: blank out the remaining entry
            entries[n].Reset();
            continue;
        }

        // Populate the list
        entries[n].Name.Text = list[i].Name;
        entries[n].UtcWhen.Text = list[i].UtcWhen.ToString();
        // "+ \"\"" coerces Word to a string — presumably guards a null Word; confirm
        entries[n].Word.Text = list[i].Word + "";
        entries[n].Score.Text = list[i].Score.ToString("N0");
    }
}
/// <summary>
/// Returns a copy of the element list in which each requested capture group
/// is collapsed into a single merged chunk; elements outside the requested
/// groups are copied through unchanged.
/// </summary>
/// <param name="groups">Indices of the groups to merge.</param>
/// <returns>New list with the group spans replaced by merged chunks.</returns>
public virtual IList <ICoreMap> GetMergedList(params int[] groups)
{
    IList <ICoreMap> res = new List <ICoreMap>();
    // 'last' tracks the end of the most recently emitted span
    int last = 0;
    // Process groups in ascending start order so spans are emitted left to right
    IList <int> orderedGroups = CollectionUtils.AsList(groups);
    orderedGroups.Sort();
    foreach (int group in orderedGroups)
    {
        int groupStart = Start(group);
        // Skip groups that overlap a span already merged
        if (groupStart >= last)
        {
            // Copy the untouched elements before this group
            Sharpen.Collections.AddAll(res, elements.SubList(last, groupStart));
            int groupEnd = End(group);
            if (groupEnd - groupStart >= 1)
            {
                ICoreMap merged = CreateMergedChunk(groupStart, groupEnd);
                res.Add(merged);
                last = groupEnd;
            }
        }
    }
    // Copy the tail after the final merged group
    Sharpen.Collections.AddAll(res, elements.SubList(last, elements.Count));
    return(res);
}
/// <summary>
/// Replaces spans of the input list covered by matched expressions with each
/// expression's annotation; unmatched elements are copied through unchanged.
/// Overlapping matches are dropped (only matches starting at or after the
/// previous match's end survive).
/// NOTE: sorts <paramref name="matchedExprs"/> in place by token offset.
/// </summary>
/// <param name="list">Original elements.</param>
/// <param name="matchedExprs">Matched expressions to splice in; may be null.</param>
/// <returns>The merged list, or the original list when there are no matches.</returns>
public static IList <ICoreMap> ReplaceMerged <_T0, _T1>(IList <_T0> list, IList <_T1> matchedExprs)
    where _T0 : ICoreMap
    where _T1 : MatchedExpression
{
    if (matchedExprs == null)
    {
        return(list);
    }
    // Process matches left to right
    matchedExprs.Sort(ExprTokenOffsetComparator);
    IList <ICoreMap> merged = new List <ICoreMap>(list.Count);
    // Approximate size
    int last = 0;
    foreach (MatchedExpression expr in matchedExprs)
    {
        int start = expr.chunkOffsets.First();
        int end = expr.chunkOffsets.Second();
        // Drop matches overlapping an already-consumed span
        if (start >= last)
        {
            Sharpen.Collections.AddAll(merged, list.SubList(last, start));
            ICoreMap m = expr.GetAnnotation();
            merged.Add(m);
            last = end;
        }
    }
    // Add rest of elements
    if (last < list.Count)
    {
        Sharpen.Collections.AddAll(merged, list.SubList(last, list.Count));
    }
    return(merged);
}
/// <summary>
/// Given a sequence, applies our patterns over the sequence and returns
/// all non overlapping matches.
/// </summary>
/// <remarks>
/// Given a sequence, applies our patterns over the sequence and returns
/// all non overlapping matches. When multiple patterns overlaps,
/// matched patterns are selected by order specified by the comparator
/// </remarks>
/// <param name="elements">input sequence to match against</param>
/// <param name="cmp">comparator indicating order that overlapped sequences should be selected.</param>
/// <returns>list of match results that are non-overlapping</returns>
public virtual IList <ISequenceMatchResult <T> > FindNonOverlapping <_T0, _T1>(IList <_T0> elements, IComparator <_T1> cmp)
    where _T0 : T
{
    // Only run patterns whose trigger fires on this input
    ICollection <SequencePattern <T> > triggered = GetTriggeredPatterns(elements);
    IList <ISequenceMatchResult <T> > all = new List <ISequenceMatchResult <T> >();
    int i = 0;
    foreach (SequencePattern <T> p in triggered)
    {
        if (Thread.Interrupted())
        {
            // Allow interrupting
            throw new RuntimeInterruptedException();
        }
        // Collect every match for this pattern; SetOrder tags matches with the
        // pattern's position so overlap resolution can break ties
        SequenceMatcher <T> m = p.GetMatcher(elements);
        m.SetMatchWithResult(matchWithResult);
        m.SetOrder(i);
        while (m.Find())
        {
            all.Add(m.ToBasicSequenceMatchResult());
        }
        i++;
    }
    // Resolve overlaps with the supplied priority comparator, then return the
    // surviving matches in offset order
    IList <ISequenceMatchResult <T> > res = IntervalTree.GetNonOverlapping(all, SequenceMatchResultConstants.ToInterval, cmp);
    res.Sort(SequenceMatchResultConstants.OffsetComparator);
    return(res);
}
/// <summary>
/// Builds a message-box description. When no buttons are supplied, a single
/// "Ok" button is synthesized and used as both the default and cancel
/// button (overriding the defaultButton/cancelButton arguments); otherwise
/// the supplied buttons are copied and ordered by GetButtonOrder.
/// </summary>
public MessageBox(string title, MessageBoxIcon icon, string mainInstruction, string content, string expandedInformation, string footer, MessageBoxIcon footerIcon, IList <IMessageBoxButton <T> > buttons, IMessageBoxButton <T>?defaultButton, IMessageBoxButton <T>?cancelButton)
{
    Title = title;
    Icon = icon;
    MainInstruction = mainInstruction;
    Content = content;
    ExpandedInformation = expandedInformation;
    Footer = footer;
    FooterIcon = footerIcon;
    DefaultButton = defaultButton;
    CancelButton = cancelButton;
    if ((buttons?.Count ?? 0) == 0)
    {
        // No buttons supplied: fall back to a lone "Ok" button that serves as
        // both the default and the cancel action
        Buttons = new List <IMessageBoxButton <T> >()
        {
            new MessageBoxButton("Ok", default)
        };
        DefaultButton = Buttons[0];
        CancelButton = Buttons[0];
    }
    else
    {
        // Copy so the caller's list is not mutated, then apply canonical order
        Buttons = buttons !.ToList();
        Buttons.Sort((a, b) => GetButtonOrder(a?.Name).CompareTo(GetButtonOrder(b?.Name)));
    }
}
/// <summary>Get the next subdirectory within the block pool slice.</summary>
/// <param name="prev">The previously returned subdirectory name, or null to start.</param>
/// <param name="dir">Directory to list for subdirectories.</param>
/// <returns>
/// The next subdirectory within the block pool slice, or
/// null if there are no more.
/// </returns>
/// <exception cref="System.IO.IOException"/>
private string GetNextSubDir(string prev, FilePath dir)
{
    IList <string> children = IOUtils.ListDirectory(dir, FsVolumeImpl.SubdirFilter.Instance
        );
    // Listing the directory invalidates any cached state
    this.cache = null;
    this.cacheMs = 0;
    if (children.Count == 0)
    {
        FsVolumeImpl.Log.Trace("getNextSubDir({}, {}): no subdirectories found in {}", this
            ._enclosing.storageID, this.bpid, dir.GetAbsolutePath());
        return(null);
    }
    // Sort so NextSorted can pick the first entry strictly after 'prev'
    children.Sort();
    string nextSubDir = FsVolumeImpl.NextSorted(children, prev);
    if (nextSubDir == null)
    {
        FsVolumeImpl.Log.Trace("getNextSubDir({}, {}): no more subdirectories found in {}"
            , this._enclosing.storageID, this.bpid, dir.GetAbsolutePath());
    }
    else
    {
        FsVolumeImpl.Log.Trace("getNextSubDir({}, {}): picking next subdirectory {} " + "within {}"
            , this._enclosing.storageID, this.bpid, nextSubDir, dir.GetAbsolutePath());
    }
    return(nextSubDir);
}
/// <summary>
/// Verifies that volumes added to a DataNode in a federated cluster are
/// picked up by every namespace: new blocks land on the new volumes for the
/// first namespace, and the second namespace sees the new (empty) volumes.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Sharpen.TimeoutException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="Org.Apache.Hadoop.Conf.ReconfigurationException"/>
public virtual void TestAddVolumesToFederationNN()
{
    // Starts a cluster with 2 NameNodes and 1 DataNode; the DataNode starts
    // with 2 volumes. (The original comment claimed 3 DataNodes, which
    // contradicts numDataNodes below.)
    int numNameNodes = 2;
    int numDataNodes = 1;
    StartDFSCluster(numNameNodes, numDataNodes);
    Path testFile = new Path("/test");
    // Create a file on the first namespace with 4 blocks.
    CreateFile(0, testFile, 4);
    // Create a file on the second namespace with 4 blocks.
    CreateFile(1, testFile, 4);
    // Add 2 volumes to the first DataNode.
    int numNewVolumes = 2;
    AddVolumes(numNewVolumes);
    // Append to the file on the first namespace.
    DFSTestUtil.AppendFile(cluster.GetFileSystem(0), testFile, BlockSize * 8);
    IList <IList <int> > actualNumBlocks = GetNumBlocksReport(0);
    NUnit.Framework.Assert.AreEqual(cluster.GetDataNodes().Count, actualNumBlocks.Count
        );
    IList <int> blocksOnFirstDN = actualNumBlocks[0];
    blocksOnFirstDN.Sort();
    // Expected per-volume block counts after the append — presumably 2 blocks
    // on each old volume and 4 on each new one; confirm against block placement.
    NUnit.Framework.Assert.AreEqual(Arrays.AsList(2, 2, 4, 4), blocksOnFirstDN);
    // Verify the second namespace also has the new volumes and they are empty.
    actualNumBlocks = GetNumBlocksReport(1);
    NUnit.Framework.Assert.AreEqual(4, actualNumBlocks[0].Count);
    NUnit.Framework.Assert.AreEqual(numNewVolumes, Sharpen.Collections.Frequency(actualNumBlocks
        [0], 0));
}
/// <summary>
/// Creates a range facet counter over the given facet data, optionally
/// restricted to a caller-supplied set of range expressions.
/// NOTE: sorts <paramref name="predefinedRanges"/> in place.
/// NOTE(review): <paramref name="docBase"/> is not used in this constructor.
/// </summary>
public RangeFacetCountCollector(string name, FacetDataCache dataCache, int docBase, FacetSpec ospec, IList <string> predefinedRanges)
{
    m_name = name;
    m_dataCache = dataCache;
    m_countLength = m_dataCache.Freqs.Length;
    m_count = new LazyBigInt32Array(m_countLength);
    m_array = m_dataCache.OrderArray;
    m_ospec = ospec;
    if (predefinedRanges != null)
    {
        m_predefinedRanges = new TermStringList();
        // Sorted before AddAll — presumably TermStringList requires terms in
        // order; confirm against its contract
        predefinedRanges.Sort();
        m_predefinedRanges.AddAll(predefinedRanges);
    }
    else
    {
        m_predefinedRanges = null;
    }
    if (m_predefinedRanges != null)
    {
        // Pre-parse each range expression into index bounds for fast counting
        m_predefinedRangeIndexes = new int[m_predefinedRanges.Count][];
        int i = 0;
        foreach (string range in this.m_predefinedRanges)
        {
            m_predefinedRangeIndexes[i++] = FacetRangeFilter.Parse(this.m_dataCache, range);
        }
    }
}
/// <summary>
/// Asserts that two documents contain equivalent fields: the same count and,
/// after sorting both field lists by name, matching string values field by
/// field. NOTE: binary fields are only checked for presence on both sides
/// (and only when debug asserts are enabled), not for byte equality.
/// NOTE(review): the failure messages concatenate the lists with '+', which
/// prints the list type rather than its contents.
/// </summary>
public static void VerifyEquals(Document d1, Document d2)
{
    IList <IIndexableField> ff1 = d1.Fields;
    IList <IIndexableField> ff2 = d2.Fields;
    // Field order is not significant — compare in name order
    ff1.Sort(fieldNameComparer);
    ff2.Sort(fieldNameComparer);
    Assert.AreEqual(ff1.Count, ff2.Count, ff1 + " : " + ff2);
    for (int i = 0; i < ff1.Count; i++)
    {
        IIndexableField f1 = ff1[i];
        IIndexableField f2 = ff2[i];
        if (f1.GetBinaryValue() != null)
        {
            // Binary field: only assert the counterpart is binary too
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(f2.GetBinaryValue() != null);
            }
        }
        else
        {
            string s1 = f1.GetStringValue();
            string s2 = f2.GetStringValue();
            Assert.AreEqual(s1, s2, ff1 + " : " + ff2);
        }
    }
}
/// <summary>
/// Handles the grid's "AddNote" command: loads the order, renders its
/// non-system notes (newest first) as an HTML history block, and shows the
/// add-note dialog.
/// </summary>
/// <param name="sender">Event source (the notes grid).</param>
/// <param name="e">Carries CommandArgument formatted as "orderId:orderNumber".</param>
protected void OrderNotesGrid_RowCommand(object sender, GridViewCommandEventArgs e)
{
    if (e.CommandName.Equals("AddNote"))
    {
        // CommandArgument is "orderId:orderNumber"
        string[] data = ((string)e.CommandArgument).Split(':');
        int orderId = AlwaysConvert.ToInt(data[0]);
        HiddenOrderId.Value = orderId.ToString();
        AddDialogCaption.Text = String.Format(AddDialogCaption.Text, data[1]);
        List <string> history = new List <string>();
        Order order = EntityLoader.Load <Order>(orderId);
        if (order != null)
        {
            IList <OrderNote> notes = order.Notes;
            // Newest notes first
            notes.Sort(new PropertyComparer("CreatedDate", CommerceBuilder.Common.SortDirection.DESC));
            foreach (OrderNote note in notes)
            {
                // System-generated notes are excluded from the history view
                if (note.NoteType != NoteType.SystemPrivate && note.NoteType != NoteType.SystemPublic)
                {
                    StringBuilder historyEntry = new StringBuilder();
                    historyEntry.Append("<i>On " + note.CreatedDate.ToString("g") + ", ");
                    historyEntry.Append(note.User.PrimaryAddress.FullName);
                    historyEntry.Append(note.NoteType == NoteType.Public ? " wrote" : " whispered");
                    historyEntry.Append(":</i><br />");
                    historyEntry.Append(note.Comment);
                    history.Add(historyEntry.ToString());
                }
            }
        }
        NoteHistory.Text = string.Join("<hr>", history.ToArray());
        AddPopup.Show();
        NoteText.Focus();
    }
}
/// <summary>
/// Adds unspecified default entries by copying permissions from the
/// corresponding access entries. For each of USER/GROUP/OTHER: if there is
/// no explicit default-scope entry but there is an access-scope entry, a
/// default entry with the access entry's permission is appended.
/// </summary>
/// <param name="aclBuilder">ArrayList&lt;AclEntry&gt; containing entries to build; sorted in place.</param>
private static void CopyDefaultsIfNeeded(IList <AclEntry> aclBuilder)
{
    // Sort first so the binary searches below are valid
    aclBuilder.Sort(AclEntryComparator);
    ScopedAclEntries scopedEntries = new ScopedAclEntries(aclBuilder);
    if (!scopedEntries.GetDefaultEntries().IsEmpty())
    {
        IList <AclEntry> accessEntries = scopedEntries.GetAccessEntries();
        IList <AclEntry> defaultEntries = scopedEntries.GetDefaultEntries();
        IList <AclEntry> copiedEntries = Lists.NewArrayListWithCapacity(3);
        foreach (AclEntryType type in EnumSet.Of(AclEntryType.User, AclEntryType.Group, AclEntryType
            .Other))
        {
            // Look for an explicit default entry of this type
            AclEntry defaultEntryKey = new AclEntry.Builder().SetScope(AclEntryScope.Default)
                .SetType(type).Build();
            int defaultEntryIndex = Sharpen.Collections.BinarySearch(defaultEntries, defaultEntryKey
                , AclEntryComparator);
            if (defaultEntryIndex < 0)
            {
                // Missing: copy the permission from the matching access entry, if any
                AclEntry accessEntryKey = new AclEntry.Builder().SetScope(AclEntryScope.Access).SetType
                    (type).Build();
                int accessEntryIndex = Sharpen.Collections.BinarySearch(accessEntries, accessEntryKey
                    , AclEntryComparator);
                if (accessEntryIndex >= 0)
                {
                    copiedEntries.AddItem(new AclEntry.Builder().SetScope(AclEntryScope.Default).SetType
                        (type).SetPermission(accessEntries[accessEntryIndex].GetPermission()).Build());
                }
            }
        }
        // Add all copied entries when done to prevent potential issues with binary
        // search on a modified aclBuilder during the main loop.
        Sharpen.Collections.AddAll(aclBuilder, copiedEntries);
    }
}
/// <summary>
/// Merges matched interval-bearing items into the list: the matched items
/// are sorted by their interval endpoints, then merged via
/// MergeListWithSortedMatched using the supplied aggregator.
/// NOTE: sorts <paramref name="matched"/> in place.
/// </summary>
public static IList <T> MergeList <T, _T1, _T2>(IList <_T1> list, IList <_T2> matched, Func <IList <T>, T> aggregator)
    where _T1 : T
    where _T2 : IHasInterval <int>
{
    matched.Sort(HasIntervalConstants.EndpointsComparator);
    return(MergeListWithSortedMatched(list, matched, aggregator));
}
/// <summary>
/// Merges all overlapping intervals and returns the merged result in order.
/// The input is sorted in place by (start, end); a null or empty input is
/// returned unchanged.
/// </summary>
/// <param name="intervals">Intervals to merge; sorted in place.</param>
/// <returns>A new list of merged, non-overlapping intervals.</returns>
public static IList <Interval> Merge2(IList <Interval> intervals)
{
    if (intervals == null || intervals.Count == 0)
    {
        return intervals;
    }

    // Order by start, breaking ties on end.
    intervals.Sort((a, b) => a.start != b.start
        ? Comparer <int> .Default.Compare(a.start, b.start)
        : Comparer <int> .Default.Compare(a.end, b.end));

    IList <Interval> merged = new List <Interval> { intervals[0] };
    for (int idx = 1; idx < intervals.Count; idx++)
    {
        Interval current = intervals[idx];
        Interval lastMerged = merged[merged.Count - 1];
        if (lastMerged.end >= current.start)
        {
            // Overlaps (or touches) the previous interval — extend it.
            lastMerged.end = Math.Max(lastMerged.end, current.end);
        }
        else
        {
            merged.Add(current);
        }
    }
    return merged;
}
/// <summary>
/// Creates an enumerator over the given pre-collected terms for one field.
/// The list is sorted in place so terms are enumerated in order.
/// </summary>
/// <param name="field">The field the terms belong to.</param>
/// <param name="terms">Non-empty list of terms to enumerate.</param>
/// <exception cref="ArgumentException">Thrown when no terms are supplied.</exception>
public FieldAndTermEnumAnonymousInnerClassHelper2(string field, List <BytesRef> terms)
    : base(field)
{
    // Guard against an empty term list, matching the validation performed by
    // the sibling FieldAndTermEnum helper constructors.
    if (terms.Count == 0)
    {
        throw new ArgumentException("no terms provided");
    }
    this.terms = terms;
    terms.Sort();
    iter = terms.GetEnumerator();
}
/// <summary>Return the items of an Iterable as a sorted list.</summary>
/// <?/>
/// <param name="items">The collection to be sorted.</param>
/// <param name="comparator">Comparator defining the sort order.</param>
/// <returns>A list containing the same items as the Iterable, but sorted.</returns>
public static IList <T> Sorted <T>(IEnumerable <T> items, IComparator <T> comparator)
{
    // Copy first so the source enumerable is never mutated
    IList <T> result = ToList(items);
    result.Sort(comparator);
    return(result);
}
/// <summary>
/// Merges the given files, in sorted path order, into a single output file.
/// Missing or null paths are skipped.
/// </summary>
/// <param name="files">Paths of the source files to merge; sorted in place.</param>
/// <param name="fileName">Path of the combined output file (created/overwritten).</param>
/// <param name="delete">Delete each source file after it has been appended.</param>
/// <param name="encrypt">Unused — reserved for encryption support.</param>
/// <param name="sign">Unused — reserved for signing support.</param>
public static void Combine(IList <string> files, string fileName, bool delete = false, bool encrypt = false, int sign = 0)
{
    if (files == null || files.Count == 0)
    {
        return;
    }
    // Sort so the merge order is deterministic regardless of input order
    files.Sort();
    using (var ws = new FileStream(fileName, FileMode.Create))
    {
        foreach (var file in files)
        {
            // Skip missing entries rather than failing the whole merge
            if (file == null || !File.Exists(file))
            {
                continue;
            }
            using (var rs = new FileStream(file, FileMode.Open, FileAccess.Read))
            {
                // CopyTo replaces the previous hand-rolled 1 KB read/write
                // loop, which also flushed after every chunk
                rs.CopyTo(ws);
            }
            if (delete)
            {
                Delete(file);
            }
        }
        // Single flush once everything has been written
        ws.Flush();
    }
}
/// <summary>
/// Returns the queued partial matches as a list ordered by the partial-match
/// comparator — presumably best matches first; confirm against the
/// comparator's semantics.
/// </summary>
public virtual IList <TrieMapMatcher.PartialApproxMatch <K, V> > ToSortedList()
{
    IList <TrieMapMatcher.PartialApproxMatch <K, V> > res = queue.ValuesList();
    res.Sort(TrieMapMatcher.PartialMatchComparator <K, V>());
    return(res);
}
/// <summary>
/// Sort in the order from the least new amount of resources asked (likely
/// negative) to the highest.
/// </summary>
/// <remarks>
/// Sort in the order from the least new amount of resources asked (likely
/// negative) to the highest. This prevents "order-of-operation" errors related
/// to exceeding 100% capacity temporarily.
/// </remarks>
/// <param name="currentReservations">Reservations to order; sorted in place.</param>
/// <param name="now">Current time passed to the comparator.</param>
/// <param name="plan">Plan passed to the comparator.</param>
/// <returns>The same list instance, sorted.</returns>
protected internal virtual IList <ReservationAllocation> SortByDelta(IList <ReservationAllocation
    > currentReservations, long now, Plan plan)
{
    // Sorts in place and returns the same list
    currentReservations.Sort(new AbstractSchedulerPlanFollower.ReservationAllocationComparator
        (now, this, plan));
    return(currentReservations);
}
/// <summary>
/// Builds a coref chain from a cluster: collects the cluster's mentions,
/// sorts them with CorefMentionComparator, indexes them by
/// (sentence, head index) position, and selects the most representative
/// mention.
/// </summary>
/// <param name="c">Source cluster; supplies the chain id and mentions.</param>
/// <param name="positions">Position lookup for each mention.</param>
public CorefChain(CorefCluster c, IDictionary <Mention, IntTuple> positions)
{
    chainID = c.clusterID;
    // Collect mentions
    mentions = new List <CorefChain.CorefMention>();
    mentionMap = Generics.NewHashMap();
    CorefChain.CorefMention represents = null;
    foreach (Mention m in c.GetCorefMentions())
    {
        CorefChain.CorefMention men = new CorefChain.CorefMention(m, positions[m]);
        mentions.Add(men);
    }
    mentions.Sort(new CorefChain.CorefMentionComparator());
    // Find representative mention
    foreach (CorefChain.CorefMention men_1 in mentions)
    {
        IntPair position = new IntPair(men_1.sentNum, men_1.headIndex);
        // Multiple mentions can share a (sentence, head) position
        if (!mentionMap.Contains(position))
        {
            mentionMap[position] = Generics.NewHashSet <CorefChain.CorefMention>();
        }
        mentionMap[position].Add(men_1);
        // Keep the most representative mention seen so far
        if (men_1.MoreRepresentativeThan(represents))
        {
            represents = men_1;
        }
    }
    representative = represents;
}
/// <summary>
/// Rebuilds fileHandleList from the current values of fileHandleIdMap and
/// sorts it.
/// </summary>
private void sortLists()
{
    // File handles
    Dictionary <int, FileHandleInfo> .ValueCollection c = fileHandleIdMap.Values;
    fileHandleList = new LinkedList <FileHandleInfo>(c);
    // NOTE(review): LinkedList<T> has no Sort() in the BCL — this presumably
    // resolves to a project extension method; verify it exists.
    fileHandleList.Sort();
}
/// <summary>Detect renames in the current file set.</summary>
/// <remarks>
/// Detect renames in the current file set. Runs at most once (guarded by
/// <c>done</c>); the pipeline is: break large modifies, find exact renames,
/// find content-similar renames, then rejoin broken modifies.
/// </remarks>
/// <param name="reader">reader to obtain objects from the repository with.</param>
/// <param name="pm">report progress during the detection phases.</param>
/// <returns>
/// an unmodifiable list of
/// <see cref="DiffEntry">DiffEntry</see>
/// s representing all files
/// that have been changed.
/// </returns>
/// <exception cref="System.IO.IOException">file contents cannot be read from the repository.
/// </exception>
public virtual IList <DiffEntry> Compute(ContentSource.Pair reader, ProgressMonitor
    pm)
{
    if (!done)
    {
        // Run the detection pipeline only once
        done = true;
        if (pm == null)
        {
            pm = NullProgressMonitor.INSTANCE;
        }
        // Split big modifications into separate add/delete pairs first
        if (0 < breakScore)
        {
            BreakModifies(reader, pm);
        }
        if (!added.IsEmpty() && !deleted.IsEmpty())
        {
            FindExactRenames(pm);
        }
        if (!added.IsEmpty() && !deleted.IsEmpty())
        {
            FindContentRenames(reader, pm);
        }
        // Re-pair broken modifies that were not claimed as renames
        if (0 < breakScore && !added.IsEmpty() && !deleted.IsEmpty())
        {
            RejoinModifies(pm);
        }
        // Remaining adds/deletes become plain entries; the work lists are
        // released afterwards
        Sharpen.Collections.AddAll(entries, added);
        added = null;
        Sharpen.Collections.AddAll(entries, deleted);
        deleted = null;
        entries.Sort(DIFF_COMPARATOR);
    }
    return(Sharpen.Collections.UnmodifiableList(entries));
}
/// <summary>
/// Builds the sprite set: reports progress when a worker is supplied, then
/// stores the sprites sorted by name.
/// </summary>
private FullSpriteSet( IList<AbstractSprite> sprites, System.ComponentModel.BackgroundWorker worker, int tasksComplete, int tasks )
{
    bool haveWorker = worker != null;
    if ( haveWorker )
        worker.ReportProgress( ( tasksComplete++ * 100 ) / tasks, "Sorting" );
    // Sorts the caller's list in place before capturing it
    sprites.Sort( ( a, b ) => a.Name.CompareTo( b.Name ) );
    this.sprites = sprites;
}
/// <summary>
/// Wraps a pre-collected term list for enumeration; the list is sorted in
/// place so terms are enumerated in order.
/// </summary>
/// <param name="terms">Non-empty list of terms to enumerate.</param>
/// <exception cref="ArgumentException">Thrown when no terms are supplied.</exception>
public FieldAndTermEnumAnonymousInnerClassHelper(List<Term> terms)
{
    // Reject an empty term list up front
    if (!terms.Any())
    {
        throw new ArgumentException("no terms provided");
    }

    terms.Sort();
    this.terms = terms;
    iter = terms.GetEnumerator();
}
/// <summary>
/// Builds the sprite set: sorts the sprites by name, stores them, and
/// generates an 80x48 thumbnail for each sprite (keyed by sprite name),
/// reporting progress through the optional background worker.
/// </summary>
private FullSpriteSet( IList<AbstractSprite> sprites, System.ComponentModel.BackgroundWorker worker, int tasksComplete, int tasks )
{
    bool haveWorker = worker != null;
    if ( haveWorker )
        worker.ReportProgress( ( tasksComplete++ * 100 ) / tasks, "Sorting" );
    // Sorts the caller's list in place before capturing it
    sprites.Sort( ( a, b ) => a.Name.CompareTo( b.Name ) );
    this.sprites = sprites;

    Thumbnails = new ImageList();
    Thumbnails.ImageSize = new System.Drawing.Size( 80, 48 );
    foreach ( var sprite in sprites )
    {
        if ( haveWorker )
            worker.ReportProgress( ( tasksComplete++ * 100 ) / tasks, string.Format( "Generating thumbnail for {0}", sprite.Name ) );
        // Thumbnails are keyed by the sprite's name
        Thumbnails.Images.Add( sprite.Name, sprite.GetThumbnail() );
    }
}
/// <summary>
/// Runs DTE encoding for the given files and builds the resulting patch
/// list. Files are processed largest-overflow first; when encoding fails for
/// a file it is bumped to the front of the list and the whole pass is
/// retried until it succeeds or the retry budget (one per file) runs out.
/// </summary>
/// <param name="dteFiles">Files needing DTE; re-ordered in place.</param>
/// <param name="worker">Used for cancellation checks and progress reports.</param>
/// <param name="args">Cancel is set to true (and null returned) on cancellation.</param>
/// <param name="dteMapping">Out: byte-to-string mapping of the chosen encoding pairs.</param>
/// <returns>Read-only patch list, or null if cancelled.</returns>
/// <exception cref="FFTPatcher.TextEditor.DTE.DteException">When encoding cannot succeed.</exception>
private IList<PatchedByteArray> DoDteCrap( IList<ISerializableFile> dteFiles, BackgroundWorker worker, DoWorkEventArgs args, out IDictionary<byte, string> dteMapping )
{
    List<PatchedByteArray> patches = new List<PatchedByteArray>();
    dteMapping = null;
    if (worker.CancellationPending)
    {
        args.Cancel = true;
        return null;
    }
    // Largest (serialized size - layout size) first: the files that overflow
    // their allotted space the most are encoded first
    dteFiles.Sort( ( x, y ) => (y.ToCDByteArray().Length - y.Layout.Size).CompareTo( x.ToCDByteArray().Length - x.Layout.Size ) );
    if (worker.CancellationPending)
    {
        args.Cancel = true;
        return null;
    }
    IDictionary<ISerializableFile, Set<KeyValuePair<string, byte>>> filePreferredPairs = null;
    Set<KeyValuePair<string, byte>> currentPairs = null;
    DteResult result = DteResult.Empty;
    if (dteFiles.Count > 0)
    {
        int tries = dteFiles.Count;
        //DteResult result = DoDteForFiles( dteFiles, worker, args, out filePreferredPairs, out currentPairs );
        do
        {
            result = DoDteForFiles( dteFiles, worker, args, out filePreferredPairs, out currentPairs );
            switch (result.ResultCode)
            {
                case DteResult.Result.Cancelled:
                    args.Cancel = true;
                    return null;
                case DteResult.Result.Fail:
                    var failedFile = result.FailedFile;
                    if (dteFiles[0] == failedFile)
                    {
                        // Failed on the first file... this is hopeless
                        throw new FFTPatcher.TextEditor.DTE.DteException( failedFile );
                    }
                    // Bump the failed file to the top of the list
                    dteFiles.Remove( failedFile );
                    dteFiles.Insert( 0, failedFile );
                    break;
                case DteResult.Result.Success:
                    // do nothing
                    break;
            }
        } while (result.ResultCode != DteResult.Result.Success && --tries >= 0);
    }
    // Retries exhausted (or no files): surface the final failure/cancellation
    switch (result.ResultCode)
    {
        case DteResult.Result.Fail:
            throw new FFTPatcher.TextEditor.DTE.DteException( dteFiles[0] );
        case DteResult.Result.Cancelled:
            args.Cancel = true;
            return null;
    }
    // Generate patches per file using each file's preferred encoding pairs
    foreach (var file in dteFiles)
    {
        worker.ReportProgress( 0, new ProgressForm.FileProgress { File = file, State = ProgressForm.TaskState.Starting, Task = ProgressForm.Task.GeneratePatch } );
        var currentFileEncoding = PatcherLib.Utilities.Utilities.DictionaryFromKVPs( filePreferredPairs[file] );
        patches.AddRange( file.GetDtePatches( currentFileEncoding ) );
        worker.ReportProgress( 0, new ProgressForm.FileProgress { File = file, State = ProgressForm.TaskState.Done, Task = ProgressForm.Task.GeneratePatch } );
        if (worker.CancellationPending)
        {
            args.Cancel = true;
            return null;
        }
    }
    // Invert the (string -> byte) pairs into the byte -> string out mapping
    var myDteMapping = new Dictionary<byte, string>();
    currentPairs.ForEach( kvp => myDteMapping[kvp.Value] = kvp.Key );
    dteMapping = myDteMapping;
    patches.AddRange( DTE.GenerateDtePatches( this.Filetype, currentPairs ) );
    return patches.AsReadOnly();
}
/// <summary>
/// Populates the list entries: sorts the given high-score list, fills the
/// entry rows starting at <paramref name="offset"/>, and resets any rows
/// beyond the number of available scores.
/// </summary>
/// <param name="offset">Index offset into the entries table for this list.</param>
/// <param name="list">High-score entries to display; sorted in place.</param>
private void PopulateLists(
    int offset,
    IList<HighScoreEntry> list)
{
    // Sort it
    list.Sort();

    // Go through the blow away the list
    for (int i = 0; i < HighScoreTable.MaximumEntries; i++)
    {
        // See if we have it
        int n = i + offset;

        if (i > list.Count - 1)
        {
            // Fewer scores than rows: blank out the remaining entry
            entries[n].Reset();
            continue;
        }

        // Populate the list
        entries[n].Name.Text = list[i].Name;
        entries[n].UtcWhen.Text = list[i].UtcWhen.ToString();
        // "+ \"\"" coerces Word to a string — presumably guards a null Word; confirm
        entries[n].Word.Text = list[i].Word + "";
        entries[n].Score.Text = list[i].Score.ToString("N0");
    }
}
/// <summary>
/// Inserts the charmap directory entry, replacing any existing entry with
/// the same filename in place; only a fresh insertion triggers a re-sort of
/// the directory by filename.
/// </summary>
/// <param name="dir">Directory entry list to update.</param>
/// <param name="newDirEnt">Entry to insert or substitute.</param>
private void AddOrReplaceCharMapDirectoryEntry( IList<PatcherLib.Iso.DirectoryEntry> dir, PatcherLib.Iso.DirectoryEntry newDirEnt )
{
    // Find + IndexOf scans the list twice; acceptable for a small directory
    var currentCharmapIndex = dir.IndexOf( dir.Find( d => d.Filename == newDirEnt.Filename ) );
    if (currentCharmapIndex != -1)
    {
        // In-place replacement keeps the existing order, so no re-sort needed
        dir[currentCharmapIndex] = newDirEnt;
    }
    else
    {
        dir.Add( newDirEnt );
        dir.Sort( ( a, b ) => a.Filename.CompareTo( b.Filename ) );
    }
}
/// <summary>
/// Updates the team assignment detail: when a developer is selected, orders
/// the clients and projects via the timesheet service, sorts both lists, and
/// refreshes the developer's last work entry.
/// </summary>
/// <param name="projects">The projects.</param>
/// <returns>
/// returns the ProjectFillItem
/// </returns>
private IList<ProjectFillItem> UpdateTeamAssignmentDetail(IList<ProjectFillItem> projects)
{
    if (this.developer != null)
    {
        // Service resolved from the DI container per call
        this.clientsAndProjects = DependencyInjectionContainer.Instance.Container.Resolve<TimesheetService>().OrderClientsAndProjects(this.Clients, projects, this.developer.DeveloperID, this.currentDate);
        // Sort() here is assigned back, so it presumably returns the sorted
        // list (project extension) rather than sorting purely in place — confirm
        this.Clients = this.Clients.Sort();
        projects = projects.Sort();
        this.lastWorkDetail = DependencyInjectionContainer.Instance.Container.Resolve<TimesheetService>().RetrieveLastEntry(this.developer.DeveloperID);
    }
    return projects;
}
public FieldAndTermEnumAnonymousInnerClassHelper2(string field, List<BytesRef> terms) : base(field) { if (!terms.Any()) { throw new ArgumentException("no terms provided"); } this.terms = terms; terms.Sort(); iter = terms.GetEnumerator(); }
/// <summary>
/// Rebuilds the historical-data ListView (_lvHistData) for the selected site or
/// category: persists the current forecast info, gathers the distinct duration
/// labels (column set) and product/test names (row groups) from the forecast's
/// data usages, builds the column headers for the configured period, then lays
/// each product/test's values out under its matching duration column.
/// NOTE(review): the sub-item bookkeeping below (k, insert, rem, flag/flag1) is
/// highly order-sensitive — documented only, code deliberately left untouched.
/// </summary>
/// <param name="siteOrcatid">Site or category id passed through to GetListOfDataUsages.</param>
/// <param name="activIndex">Activity index passed through to GetListOfDataUsages.</param>
public void BindForecastDataUsage(int siteOrcatid, int activIndex) {
    // Persist any pending forecast edits before rebinding the view.
    DataRepository.SaveOrUpdateForecastInfo(_forecastInfo);
    _lvHistData.BeginUpdate(); _lvHistData.Items.Clear();
    // list: distinct duration labels (one ListView column each);
    // plist / plistId: distinct product or test names and ids (one row group each).
    List<string> list = new List<string>(); List<string> plist = new List<string>(); List<int> plistId = new List<int>();
    if (_activeCategory != null || _activeFSite != null) { int index = 0; int productcount = 0;
#region MyRegion
        if (_forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex).Count > 0) {
            // CONSUMPTION methodology: usages are products (by category or by site).
            if (_forecastInfo.Methodology == MethodologyEnum.CONSUMPTION.ToString()) {
                if (_forecastInfo.DataUsage == DataUsageEnum.DATA_USAGE3.ToString()) {
                    // Category products: sort chronologically, collect distinct
                    // durations and product names; productListunsorted ends up as
                    // the maximum Id seen.
                    productcatList = (IList<ForecastCategoryProduct>)_forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex); productListunsorted = productcatList[0].Id; productcatList.Sort(delegate(ForecastCategoryProduct p1, ForecastCategoryProduct p2) { return p1.DurationDateTime.Value.Date.CompareTo(p2.DurationDateTime.Value.Date); }); productcount = productcatList.Count;
                    foreach (ForecastCategoryProduct p in productcatList) { if (list.Contains(p.CDuration) != true) { list.Add(p.CDuration); } if (plist.Contains(p.Product.ProductName) != true) { plist.Add(p.Product.ProductName); plistId.Add(p.Product.Id); } if (productListunsorted < p.Id) productListunsorted = p.Id; }
                } else {
                    // Site products: same collection pass (note: this branch does
                    // not record ids into plistId, unlike the category branch).
                    productsiteList = (IList<ForecastSiteProduct>)_forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex); productListunsorted = productsiteList[0].Id; productsiteList.Sort(delegate(ForecastSiteProduct p1, ForecastSiteProduct p2) { return p1.DurationDateTime.Value.Date.CompareTo(p2.DurationDateTime.Value.Date); }); productcount = productsiteList.Count;
                    foreach (ForecastSiteProduct p in productsiteList) { if (list.Contains(p.CDuration) != true) { list.Add(p.CDuration); } if (plist.Contains(p.Product.ProductName) != true) { plist.Add(p.Product.ProductName); } if (productListunsorted < p.Id) productListunsorted = p.Id; }
                }
            } else {
                // Non-CONSUMPTION methodology: usages are tests.
                if (_forecastInfo.DataUsage == DataUsageEnum.DATA_USAGE3.ToString()) {
                    // Category tests: mirror of the category-product pass above.
                    testcatList = (IList<ForecastCategoryTest>)_forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex); testListunsorted = testcatList[0].Id; testcatList.Sort(delegate(ForecastCategoryTest t1, ForecastCategoryTest t2) { return t1.DurationDateTime.Value.Date.CompareTo(t2.DurationDateTime.Value.Date); }); productcount = testcatList.Count;
                    foreach (ForecastCategoryTest t in testcatList) { if (list.Contains(t.CDuration) != true) { list.Add(t.CDuration); } if (plist.Contains(t.Test.TestName) != true) { plist.Add(t.Test.TestName); plistId.Add(t.Test.Id); } if (testListunsorted < t.Id) testListunsorted = t.Id; }
                } else {
                    // Site tests: mirror of the site-product pass above.
                    testsiteList = (IList<ForecastSiteTest>)_forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex); testListunsorted = testsiteList[0].Id; testsiteList.Sort(delegate(ForecastSiteTest t1, ForecastSiteTest t2) { return t1.DurationDateTime.Value.Date.CompareTo(t2.DurationDateTime.Value.Date); }); productcount = testsiteList.Count;
                    foreach (ForecastSiteTest t in testsiteList) { if (list.Contains(t.CDuration) != true) { list.Add(t.CDuration); } if (plist.Contains(t.Test.TestName) != true) { plist.Add(t.Test.TestName); } if (testListunsorted < t.Id) testListunsorted = t.Id; }
                }
            }
            // NOTE(review): literal "YEARLY" here, but the column branches below
            // compare against ForecastPeriodEnum.*.ToString() — confirm the enum's
            // Yearly value actually stringifies to "YEARLY", otherwise this sort
            // never runs for the yearly case.
            if (_forecastInfo.Period == "YEARLY") list.Sort();
            int count = list.Count; _lvHistData.Columns.Clear(); this._lvHistData.Items.Clear();
            // col[0] is a blank leading column; col[1..count] hold the duration labels.
            col = new ColumnHeader[count + 1]; ColumnHeader u = new ColumnHeader(); u.Text = ""; col[0] = u;
            if (_forecastInfo.Period == ForecastPeriodEnum.Yearly.ToString()) { for (int i = count; i >= 1; i--) { ColumnHeader c = new ColumnHeader(); c.Text = list[i - 1].ToString(); col[i] = c; } }
            // NOTE(review): 'last' is computed in the Monthly and Bimonthly
            // branches but never used for the header text — dead state?
            if (_forecastInfo.Period == ForecastPeriodEnum.Monthly.ToString()) { DateTime last = _forecastInfo.StartDate; for (int i = 1; i <= count; i++) { ColumnHeader c = new ColumnHeader(); c.Text = list[i - 1].ToString(); col[i] = c; } }
            if (_forecastInfo.Period == ForecastPeriodEnum.Bimonthly.ToString()) { DateTime last = _forecastInfo.StartDate; for (int i = count; i >= 1; i--) { ColumnHeader c = new ColumnHeader(); last = last.AddMonths(-2); c.Text = list[i - 1].ToString(); col[i] = c; } }
            // Quarterly: quar/year step backwards from the start date, though the
            // header text still comes from the collected duration labels.
            if (_forecastInfo.Period == ForecastPeriodEnum.Quarterly.ToString()) { int quar = GetQuarter(_forecastInfo.StartDate); int year = _forecastInfo.StartDate.Year; for (int i = count; i >= 1; i--) { if (quar == 1) { quar = 4; year--; } else quar--; ColumnHeader c = new ColumnHeader(); c.Text = list[i - 1].ToString(); col[i] = c; } }
            this._lvHistData.Columns.AddRange(col); addData = false;
            // Row construction. l1..l5 are the five fixed rows per product/test
            // (usage, stockout, downtime, adjusted, note). k tracks the index of
            // the duplicated sub-item, insert the target column slot, rem the last
            // filled slot; flag1 = row started, flag = row completed early.
            // NOTE(review): 'prolist' is created but never used below.
            int j = 0, k = 1; ListViewItem l1 = null; ListViewItem l2 = null; ListViewItem l3 = null; ListViewItem l4 = null; ListViewItem l5 = null; List<string> prolist = new List<string>();
            for (int z = 0; z < plist.Count; z++) { bool flag = false; bool flag1 = false; int insert = 1, rem = 0; j = 0;
                foreach (IBaseDataUsage p in _forecastInfo.GetListOfDataUsages(siteOrcatid, activIndex)) {
                    // Match this usage to the current row group by product or test name.
                    if (plist[z] ==( _forecastInfo.FMethodologeyEnum == MethodologyEnum.CONSUMPTION ? p.Product.ProductName : p.Test.TestName)) { LQTListViewTag tag = new LQTListViewTag(); tag.GroupTitle = (_forecastInfo.FMethodologeyEnum == MethodologyEnum.CONSUMPTION ? p.Product.ProductName : p.Test.TestName); tag.Id = p.Id; tag.Index = index;
                        if (flag1 == false) {
                            // First usage for this group: create the five rows and
                            // pad "-" placeholders up to this usage's column.
                            l1 = new ListViewItem("ProductUsed/TestPerformed") { Tag = tag }; l2 = new ListViewItem("StockOut") { Tag = tag }; l3 = new ListViewItem("InstrumentDowntime") { Tag = tag }; l4 = new ListViewItem("Adjusted") { Tag = tag }; l5 = new ListViewItem("Note") { Tag = tag };
                            for (int f = 1; f <= list.Count; f++) if (list[f - 1] == p.CDuration) { insert = f; for (int l = 1; l < insert; l++) { l1.SubItems.Add("-"); l2.SubItems.Add("-"); l3.SubItems.Add("-"); l4.SubItems.Add("-"); l5.SubItems.Add("-"); l1.SubItems.Insert(l, l1.SubItems[l]); l2.SubItems.Insert(l, l2.SubItems[l]); l3.SubItems.Insert(l, l3.SubItems[l]); l4.SubItems.Insert(l, l4.SubItems[l]); k = k + 2; } break; }
                            l1.SubItems.Add(p.AmountUsed.ToString()); l2.SubItems.Add(p.StockOut.ToString()); l3.SubItems.Add(p.InstrumentDowntime.ToString()); l4.SubItems.Add(p.Adjusted.ToString()); if (p.AmountUsed != p.Adjusted) { l5.SubItems.Add("Adjusted"); } else { l5.SubItems.Add("-"); } l1.SubItems.Insert(insert, l1.SubItems[k]); l2.SubItems.Insert(insert, l2.SubItems[k]); l3.SubItems.Insert(insert, l3.SubItems[k]); l4.SubItems.Insert(insert, l4.SubItems[k]); k = k + 2; flag1 = true; rem = insert; insert++;
                        } else if (j == productcount - 1) {
                            // Last usage in the pass: place the value, then reset
                            // counters and finish this group.
                            l1.SubItems.Add(p.AmountUsed.ToString()); l2.SubItems.Add(p.StockOut.ToString()); l3.SubItems.Add(p.InstrumentDowntime.ToString()); l4.SubItems.Add(p.Adjusted.ToString()); if (p.AmountUsed != p.Adjusted) { l5.SubItems.Add("Adjusted"); } else { l5.SubItems.Add("-"); } l1.SubItems.Insert(insert, l1.SubItems[k]); l2.SubItems.Insert(insert, l2.SubItems[k]); l3.SubItems.Insert(insert, l3.SubItems[k]); l4.SubItems.Insert(insert, l4.SubItems[k]); j = 0; flag = true; k = 1; insert++; rem = insert; break;
                        } else {
                            // Middle usage: append values and splice them into the
                            // next column slot.
                            l1.SubItems.Add(p.AmountUsed.ToString()); l2.SubItems.Add(p.StockOut.ToString()); l3.SubItems.Add(p.InstrumentDowntime.ToString()); l4.SubItems.Add(p.Adjusted.ToString()); if (p.AmountUsed != p.Adjusted) { l5.SubItems.Add("Adjusted"); } else { l5.SubItems.Add("-"); } l1.SubItems.Insert(insert, l1.SubItems[k]); l2.SubItems.Insert(insert, l2.SubItems[k]); l3.SubItems.Insert(insert, l3.SubItems[k]); l4.SubItems.Insert(insert, l4.SubItems[k]); k = k + 2; rem = insert; insert++;
                        }
                    } else if (flag == false && j == productcount - 1) {
                        // No more matches for this group: pad the remaining
                        // trailing columns with "-" and finish.
                        if (rem > 1) for (int l = rem + 1; l <= list.Count; l++) { l1.SubItems.Add("-"); l2.SubItems.Add("-"); l3.SubItems.Add("-"); l4.SubItems.Add("-"); l5.SubItems.Add("-"); l1.SubItems.Insert(l, l1.SubItems[k]); l2.SubItems.Insert(l, l2.SubItems[k]); l3.SubItems.Insert(l, l3.SubItems[k]); l4.SubItems.Insert(l, l4.SubItems[k]); k = k + 2; } k = 1; break;
                    }
                    j++;
                }
                // k == 1 marks a completed group: add the five rows and highlight them.
                if (k == 1) { LqtUtil.AddItemToGroup(_lvHistData, l1); LqtUtil.AddItemToGroup(_lvHistData, l2); LqtUtil.AddItemToGroup(_lvHistData, l3); LqtUtil.AddItemToGroup(_lvHistData, l4); LqtUtil.AddItemToGroup(_lvHistData, l5); index++; _lvHistData.Items.AddRange(new ListViewItem[] { l1, l2, l3, l4, l5 }); l1.BackColor = System.Drawing.Color.LightBlue; l2.BackColor = System.Drawing.Color.LightBlue; l3.BackColor = System.Drawing.Color.LightBlue; l4.BackColor = System.Drawing.Color.LightBlue; l5.BackColor = System.Drawing.Color.LightBlue; }
            }
        }
#endregion
        _lvHistData.EndUpdate();
    }
}
/// <summary>
/// Creates the enumerator helper; the supplied term list is sorted in place and
/// iterated from the beginning.
/// </summary>
public FieldAndTermEnumAnonymousInnerClassHelper(List<Term> terms)
{
    // Same list object is kept, sorted, and enumerated.
    terms.Sort();
    this.terms = terms;
    iter = terms.GetEnumerator();
}
/// <summary>
/// Creates the enumerator helper for <paramref name="field"/>; the supplied term
/// list is sorted in place and iterated from the beginning.
/// </summary>
public FieldAndTermEnumAnonymousInnerClassHelper2(string field, List<BytesRef> terms)
    : base(field)
{
    // Same list object is kept, sorted, and enumerated.
    terms.Sort();
    this.terms = terms;
    iter = terms.GetEnumerator();
}