public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    // BFS over one-letter transformations; each BFS level adds one step to the ladder.
    // TC: O(n*l); SC: O(n).
    // No ladder can exist unless endWord is in the word list (and the trivial
    // begin == end case is rejected up front, matching the original contract).
    bool unsolvable = wordList.Count == 0 || !wordList.Contains(endWord) || beginWord == endWord;
    if (unsolvable)
    {
        return 0;
    }

    HashSet<string> dict = wordList.ToHashSet(); // O(1) membership tests during expansion
    var frontier = new Queue<string>();
    frontier.Enqueue(beginWord);

    for (int level = 1; frontier.Count > 0; level++)
    {
        int width = frontier.Count; // process exactly one BFS level per outer iteration
        while (width-- > 0)
        {
            string current = frontier.Dequeue();
            if (current == endWord)
            {
                return level;
            }
            FindTransformWords(current, dict, frontier);
        }
    }
    return 0;
}
//!!! similar code elsewhere, and missing ISGOOD
public override Matrix<string, string, T> Filter<T>(Matrix<string, string, T> input, Matrix<string, string, T> target)
{
    // When a match is required, every row slated for removal must exist in the input.
    bool removalRowsAreKnown = !MatchRequired || RowToRemoveList.ToHashSet().IsSubsetOf(input.RowKeys);
    Helper.CheckCondition(removalRowsAreKnown, "Expect all items in RowToRemoveList to be in the matrix.");

    // Keep every row except the ones being removed; returned object is a view, not a copy.
    var keptRows = input.RowKeys.Except(RowToRemoveList);
    return input.SelectRowsView(keptRows);
}
private void ProcessScc(IList<Procedure> scc)
{
    // Convert every procedure of the SCC to SSA form and run value propagation on it,
    // then compute trashed registers across the whole group at once.
    var sccMembers = scc.ToHashSet();
    var transforms = new HashSet<SsaTransform>();
    foreach (var procedure in scc)
    {
        var ssa = new SsaTransform(
            program,
            procedure,
            sccMembers,
            dynamicLinker.Object,
            dataFlow);
        ssa.Transform();
        ssa.AddUsesToExitBlock();

        var propagator = new ValuePropagator(
            program.SegmentMap,
            ssa.SsaState,
            program.CallGraph,
            dynamicLinker.Object,
            NullDecompilerEventListener.Instance);
        propagator.Transform();

        transforms.Add(ssa);
    }

    var trashedRegisters = new TrashedRegisterFinder(
        program,
        dataFlow,
        transforms,
        NullDecompilerEventListener.Instance);
    trashedRegisters.Compute();
}
//Given a non-empty string s and a dictionary wordDict containing a list of non-empty words,
// determine if s can be segmented into a space-separated sequence of one or more dictionary words.
public bool WordBreak(string s, IList<string> wordDict)
{
    var words = wordDict.ToHashSet(); // O(1) dictionary lookups for the helper
    var memo = new HashSet<string>(); // state set managed entirely by WordBreakHelper
    return WordBreakHelper(s, words, memo);
}
public IList<IList<string>> FindLadders(string beginWord, string endWord, IList<string> wordList)
{
    // BFS discovers the shortest-ladder structure, DFS then enumerates the paths.
    if (wordList.Count == 0)
    {
        return new List<IList<string>>();
    }

    var dict = wordList.ToHashSet();
    dict.Add(beginWord);

    // BFS depth recorded per visited word, seeded with the start word at level 0.
    var levelDict = new Dictionary<string, int> { [beginWord] = 0 };

    var ladders = Bfs(dict, levelDict, beginWord, endWord);
    if (ladders == null)
    {
        return new List<IList<string>>();
    }

    var res = new List<IList<string>>();
    Dfs(ladders, levelDict, res, new List<string>(), beginWord, endWord);
    return res;
}
// Returns the elements of 'source' (order and duplicates preserved) that do not
// occur anywhere in 'second'. Throws ArgumentNullException for a null argument.
public static IList<T> Except<T>(this IList<T> source, IList<T> second)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }
    if (second == null)
    {
        throw new ArgumentNullException(nameof(second));
    }

    var exclusions = second.ToHashSet(); // O(1) membership probes
    var kept = new List<T>();
    foreach (var candidate in source)
    {
        if (!exclusions.Contains(candidate))
        {
            kept.Add(candidate);
        }
    }
    return kept;
}
public IList<string> WordBreak(string s, IList<string> wordDict)
{
    // One memo slot per start index of s; the slots are filled and consumed by Helper.
    var cache = new IList<string>[s.Length];
    ISet<string> words = wordDict.ToHashSet();
    return Helper(ref s, 0, cache, words);
}
public IList<string> WordBreak(string s, IList<string> wordDict)
{
    // Backtracking entry point; Backtrack fills the result list as it explores.
    var sentences = new List<string>();
    Backtrack(s, 0, wordDict.ToHashSet(), new StringBuilder(), sentences);
    return sentences;
}
protected bool IsThirteenOrphans(IList<Tile> uncalledTiles, IList<TileGrouping> calledSets)
{
    // Thirteen orphans requires a fully concealed hand of the base tile count
    // whose tiles are exactly the thirteen-orphans tile set.
    bool concealedFullHand = uncalledTiles.Count == WinningHandBaseTileCount && !calledSets.Any();
    return concealedFullHand && uncalledTiles.ToHashSet().SetEquals(GetThirteenOrphansSet());
}
// Validation algorithm. Replays events, and depending on circumstances, sets flags which inform the caller how to save/reject/respond to the incoming
// events.
//
// Outputs:
//   acceptedEvents       - new events the replayer flagged for saving.
//   skippedEvents        - new events the replayer flagged to skip.
//   rejected             - true when the combined log could not be replayed at all; nothing should be saved.
//   shouldTriggerRefresh - true when some event indicates the client should re-fetch state.
//   errorMsg             - populated only on rejection, with diagnostic detail.
public void SimpleFullStateRebuildValidation(IList <GenericTodoEvent> newEvents, out IList <GenericTodoEvent> acceptedEvents, out IList <GenericTodoEvent> skippedEvents, out bool rejected, out bool shouldTriggerRefresh, out string errorMsg)
{
    TaskList tasklist = new TaskList();
    Stack <UndoAction> undoStack = new Stack <UndoAction>();
    // Membership test distinguishing "new" events from ones already in the truth log.
    ISet <GenericTodoEvent> newEventSet = newEvents.ToHashSet(eventComparer);
    errorMsg = "";
    acceptedEvents = new List <GenericTodoEvent>(newEvents.Count);
    skippedEvents = new List <GenericTodoEvent>(newEvents.Count);
    shouldTriggerRefresh = false;
    rejected = false;
    GenericTodoEvent eventUnderQuestion = null; // Used in catch scenarios.
    int eventNum = 0;                           // Used in catch scenarios.

    // Create the full event log. We will attempt to execute these events in order, validating the state each time.
    IList <GenericTodoEvent> fullEventLog = truthLog.Concat(newEvents).OrderBy(keySelector, keyComparer).ToList();
    try
    {
        foreach (GenericTodoEvent currEvent in fullEventLog)
        {
            eventUnderQuestion = currEvent;
            eventNum++;

            // Try to apply the event, and examine the validation results.
            tasklist = EventReplayer.Replay(currEvent, tasklist, undoStack, out bool saveIfNewEvent, out bool demandsRefresh);

            // Only save the event if the event replay does not want to skip it, AND the event has not already been saved (i.e. is 'new')
            if (saveIfNewEvent && newEventSet.Contains(currEvent))
            {
                acceptedEvents.Add(currEvent);
            }
            else if (!saveIfNewEvent && newEventSet.Contains(currEvent))
            {
                skippedEvents.Add(currEvent);
            }

            // Signal if validating this particular event means that the client should probably refresh their data. (E.g. they are clearly out of date).
            if (demandsRefresh)
            {
                shouldTriggerRefresh = true;
            }
        }
    }
    // If any InvalidOperationExceptions are thrown, that means the eventlog as a whole is invalid, and nothing should be saved.
    // This is our response if we deem that the posted events are so out-of-sync and inherently incompatible with the truth log, that
    // it would be dangerous to attempt any kind of saving from these events.
    catch (InvalidOperationException e)
    {
        errorMsg = "The following error occurred when trying to replay event { " + eventUnderQuestion.EventType + ", " + eventUnderQuestion.Name + " }. The event was event " + eventNum + " out of " + fullEventLog.Count + " events in the event log. Error message: " + e.Message;
        errorMsg = errorMsg + buildFullEventLogErrorMessage(fullEventLog);
        rejected = true;
        shouldTriggerRefresh = true;
    }
}
public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    // Level-order BFS. Words are deleted from the pool as soon as they are queued,
    // which doubles as the visited check.
    var pool = wordList.ToHashSet();
    if (!pool.Contains(endWord))
    {
        return 0;
    }

    var frontier = new Queue<string>();
    frontier.Enqueue(beginWord);

    for (int steps = 1; frontier.Count > 0; steps++)
    {
        int width = frontier.Count;
        for (int k = 0; k < width; k++)
        {
            string word = frontier.Dequeue();
            if (word == endWord)
            {
                return steps;
            }

            var letters = word.ToArray();
            for (int i = 0; i < letters.Length; i++)
            {
                char original = letters[i];
                for (char c = 'a'; c <= 'z'; c++)
                {
                    if (c == original)
                    {
                        continue;
                    }
                    letters[i] = c;
                    var candidate = new string(letters);
                    if (pool.Remove(candidate)) // contained => consume and enqueue
                    {
                        frontier.Enqueue(candidate);
                    }
                }
                letters[i] = original; // restore before moving to the next position
            }
        }
    }
    return 0;
}
public IList<string> WordBreak(string s, IList<string> wordDict)
{
    // DFS + memo search. TC: O(n^3); SC: O(n).
    if (string.IsNullOrEmpty(s) || wordDict == null || wordDict.Count == 0)
    {
        return new List<string>();
    }
    var words = wordDict.ToHashSet();                      // fast dictionary lookups
    var memo = new Dictionary<string, List<string>>();     // memo table filled by GetWords
    return GetWords(s, words, memo);
}
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='category'>
/// Metadata category to query.
/// </param>
/// <param name='collection'>
/// Collection within the category.
/// </param>
/// <param name='keys'>
/// Optional keys to filter by; null means no key filter.
/// </param>
/// <param name='keyword'>
/// Optional keyword filter.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task <IDictionary <string, string> > FindByKeysAsync(this IMetadata operations, string category, string collection, IList <string> keys = default, string keyword = default, CancellationToken cancellationToken = default)
{
    try
    {
        // BUG FIX: 'keys' defaults to null, so the old 'keys.ToHashSet()' threw on the
        // documented default call. Propagate null instead and let the API treat it as
        // "no key filter". NOTE(review): confirm RefitClient.FindByKeys accepts a null set.
        return await operations.RefitClient.FindByKeys(category, collection, keys?.ToHashSet(), keyword);
    }
    catch (ApiException exception) when (exception.StatusCode == HttpStatusCode.NotFound)
    {
        // A 404 is mapped to a null result rather than an exception.
        return null;
    }
    // NOTE(review): cancellationToken is accepted but never forwarded — confirm the
    // underlying Refit interface has no token parameter before wiring it through.
}
// Serializes the delta between two versions of a list: a "changed" flag, then the
// indices removed from 'old', then each element of 'val' not present in 'old'.
// Returns 'val' unchanged so the caller keeps the new list.
override public IList <T> op <T>(string name, IList <T> old, IList <T> val)
{
    if (!object.ReferenceEquals(old, val))
    {
        m_writer.Write(true); // list instance changed; a delta record follows

        var valSet = val.ToHashSet();
        var removedList = new List <byte>();
        var changedList = new List <byte>(); // NOTE(review): never populated or read — dead local?

        if (old != null)
        {
            // NOTE(review): the byte loop index wraps at 256 — this assumes
            // old.Count <= 255 (consistent with the byte-sized counts written below); confirm.
            for (byte iOld = 0; iOld < old.Count; ++iOld)
            {
                if (valSet.TryGetValue(old[iOld], out var oldV))
                {
                    // Element survives into the new list; remove it so valSet
                    // ends up holding only the additions.
                    valSet.Remove(old[iOld]);
                }
                else
                {
                    removedList.Add(iOld); // element at this old index was removed
                }
            }
        }

        // Removed indices: byte count, then each byte index.
        m_writer.Write((byte)removedList.Count);
        foreach (var r in removedList)
        {
            m_writer.Write(r);
        }

        // Added elements: byte count, then each element via the per-type delegate.
        DlgOp <T> dlg = getOp <T>();
        m_writer.Write((byte)valSet.Count);
        foreach (var v in valSet)
        {
            //op("test", default, v);
            //BitConverter.GetBytes(v);
            //byte[] b = App.ConvertPOD( v );
            //m_writer.Write( b );
            dlg.Invoke("{ignore}", v, v);
        }
    }
    else
    {
        m_writer.Write(false); // same instance: nothing to serialize
    }
    return val;
}
/// <summary>
/// This callback is called from the SccFinder, which passes it a list
/// of Procedures that form a SCC.
/// </summary>
/// <param name="procs">The procedures forming one strongly connected component
/// of the call graph; they are analyzed together.</param>
private void UntangleProcedureScc(IList <Procedure> procs)
{
    this.sccProcs = procs.ToHashSet();
    flow.CreateFlowsFor(procs);

    // Convert all procedures in the SCC to SSA form and perform
    // value propagation.
    var ssts = procs.Select(ConvertToSsa).ToArray();
    this.ssts.AddRange(ssts);
    DumpWatchedProcedure("After extra stack vars", ssts);

    // At this point, the computation of ProcedureFlow is possible.
    var trf = new TrashedRegisterFinder(program, flow, ssts, this.eventListener);
    trf.Compute();

    // New stack based variables may be available now.
    // Re-run value propagation with frame-access renaming enabled.
    foreach (var sst in ssts)
    {
        var vp = new ValuePropagator(program.SegmentMap, sst.SsaState, program.CallGraph, dynamicLinker, this.eventListener);
        vp.Transform();
        sst.RenameFrameAccesses = true;
        sst.Transform();
        DumpWatchedProcedure("After extra stack vars", sst.SsaState.Procedure);
    }

    // Classify segmented accesses and propagate projections on each SSA body.
    foreach (var ssa in ssts.Select(sst => sst.SsaState))
    {
        RemoveImplicitRegistersFromHellNodes(ssa);
        var sac = new SegmentedAccessClassifier(ssa);
        sac.Classify();
        var prj = new ProjectionPropagator(ssa, sac);
        prj.Transform();
        DumpWatchedProcedure("After projection propagation", ssa.Procedure);
    }

    // With liveness computable, trim dead code and dead call arguments.
    var uid = new UsedRegisterFinder(flow, procs, this.eventListener);
    foreach (var sst in ssts)
    {
        var ssa = sst.SsaState;
        RemovePreservedUseInstructions(ssa);
        DeadCode.Eliminate(ssa);
        uid.ComputeLiveIn(ssa, true);
        var procFlow = flow[ssa.Procedure];
        RemoveDeadArgumentsFromCalls(ssa.Procedure, procFlow, ssts);
        DumpWatchedProcedure("After dead call argument removal", ssa.Procedure);
    }
    eventListener.Advance(procs.Count);
}
private void ProcessHeads(string remote, IList<IGitRef> remoteHeads)
{
    IList<IGitRef> localHeads = GetLocalBranches().ToList();
    var knownRemoteNames = remoteHeads.ToHashSet(h => h.LocalName);

    // One row per local branch, pre-checked for push when the remote already knows it.
    foreach (var localHead in localHeads)
    {
        string remoteName = localHead.Remote == remote
            ? localHead.MergeWith ?? localHead.Name
            : localHead.Name;
        bool knownAtRemote = knownRemoteNames.Contains(remoteName);

        DataRow row = _branchTable.NewRow();
        row["Force"] = false;
        row["Delete"] = false;
        row["Local"] = localHead.Name;
        row["Remote"] = remoteName;
        row["New"] = knownAtRemote ? _no.Text : _yes.Text;
        row["Push"] = knownAtRemote;
        _branchTable.Rows.Add(row);
    }

    // Offer to delete all the left over remote branches.
    foreach (var remoteHead in remoteHeads)
    {
        var head = remoteHead;
        if (localHeads.All(h => h.Name != head.LocalName))
        {
            DataRow row = _branchTable.NewRow();
            row["Local"] = null;
            row["Remote"] = remoteHead.LocalName;
            row["New"] = _no.Text;
            row["Push"] = false;
            row["Force"] = false;
            row["Delete"] = false;
            _branchTable.Rows.Add(row);
        }
    }

    BranchGrid.Enabled = true;
}
/// <summary>
/// Forward ("active") breadth-first search over one-letter mutations.
/// Time complexity: O(n * wordLen); space complexity: O(n).
/// A bidirectional search would cut the time further.
/// </summary>
/// <param name="beginWord">Word the ladder starts from.</param>
/// <param name="endWord">Word the ladder must reach.</param>
/// <param name="wordList">Permitted intermediate (and final) words.</param>
/// <returns>Length of the shortest ladder, or 0 when none exists.</returns>
public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    var remaining = wordList.ToHashSet(); // candidate words not yet reached
    var frontier = new Queue<string>();
    frontier.Enqueue(beginWord);

    // BFS generation counter; each generation is one ladder rung.
    int generation = 1;
    while (frontier.Count > 0)
    {
        int frontierSize = frontier.Count;
        while (frontierSize-- > 0)
        {
            string current = frontier.Dequeue();
            remaining.Remove(current); // mark the current word as visited
            if (current == endWord)
            {
                return generation;
            }

            // Actively generate every one-letter mutation instead of scanning the list.
            for (int i = 0; i < current.Length; i++)
            {
                Expand(current, i);
            }
        }
        generation++;
    }
    return 0;

    // Enqueues every unvisited dictionary word that differs from 'source'
    // only at position 'index'.
    void Expand(string source, int index)
    {
        var buffer = source.ToArray();
        for (char letter = 'a'; letter <= 'z'; letter++)
        {
            buffer[index] = letter;
            string mutated = new string(buffer);
            if (remaining.Remove(mutated)) // found => consume and enqueue
            {
                frontier.Enqueue(mutated);
            }
        }
    }
}
/*Word Break*/
// Determines whether s can be segmented into a sequence of words from wordDict.
public bool WordBreak(string s, IList<string> wordDict)
{
    // BUG FIX: the previous implementation greedily reset the running word on the
    // first dictionary hit, so inputs like s = "aaab", dict = ["a", "aab"] were
    // wrongly rejected ("a" was consumed eagerly, stranding the trailing "b" even
    // though "a" + "aab" segments the string). Use the standard O(n^2) DP instead:
    // breakable[i] == true when the prefix s[0..i) can be split into dictionary words.
    var wordDictSet = wordDict.ToHashSet();
    var breakable = new bool[s.Length + 1];
    breakable[0] = true; // the empty prefix is trivially segmentable

    for (int end = 1; end <= s.Length; end++)
    {
        for (int start = 0; start < end; start++)
        {
            // A breakable prefix followed by one dictionary word makes s[0..end) breakable.
            if (breakable[start] && wordDictSet.Contains(s.Substring(start, end - start)))
            {
                breakable[end] = true;
                break;
            }
        }
    }
    return breakable[s.Length];
}
/// <summary>
/// Returns the elements of <paramref name="items"/> (order and duplicates preserved)
/// that do not appear in <paramref name="second"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public static IList<T> Except<T>(this IList<T> items, IList<T> second)
{
    // Guard clauses added for parity with the other IList Except overload in this
    // codebase: fail fast with a clear exception instead of a NullReferenceException
    // surfacing from deep inside the loop.
    if (items == null)
    {
        throw new ArgumentNullException(nameof(items));
    }
    if (second == null)
    {
        throw new ArgumentNullException(nameof(second));
    }

    HashSet<T> hashSet = second.ToHashSet(); // O(1) membership checks
    List<T> results = new List<T>();
    foreach (T item in items)
    {
        if (!hashSet.Contains(item))
        {
            results.Add(item);
        }
    }
    return results;
}
public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    // Bidirectional BFS: expand one layer alternately from each end until the
    // frontiers meet. TC: O(n*l); SC: O(n).
    // endWord must appear in wordList for a ladder to exist; the trivial
    // begin == end case is rejected, matching the original contract.
    if (wordList.Count == 0 || !wordList.Contains(endWord) || beginWord == endWord)
    {
        return 0;
    }

    HashSet<string> dict = wordList.ToHashSet();            // fast membership tests
    HashSet<string> visitedFromStart = new HashSet<string>();
    HashSet<string> visitedFromEnd = new HashSet<string>();
    Queue<string> startQueue = new Queue<string>();
    Queue<string> endQueue = new Queue<string>();
    startQueue.Enqueue(beginWord);
    endQueue.Enqueue(endWord);

    int level = 0;
    while (startQueue.Count > 0 || endQueue.Count > 0)
    {
        // One layer outward from the start side.
        level++;
        for (int size = startQueue.Count; size > 0; size--)
        {
            string word = startQueue.Dequeue();
            if (visitedFromEnd.Contains(word))
            {
                return level; // the two searches met
            }
            FindTransformWords(word, dict, visitedFromStart, startQueue);
        }

        // One layer outward from the end side.
        level++;
        for (int size = endQueue.Count; size > 0; size--)
        {
            string word = endQueue.Dequeue();
            if (visitedFromStart.Contains(word))
            {
                return level; // the two searches met
            }
            FindTransformWords(word, dict, visitedFromEnd, endQueue);
        }
    }
    return 0;
}
public int LadderLength(String beginWord, String endWord, IList<String> wordList)
{
    // Plain BFS, one layer per ladder step; 'visited' prevents re-expansion.
    int wordLen = beginWord.Length;
    ISet<string> dictionary = wordList.ToHashSet();
    var visited = new HashSet<string> { beginWord };
    var queue = new Queue<string>();
    queue.Enqueue(beginWord);

    int pathLength = 0;
    while (queue.Count > 0)
    {
        int layerSize = queue.Count;
        while (layerSize-- > 0)
        {
            string current = queue.Dequeue();
            if (current == endWord)
            {
                return pathLength + 1;
            }

            var chars = current.ToCharArray();
            for (int pos = 0; pos < wordLen; pos++)
            {
                char saved = chars[pos];
                for (char letter = 'a'; letter <= 'z'; letter++)
                {
                    chars[pos] = letter;
                    string candidate = new string(chars);
                    // visited.Add returns false for already-seen words.
                    if (dictionary.Contains(candidate) && visited.Add(candidate))
                    {
                        queue.Enqueue(candidate);
                    }
                }
                chars[pos] = saved; // restore before mutating the next position
            }
        }
        pathLength++;
    }
    return 0;
}
// Seven pairs: a fully concealed hand of WinningHandBaseTileCount tiles that sorts
// into seven distinct pairs (the distinct-count guard rules out counting four of a
// kind as two identical pairs).
protected bool IsSevenPairs(IList<Tile> uncalledTiles, IList<TileGrouping> calledSets)
{
    if (uncalledTiles.Count != WinningHandBaseTileCount ||
        calledSets.Any() ||
        uncalledTiles.ToHashSet().Count != WinningHandBaseTileCount / 2)
    {
        return false;
    }

    uncalledTiles = TileSorter.SortTiles(uncalledTiles);
    // BUG FIX: the previous bound (i < Count - 2) never examined the final pair, so a
    // hand whose last two tiles differed (e.g. four of a kind absorbing two pair slots,
    // which still satisfies the distinct-count guard) was wrongly accepted.
    for (int i = 0; i < uncalledTiles.Count - 1; i += 2)
    {
        if (!uncalledTiles[i].Equals(uncalledTiles[i + 1]))
        {
            return false;
        }
    }
    return true;
}
// Computes the next generation of black tiles on a hex grid. Only tiles that are
// currently black, or adjacent to a black tile, can change state, so those are the
// only candidates examined.
public static IList<GridPoint> GetNextDayBlackTiles(IList<GridPoint> startingBlackTiles)
{
    var result = new List<GridPoint>();
    var currentBlackTiles = startingBlackTiles.ToHashSet();

    // Candidate set: every black tile plus all of its neighbours.
    var tilesToCheck = new HashSet<GridPoint>();
    foreach (var blackTile in startingBlackTiles)
    {
        tilesToCheck.Add(blackTile);
        foreach (var adjacentTile in GetAdjacentHexPoints(blackTile))
        {
            tilesToCheck.Add(adjacentTile); // HashSet.Add already ignores duplicates
        }
    }

    foreach (var tile in tilesToCheck)
    {
        var isCurrentlyBlack = currentBlackTiles.Contains(tile);
        // FIX: the lambda parameter previously shadowed the foreach variable 'tile'
        // (compile error CS0136); use a distinct name, and Count(predicate) instead
        // of Where(...).Count().
        var numberOfAdjacentBlackTiles = GetAdjacentHexPoints(tile)
            .Count(neighbor => currentBlackTiles.Contains(neighbor));

        // Any black tile with zero or more than 2 black tiles immediately adjacent to it is flipped to white.
        // Any white tile with exactly 2 black tiles immediately adjacent to it is flipped to black.
        if (isCurrentlyBlack && (numberOfAdjacentBlackTiles == 1 || numberOfAdjacentBlackTiles == 2))
        {
            result.Add(tile);
        }
        else if (!isCurrentlyBlack && numberOfAdjacentBlackTiles == 2)
        {
            result.Add(tile);
        }
    }
    return result;
}
public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    // Standard BFS over the implicit one-letter-difference graph; 'explored' blocks revisits.
    var frontier = new Queue<string>();
    var explored = new HashSet<string>();
    var dictionary = wordList.ToHashSet();

    frontier.Enqueue(beginWord);
    for (int steps = 1; frontier.Count > 0; steps++)
    {
        for (int remaining = frontier.Count; remaining > 0; remaining--)
        {
            var word = frontier.Dequeue();
            if (word == endWord)
            {
                return steps;
            }
            for (int pos = 0; pos < beginWord.Length; pos++)
            {
                var chars = word.ToCharArray(); // fresh copy per position, as in the original
                for (char letter = 'a'; letter <= 'z'; letter++)
                {
                    if (chars[pos] == letter)
                    {
                        continue;
                    }
                    chars[pos] = letter;
                    var candidate = new string(chars);
                    // explored.Add returns false when the word was already reached.
                    if (dictionary.Contains(candidate) && explored.Add(candidate))
                    {
                        frontier.Enqueue(candidate);
                    }
                }
            }
        }
    }
    return 0;
}
public int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    // Frontier-set BFS: each pass replaces the frontier with every newly reachable word.
    var frontier = new HashSet<string> { beginWord };
    var unused = wordList.ToHashSet(); // words not yet pulled into any frontier
    var distance = 1;

    while (!frontier.Contains(endWord))
    {
        var nextFrontier = new HashSet<string>();
        foreach (var word in frontier)
        {
            for (var pos = 0; pos < word.Length; pos++)
            {
                var chars = word.ToCharArray();
                for (var letter = 'a'; letter <= 'z'; letter++)
                {
                    chars[pos] = letter;
                    var candidate = new string(chars);
                    if (unused.Remove(candidate)) // contained => consume and advance
                    {
                        nextFrontier.Add(candidate);
                    }
                }
            }
        }

        distance++;
        if (nextFrontier.Count == 0)
        {
            return 0; // the frontier died out before reaching endWord
        }
        frontier = nextFrontier;
    }
    return distance;
}
/// <summary>
/// Time Complexity: O(N*L) where N is number of words in wordList and L is length of beginWord
/// Space Complexity: O(N)
/// </summary>
private static IList<IList<string>> FindLAdders(string beginWord, string endWord, IList<string> wordList)
{
    var result = new List<IList<string>>();

    var seen = wordList.ToHashSet();
    seen.Add(beginWord);

    // Adjacency lists: one (initially empty) entry per known word.
    var adjLists = new Dictionary<string, List<string>>();
    foreach (string word in seen)
    {
        adjLists[word] = new List<string>();
    }

    // BFS distances from beginWord; the DFS uses them to walk only shortest paths.
    var distanceMap = new Dictionary<string, int> { [beginWord] = 0 };

    Bfs(beginWord, endWord, seen, adjLists, distanceMap);
    Dfs(beginWord, endWord, seen, adjLists, distanceMap, new List<string>(), result);
    return result;
}
/// <summary>
/// Time Complexity: O(N*L) where N is number of elements from wordList and L is length of word
/// Space Complexity: O(N)
/// </summary>
private static int LadderLength(string beginWord, string endWord, IList<string> wordList)
{
    var unvisited = wordList.ToHashSet();
    var queue = new Queue<string>();
    queue.Enqueue(beginWord);

    var length = 0;
    while (queue.Count > 0)
    {
        for (int remaining = queue.Count; remaining > 0; remaining--)
        {
            var word = queue.Dequeue();
            if (word == endWord)
            {
                return length + 1;
            }
            for (int pos = 0; pos < word.Length; pos++)
            {
                var chars = word.ToCharArray();
                for (char letter = 'a'; letter <= 'z'; letter++)
                {
                    chars[pos] = letter;
                    var candidate = new string(chars);
                    // Consuming a word from the set doubles as the visited mark;
                    // the self-mutation (candidate == word) is skipped first.
                    if (word != candidate && unvisited.Remove(candidate))
                    {
                        queue.Enqueue(candidate);
                    }
                }
            }
        }
        length++;
    }
    return 0;
}
// Merges matrices into one view: matrices whose column-key sets overlap are stacked
// with MergeRowsView, and the resulting groups are then joined with MergeColsView.
// NOTE(review): grouping is a single pass over 'inNeed' per seed matrix, and
// colKeySet grows during that pass — whether all transitive overlaps land in one
// group may depend on enumeration order; confirm the intended semantics.
static public Matrix <TRowKey, TColKey, TValue2> MergeRowsAndColsView <TRowKey, TColKey, TValue2>(bool mustMatch, IList <Matrix <TRowKey, TColKey, TValue2> > matrices)
{
    var inNeed = matrices.ToHashSet(); // matrices not yet assigned to a merge group
    var mergeMatrixList = new List <Matrix <TRowKey, TColKey, TValue2> >();
    while (inNeed.Count > 0)
    {
        // Seed a group with an arbitrary remaining matrix...
        var first = inNeed.First();
        var colKeySet = first.ColKeys.ToHashSet();
        var nextSet = first.AsSingletonEnumerable().ToHashSet();
        // ...then pull in every remaining matrix sharing a column key with the group so far.
        foreach (var matrix in inNeed)
        {
            if (matrix == first || colKeySet.IntersectAny(matrix.ColKeys))
            {
                nextSet.Add(matrix);
                colKeySet.AddNewOrOldRange(matrix.ColKeys);
            }
        }
        inNeed.RemoveAll(nextSet);
        if (nextSet.Count == 1)
        {
            mergeMatrixList.Add(nextSet.First()); // singleton group: no row merge needed
        }
        else
        {
            mergeMatrixList.Add(new MergeRowsView <TRowKey, TColKey, TValue2>(mustMatch, nextSet.ToArray()));
        }
    }
    if (mergeMatrixList.Count == 1)
    {
        return mergeMatrixList[0]; // single group: no column merge needed
    }
    var output = new MergeColsView <TRowKey, TColKey, TValue2>(mustMatch, mergeMatrixList.ToArray());
    return output;
}
/// <summary>
/// Duplicates the given objects: zeroes every int "*Id" property on each distinct
/// entity, then registers them with the write context for insertion.
/// </summary>
private void DuplicateObjects(IList<object> objectsToDuplicate)
{
    var uniqueObjectsToDuplicate = objectsToDuplicate.ToHashSet();

    // Reset every int property whose name ends with "Id" to 0.
    // (Presumably so the persistence layer assigns fresh ids — confirm with the ORM.)
    foreach (var entity in uniqueObjectsToDuplicate)
    {
        var idProps = entity.GetType()
            .GetProperties()
            .Where(p => p.PropertyType == typeof(int) && p.Name.EndsWith("Id"));
        foreach (var prop in idProps)
        {
            prop.SetValue(entity, 0);
        }
    }

    // Register the (now id-less) entities for insertion.
    foreach (var entity in uniqueObjectsToDuplicate)
    {
        _writeDbContext.Add(entity);
    }
}
private void RestoreSelectedFiles(IList<GitItemStatus> unStagedFiles, IList<GitItemStatus> stagedFiles, IList<GitItemStatus> lastSelection)
{
    if (_currentFilesList == null || _currentFilesList.IsEmpty)
    {
        SelectStoredNextIndex();
        return;
    }

    // Re-select, by name, whatever was selected before the lists were refreshed.
    var candidates = _currentFilesList == Staged ? stagedFiles : unStagedFiles;
    var previouslySelectedNames = lastSelection.ToHashSet(x => x.Name);
    var reselection = candidates.Where(x => previouslySelectedNames.Contains(x.Name)).ToList();

    if (reselection.Any())
    {
        _currentFilesList.SelectedItems = reselection;
    }
    else
    {
        SelectStoredNextIndex(); // nothing survived the refresh; fall back to index-based selection
    }
}
// Finds all shortest transformation ladders from beginWord to endWord using a
// bidirectional BFS (always expanding the smaller frontier) to build a DAG of
// shortest-path edges, then a DFS over that DAG to enumerate the ladders.
public IList <IList <string> > FindLadders(string beginWord, string endWord, IList <string> wordList)
{
    var ladders = new List <IList <string> >();
    var wordListSet = wordList.ToHashSet();
    if (!wordListSet.Contains(endWord))
    {
        // No ladder can end on a word outside the list.
        return(ladders);
    }
    // graph: forward edges along shortest paths, built by Bfs from whichever
    // direction is currently being expanded.
    var graph = new Dictionary <string, List <string> >();
    var wordMappings = GetWordMappings();
    var beginQueue = new HashSet <string>(new[] { beginWord });
    var endQueue = new HashSet <string>(new[] { endWord });
    while (beginQueue.Count > 0 && endQueue.Count > 0)
    {
        // Expand the smaller frontier; stop once the frontiers touch.
        var foundEndWord = beginQueue.Count <= endQueue.Count ? Bfs(true) : Bfs(false);
        if (foundEndWord)
        {
            break;
        }
    }
    var seen = new HashSet <string>(new[] { beginWord });
    var ladder = new List <string>(new [] { beginWord });
    Dfs(beginWord);
    return(ladders);

    // Walks the shortest-path DAG from 'word', appending each complete ladder to 'ladders'.
    void Dfs(string word)
    {
        if (string.Equals(word, endWord))
        {
            ladders.Add(new List <string>(ladder));
            return;
        }
        if (graph.ContainsKey(word))
        {
            foreach (var nextWord in graph[word])
            {
                if (!seen.Contains(nextWord))
                {
                    seen.Add(nextWord);
                    ladder.Add(nextWord);
                    Dfs(nextWord);
                    // Backtrack: undo the tentative step before trying the next branch.
                    ladder.RemoveAt(ladder.Count - 1);
                    seen.Remove(nextWord);
                }
            }
        }
    }

    // Expands one BFS layer. isForward == true grows beginQueue toward endWord;
    // false grows endQueue toward beginWord. Returns true once the frontiers meet.
    bool Bfs(bool isForward)
    {
        var queue = isForward ? beginQueue : endQueue;
        var otherQueue = queue == beginQueue ?
                         endQueue : beginQueue;
        // Words in the current frontier are now consumed and cannot be revisited.
        wordListSet.RemoveWhere(queue.Contains);
        var nextQueue = new HashSet <string>();
        var foundEnd = false;
        foreach (var word in queue)
        {
            foreach (var key in GetWordKeys(word).Where(k => wordMappings.ContainsKey(k)))
            {
                foreach (var nextWord in wordMappings[key])
                {
                    if (otherQueue.Contains(nextWord))
                    {
                        foundEnd = true;
                    }
                    if (wordListSet.Contains(nextWord))
                    {
                        // Record edges so they always point in the begin -> end direction.
                        if (isForward)
                        {
                            if (!graph.ContainsKey(word))
                            {
                                graph.Add(word, new List <string>());
                            }
                            graph[word].Add(nextWord);
                        }
                        else
                        {
                            if (!graph.ContainsKey(nextWord))
                            {
                                graph.Add(nextWord, new List <string>());
                            }
                            graph[nextWord].Add(word);
                        }
                        nextQueue.Add(nextWord);
                    }
                }
            }
        }
        if (isForward)
        {
            beginQueue = nextQueue;
        }
        else
        {
            endQueue = nextQueue;
        }
        return(foundEnd);
    }

    // Buckets every list word under each of its wildcard keys (one letter blanked out).
    Dictionary <string, List <string> > GetWordMappings()
    {
        var dict = new Dictionary <string, List <string> >();
        foreach (var word in wordList)
        {
            foreach (var key in GetWordKeys(word))
            {
                if (!dict.ContainsKey(key))
                {
                    dict.Add(key, new List <string>());
                }
                dict[key].Add(word);
            }
        }
        return(dict);
    }

    // Yields one wildcard key per character position, e.g. "hot" -> "_ot", "h_t", "ho_".
    IEnumerable <string> GetWordKeys(string word) => word.Select((chr, i) => $"{word.Substring(0, i)}_{word.Substring(i + 1)}");
}
private void ProcessHeads(string remote, IList<IGitRef> localHeads, IList<IGitRef> remoteHeads)
{
    var remoteNames = remoteHeads.ToHashSet(h => h.LocalName);

    // Add all the local branches; pre-check "Push" when the remote already has the branch.
    foreach (var head in localHeads)
    {
        string remoteName = head.Remote == remote ? head.MergeWith ?? head.Name : head.Name;
        bool knownAtRemote = remoteNames.Contains(remoteName);

        DataRow row = _branchTable.NewRow();
        row["Force"] = false;
        row["Delete"] = false;
        row["Local"] = head.Name;
        row["Remote"] = remoteName;
        row["New"] = knownAtRemote ? _no.Text : _yes.Text;
        row["Push"] = knownAtRemote;
        _branchTable.Rows.Add(row);
    }

    // Offer to delete all the left over remote branches.
    foreach (var remoteHead in remoteHeads)
    {
        var head = remoteHead;
        bool existsLocally = localHeads.Any(h => h.Name == head.LocalName);
        if (!existsLocally)
        {
            DataRow row = _branchTable.NewRow();
            row["Local"] = null;
            row["Remote"] = remoteHead.LocalName;
            row["New"] = _no.Text;
            row["Push"] = false;
            row["Force"] = false;
            row["Delete"] = false;
            _branchTable.Rows.Add(row);
        }
    }

    BranchGrid.Enabled = true;
}