public int solution(int[] A) { // write your code in C# 5.0 with .NET 4.5 (Mono)
    // Build a graph keyed by value: each value is linked to the values that
    // appear immediately before/after any of its occurrences in A.
    var graph = new Dictionary<int, Node>();
    for (int i = 0; i < A.Length; i++)
    {
        Node node;
        if (!graph.TryGetValue(A[i], out node))
        {
            node = new Node(A[i], new List<int>());
            graph.Add(A[i], node);
        }
        if (i > 0)
            node.childs.Add(A[i - 1]);
        if (i < A.Length - 1)
            node.childs.Add(A[i + 1]);
    }

    // Breadth-first search from the first value towards the last one.
    int target = A[A.Length - 1];
    var pending = new Queue<int>();
    pending.Enqueue(A[0]);
    graph[A[0]].depth = 1;

    while (pending.Count > 0)
    {
        int current = pending.Dequeue();
        Node node;
        if (!graph.TryGetValue(current, out node))
            continue; // already expanded and removed
        if (node.value == target)
            return node.depth;
        // Removing the node marks it as fully expanded.
        graph.Remove(current);
        foreach (int neighbour in node.childs)
        {
            Node next;
            if (!graph.TryGetValue(neighbour, out next))
                continue;
            if (next.depth == 0)
                next.depth = node.depth + 1;
            pending.Enqueue(neighbour);
        }
    }
    // Unreachable target: fall back to the array length, as the original did.
    return A.Length;
}
public int solution(int[] A) { // write your code in C# 5.0 with .NET 4.5 (Mono)
    int N = A.Length;
    // Breadth-first search over candidate split points; the sentinel tuple is
    // the root of the search tree.
    var frontier = new Queue<Tup>();
    frontier.Enqueue(new Tup(-1, -1, 0, -4));
    while (frontier.Count > 0)
    {
        Tup t = frontier.Dequeue();
        if (isFab(t.mid - t.start) && isFab(N - t.mid))
        {
            // Both the last segment and the tail are Fibonacci-sized: done.
            Console.WriteLine(t.prev + " | " + t.start + " : " + t.mid + " : " + (N - t.mid));
            return t.Depth + 1;
        }
        // Expand only viable nodes: the root (Depth 0) always expands, deeper
        // nodes only when their last segment is Fibonacci-sized. (The two
        // original branches ran the identical loop, merged here.)
        if (t.Depth == 0 || isFab(t.mid - t.start))
        {
            for (int i = t.mid + 1; i < A.Length; i++)
            {
                if (A[i] != 1)
                    continue;
                Tup child = new Tup(t.mid, i, t.Depth + 1, t.start);
                if (myFab(i) >= N)
                    break; // further positions can only be farther out of range
                frontier.Enqueue(child);
                //Console.WriteLine(i);
            }
        }
    }
    return -1;
}
/// <summary>
/// Builds a human-readable sentence from a test method's name, replacing each
/// underscore with the next formatted argument value.
/// </summary>
/// <param name="method">The method whose un-camel-cased name forms the sentence.</param>
/// <param name="args">The runtime argument values substituted for underscores.</param>
/// <returns>The normalized, trimmed description text.</returns>
internal static string CreateText(MethodInfo method, object[] args)
{
    // Fix: GetParameters() reflects over the method; the original called it
    // inside the Select lambda, once per argument. Hoist the invariant call.
    var parameters = method.GetParameters();
    var parameterQueue = new Queue<string>(args.Select((o, i) =>
    {
        var param = parameters.ElementAtOrDefault(i);
        return FormatValue(param, o);
    }));
    var uncameled = UnCamel(method.Name);
    // Each '_' in the method name consumes one formatted parameter.
    var paramsSubstituted = Regex.Replace(uncameled, "_",
        x => parameterQueue.Any() ? " " + parameterQueue.Dequeue() + " " : " <missing parameter> ");
    // Leftover parameters (more args than underscores) are appended in parentheses.
    if (parameterQueue.Any())
    {
        paramsSubstituted += "(" + string.Join(", ", parameterQueue) + ")";
    }
    var sb = new StringBuilder(paramsSubstituted);
    foreach (var kvp in Replacements)
    {
        sb.Replace(" " + kvp.Key + " ", " " + kvp.Value + " ");
    }
    // Collapse whitespace runs introduced by the substitutions above.
    var normalizeSpaces = Regex.Replace(sb.ToString(), "\\s+", " ");
    return normalizeSpaces.Trim();
}
/// <summary>
/// Consumes tokens until the queue is empty or the current element/collection
/// scope closes, composing one renderer delegate per token.
/// </summary>
private static Action<StringBuilder, ContextObject> Parse(Queue<TokenPair> tokens, ParsingOptions options, InferredTemplateModel currentScope = null)
{
    var renderers = new List<Action<StringBuilder, ContextObject>>();
    // Single composed delegate returned from both exit points; it closes over
    // the list, which is fully populated before anything can invoke it.
    Action<StringBuilder, ContextObject> composite = (builder, context) =>
    {
        foreach (var renderer in renderers)
        {
            renderer(builder, context);
        }
    };

    while (tokens.Any())
    {
        var token = tokens.Dequeue();
        switch (token.Type)
        {
            case TokenType.Comment:
                // Comments render nothing.
                break;
            case TokenType.Content:
                renderers.Add(HandleContent(token.Value));
                break;
            case TokenType.CollectionOpen:
                renderers.Add(HandleCollectionOpen(token, tokens, options, currentScope));
                break;
            case TokenType.ElementOpen:
                renderers.Add(HandleElementOpen(token, tokens, options, currentScope));
                break;
            case TokenType.InvertedElementOpen:
                renderers.Add(HandleInvertedElementOpen(token, tokens, options, currentScope));
                break;
            case TokenType.CollectionClose:
            case TokenType.ElementClose:
                // This should immediately return if we're in the element scope,
                // and if we're not, this should have been detected by the tokenizer!
                return composite;
            case TokenType.EscapedSingleValue:
            case TokenType.UnescapedSingleValue:
                renderers.Add(HandleSingleValue(token, options, currentScope));
                break;
        }
    }
    return composite;
}
/// <summary>
/// Uncovers the cell at (i, j) for the given user and flood-fills outwards:
/// whenever an uncovered cell has no mined neighbours, all its neighbours are
/// cascaded as well. Mined, already-uncovered and flagged cells are skipped.
/// </summary>
public void Uncover(int i, int j, int userId)
{
    var cell = this[i, j];
    if (cell.Type == CellType.Mined)
    {
        // Do not uncover mined cells.
        return;
    }
    var toCascade = new Queue<MinesweeperCell>();
    toCascade.Enqueue(cell);
    while (toCascade.Any())
    {
        cell = toCascade.Dequeue();
        if (cell.Status == CellStatus.Uncovered || cell.FlagOwnerId != null) // skip already-uncovered or flagged cells
        {
            continue;
        }
        cell.Uncover(userId);
        var neighbors = _neighbors(cell.X, cell.Y);
        // Idiom fix: Any(predicate) instead of Where(predicate).Any().
        if (!neighbors.Any(x => x.Type == CellType.Mined))
        {
            // No adjacent mines: cascade into every neighbour.
            foreach (MinesweeperCell neighbor in neighbors)
            {
                toCascade.Enqueue(neighbor);
            }
        }
    }
}
/// <summary>
/// Resolves one element of a dotted template path and recurses on the rest.
/// Always yields a context (possibly with a null Value) so rendering never
/// dereferences a missing key.
/// </summary>
private ContextObject GetContextForPath(Queue<String> elements)
{
    // Empty path: the current context is the answer.
    if (!elements.Any())
    {
        return this;
    }

    var element = elements.Dequeue();
    if (element.StartsWith(".."))
    {
        // "../" steps up one scope. Calling "../" too much may be "ok" in that
        // if we're at root, we just stop recursion and traverse down the path.
        return Parent != null
            ? Parent.GetContextForPath(elements)
            : GetContextForPath(elements);
    }

    //TODO: handle array accessors and maybe "special" keys.
    // ALWAYS build a child context, even when the key is absent (Value stays null).
    var child = new ContextObject();
    child.Key = element;
    child.Parent = this;
    var dictionary = this.Value as IDictionary<string, object>;
    if (dictionary != null)
    {
        object resolved;
        dictionary.TryGetValue(element, out resolved);
        child.Value = resolved;
    }
    return child.GetContextForPath(elements);
}
/// <summary>
/// Counts segment transitions needed to change the display from one number to
/// another, digit by digit. With no "from" value, counts the transitions to
/// light each digit of "to" from blank.
/// </summary>
protected int Transitions(int to, int? from = null)
{
    if (from == null)
    {
        return to.Digits().Select(d => Transitions(DigitSegments[d])).Sum();
    }

    var toDigits = new Queue<int>(to.Digits());
    var fromDigits = new Queue<int>(from.Value.Digits());
    var total = 0;

    // Pair digits up while both numbers still have some left.
    while (toDigits.Any() && fromDigits.Any())
    {
        total += Transitions(DigitSegments[toDigits.Dequeue()], DigitSegments[fromDigits.Dequeue()]);
    }
    // The longer number contributes its surplus digits on their own.
    while (toDigits.Any())
    {
        total += Transitions(toDigits.Dequeue());
    }
    while (fromDigits.Any())
    {
        total += Transitions(fromDigits.Dequeue());
    }
    return total;
}
// Breadth-first traversal of a control-flow graph: visits every reachable
// block, wraps it in an analysis node via MakeCNode, and recursively descends
// into the CFG of any file resolved from an include expression.
// Mutates shared instance state: BFSRUNS/activebfs counters, "visited" and
// "inFile" sets — NOTE(review): not thread-safe; presumably single-threaded.
private void BFS(CFGBlock root, BidirectionalGraph<CFGBlock, TaggedEdge<CFGBlock, EdgeTag>> _graph)
{
    Console.WriteLine("Total BFS recursions: " + BFSRUNS + " Active BFS: " + activebfs + " nodes currently in graph: " + nodeList.Count);
    BFSRUNS++;
    activebfs++;
    Queue<CFGBlock> queue = new Queue<CFGBlock> ();
    queue.Enqueue (root);
    while (queue.Any ())
    {
        var node = queue.Dequeue ();
        // "visited" persists across recursive calls, so blocks already handled
        // by an included file's traversal are not re-processed here.
        if (visited.Contains(node))
            continue;
        visited.Add (node);
        var cNode = MakeCNode (node);
        // MakeCNode may yield null; only tag the owning graph when it exists.
        if (cNode != null)
            cNode.graph = _graph;
        if (node.AstEntryNode != null && node.AstEntryNode.LocalName == AstConstants.Nodes.Expr_Include)
        {
            File output = null;
            resolver.TryResolveInclude (node.AstEntryNode, out output);
            // "inFile" acts as the include stack: it prevents infinite
            // recursion on circular includes and is popped after traversal.
            if (output != null && !inFile.Contains(output))
            {
                var _root = output.CFG.Roots ().Single (v => v.IsSpecialBlock);
                inFile.Add(output);
                //Console.WriteLine("Recursive call: " + output.Name);
                BFS (_root, (BidirectionalGraph<CFGBlock, TaggedEdge<CFGBlock, EdgeTag>>)output.CFG);
                //Console.WriteLine("Finished call: " + output.Name);
                //Console.WriteLine("Still " + inFile.Count() + " files left");
                inFile.Remove(output);
            }
        }
        foreach (var edge in _graph.OutEdges(node))
            if (!visited.Contains (edge.Target)) //No loops, please
                queue.Enqueue (edge.Target);
    }
    activebfs--;
}
// Reconstructs the genome by majority vote over aligned reads and writes one
// character per position to outFile. A background pipeline streams alignments
// out of the database in 1M-position chunks while the foreground loop consumes
// them position by position.
// NOTE(review): the hand-rolled event/lock coordination below has races — a
// single ManualResetEvent can coalesce multiple Enqueue signals, so Dequeue may
// run on an empty queue; left as-is, documented only.
public void ConstructGenome(string outFile)
{
    int readLength;
    long genomeLength;
    // Read run metadata (read length, total genome length) up front.
    using (var session = _database.SessionFactory.OpenStatelessSession())
    {
        var metadata = session.Query<Metadata>().First();
        readLength = metadata.ReadLength;
        genomeLength = metadata.GenomeLength;
    }
    using (var file = File.Open(outFile, FileMode.Create))
    {
        var writer = new BinaryWriter(file);
        // position -> all reads aligned at that position (shared, guarded by lock).
        var positionsToReads = new Dictionary<uint, byte[][]>();
        // Highest position inserted so far; long.MaxValue once the producer is done.
        long maxInserted = 0;
        ThreadPool.QueueUserWorkItem(o =>
        {
            // Middle stage: drains lookups queried by the inner worker below
            // and copies them into positionsToReads in key order.
            Queue<ILookup<uint, byte[]>> queue = new Queue<ILookup<uint, byte[]>>();
            long[] maxQueried = {0};
            ManualResetEvent enqueueEvent = new ManualResetEvent(false);
            ThreadPool.QueueUserWorkItem(o2 =>
            {
                // Innermost stage: queries alignments in 1M-position chunks.
                using (var session = _database.SessionFactory.OpenStatelessSession())
                {
                    const long readInterval = 1000000;
                    for (long i = 0; i < genomeLength; i += readInterval)
                    {
                        long i1 = i; // avoid capturing the loop variable
                        var lookup = LinqExtensionMethods.Query<Alignment>(session)
                            .Where(
                                a => a.Position >= i1 && a.Position < i1 + readInterval)
                            .ToLookup(a => a.Position, a => a.ShortRead);
                        lock (queue)
                        {
                            queue.Enqueue(lookup);
                            Interlocked.Add(ref maxQueried[0], readInterval);
                            enqueueEvent.Set();
                        }
                    }
                }
                // Sentinel: signals "no more chunks will be queried".
                Interlocked.Exchange(ref maxQueried[0], long.MaxValue);
            });
            while (Volatile.Read(ref maxQueried[0]) != long.MaxValue)
            {
                ILookup<uint, byte[]> lookup;
                enqueueEvent.WaitOne();
                lock (queue)
                {
                    // NOTE(review): if two Enqueues happened before this wakeup,
                    // Reset loses one signal; and Dequeue assumes non-empty.
                    lookup = queue.Dequeue();
                    enqueueEvent.Reset();
                }
                var keys = lookup.Select(g => g.Key).ToArray();
                Array.Sort(keys);
                foreach (var key in keys)
                {
                    lock (positionsToReads)
                    {
                        positionsToReads.Add(key, lookup[key].ToArray());
                        // Keys are inserted in ascending order, so this tracks the max.
                        Interlocked.Exchange(ref maxInserted, key);
                    }
                }
            }
            // Drain anything still queued after the producer finished.
            // NOTE(review): this reads "queue" without the lock — TODO confirm
            // the producer thread has fully stopped by this point.
            while (queue.Any())
            {
                ILookup<uint, byte[]> lookup = queue.Dequeue();
                var keys = lookup.Select(g => g.Key).ToArray();
                Array.Sort(keys);
                foreach (var key in keys)
                {
                    lock (positionsToReads)
                    {
                        positionsToReads.Add(key, lookup[key].ToArray());
                        Interlocked.Exchange(ref maxInserted, key);
                    }
                }
            }
            // Sentinel: unblocks any consumer SpinWait below permanently.
            Interlocked.Exchange(ref maxInserted, long.MaxValue);
        });
        // Consumer: for each genome position, vote among all reads covering it.
        for (long textIndex = 0; textIndex < genomeLength; textIndex++)
        {
            lock (positionsToReads)
            {
                // Reads starting more than readLength before textIndex can no
                // longer cover it; evict them to bound memory.
                positionsToReads.Remove((uint)(textIndex - readLength));
            }
            if (textIndex%100000 == 0)
            {
                Console.WriteLine(textIndex); // progress indicator
            }
            int[] characterCounts = new int[4]; // votes for A/C/G/T
            // Walk every read start position that could cover textIndex.
            for (uint i = (uint)textIndex; i > textIndex - readLength && i > 0; i--)
            {
                uint i1 = i;
                // Block until the pipeline has inserted reads up to this position.
                SpinWait.SpinUntil(() => Volatile.Read(ref maxInserted) >= i1);
                bool gotValue;
                byte[][] reads;
                lock (positionsToReads)
                {
                    gotValue = positionsToReads.TryGetValue(i, out reads);
                }
                if (!gotValue) continue;
                foreach (var read in reads)
                {
                    DnaSequence sequence = new DnaSequence(read, readLength);
                    // Offset of textIndex within a read starting at i.
                    var character = sequence[textIndex - i];
                    characterCounts[character]++;
                }
            }
            // Majority vote; 'N' when no read covered this position.
            var finalCharacter = 'N';
            var maxCount = 0;
            var maxIndex = -1;
            for (int i = 0; i < 4; i++)
            {
                if (characterCounts[i] > maxCount)
                {
                    maxCount = characterCounts[i];
                    maxIndex = i;
                }
            }
            switch (maxIndex)
            {
                case 0:
                    finalCharacter = 'A';
                    break;
                case 1:
                    finalCharacter = 'C';
                    break;
                case 2:
                    finalCharacter = 'G';
                    break;
                case 3:
                    finalCharacter = 'T';
                    break;
            }
            writer.Write(finalCharacter);
        }
    }
}
// Streams fixed-length reads from a binary file and aligns them against the
// suffix array on a pool of worker threads, persisting results in one database
// transaction. The main thread produces into a bounded queue (20k/40k
// low/high watermarks via queueReadyForMore); workers consume and align.
// NOTE(review): the coordination here is fragile — see inline notes; the
// "session" is also shared across worker threads, which NHibernate sessions
// generally do not allow — TODO confirm.
public void AlignReads(string readsFile)
{
    using (var session = _database.SessionFactory.OpenStatelessSession())
    using (var transaction = session.BeginTransaction())
    using (var file = File.OpenRead(readsFile))
    {
        var reader = new BinaryReader(file);
        var readLength = reader.ReadInt32(); // 4-byte header: per-read length
        // Remaining bytes / bytes-per-read = total number of reads.
        var numReads = (file.Length - 4) / DnaSequence.ByteArrayLength(readLength);
        Metadata metadata = new Metadata()
        {
            Id = 1,
            GenomeLength = _suffixArray.Length - 1,
            ReadLength = readLength
        };
        session.Insert(metadata);
        Random[] randoms = new Random[_threadCount]; // one RNG per worker (Random is not thread-safe)
        var manualResetEvents = new ManualResetEvent[_threadCount]; // worker-finished signals
        bool[] doneReading = {false};
        Queue<byte[]> queue = new Queue<byte[]>(); // shared work queue, guarded by lock(queue)
        long[] readsAligned = {0}; // also doubles as the next read id
        ManualResetEvent queueReadyForMore = new ManualResetEvent(true); // backpressure gate
        for (int i = 0; i < manualResetEvents.Length; i++)
        {
            randoms[i] = new Random();
            manualResetEvents[i] = new ManualResetEvent(false);
            var i1 = i; // avoid capturing the loop variable
            ThreadPool.QueueUserWorkItem(o =>
            {
                while (Volatile.Read(ref readsAligned[0]) != numReads)
                {
                    byte[] shortReadBytes;
                    long readId;
                    lock (queue)
                    {
                        // NOTE(review): busy-wait — re-acquires the lock in a
                        // tight loop while the queue is empty.
                        if (!queue.Any()) continue;
                        shortReadBytes = queue.Dequeue();
                        // Below the low watermark: let the producer refill.
                        if (queue.Count < 20000) queueReadyForMore.Set();
                        readId = Volatile.Read(ref readsAligned[0]);
                        Interlocked.Increment(ref readsAligned[0]);
                    }
                    var shortRead = new DnaSequence(shortReadBytes, readLength);
                    AddAlignmentsToDatabase(shortRead, 2, randoms[i1], session, readId);
                    if (readId%20000 == 0)
                    {
                        Console.WriteLine(readId); // progress indicator
                        GC.Collect();
                    }
                }
                manualResetEvents[i1].Set();
            });
            // NOTE(review): bumps the shared counter once per spawned worker,
            // outside any lock — this skews read ids and the termination check
            // (readsAligned != numReads). Looks like a bug; confirm intent.
            readsAligned[0]++;
        }
        // Producer: read every read from the file into the bounded queue.
        long readsRead = 0;
        while (readsRead < numReads)
        {
            byte[] shortReadBytes = reader.ReadBytes(DnaSequence.ByteArrayLength(readLength));
            queueReadyForMore.WaitOne(); // respect the high watermark
            lock (queue)
            {
                queue.Enqueue(shortReadBytes);
                // Above the high watermark: pause producing until drained.
                if (queue.Count > 40000) queueReadyForMore.Reset();
            }
            readsRead++;
        }
        lock (doneReading)
        {
            // NOTE(review): doneReading is set but never read by the workers.
            doneReading[0] = true;
        }
        WaitHandle.WaitAll(manualResetEvents);
        transaction.Commit();
    }
}