/// <summary>
/// Core of <c>RewriteChildren</c>: runs <paramref name="transformer"/> over each
/// immediate child of <paramref name="value"/> and rebuilds the node only when
/// at least one child instance actually changed (by reference).
/// </summary>
/// <typeparam name="T">The rewritable tree type.</typeparam>
/// <param name="rewriter">The rewriter.</param>
/// <param name="transformer">Applied to each immediate child.</param>
/// <param name="value">The node whose children are rewritten.</param>
/// <param name="chunks">Scratch storage used to hold the child span.</param>
/// <returns><paramref name="value"/> unchanged, or a copy with the new children.</returns>
internal static T RewriteChildrenInternal<T>(
    this IRewriter<T> rewriter,
    Func<T, T> transformer,
    T value,
    ref ChunkStack<T> chunks
)
{
    // State is threaded through as a tuple rather than captured, so the
    // lambda stays non-capturing.
    return rewriter.WithChildren(
        (span, state) =>
        {
            var (rw, oldValue, transform) = state;
            var anyChanged = false;

            for (var index = 0; index < span.Length; index++)
            {
                var original = span[index];
                var replacement = transform(original);
                span[index] = replacement;
                anyChanged |= !ReferenceEquals(replacement, original);
            }

            // Only pay for SetChildren when something was actually replaced.
            return anyChanged ? rw.SetChildren(span, oldValue) : oldValue;
        },
        (rewriter, value, transformer),
        value,
        ref chunks
    );
}
/// <summary>
/// Yields all of the nodes in the tree represented by <paramref name="value"/>, starting at the top.
///
/// <para>
/// This is a depth-first pre-order traversal.
/// </para>
///
/// <seealso cref="DescendantsAndSelf"/>
/// </summary>
/// <example>
/// <code>
/// Expr expr = new Add(
///     new Add(
///         new Lit(1),
///         new Lit(2)
///     ),
///     new Lit(3)
/// );
/// Expr[] expected = new[]
///     {
///         expr,
///         new Add(new Lit(1), new Lit(2)),
///         new Lit(1),
///         new Lit(2),
///         new Lit(3),
///     };
/// Assert.Equal(expected, rewriter.SelfAndDescendants(expr));
/// </code>
/// </example>
/// <typeparam name="T">The rewritable tree type</typeparam>
/// <param name="rewriter">The rewriter</param>
/// <param name="value">The value to traverse</param>
/// <returns>An enumerable containing all of the nodes in the tree represented by <paramref name="value"/>, starting at the top.</returns>
public static IEnumerable<T> SelfAndDescendants<T>(this IRewriter<T> rewriter, T value)
{
    // Validate eagerly, before the lazy iterator is created, so the caller
    // gets the exception at call time rather than on first enumeration.
    if (rewriter == null)
    {
        throw new ArgumentNullException(nameof(rewriter));
    }

    IEnumerable<T> Iterator()
    {
        // Explicit stack instead of recursion; seed it with the root node.
        var stack = new ChunkStack<T>();
        stack.Allocate(1)[0] = value;
        try
        {
            while (!stack.IsEmpty)
            {
                var x = stack.Pop();
                yield return(x);

                // Push this node's children onto the same stack.
                // NOTE(review): this assumes Allocate returns a span sitting
                // on top of the stack that Pop then consumes element-wise —
                // confirm against ChunkStack's contract.
                var count = rewriter.CountChildren(x);
                var span = stack.Allocate(count);
                rewriter.GetChildren(span, x);
                span.Reverse();  // pop them in left to right order
            }
        }
        finally
        {
            // Dispose runs even if the caller abandons the enumeration
            // (iterator finally blocks execute on early Dispose).
            stack.Dispose();
        }
    }
    return(Iterator());
}
// A zero-length allocation should succeed and hand back an empty span,
// and freeing it should be a valid no-op.
public void TestEmptyChunk()
{
    var chunkStack = new ChunkStack<int>();

    var emptySpan = chunkStack.Allocate(0);

    Assert.Equal(0, emptySpan.Length);
    chunkStack.Free(emptySpan);
    chunkStack.Dispose();
}
// An allocation of 513 elements (larger than a default region of 512)
// should still be satisfied in one contiguous span.
public void TestLargeChunk_Region1()
{
    var chunkStack = new ChunkStack<int>();

    var bigSpan = chunkStack.Allocate(513);

    Assert.Equal(513, bigSpan.Length);
    chunkStack.Free(bigSpan);
    chunkStack.Dispose();
}
// Allocate more single-element chunks than a single region holds, without
// freeing any of them; Dispose must reclaim everything on its own.
public void TestManySmallChunks()
{
    var chunkStack = new ChunkStack<int>();

    for (var count = 0; count < 1025; count++)
    {
        var single = chunkStack.Allocate(1);
        Assert.Equal(1, single.Length);
    }

    chunkStack.Dispose();
}
// Repeatedly pushes pairs through the appender and verifies how chunks are
// merged between the two stacks. Note how the chunk sizes double
// (2 -> 4 -> 8 -> 16) as pushes accumulate — successive pairs are merged
// recursively rather than stacked individually.
public void ShouldPushChunkPairsRecursively()
{
    var chunkStack1 = new ChunkStack<int>(
        100 * MemorySize.MB,
        new ConstantSizeCalculator<int>(sizeof(int)),
        null);
    var chunkStack2 = new ChunkStack<int>(
        100 * MemorySize.MB,
        new ConstantSizeCalculator<int>(sizeof(int)),
        null);
    var appender = new MergeSortBase<int>.ChunkStackAppender(chunkStack1, chunkStack2);

    // Pushes one pair, then asserts the chunk sizes left on each stack.
    void PushAndVerify(int[] pair, int[] expectedSizes1, int[] expectedSizes2)
    {
        appender.PushToStackRecursively(pair);
        CollectionAssert.AreEqual(expectedSizes1, chunkStack1.GetChunkSizes());
        CollectionAssert.AreEqual(expectedSizes2, chunkStack2.GetChunkSizes());
    }

    PushAndVerify(new int[] { 1, 2 }, new int[] { 2 }, new int[] { });
    PushAndVerify(new int[] { 3, 4 }, new int[] { }, new int[] { 4 });
    PushAndVerify(new int[] { 5, 6 }, new int[] { 2 }, new int[] { 4 });
    PushAndVerify(new int[] { 7, 8 }, new int[] { 8 }, new int[] { });
    PushAndVerify(new int[] { 9, 10 }, new int[] { 2, 8 }, new int[] { });
    PushAndVerify(new int[] { 11, 12 }, new int[] { 8 }, new int[] { 4 });
    PushAndVerify(new int[] { 12, 13 }, new int[] { 2, 8 }, new int[] { 4 });
    PushAndVerify(new int[] { 14, 15 }, new int[] { }, new int[] { 16 });
    PushAndVerify(new int[] { 16, 17 }, new int[] { 2 }, new int[] { 16 });
}
// Fill a region almost to the end (510 of 512), then request 3 more —
// the second allocation cannot fit in the remainder and must still succeed.
public void TestEdgeOfRegion1_DoesntFit()
{
    var chunkStack = new ChunkStack<int>();

    var first = chunkStack.Allocate(510);
    Assert.Equal(510, first.Length);

    var second = chunkStack.Allocate(3);
    Assert.Equal(3, second.Length);

    // Free in LIFO order, then dispose.
    chunkStack.Free(second);
    chunkStack.Free(first);
    chunkStack.Dispose();
}
// Fill the first region completely (512), then request an oversized chunk
// (513) that must be placed in a subsequent, larger region.
public void TestLargeChunk_Region2()
{
    var chunkStack = new ChunkStack<int>();

    var fullRegion = chunkStack.Allocate(512);
    Assert.Equal(512, fullRegion.Length);

    var oversized = chunkStack.Allocate(513);
    Assert.Equal(513, oversized.Length);

    // Free in LIFO order, then dispose.
    chunkStack.Free(oversized);
    chunkStack.Free(fullRegion);
    chunkStack.Dispose();
}
/// <summary>
/// Update the immediate children of the value by applying a transformation function to each one.
/// </summary>
/// <typeparam name="T">The rewritable tree type</typeparam>
/// <param name="rewriter">The rewriter</param>
/// <param name="transformer">A transformation function to apply to each of <paramref name="value"/>'s immediate children.</param>
/// <param name="value">The old value, whose immediate children should be transformed by <paramref name="transformer"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="rewriter"/> or <paramref name="transformer"/> is null.</exception>
/// <returns>A copy of <paramref name="value"/> with updated children.</returns>
public static T RewriteChildren<T>(this IRewriter<T> rewriter, Func<T, T> transformer, T value)
{
    if (rewriter == null)
    {
        throw new ArgumentNullException(nameof(rewriter));
    }
    if (transformer == null)
    {
        throw new ArgumentNullException(nameof(transformer));
    }

    var chunks = new ChunkStack<T>();
    try
    {
        return rewriter.RewriteChildrenInternal(transformer, value, ref chunks);
    }
    finally
    {
        // BUGFIX: previously Dispose was skipped when the transformer threw,
        // leaking the chunk stack's memory. Dispose in finally, mirroring
        // the finally-dispose pattern used by SelfAndDescendants.
        chunks.Dispose();
    }
}
// Two live allocations must not alias: writing through the first span
// must leave the contents of the second span untouched.
public void TestSomeSmallChunks()
{
    var chunkStack = new ChunkStack<int>();

    var first = chunkStack.Allocate(1);
    Assert.Equal(1, first.Length);

    var second = chunkStack.Allocate(2);
    Assert.Equal(2, second.Length);

    // Snapshot the second span, mutate the first, and check for overlap.
    var snapshot = second.ToArray();
    first[0]++;
    Assert.Equal(snapshot, second.ToArray());

    chunkStack.Free(second);
    chunkStack.Free(first);
    chunkStack.Dispose();
}
// With the first region full (512) and the second nearly full (510),
// a 2-element request exactly fits in the second region's remainder.
public void TestEdgeOfRegion2_Fits()
{
    var chunkStack = new ChunkStack<int>();

    var first = chunkStack.Allocate(512);
    Assert.Equal(512, first.Length);

    var second = chunkStack.Allocate(510);
    Assert.Equal(510, second.Length);

    var third = chunkStack.Allocate(2);
    Assert.Equal(2, third.Length);

    // Free in LIFO order, then dispose.
    chunkStack.Free(third);
    chunkStack.Free(second);
    chunkStack.Free(first);
    chunkStack.Dispose();
}
/// <summary>
/// Runs <paramref name="action"/> over a span containing the immediate
/// children of <paramref name="value"/>. Small families (&lt;= 4 children)
/// take a fast path that avoids the chunk allocator entirely.
/// </summary>
/// <typeparam name="T">The rewritable tree type.</typeparam>
/// <typeparam name="R">The action's result type.</typeparam>
/// <param name="rewriter">The rewriter.</param>
/// <param name="action">Receives the span of children.</param>
/// <param name="value">The node whose children are gathered.</param>
/// <param name="chunks">Scratch storage backing the child span.</param>
/// <returns>The result of <paramref name="action"/>.</returns>
internal static R WithChildren<T, R>(
    this IRewriter<T> rewriter,
    SpanFunc<T, R> action,
    T value,
    ref ChunkStack<T> chunks
)
{
    var count = rewriter.CountChildren(value);
    if (count <= 4)
    {
        return WithChildren_Fast(rewriter, action, value, count);
    }

    var span = chunks.Allocate(count);
    try
    {
        rewriter.GetChildren(span, value);
        return action(span);
    }
    finally
    {
        // BUGFIX: previously Free was skipped when the action threw, leaving
        // the chunk stack unbalanced for subsequent allocations. Freeing in
        // finally preserves the LIFO allocate/free discipline on all paths.
        chunks.Free(span);
    }
}
/// <summary>
/// Sorts the lines of <paramref name="inputFileName"/> and writes the sorted
/// result to <paramref name="outputFileName"/>, spilling intermediate chunks
/// to temporary storage when they exceed the in-memory budget.
/// </summary>
/// <param name="inputFileName">Path of the file to sort.</param>
/// <param name="outputFileName">Path the sorted output is written to.</param>
public void Sort(string inputFileName, string outputFileName)
{
    using (var fileStream = FileWithBuffer.OpenRead(inputFileName, _fileBuffer))
    using (var targetChunkStorage = CreateTargetFileStorage(outputFileName))
    using (var tempChunkStackFactory = new TempChunkStackFactory(outputFileName, _fileBuffer, _streamBuffer, _memoryBuffer))
    {
        // NOTE: removed unused local `fileSize` (read from fileStream.Length
        // but never consumed).
        var chunkStack = new ChunkStack<FileLine>(
            _memoryBuffer,
            new FileLineSizeCalculator(),
            targetChunkStorage);
        var sortMethod = _sortMethodFactory.CreateSortMethod(chunkStack, tempChunkStackFactory);

        var inputFileLines = new FileLineReader(fileStream, _streamBuffer);
        var sortedCollection = sortMethod.Sort(inputFileLines);

        // The sort may finish with data still buffered in memory; flush it
        // to the target storage when the result exposes a chunk reference.
        if (sortedCollection is IChunkReference<FileLine> chunkReference)
        {
            chunkReference.Flush(targetChunkStorage);
        }
    }
}
/// <summary>
/// Scores a line of nested chunk characters built from ()[]{}&lt;&gt;.
/// Returns the corruption score of the first mismatched closing character
/// (3, 57, 1197 or 25137), or 0 when the line is not corrupted. For
/// non-corrupted lines the base-5 autocomplete score of the remaining
/// unclosed openers is appended to <c>CompScores</c>.
/// </summary>
/// <param name="line">The line of chunk characters to score.</param>
/// <returns>The corruption score, or 0 if the line is merely incomplete.</returns>
public long SyntaxScore(string line)
{
    long score = 0;
    ChunkStack.Clear();

    // Push openers; match each closer against the most recent opener.
    // Iterating the string directly (no ToCharArray copy) and grouping the
    // opener cases removes the duplication of the original switch.
    // NOTE(review): a closer arriving on an empty stack throws from Pop(),
    // same as before — assumed not to occur in valid input; confirm.
    foreach (var ch in line)
    {
        switch (ch)
        {
            case '(':
            case '[':
            case '{':
            case '<':
                ChunkStack.Push(ch);
                break;
            case ')':
                if (ChunkStack.Pop() != '(') { score += 3; }
                break;
            case ']':
                if (ChunkStack.Pop() != '[') { score += 57; }
                break;
            case '}':
                if (ChunkStack.Pop() != '{') { score += 1197; }
                break;
            case '>':
                if (ChunkStack.Pop() != '<') { score += 25137; }
                break;
            default:
                break;
        }

        if (score > 0)
        {
            break; // corrupted line: only the first bad closer counts
        }
    }

    if (score == 0)
    {
        // Incomplete (or fully balanced) line: score the completion by
        // popping leftover openers, base 5 with ( [ { < worth 1 2 3 4.
        // A fully balanced line records a completion score of 0.
        long compScore = 0;
        while (ChunkStack.Count > 0)
        {
            switch (ChunkStack.Pop())
            {
                case '(': compScore = 5 * compScore + 1; break;
                case '[': compScore = 5 * compScore + 2; break;
                case '{': compScore = 5 * compScore + 3; break;
                case '<': compScore = 5 * compScore + 4; break;
                default: break;
            }
        }
        CompScores.Add(compScore);
    }

    return score;
}