private async Task RunAsync() {
   // Batching reader loop: sleep until at least one job is signalled, drain the
   // queue into one batch, issue a single bulk read, then complete each
   // caller's result box.
   for (;;) {
      await jobSignal.WaitAsync().ConfigureAwait(false);

      var pendingBoxesByKey = new SCG.Dictionary<K, AsyncBox<Entry<K, V>>>();
      var waiter = new SpinWait();
      do {
         // The signal can fire slightly before the producer's enqueue becomes
         // visible, so spin until the matching job actually appears.
         Tuple<K, AsyncBox<Entry<K, V>>> job;
         while (!jobQueue.TryDequeue(out job)) {
            waiter.SpinOnce();
         }
         pendingBoxesByKey.Add(job.Item1, job.Item2);
      } while (jobSignal.TryTake());

      // One bulk fetch for the whole batch, then fan results back out to the
      // waiting boxes.
      var results = await hitler.GetManyAsync(new HashSet<K>(pendingBoxesByKey.Keys)).ConfigureAwait(false);
      foreach (var kvp in results) {
         pendingBoxesByKey[kvp.Key].SetResult(kvp.Value);
      }
   }
}
public void HandleData(MemBlock b, ISender return_path, object state) {
   // Header layout: [crc32:4][id:4][block:2][payload...]
   uint crc32 = (uint)NumberSerializer.ReadInt(b, 0);
   int id = NumberSerializer.ReadInt(b, 4);
   ushort block = (ushort)NumberSerializer.ReadShort(b, 8);
   MemBlock data = b.Slice(10);

   var cachekey = new Triple<uint, int, ushort>(crc32, id, block);
   MemBlock packet = null;
   lock (_sync) {
      // A repeated (crc32, id, block) triple is a duplicate and is ignored.
      if (!_fragments.Contains(cachekey)) {
         _fragments.Add(cachekey, data);
         var fc_key = new Pair<uint, int>(crc32, id);
         Fragments this_fc;
         if (!_frag_count.TryGetValue(fc_key, out this_fc)) {
            this_fc = new Fragments();
            _frag_count.Add(fc_key, this_fc);
         }
         if (this_fc.AddBlock(block)) {
            // That was the last missing block: reassemble and clear the cache.
            packet = DecodeAndClear(crc32, id, (ushort)this_fc.Total);
         }
      }
   }
   // Dispatch outside the lock so we never hold _sync during the upcall.
   if (packet != null) {
      Handle(packet, new FragmentingSender(DEFAULT_SIZE, return_path));
   }
}
private SCG.Dictionary<int, bool[]> MakeSubSubSubDictionary(int n) {
   // Builds n entries: key i maps to an i-element bool array whose values are
   // produced by Util.Generate with an even-parity predicate.
   var dictionary = new SCG.Dictionary<int, bool[]>();
   for (var key = 0; key < n; key++) {
      dictionary.Add(key, Util.Generate(key, index => index % 2 == 0));
   }
   return dictionary;
}
SCG.Dictionary<int, string> GetTiller() {
   // 1000 entries keyed by the rehash of 1000..1999; each value encodes the
   // hash's low five bits.
   var tiller = new SCG.Dictionary<int, string>();
   for (int seed = 1000; seed < 2000; ++seed) {
      var hash = Hasher.Rehash(seed);
      unchecked {
         tiller.Add((int)hash, "STR:" + (hash & 31).ToString());
      }
   }
   return tiller;
}
internal PackageInfo LoadPackage(Assembly asm) {
   // Reads the package metadata out of the assembly's location and registers
   // it in the loaded-package table. Throws if the archive cannot be read.
   string path = asm.Location;
   int flags = GET_ACTIVITIES | GET_CONFIGURATIONS | GET_SERVICES;

   PackageInfo pkg = getPackageArchiveInfo(path, flags);
   if (pkg == null) {
      throw new RuntimeException("Failed to read package '{0}'", path);
   }

   pkg.applicationInfo.sourceDir = path;
   var info = new XobotPackageInfo(asm, pkg);
   loaded_packages.Add(pkg.packageName, info);
   return pkg;
}
public void NestedArraysTest() {
   // Round-trips a Dictionary<int, float[][]> through the POF serializer and
   // verifies the deserialized clone matches the original key- and element-wise.
   var dict = new SCG.Dictionary<int, float[][]>();
   var random = new Random(0); // fixed seed => deterministic fixture
   for (var i = 0; i < 10; i++) {
      dict.Add(i, Util.Generate(i, x => Util.Generate(x, y => (float)random.NextDouble())));
   }

   var context = new PofContext();
   context.RegisterPortableObjectType(1, typeof(Wrapper));
   var serializer = new PofSerializer(context);

   using (var ms = new MemoryStream()) {
      using (var writer = new BinaryWriter(ms, Encoding.UTF8, true)) {
         serializer.Serialize(writer, new Wrapper(dict));
      }
      ms.Position = 0;
      Console.WriteLine(ms.ToArray().ToHex()); // diagnostic dump of the wire bytes

      using (var reader = new BinaryReader(ms, Encoding.UTF8, true)) {
         var wrapperCopy = (Wrapper)serializer.Deserialize(reader);
         var dictClone = (SCG.Dictionary<int, float[][]>)wrapperCopy.Value;
         AssertTrue(new ICL.HashSet<int>(dict.Keys).SetEquals(dictClone.Keys));
         // FIX: removed a dead `random = new Random();` reseed here — the value
         // was never read again, and an unseeded Random would have been
         // nondeterministic had it ever been used.
         // NOTE(review): SetEquals on float arrays ignores order and duplicate
         // multiplicity — assumed acceptable for this fixture; confirm.
         for (var i = 0; i < 10; i++) {
            var arr = dict[i];
            for (var j = 0; j < arr.Length; j++) {
               AssertTrue(new ICL.HashSet<float>(arr[j]).SetEquals(dictClone[i][j]));
            }
         }
      }
   }
}
Test_IUniqueKeyedCollectionRSG_AsImplicitUniqueKeyedCollection() {
   // Wraps an explicit keyed collection so keys are derived from the items
   // themselves (here: the item's decimal string form).
   Print( "Make an implicit collection out of an explicit one" );
   IImplicitUniqueKeyedCollectionRSG< string, int > c =
      new SCG.Dictionary< string, int >()
         .AsHalfdecentCollection()
         .AsImplicitUniqueKeyedCollection( i => i.ToString() );

   Print( "Add() an item and check" );
   c.Add( 1 );
   Assert( c.Count == 1 );
   Assert( c.Contains( "1" ) );
   Assert( c.Get( "1" ) == 1 );

   Print( "Add() another item and check" );
   c.Add( 2 );
   Assert( c.Count == 2 );
   Assert( c.Contains( "2" ) );
   Assert( c.Get( "2" ) == 2 );

   Print( "Remove() an item and check" );
   c.Remove( "1" );
   Assert( c.Count == 1 );
   Assert( c.Contains( "2" ) );
   Assert( c.Get( "2" ) == 2 );

   // Re-adding 2 derives the key "2", which already exists.
   Print( "Add() item with duplicate key throws RTypeException" );
   Expect(
      e => RTypeException.Match< NonExistingKeyIn< string, int > >( e ),
      () => c.Add( 2 ) );
}
Test_CollectionFromSystemDictionaryAdapter() {
   // Exercises the adapter that presents an SCG dictionary as a unique-keyed
   // Halfdecent collection: Add, Replace, Remove, and failing Get.
   Print( "Adapt a new dictionary" );
   IUniqueKeyedCollectionRCSG< string, int > c =
      new SCG.Dictionary< string, int >()
         .AsHalfdecentCollection();

   Print( "Check that it's empty" );
   Assert( c.Count == 0 );

   Print( ".Add() and check" );
   c.Add( "1", 1 );
   Assert( c.Count == 1 );
   Assert( c.Contains( "1" ) );
   Assert( c.Get( "1" ) == 1 );

   Print( ".Add() and check" );
   c.Add( "2", 2 );
   Assert( c.Count == 2 );
   Assert( c.Contains( "2" ) );
   Assert( c.Get( "2" ) == 2 );

   Print( ".Add() and check" );
   c.Add( "3", 3 );
   Assert( c.Count == 3 );
   Assert( c.Contains( "3" ) );
   Assert( c.Get( "3" ) == 3 );

   Print( ".Replace() and check" );
   c.Replace( "2", 22 );
   Assert( c.Count == 3 );
   Assert( c.Contains( "2" ) );
   Assert( c.Get( "2" ) == 22 );

   Print( ".Remove() and check" );
   c.Remove( "2" );
   Assert( c.Count == 2 );
   Assert( !c.Contains( "2" ) );

   // Fetching the removed key must fail the ExistingKeyIn check on `key`.
   Expect(
      e => RTypeException.Match(
         e,
         (vr,f) => vr.Equals( f.Down().Parameter( "key" ) ),
         rt => rt.Equals( new ExistingKeyIn< string, int >( c ) ) ),
      () => c.Get( "2" ) );
}
/// <summary>
/// Returns the maximum stack depth required by these CIL instructions.
/// Partitions the instruction buffer into basic blocks, computes each block's
/// stack delta and local maximum, links the blocks into a flow graph
/// (including exception-handler entry points), then traverses the graph to
/// find the overall maximum.
/// </summary>
/// <returns>The integer value of the stack depth.</returns>
public int GetMaxStackDepthRequired() {
    // No instructions => no stack required.
    if (tide == 0) return 0;

    // Store the code blocks we find
    SCG.List<CodeBlock> codeBlocks = new SCG.List<CodeBlock>();
    // Lookup from a label to the block it starts; used to resolve branch targets.
    SCG.Dictionary<CILLabel, CodeBlock> cbTable = new SCG.Dictionary<CILLabel, CodeBlock>();
    // Exception-handler entry blocks; each one is an extra traversal root.
    SCG.List<CodeBlock> extraStartingBlocks = new SCG.List<CodeBlock>();

    // Start a default code block
    CodeBlock codeBlock = new CodeBlock(this);
    codeBlock.StartIndex = 0;

    //
    // Identify the code blocks
    //
    for (int i = 0; i < tide; i++) {
        /* Handling the tail instruction:
         * The tail instruction has not been handled even though
         * it indicates the end of a code block is coming. The
         * reason for this is because any valid tail instruction
         * must be followed by a call* instruction and then a ret
         * instruction. Given a ret instruction must be the second
         * next instruction anyway it has been decided to just let
         * the end block be caught then.
         */

        // If we reach a branch instruction or a switch instruction
        // then end the current code block inclusive of the instruction.
        if ((buffer[i] is BranchInstr) || (buffer[i] is SwitchInstr)) {
            // Close the old block
            codeBlock.EndIndex = i;
            if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                codeBlocks.Add(codeBlock);

            // Open a new block
            codeBlock = new CodeBlock(this);
            codeBlock.StartIndex = i + 1;

        // If we reach a label then we need to start a new
        // code block as the label is an entry point.
        } else if (buffer[i] is CILLabel) {
            // Close the old block (exclusive of the label itself).
            codeBlock.EndIndex = i - 1;
            if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                codeBlocks.Add(codeBlock);

            // Open a new block starting at the label.
            codeBlock = new CodeBlock(this);
            codeBlock.StartIndex = i;

            // Set this label as the entry point for the code block
            codeBlock.EntryLabel = (CILLabel)buffer[i];
            // ... AND list it in the label-to-block dictionary.
            cbTable.Add(codeBlock.EntryLabel, codeBlock);

        // Check for the ret, throw, rethrow, or jmp instruction as they also end a block
        } else if (buffer[i] is Instr) {
            if (
                (((Instr)buffer[i]).GetOp() == Op.ret) ||
                (((Instr)buffer[i]).GetOp() == Op.throwOp) ||
                (((Instr)buffer[i]).GetOp() == Op.rethrow) ||
                ((buffer[i] is MethInstr) && (((MethInstr)buffer[i]).GetMethodOp() == MethodOp.jmp))
            ) {
                // Close the old block
                codeBlock.EndIndex = i;
                if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                    codeBlocks.Add(codeBlock);

                // Open a new block
                // In theory this should never happen but just in case
                // someone feels like adding dead code it is supported.
                codeBlock = new CodeBlock(this);
                codeBlock.StartIndex = i + 1;
            }
        }
    }

    // Close the last block
    codeBlock.EndIndex = tide - 1;
    if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
        codeBlocks.Add(codeBlock);
    codeBlock = null;

    // If no code blocks were produced at all, the method needs no stack.
    if (codeBlocks.Count == 0) return 0;

    //
    // Loop through each code block and calculate the delta distance
    //
    for (int j = 0; j < codeBlocks.Count; j++) {
        CodeBlock block = codeBlocks[j];

        int maxDepth = 0;
        int currentDepth = 0;

        // Loop through each instruction to work out the max depth
        for (int i = block.StartIndex; i <= block.EndIndex; i++) {
            // Get the depth after the next instruction
            currentDepth += buffer[i].GetDeltaDistance();

            // If the new current depth is greater than the maxDepth, adjust maxDepth.
            if (currentDepth > maxDepth)
                maxDepth = currentDepth;
        }

        // Set the depth of the block
        block.MaxDepth = maxDepth;
        block.DeltaDistance = currentDepth;

        //
        // Link up the next blocks
        //

        // If the block ends with a branch statement set the jump and fall through.
        if (buffer[block.EndIndex] is BranchInstr) {
            BranchInstr branchInst = (BranchInstr)buffer[block.EndIndex];

            // If this is not a "br" or "br.s" (i.e. the branch is conditional),
            // then set the fall through code block too.
            if ((branchInst.GetBranchOp() != BranchOp.br) &&
                (branchInst.GetBranchOp() != BranchOp.br_s))
                // If there is a following code block set it as the fall through
                if (j < (codeBlocks.Count - 1))
                    block.NextBlocks.Add(codeBlocks[j + 1]);

            // Set the code block we are jumping to
            CodeBlock cb = null;
            cbTable.TryGetValue(branchInst.GetDest(), out cb);
            if (cb == null)
                throw new Exception("Missing Branch Label");
            block.NextBlocks.Add(cb);

        // If the block ends in a switch instruction work out the possible next blocks
        } else if (buffer[block.EndIndex] is SwitchInstr) {
            SwitchInstr switchInstr = (SwitchInstr)buffer[block.EndIndex];

            // If there is a following code block set it as the fall through
            if (j < (codeBlocks.Count - 1))
                block.NextBlocks.Add(codeBlocks[j + 1]);

            // Add each destination block
            foreach (CILLabel label in switchInstr.GetDests()) {
                // Check all of the code blocks to find the jump destination
                CodeBlock cb = null;
                cbTable.TryGetValue(label, out cb);
                if (cb == null)
                    throw new Exception("Missing Case Label");
                block.NextBlocks.Add(cb);
            }

        // So long as the block doesn't end with a terminating instruction
        // like ret or throw, just fall through to the next block.
        } else if (!IsTerminatingInstruction(buffer[block.EndIndex])) {
            // If there is a following code block set it as the fall through
            if (j < (codeBlocks.Count - 1))
                block.NextBlocks.Add(codeBlocks[j + 1]);
        }
    }

    //
    // Join up any exception blocks
    //
    if (exceptions != null) {
        foreach (TryBlock tryBlock in exceptions) {
            // Try to find the code block where this try block starts.
            // NOTE(review): if the label is absent from cbTable, tryCodeBlock
            // stays null and the next line throws NullReferenceException —
            // presumably try-block labels are always registered; confirm.
            CodeBlock tryCodeBlock;
            cbTable.TryGetValue(tryBlock.Start, out tryCodeBlock);

            // Declare that the entry to this code block must be empty
            tryCodeBlock.RequireEmptyEntry = true;

            // Work with each of the handlers
            foreach (HandlerBlock hb in tryBlock.GetHandlers()) {
                // Find the code block where this handler block starts.
                CodeBlock handlerCodeBlock;
                cbTable.TryGetValue(hb.Start, out handlerCodeBlock);

                // If the code block is a catch or filter block increment the delta
                // distance by 1. This is to factor in the exception object that will
                // be secretly placed on the stack by the runtime engine.
                // However, this also means that the MaxDepth is up by one also!
                if (hb is Catch || hb is Filter) {
                    handlerCodeBlock.DeltaDistance++;
                    handlerCodeBlock.MaxDepth++;
                }

                // If the code block is a filter block increment the delta distance by 1
                // This is to factor in the exception object that will be placed on the stack.
                // (Deactivated: the Catch/Filter case above already accounts for it.)
                // if (hb is Filter) handlerCodeBlock.DeltaDistance++;

                // Add this handler to the list of starting places
                extraStartingBlocks.Add(handlerCodeBlock);
            }
        }
    }

    //
    // Traverse the code blocks and get the depth
    //

    // Get the max depth at the starting entry point
    int finalMaxDepth = this.TraverseMaxDepth(codeBlocks[0]);

    // Check the additional entry points
    // If the additional points have a greater depth update the max depth
    foreach (CodeBlock cb in extraStartingBlocks) {
        // int tmpMaxDepth = cb.TraverseMaxDepth();
        int tmpMaxDepth = this.TraverseMaxDepth(cb);
        if (tmpMaxDepth > finalMaxDepth)
            finalMaxDepth = tmpMaxDepth;
    }

    // Return the max depth we have found
    return finalMaxDepth;
}
public static bool UnsequencedEquals <T>(this ICollection <T> first, ICollection <T> second, SCG.IEqualityComparer <T> equalityComparer = null) {
   // Two collections are unsequenced-equal when they contain the same items
   // with the same multiplicities, irrespective of order.
   // !@ Ensures(Result <bool>() == first.UnsequenceEqual(second, equalityComparer));

   // Same reference (including two nulls) is trivially equal.
   if (ReferenceEquals(first, second)) {
      return true;
   }
   if (first == null || second == null) {
      return false;
   }
   if (first.Count != second.Count) {
      return false;
   }

   equalityComparer = equalityComparer ?? SCG.EqualityComparer<T>.Default;

   // Count multiplicities from `second`, then consume them with `first`.
   // This keeps the comparison O(n) instead of the naive O(n^2).
   // TODO: Use C6 version (HashBag<T>)
   var multiplicities = new SCG.Dictionary<T, int>(equalityComparer);
   foreach (var item in second) {
      int count;
      multiplicities.TryGetValue(item, out count); // count defaults to 0 when absent
      multiplicities[item] = count + 1;
   }
   foreach (var item in first) {
      int count;
      if (!multiplicities.TryGetValue(item, out count) || count == 0) {
         // Item missing from `second`, or already fully consumed.
         return false;
      }
      multiplicities[item] = count - 1;
   }
   return true;
}