Example #1
        public static void PopulateCollections25_25_50PctUnique(int maxN, out int[] uniqueArray, out int[] mixedArray,
                                                                SCG.HashSet <int> h, FastHashSet <int> f = null, C5.HashSet <int> c5 = null, SCG.SortedSet <int> sortedSet = null, SCG.List <int> lst = null)
        {
            uniqueArray = new int[maxN];
            mixedArray  = new int[maxN];

            Random rand = new Random(89);

            BenchUtil.PopulateIntArray(uniqueArray, rand, int.MinValue, int.MaxValue, 1.0); // uniqueArray should contain 100% unique values

            int uniqueValuesCount = maxN / 2;                                               // this should produce a mixedArray with 50% unique values (the other 50% are duplicates), all of which also appear in uniqueArray; e.g. 1, 1, 2, 2

            if (uniqueValuesCount == 0)
            {
                uniqueValuesCount = 1;
            }
            BenchUtil.PopulateIntArrayFromUniqueArray(mixedArray, rand, uniqueArray, uniqueValuesCount);
            BenchUtil.PopulateIntArrayAtRandomIndices(mixedArray, rand, int.MinValue, int.MaxValue, maxN - uniqueValuesCount);

            if (h != null)
            {
                for (int i = 0; i < maxN; i++)
                {
                    h.Add(uniqueArray[i]);
                }
            }

            if (f != null)
            {
                for (int i = 0; i < maxN; i++)
                {
                    f.Add(uniqueArray[i]);
                }
            }

            if (c5 != null)
            {
                for (int i = 0; i < maxN; i++)
                {
                    c5.Add(uniqueArray[i]);
                }
            }

            if (sortedSet != null)
            {
                for (int i = 0; i < maxN; i++)
                {
                    sortedSet.Add(uniqueArray[i]);
                }
            }

            if (lst != null)
            {
                for (int i = 0; i < maxN; i++)
                {
                    lst.Add(uniqueArray[i]);
                }
                lst.Sort();
            }
        }
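        // Usage sketch (hypothetical harness): the FastHashSet, C5.HashSet, SortedSet and List
        // parameters are optional, so a caller that only needs the SCG collections can pass just
        // the SCG.HashSet, e.g.
        //     var scgSet = new SCG.HashSet <int>();
        //     PopulateCollections25_25_50PctUnique(10000, out int[] uniqueArray, out int[] mixedArray, scgSet);
        //     // uniqueArray now holds 10000 distinct values; mixedArray is roughly 50% duplicates of them.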
        private SCG.List <string[]> parseCSV(string path)
        {
            SCG.List <string[]> parsedData = new SCG.List <string[]>();

            try
            {
                using (StreamReader readFile = new StreamReader(path))
                {
                    string   line;
                    string[] row;

                    while ((line = readFile.ReadLine()) != null)
                    {
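                        // Note: Split(',') cannot handle quoted fields that contain embedded
                        // commas; this simple parser assumes plain comma-delimited rows.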
                        row = line.Split(',');
                        parsedData.Add(row);
                    }
                }
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }

            return(parsedData);
        }
        public Task PutAsync(K key, V value)
        {
            return(ExecCommandAsync(async cmd => {
                // Build an upsert: INSERT ... ON CONFLICT (id) DO UPDATE
                string commandBegin = $"INSERT INTO {tableName} (";
                string commandLeftMiddle = ") VALUES (";
                string commandRightMiddle = ") ON CONFLICT (id) DO UPDATE SET (";
                string commandRightRightMiddle = ") = (";
                string commandEnd = $") WHERE {tableName}.id=@id";
                var updatedColumnNames = new SCG.List <string>();

                var properties = typeof(V).GetProperties();
                foreach (var p in properties)
                {
                    var propertyValue = p.GetValue(value);
                    var columnName = p.Name.ToLower();

                    if (columnName == "id")
                    {
                        Trace.Assert(object.Equals(key, propertyValue));
                    }
                    else
                    {
                        if (columnName == "created" || columnName == "updated")
                        {
                            propertyValue = DateTime.Now;
                        }

                        var param = cmd.CreateParameter();
                        param.ParameterName = columnName;
                        param.Value = propertyValue ?? DBNull.Value;
                        cmd.Parameters.Add(param);
                        updatedColumnNames.Add(columnName);
                    }
                }

                var idParam = cmd.CreateParameter();
                idParam.ParameterName = "id";
                idParam.Value = key;
                cmd.Parameters.Add(idParam);

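                // For illustration (assuming V exposes Id, Name, Created and Updated properties),
                // the generated command text would look roughly like:
                //   INSERT INTO {tableName} (name, created, updated, id)
                //   VALUES (@name, @created, @updated, @id)
                //   ON CONFLICT (id) DO UPDATE SET (name, created, updated) = (@name, @created, @updated)
                //   WHERE {tableName}.id=@id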
                cmd.CommandText = commandBegin +
                                  updatedColumnNames.Concat("id").Join(", ") +
                                  commandLeftMiddle +
                                  updatedColumnNames.Concat("id").Select(x => $"@{x}").Join(", ") +
                                  commandRightMiddle +
                                  updatedColumnNames.Join(", ") +
                                  commandRightRightMiddle +
                                  updatedColumnNames.Select(x => $"@{x}").Join(", ") +
                                  commandEnd;

                var rowsAffected = await cmd.ExecuteNonQueryAsync().ConfigureAwait(false);
                Trace.Assert(1 == rowsAffected);
            }));
        }
        private Task UpdateByDiffHelperAsync(K key, V existing, V updated)
        {
            return(ExecCommandAsync(async cmd => {
                // Build an UPDATE that touches only the columns whose values actually changed
                string commandBegin = $"UPDATE {tableName} SET (";
                string commandMiddle = ") = (";
                string commandEnd = $") WHERE {tableName}.id=@id";
                var updatedColumnNames = new SCG.List <string>();

                var properties = typeof(V).GetProperties();
                foreach (var p in properties)
                {
                    var columnName = p.Name.ToLower();
                    if (columnName == "updated")
                    {
                        p.SetValue(updated, DateTime.Now);
                    }

                    if (object.Equals(p.GetValue(existing), p.GetValue(updated)))
                    {
                        continue;
                    }

                    var propertyValue = p.GetValue(updated);

                    if (columnName == "id")
                    {
                        throw new InvalidStateException();
                    }
                    else
                    {
                        var param = cmd.CreateParameter();
                        param.ParameterName = columnName;
                        param.Value = propertyValue ?? DBNull.Value;
                        cmd.Parameters.Add(param);
                        updatedColumnNames.Add(columnName);
                    }
                }

                var idParam = cmd.CreateParameter();
                idParam.ParameterName = "id";
                idParam.Value = key;
                cmd.Parameters.Add(idParam);

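                // For illustration (assuming only Name changed, plus the Updated timestamp column,
                // which is refreshed to DateTime.Now), the generated command text would look
                // roughly like:
                //   UPDATE {tableName} SET (name, updated, id) = (@name, @updated, @id)
                //   WHERE {tableName}.id=@id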
                cmd.CommandText = commandBegin +
                                  updatedColumnNames.Concat("id").Join(", ") +
                                  commandMiddle +
                                  updatedColumnNames.Concat("id").Select(x => $"@{x}").Join(", ") +
                                  commandEnd;

                var rowsModified = await cmd.ExecuteNonQueryAsync().ConfigureAwait(false);
                Trace.Assert(1 == rowsModified);
            }));
        }
Example #5
            /// <summary></summary>
            public override object Read(object target, Package package, System.IO.BinaryReader reader, long end)
            {
                var count = reader.ReadInt32();
                var list  = new SCG.List <Reference>(count);

                while (count-- > 0)
                {
                    list.Add(package.ReadReference(reader));
                }
                return(list);
            }
Example #6
        public static async Task <SCG.List <ICacheFacade <K, V> > > CreateCluster <K, V>(int cohortCount, Func <CacheConfiguration <K, V> > configurationFactory = null)
        {
            configurationFactory = configurationFactory ?? (() => new CacheConfiguration <K, V>("my-cache"));
            var cacheFacades = new SCG.List <ICacheFacade <K, V> >();

            for (var i = 0; i < cohortCount; i++)
            {
                cacheFacades.Add(await CreateCohortAsync <K, V>(i, configurationFactory()).ConfigureAwait(false));
            }
            return(cacheFacades);
        }
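        // Usage sketch (hypothetical key/value types):
        //   var cluster = await CreateCluster <Guid, string>(cohortCount: 3);
        //   // cluster.Count == 3; each facade was created by CreateCohortAsync with the shared
        //   // "my-cache" default configuration (or one supplied via configurationFactory).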
Example #7
        void submitCDRJob(int tx, int ty)
        {
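            // Combine is assumed to pack the two 32-bit tile coordinates into a single 64-bit key
            // (e.g. ((long)tx << 32) | (uint)ty), so each (tx, ty) pair is tracked exactly once.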
            long key = Combine(tx, ty);

            if (!processedTiles.Contains(key))
            {
                //Debug.Log("Submitting new CDR job[" + tx + "," + ty + "]");
                cdrJobQueue.Add(new KeyValuePair <int, int>(tx, ty));
                processedTiles.Add(key);
            }
        }
        internal override void AddOneChild(Atk.Object child)
        {
            base.AddOneChild(child);

            RadioButton rad = child as RadioButton;

            if (rad == null)
            {
                return;
            }

            RadioButtons.Add(rad);
        }
Example #9
 private void processJobAdds()
 {
     while (true)
     {
         NifLoadJob job;
         lock (jobsToAdd)
         {
             if (jobsToAdd.IsEmpty)
             {
                 break;
             }
             job = jobsToAdd.Dequeue();
         }
         Vector3 pos    = job.parentPos;
         float[] floatf = new float[] { pos.x, pos.z };
         SCG.List <NifLoadJob> nList;
         if (job.filename.Contains("terrain"))
         {
             lock (terraintree)
             {
                 if (!this.terraintree.TryFindValueAt(floatf, out nList))
                 {
                     nList = new SCG.List <NifLoadJob>();
                     nList.Add(job);
                     this.terraintree.Add(floatf, nList);
                 }
                 else
                 {
                     nList.Add(job);
                 }
             }
         }
         else
         {
             lock (postree)
             {
                 if (!this.postree.TryFindValueAt(floatf, out nList))
                 {
                     nList = new SCG.List <NifLoadJob>();
                     nList.Add(job);
                     this.postree.Add(floatf, nList);
                 }
                 else
                 {
                     nList.Add(job);
                 }
             }
         }
     }
 }
        public Task <Entry <K, V> > InsertAsync(V item)
        {
            return(ExecCommandAsync(async cmd => {
                // Build an INSERT that returns the newly stored row
                var commandStart = $"INSERT INTO {tableName} (";
                var commandMiddle = ") VALUES (";
                var commandEnd = ") RETURNING *";
                var insertedColumnNames = new SCG.List <string>();

                foreach (var property in typeof(V).GetProperties())
                {
                    var columnName = property.Name.ToLower();
                    if (columnName == "id")
                    {
                        continue;
                    }

                    var propertyValue = property.GetValue(item);
                    var defaultPropertyValue = property.PropertyType.IsValueType ? Activator.CreateInstance(property.PropertyType) : null;

                    if (!Equals(propertyValue, defaultPropertyValue))
                    {
                        insertedColumnNames.Add(columnName);

                        var parameter = cmd.CreateParameter();
                        parameter.ParameterName = columnName;
                        parameter.Value = propertyValue;
                        cmd.Parameters.Add(parameter);
                    }
                }
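                // For illustration (assuming Name and Created are the only non-default,
                // non-id properties set on item), the generated command text would look roughly like:
                //   INSERT INTO {tableName} (name, created) VALUES (@name, @created) RETURNING *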
                cmd.CommandText = commandStart +
                                  insertedColumnNames.Join(", ") +
                                  commandMiddle +
                                  insertedColumnNames.Select(c => $"@{c}").Join(", ") +
                                  commandEnd;

                using (var reader = await cmd.ExecuteReaderAsync().ConfigureAwait(false)) {
                    Trace.Assert(reader.HasRows);
                    var readSuccessful = await reader.ReadAsync().ConfigureAwait(false);
                    Trace.Assert(readSuccessful);

                    var entry = ReadToEntry(reader);
                    readSuccessful = await reader.ReadAsync().ConfigureAwait(false);
                    Trace.Assert(!readSuccessful);

                    return entry;
                }
            }));
        }
Example #11
        public SCG.IEnumerable<Athlete> Query(string sql)
        {
            command.CommandText = sql;
            var list = new SCG.List <Athlete>();

            using (var reader = command.ExecuteReader())
            {
                while (reader.Read())
                {
                    var at = new Athlete();

                    at.Id      = reader.GetInt32(0);
                    at.Name    = reader.GetString(1);
                    at.Surname = reader.GetString(2);
                    at.Year    = reader.GetInt32(3);
                    at.Gender  = reader.GetString(4);
                    at.Time    = reader.GetString(5);
                    list.Add(at);
                }
            }
            return(list);
        }
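        // Usage sketch (hypothetical table/SQL; the reader code above assumes the result columns
        // are ordered id, name, surname, year, gender, time):
        //   foreach (Athlete a in Query("SELECT id, name, surname, year, gender, time FROM athletes"))
        //   {
        //       Console.WriteLine(a.Name + " " + a.Surname + " - " + a.Time);
        //   }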
Example #12
            /// <summary></summary>
            public override object Read(object target, Package package, BinaryReader reader, long end)
            {
                int count = reader.ReadInt32();

                if (count == 0)
                {
                    return(null);
                }
                var result = new SCG.List <T>(count);

                for (var index = 0; index < count; index++)
                {
                    var value = new T();
                    value.Package = package;
                    value.Load(reader, end);
                    result.Add(value);
                }

                return(result);
            }
Example #13
        void processCDRQueue()
        {
            int tileX = Mathf.FloorToInt(telaraWorldCamPos.x / 256.0f);
            int tileY = Mathf.FloorToInt(telaraWorldCamPos.z / 256.0f);

            cdrJobQueue = cdrJobQueue.OrderBy(x => Vector2.Distance(new Vector2(tileX, tileY), new Vector2(x.Key, x.Value))).ToList();
            while (runningTerrainThreads < MAX_TERRAIN_THREADS && cdrJobQueue.Count() > 0)
            {
                KeyValuePair <int, int> job = cdrJobQueue[0];
                cdrJobQueue.RemoveAt(0);
                int tx = job.Key;
                int ty = job.Value;
                runningTerrainThreads++;
                //Debug.Log("Starting thread for CDR job[" + tx + "," + ty + "]");

                System.Threading.Thread m_Thread = new System.Threading.Thread(() =>
                {
                    try
                    {
                        SCG.List <ObjectPosition> objs = new SCG.List <ObjectPosition>();
                        CDRParse.doWorldTile(AssetDatabaseInst.DB, DBInst.inst, GameWorld.worldName, tx * 256, ty * 256, (p) =>
                        {
                            objs.Add(p);
                        });
                        lock (objectPositions)
                        {
                            objectPositions.AddRange(objs);
                        }
                    }
                    finally
                    {
                        runningTerrainThreads--;
                    }
                });
                m_Thread.Priority = (System.Threading.ThreadPriority)ProgramSettings.get("MAP_LOAD_THREAD_PRIORITY", (int)System.Threading.ThreadPriority.Normal);
                m_Thread.Start();
            }
        }
Example #14
        public void SortedSet_Generic_GetViewBetween_MiddleOfSet(int setLength)
        {
            if (setLength >= 3)
            {
                SCG.IComparer <T> comparer = GetIComparer() ?? Comparer <T> .Default;
                SortedSet <T>     set      = (SortedSet <T>)GenericISetFactory(setLength);
                T firstElement             = set.ElementAt(1);
                T lastElement = set.ElementAt(setLength - 2);

                SCG.List <T> expected = new SCG.List <T>(setLength - 2);
                foreach (T value in set)
                {
                    if (comparer.Compare(value, firstElement) >= 0 && comparer.Compare(value, lastElement) <= 0)
                    {
                        expected.Add(value);
                    }
                }

                SortedSet <T> view = set.GetViewBetween(firstElement, lastElement);
                Assert.Equal(expected.Count, view.Count);
                Assert.True(view.SetEquals(expected));
            }
        }
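        // Worked example: with setLength = 5 and set values {1, 2, 3, 4, 5}, firstElement = 2 and
        // lastElement = 4, so both the manually built "expected" list and GetViewBetween(2, 4)
        // should contain exactly {2, 3, 4}.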
Example #15
        private static void TestAdd1(int cLevel, int initSize, int threads, int addsPerThread)
        {
            LurchTable <int, int>  dictConcurrent = new LurchTable <int, int>(/*cLevel,*/ 1);
            IDictionary <int, int> dict           = dictConcurrent;

            int count = threads;

            using (ManualResetEvent mre = new ManualResetEvent(false))
            {
                for (int i = 0; i < threads; i++)
                {
                    int ii = i;
                    Task.Run(
                        () =>
                    {
                        for (int j = 0; j < addsPerThread; j++)
                        {
                            dict.Add(j + ii * addsPerThread, -(j + ii * addsPerThread));
                        }
                        if (Interlocked.Decrement(ref count) == 0)
                        {
                            mre.Set();
                        }
                    });
                }
                mre.WaitOne();
            }

            foreach (var pair in dict)
            {
                Assert.Equal(pair.Key, -pair.Value);
            }

            SCG.List <int> gotKeys = new SCG.List <int>();
            foreach (var pair in dict)
            {
                gotKeys.Add(pair.Key);
            }

            gotKeys.Sort();

            SCG.List <int> expectKeys = new SCG.List <int>();
            int            itemCount  = threads * addsPerThread;

            for (int i = 0; i < itemCount; i++)
            {
                expectKeys.Add(i);
            }

            Assert.Equal(expectKeys.Count, gotKeys.Count);

            for (int i = 0; i < expectKeys.Count; i++)
            {
                Assert.True(expectKeys[i].Equals(gotKeys[i]),
                            string.Format("The set of keys in the dictionary is are not the same as the expected" + Environment.NewLine +
                                          "TestAdd1(cLevel={0}, initSize={1}, threads={2}, addsPerThread={3})", cLevel, initSize, threads, addsPerThread)
                            );
            }

            // Finally, let's verify that the count is reported correctly.
            int expectedCount = threads * addsPerThread;

            Assert.Equal(expectedCount, dict.Count);
            Assert.Equal(expectedCount, dictConcurrent.ToArray().Length);
        }
Example #16
File: PERWAPI.cs  Project: nomit007/f4
        /// <summary>
        /// Returns the maximum stack depth required by these CIL instructions.
        /// </summary>
        /// <returns>The integer value of the stack depth.</returns>
        public int GetMaxStackDepthRequired()
        {
            if (tide == 0) return 0;

            // Store the code blocks we find
            SCG.List<CodeBlock> codeBlocks = new SCG.List<CodeBlock>();
            SCG.Dictionary<CILLabel, CodeBlock> cbTable = new SCG.Dictionary<CILLabel, CodeBlock>();
            SCG.List<CodeBlock> extraStartingBlocks = new SCG.List<CodeBlock>();

            // Start a default code block
            CodeBlock codeBlock = new CodeBlock(this);
            codeBlock.StartIndex = 0;

            //
            // Identify the code blocks
            //
            for (int i = 0; i < tide; i++) {

                /* Handling the tail instruction:
                 * The tail instruction has not been handled even though
                 * it indicates the end of a code block is coming.  The
                 * reason is that any valid tail instruction must be
                 * followed by a call* instruction and then a ret
                 * instruction.  Since a ret instruction must be the
                 * second-next instruction anyway, it has been decided to
                 * just let the end of the block be caught then.
                 */

                // If we reach a branch instruction or a switch instruction
                // then end the current code block inclusive of the instruction.
                if ((buffer[i] is BranchInstr) || (buffer[i] is SwitchInstr)) {

                    // Close the old block
                    codeBlock.EndIndex = i;
                    if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                        codeBlocks.Add(codeBlock);

                    // Open a new block
                    codeBlock = new CodeBlock(this);
                    codeBlock.StartIndex = i + 1;

                    // If we reach a label then we need to start a new
                    // code block as the label is an entry point.
                } else if (buffer[i] is CILLabel) {

                    // Close the old block
                    codeBlock.EndIndex = i - 1;
                    if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                        codeBlocks.Add(codeBlock);

                    // Open a new block
                    codeBlock = new CodeBlock(this);
                    codeBlock.StartIndex = i;

                    // Set this label as the entry point for the code block
                    codeBlock.EntryLabel = (CILLabel)buffer[i];
                    // ...and record it in the code-block lookup table.
                    cbTable.Add(codeBlock.EntryLabel, codeBlock);

                    // Check for the ret, throw, rethrow, or jmp instruction as they also end a block
                } else if (buffer[i] is Instr) {
                    if (
                        (((Instr)buffer[i]).GetOp() == Op.ret) ||
                        (((Instr)buffer[i]).GetOp() == Op.throwOp) ||
                        (((Instr)buffer[i]).GetOp() == Op.rethrow) ||
                        ((buffer[i] is MethInstr) && (((MethInstr)buffer[i]).GetMethodOp() == MethodOp.jmp))
                       ) {

                        // Close the old block
                        codeBlock.EndIndex = i;
                        if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                            codeBlocks.Add(codeBlock);

                        // Open a new block
                        // In theory this should never happen but just in case
                        // someone feels like adding dead code it is supported.
                        codeBlock = new CodeBlock(this);
                        codeBlock.StartIndex = i + 1;

                    }

                }

            }

            // Close the last block
            codeBlock.EndIndex = tide - 1;
            if (codeBlock.EndIndex >= codeBlock.StartIndex) // Don't add empty blocks
                codeBlocks.Add(codeBlock);
            codeBlock = null;

            // Check how many code blocks there are.  If there are no blocks, return 0.
            if (codeBlocks.Count == 0) return 0;

            //
            // Loop through each code block and calculate the delta distance
            //
            for (int j = 0; j < codeBlocks.Count; j++) {
                CodeBlock block = codeBlocks[j];

                int maxDepth = 0;
                int currentDepth = 0;

                // Loop through each instruction to work out the max depth
                for (int i = block.StartIndex; i <= block.EndIndex; i++) {

                    // Get the depth after the next instruction
                    currentDepth += buffer[i].GetDeltaDistance();

                    // If the new current depth is greater than maxDepth, update maxDepth to reflect it
                    if (currentDepth > maxDepth)
                        maxDepth = currentDepth;

                }

                // Set the depth of the block
                block.MaxDepth = maxDepth;
                block.DeltaDistance = currentDepth;

                //
                // Link up the next blocks
                //

                // If the block ends with a branch statement set the jump and fall through.
                if (buffer[block.EndIndex] is BranchInstr) {
                    BranchInstr branchInst = (BranchInstr)buffer[block.EndIndex];

                    // If this is not a "br" or "br.s" then set the fall through code block
                    if ((branchInst.GetBranchOp() != BranchOp.br) &&
                        (branchInst.GetBranchOp() != BranchOp.br_s))
                        // If there is a following code block set it as the fall through
                        if (j < (codeBlocks.Count - 1))
                            block.NextBlocks.Add(codeBlocks[j + 1]);

                    // Set the code block we are jumping to
                    CodeBlock cb = null;
                    cbTable.TryGetValue(branchInst.GetDest(), out cb);
                    if (cb == null)
                        throw new Exception("Missing Branch Label");
                    block.NextBlocks.Add(cb);

                    // If the block ends in a switch instruction work out the possible next blocks
                } else if (buffer[block.EndIndex] is SwitchInstr) {
                    SwitchInstr switchInstr = (SwitchInstr)buffer[block.EndIndex];

                    // If there is a following code block set it as the fall through
                    if (j < (codeBlocks.Count - 1))
                        block.NextBlocks.Add(codeBlocks[j + 1]);

                    // Add each destination block
                    foreach (CILLabel label in switchInstr.GetDests()) {

                        // Check all of the code blocks to find the jump destination
                        CodeBlock cb = null;
                        cbTable.TryGetValue(label, out cb);
                        if (cb == null) throw new Exception("Missing Case Label");
                        block.NextBlocks.Add(cb);

                    }

                    // So long as the block doesn't end with a terminating instruction like ret or throw, just fall through to the next block
                } else if (!IsTerminatingInstruction(buffer[block.EndIndex])) {

                    // If there is a following code block set it as the fall through
                    if (j < (codeBlocks.Count - 1))
                        block.NextBlocks.Add(codeBlocks[j + 1]);
                }

            }

            //
            // Join up any exception blocks
            //

            if (exceptions != null) {
                foreach (TryBlock tryBlock in exceptions) {

                    // Try to find the code block where this try block starts
                    CodeBlock tryCodeBlock;
                    cbTable.TryGetValue(tryBlock.Start, out tryCodeBlock);

                    // Declare that the entry to this code block must be empty
                    tryCodeBlock.RequireEmptyEntry = true;

                    // Work with each of the handlers
                    foreach (HandlerBlock hb in tryBlock.GetHandlers()) {

                        // Find the code block where this handler block starts.
                        CodeBlock handlerCodeBlock;
                        cbTable.TryGetValue(hb.Start, out handlerCodeBlock);

                        // If the code block is a catch or filter block increment the delta
                        // distance by 1. This is to factor in the exception object that will
                        // be secretly placed on the stack by the runtime engine.
                        // However, this also means that the MaxDepth is up by one also!
                        if (hb is Catch || hb is Filter)
                        {
                            handlerCodeBlock.DeltaDistance++;
                            handlerCodeBlock.MaxDepth++;
                        }

                        // If the code block is a filter block increment the delta distance by 1
                        // This is to factor in the exception object that will be placed on the stack.
                        // if (hb is Filter) handlerCodeBlock.DeltaDistance++;

                        // Add this handler to the list of starting places
                        extraStartingBlocks.Add(handlerCodeBlock);

                    }

                }
            }

            //
            // Traverse the code blocks and get the depth
            //

            // Get the max depth at the starting entry point
            int finalMaxDepth = this.TraverseMaxDepth(codeBlocks[0]);

            // Check the additional entry points
            // If the additional points have a greater depth update the max depth
            foreach (CodeBlock cb in extraStartingBlocks) {
                // int tmpMaxDepth = cb.TraverseMaxDepth();
                int tmpMaxDepth = this.TraverseMaxDepth(cb);
                if (tmpMaxDepth > finalMaxDepth) finalMaxDepth = tmpMaxDepth;
            }

            // Return the max depth we have found
            return finalMaxDepth;
        }
        /// <summary>
        /// Processes an inbound data event.
        /// This is assumed to be invoked on an IOCP thread so a goal is to do as little as possible.
        /// </summary>
        public void HandleInboundDataEvent(InboundDataEvent e, Action <InboundDataEvent> returnInboundDataEvent)
        {
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_de);
#endif

            // Deserialize inbound payloads
            SCG.List <object> payloads = new SCG.List <object>();
            try {
                using (var ms = new MemoryStream(e.Data, e.DataOffset, e.DataLength, false, true)) {
                    while (ms.Position < ms.Length)
                    {
                        payloads.Add(Deserialize.From(ms));
                    }
                }
            } catch (Exception ex) {
                if (!isShutdown)
                {
                    logger.Warn("Error at payload deserialize", ex);
                }
                return;
            }
            returnInboundDataEvent(e);
#if DEBUG
            Interlocked.Add(ref DebugRuntimeStats.in_payload, payloads.Count);
#endif

            // Categorize inbound payloads
            var acknowledgements  = new SCG.List <AcknowledgementDto>();
            var announcements     = new SCG.List <AnnouncementDto>();
            var reliablePackets   = new SCG.List <PacketDto>();
            var unreliablePackets = new SCG.List <PacketDto>();
            foreach (var payload in payloads)
            {
                if (payload is AcknowledgementDto)
                {
                    acknowledgements.Add((AcknowledgementDto)payload);
                }
                else if (payload is AnnouncementDto)
                {
                    announcements.Add((AnnouncementDto)payload);
                }
                else if (payload is PacketDto)
                {
                    // Filter packets not destined to us.
                    var packet = (PacketDto)payload;
                    if (!identity.Matches(packet.ReceiverId, IdentityMatchingScope.Broadcast))
                    {
                        tossedCounter.Increment();
                        continue;
                    }

                    // Bin into reliable vs unreliable.
                    if (packet.IsReliable())
                    {
                        reliablePackets.Add(packet);
                    }
                    else
                    {
                        unreliablePackets.Add(packet);
                    }
                }
            }

            // Process acks to prevent resends.
            foreach (var ack in acknowledgements)
            {
#if DEBUG
                Interlocked.Increment(ref DebugRuntimeStats.in_ack);
#endif
                acknowledgementCoordinator.ProcessAcknowledgement(ack);
#if DEBUG
                Interlocked.Increment(ref DebugRuntimeStats.in_ack_done);
#endif
            }

            // Process announcements as they are necessary for routing.
            foreach (var announcement in announcements)
            {
#if DEBUG
                Interlocked.Increment(ref DebugRuntimeStats.in_ann);
#endif
                HandleAnnouncement(e.RemoteInfo, announcement);
            }

            // Ack inbound reliable messages to prevent resends.
            foreach (var packet in reliablePackets)
            {
#if DEBUG
                Interlocked.Increment(ref DebugRuntimeStats.in_out_ack);
#endif
                var            ack = AcknowledgementDto.Create(packet.Id);
                RoutingContext routingContext;
                if (routingContextsByPeerId.TryGetValue(packet.SenderId, out routingContext))
                {
                    routingContext.SendAcknowledgementAsync(packet.SenderId, ack).Forget();
                }
                else
                {
                    payloadSender.BroadcastAsync(ack).Forget();
                }
#if DEBUG
                Interlocked.Increment(ref DebugRuntimeStats.in_out_ack_done);
#endif
            }

            // Test reliable packets' guids against bloom filter.
            var isNewByPacketId            = duplicateFilter.TestPacketIdsAreNew(new HashSet <Guid>(reliablePackets.Select(p => p.Id)));
            var standalonePacketsToProcess = new SCG.List <PacketDto>(unreliablePackets);
            var chunksToProcess            = new SCG.List <MultiPartChunkDto>();
            foreach (var packet in reliablePackets)
            {
                // Toss out duplicate packets
                if (!isNewByPacketId[packet.Id])
                {
                    duplicateReceivesCounter.Increment();
                    continue;
                }

                // Bin into multipart chunk vs not
                var multiPartChunk = packet.Message.Body as MultiPartChunkDto;
                if (multiPartChunk != null)
                {
                    multiPartChunksBytesReceivedAggregator.Put(multiPartChunk.BodyLength);
                    chunksToProcess.Add(multiPartChunk);
                }
                else
                {
                    standalonePacketsToProcess.Add(packet);
                }
            }

            // Kick off async standalone packet processing on the thread pool.
            foreach (var packet in standalonePacketsToProcess)
            {
                inboundMessageDispatcher.DispatchAsync(packet.Message).Forget();
            }

            // Synchronously handle multipart chunk processing.
            foreach (var chunk in chunksToProcess)
            {
                multiPartPacketReassembler.HandleInboundMultiPartChunk(chunk);
            }
        }
        public GreenC5UserControl(string greenC5Name, bool infiniteExecution, int k1, double k2)
        {
            InitializeComponent();
            Status                = "Idle";
            IsInfiniteMode        = infiniteExecution;
            GreenC5Name           = greenC5Name;
            lbName.Text           = "Instance ID: " + GreenC5Name;
            InternalDataStructure = new GreenC5 <string>();
            K1 = k1;
            K2 = k2;


            //set datastructure list by groups for validation
            SCG.List <C5DataStructure> iCollection = new SCG.List <C5DataStructure>();
            iCollection.Add(C5DataStructure.ArrayList);
            iCollection.Add(C5DataStructure.HashBag);
            iCollection.Add(C5DataStructure.HashedArrayList);
            iCollection.Add(C5DataStructure.HashedLinkedList);
            iCollection.Add(C5DataStructure.HashSet);
            iCollection.Add(C5DataStructure.LinkedList);
            iCollection.Add(C5DataStructure.SortedArray);
            iCollection.Add(C5DataStructure.TreeBag);
            iCollection.Add(C5DataStructure.TreeSet);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.ICollection, iCollection));

            SCG.List <C5DataStructure> iCollectionBag = new SCG.List <C5DataStructure>();
            iCollectionBag.Add(C5DataStructure.HashBag);
            iCollectionBag.Add(C5DataStructure.TreeBag);
            iCollectionBag.Add(C5DataStructure.ArrayList);
            iCollectionBag.Add(C5DataStructure.LinkedList);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.ICollectionBag, iCollectionBag));

            SCG.List <C5DataStructure> iCollectionSet = new SCG.List <C5DataStructure>();
            iCollectionSet.Add(C5DataStructure.HashSet);
            iCollectionSet.Add(C5DataStructure.TreeSet);
            iCollectionSet.Add(C5DataStructure.HashedArrayList);
            iCollectionSet.Add(C5DataStructure.HashedLinkedList);
            iCollectionSet.Add(C5DataStructure.SortedArray);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.ICollectionSet, iCollectionSet));

            SCG.List <C5DataStructure> iList = new SCG.List <C5DataStructure>();
            iList.Add(C5DataStructure.ArrayList);
            iList.Add(C5DataStructure.LinkedList);
            iList.Add(C5DataStructure.HashedArrayList);
            iList.Add(C5DataStructure.HashedLinkedList);
            iList.Add(C5DataStructure.SortedArray);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.IList, iList));

            SCG.List <C5DataStructure> iListBag = new SCG.List <C5DataStructure>();
            iListBag.Add(C5DataStructure.ArrayList);
            iListBag.Add(C5DataStructure.LinkedList);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.IListBag, iListBag));

            SCG.List <C5DataStructure> iListSet = new SCG.List <C5DataStructure>();
            iListSet.Add(C5DataStructure.HashedArrayList);
            iListSet.Add(C5DataStructure.HashedLinkedList);
            iListSet.Add(C5DataStructure.SortedArray);
            dataStructuresByGroup.Add(new KeyValuePair <DataStructureGroup, SCG.List <C5DataStructure> >(DataStructureGroup.IListSet, iListSet));


            //set data structure groups
            SCG.List <KeyValuePair <DataStructureGroup, C5DataStructure> > dsGroupAndWorstDataStructures = new SCG.List <KeyValuePair <DataStructureGroup, C5DataStructure> >();
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.ICollection, C5DataStructure.ArrayList));
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.ICollectionBag, C5DataStructure.LinkedList));
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.ICollectionSet, C5DataStructure.SortedArray));
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.IList, C5DataStructure.LinkedList));
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.IListBag, C5DataStructure.LinkedList));
            dsGroupAndWorstDataStructures.Add(new KeyValuePair <DataStructureGroup, C5DataStructure>(DataStructureGroup.IListSet, C5DataStructure.SortedArray));

            //cbDataStructureGroup.DisplayMember = "Key";
            //cbDataStructureGroup.ValueMember = "Value";
            cbDataStructureGroup.DataSource = dsGroupAndWorstDataStructures;


            SCG.List <KeyValuePair <string, string> > crudWorkloadPrograms = new SCG.List <KeyValuePair <string, string> >();
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Small Program #1", @"CRUD Workloads\SmallRandomApplication1.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Small Program #2", @"CRUD Workloads\SmallRandomApplication2.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Small Program #3", @"CRUD Workloads\SmallRandomApplication3.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Small Program #4", @"CRUD Workloads\SmallRandomApplication4.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #1", @"CRUD Workloads\RandomApplication1.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #2", @"CRUD Workloads\RandomApplication2.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #3", @"CRUD Workloads\RandomApplication3.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #4", @"CRUD Workloads\RandomApplication4.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #5", @"CRUD Workloads\RandomApplication5.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #6", @"CRUD Workloads\RandomApplication6.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #7", @"CRUD Workloads\RandomApplication7.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #8", @"CRUD Workloads\RandomApplication8.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #9", @"CRUD Workloads\RandomApplication9.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Simulated Program #10", @"CRUD Workloads\RandomApplication10.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("A* Path Finder - Custom", @"CRUD Workloads\AStarPathFinderProgram_Custom_Formula.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("A* Path Finder - Diagnal Shortcut", @"CRUD Workloads\AStarPathFinderProgram_DiagonalShortcut_Formula.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("A* Path Finder - Euclidean", @"CRUD Workloads\AStarPathFinderProgram_Euclidean_Formula.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("A* Path Finder - Manhatan", @"CRUD Workloads\AStarPathFinderProgram_Manhatan_Formula.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("A* Path Finder - Max Dx Dy", @"CRUD Workloads\AStarPathFinderProgram_MaxDXDY_Formula.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Genetic Algorithm - Fitness Table", @"CRUD Workloads\GA-FittnessTableProgram.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Genetic Algorithm - Next Generation", @"CRUD Workloads\GA-NextGenerationProgram.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Genetic Algorithm - This Generation", @"CRUD Workloads\GA-ThisGenerationProgram.csv"));
            crudWorkloadPrograms.Add(new KeyValuePair <string, string>("Huffman Encoding", @"CRUD Workloads\HuffmanCodingProgram.csv"));


            cbPrograms.DataSource    = new BindingSource(crudWorkloadPrograms, null);
            cbPrograms.DisplayMember = "Key";
            //cbPrograms.ValueMember = "Value";
            //set the values for the program selection dropdown list

            //set data structure options to a Static Mode selection
            cbStartDataStructure.DataSource = c5DSs;

            //Set run mode
            cbRunMode.DataSource   = Enum.GetValues(typeof(DataStructureMode));
            cbRunMode.SelectedItem = DataStructureMode.Dynamic;


            if (cbDataStructureGroup.SelectedItem != null)
            {
                KeyValuePair <DataStructureGroup, C5DataStructure> dsg = (KeyValuePair <DataStructureGroup, C5DataStructure>)cbDataStructureGroup.SelectedItem;
                CurrentDataStructureGroup         = dsg.Key;
                StartDataStructure                = dsg.Value;
                CurrentC5DataStructure            = dsg.Value;
                lbCurrentDataStructure.Text       = "Current Data Structure: " + CurrentC5DataStructure.ToString();
                cbStartDataStructure.SelectedItem = StartDataStructure;
            }

            if (cbPrograms.SelectedItem != null)
            {
                CurrentProgram = (KeyValuePair <string, string>)cbPrograms.SelectedItem;
            }

            startMode = false;
            bool ok = LoadProgram();
        }
Example #19
    private GameObject process(ObjectPosition op)
    {
        if (op is LightPosition)
        {
            GameObject lgo = new GameObject();

//#if UNITY_EDITOR
            addCDR(op, lgo);
//#endif
            LightPosition lp = (LightPosition)op;
            lgo.transform.SetParent(meshRoot.transform);
            lgo.transform.localScale    = new Vector3(op.scale, op.scale, op.scale);
            lgo.transform.localPosition = op.min;
            lgo.transform.localRotation = op.qut;

            Light light = lgo.AddComponent <Light>();
            light.type      = LightType.Point;
            light.color     = new Color(lp.r, lp.g, lp.b);
            light.intensity = lp.range;
            light.shadows   = LightShadows.Hard;
            return(lgo);
        }

        GameObject go = GameObject.Instantiate(telaraObjectPrefab, meshRoot.transform);

#if UNITY_EDITOR
        addCDR(op, go);
#endif
        if (!op.visible || (op.nifFile != null && op.nifFile.Contains("30meter.nif")))
        {
            go.tag = "invisible";
            go.SetActive(false);
            invisibleObjects.Add(go);
        }


        string name = op.nifFile;
        Assets.RiftAssets.AssetDatabase.RequestCategory category = Assets.RiftAssets.AssetDatabase.RequestCategory.NONE;
        if (name.Contains("_terrain_") || name.Contains("ocean_chunk"))
        {
            if (name.Contains("_terrain_"))
            {
                category = Assets.RiftAssets.AssetDatabase.RequestCategory.GEOMETRY;
            }
        }

        telara_obj tobj = go.GetComponent <telara_obj>();
        tobj.setProps(category);

        //go.transform.SetParent(meshRoot.transform);

        tobj.setFile(name);
        go.name = name;
        go.transform.localScale    = new Vector3(op.scale, op.scale, op.scale);
        go.transform.localPosition = op.min;
        go.transform.localRotation = op.qut;


        triggerLoad(tobj);
        return(go);
    }
        public Task BatchUpdateAsync(SCG.IReadOnlyList <PendingUpdate <K, V> > inputPendingUpdates)
        {
            return(ExecCommandAsync(async cmd => {
                var properties = typeof(V).GetProperties();
                bool hasUpdatedColumn = false;
                var pendingUpdatesByUpdatedPropertyGroup = new MultiValueDictionary <string[], PendingUpdate <K, V> >(
                    new LambdaEqualityComparer <string[]>(
                        (a, b) => a.Length == b.Length && a.Zip(b, (aElement, bElement) => aElement == bElement).All(x => x),
                        a => a.Aggregate(13, (h, x) => h * 17 + x.GetHashCode())
                        ));
                var pendingInserts = new SCG.List <PendingUpdate <K, V> >();
                foreach (var pendingUpdate in inputPendingUpdates)
                {
                    if (!pendingUpdate.Base.Exists)
                    {
                        pendingInserts.Add(pendingUpdate);
                    }
                    else
                    {
                        SortedSet <string> updatedProperties = new SortedSet <string>();
                        foreach (var p in properties)
                        {
                            var columnName = p.Name.ToLower();
                            if (columnName == "updated")
                            {
                                hasUpdatedColumn = true;
                                continue;
                            }

                            if (object.Equals(p.GetValue(pendingUpdate.Base.Value), p.GetValue(pendingUpdate.Updated.Value)))
                            {
                                continue;
                            }

                            if (columnName == "id")
                            {
                                throw new InvalidStateException();
                            }
                            else
                            {
                                updatedProperties.Add(p.Name);
                            }
                        }
                        pendingUpdatesByUpdatedPropertyGroup.Add(updatedProperties.ToArray(), pendingUpdate);
                    }
                }

                var commandTextBuilder = new StringBuilder();

                /*
                 * INSERT INTO test (id, name, updated)
                 * SELECT
                 *    unnest(@ids), unnest(@names), unnest(@updateds)
                 * ON CONFLICT (id) DO UPDATE
                 * SET
                 *    name = excluded.name, updated = excluded.updated
                 */
                var batchIndex = 0;
                foreach (var kvp in pendingUpdatesByUpdatedPropertyGroup)
                {
                    var updatedPropertyNames = kvp.Key;
                    var updatedProperties = updatedPropertyNames.Map(n => typeof(V).GetProperty(n));

                    var updatedColumnNames = kvp.Key.Map(x => x.ToLower());
                    var pendingUpdates = kvp.Value.ToArray();

                    var additionalColumns = new SCG.List <string>();

                    var idParameter = cmd.CreateParameter();
                    idParameter.ParameterName = "id" + batchIndex;
                    idParameter.Value = pendingUpdates.Map(p => p.Base.Key);
                    cmd.Parameters.Add(idParameter);

                    if (hasUpdatedColumn)
                    {
                        var updatedParameter = cmd.CreateParameter();
                        updatedParameter.ParameterName = "updated" + batchIndex;
                        updatedParameter.Value = pendingUpdates.Map(p => DateTime.Now);
                        cmd.Parameters.Add(updatedParameter);
                        additionalColumns.Add("updated");
                    }

                    for (var i = 0; i < updatedPropertyNames.Length; i++)
                    {
                        var updatedPropertyName = updatedPropertyNames[i];
                        var updatedProperty = typeof(V).GetProperty(updatedPropertyName);
                        var array = Array.CreateInstance(updatedProperty.PropertyType, pendingUpdates.Length);
                        for (var j = 0; j < pendingUpdates.Length; j++)
                        {
                            array.SetValue(updatedProperty.GetValue(pendingUpdates[j].Updated.Value), j);
                        }
                        var parameter = cmd.CreateParameter();
                        parameter.ParameterName = updatedColumnNames[i] + batchIndex;
                        parameter.Value = array;
                        cmd.Parameters.Add(parameter);
                    }

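                    // For illustration (batchIndex 0, a single changed column "name", plus the
                    // "updated" timestamp column), the generated statement is roughly:
                    //   UPDATE {tableName} SET name = temp.name, updated = temp.updated
                    //   FROM ( select unnest(@name0) as name, unnest(@id0) as id, unnest(@updated0) as updated ) as temp
                    //   where {tableName}.id = temp.id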
                    var query = $"UPDATE {tableName} " +
                                "SET " +
                                updatedColumnNames.Concat(additionalColumns).Select(n => $"{n} = temp.{n}").Join(", ") + " " +
                                "FROM ( select " +
                                updatedColumnNames.Concat("id").Concat(additionalColumns).Select(n => $"unnest(@{n}{batchIndex}) as {n}").Join(", ") +
                                " ) as temp " +
                                $"where {tableName}.id = temp.id";

                    commandTextBuilder.Append(query);
                    commandTextBuilder.Append("; ");
                    batchIndex++;
                }

                // Handle pending inserts as a single bulk INSERT ... ON CONFLICT upsert.
                if (pendingInserts.Any())
                {
                    var propertyNames = properties.Map(p => p.Name);
                    var columnNames = properties.Map(p => p.Name.ToLower());

                    var idParameter = cmd.CreateParameter();
                    idParameter.ParameterName = "id_ins";
                    idParameter.Value = pendingInserts.Map(p => p.Base.Key);
                    cmd.Parameters.Add(idParameter);

                    for (var i = 0; i < properties.Length; i++)
                    {
                        var property = properties[i];
                        var array = Array.CreateInstance(property.PropertyType, pendingInserts.Count);
                        for (var j = 0; j < pendingInserts.Count; j++)
                        {
                            object propertyValue;
                            if (columnNames[i] == "updated" || columnNames[i] == "created")
                            {
                                propertyValue = DateTime.Now;
                            }
                            else
                            {
                                propertyValue = property.GetValue(pendingInserts[j].Updated.Value);
                            }
                            array.SetValue(propertyValue, j);
                        }
                        var parameter = cmd.CreateParameter();
                        parameter.ParameterName = columnNames[i] + "_ins";
                        parameter.Value = array;
                        cmd.Parameters.Add(parameter);
                    }

                    var query = $"INSERT INTO {tableName} ({columnNames.Concat("id").Join(", ")}) " +
                                "SELECT " +
                                columnNames.Concat("id").Select(n => $"unnest(@{n}_ins)").Join(", ") + " " +
                                "ON CONFLICT (id) DO UPDATE " +
                                "SET " +
                                columnNames.Select(n => $"{n} = excluded.{n}").Join(", ");
                    commandTextBuilder.Append(query);
                    commandTextBuilder.Append("; ");
                }
                cmd.CommandText = commandTextBuilder.ToString();

                await cmd.ExecuteNonQueryAsync().ConfigureAwait(false);
            }));
        }
      /// <summary>
      /// Processes an inbound data event. 
      /// This is assumed to be invoked on an IOCP thread so a goal is to do as little as possible.
      /// </summary>
      public void HandleInboundDataEvent(InboundDataEvent e, Action<InboundDataEvent> returnInboundDataEvent) {
#if DEBUG
         Interlocked.Increment(ref DebugRuntimeStats.in_de);
#endif

         // Deserialize inbound payloads
         SCG.List<object> payloads = new SCG.List<object>();
         try {
            using (var ms = new MemoryStream(e.Data, e.DataOffset, e.DataLength, false, true)) {
               while (ms.Position < ms.Length) {
                  payloads.Add(Deserialize.From(ms));
               }
            }
         } catch (Exception ex) {
            if (!isShutdown) {
               logger.Warn("Error at payload deserialize", ex);
            }
            return;
         }
         returnInboundDataEvent(e);
#if DEBUG
         Interlocked.Add(ref DebugRuntimeStats.in_payload, payloads.Count);
#endif

         // Categorize inbound payloads
         var acknowledgements = new SCG.List<AcknowledgementDto>();
         var announcements = new SCG.List<AnnouncementDto>();
         var reliablePackets = new SCG.List<PacketDto>();
         var unreliablePackets = new SCG.List<PacketDto>();
         foreach (var payload in payloads) {
            if (payload is AcknowledgementDto) {
               acknowledgements.Add((AcknowledgementDto)payload);
            } else if (payload is AnnouncementDto) {
               announcements.Add((AnnouncementDto)payload);
            } else if (payload is PacketDto) {
               // Filter packets not destined to us.
               var packet = (PacketDto)payload;
               if (!identity.Matches(packet.ReceiverId, IdentityMatchingScope.Broadcast)) {
                  tossedCounter.Increment();
                  continue;
               }

               // Bin into reliable vs unreliable.
               if (packet.IsReliable()) {
                  reliablePackets.Add(packet);
               } else {
                  unreliablePackets.Add(packet);
               }
            }
         }

         // Process acks to prevent resends.
         foreach (var ack in acknowledgements) {
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_ack);
#endif
            acknowledgementCoordinator.ProcessAcknowledgement(ack);
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_ack_done);
#endif
         }

         // Process announcements as they are necessary for routing.
         foreach (var announcement in announcements) {
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_ann);
#endif
            HandleAnnouncement(e.RemoteInfo, announcement);
         }

         // Ack inbound reliable messages to prevent resends.
         foreach (var packet in reliablePackets) {
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_out_ack);
#endif
            var ack = AcknowledgementDto.Create(packet.Id);
            RoutingContext routingContext;
            if (routingContextsByPeerId.TryGetValue(packet.SenderId, out routingContext)) {
               routingContext.SendAcknowledgementAsync(packet.SenderId, ack).Forget();
            } else {
               payloadSender.BroadcastAsync(ack).Forget();
            }
#if DEBUG
            Interlocked.Increment(ref DebugRuntimeStats.in_out_ack_done);
#endif
         }

         // Test reliable packets' GUIDs against the bloom filter.
         var isNewByPacketId = duplicateFilter.TestPacketIdsAreNew(new HashSet<Guid>(reliablePackets.Select(p => p.Id)));
         var standalonePacketsToProcess = new SCG.List<PacketDto>(unreliablePackets);
         var chunksToProcess = new SCG.List<MultiPartChunkDto>();
         foreach (var packet in reliablePackets) {
            // Toss out duplicate packets
            if (!isNewByPacketId[packet.Id]) {
               duplicateReceivesCounter.Increment();
               continue;
            } 

            // Bin into multipart chunk vs not
            var multiPartChunk = packet.Message.Body as MultiPartChunkDto;
            if (multiPartChunk != null) {
               multiPartChunksBytesReceivedAggregator.Put(multiPartChunk.BodyLength);
               chunksToProcess.Add(multiPartChunk);
            } else {
               standalonePacketsToProcess.Add(packet);
            }
         }

         // Kick off async standalone packet processing on the thread pool.
         foreach (var packet in standalonePacketsToProcess) {
            inboundMessageDispatcher.DispatchAsync(packet.Message).Forget();
         }

         // Synchronously handle multipart chunk processing.
         foreach (var chunk in chunksToProcess) {
            multiPartPacketReassembler.HandleInboundMultiPartChunk(chunk);
         }
      }
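
The duplicate check above only depends on duplicateFilter.TestPacketIdsAreNew accepting a set of packet ids and answering, per id, whether it is being seen for the first time. Below is a minimal, set-backed sketch of that contract; the real filter is described as bloom-filter based, and the class name here is made up for illustration (thread safety is ignored).

using System;
using SCG = System.Collections.Generic;

// Hypothetical stand-in for duplicateFilter: remembers every packet id it has seen and reports,
// per id, whether this is the first sighting. A real bloom-filter-backed implementation trades
// this exactness for bounded memory and may occasionally flag a brand-new id as a duplicate.
public class NaiveDuplicateFilter {
   private readonly SCG.HashSet<Guid> seen = new SCG.HashSet<Guid>();

   public SCG.Dictionary<Guid, bool> TestPacketIdsAreNew(SCG.HashSet<Guid> packetIds) {
      var isNewById = new SCG.Dictionary<Guid, bool>();
      foreach (var id in packetIds) {
         // HashSet<T>.Add returns true only the first time a value is inserted.
         isNewById[id] = seen.Add(id);
      }
      return isNewById;
   }
}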
Example #22
        private static void TestUpdate1(int cLevel, int threads, int updatesPerThread)
        {
            IDictionary <int, int> dict = new LurchTable <int, int>(/*cLevel,*/ 1);

            for (int i = 1; i <= updatesPerThread; i++)
            {
                dict[i] = i;
            }

            int running = threads;

            using (ManualResetEvent mre = new ManualResetEvent(false))
            {
                for (int i = 0; i < threads; i++)
                {
                    int ii = i;
                    Task.Run(
                        () =>
                    {
                        for (int j = 1; j <= updatesPerThread; j++)
                        {
                            dict[j] = (ii + 2) * j;
                        }
                        if (Interlocked.Decrement(ref running) == 0)
                        {
                            mre.Set();
                        }
                    });
                }
                mre.WaitOne();
            }

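            // Every key j was overwritten by at least one thread with (ii + 2) * j, so each value must be
            // an exact multiple of its key with a quotient between 2 and threads + 1 (inclusive).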
            foreach (var pair in dict)
            {
                var div = pair.Value / pair.Key;
                var rem = pair.Value % pair.Key;

                Assert.Equal(0, rem);
                Assert.True(div > 1 && div <= threads + 1,
                            string.Format("* Invalid value={3}! TestUpdate1(cLevel={0}, threads={1}, updatesPerThread={2})", cLevel, threads, updatesPerThread, div));
            }

            SCG.List <int> gotKeys = new SCG.List <int>();
            foreach (var pair in dict)
            {
                gotKeys.Add(pair.Key);
            }
            gotKeys.Sort();

            SCG.List <int> expectKeys = new SCG.List <int>();
            for (int i = 1; i <= updatesPerThread; i++)
            {
                expectKeys.Add(i);
            }

            Assert.Equal(expectKeys.Count, gotKeys.Count);

            for (int i = 0; i < expectKeys.Count; i++)
            {
                Assert.True(expectKeys[i].Equals(gotKeys[i]),
                            string.Format("The set of keys in the dictionary is are not the same as the expected." + Environment.NewLine +
                                          "TestUpdate1(cLevel={0}, threads={1}, updatesPerThread={2})", cLevel, threads, updatesPerThread)
                            );
            }
        }
Example #23
        private static void TestGetOrAddOrUpdate(int cLevel, int initSize, int threads, int addsPerThread, bool isAdd)
        {
            LurchTable <int, int> dict = new LurchTable <int, int>(/*cLevel,*/ 1);

            int count = threads;

            using (ManualResetEvent mre = new ManualResetEvent(false))
            {
                for (int i = 0; i < threads; i++)
                {
                    int ii = i;
                    Task.Run(
                        () =>
                    {
                        for (int j = 0; j < addsPerThread; j++)
                        {
                            if (isAdd)
                            {
                                //call one of the overloads of GetOrAdd
                                switch (j % 2)
                                {
                                case 0:
                                    dict.GetOrAdd(j, -j);
                                    break;

                                case 1:
                                    dict.GetOrAdd(j, x => - x);
                                    break;
                                    //case 2: // J2N TODO: Implement this overload
                                    //    dict.GetOrAdd(j, (x, m) => x * m, -1);
                                    //    break;
                                }
                            }
                            else
                            {
                                switch (j % 2)
                                {
                                case 0:
                                    dict.AddOrUpdate(j, -j, (k, v) => - j);
                                    break;

                                case 1:
                                    dict.AddOrUpdate(j, (k) => - k, (k, v) => - k);
                                    break;
                                    //case 2: // J2N TODO: Implement this overload
                                    //    dict.AddOrUpdate(j, (k, m) => k * m, (k, v, m) => k * m, -1);
                                    //    break;
                                }
                            }
                        }
                        if (Interlocked.Decrement(ref count) == 0)
                        {
                            mre.Set();
                        }
                    });
                }
                mre.WaitOne();
            }

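            // Whichever overload ran last, both the GetOrAdd and AddOrUpdate paths store -j for key j,
            // so every value should be the negation of its key.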
            foreach (var pair in dict)
            {
                Assert.Equal(pair.Key, -pair.Value);
            }

            SCG.List <int> gotKeys = new SCG.List <int>();
            foreach (var pair in dict)
            {
                gotKeys.Add(pair.Key);
            }
            gotKeys.Sort();

            SCG.List <int> expectKeys = new SCG.List <int>();
            for (int i = 0; i < addsPerThread; i++)
            {
                expectKeys.Add(i);
            }

            Assert.Equal(expectKeys.Count, gotKeys.Count);

            for (int i = 0; i < expectKeys.Count; i++)
            {
                Assert.True(expectKeys[i].Equals(gotKeys[i]),
                            string.Format("* Test '{4}': Level={0}, initSize={1}, threads={2}, addsPerThread={3})" + Environment.NewLine +
                                          "> FAILED.  The set of keys in the dictionary is are not the same as the expected.",
                                          cLevel, initSize, threads, addsPerThread, isAdd ? "GetOrAdd" : "GetOrUpdate"));
            }

            // Finally, let's verify that the count is reported correctly.
            Assert.Equal(addsPerThread, dict.Count);
            Assert.Equal(addsPerThread, dict.ToArray().Length);
        }
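
The overload pairs exercised above follow the same shapes as the GetOrAdd/AddOrUpdate overloads on System.Collections.Concurrent.ConcurrentDictionary. The standalone snippet below illustrates those semantics with ConcurrentDictionary (not LurchTable), using arbitrary keys:

using System;
using System.Collections.Concurrent;

public static class GetOrAddOrUpdateDemo {
   public static void Main() {
      var dict = new ConcurrentDictionary<int, int>();

      dict.GetOrAdd(5, -5);               // key absent: adds -5
      dict.GetOrAdd(5, x => x * 100);     // key present: factory is ignored, value stays -5

      dict.AddOrUpdate(7, -7, (k, v) => -k);       // key absent: adds -7
      dict.AddOrUpdate(7, k => -k, (k, v) => -k);  // key present: update delegate runs, value stays -7

      Console.WriteLine($"{dict[5]}, {dict[7]}");  // prints: -5, -7
   }
}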
Example #24
        private static void TestRemove1(int cLevel, int threads, int removesPerThread)
        {
            LurchTable <int, int> dict = new LurchTable <int, int>(/*cLevel,*/ 1);
            string methodparameters    = string.Format("* TestRemove1(cLevel={0}, threads={1}, removesPerThread={2})", cLevel, threads, removesPerThread);
            int    N = 2 * threads * removesPerThread;

            for (int i = 0; i < N; i++)
            {
                dict[i] = -i;
            }

            // The dictionary contains keys [0..N), each key mapped to the negation of the key.
            // Threads will cooperatively remove all even keys.

            int running = threads;

            using (ManualResetEvent mre = new ManualResetEvent(false))
            {
                for (int i = 0; i < threads; i++)
                {
                    int ii = i;
                    Task.Run(
                        () =>
                    {
                        for (int j = 0; j < removesPerThread; j++)
                        {
                            int value;
                            int key = 2 * (ii + j * threads);
                            Assert.True(dict.TryRemove(key, out value), "Failed to remove an element! " + methodparameters);

                            Assert.Equal(-key, value);
                        }

                        if (Interlocked.Decrement(ref running) == 0)
                        {
                            mre.Set();
                        }
                    });
                }
                mre.WaitOne();
            }

            foreach (var pair in dict)
            {
                Assert.Equal(pair.Key, -pair.Value);
            }

            SCG.List <int> gotKeys = new SCG.List <int>();
            foreach (var pair in dict)
            {
                gotKeys.Add(pair.Key);
            }
            gotKeys.Sort();

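            // Only the odd keys 1, 3, ..., N - 1 should survive, since the threads removed every even key.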
            SCG.List <int> expectKeys = new SCG.List <int>();
            for (int i = 0; i < (threads * removesPerThread); i++)
            {
                expectKeys.Add(2 * i + 1);
            }

            Assert.Equal(expectKeys.Count, gotKeys.Count);

            for (int i = 0; i < expectKeys.Count; i++)
            {
                Assert.True(expectKeys[i].Equals(gotKeys[i]), "  > Unexpected key value! " + methodparameters);
            }

            // Finally, let's verify that the count is reported correctly.
            Assert.Equal(expectKeys.Count, dict.Count);
            Assert.Equal(expectKeys.Count, dict.ToArray().Length);
        }
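
The removal pattern works because key = 2 * (ii + j * threads), taken over all threads ii and iterations j, enumerates each even key in [0, N) exactly once. The standalone check below (with arbitrarily chosen small parameters) verifies that partitioning outside of any test framework:

using System;
using System.Linq;
using SCG = System.Collections.Generic;

public static class RemoveKeyPartitionCheck {
   public static void Main() {
      const int threads = 3, removesPerThread = 4;   // arbitrary small parameters
      int n = 2 * threads * removesPerThread;        // same N as in TestRemove1
      var covered = new SCG.HashSet<int>();
      for (int ii = 0; ii < threads; ii++) {
         for (int j = 0; j < removesPerThread; j++) {
            covered.Add(2 * (ii + j * threads));     // the key thread ii removes on iteration j
         }
      }
      // Expect every even key in [0, n) exactly once; the odd keys are what the test asserts remain.
      bool ok = covered.SetEquals(Enumerable.Range(0, n).Where(k => k % 2 == 0));
      Console.WriteLine(ok);  // True
   }
}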