Example #1
 public void Add(JobItem item)
 {
     count++;

     // Batch the item into the list that matches its job method; the flush below
     // runs once the accumulated batch reaches the processor queue size.
     if (item.Method == JobMethods.Database_InsertLogs)
     {
         insertLogItems.Add(item);
         insertLogCount += item.LogEntityItems.Count;
     }
     else if (item.Method == JobMethods.Database_RetrieveLogs)
     {
         retrieveLogItems.Add(item);
         retrieveLogCount += item.QueryResults.Count;
     }
     else if (item.Method == JobMethods.Empty)
     {
         emptyLogItems.Add(item);
     }
     else if (item.Method == JobMethods.Database_UpdateIndex)
     {
         // Index updates are only flagged; the item itself needs no further processing.
         updateIndex = true;
         item.Processed = true;
     }
     else if (item.Method == JobMethods.Index_QueryRequest)
     {
         queryRequestItems.Add(item);
     }

     if (count >= processorQueueSize)
     {
         Flush(false);
     }
 }
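Flush itself is not among these examples; a minimal sketch of the shape it would need, given the batching fields used by Add, could be the following (ProcessInsertLogs and ProcessRetrieveLogs are hypothetical helper names; ProcessQueryRequests appears in Example #7):
 void Flush(bool force)
 {
     // Hypothetical sketch, not from the source: drain whatever Add has batched, then reset the counters.
     if (insertLogItems.Count > 0)
     {
         ProcessInsertLogs();      // assumed helper: persists insertLogItems and marks them Processed
     }
     if (retrieveLogItems.Count > 0)
     {
         ProcessRetrieveLogs();    // assumed helper: returns retrieveLogItems to their client sockets
     }
     ProcessQueryRequests();       // shown in Example #7: runs the index searches and re-queues the results
     count = 0;
     insertLogCount = 0;
     retrieveLogCount = 0;
 }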
 JobItem[] CreateEmptyJobItems()
 {
     var testSize = 10000000; // 10 million
     var items = new JobItem[testSize];
     for (int i = 0; i < testSize; i++)
     {
         items[i] = new JobItem(JobMethods.Empty);
     }
     return items;
 }
 public void AddJob(JobItem job)
 {
     // Job method values above 900 are routed as database jobs (see the sketch below).
     if (job.Method > 900)
     {
         AddDatabaseJob(job);
     }
     else
     {
         AddIndexJob(job);
     }
 }
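The routing above keys off a raw numeric value, which suggests JobMethods is a set of numeric constants rather than a plain enum (an enum would not compare against 900 without a cast). A hypothetical layout that satisfies both the equality checks in Example #1 and this comparison, with values chosen purely for illustration:
 // Hypothetical sketch (not from the source): JobMethods as short constants, with
 // database methods grouped above 900 so AddJob can route on the numeric value.
 public static class JobMethods
 {
     public const short Empty = 0;
     public const short Index_QueryRequest = 100;
     public const short Database_UpdateIndex = 901;
     public const short Database_InsertLogs = 902;
     public const short Database_RetrieveLogs = 903;
 }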
 JobItem[] CreateInsertLogItems()
 {
     var testSize = 50000;
     var items = new JobItem[testSize];
     var insertStart = new DateTime(2011, 11, 1);
     var interval = (long)((DateTime.Now - insertStart).Ticks / testSize);
     var rndElapsed = new Random();
     for (int i = 0; i < items.Length; i++)
     {
         var createdDT = insertStart.AddTicks(interval * i);
         var elapsed = TimeSpan.FromMilliseconds((long)rndElapsed.Next(1, 30000)).Ticks;
         var log = new NAppProfiler.Client.DTO.Log()
         {
             ClientIP = new byte[] { 10, 26, 10, 142 },
             CreatedDateTime = createdDT,
             Details = new List<NAppProfiler.Client.DTO.LogDetail>(),
             Elapsed = elapsed,
             IsError = Convert.ToBoolean(rndElapsed.Next(0, 2)), // upper bound is exclusive; Next(0, 1) would always yield false
             Method = "Method",
             Service = "Service",
         };
         log.Details.Add(new Client.DTO.LogDetail()
         {
             CreatedDateTime = createdDT,
             Description = "Description " + i.ToString(),
             Elapsed = 100,
         });
         log.Details.Add(new Client.DTO.LogDetail()
         {
             CreatedDateTime = createdDT,
             Description = "Description2 " + i.ToString(),
             Elapsed = 100,
         });
         items[i] = new JobItem(JobMethods.Database_InsertLogs)
         {
             LogEntityItems = new List<Server.Essent.LogEntity>(),
         };
         items[i].LogEntityItems.Add(new Server.Essent.LogEntity(createdDT, new TimeSpan(elapsed), log.IsError, Client.DTO.Log.SerializeLog(log)));
     }
     return items;
 }
Example #5
 // Multithread in Add
 public void Add(JobItem item)
 {
     var localPending = Interlocked.Increment(ref pendingCurIn);
     var wrapping = curIn + maxSize - 1;
     // Back off while the number of pending Adds is about to wrap past the size of the ring buffer
     if (localPending >= wrapping)
     {
         var sw = new SpinWait();
         do
         {
             sw.SpinOnce();
             wrapping = curIn + maxSize - 2;
         } while (localPending >= wrapping);
     }
     var localCurIn = Interlocked.Increment(ref curIn);
     var longIndex = localCurIn % maxSize;
     var index = Convert.ToInt32(longIndex);
     // Claim the slot: state 0 (free) -> 1 (being written); spin if the slot is still occupied.
     if (Interlocked.CompareExchange(ref itemStates[index], 1, 0) != 0)
     {
         var sw = new SpinWait();
         while (Interlocked.CompareExchange(ref itemStates[index], 1, 0) != 0)
         {
             sw.SpinOnce();
         }
     }
     items[index] = item;
     // Publish the slot: state 1 (being written) -> 2 (ready for the consumer).
     var beforeEx = Interlocked.CompareExchange(ref itemStates[index], 2, 1);
     if (beforeEx != 1)
     {
         throw new ApplicationException("unknown value");
     }
     if (traceEnabled)
     {
         Interlocked.Increment(ref addCounter);
     }
 }
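The Add above implements the producer side of a ring buffer with a per-slot state protocol (0 = free, 1 = being written, 2 = published). The consumer side is not shown in these examples; a minimal single-consumer sketch that honors the same protocol, assuming a curOut counter that mirrors curIn, might look like this:
 JobItem Take()
 {
     // Hypothetical sketch, not part of the source. Assumes a single consumer per queue.
     var localCurOut = Interlocked.Increment(ref curOut);   // curOut is an assumed field mirroring curIn
     var index = Convert.ToInt32(localCurOut % maxSize);
     var sw = new SpinWait();
     // Wait until the producer has published this slot (state 2).
     while (Interlocked.CompareExchange(ref itemStates[index], 2, 2) != 2)
     {
         sw.SpinOnce();
     }
     var item = items[index];
     items[index] = null;
     // Hand the slot back to producers (state 0); safe because only one consumer reads it.
     Interlocked.Exchange(ref itemStates[index], 0);
     return item;
 }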
 void AddDatabaseJob(JobItem job)
 {
     AddToQueue(job, 0);
     if (traceEnabled || nLogger.IsTraceEnabled)
     {
         Interlocked.Increment(ref addDBCounter);
         if (nLogger.IsTraceEnabled)
         {
             nLogger.Trace("Database Job Added: {0:#,##0}", addDBCounter);
         }
     }
 }
Example #7
 private void ProcessQueryRequests()
 {
     if (queryRequestItems.Count > 0)
     {
         var results = new List<LogQueryResults>();
         for (int i = 0; i < queryRequestItems.Count; i++)
         {
             var curQueries = queryRequestItems[i].LogQueries;
             for (int j = 0; j < curQueries.Count; j++)
             {
                 var cur = curQueries[j];
                 var curResult = indexReader.Search(cur);
                 curResult.RequestID = cur.RequestID;
                 curResult.IncludeData = false;
                 curResult.ClientSocket = cur.ClientSocket;
                 results.Add(curResult);
             }
         }
         queryRequestItems.Clear();
         var job = new JobItem(JobMethods.Database_RetrieveLogs);
         job.QueryResults = results;
         Task.Factory.StartNew(() => manager.AddJob(job));
     }
 }
 void AddToQueue(JobItem job, int index)
 {
     queues[index].Add(job);
 }
Example #9
 private void ProcessSendLog(byte[] data)
 {
     var item = new JobItem(JobMethods.Database_InsertLogs);
     var log = Log.DeserializeLog(data);
     var entity = new LogEntity(log.CreatedDateTime, TimeSpan.FromTicks(log.Elapsed), log.IsError, data);
     item.LogEntityItems = new List<LogEntity>(1);
     item.LogEntityItems.Add(entity);
     AddJob(item);
 }
Example #10
 private void AddJob(JobItem job)
 {
     Task.Factory.StartNew(() => manager.AddJob(job));
 }
Example #11
 private void ProcessLogRequest(byte[] data, Socket client)
 {
     var request = LogQueryResults.DeserializeLog(data);
     request.ClientSocket = client;
     var item = new JobItem(JobMethods.Database_RetrieveLogs);
     item.QueryResults = new List<LogQueryResults>(1);
     item.QueryResults.Add(request);
     AddJob(item);
 }
Example #12
 private void ProcessQueryRequest(byte[] data, Socket client, Guid requestGuid)
 {
     var query = LogQuery.DeserializeQuery(data);
     query.ClientSocket = client;
     query.RequestID = requestGuid;
     var item = new JobItem(JobMethods.Index_QueryRequest);
     item.LogQueries = new List<LogQuery>(1);
     item.LogQueries.Add(query);
     AddJob(item);
 }
Example #13
 private void ProcessEmptyItem()
 {
     var item = new JobItem(JobMethods.Empty);
     AddJob(item);
 }
Example #14
 void AddIndexJob(JobItem job)
 {
     var addIndex = 0;
     CheckToStartNewJob();
     if (curNumberRunningTasks > 1)
     {
         // Changed to Round Robin Method
         while (addIndex == 0)
         {
             curAddTask++;
             var localCurTask = curAddTask;
             if (localCurTask >= maxTasks)
             {
                 curAddTask = 1;
                 localCurTask = 1;
             }
             if (taskRunning[localCurTask] == 1)
             {
                 addIndex = localCurTask;
             }
         }
         // Previous approach (kept for reference): loop through all running Index queues and add to the smallest queue
         //var minSize = int.MaxValue;
         //for (int i = 1; i < taskRunning.Length; i++)
         //{
         //    var localTaskRunning = taskRunning[i];
         //    if (localTaskRunning == 1 && queues[i].Size() < minSize)
         //    {
         //        addIndex = i;
         //    }
         //}
     }
     AddToQueue(job, addIndex);
     if (traceEnabled || nLogger.IsTraceEnabled)
     {
         Interlocked.Increment(ref addIndexCounter);
         if (nLogger.IsTraceEnabled)
         {
             nLogger.Trace("Non Database job Added: {0:#,##0}", addIndexCounter);
         }
     }
 }
 void RunParallel(JobItem[] items)
 {
     var testSize = items.Length;
     System.Threading.Tasks.Parallel.For(0, testSize, i => queueMgr.AddJob(items[i]));
 }
 void RunTest(bool parallel, JobItem[] items)
 {
     long testSize = items.Length;
     var dt1 = DateTime.UtcNow;
     if (parallel)
     {
         RunParallel(items);
     }
     else
     {
         RunSync(items);
     }
     // Poll until either everything reports processed or the queue has drained.
     var localProcessCount = queueMgr.TotalProcessCount;
     while (localProcessCount != testSize && queueMgr.CurrentQueueSize() > 0)
     {
         mre.Reset();
         mre.Wait(TimeSpan.FromSeconds(5));
         localProcessCount = queueMgr.TotalProcessCount;
     }
     var dt2 = DateTime.UtcNow;
     var ts = dt2 - dt1;
     // Count any items that never got marked Processed and release the references.
     var unProcessedCount = 0;
     for (int i = 0; i < testSize; i++)
     {
         if (!items[i].Processed)
         {
             unProcessedCount++;
         }
         items[i] = null;
     }
     Console.WriteLine("Parallel: " + parallel.ToString());
     Console.WriteLine(ts.ToString() + " " + unProcessedCount.ToString());
     var itemsPerSecond = (testSize / ts.TotalMilliseconds) * 1000D;
     Console.WriteLine(itemsPerSecond.ToString("#,##0") + " items per second");
     Assert.That(unProcessedCount, Is.EqualTo(0), "Items not Processed");

     GC.WaitForPendingFinalizers();
     GC.Collect();
     GC.Collect();
     Thread.Sleep(1);
 }
 void RunSync(JobItem[] items)
 {
     var testSize = items.Length;
     for (int i = 0; i < testSize; i++) { queueMgr.AddJob(items[i]); }
 }
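For context, RunTest and the Create* helpers above would be driven from an NUnit test along these lines (the test name and wiring are assumptions, not taken from the source):
 [Test]
 public void AddEmptyJobItems_InParallel()
 {
     // Hypothetical usage sketch: build the items, push them through the queue manager,
     // and let RunTest assert that every item was processed.
     var items = CreateEmptyJobItems();
     RunTest(true, items);
 }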