public void Start()
        {
            var ventilatorQueue = new BlockingCollection<WorkItem>();
            var sinkQueue = new BlockingCollection<WorkItem>();

            StartSink(sinkQueue);

            StartWorker(0, ventilatorQueue, sinkQueue);
            StartWorker(1, ventilatorQueue, sinkQueue);
            StartWorker(2, ventilatorQueue, sinkQueue);

            StartVentilator(ventilatorQueue);

            Thread.Sleep(1000);
            ventilatorQueue.CompleteAdding();
            sinkQueue.CompleteAdding();
            ventilatorQueue.Dispose();
            sinkQueue.Dispose();
        }
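StartVentilator, StartWorker, and StartSink are not shown here. As a rough illustration only, a worker might look like the sketch below, assuming the WorkItem type from the example; GetConsumingEnumerable lets the loop end cleanly once CompleteAdding is called on the ventilator queue.

        // Hypothetical sketch, not from the original source: a worker that drains the
        // ventilator queue and forwards results to the sink queue.
        private static void StartWorker(int id, BlockingCollection<WorkItem> input,
            BlockingCollection<WorkItem> output)
        {
            Task.Run(() =>
            {
                // Blocks while the queue is empty and completes once CompleteAdding
                // has been called and every queued item has been taken.
                foreach (var item in input.GetConsumingEnumerable())
                {
                    // A real worker would process the item here (id identifies the worker).
                    output.Add(item);
                }
            });
        }

Note that the Start method above disposes both queues only one second after starting the pipeline, which can race against workers that are still consuming; waiting for the worker tasks before disposing is the safer pattern.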
Example #2
        /// <summary>
        /// BlockingCollection acts as a thread-safe blocking queue
        /// </summary>
        public void BlockingCollection()
        {
            var queue = new BlockingCollection<string>();

            Task.Run(() =>
                {
                    while (true)
                    {
                        Console.Out.WriteLine(queue.Take());
                    }
                });

            for (int i = 0; i < 5; i++)
            {
                queue.Add(string.Format("item {0}", i));
                Thread.Sleep(1000);
            }

            queue.Dispose();
        }
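Disposing the collection while the consumer task may still be blocked in Take, as the example above does, is not safe because Dispose is not thread-safe. A minimal sketch of the safer shape, using CompleteAdding and GetConsumingEnumerable (an illustrative variation, not the original author's code):

        public void BlockingCollectionWithCompleteAdding()
        {
            using (var queue = new BlockingCollection<string>())
            {
                // The consumer's foreach ends once CompleteAdding has been called
                // and the queue has been drained.
                var consumer = Task.Run(() =>
                {
                    foreach (var item in queue.GetConsumingEnumerable())
                    {
                        Console.Out.WriteLine(item);
                    }
                });

                for (int i = 0; i < 5; i++)
                {
                    queue.Add(string.Format("item {0}", i));
                    Thread.Sleep(1000);
                }

                queue.CompleteAdding();  // signal that no more items are coming
                consumer.Wait();         // dispose only after the consumer has finished
            }
        }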
Example #3
        protected bool Subscribe(string topic, ServiceContext context)
        {
            var id     = GetId(context);
            var topics = Messages.GetOrAdd(id, (_) => new ConcurrentDictionary <string, BlockingCollection <Message> >());

            if (topics.TryGetValue(topic, out var messages))
            {
                if (messages != null)
                {
                    return(false);
                }
            }
            messages = new BlockingCollection <Message>(MessageQueueMaxLength);
            topics.AddOrUpdate(topic, messages, (_, oldmessages) => {
                if (oldmessages != null)
                {
                    messages.Dispose();
                    return(oldmessages);
                }
                return(messages);
            });
            OnSubscribe?.Invoke(id, topic, context);
            return(true);
        }
Example #4
        /// <summary>
        /// Dispose of all components when being disposed
        /// </summary>
        /// <param name="disposing">Pass true to dispose of the managed and unmanaged resources or false to just
        /// dispose of the unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                AppDomain.CurrentDomain.AssemblyResolve -= CurrentDomain_AssemblyResolve;

                this.ClearComponents();

                if (messageLog != null)
                {
                    messageLog.Dispose();
                }

                if (componentContainer != null)
                {
                    componentContainer.Dispose();
                }

                if (tokenSource != null)
                {
                    tokenSource.Dispose();
                }
            }
        }
Example #5
        public async Task <TValue> CreateValue()
        {
            Exception lastException = null;

            try
            {
                IPendingValue <TValue> pendingValue;
                while (_pendingCollection.TryTake(out pendingValue, _timeout))
                {
                    try
                    {
                        var value = await pendingValue.CreateValue().ConfigureAwait(false);

                        _value.TrySetResult(value);

                        CompletePendingValues();

                        return(value);
                    }
                    catch (Exception ex)
                    {
                        lastException = ex;
                    }
                }
            }
            finally
            {
                _pendingCollection.Dispose();
            }

            var exception = lastException ?? new ValueFactoryException("The value factory method faulted.");

            _value.TrySetException(exception);

            throw exception;
        }
Example #6
        protected override void Dispose(bool disposing)
        {
            if (_disposed)
            {
                return;
            }

            if (disposing)
            {
                _logger.Log(LogLevel.Information, $"AE Title Job Process for {_configuration.AeTitle} disposing");

                _timer.Stop();
                _timer.Dispose();
                _jobs.CompleteAdding();
                _jobs.Dispose();
            }

            lock (SyncRoot)
            {
                _disposed = true;
            }

            base.Dispose(disposing);
        }
Example #7
        public DeferredSubject(ISubject <T> subject)
        {
            if (subject == null)
            {
                throw new ArgumentNullException("subject");
            }

            _inner         = subject;
            _listeningTask = Task.Run(
                () =>
            {
                try
                {
                    foreach (var notification in _queue.GetConsumingEnumerable())
                    {
                        notification.Accept(_inner);
                    }
                }
                finally
                {
                    _queue.Dispose();
                }
            });
        }
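The members that feed and complete _queue are not shown. Purely as an inferred sketch (hypothetical, assuming _queue is a BlockingCollection<Notification<T>>), the rest of DeferredSubject might look like this; completing the queue is what ends the foreach above and lets its finally block dispose the queue:

        // Hypothetical companions to the constructor above.
        public void OnNext(T value) => _queue.Add(Notification.CreateOnNext(value));

        public void Dispose()
        {
            // Ends GetConsumingEnumerable in the listening task, whose finally
            // block then disposes the queue.
            _queue.CompleteAdding();
            _listeningTask.Wait();
        }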
Example #8
        public static void Writer(BlockingCollection <Answer> calculationResultsQueue)
        {
            var calculationResult = calculationResultsQueue.Take();

            while (calculationResult.mathExpression != "STOP")
            {
                if (calculationResult.errors.Count == 0)
                {
                    Console.Write("Answer: ");
                    Console.WriteLine(calculationResult.answer);
                }
                else
                {
                    foreach (var error in calculationResult.errors)
                    {
                        printError(calculationResult.mathExpression, error);
                    }
                }

                calculationResult = calculationResultsQueue.Take();
            }

            calculationResultsQueue.Dispose();
        }
Example #9
        /// <summary>
        /// Wait for the document writer task to complete when disposed
        /// </summary>
        /// <param name="disposing">Pass true to dispose of the managed and unmanaged resources or false to just
        /// dispose of the unmanaged resources.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                documentList.CompleteAdding();

                if (documentWriter != null && documentWriter.Status != TaskStatus.Faulted)
                {
                    int count = documentList.Count;

                    if (count != 0)
                    {
                        this.WriteMessage(MessageLevel.Diagnostic, "Waiting for the document writer task to " +
                                          "finish ({0} {1}file(s) remaining)...", count, groupId);
                    }

                    documentWriter.Wait();
                }

                documentList.Dispose();
            }

            base.Dispose(disposing);
        }
Example #10
                public void Release()
                {
                    int n = Interlocked.Decrement(ref _cref);

                    Contracts.Assert(n >= 0);

                    if (n != 0)
                    {
                        return;
                    }

                    if (_thdRead != null)
                    {
                        _abort = true;
                        _thdRead.Join();
                        _thdRead = null;
                    }

                    if (_queue != null)
                    {
                        _queue.Dispose();
                        _queue = null;
                    }
                }
Example #11
        /// <summary>
        /// Stops collection of performance counter metrics and relaying of data to Graphite
        /// </summary>
        public static void StopCollection()
        {
            if (!_started)
            {
                return;
            }
            _started = false;

            _metricCollectorTimer.Dispose();
            _metricReporterTimer.Dispose();

            if (_tcpClient != null && _tcpClient.Connected)
            {
                _tcpClient.Close();
            }

            foreach (var counter in _counters)
            {
                counter.Item4.Dispose();
            }
            _counters.Clear();

            _metricsList.Dispose();
        }
Example #12
        protected virtual void Dispose(bool disposing)
        {
            _dispatchCts?.Cancel();
            _dispatchThread?.Join();

            if (!disposing)
            {
                return;
            }

            _dispatchQueue.CompleteAdding();

            lock (_watchers)
            {
                foreach (var watcher in _watchers)
                {
                    watcher.Dispose();
                }

                _watchers.Clear();
            }

            _dispatchQueue.Dispose();
        }
Example #13
        /// <summary>
        /// Stop http listener and processor
        /// </summary>
        public void Stop()
        {
            //stop listener
            if (_islistening)
            {
                //send DELETE method call
                log.Debug("Sending HTTP request to stop");
                WebRequest stopreq = WebRequest.Create(String.Format("http://127.0.0.1:{0}/", _service._config.port));
                stopreq.Method = "DELETE";
                WebResponse resp = stopreq.GetResponse();
            }
            _httpthread.Join(Timeout.Infinite);
            while (_islistening)
            {
            }

            //stop processor
            log.Debug("Waiting for http worker thread");
            _keywordthread.Join(Timeout.Infinite);
            while (_isprocessing)
            {
            }
            _requests.Dispose();
        }
Example #14
        /// <summary>
        /// Wait for the comments writer task to complete when disposed
        /// </summary>
        /// <param name="disposing">Pass true to dispose of the managed and unmanaged resources or false to just
        /// dispose of the unmanaged resources.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                commentsList.CompleteAdding();

                if (commentsWriter != null)
                {
                    int count = commentsList.Count;

                    if (count != 0)
                    {
                        this.WriteMessage(MessageLevel.Diagnostic, "Waiting for the IntelliSense comments " +
                                          "writer task to finish ({0} member(s) remaining)...", count);
                    }

                    commentsWriter.Wait();
                }

                commentsList.Dispose();
            }

            base.Dispose(disposing);
        }
Example #15
        protected virtual void Dispose(bool disposing)
        {
            if (_disposed)
            {
                return;
            }

            if (disposing)
            {
                cancellationTokenSource.Cancel();
                _logger.Information("Disposing..........");
                // Indicate that no new tasks will be coming in
                _tasks.CompleteAdding();
                // _schedulerThread.Abort() is not used here: Thread.Abort is not supported in .NET Core.
                // If third-party code must be terminated forcibly in .NET Core, run it in a separate
                // process and use Process.Kill.
                _customThreadPool.Dispose();
                _schedulerThread.Join();
                _tasks.Dispose();
                cancellationTokenSource.Dispose();
            }

            _disposed = true;
        }
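The comment above points toward cooperative shutdown rather than Thread.Abort. A minimal sketch of a scheduler loop that cooperates with this Dispose method (hypothetical; assumes _tasks is a BlockingCollection<Action> and the loop runs on _schedulerThread):

        // Hypothetical scheduler loop: it ends when the token is cancelled or when
        // CompleteAdding() has been called and the queue is drained, so the Join()
        // in Dispose returns without Thread.Abort.
        private void SchedulerLoop()
        {
            try
            {
                foreach (var work in _tasks.GetConsumingEnumerable(cancellationTokenSource.Token))
                {
                    work();
                }
            }
            catch (OperationCanceledException)
            {
                // Cancel() was called in Dispose; let the thread exit normally.
            }
        }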
Example #16
        /// <summary>
        /// Cleans up the scheduler by indicating that no more tasks will be queued.
        /// This method blocks until all threads successfully shutdown.
        /// </summary>
        public void Dispose()
        {
            if (disposed)
            {
                return;
            }

            disposed = true;
            if (tasks != null)
            {
                // Indicate that no new tasks will be coming in
                tasks.CompleteAdding();

                // Wait for all threads to finish processing tasks
                foreach (var thread in _threads)
                {
                    thread.Join();
                }

                // Cleanup
                tasks.Dispose();
                tasks = null;
            }
        }
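For the Join calls above to return, each worker thread presumably loops over tasks.GetConsumingEnumerable(), which finishes once CompleteAdding has been called and the queue is empty. A sketch of such a worker body (hypothetical; assumes this type derives from TaskScheduler and tasks holds Task instances):

        // Hypothetical body of the threads joined in Dispose above.
        private void WorkerLoop()
        {
            // Blocks while the queue is empty; the enumeration completes after
            // tasks.CompleteAdding() once every queued task has been taken.
            foreach (Task task in tasks.GetConsumingEnumerable())
            {
                TryExecuteTask(task);   // TaskScheduler's protected execution helper
            }
        }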
Example #17
        /// <summary>
        /// This should only be called by public method LoadlogFiles since it takes care of index drop and rebuild.
        /// </summary>
        /// <param name="logFile">Path to a log file to load.</param>
        /// <param name="pb">ProgressBroadcaster which will received progress updates, can be null.</param>
        private long LoadLogFile(string logFile, long startingRecordCount, ProgressBroadcaster pb = null)
        {
            FileInfo logFileInfo = new FileInfo(logFile);

            if (logFileInfo.Length < 1)
            {
                // Skip 0 length files
                return(startingRecordCount);
            }

            int indexingConcurrency = 4;

            pb.BroadcastStatus("Loading from " + logFile);

            // It can be tricky to do a batch insert and still get each new record's ID, so instead we query the
            // database for the current highest ID and assign incremented IDs here rather than relying on DB auto-increment.
            long nextId = Database.GetHighestLogEntryID();

            NuixLogReader reader = new NuixLogReader(logFile);

            SQLiteBatchInserter batchInserter = Database.CreateBatchInserter(1000);

            batchInserter.Begin(Database.GetEmbeddedSQL("NuixLogReviewer.LogRepository.InsertLogEntry.sqlite"));

            // Used for progress updates
            object locker      = new object();
            long   recordCount = startingRecordCount;

            List <IEntryClassifier> classifiers = getAllClassifiers();

            BlockingCollection <NuixLogEntry> toInsert   = new BlockingCollection <NuixLogEntry>();
            BlockingCollection <NuixLogEntry> toClassify = new BlockingCollection <NuixLogEntry>();
            BlockingCollection <NuixLogEntry> toIndex    = new BlockingCollection <NuixLogEntry>();

            // ==== Task Dedicated to Pulling Entries from Source ====
            Task readerConsumer = new Task(new Action(() =>
            {
                foreach (var entry in reader)
                {
                    toClassify.Add(entry);
                }

                // Signal that was the last one
                toClassify.Add(null);
            }), TaskCreationOptions.LongRunning);

            // ==== Classify Log Entries ====
            Task classificationTask = new Task(new Action(() =>
            {
                while (true)
                {
                    NuixLogEntry entry = toClassify.Take();
                    if (entry == null)
                    {
                        break;
                    }

                    // Give each classifier a chance to look at this entry and provide flag
                    // values to be assigned to the entry.
                    HashSet <string> flags = new HashSet <string>();
                    foreach (var classifier in classifiers)
                    {
                        var calculatedFlags = classifier.Classify(entry);
                        if (calculatedFlags != null)
                        {
                            foreach (var calculatedFlag in calculatedFlags)
                            {
                                flags.Add(calculatedFlag.ToLower());
                            }
                        }
                    }
                    entry.Flags = flags;

                    toInsert.Add(entry);
                }

                // Signal that was the last one
                toInsert.Add(null);
            }), TaskCreationOptions.LongRunning);

            // ==== Task Dedicated to Inserting to SQLite Database ====
            Task dbConsumer = new Task(new Action(() =>
            {
                DateTime lastProgress = DateTime.Now;

                while (true)
                {
                    NuixLogEntry entry = toInsert.Take();
                    if (entry == null)
                    {
                        break;
                    }

                    nextId++;

                    // Push to SQLite database
                    entry.ID                     = nextId;
                    batchInserter["@id"]         = entry.ID;
                    batchInserter["@linenumber"] = entry.LineNumber;
                    batchInserter["@filename"]   = Database.GetFilenameID(entry.FilePath);
                    batchInserter["@timestamp"]  = entry.TimeStamp.ToFileTime();
                    batchInserter["@channel"]    = Database.GetChannelID(entry.Channel);
                    batchInserter["@elapsed"]    = entry.Elapsed.TotalMilliseconds;
                    batchInserter["@level"]      = Database.GetLevelID(entry.Level);
                    batchInserter["@source"]     = Database.GetSourceID(entry.Source);
                    batchInserter["@content"]    = entry.Content;
                    batchInserter["@flags"]      = String.Join(" ", entry.Flags);
                    batchInserter.Insert();

                    recordCount++;

                    // Periodically report progress
                    if ((DateTime.Now - lastProgress).TotalMilliseconds >= 1000)
                    {
                        lock (this) { pb.BroadcastProgress(recordCount); }
                        lastProgress = DateTime.Now;
                    }

                    toIndex.Add(entry);
                }

                // Let each indexing task know there are no more to index
                for (int i = 0; i < indexingConcurrency; i++)
                {
                    toIndex.Add(null);
                }
            }), TaskCreationOptions.LongRunning);

            // ==== Series of Tasks Dedicated to Adding Entries to Lucene Index ====
            Task[] indexers = new Task[indexingConcurrency];
            for (int i = 0; i < indexingConcurrency; i++)
            {
                Task indexConsumer = new Task(new Action(() =>
                {
                    while (true)
                    {
                        NuixLogEntry entry = toIndex.Take();
                        if (entry == null)
                        {
                            break;
                        }

                        // Push to Lucene
                        SearchIndex.IndexLogEntry(entry);
                    }

                    pb.BroadcastProgress(recordCount);
                }), TaskCreationOptions.LongRunning);
                indexers[i] = indexConsumer;
                indexConsumer.Start();
            }

            readerConsumer.Start();
            classificationTask.Start();
            dbConsumer.Start();

            // Wait for them all to finish up
            Task.WaitAll(readerConsumer, classificationTask, dbConsumer);
            Task.WaitAll(indexers);

            // Report final progress
            pb.BroadcastProgress(recordCount);

            // Make sure batch inserter flushes any pending inserts
            batchInserter.Complete();

            Database.ReleaseBatchInserter(batchInserter);

            toClassify.Dispose();
            toInsert.Dispose();
            toIndex.Dispose();

            return(recordCount);
        }
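The pipeline above signals end-of-stream by pushing null sentinels and consuming with manual Take loops. As an illustrative alternative only (reusing the toClassify/toInsert collections and NuixLogEntry type from the example), a stage can call CompleteAdding on its downstream collection and let the consumer iterate GetConsumingEnumerable, which removes the need for sentinels:

            // Sketch of the classifier stage without null sentinels.
            Task classificationTask = Task.Factory.StartNew(() =>
            {
                foreach (NuixLogEntry entry in toClassify.GetConsumingEnumerable())
                {
                    // ... classify and assign entry.Flags as in the original ...
                    toInsert.Add(entry);
                }

                // Tells the database stage that nothing more is coming, replacing
                // the toInsert.Add(null) sentinel above.
                toInsert.CompleteAdding();
            }, TaskCreationOptions.LongRunning);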
Example #18
        private async static void WriteToLog()
        {
            try
            {
                foreach (var tuple in messageCollection.GetConsumingEnumerable())
                {
                    await Task.Delay(TimeSpan.FromMilliseconds(50));

                    if (tuple != null && tuple.Item2 != null)
                    {
                        try
                        {
                            var direction = tuple.Item1;
                            var message   = tuple.Item2;
                            var now       = DateTime.Now;
                            var builder   = new StringBuilder();
                            builder.AppendLine(string.Format(DateFormat,
                                                             now.Hour,
                                                             now.Minute,
                                                             now.Second,
                                                             line));
                            builder.AppendLine(string.Format(CultureInfo.CurrentCulture,
                                                             direction == MessageDirection.Send
                                    ? MessageSuccessfullySent
                                    : MessageSuccessfullyReceived,
                                                             string.IsNullOrWhiteSpace(message.MessageId) ? NullValue : message.MessageId,
                                                             string.IsNullOrWhiteSpace(message.SessionId) ? NullValue : message.SessionId,
                                                             string.IsNullOrWhiteSpace(message.Label) ? NullValue : message.Label,
                                                             message.Size));
                            builder.AppendLine(MessagePayloadHeader);
                            var messageText = GetMessageText(message);
                            builder.AppendLine(string.Format(MessageTextFormat,
                                                             messageText.Contains('\n')
                                    ? messageText
                                    : messageText.Substring(0, Math.Min(messageText.Length, 128)) +
                                                             (messageText.Length >= 128 ? "..." : "")));
                            if (message.Properties.Any())
                            {
                                builder.AppendLine(MessagePropertiesHeader);
                                foreach (var p in message.Properties)
                                {
                                    builder.AppendLine(string.Format(MessagePropertyFormat,
                                                                     p.Key,
                                                                     p.Value));
                                }
                            }
                            if (writer != null)
                            {
                                await writer.WriteAsync(builder.ToString());

                                await writer.FlushAsync();
                            }
                            else
                            {
                                break;
                            }
                        }
                        // ReSharper disable once EmptyGeneralCatchClause
                        catch
                        {
                        }
                    }
                }
            }
            // ReSharper disable once EmptyGeneralCatchClause
            catch
            {
            }
            finally
            {
                messageCollection.Dispose();
            }
        }
Example #19
        void CalculateIntoGroup()
        {
            List <Guid>    leftRecords    = new List <Guid>();
            HashSet <Guid> filesProcessed = new HashSet <Guid>();

            groupedFiles = new Dictionary <int, Dictionary <Guid, List <Guid> > >();

            Dictionary <Guid, List <Guid> > disconnectedFiles = null;

            if (IncludesDisconnected)
            {
                disconnectedFiles = new Dictionary <Guid, List <Guid> >();
                foreach (var record in allRecords)
                {
                    if (record.Value.IgnoredMode == IgnoredMode.HiddenAndDisconnected)
                    {
                        if (!disconnectedFiles.TryGetValue(record.Value.File1Id, out var list))
                        {
                            list = new List <Guid>();
                            disconnectedFiles.Add(record.Value.File1Id, list);
                        }
                        list.Add(record.Key);
                        if (!disconnectedFiles.TryGetValue(record.Value.File2Id, out list))
                        {
                            list = new List <Guid>();
                            disconnectedFiles.Add(record.Value.File2Id, list);
                        }
                        list.Add(record.Key);
                    }
                    else
                    {
                        leftRecords.Add(record.Key);
                    }
                }
            }
            else
            {
                foreach (var record in allRecords)
                {
                    if (record.Value.IgnoredMode != IgnoredMode.HiddenAndDisconnected)
                    {
                        leftRecords.Add(record.Key);
                    }
                }
            }

            if (leftRecords.Count != 0)
            {
                //FileId, SimilarRecordID
                var unsortedGroup = new List <Dictionary <Guid, List <Guid> > >();

                BlockingCollection <Guid> needToPrepareThumbprints = null;
                Thread preparingFileThumbprints = null;

                needToPrepareThumbprints = new BlockingCollection <Guid>();
                preparingFileThumbprints = new Thread(PreparingFileThumbprints);
                preparingFileThumbprints.Start(needToPrepareThumbprints);

                while (leftRecords.Count > 0)
                {
                    var firstRecord = allRecords[leftRecords[0]];
                    leftRecords.RemoveAt(0);

                    Dictionary <Guid, List <Guid> > currentFilesGroup = new Dictionary <Guid, List <Guid> >();
                    unsortedGroup.Add(currentFilesGroup);

                    Queue <Guid> filesToProcess = new Queue <Guid>();
                    if (!filesProcessed.Contains(firstRecord.File1Id))
                    {
                        filesToProcess.Enqueue(firstRecord.File1Id);
                    }
                    if (!filesProcessed.Contains(firstRecord.File2Id))
                    {
                        filesToProcess.Enqueue(firstRecord.File2Id);
                    }
                    currentFilesGroup.Add(firstRecord.File1Id, new List <Guid>()
                    {
                        firstRecord.Id
                    });
                    needToPrepareThumbprints?.Add(firstRecord.File1Id);
                    currentFilesGroup.Add(firstRecord.File2Id, new List <Guid>()
                    {
                        firstRecord.Id
                    });
                    needToPrepareThumbprints?.Add(firstRecord.File2Id);

                    while (filesToProcess.Count > 0)
                    {
                        var fileToProcess = filesToProcess.Dequeue();
                        filesProcessed.Add(fileToProcess);

                        var records = fileToRecords[fileToProcess];

                        foreach (var recordId in records)
                        {
                            if (leftRecords.Remove(recordId))
                            {
                                var record = allRecords[recordId];
                                if (!filesProcessed.Contains(record.File1Id))
                                {
                                    filesToProcess.Enqueue(record.File1Id);
                                }
                                if (!filesProcessed.Contains(record.File2Id))
                                {
                                    filesToProcess.Enqueue(record.File2Id);
                                }
                                if (!currentFilesGroup.TryGetValue(record.File1Id, out var list))
                                {
                                    list = new List <Guid>();
                                    currentFilesGroup.Add(record.File1Id, list);
                                    needToPrepareThumbprints?.Add(record.File1Id);
                                }
                                list.Add(record.Id);
                                if (!currentFilesGroup.TryGetValue(record.File2Id, out list))
                                {
                                    list = new List <Guid>();
                                    currentFilesGroup.Add(record.File2Id, list);
                                    needToPrepareThumbprints?.Add(record.File2Id);
                                }
                                list.Add(record.Id);
                            }
                        }
                    }
                }

                var sortedGroup = unsortedGroup.OrderByDescending(i => i.Values.Sum(j => j.Count(k => allRecords[k].IgnoredMode == IgnoredMode.Effective))); //order by descending: total effective records in group.
                var groupIndex  = 0;
                foreach (var item in sortedGroup)
                {
                    groupedFiles.Add(groupIndex++, item);
                }

                needToPrepareThumbprints.CompleteAdding();

                preparingFileThumbprints.Join();
                needToPrepareThumbprints.Dispose();
            }

            if (disconnectedFiles?.Count > 0)
            {
                groupedFiles.Add(-1, disconnectedFiles);
            }
        }
Example #20
        private async Task ProcessQueueAsync()
        {
            var tasks = new List <Task>();

            _ctsCancel = new CancellationTokenSource();
            _ctCancel  = _ctsCancel.Token;
            RaisePropertyChanged(nameof(this.IsBusy));

            while (!_ctCancel.IsCancellationRequested)
            {
                IQueueable item;
                if (!_queueList.TryTake(out item))
                {
                    break;
                }

                if (!item.IsQueued)
                {
                    continue;
                }
                _itemsProcessing.Add(item);
                Task t = Task.Run(async() =>
                {
                    try
                    {
                        await _semaphore.WaitAsync(_ctCancel);
                        if (item.IsQueued && !_ctCancel.IsCancellationRequested)
                        {
                            await item.StartAsync();
                        }
                        _semaphore.Release();
                    }
                    catch (OperationCanceledException)
                    {
                        return;
                    }
                });
                tasks.Add(t);
            }

            await Task.WhenAll(tasks.ToArray());

            if (_ctCancel.IsCancellationRequested)
            {
                var newList = new BlockingCollection <IQueueable>();
                foreach (var item in _itemsProcessing)
                {
                    if (!item.IsCompleted && !_queueList.Contains(item) && item.IsQueued)
                    {
                        newList.Add(item);
                    }
                }
                foreach (var item in _queueList)
                {
                    newList.Add(item);
                }
                _queueList.Dispose();
                _queueList = newList;
            }

            _itemsProcessing.Clear();
            _ctsCancel = null;
            _ctCancel  = default;
            RaisePropertyChanged(nameof(this.IsBusy));
        }
Example #21
        public void LoadFromPath(string Folder)
        {
            // read/write table
            QueryTable        = GetTable(MinHashSize);
            InitialScanFolder = Folder;
            ReadyQueue        = new BlockingCollection <List <HashRec> >();

            source    = new CancellationTokenSource();
            FL.source = source;

            batches = new List <HashRec> [256];
            for (int i = 0; i < 256; i++)
            {
                batches[i] = new List <HashRec>();
            }

            source.Token.Register(() => WriteColor(ConsoleColor.Red, $"Cancellation requested. {FL.LoadExceptions.Count} file load exceptions occurred."), true);
            CancellationToken token = source.Token;

            try
            {
                var po = new ParallelOptions()
                {
                    CancellationToken = token, MaxDegreeOfParallelism = MaxBatchParallel
                };

                Parallel.Invoke((po),
                                () =>
                {
                    FL.GenerateSW = Stopwatch.StartNew();

                    FL.RecursiveGenerate(Folder, po);
                    DoneDirScan = true;
                    WriteColor(ConsoleColor.Green, $"Finished FS load from {Folder} task time: {FL.GenerateSW.Elapsed}");
                },
                                () =>
                {
                    DumpToCloud(po);
                    DoneDump = true;
                },
                                () =>
                {
                    UploadedSW = Stopwatch.StartNew();
                    var po2    = new ParallelOptions()
                    {
                        MaxDegreeOfParallelism = MaxBatchParallel
                    };
                    do
                    {
                        Parallel.ForEach(ReadyQueue.GetConsumingEnumerable(token), po2, (recs) => {
                            BatchBatch(QueryTable, recs, BatchInsert);
                        });
                    } while (!DoneDump || !ReadyQueue.IsCompleted);
                }
                                );
            }
            catch (AggregateException agg)
            {
                WriteColor(ConsoleColor.Yellow, $"AggregateException: {agg.ToString()} InnerException {agg.InnerException.ToString()}");
                source.Cancel();
            }
            WriteColor(ConsoleColor.Cyan, $"Total uploaded {TotalUpdated}, TotalRequested {TotalRequested}");
            WriteColor(ConsoleColor.White, $"Total task runtime: {FL.GenerateSW.Elapsed}. {TotalUpdated/UploadedSW.Elapsed.TotalSeconds} per second");
            ReadyQueue.Dispose();
        }
Example #22
 /// <inheritdoc/>
 void IDisposable.Dispose() => queue.Dispose();
Example #23
 public void Dispose() => _targetCollection.Dispose();
Example #24
 public void Dispose()
 {
     _tasks.CompleteAdding();
     _threads.ForEach(t => t.Join());
     _tasks.Dispose();
 }
Example #25
 public void Dispose()
 {
     _requestsList.CompleteAdding();
     Task.WhenAll(_tasks).Wait();
     _requestsList.Dispose();
 }
Example #26
 public void Close()
 {
     this.resetEvent.Reset();
     netSockets.Dispose();
 }
Example #27
 /// <summary>
 /// Implementation of Dispose
 /// </summary>
 void IDisposable.Dispose()
 {
     _queue.Dispose();
     GC.SuppressFinalize(this);
 }
Example #28
        static void Main(string[] args)
        {
            List <string> dirList  = new List <string>();
            List <string> fileList = new List <string>();
            string        CWD      = FileByFileASC3Decoder.Properties.Settings.Default.ASC3LogsPath;


            foreach (string s in Directory.GetDirectories(CWD))
            {
                dirList.Add(s);
            }



            foreach (string dir in dirList)
            {
                if (FileByFileASC3Decoder.Properties.Settings.Default.WriteToConsole)
                {
                    Console.WriteLine("-----------------------------Starting Signal " + dir);
                }


                //Get the name of the directory as a string.
                //This is the only way the program knows the signal number of the controller.
                string[] strsplit = dir.Split(new char[] { '\\' });
                string   dirname  = strsplit.Last();
                string   sigid    = dirname;


                var files = (Directory.GetFiles(dir, "*.dat"));

                foreach (string s in files)
                {
                    try
                    {
                        FileInfo f = new FileInfo(s);
                        if (f.Name.Contains("INT") || f.Name.Contains("_1970_") || f.Length < 367)
                        {
                            try
                            {
                                File.Delete(s);
                            }
                            catch { }
                            continue;
                        }
                    }
                    catch { }



                    try
                    {
                        var mergedEventsTable = new BlockingCollection <MOE.Common.Data.MOE.Controller_Event_LogRow>();

                        MOE.Common.Business.LogDecoder.ASC3Decoder.DecodeASC3File(s, sigid, mergedEventsTable);


                        using (MOE.Common.Data.MOE.Controller_Event_LogDataTable EventsTable = new MOE.Common.Data.MOE.Controller_Event_LogDataTable())
                        {
                            mergedEventsTable.CopyToDataTable(EventsTable, LoadOption.PreserveChanges);


                            mergedEventsTable.Dispose();

                            string connectionString = FileByFileASC3Decoder.Properties.Settings.Default.SPMConnectionString;
                            string destTable        = FileByFileASC3Decoder.Properties.Settings.Default.DestinationTableNAme;


                            MOE.Common.Business.BulkCopyOptions Options = new MOE.Common.Business.BulkCopyOptions(connectionString, destTable,
                                                                                                                  FileByFileASC3Decoder.Properties.Settings.Default.WriteToConsole, true, 0, FileByFileASC3Decoder.Properties.Settings.Default.DeleteFile,
                                                                                                                  FileByFileASC3Decoder.Properties.Settings.Default.EarliestAcceptableDate, 5000, 30);



                            if (EventsTable.Count > 0)
                            {
                                if (MOE.Common.Business.Signal.BulktoDB(EventsTable, Options) && FileByFileASC3Decoder.Properties.Settings.Default.DeleteFile)
                                {
                                    try
                                    {
                                        File.Delete(s);
                                    }
                                    catch { }
                                }
                            }
                        }
                    }
                    catch { }
                }
            }
        }
Example #29
        private void RunTestMLangMultiThreaded(string dir, int numThreads)
        {
            _numFoundEncodings     = 0;
            _totalFiles            = 0;
            _filesToDetectEncoding = new BlockingCollection <DetectEncodingFileDto>();

            StopWatch stopWatch = new StopWatch();

            stopWatch.Start();

            Console.WriteLine("");
            Console.WriteLine("=====================================================");
            Console.WriteLine("Number of threads = " + numThreads);

            //EncodingTools.PreDetectInputCodepages2();

            var tasks = new List <Task>();

            for (int i = 0; i < numThreads; i++)
            {
                int i1 = i;
                tasks.Add((Task.Factory.StartNew(() => DetectEncodingAsyncAction(i1))));
            }

            int fileCount = 0;

            foreach (string filePath in Directory.EnumerateFiles(dir, "*.*", SearchOption.AllDirectories))
            {
                StopWatch.Start("ReadFileSample");

                var sampleBytes = Utils.ReadFileContentSample(filePath);

                StopWatch.Stop("ReadFileSample");

                StopWatch.Start("IsBinaryFile");

                if (Utils.IsBinaryFile(sampleBytes))
                {
                    StopWatch.Stop("IsBinaryFile");
                    continue;
                }

                StopWatch.Stop("IsBinaryFile");

                fileCount++;

                if (fileCount > 1000)
                {
                    break;
                }

                _filesToDetectEncoding.Add(new DetectEncodingFileDto {
                    FilePath = filePath, SampleBytes = sampleBytes
                });
            }

            _filesToDetectEncoding.CompleteAdding();

            Task.WaitAll(tasks.ToArray());


            //EncodingTools.PostDetectInputCodepages2();

            Console.WriteLine("Found Encoding in:" + _numFoundEncodings + " out of " + _totalFiles);

            stopWatch.Stop();


            StopWatch.PrintCollection(stopWatch.Milliseconds);
            StopWatch.Collection.Clear();

            _filesToDetectEncoding.Dispose();
        }
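DetectEncodingAsyncAction is not shown. Presumably each of the tasks started above drains _filesToDetectEncoding until CompleteAdding is called, which is what lets Task.WaitAll return. A hypothetical sketch (the detection step is a placeholder, not the real MLang call):

        private void DetectEncodingAsyncAction(int taskIndex)
        {
            // Each consumer takes DTOs until CompleteAdding() has been called and
            // the collection is empty.
            foreach (var dto in _filesToDetectEncoding.GetConsumingEnumerable())
            {
                Interlocked.Increment(ref _totalFiles);

                // Placeholder: the real method would run MLang encoding detection
                // on dto.SampleBytes here.
                if (dto.SampleBytes != null && dto.SampleBytes.Length > 0)
                {
                    Interlocked.Increment(ref _numFoundEncodings);
                }
            }
        }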
Example #30
        /// <summary>
        /// Query a hosted Azure Table Service
        /// </summary>
        /// <param name="hashArr"></param>
        public void QueryHashes(HashRecord[] hashArr)
        {
            ParallelOptions po = new ParallelOptions();

            po.MaxDegreeOfParallelism = MaxBatchParallel;

            var name = $"hash{MinHashSize}";

            QueryTable = AccessTable(name);

            ReadyQueue = new BlockingCollection <List <HashRec> >();

            #region CanNotBatchContains

            //var items = (from ha in hashArr
            //              from agg in ha.GetAllRecs()
            //              select agg).ToArray();

            //Parallel.Invoke(() =>
            //{

            //    // group into 100 count batches
            //    batches = new List<HashRec>[256];
            //    for (int i = 0; i < 256; i++)
            //        batches[i] = new List<HashRec>();

            //    List<HashRec> batch = null;

            //    foreach (var item in items)
            //    {
            //        batch = batches[item.FullHash[0]];

            //        bool contains = batch.Any(x => x.FullHash.SequenceEqual(item.FullHash));

            //        if (contains) continue;

            //        batch.Add(item);
            //        if (batch.Count == 100)
            //        {
            //            // signal uploader
            //            ReadyQueue.Add(batch);

            //            // reset batch
            //            batch = new List<HashRec>();
            //            batches[item.FullHash[0]] = batch;
            //        }
            //    }
            //    foreach (var b in batches)
            //    {
            //        if (b.Count < 1)
            //            continue;

            //        ReadyQueue.Add(b);
            //    }
            //    ReadyQueue.CompleteAdding();
            //}, () =>
            //{
            //    var po2 = new ParallelOptions() { MaxDegreeOfParallelism = MaxBatchParallel };
            //    do
            //    {
            //        Parallel.ForEach(ReadyQueue.GetConsumingEnumerable(), po2, (recs) => {
            //            var readIn = BatchBatch(QueryTable, recs, BatchContains);
            //            foreach(var rec in readIn)
            //            {
            //                if(rec.FoundInDB)
            //                {
            //                    for (int i = 0; i < items.Length; i++)
            //                    {
            //                        if (items[i].FullHash == rec.Hash.FullHash)
            //                        {
            //                            items[i].Verified = true;
            //                            break;
            //                        }
            //                    }
            //                }
            //            }
            //        });
            //    } while (!DoneDump || !ReadyQueue.IsCompleted);
            //});

            //foreach (var hr in hashArr)
            //    hr.AssignRecResults(items);
            #endregion
            #region old
            int Count = hashArr.Length;
            var rv    = new List <bool>(Count);
            //for (int i = 0; i < hashArr.Length; i++)
            //{
            Parallel.ForEach(hashArr, po, (hashModule) =>
            {
                //var hashModule = hashArr[i];
                for (int l = 0; l < hashModule.Regions.Count; l++)
                {
                    var hashRegion = hashModule.Regions[l];
                    Parallel.ForEach(hashRegion.InnerList, po, (il) =>
                    {
                        //foreach (var il in hashRegion.InnerList)
                        //{
                        for (int m = 0; m < il.Length; m++)
                        {
                            var check = il[m];

                            if (Contains(QueryTable, new HashEntity(check)))
                            {
                                Interlocked.Increment(ref hashModule.Regions[l].Validated);
                            }
                            else
                            {
                                Interlocked.Increment(ref hashModule.Regions[l].Failed);
                            }

                            Interlocked.Increment(ref hashRegion.Total);
                        }
                        //}
                    });
                }
            });
            #endregion
            ReadyQueue.Dispose();
            return;
        }
Example #31
 protected virtual void Dispose(bool disposing)
 {
     if (!_disposed)
     {
         if (disposing)
         {
             if (_loggingTask != null)
             {
                 if (!(_loggingTask.IsCanceled || _loggingTask.IsCompleted || _loggingTask.IsFaulted))
                 {
                     try
                     {
                         CompleteSubscriberTask();
                     }
                     catch (Exception ex)
                     {
                         LogLog.Error(ThisType, "Exception Completing Subscriber Task in Dispose Method", ex);
                     }
                 }
                 try
                 {
                     _loggingTask.Dispose();
                 }
                 catch (Exception ex)
                 {
                     LogLog.Error(ThisType, "Exception Disposing Logging Task", ex);
                 }
                 finally
                 {
                     _loggingTask = null;
                 }
             }
             if (_loggingEvents != null)
             {
                 try
                 {
                     _loggingEvents.Dispose();
                 }
                 catch (Exception ex)
                 {
                     LogLog.Error(ThisType, "Exception Disposing BlockingCollection", ex);
                 }
                 finally
                 {
                     _loggingEvents = null;
                 }
             }
             if (_loggingCancelationTokenSource != null)
             {
                 try
                 {
                     _loggingCancelationTokenSource.Dispose();
                 }
                 catch (Exception ex)
                 {
                     LogLog.Error(ThisType, "Exception Disposing CancellationTokenSource", ex);
                 }
                 finally
                 {
                     _loggingCancelationTokenSource = null;
                 }
             }
         }
         _disposed = true;
     }
 }
Example #32
 public void Dispose()
 {
     WaitAll();
     _queue.Dispose();
 }
Example #33
        public void Build(IEnumerable<ISequence> sequences)
        {
            // Size of Kmer List to grab, somewhat arbitrary but want to keep list size below large object threshold, which is ~85 kb 
            const int blockSize = 4096;

            // When to add list to blocking collection, most short reads are <=151 bp so this should avoid needing to grow the list
            const int addThreshold = blockSize - 151;

            // When to pause adding
            const int stopAddThreshold = 2000000 / blockSize;

            if (sequences == null)
                throw new ArgumentNullException("sequences");

            if (KmerLength > KmerData32.MAX_KMER_LENGTH)
                throw new ArgumentException(Properties.Resource.KmerLengthGreaterThan31);

            // A dictionary mapping kmers to de Bruijn nodes
            KmerDictionary kmerManager = new KmerDictionary();

            // Create the producer thread.
            var kmerDataCollection = new BlockingCollection<List<KmerData32>>();
            Task producer = Task.Factory.StartNew(() =>
            {
                try
                {
                    List<KmerData32> kmerList = new List<KmerData32>(blockSize);

                    IAlphabet alphabet = Alphabets.DNA;
                    HashSet<byte> gapSymbols;
                    alphabet.TryGetGapSymbols(out gapSymbols);

                    // Generate the kmers from the sequences
                    foreach (ISequence sequence in sequences)
                    {
                        // if the sequence alphabet is not of type DNA then ignore it.
                        bool skipSequence = false;
                        if (sequence.Alphabet != Alphabets.DNA)
                        {
                            skipSequence = true;
                        }
                        else
                        {
                            // if the sequence contains any gap symbols then ignore the sequence.
                            foreach (byte symbol in gapSymbols)
                            {
                                for (long index = 0; index < sequence.Count; ++index)
                                {
                                    if (sequence[index] == symbol)
                                    {
                                        skipSequence = true;
                                        break;
                                    }
                                }

                                if (skipSequence)
                                    break;
                            }
                        }

                        if (skipSequence)
                        {
                            Interlocked.Increment(ref _skippedSequencesCount);
                            Interlocked.Increment(ref _processedSequencesCount);
                            continue;
                        }

                        // If the blocking collection holds more than ~2 million kmers, wait 2 seconds
                        // so that the consumer task can remove some kmers and create the nodes.
                        // This avoids an OutOfMemoryException.
                        while (kmerDataCollection.Count > stopAddThreshold)
                        {
                            Task.Delay(TimeSpan.FromSeconds(2)).Wait();
                        }

                        // Convert sequences to k-mers
                        kmerList.AddRange(KmerData32.GetKmers(sequence, KmerLength));

                        // Most reads are <=150 basepairs, so this should avoid having to grow the list
                        // by keeping it below blockSize
                        if (kmerList.Count > addThreshold)
                        {
                            kmerDataCollection.Add(kmerList);
                            kmerList = new List<KmerData32>(4092);
                        }
                        Interlocked.Increment(ref _processedSequencesCount);
                    }

                    if (kmerList.Count <= addThreshold)
                        kmerDataCollection.Add(kmerList);
                }
                finally
                {
                    kmerDataCollection.CompleteAdding();
                }
            });

            // Consume k-mers by adding them to the binary tree structure as nodes
            Parallel.ForEach(kmerDataCollection.GetConsumingEnumerable(), newKmerList =>
            {
                foreach (KmerData32 newKmer in newKmerList)
                {
                    // Create Vertex
                    DeBruijnNode node = kmerManager.SetNewOrGetOld(newKmer);

                    // Need to lock node if doing this in parallel
                    if (node.KmerCount <= 255)
                    {
                        lock (node)
                        {
                            node.KmerCount++;
                        }
                    }
                }
            });

            // Ensure producer exceptions are handled.
            producer.Wait();

            // Done filling binary tree
            kmerDataCollection.Dispose();

            //NOTE: To speed enumeration make the nodes into an array and dispose of the collection
            _nodeCount = kmerManager.NodeCount;
            _nodes = kmerManager.GenerateNodeArray();
            
            // Generate the links
            GenerateLinks(kmerManager);
            
            // Since we no longer need to search for values, set the left and right nodes of the child array
            // to null so that they are available for GC if no longer needed
            foreach (DeBruijnNode node in _nodes)
            {
                node.Left = node.Right = null;
            }

            GraphBuildCompleted = true;
        }
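The throttling loop above (polling kmerDataCollection.Count and sleeping two seconds) can also be expressed with the bounded-capacity constructor, which makes Add block once the buffer is full. A sketch only, not the library's actual implementation:

            // Sketch: with a bound, Add() blocks automatically when 'stopAddThreshold'
            // batches are already queued, so the producer needs no Count check or
            // Task.Delay loop; the consumer side is unchanged.
            var kmerDataCollection =
                new BlockingCollection<List<KmerData32>>(boundedCapacity: stopAddThreshold);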