Code example #1
File: Builder.cs Project: cmgross/WebFormsTPL
        public static List<Builder> LoadBuildersWithTasks(int numberOfBuilders)
        {
            BlockingCollection<Builder> buildersToLoad = new BlockingCollection<Builder>();
            BlockingCollection<Builder> loadedBuilders = new BlockingCollection<Builder>();

            for (int i = 0; i < numberOfBuilders; i++)
            {
                buildersToLoad.Add(new Builder { Name = "Builder" + i, Status = "Status" + i });
            }
            buildersToLoad.CompleteAdding();

            Task loader1 = Task.Factory.StartNew(() =>
                {
                    foreach (Builder item in buildersToLoad.GetConsumingEnumerable())
                    {
                        Thread.Sleep(1000);
                        loadedBuilders.Add(item);
                    }
                }, TaskCreationOptions.LongRunning);

            Task loader2 = Task.Factory.StartNew(() =>
            {
                foreach (Builder item in buildersToLoad.GetConsumingEnumerable())
                {
                    Thread.Sleep(1000);
                    loadedBuilders.Add(item);
                }
            }, TaskCreationOptions.LongRunning);

            Task.WaitAll(loader1, loader2);
            return loadedBuilders.ToList();
        }
Code example #2
File: Program.cs Project: jbijoux/Exam70_483
        static void Main(string[] args)
        {
            BlockingCollection<string> col = new BlockingCollection<string>();
            Task read = Task.Run(() =>
            {
                foreach (var v in col.GetConsumingEnumerable())
                {
                    Console.WriteLine(v);
                }
            });
            Console.WriteLine("Enter white space to break");
            Task write = Task.Run(() =>
            {
                while (true)
                {
                    string s = Console.ReadLine();
                    if (string.IsNullOrWhiteSpace(s))
                    {
                        break;
                    }

                    col.Add(s);
                }
            });
            write.Wait();

            Console.Write("Press a key to exit");
            Console.ReadKey();
        }
Code example #3
        /// <summary>Initializes a new instance of the StaTaskScheduler class with the specified concurrency level.</summary>
        /// <param name="numberOfThreads">The number of threads that should be created and used by this scheduler.</param>
        public StaTaskScheduler(int numberOfThreads)
        {
            // Validate arguments
            if (numberOfThreads < 1)
                throw new ArgumentOutOfRangeException("numberOfThreads");

            // Initialize the tasks collection
            _tasks = new BlockingCollection<Task>();

            // Create the threads to be used by this scheduler
            _threads = Enumerable.Range(0, numberOfThreads).Select(i =>
            {
                var thread = new Thread(() =>
                {
                    // Continually get the next task and try to execute it.
                    // This will continue until the scheduler is disposed and no more tasks remain.
                    foreach (var t in _tasks.GetConsumingEnumerable())
                    {
                        if (!TryExecuteTask(t))
                        {
                            System.Diagnostics.Debug.Assert(t.IsCompleted, "Can't run, not completed");
                        }
                    }
                });
                thread.IsBackground = true;
                thread.SetApartmentState(ApartmentState.STA);
                return thread;
            }).ToImmutableArray();

            // Start all of the threads
            foreach (var thread in _threads)
            {
                thread.Start();
            }
        }
Code example #4
File: Program.cs Project: Willamar/ExamRef-70-483
        static void Main(string[] args)
        {
            BlockingCollection<String> col = new BlockingCollection<string>();

            Task read = Task.Run(() =>
            {
                foreach (var item in col.GetConsumingEnumerable())
                {
                    Console.WriteLine(item);
                }

            });

            Task write = Task.Run(() =>
            {
                while (true)
                {
                    string s = Console.ReadLine();
                    if (string.IsNullOrWhiteSpace(s))
                    {
                        break;
                    }
                    col.Add(s);
                }
            });

            write.Wait();
        }
Code example #5
File: StaTaskScheduler.cs Project: heinzsack/DEV
		public StaTaskScheduler(int numberOfThreads)

			{
			if (numberOfThreads < 1)
				throw new ArgumentOutOfRangeException("numberOfThreads");

			_tasks = new BlockingCollection<Task>();
			_threads = Enumerable.Range(0, numberOfThreads).Select(i =>
			{
				var thread = new Thread(() =>
				{
					foreach (var t in
						_tasks.GetConsumingEnumerable())
						{
						TryExecuteTask(t);
						}
				})
					{
					IsBackground = true
					};
				thread.SetApartmentState(ApartmentState.STA);
				return thread;
			}).ToList();

			_threads.ForEach(t => t.Start());
			}
Code example #6
        public static void Run()
        {
            int size = 10;
            BlockingCollection<int> col = new BlockingCollection<int>(size/3);

            Task read = Task.Run(() =>
            {
                foreach(var item in col.GetConsumingEnumerable())
                {
                    Console.WriteLine("Read " + item);
                }
            });

            Task write = Task.Run(() =>
            {
                foreach(int i in Enumerable.Range(1, size))
                {
                    Console.WriteLine("adding " + i);
                    col.Add(i);
                }

                col.CompleteAdding();
            });

            write.Wait();
            read.Wait();
        }
Code example #7
File: Program.cs Project: vmp/CSharpExamples
        public static void Main(string[] args)
        {
            Console.WriteLine("Hello World!");

            BlockingCollection<String> col = new BlockingCollection<string>();
            Task read = Task.Run(() =>
            {
                foreach (String v in col.GetConsumingEnumerable())
                    Console.WriteLine(v);
            });

            Task write = Task.Run(() =>
            {
                while (true)
                {
                    String s = Console.ReadLine();
                    if (String.IsNullOrWhiteSpace(s)) break;
                    col.Add(s);
                }
            });

            write.Wait();

            Console.Write("Press any key to continue . . . ");
            Console.ReadKey(true);
        }
Code example #8
        public static void Main(string[] args)
        {
            BlockingCollection<string> collection = new BlockingCollection<string>();

            Task read = Task.Run(() =>
            {
                foreach (string v in collection.GetConsumingEnumerable())
                {
                    Console.WriteLine(v);
                }
            });

            Task write = Task.Run(() =>
            {
                while (true)
                {
                    string s = Console.ReadLine();

                    if (string.IsNullOrWhiteSpace(s))
                    {
                        collection.CompleteAdding();
                        break;
                    }

                    collection.Add(s);
                }
            });

            write.Wait();
        }
Code example #9
        public override IEnumerable<Row> Execute(IEnumerable<Row> rows) {
            var blockingCollection = new BlockingCollection<Row>();
            var count = _operations.Count;
            if (count == 0) {
                yield break;
            }

            Debug("Creating tasks for {0} operations.", count);

            var tasks = _operations.Select(currentOp =>
            Task.Factory.StartNew(() => {
                try {
                    foreach (var row in currentOp.Execute(null)) {
                        blockingCollection.Add(row);
                    }
                }
                finally {
                    if (Interlocked.Decrement(ref count) == 0) {
                        blockingCollection.CompleteAdding();
                    }
                }
            })).ToArray();

            foreach (var row in blockingCollection.GetConsumingEnumerable()) {
                yield return row;
            }
            Task.WaitAll(tasks); // surface any exceptions that were raised during execution
        }
Code example #10
        protected override void EndProcessing()
        {
            SevenZipBase.SetLibraryPath(Utils.SevenZipLibraryPath);

            var queue = new BlockingCollection<object>();
            var worker = CreateWorker();
            worker.Queue = queue;

            _thread = StartBackgroundThread(worker);

            foreach (var o in queue.GetConsumingEnumerable()) {
                var record = o as ProgressRecord;
                var errorRecord = o as ErrorRecord;
                if (record != null) {
                    WriteProgress(record);
                } else if (errorRecord != null) {
                    WriteError(errorRecord);
                } else if (o is string) {
                    WriteVerbose((string) o);
                } else {
                    WriteObject(o);
                }
            }

            _thread.Join();
        }
Code example #11
		/// <summary>
		/// Consumes skeletons from the input stream and fires the recognizer's results through the supplied callback.
		/// </summary>
		/// <param name="input">Input skeleton stream.</param>
		/// <param name="fireNewCommand">Callback invoked with the recognition results.</param>
        public void Do(BlockingCollection<ISkeleton> input, Action<IEnumerable<Result>> fireNewCommand)
        {
            var data = new Dictionary<JointType, InputVector>();
            foreach (var skeleton in input.GetConsumingEnumerable())
            {
                foreach (var joint in skeleton.Joints)
                {
                    if (!data.ContainsKey(joint.JointType))
                    {
                        data.Add(joint.JointType, new InputVector());
                    }
                    data[joint.JointType].Stream.Add(joint.Point);
                }
                if (data.First().Value.Stream.Count < 5)
                {
                    continue;
                }
                var results = Recognizer.Recognize(data);
                try
                {
                    fireNewCommand(results);
                }
                catch (Exception)
                {
                    if (data.First().Value.Stream.Count > 40)
                    {
                        data.Clear();
                    }
                    continue;
                }
                data.Clear();
            }
        }
Code example #12
        /// <summary>Initializes a new instance of the MTATaskScheduler class with the specified concurrency level.</summary>
        /// <param name="numberOfThreads">The number of threads that should be created and used by this scheduler.</param>
        /// <param name="nameFormat">The template name form to use to name threads.</param>
        public MTATaskScheduler(int numberOfThreads, string nameFormat)
        {
            // Validate arguments
            if (numberOfThreads < 1) throw new ArgumentOutOfRangeException("numberOfThreads");

            // Initialize the tasks collection
            tasks = new BlockingCollection<Task>();

            // Create the threads to be used by this scheduler
            _threads = Enumerable.Range(0, numberOfThreads).Select(i =>
                       {
                           var thread = new Thread(() =>
                           {
                               // Continually get the next task and try to execute it.
                               // This will continue until the scheduler is disposed and no more tasks remain.
                               foreach (var t in tasks.GetConsumingEnumerable())
                               {
                                   TryExecuteTask(t);
                               }
                           })
                           {
                               IsBackground = true
                           };
                           thread.SetApartmentState(ApartmentState.MTA);
                           thread.Name = String.Format("{0} - {1}", nameFormat, thread.ManagedThreadId);
                           return thread;
                       }).ToList();

            // Start all of the threads
            _threads.ForEach(t => t.Start());
        }
Code example #13
        public void Run()
        {
            BlockingCollection<string> col = new BlockingCollection<string>();
            Task read = Task.Run(() =>
            {
                foreach (string v in col.GetConsumingEnumerable())
                    Console.WriteLine(v);

                Console.WriteLine("End of read task.");
            });

            Task write = Task.Run(() =>
            {
                while (true)
                {
                    string s = Console.ReadLine();
                    if (string.IsNullOrWhiteSpace(s))
                    {
                        col.CompleteAdding();
                        break;
                    }
                    col.Add(s);
                }
            });
            write.Wait();
            Thread.Sleep(1000);
        }
Code example #14
		/// <summary>
		/// Averages every three skeletons from the input and writes the smoothed skeleton to the output.
		/// </summary>
		/// <param name="input">Input skeleton stream.</param>
		/// <param name="output">Output skeleton stream.</param>
		public void Do(BlockingCollection<ISkeleton> input, BlockingCollection<ISkeleton> output)
        {
            var skeletons = new List<ISkeleton>();
            try
			{
			    foreach (var skeleton in input.GetConsumingEnumerable())
			    {
                    skeletons.Add(skeleton);
			        if (skeletons.Count < 3)
			        {
			            continue;
			        }
                    var first = skeletons.First();
                    var tail = skeletons.Skip(1);
                    foreach (var joint in first.Joints)
                    {
                        var tailJoints = tail.Select(s => s.GetJoint(joint.JointType));
                        joint.Point = Mean(new List<Vector3> { joint.Point }.Concat(tailJoints.Select(j => j.Point)).ToList());
                        joint.Orientation = Mean(new List<Vector4> { joint.Orientation }.Concat(tailJoints.Select(j => j.Orientation)).ToList());
                        first.UpdateSkeleton(joint.JointType, joint);
                    }
                    output.Add(first);
                    skeletons.Clear();
                }
			}
			finally
			{
				output.CompleteAdding();
			}
		}
Code example #15
File: StaTaskScheduler.cs Project: bevacqua/Swarm
        /// <summary>Initializes a new instance of the StaTaskScheduler class with the specified concurrency level.</summary>
        /// <param name="numberOfThreads">The number of threads that should be created and used by this scheduler.</param>
        public StaTaskScheduler(int numberOfThreads)
        {
            // Validate arguments
            if (numberOfThreads < 1) throw new ArgumentOutOfRangeException("numberOfThreads");

            // Initialize the tasks collection
            _tasks = new BlockingCollection<Task>();

            // Create the threads to be used by this scheduler
            _threads = Enumerable.Range(0, numberOfThreads).Select(i =>
                       {
                           var thread = new Thread(() =>
                           {
                               // Continually get the next task and try to execute it.
                               // This will continue until the scheduler is disposed and no more tasks remain.
                               foreach (var t in _tasks.GetConsumingEnumerable())
                               {
                                   TryExecuteTask(t);
                               }
                           });
                           thread.IsBackground = true;
                           thread.SetApartmentState(ApartmentState.STA);
                           return thread;
                       }).ToList();

            // Start all of the threads
            _threads.ForEach(t => t.Start());
        }
Code example #16
File: Form1.cs Project: HimasRe/Text_DataBase
        private void ReadDT()
        {
            BlockingCollection<string> lines = new BlockingCollection<string>();
            var stage1 = Task.Run(() =>
            {
                    using (StreamReader sr = new StreamReader("text.txt"))
                    {
                        string s;
                        while ((s = sr.ReadLine()) != null)
                            lines.Add(s);
                    }
                lines.CompleteAdding();
            });

            var stage2 = Task.Run(() =>
            {
                int i = 0;
                dataGridView1.Invoke((Action)(() => dataGridView1.SuspendLayout()));
                foreach (string line in lines.GetConsumingEnumerable())
                {
                    dataGridView1.Invoke((Action)(() => dataGridView1.Rows.Add(line.Split(';'))));
                    dataGridView1.Invoke((Action)(() => dataGridView1.Rows[i].HeaderCell.Value = i.ToString()));
                    i++;
                }
                dataGridView1.Invoke((Action)(() => dataGridView1.ResumeLayout(false)));
            });
            Task.WaitAll(stage1, stage2);
        }
Code example #17
        protected override void EndProcessing()
        {
            var libraryPath = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(GetType().Assembly.Location), Environment.Is64BitProcess ? "7z64.dll" : "7z.dll");
            SevenZipBase.SetLibraryPath(libraryPath);

            var queue = new BlockingCollection<object>();
            var worker = CreateWorker();
            worker.Queue = queue;

            _thread = StartBackgroundThread(worker);

            foreach (var o in queue.GetConsumingEnumerable()) {
                var record = o as ProgressRecord;
                var errorRecord = o as ErrorRecord;
                if (record != null) {
                    WriteProgress(record);
                } else if (errorRecord != null) {
                    WriteError(errorRecord);
                } else {
                    WriteObject(o);
                }
            }

            _thread.Join();
        }
Code example #18
        public void when_consuming_enumerable_then_succeeds()
        {
            var bytes = new BlockingCollection<byte>();

            var incoming = bytes.GetConsumingEnumerable().ToObservable(TaskPoolScheduler.Default);

            var messages = from header in incoming.Buffer(4)
                           let length = BitConverter.ToInt32(header.ToArray(), 0)
                           let body = incoming.Take(length)
                           select Encoding.UTF8.GetString(body.ToEnumerable().ToArray());

            messages.Subscribe(s => Console.Write(s));

            var message = "hello";

            BitConverter.GetBytes(message.Length).Concat(Encoding.UTF8.GetBytes(message)).ToList().ForEach(b => bytes.Add(b));

            message = "world";

            BitConverter.GetBytes(message.Length).Concat(Encoding.UTF8.GetBytes(message)).ToList().ForEach(b => bytes.Add(b));

            Thread.Sleep(2000);

            Console.WriteLine(bytes.Count);
        }
Code example #19
        private void StartSenderQueueConsumer()
        {
            if (_disposed)
            {
                throw new AblyException($"Attempting to start sender queue consumer when {typeof(MsWebSocketConnection)} has been disposed is not allowed.");
            }

            Task.Run(
                async () =>
            {
                try
                {
                    if (_sendQueue != null)
                    {
                        foreach (var tuple in _sendQueue?.GetConsumingEnumerable(_tokenSource.Token))
                        {
                            await Send(tuple.Item1, tuple.Item2, _tokenSource.Token);
                        }
                    }
                }
                catch (OperationCanceledException e)
                {
                    if (Logger != null && Logger.IsDebug)
                    {
                        Logger.Debug(
                            _disposed ? $"{typeof(MsWebSocketConnection)} has been Disposed, WebSocket send operation cancelled." : "WebSocket Send operation cancelled.",
                            e);
                    }
                }
                catch (Exception e)
                {
                    Logger?.Error("Error Sending to WebSocket", e);
                }
            }, _tokenSource.Token).ConfigureAwait(false);
        }
Code example #20
		public int ConsumingEnumerablexample ()
		{

			var collection = new BlockingCollection<int> ();             

			var taker = Task<int>.Run (() => {                     
				var take = 0;
				foreach (var aTake in collection.GetConsumingEnumerable()) {
					take = aTake;
				}

				return take;
			});

			var adder = Task.Run (() => {

				for (int x = 0; x <= 10; x++) {
					collection.Add (x);
					System.Threading.Thread.Sleep (1);
				}
				collection.CompleteAdding ();
			});

			Task.WaitAll (taker, adder);
			return taker.Result;
		}
Code example #21
    static void UseBlockingCollection()
    {
        var count = 0;
            const int countMax = 10;
            var blockingCollection = new BlockingCollection<string>();

            var producer1 = Task.Factory.StartNew(() =>
            {
                while (count <= countMax)
                {
                    blockingCollection.Add("value" + count);
                    count++;
                }
                blockingCollection.CompleteAdding();
            });

            var producer2 = Task.Factory.StartNew(() =>
            {
                while (count <= countMax)
                {
                    blockingCollection.Add("value" + count);
                    count++;
                }
                blockingCollection.CompleteAdding();
            });

            var consumer1 = Task.Factory.StartNew(() =>
            {
                foreach (var value in blockingCollection.GetConsumingEnumerable())
                {
                    Console.WriteLine("Worker 1: " + value);
                    Thread.Sleep(1000);
                }
            });

            var consumer2 = Task.Factory.StartNew(() =>
            {
                foreach (var value in blockingCollection.GetConsumingEnumerable())
                {
                    Console.WriteLine("Worker 1: " + value);
                    Thread.Sleep(1000);
                }
            });

            Task.WaitAll(producer1, producer2, consumer1, consumer2);
    }
Code example #22
File: Curl.cs Project: CatmanIX/Shades
 private void CurlServicer()
 {
     curlRequests = new BlockingCollection<CurlRequest>();
     foreach (var curlRequest in curlRequests.GetConsumingEnumerable())
     {
         PerformCurl(curlRequest);
     }
 }
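The loop above only consumes; nothing in this method produces. A minimal sketch of the producer side, assuming a helper like the following exists on the same class (it is not part of the original snippet):

 // Hypothetical enqueue helper (an assumption, not shown in the project code):
 // other threads add requests to the same curlRequests field, and the servicer
 // loop above processes them in order. Calling curlRequests.CompleteAdding()
 // would let that loop finish.
 public void EnqueueCurlRequest(CurlRequest request)
 {
     curlRequests.Add(request);
 }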
Code example #23
        /// <summary>
        /// Protected constructor used by <see cref="ReactiveClient"/> 
        /// client.
        /// </summary>
        protected internal ReactiveSocket(int maximumBufferSize)
        {
            if (maximumBufferSize <= 0)
                throw new ArgumentOutOfRangeException("maximumBufferSize", "Maximum buffer size must be greater than 0");

            received = new BlockingCollection<byte>(maximumBufferSize);
            receiver = received.GetConsumingEnumerable().ToObservable(TaskPoolScheduler.Default)
                .TakeUntil(receiverTermination);
        }
Code example #24
File: BlobUploader.cs Project: MartinBG/Gva
        public void StartUploading(
            //intput
            BlockingCollection<Tuple<int, MemoryStream>> blobContents,
            RateLimiter rateLimiter,
            //output
            ConcurrentDictionary<int, string> blobIdsToFileKeys,
            BlockingCollection<long> uploadedBytes,
            //cancellation
            CancellationTokenSource cts,
            CancellationToken ct)
        {
            try
            {
                this.sqlConn.Open();
            }
            catch (Exception)
            {
                cts.Cancel();
                throw;
            }

            foreach (var blobContent in blobContents.GetConsumingEnumerable())
            {
                ct.ThrowIfCancellationRequested();

                var blobId = blobContent.Item1;
                var content = blobContent.Item2;

                try
                {
                    using (var blobWriter = new BlobWriter(sqlConn))
                    {
                        long length;
                        using (var stream = blobWriter.OpenStream())
                        using (content)
                        {
                            length = content.Length;
                            content.CopyTo(stream);
                        }

                        if (!blobIdsToFileKeys.TryAdd(blobId, blobWriter.GetBlobKey().ToString()))
                        {
                            throw new Exception("blobId already present in dictionary");
                        }

                        rateLimiter.Decrement(length);
                        uploadedBytes.Add(length);
                    }
                }
                catch (Exception)
                {
                    cts.Cancel();
                    throw;
                }
            }
        }
Code example #25
		static async Task TaskProcessor(
			BlockingCollection<CustomTask> collection, string name)
		{
			await GetRandomDelay();
			foreach (CustomTask item in collection.GetConsumingEnumerable())
			{
				Console.WriteLine("Task {0} has been processed by {1}", item.Id, name);
				await GetRandomDelay();
			}
		}
Code example #26
 public static void StartSink(BlockingCollection<WorkItem> sinkQueue)
 {
     Task.Factory.StartNew(() =>
     {
         foreach (var workItem in sinkQueue.GetConsumingEnumerable())
         {
             Console.WriteLine("Processed Messsage: {0}", workItem.Text);
         }
     }, TaskCreationOptions.LongRunning);
 }
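A short usage sketch, assuming WorkItem exposes the settable Text property the snippet prints (the type itself is not shown here): the sink task keeps printing until the producer marks the queue complete.

 var sinkQueue = new BlockingCollection<WorkItem>();
 StartSink(sinkQueue);
 sinkQueue.Add(new WorkItem { Text = "hello" });   // Text is assumed to be settable
 sinkQueue.Add(new WorkItem { Text = "world" });
 sinkQueue.CompleteAdding(); // lets GetConsumingEnumerable end and the sink task finish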
Code example #27
 public static Task StartSink(BlockingCollection<PageResultMessage> sinkQueue)
 {
     return Task.Factory.StartNew(() =>
     {
         foreach (var workItem in sinkQueue.GetConsumingEnumerable())
         {
             Console.WriteLine("Thread:\t{3}, Time:\t{0},\tSize {1},\tUrl {2}", 
                 workItem.Milliseconds, workItem.Size, workItem.Url, workItem.ThreadId);
         }
     }, TaskCreationOptions.LongRunning);
 }
Code example #28
        private IEnumerable<KafkaRecord> PollForChanges(long index)
        {
            if (index <= 0) index = DateTime.UtcNow.AddYears(-1).Ticks;

            _dataQueue = new BlockingCollection<KafkaRecord>(100000);

            Task.Factory.StartNew(() => PopulateData(index, _dataQueue), CancellationToken.None,
                TaskCreationOptions.LongRunning, TaskScheduler.Default);

            return _dataQueue.GetConsumingEnumerable();
        }
Code example #29
 static Decoupled_Console()
 {
     // create the blocking collection
     blockingQueue = new BlockingCollection<Action>();
     // create and start the worker task
     messageWorker = Task.Factory.StartNew(() => {
         foreach (Action action in blockingQueue.GetConsumingEnumerable()) {
             // invoke the action
             action.Invoke();
         }
     }, TaskCreationOptions.LongRunning);
 }
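The static constructor above only sets up the worker. A plausible companion member (an assumption, not shown in the snippet) posts console work to the queue so callers never write to the console directly:

 // Hypothetical helper on the same class: all output is funneled through the
 // single message worker task, so writes from many threads come out in order.
 public static void WriteLine(string message)
 {
     blockingQueue.Add(() => Console.WriteLine(message));
 }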
Code example #30
        protected virtual void CrawlSite()
        {
            // Create a scheduler that uses two threads. 
            LimitedConcurrencyLevelTaskScheduler lcts = new LimitedConcurrencyLevelTaskScheduler(2);//TODO get this from config and set in constructor
            //List<Task> tasks = new List<Task>();

            // Create a TaskFactory and pass it our custom scheduler. 
            TaskFactory factory = new TaskFactory(lcts);

            Object lockObj = new Object();

            BlockingCollection<PageToCrawl> pagesToCrawl = new BlockingCollection<PageToCrawl>();

            foreach (PageToCrawl page in pagesToCrawl.GetConsumingEnumerable())
            {
                Task t = factory.StartNew(() =>
                {
                    ProcessPage(_scheduler.GetNext());
                }, _crawlContext.CancellationTokenSource.Token);
                
                //TODO Add t to some collection that keeps track of only x number
            }

            while (!_crawlComplete)
            {
                RunPreWorkChecks();

                // Use our factory to run a set of tasks. 
                int outputItem = 0;

                   //tasks.Add(t);

                // Wait for the tasks to complete before displaying a completion message.
                //Task.WaitAll(tasks.ToArray());
                //Console.WriteLine("\n\nSuccessful completion.");


                //if (_scheduler.Count > 0)
                //{
                //    _threadManager.DoWork(() => ProcessPage(_scheduler.GetNext()));
                    
                //}
                //else if (!_threadManager.HasRunningThreads())
                //{
                //    _crawlComplete = true;
                //}
                //else
                //{
                //    _logger.DebugFormat("Waiting for links to be scheduled...");
                //    Thread.Sleep(2500);
                //}
            }
        }
Code example #31
File: TransferStream.cs Project: bitthicket/Git.NET
 public TransferStream(Stream writeableStream)
 {
     // TODO validate arguments
     m_WriteableStream = writeableStream;
     m_Blocks = new BlockingCollection<byte[]>();
     m_ProcessingTask = Task.Factory.StartNew(() =>
         {
             foreach (var block in m_Blocks.GetConsumingEnumerable())
             {
                 m_WriteableStream.Write(block, 0, block.Length);
             }
         }, TaskCreationOptions.PreferFairness);
 }
Code example #32
 public override IEnumerable<T> GetDynamicPartitions()
 {
     return _collection.GetConsumingEnumerable();
 }
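This override is only a fragment. Below is a minimal sketch of the kind of partitioner class it typically belongs to (class and field names are assumptions, not the original project's): wrapping a BlockingCollection in a Partitioner<T> lets Parallel.ForEach or PLINQ pull items on demand through GetConsumingEnumerable instead of snapshotting the collection first.

 // Sketch of an assumed enclosing partitioner; requires System, System.Linq,
 // System.Collections.Generic and System.Collections.Concurrent.
 public class BlockingCollectionPartitioner<T> : Partitioner<T>
 {
     private readonly BlockingCollection<T> _collection;

     public BlockingCollectionPartitioner(BlockingCollection<T> collection)
     {
         _collection = collection ?? throw new ArgumentNullException(nameof(collection));
     }

     // Dynamic partitions are required so each worker takes items as they arrive.
     public override bool SupportsDynamicPartitions => true;

     public override IList<IEnumerator<T>> GetPartitions(int partitionCount)
     {
         // Give every requested partition an enumerator over the same consuming stream.
         var dynamicPartitions = GetDynamicPartitions();
         return Enumerable.Range(0, partitionCount)
                          .Select(_ => dynamicPartitions.GetEnumerator())
                          .ToList();
     }

     public override IEnumerable<T> GetDynamicPartitions()
     {
         return _collection.GetConsumingEnumerable();
     }
 }

With such a wrapper, Parallel.ForEach(new BlockingCollectionPartitioner<int>(col), item => ...) starts working on items as soon as a producer adds them.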
Code example #33
    private static async Task InitializeInstance(ITestOutputHelper output)
    {
        var port = FindAvailablePort();
        var uri  = new UriBuilder("http", "localhost", port, "/wd/hub").Uri;

        var seleniumConfigPath = typeof(SeleniumStandaloneServer).Assembly
                                 .GetCustomAttributes <AssemblyMetadataAttribute>()
                                 .FirstOrDefault(k => k.Key == "Microsoft.AspNetCore.Testing.SeleniumConfigPath")
                                 ?.Value;

        if (seleniumConfigPath == null)
        {
            throw new InvalidOperationException("Selenium config path not configured. Does this project import the E2ETesting.targets?");
        }

        // In AzDO, the path to the system chromedriver is in an env var called CHROMEWEBDRIVER
        // We want to use this because it should match the installed browser version
        // If the env var is not set, then we fall back on using whatever is in the Selenium config file
        var chromeDriverArg        = string.Empty;
        var chromeDriverPathEnvVar = Environment.GetEnvironmentVariable("CHROMEWEBDRIVER");

        if (!string.IsNullOrEmpty(chromeDriverPathEnvVar))
        {
            chromeDriverArg = $"--javaArgs=-Dwebdriver.chrome.driver={chromeDriverPathEnvVar}/chromedriver";
            output.WriteLine($"Using chromedriver at path {chromeDriverPathEnvVar}");
        }

        var psi = new ProcessStartInfo
        {
            FileName  = "npm",
            Arguments = $"run selenium-standalone start -- --config \"{seleniumConfigPath}\" {chromeDriverArg} -- -port {port}",
            RedirectStandardOutput = true,
            RedirectStandardError  = true,
        };

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            psi.FileName  = "cmd";
            psi.Arguments = $"/c npm {psi.Arguments}";
        }

        // It's important that we get the folder value before we start the process to prevent
        // untracked processes when the tracking folder is not correctly configure.
        var trackingFolder = GetProcessTrackingFolder();

        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("helix")))
        {
            // Just create a random tracking folder on helix
            trackingFolder = Path.Combine(Directory.GetCurrentDirectory(), Path.GetRandomFileName());
            Directory.CreateDirectory(trackingFolder);
        }

        if (!Directory.Exists(trackingFolder))
        {
            throw new InvalidOperationException($"Invalid tracking folder. Set the 'SeleniumProcessTrackingFolder' MSBuild property to a valid folder.");
        }

        Process process     = null;
        Process sentinel    = null;
        string  pidFilePath = null;

        try
        {
            process     = Process.Start(psi);
            pidFilePath = await WriteTrackingFileAsync(output, trackingFolder, process);

            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                sentinel = StartSentinelProcess(process, pidFilePath, SeleniumProcessTimeout);
            }
        }
        catch
        {
            ProcessCleanup(process, pidFilePath);

            if (sentinel is not null)
            {
                ProcessCleanup(sentinel, pidFilePath: null);
            }

            throw;
        }

        // Log output for selenium standalone process.
        // This is for the case where the server fails to launch.
        var logOutput = new BlockingCollection <string>();

        process.OutputDataReceived += LogOutput;
        process.ErrorDataReceived  += LogOutput;

        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        // The Selenium sever has to be up for the entirety of the tests and is only shutdown when the application (i.e. the test) exits.
        // AppDomain.CurrentDomain.ProcessExit += (sender, args) => ProcessCleanup(process, pidFilePath);

        // Log
        void LogOutput(object sender, DataReceivedEventArgs e)
        {
            try
            {
                logOutput.TryAdd(e.Data);
            }
            catch (Exception)
            {
            }

            // We avoid logging on the output here because it is unreliable. We can only log in the diagnostics sink.
            lock (_diagnosticsMessageSink)
            {
                _diagnosticsMessageSink.OnMessage(new DiagnosticMessage(e.Data));
            }
        }

        var httpClient = new HttpClient
        {
            Timeout = TimeSpan.FromSeconds(1),
        };

        var retries = 0;

        do
        {
            await Task.Delay(1000);

            try
            {
                var response = await httpClient.GetAsync(uri);

                if (response.StatusCode == HttpStatusCode.OK)
                {
                    output = null;
                    Instance.Initialize(uri, process, pidFilePath, sentinel);
                    return;
                }
            }
            catch (OperationCanceledException)
            {
            }
            catch (HttpRequestException)
            {
            }

            retries++;
        } while (retries < 30);

        // Make output null so that we stop logging to it.
        output = null;
        logOutput.CompleteAdding();
        var exitCodeString = process.HasExited ? process.ExitCode.ToString(CultureInfo.InvariantCulture) : "Process has not yet exited.";
        var message        = $@"Failed to launch the server.
ExitCode: {exitCodeString}
Captured output lines:
{string.Join(Environment.NewLine, logOutput.GetConsumingEnumerable())}.";

        // If we got here, we couldn't launch Selenium or get it to respond. So shut it down.
        ProcessCleanup(process, pidFilePath);
        throw new InvalidOperationException(message);
    }
Code example #34
        public async Task TestAsyncOutputStream_BeginCancelBeginOutputRead()
        {
            using (AnonymousPipeServerStream pipeWrite = new AnonymousPipeServerStream(PipeDirection.Out, HandleInheritability.Inheritable))
                using (AnonymousPipeServerStream pipeRead = new AnonymousPipeServerStream(PipeDirection.In, HandleInheritability.Inheritable))
                {
                    using (Process p = CreateProcess(TestAsyncOutputStream_BeginCancelBeinOutputRead_RemotelyInvokable, $"{pipeWrite.GetClientHandleAsString()} {pipeRead.GetClientHandleAsString()}"))
                    {
                        var dataReceived = new BlockingCollection <int>();

                        p.StartInfo.RedirectStandardOutput = true;
                        p.OutputDataReceived += (s, e) =>
                        {
                            if (e.Data != null)
                            {
                                dataReceived.Add(int.Parse(e.Data));
                            }
                        };

                        // Start child process
                        p.Start();

                        pipeWrite.DisposeLocalCopyOfClientHandle();
                        pipeRead.DisposeLocalCopyOfClientHandle();

                        // Wait child process start
                        Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Child process not started");

                        //Start listening and signal client to produce 1,2,3
                        p.BeginOutputReadLine();
                        await pipeWrite.WriteAsync(new byte[1], 0, 1);

                        // Wait child signal produce number 1,2,3
                        Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing child signal for value 1,2,3");
                        using (CancellationTokenSource cts = new CancellationTokenSource(WaitInMS))
                        {
                            try
                            {
                                List <int> expectedValue123 = new List <int>()
                                {
                                    1, 2, 3
                                };
                                foreach (int value in dataReceived.GetConsumingEnumerable(cts.Token))
                                {
                                    expectedValue123.Remove(value);
                                    if (expectedValue123.Count == 0)
                                    {
                                        break;
                                    }
                                }
                            }
                            catch (OperationCanceledException)
                            {
                                Assert.False(cts.IsCancellationRequested, "Values 1,2,3 not arrived");
                            }
                        }

                        // Cancel and signal child
                        p.CancelOutputRead();
                        await pipeWrite.WriteAsync(new byte[1], 0, 1);

                        // Re-start listening and signal child
                        p.BeginOutputReadLine();
                        await pipeWrite.WriteAsync(new byte[1], 0, 1);

                        // Wait child process close
                        Assert.True(p.WaitForExit(WaitInMS), "Child process didn't close");

                        // Wait for value 7,8,9
                        using (CancellationTokenSource cts = new CancellationTokenSource(WaitInMS))
                        {
                            try
                            {
                                List <int> expectedValue789 = new List <int>()
                                {
                                    7, 8, 9
                                };
                                foreach (int value in dataReceived.GetConsumingEnumerable(cts.Token))
                                {
                                    expectedValue789.Remove(value);
                                    if (expectedValue789.Count == 0)
                                    {
                                        break;
                                    }
                                }
                            }
                            catch (OperationCanceledException)
                            {
                                Assert.False(cts.IsCancellationRequested, "Values 7,8,9 not arrived");
                            }
                        }
                    }
                }
        }
Code example #35
 public IEnumerable<DataSlice> GetConsumingEnumerable(CancellationToken token)
 {
     return documentCollection.GetConsumingEnumerable(token);
 }
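Another bare fragment. A minimal sketch of the sort of buffer class it could sit in follows (the class name and bounded capacity are assumptions; DataSlice is a type from the original project). Passing the CancellationToken through to GetConsumingEnumerable lets a consumer stop waiting on an empty collection as soon as the token is cancelled.

 // Sketch of an assumed enclosing buffer type; not the original project's code.
 public class DataSliceBuffer
 {
     private readonly BlockingCollection<DataSlice> documentCollection =
         new BlockingCollection<DataSlice>(boundedCapacity: 1024); // capacity chosen for illustration

     public void Add(DataSlice slice) => documentCollection.Add(slice);

     public void CompleteAdding() => documentCollection.CompleteAdding();

     public IEnumerable<DataSlice> GetConsumingEnumerable(CancellationToken token)
     {
         return documentCollection.GetConsumingEnumerable(token);
     }
 }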
Code example #36
        private void WriteWorker(Stream stream, BlockingCollection <Block> toWrite, ColumnCodec[] activeColumns,
                                 DataViewSchema sourceSchema, int rowsPerBlock, IChannelProvider cp, ExceptionMarshaller exMarshaller)
        {
            _host.AssertValue(exMarshaller);
            try
            {
                _host.AssertValue(cp);
                cp.AssertValue(stream);
                cp.AssertValue(toWrite);
                cp.AssertValue(activeColumns);
                cp.AssertValue(sourceSchema);
                cp.Assert(rowsPerBlock > 0);

                using (IChannel ch = cp.Start("Write"))
                {
                    var blockLookups = new List <BlockLookup> [activeColumns.Length];
                    for (int c = 0; c < blockLookups.Length; ++c)
                    {
                        blockLookups[c] = new List <BlockLookup>();
                    }
                    var deadLookups = new int[activeColumns.Length];

                    // Reserve space for the header at the start. This will be filled
                    // in with valid values once writing has completed.
                    ch.CheckIO(stream.Position == 0);
                    stream.Write(new byte[Header.HeaderSize], 0, Header.HeaderSize);
                    ch.CheckIO(stream.Position == Header.HeaderSize);
                    long        expectedPosition = stream.Position;
                    BlockLookup deadLookup       = new BlockLookup();
                    foreach (Block block in toWrite.GetConsumingEnumerable(exMarshaller.Token))
                    {
                        ch.CheckIO(stream.Position == expectedPosition);
                        MemoryStream        compressed = block.BlockData;
                        ArraySegment <byte> buffer;
                        bool tmp = compressed.TryGetBuffer(out buffer);
                        ch.Assert(tmp);
                        stream.Write(buffer.Array, buffer.Offset, buffer.Count);
                        BlockLookup currLookup = new BlockLookup(expectedPosition, (int)compressed.Length, block.UncompressedLength);
                        expectedPosition += compressed.Length;
                        _memPool.Return(ref compressed);
                        ch.CheckIO(stream.Position == expectedPosition);

                        // Record the position. We have this "lookups" list per column. Yet, it may be that sometimes
                        // the writer receives things out of order.
                        // REVIEW: The format and the rest of the pipeline supposedly supports a long number
                        // of blocks, but the writing scheme does not yet support that.
                        int blockIndex = (int)block.BlockIndex;
                        var lookups    = blockLookups[block.ColumnIndex];
                        if (lookups.Count == block.BlockIndex) // Received in order.
                        {
                            lookups.Add(currLookup);
                        }
                        else if (lookups.Count < block.BlockIndex) // Received a block a little bit early.
                        {
                            // Add a bunch of dead filler lookups, until these late blocks come in.
                            int deadToAdd = (int)block.BlockIndex - lookups.Count;
                            for (int i = 0; i < deadToAdd; ++i)
                            {
                                lookups.Add(deadLookup);
                            }
                            deadLookups[block.ColumnIndex] += deadToAdd;
                            ch.Assert(lookups.Count == block.BlockIndex);
                            lookups.Add(currLookup);
                        }
                        else // Received a block a little bit late.
                        {
                            // This should be a dead block unless the compressors are buggy and somehow
                            // yielding duplicate blocks or something.
                            ch.Assert(lookups[blockIndex].BlockOffset == 0);
                            deadLookups[block.ColumnIndex]--;
                            lookups[blockIndex] = currLookup;
                        }
                    }

                    // We have finished writing all blocks. We will now write the block lookup tables (so we can
                    // find the blocks), the slot names (for any columns that have them), the column table of
                    // contents (so we know how to decode the blocks, and where the lookups and names are),
                    // and the header (so we know dataview wide information and where to find the table of
                    // contents) in that order.
                    long[] lookupOffsets = new long[blockLookups.Length];
                    using (BinaryWriter writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true))
                    {
                        // Write the block lookup directories. These are referenced from the table of contents,
                        // so that someone knows where to look for some block data.
                        for (int c = 0; c < blockLookups.Length; ++c)
                        {
                            ch.Assert(deadLookups[c] == 0);
                            // The block lookup directories are written uncompressed and in fixed length
                            // to enable rapid seeking.
                            lookupOffsets[c] = stream.Position;
                            foreach (BlockLookup lookup in blockLookups[c])
                            {
                                // *** Lookup table entry format ***
                                // long: Offset to the start of a block
                                // int: Byte length of block as written
                                // int: Byte length of block when uncompressed

                                ch.Assert(lookup.BlockOffset > 0);
                                writer.Write(lookup.BlockOffset);
                                writer.Write(lookup.BlockLength);
                                writer.Write(lookup.DecompressedBlockLength);
                            }
                            ch.CheckIO(stream.Position == lookupOffsets[c] + (16 * blockLookups[c].Count),
                                       "unexpected offsets after block lookup table write");
                        }
                        // Write the metadata for each column.
                        long[] metadataTocOffsets = new long[activeColumns.Length];
                        for (int c = 0; c < activeColumns.Length; ++c)
                        {
                            metadataTocOffsets[c] = WriteMetadata(writer, sourceSchema, activeColumns[c].SourceIndex, ch);
                        }

                        // Write the table of contents.
                        long tocOffset = stream.Position;
                        {
                            int c = 0;
                            expectedPosition = stream.Position;
                            foreach (var active in activeColumns)
                            {
                                // *** Column TOC entry format ***
                                // string: column name
                                // codec (as interpretable by CodecFactory.TryGetCodec): column block codec
                                // CompressionKind(byte): block compression strategy
                                // LEB128 int: Rows per block
                                // long: Offset to the start of the lookup table
                                // long: Offset to the start of the metadata TOC entries, or 0 if this has no metadata

                                string name = sourceSchema[active.SourceIndex].Name;
                                writer.Write(name);
                                int nameLen = Encoding.UTF8.GetByteCount(name);
                                expectedPosition += Utils.Leb128IntLength((uint)nameLen) + nameLen;
                                ch.CheckIO(stream.Position == expectedPosition, "unexpected offsets after table of contents name");
                                expectedPosition += _factory.WriteCodec(stream, active.Codec);
                                ch.CheckIO(stream.Position == expectedPosition, "unexpected offsets after table of contents type description");
                                writer.Write((byte)_compression);
                                expectedPosition++;
                                // REVIEW: Right now the number of rows per block is fixed, so we
                                // write the same value each time. In some future state, it may be that this
                                // is relaxed, with possibly some tradeoffs (for example, inability to randomly seek).
                                writer.WriteLeb128Int((ulong)rowsPerBlock);
                                expectedPosition += Utils.Leb128IntLength((uint)rowsPerBlock);
                                // Offset of the lookup table.
                                writer.Write(lookupOffsets[c]);
                                expectedPosition += sizeof(long);
                                // Offset of the metadata table of contents.
                                writer.Write(metadataTocOffsets[c]);
                                expectedPosition += sizeof(long);
                                ch.CheckIO(stream.Position == expectedPosition, "unexpected offsets after table of contents");
                                c++;
                            }
                        }
                        // Write the tail signature.
                        long tailOffset = stream.Position;
                        writer.Write(Header.TailSignatureValue);

                        // Now move back to the beginning of the stream, and write out the now completed header.
                        Header header = new Header()
                        {
                            Signature             = Header.SignatureValue,
                            Version               = Header.WriterVersion,
                            CompatibleVersion     = Header.CanBeReadByVersion,
                            TableOfContentsOffset = tocOffset,
                            TailOffset            = tailOffset,
                            RowCount              = _rowCount,
                            ColumnCount           = activeColumns.Length
                        };
                        byte[] headerBytes = new byte[Header.HeaderSize];
                        unsafe
                        {
                            Marshal.Copy(new IntPtr(&header), headerBytes, 0, Marshal.SizeOf(typeof(Header)));
                        }
                        writer.Seek(0, SeekOrigin.Begin);
                        writer.Write(headerBytes);
                    }
                }
            }
            catch (Exception ex)
            {
                exMarshaller.Set("writing", ex);
            }
        }
Code example #37
File: Program.cs Project: markchipman/sql2csv
        public static int Main(string[] args)
        {
            var (query, output, connectionString) = ParseArgs(args);
#if DEBUG
            query            = "SELECT top 1000 EMail, Domain, cast(IsConfirmed as smallint), convert(varchar, AddDate, 120), cast(IsSend_System_JobRecommendation as smallint), cast(IsNeedConfirm_UkrNet as smallint) FROM EMailSource with (nolock)";
            output           = "emailsource.csv";
            connectionString = "Data Source=beta.rabota.ua;Initial Catalog=RabotaUA2;Integrated Security=False;User ID=sa;Password=rabota;";
#endif
            if (string.IsNullOrEmpty(query) || string.IsNullOrEmpty(output) || string.IsNullOrEmpty(connectionString))
            {
                PrintHelpMessage();
                return(1);
            }

            var global = Stopwatch.StartNew();

            var status = new CancellationTokenSource();
            if (Environment.UserInteractive)
            {
                Task.Run(() =>
                {
                    Console.WriteLine("Read -> Process -> Write");
                    while (!status.IsCancellationRequested)
                    {
                        Console.Write($"{InputRows:N0} -> {ProccessedRows:N0} -> {OutputRows:N0} in {global.Elapsed}\r");
                        Thread.Sleep(200);
                    }
                }, status.Token);
            }

            var read = Task.Run(() =>
            {
                var timer = Stopwatch.StartNew();
                using (var connection = new SqlConnection(connectionString))
                {
                    using (var command = new SqlCommand(query, connection)
                    {
                        CommandTimeout = 0
                    })
                    {
                        connection.Open();
                        using (var reader = command.ExecuteReader())
                        {
                            if (reader.HasRows)
                            {
                                while (reader.Read())
                                {
                                    var values = new object[reader.FieldCount];
                                    reader.GetValues(values);
                                    InputQueue.Add(values, status.Token);
                                    Interlocked.Increment(ref InputRows);
                                }
                            }
                        }
                    }
                }
                InputQueue.CompleteAdding();
                InputTime = timer.Elapsed;
            }, status.Token);

            var process = Task.Run(() =>
            {
                var timer = Stopwatch.StartNew();
                Parallel.ForEach(InputQueue.GetConsumingEnumerable(), ParallelOptions, values =>
                {
                    var sb = new StringBuilder();
                    for (var i = 0; i < values.Length; i++)
                    {
                        var str = (values[i] ?? "").ToString().Trim();

                        if (values[i] is string)
                        {
                            //str = HttpUtility.UrlDecode(str);
                            str = Regex.Replace(str, @"[\u0000-\u001F]", string.Empty);
                            str = Regex.Replace(str, "\\s+", " ");
                            str = str.Replace("\"", "\"\"");
                        }

                        sb.AppendFormat("{0}\"{1}\"", i > 0 ? "," : "", str);
                    }

                    OutputQueue.Add(sb.ToString(), status.Token);
                    Interlocked.Increment(ref ProccessedRows);
                });
                OutputQueue.CompleteAdding();
                ProcessTime = timer.Elapsed;
            }, status.Token);

            var write = Task.Run(() =>
            {
                var timer = Stopwatch.StartNew();
                using (var writer = new StreamWriter(output, false, new UTF8Encoding(false))
                {
                    NewLine = "\n"
                })
                {
                    foreach (var row in OutputQueue.GetConsumingEnumerable())
                    {
                        writer.WriteLine(row);
                        Interlocked.Increment(ref OutputRows);
                    }
                }
                OutputTime = timer.Elapsed;
            }, status.Token);

            Task.WaitAll(read, process, write);
            status.Cancel();
            Console.WriteLine();
            Console.WriteLine($"{InputRows:N0} read in {InputTime}");
            Console.WriteLine($"{ProccessedRows:N0} process in {ProcessTime}");
            Console.WriteLine($"{OutputRows:N0} write in {OutputTime}");
            Console.WriteLine($"Done in {global.Elapsed}");

            return(0);
        }
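The method above relies on several members that are declared elsewhere in the class (the two queues, the row counters, the timers, ParallelOptions, status and global). A minimal sketch of what those declarations might look like follows; the names match the snippet, but the bounded capacities and exact types are assumptions, not the original source.

        // Hypothetical supporting declarations for the pipeline above (capacities and types are assumed):
        static readonly BlockingCollection<object[]> InputQueue = new BlockingCollection<object[]>(boundedCapacity: 10000);
        static readonly BlockingCollection<string> OutputQueue = new BlockingCollection<string>(boundedCapacity: 10000);
        static readonly ParallelOptions ParallelOptions = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
        static readonly CancellationTokenSource status = new CancellationTokenSource();
        static readonly Stopwatch global = Stopwatch.StartNew();
        static long InputRows, ProccessedRows, OutputRows;
        static TimeSpan InputTime, ProcessTime, OutputTime;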
コード例 #38
0
        /// <summary>
        ///     Downloads the given files (automatically decides whether to use multi-part download)
        /// </summary>
        /// <param name="fileEnumerable">The list of files to download</param>
        /// <param name="downloadThread">The number of download threads</param>
        /// <param name="tokenSource"></param>
        public static async Task AdvancedDownloadListFile(IEnumerable <DownloadFile> fileEnumerable, int downloadThread,
                                                          CancellationTokenSource tokenSource)
        {
            var downloadFiles  = fileEnumerable.ToList();
            var token          = tokenSource?.Token ?? CancellationToken.None;
            var processorCount = ProcessorHelper.GetPhysicalProcessorCount();

            if (downloadThread <= 0)
            {
                downloadThread = processorCount;
            }

            using var bc = new BlockingCollection <DownloadFile>(downloadThread * 4);
            using var downloadQueueTask = Task.Run(() =>
            {
                foreach (var df in downloadFiles)
                {
                    bc.Add(df, token);
                }

                bc.CompleteAdding();
            }, token);

            using var downloadTask = Task.Run(() =>
            {
                void DownloadAction()
                {
                    foreach (var df in bc.GetConsumingEnumerable())
                    {
                        var di = new DirectoryInfo(
                            df.DownloadPath.Substring(0, df.DownloadPath.LastIndexOf('\\')));
                        if (!di.Exists)
                        {
                            di.Create();
                        }

                        // DownloadData(df);
                        // Use multi-part download for files of at least 1 MiB or of unknown size.
                        if (df.FileSize >= 1048576 || df.FileSize == 0)
                        {
                            MultiPartDownload(df);
                        }
                        else
                        {
                            DownloadData(df);
                        }
                    }
                }

                var threads = new List <Thread>();

                for (var i = 0; i < downloadThread * 2; i++)
                {
                    threads.Add(new Thread(DownloadAction));
                }

                foreach (var t in threads)
                {
                    t.Start();
                }

                foreach (var t in threads)
                {
                    t.Join();
                }
            }, token);

            await Task.WhenAll(downloadQueueTask, downloadTask).ConfigureAwait(false);
        }
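A hedged usage sketch for the method above follows. The initializer and the cancellation window are illustrative; only DownloadPath and FileSize appear in the snippet, and the property carrying the source URL is omitted because its name is not shown.

        // Hypothetical caller (illustration only; the DownloadFile initializer is deliberately incomplete):
        static async Task DownloadExampleAsync()
        {
            var files = new List<DownloadFile>
            {
                new DownloadFile { DownloadPath = @"C:\temp\a.bin", FileSize = 0 } // source URL property omitted
            };

            using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(10));
            await AdvancedDownloadListFile(files, downloadThread: 8, cts).ConfigureAwait(false);
        }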
コード例 #39
0
        private void ConsumeSignal()
        {
            foreach (Block signal in _signalsQueue.GetConsumingEnumerable(_cancellationTokenSource.Token))
            {
                Block parentBlock = signal;
                while (_signalsQueue.TryTake(out Block nextSignal))
                {
                    if (parentBlock.Number <= nextSignal.Number)
                    {
                        parentBlock = nextSignal;
                    }
                }

                try
                {
                    Block block = PrepareBlock(parentBlock);
                    if (block == null)
                    {
                        if (_logger.IsTrace)
                        {
                            _logger.Trace("Skipping block production or block production failed");
                        }
                        continue;
                    }

                    if (_logger.IsInfo)
                    {
                        _logger.Info($"Processing prepared block {block.Number}");
                    }
                    Block processedBlock = _processor.Process(block, ProcessingOptions.NoValidation | ProcessingOptions.ReadOnlyChain | ProcessingOptions.WithRollback, NullBlockTracer.Instance);
                    if (processedBlock == null)
                    {
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Prepared block has lost the race");
                        }
                        continue;
                    }

                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"Sealing prepared block {processedBlock.Number}");
                    }

                    _sealer.SealBlock(processedBlock, _cancellationTokenSource.Token).ContinueWith(t =>
                    {
                        if (t.IsCompletedSuccessfully)
                        {
                            if (t.Result != null)
                            {
                                if (_logger.IsInfo)
                                {
                                    _logger.Info($"Sealed block {t.Result.ToString(Block.Format.HashNumberDiffAndTx)}");
                                }
                                _scheduledBlock = t.Result;
                            }
                            else
                            {
                                if (_logger.IsInfo)
                                {
                                    _logger.Info($"Failed to seal block {processedBlock.ToString(Block.Format.HashNumberDiffAndTx)} (null seal)");
                                }
                            }
                        }
                        else if (t.IsFaulted)
                        {
                            if (_logger.IsError)
                            {
                                _logger.Error("Mining failed", t.Exception);
                            }
                        }
                        else if (t.IsCanceled)
                        {
                            if (_logger.IsInfo)
                            {
                                _logger.Info($"Sealing block {processedBlock.Number} cancelled");
                            }
                        }
                    }, _cancellationTokenSource.Token);
                }
                catch (Exception e)
                {
                    if (_logger.IsError)
                    {
                        _logger.Error($"Block producer could not produce block on top of {parentBlock.ToString(Block.Format.Short)}", e);
                    }
                }
            }
        }
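The TryTake loop at the top of ConsumeSignal coalesces a burst of queued signals down to the newest block before any work is done. A self-contained sketch of that drain-to-latest pattern, with illustrative names rather than the original types, looks like this:

        // Illustrative sketch of the drain-to-latest pattern (names are not from the original source):
        static void ConsumeLatest(BlockingCollection<int> queue, CancellationToken token)
        {
            foreach (int first in queue.GetConsumingEnumerable(token))
            {
                int latest = first;

                // Drain whatever else is already queued, keeping only the newest item.
                while (queue.TryTake(out int next))
                {
                    if (next > latest)
                    {
                        latest = next;
                    }
                }

                Console.WriteLine($"Processing latest item {latest}");
            }
        }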
コード例 #40
0
        Task StartConsumer(BlockingCollection <DBObject> input, BlockingCollection <ACLInfo> output, TaskFactory factory)
        {
            return(factory.StartNew(() =>
            {
                foreach (DBObject obj in input.GetConsumingEnumerable())
                {
                    Interlocked.Increment(ref count);
                    if (obj.NTSecurityDescriptor == null)
                    {
                        options.WriteVerbose($"DACL was null on ${obj.SAMAccountName}");
                        continue;
                    }
                    RawSecurityDescriptor desc = new RawSecurityDescriptor(obj.NTSecurityDescriptor, 0);
                    RawAcl acls = desc.DiscretionaryAcl;
                    // Figure out who the owner is
                    string ownersid = desc.Owner.ToString();

                    if (!manager.FindBySID(ownersid, CurrentDomain, out DBObject owner))
                    {
                        if (MappedPrincipal.GetCommon(ownersid, out MappedPrincipal mapped))
                        {
                            owner = new DBObject
                            {
                                BloodHoundDisplayName = $"{mapped.SimpleName}@{CurrentDomain}",
                                Type = "group",
                                Domain = CurrentDomain,
                                DistinguishedName = $"{mapped.SimpleName}@{CurrentDomain}",
                            };
                        }
                        else if (NullSIDS.TryGetValue(ownersid, out byte val))
                        {
                            owner = null;
                            continue;
                        }
                        else
                        {
                            try
                            {
                                DirectoryEntry entry = new DirectoryEntry($"LDAP://<SID={ownersid}>");
                                owner = entry.ConvertToDB();
                                manager.InsertRecord(owner);
                            }
                            catch
                            {
                                owner = null;
                                NullSIDS.TryAdd(ownersid, new byte());
                                options.WriteVerbose($"Unable to resolve {ownersid} for object owner");
                                continue;
                            }
                        }
                    }

                    if (owner != null)
                    {
                        output.Add(new ACLInfo
                        {
                            ObjectName = obj.BloodHoundDisplayName,
                            ObjectType = obj.Type,
                            Inherited = false,
                            RightName = "Owner",
                            PrincipalName = owner.BloodHoundDisplayName,
                            PrincipalType = owner.Type,
                            AceType = "",
                            Qualifier = "AccessAllowed"
                        });
                    }

                    foreach (QualifiedAce r in acls)
                    {
                        string PrincipalSID = r.SecurityIdentifier.ToString();

                        //Try to map our SID to the principal using a few different methods
                        if (!manager.FindBySID(PrincipalSID, CurrentDomain, out DBObject principal))
                        {
                            if (MappedPrincipal.GetCommon(PrincipalSID, out MappedPrincipal mapped))
                            {
                                principal = new DBObject
                                {
                                    BloodHoundDisplayName = $"{mapped.SimpleName}@{CurrentDomain}",
                                    Type = "group",
                                    Domain = CurrentDomain,
                                    DistinguishedName = $"{mapped.SimpleName}@{CurrentDomain}"
                                };
                            }
                            else if (NullSIDS.TryGetValue(PrincipalSID, out byte val))
                            {
                                continue;
                            }
                            else
                            {
                                try
                                {
                                    DirectoryEntry entry = new DirectoryEntry($"LDAP://<SID={PrincipalSID}>");
                                    principal = entry.ConvertToDB();
                                    manager.InsertRecord(principal);
                                }
                                catch
                                {
                                    NullSIDS.TryAdd(PrincipalSID, new byte());
                                    options.WriteVerbose($"Unable to resolve {PrincipalSID} for ACL");
                                    continue;
                                }
                            }
                        }
                        //If we're here, we have a principal. Yay!

                        //Resolve the ActiveDirectoryRight
                        ActiveDirectoryRights right = (ActiveDirectoryRights)Enum.ToObject(typeof(ActiveDirectoryRights), r.AccessMask);
                        string rs = right.ToString();
                        string guid = r is ObjectAce ? ((ObjectAce)r).ObjectAceType.ToString() : "";
                        List <string> foundrights = new List <string>();
                        bool cont = false;

                        //Figure out if we need more processing
                        cont |= (rs.Contains("WriteDacl") || rs.Contains("WriteOwner"));
                        if (rs.Contains("GenericWrite") || rs.Contains("GenericAll"))
                        {
                            cont |= ("00000000-0000-0000-0000-000000000000".Equals(guid) || guid.Equals("") || cont);
                        }

                        if (rs.Contains("ExtendedRight"))
                        {
                            cont |= (guid.Equals("00000000-0000-0000-0000-000000000000") || guid.Equals("") || guid.Equals("00299570-246d-11d0-a768-00aa006e0529") || cont);

                            //DCSync
                            cont |= (guid.Equals("1131f6aa-9c07-11d1-f79f-00c04fc2dcd2") || guid.Equals("1131f6ad-9c07-11d1-f79f-00c04fc2dcd2") || cont);
                        }

                        if (rs.Contains("WriteProperty"))
                        {
                            cont |= (guid.Equals("00000000-0000-0000-0000-000000000000") || guid.Equals("bf9679c0-0de6-11d0-a285-00aa003049e2") || guid.Equals("bf9679a8-0de6-11d0-a285-00aa003049e2") || cont);
                        }

                        if (!cont)
                        {
                            continue;
                        }

                        string acetype = null;
                        MatchCollection coll = GenericRegex.Matches(rs);
                        if (rs.Contains("ExtendedRight"))
                        {
                            switch (guid)
                            {
                            case "1131f6aa-9c07-11d1-f79f-00c04fc2dcd2":
                                acetype = "DS-Replication-Get-Changes";
                                break;

                            case "1131f6ad-9c07-11d1-f79f-00c04fc2dcd2":
                                acetype = "DS-Replication-Get-Changes-All";
                                break;

                            default:
                                acetype = "All";
                                break;
                            }
                        }

                        if (acetype != null && (acetype.Equals("DS-Replication-Get-Changes-All") || acetype.Equals("DS-Replication-Get-Changes")))
                        {
                            if (!syncers.TryGetValue(principal.DistinguishedName, out DCSync SyncObject))
                            {
                                SyncObject = new DCSync
                                {
                                    Domain = obj.BloodHoundDisplayName,
                                    PrincipalName = principal.BloodHoundDisplayName,
                                    PrincipalType = principal.Type
                                };
                            }

                            if (acetype.Contains("-All"))
                            {
                                SyncObject.GetChangesAll = true;
                            }
                            else
                            {
                                SyncObject.GetChanges = true;
                            }

                            syncers.AddOrUpdate(principal.DistinguishedName, SyncObject, (key, oldVar) => SyncObject);
                            //We only care about these privs if we have both, so store that stuff and continue on
                            continue;
                        }

                        if (rs.Contains("GenericAll"))
                        {
                            output.Add(new ACLInfo
                            {
                                ObjectName = obj.BloodHoundDisplayName,
                                ObjectType = obj.Type,
                                AceType = "",
                                Inherited = r.IsInherited,
                                PrincipalName = principal.BloodHoundDisplayName,
                                PrincipalType = principal.Type,
                                Qualifier = r.AceQualifier.ToString(),
                                RightName = "GenericAll"
                            });
                        }

                        if (rs.Contains("GenericWrite"))
                        {
                            output.Add(new ACLInfo
                            {
                                ObjectName = obj.BloodHoundDisplayName,
                                ObjectType = obj.Type,
                                AceType = "",
                                Inherited = r.IsInherited,
                                PrincipalName = principal.BloodHoundDisplayName,
                                PrincipalType = principal.Type,
                                Qualifier = r.AceQualifier.ToString(),
                                RightName = "GenericWrite"
                            });
                        }

                        if (rs.Contains("WriteOwner"))
                        {
                            output.Add(new ACLInfo
                            {
                                ObjectName = obj.BloodHoundDisplayName,
                                ObjectType = obj.Type,
                                AceType = "",
                                Inherited = r.IsInherited,
                                PrincipalName = principal.BloodHoundDisplayName,
                                PrincipalType = principal.Type,
                                Qualifier = r.AceQualifier.ToString(),
                                RightName = "WriteOwner"
                            });
                        }

                        if (rs.Contains("WriteDacl"))
                        {
                            output.Add(new ACLInfo
                            {
                                ObjectName = obj.BloodHoundDisplayName,
                                ObjectType = obj.Type,
                                AceType = "",
                                Inherited = r.IsInherited,
                                PrincipalName = principal.BloodHoundDisplayName,
                                PrincipalType = principal.Type,
                                Qualifier = r.AceQualifier.ToString(),
                                RightName = "WriteDacl"
                            });
                        }

                        if (rs.Contains("WriteProperty"))
                        {
                            if (guid.Equals("bf9679c0-0de6-11d0-a285-00aa003049e2"))
                            {
                                output.Add(new ACLInfo
                                {
                                    ObjectName = obj.BloodHoundDisplayName,
                                    ObjectType = obj.Type,
                                    AceType = "Member",
                                    Inherited = r.IsInherited,
                                    PrincipalName = principal.BloodHoundDisplayName,
                                    PrincipalType = principal.Type,
                                    Qualifier = r.AceQualifier.ToString(),
                                    RightName = "WriteProperty"
                                });
                            }
                            else
                            {
                                output.Add(new ACLInfo
                                {
                                    ObjectName = obj.BloodHoundDisplayName,
                                    ObjectType = obj.Type,
                                    AceType = "Script-Path",
                                    Inherited = r.IsInherited,
                                    PrincipalName = principal.BloodHoundDisplayName,
                                    PrincipalType = principal.Type,
                                    Qualifier = r.AceQualifier.ToString(),
                                    RightName = "WriteProperty"
                                });
                            }
                        }

                        if (rs.Contains("ExtendedRight"))
                        {
                            if (guid.Equals("00299570-246d-11d0-a768-00aa006e0529"))
                            {
                                output.Add(new ACLInfo
                                {
                                    ObjectName = obj.BloodHoundDisplayName,
                                    ObjectType = obj.Type,
                                    AceType = "User-Force-Change-Password",
                                    Inherited = r.IsInherited,
                                    PrincipalName = principal.BloodHoundDisplayName,
                                    PrincipalType = principal.Type,
                                    Qualifier = r.AceQualifier.ToString(),
                                    RightName = "ExtendedRight"
                                });
                            }
                            else
                            {
                                output.Add(new ACLInfo
                                {
                                    ObjectName = obj.BloodHoundDisplayName,
                                    ObjectType = obj.Type,
                                    AceType = "All",
                                    Inherited = r.IsInherited,
                                    PrincipalName = principal.BloodHoundDisplayName,
                                    PrincipalType = principal.Type,
                                    Qualifier = r.AceQualifier.ToString(),
                                    RightName = "ExtendedRight"
                                });
                            }
                        }
                    }
                }
            }));
        }
コード例 #41
0
 /// <summary>
 /// Creates a new ArrowSink whose input is buffered in a BlockingCollection&lt;TIn&gt;
 /// and consumed through an enumerator over its consuming enumerable.
 /// </summary>
 public ArrowSink()
 {
     this.BlockingCollection  = new BlockingCollection <TIn>();
     this._InternalEnumerator = BlockingCollection.GetConsumingEnumerable().GetEnumerator();
 }
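The constructor pairs the BlockingCollection with a consuming enumerator, so one side can push items while the other blocks on MoveNext. Below is a hedged sketch of companion members built on those two fields; the method names are assumptions, not the library's actual API.

 // Hypothetical companion members (method names are assumed, not the library's actual API):
 public void ReceiveMessage(TIn message)
 {
     BlockingCollection.Add(message);
 }

 public bool TryPull(out TIn next)
 {
     // MoveNext blocks until an item arrives, or returns false once CompleteAdding() ends the enumeration.
     if (_InternalEnumerator.MoveNext())
     {
         next = _InternalEnumerator.Current;
         return true;
     }

     next = default;
     return false;
 }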
コード例 #42
0
        Task StartWriter(BlockingCollection <ACLInfo> output, TaskFactory factory)
        {
            return(factory.StartNew(() =>
            {
                if (options.URI == null)
                {
                    string path = options.GetFilePath("acls");
                    bool append = File.Exists(path);
                    using (StreamWriter writer = new StreamWriter(path, append))
                    {
                        if (!append)
                        {
                            writer.WriteLine("ObjectName,ObjectType,PrincipalName,PrincipalType,ActiveDirectoryRights,ACEType,AccessControlType,IsInherited");
                        }
                        int localcount = 0;
                        foreach (ACLInfo info in output.GetConsumingEnumerable())
                        {
                            writer.WriteLine(info.ToCSV());
                            localcount++;
                            if (localcount % 100 == 0)
                            {
                                writer.Flush();
                            }
                        }
                    }
                }
                else
                {
                    using (WebClient client = new WebClient())
                    {
                        client.Headers.Add("content-type", "application/json");
                        client.Headers.Add("Accept", "application/json; charset=UTF-8");
                        client.Headers.Add("Authorization", options.GetEncodedUserPass());

                        int localcount = 0;
                        Dictionary <string, RESTOutputACL> restmap = new Dictionary <string, RESTOutputACL>();

                        JavaScriptSerializer serializer = new JavaScriptSerializer();
                        List <object> statements = new List <object>();

                        foreach (ACLInfo info in output.GetConsumingEnumerable())
                        {
                            localcount++;
                            string key = info.GetKey();
                            if (!restmap.TryGetValue(key, out RESTOutputACL val))
                            {
                                val = new RESTOutputACL();
                            }

                            val.props.Add(info.ToParam());

                            restmap[key] = val;

                            if (localcount % 1000 == 0)
                            {
                                statements = new List <object>();
                                foreach (string k in restmap.Keys)
                                {
                                    statements.Add(restmap[k].GetStatement(k));
                                }

                                var ToPost = serializer.Serialize(new
                                {
                                    statements = statements.ToArray()
                                });

                                try
                                {
                                    client.UploadData(options.GetURI(), "POST", Encoding.Default.GetBytes(ToPost));
                                }
                                catch (Exception e)
                                {
                                    Console.WriteLine(e);
                                }

                                restmap = new Dictionary <string, RESTOutputACL>();
                            }
                        }

                        statements = new List <object>();
                        foreach (string k in restmap.Keys)
                        {
                            statements.Add(restmap[k].GetStatement(k));
                        }

                        var FinalPost = serializer.Serialize(new
                        {
                            statements = statements.ToArray()
                        });

                        try
                        {
                            client.UploadData(options.GetURI(), "POST", Encoding.Default.GetBytes(FinalPost));
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine(e);
                        }
                    }
                }
            }));
        }
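StartConsumer (example #40) and StartWriter share the ACLInfo queue. A hedged sketch of how the two might be wired together follows; the factory options, the bounds, and the step that feeds the input queue are assumptions, not the original source.

        // Hypothetical wiring of the consumer/writer pair (illustration only):
        void RunAclPipeline(IEnumerable<DBObject> objects)
        {
            var input = new BlockingCollection<DBObject>(boundedCapacity: 1000);
            var acls = new BlockingCollection<ACLInfo>(boundedCapacity: 1000);
            var factory = new TaskFactory(TaskCreationOptions.LongRunning, TaskContinuationOptions.None);

            Task writer = StartWriter(acls, factory);
            Task[] consumers = Enumerable.Range(0, Environment.ProcessorCount)
                                         .Select(_ => StartConsumer(input, acls, factory))
                                         .ToArray();

            foreach (DBObject obj in objects)
            {
                input.Add(obj);
            }
            input.CompleteAdding();

            Task.WaitAll(consumers);
            acls.CompleteAdding(); // only mark the output complete once every consumer has finished
            writer.Wait();
        }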
コード例 #43
0
        private async Task RunRefreshPeerLoop()
        {
            foreach (PeerInfo peerInfo in _peerRefreshQueue.GetConsumingEnumerable(_refreshLoopCancellation.Token))
            {
                try
                {
                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"Running refresh peer info for {peerInfo}.");
                    }
                    _syncPeersReport.Write();
                    var initCancelSource = _refreshCancelTokens[peerInfo.SyncPeer.Node.Id] = new CancellationTokenSource();
                    var linkedSource     = CancellationTokenSource.CreateLinkedTokenSource(initCancelSource.Token, _refreshLoopCancellation.Token);
                    await RefreshPeerInfo(peerInfo, linkedSource.Token).ContinueWith(t =>
                    {
                        _refreshCancelTokens.TryRemove(peerInfo.SyncPeer.Node.Id, out _);
                        if (t.IsFaulted)
                        {
                            if (t.Exception != null && t.Exception.InnerExceptions.Any(x => x.InnerException is TimeoutException))
                            {
                                if (_logger.IsTrace)
                                {
                                    _logger.Trace($"Refreshing {peerInfo} failed due to timeout: {t.Exception.Message}");
                                }
                            }
                            else if (_logger.IsDebug)
                            {
                                _logger.Debug($"Refreshing {peerInfo} failed {t.Exception}");
                            }
                        }
                        else if (t.IsCanceled)
                        {
                            if (_logger.IsTrace)
                            {
                                _logger.Trace($"Refresh peer info canceled: {peerInfo.SyncPeer.Node:s}");
                            }
                        }
                        else
                        {
                            UpdateAllocations("REFRESH");
                            // cases when we want other nodes to resolve the impasse (check Goerli discussion on 5 out of 9 validators)
                            if (peerInfo.TotalDifficulty == _blockTree.BestSuggested?.TotalDifficulty && peerInfo.HeadHash != _blockTree.BestSuggested?.Hash)
                            {
                                Block block = _blockTree.FindBlock(_blockTree.BestSuggested.Hash, false);
                                if (block != null) // can be null if fast syncing headers only
                                {
                                    peerInfo.SyncPeer.SendNewBlock(block);
                                    if (_logger.IsDebug)
                                    {
                                        _logger.Debug($"Sending my best block {block} to {peerInfo}");
                                    }
                                }
                            }
                        }

                        if (_logger.IsDebug)
                        {
                            _logger.Debug($"Refreshed peer info for {peerInfo}.");
                        }

                        initCancelSource.Dispose();
                        linkedSource.Dispose();
                    });
                }
                catch (Exception e)
                {
                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"Failed to refresh {peerInfo} {e}");
                    }
                }
            }

            if (_logger.IsInfo)
            {
                _logger.Info($"Exiting sync peer refresh loop");
            }
        }
コード例 #44
0
 /// <summary>
 /// Returns a blocking enumerable of messages received from Kafka.
 /// </summary>
 /// <returns>Blocking enumerable of messages from Kafka.</returns>
 public IEnumerable <Message> Consume(CancellationToken?cancellationToken = null)
 {
     _options.Log.DebugFormat("Consumer: Beginning consumption of topic: {0}", _options.Topic);
     EnsurePartitionPollingThreads();
     return(_fetchResponseQueue.GetConsumingEnumerable(cancellationToken ?? CancellationToken.None));
 }
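A short usage sketch follows; the Consumer type name and instance are assumptions. Note that GetConsumingEnumerable(CancellationToken) ends by throwing OperationCanceledException when the token is cancelled, so the caller catches it.

 // Hypothetical caller (the Consumer type name is assumed from the surrounding snippet):
 public void ConsumeExample(Consumer consumer)
 {
     using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
     {
         try
         {
             foreach (Message message in consumer.Consume(cts.Token))
             {
                 Console.WriteLine("Received a message from the topic");
             }
         }
         catch (OperationCanceledException)
         {
             // Cancelling the token ends the blocking enumeration with this exception.
         }
     }
 }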
コード例 #45
0
        public void Build(IEnumerable <ISequence> sequences)
        {
            // Size of Kmer List to grab, somewhat arbitrary but want to keep list size below large object threshold, which is ~85 kb
            const int blockSize = 4096;

            // When to add list to blocking collection, most short reads are <=151 bp so this should avoid needing to grow the list
            const int addThreshold = blockSize - 151;

            // When to pause adding
            const int stopAddThreshold = 2000000 / blockSize;

            if (sequences == null)
            {
                throw new ArgumentNullException("sequences");
            }

            if (KmerLength > KmerData32.MAX_KMER_LENGTH)
            {
                throw new ArgumentException(Properties.Resource.KmerLengthGreaterThan31);
            }

            // A dictionary mapping k-mers to de Bruijn nodes
            KmerDictionary kmerManager = new KmerDictionary();

            // Create the producer thread.
            var  kmerDataCollection = new BlockingCollection <List <KmerData32> >();
            Task producer           = Task.Factory.StartNew(() =>
            {
                try
                {
                    List <KmerData32> kmerList = new List <KmerData32>(blockSize);

                    IAlphabet alphabet = Alphabets.DNA;
                    HashSet <byte> gapSymbols;
                    alphabet.TryGetGapSymbols(out gapSymbols);

                    // Generate the kmers from the sequences
                    foreach (ISequence sequence in sequences)
                    {
                        // if the sequence alphabet is not of type DNA then ignore it.
                        bool skipSequence = false;
                        if (sequence.Alphabet != Alphabets.DNA)
                        {
                            skipSequence = true;
                        }
                        else
                        {
                            // if the sequence contains any gap symbols then ignore the sequence.
                            foreach (byte symbol in gapSymbols)
                            {
                                for (long index = 0; index < sequence.Count; ++index)
                                {
                                    if (sequence[index] == symbol)
                                    {
                                        skipSequence = true;
                                        break;
                                    }
                                }

                                if (skipSequence)
                                {
                                    break;
                                }
                            }
                        }

                        if (skipSequence)
                        {
                            Interlocked.Increment(ref _skippedSequencesCount);
                            Interlocked.Increment(ref _processedSequencesCount);
                            continue;
                        }

                        // if the blocking collection count is exceeding 2 million kmers wait for 2 sec
                        // so that the task can remove some kmers and create the nodes.
                        // This will avoid OutofMemoryException
                        while (kmerDataCollection.Count > stopAddThreshold)
                        {
                            Task.Delay(TimeSpan.FromSeconds(2)).Wait();
                        }

                        // Convert sequences to k-mers
                        kmerList.AddRange(KmerData32.GetKmers(sequence, KmerLength));

                        // Most reads are <=150 basepairs, so this should avoid having to grow the list
                        // by keeping it below blockSize
                        if (kmerList.Count > addThreshold)
                        {
                            kmerDataCollection.Add(kmerList);
                            kmerList = new List <KmerData32>(blockSize);
                        }
                        Interlocked.Increment(ref _processedSequencesCount);
                    }

                    // Flush any remaining k-mers that were not added inside the loop.
                    if (kmerList.Count > 0)
                    {
                        kmerDataCollection.Add(kmerList);
                    }
                }
                finally
                {
                    kmerDataCollection.CompleteAdding();
                }
            });

            // Consume k-mers by adding them to the binary tree structure as nodes
            Parallel.ForEach(kmerDataCollection.GetConsumingEnumerable(), newKmerList =>
            {
                foreach (KmerData32 newKmer in newKmerList)
                {
                    // Create Vertex
                    DeBruijnNode node = kmerManager.SetNewOrGetOld(newKmer);

                    // Need to lock node if doing this in parallel
                    if (node.KmerCount <= 255)
                    {
                        lock (node)
                        {
                            node.KmerCount++;
                        }
                    }
                }
            });

            // Ensure producer exceptions are handled.
            producer.Wait();

            // Done filling binary tree
            kmerDataCollection.Dispose();

            //NOTE: To speed enumeration make the nodes into an array and dispose of the collection
            _nodeCount = kmerManager.NodeCount;
            _nodes     = kmerManager.GenerateNodeArray();

            // Generate the links
            GenerateLinks(kmerManager);

            // Since we no longer need to search for values set left and right nodes of child array to null
            // so that they are available for GC if no longer needed
            foreach (DeBruijnNode node in _nodes)
            {
                node.Left = node.Right = null;
            }

            GraphBuildCompleted = true;
        }
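The producer above throttles itself by polling kmerDataCollection.Count and sleeping. The same back-pressure can come from a bounded BlockingCollection, where Add blocks once the bound is reached; a small sketch under that assumption (the capacity value is illustrative):

        // Sketch: a bounded BlockingCollection provides back-pressure without polling Count (capacity is assumed):
        static void BoundedProducerDemo()
        {
            var bounded = new BlockingCollection<int>(boundedCapacity: 488); // roughly 2,000,000 / 4096

            Task producer = Task.Run(() =>
            {
                for (int i = 0; i < 10000; i++)
                {
                    bounded.Add(i); // blocks while the consumer is behind
                }
                bounded.CompleteAdding();
            });

            foreach (int item in bounded.GetConsumingEnumerable())
            {
                // consume the item
            }

            producer.Wait();
        }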
コード例 #46
0
        async Task StartLoop(CancellationToken cancellation)
        {
            Logs.PayServer.LogInformation("Start watching invoices");
            await Task.Delay(1).ConfigureAwait(false); // Small hack so that the caller does not block on GetConsumingEnumerable

            foreach (var invoiceId in _WatchRequests.GetConsumingEnumerable(cancellation))
            {
                int maxLoop   = 5;
                int loopCount = -1;
                while (loopCount < maxLoop)
                {
                    loopCount++;
                    try
                    {
                        cancellation.ThrowIfCancellationRequested();
                        var invoice = await _InvoiceRepository.GetInvoice(invoiceId, true);

                        if (invoice == null)
                        {
                            break;
                        }
                        var updateContext = new UpdateInvoiceContext(invoice);
                        await UpdateInvoice(updateContext);

                        if (updateContext.Dirty)
                        {
                            await _InvoiceRepository.UpdateInvoiceStatus(invoice.Id, invoice.GetInvoiceState());

                            updateContext.Events.Insert(0, new InvoiceDataChangedEvent(invoice));
                        }

                        foreach (var evt in updateContext.Events)
                        {
                            _EventAggregator.Publish(evt, evt.GetType());
                        }

                        if (invoice.Status == InvoiceStatus.Complete ||
                            ((invoice.Status == InvoiceStatus.Invalid || invoice.Status == InvoiceStatus.Expired) && invoice.MonitoringExpiration < DateTimeOffset.UtcNow))
                        {
                            var updateConfirmationCountIfNeeded = invoice
                                                                  .GetPayments()
                                                                  .Select <PaymentEntity, Task>(async payment =>
                            {
                                var paymentNetwork = _NetworkProvider.GetNetwork(payment.GetCryptoCode());
                                var paymentData    = payment.GetCryptoPaymentData();
                                if (paymentData is Payments.Bitcoin.BitcoinLikePaymentData onChainPaymentData)
                                {
                                    // Do update if confirmation count in the paymentData is not up to date
                                    if ((onChainPaymentData.ConfirmationCount < paymentNetwork.MaxTrackedConfirmation && payment.Accounted) &&
                                        (onChainPaymentData.Legacy || invoice.MonitoringExpiration < DateTimeOffset.UtcNow))
                                    {
                                        var transactionResult = await _ExplorerClientProvider.GetExplorerClient(payment.GetCryptoCode())?.GetTransactionAsync(onChainPaymentData.Outpoint.Hash);
                                        var confirmationCount = transactionResult?.Confirmations ?? 0;
                                        onChainPaymentData.ConfirmationCount = confirmationCount;
                                        payment.SetCryptoPaymentData(onChainPaymentData);
                                        await _InvoiceRepository.UpdatePayments(new List <PaymentEntity> {
                                            payment
                                        });
                                    }
                                }
                            })
                                                                  .ToArray();
                            await Task.WhenAll(updateConfirmationCountIfNeeded);

                            if (await _InvoiceRepository.RemovePendingInvoice(invoice.Id))
                            {
                                _EventAggregator.Publish(new InvoiceStopWatchedEvent(invoice.Id));
                            }
                            break;
                        }

                        if (updateContext.Events.Count == 0)
                        {
                            break;
                        }
                    }
                    catch (Exception ex) when(!cancellation.IsCancellationRequested)
                    {
                        Logs.PayServer.LogError(ex, "Unhandled error on watching invoice " + invoiceId);
                        _ = Task.Delay(10000, cancellation)
                            .ContinueWith(t => Watch(invoiceId), TaskScheduler.Default);
                        break;
                    }
                }
            }
        }
コード例 #47
0
        public async Task SizeMixParallel(bool asciiOnly, bool useDeflateStream)
        {
            var harness = new StringTableTestHarness();
            var st      = harness.StringTable;

            ConcurrentDictionary <StringId, string> map = new ConcurrentDictionary <StringId, string>();
            var sb = new StringBuilder(3000000);

            var strings   = new BlockingCollection <string>();
            var stringIds = new BlockingCollection <StringId>();

            var createStringsTask = Task.Run(() =>
            {
                // zip through small sizes
                for (int i = 0; i < 3000; i++)
                {
                    sb.Length = 0;
                    sb.Append(asciiOnly ? 'x' : '建', i);
                    string str = sb.ToString();
                    strings.Add(str);
                }

                // now use increasingly large amounts, including exceeding the size of a single buffer's worth
                for (int i = 0; i < 100; i++)
                {
                    sb.Length = 0;
                    sb.Append('x', i * 10000);
                    string str = sb.ToString();
                    strings.Add(str);
                }

                strings.CompleteAdding();
            });

            var validateStringsTask = Task.Run(() =>
            {
                for (int i = 0; i < 2; i++)
                {
                    // make sure all the right strings come out
                    int startBias = 0;
                    var buf       = new char[4000000];
                    foreach (StringId sd in stringIds.GetConsumingEnumerable())
                    {
                        // get the actual string we kept
                        string str = map[sd];

                        // does the table report the right length?
                        int length = st.GetLength(sd);
                        XAssert.AreEqual(str.Length, length);

                        // put sentinel bytes in the char buffer, extract the string from the table, and check the sentinels
                        if (startBias > 0)
                        {
                            buf[startBias - 1] = (char)42;
                        }

                        buf[startBias + length] = (char)31415;
                        st.CopyString(sd, buf, startBias);

                        if (startBias > 0)
                        {
                            XAssert.AreEqual(42, buf[startBias - 1]);
                        }

                        XAssert.AreEqual(31415, buf[startBias + length]);

                        // make sure we got all the characters out that we should have
                        for (int j = 0; j < str.Length; j++)
                        {
                            XAssert.AreEqual(str[j], buf[startBias + j]);
                        }

                        startBias++;
                    }

                    // make sure the same behavior occurs after freezing
                    st.Freeze();
                }
            });

            Parallel.ForEach(strings.GetConsumingEnumerable(), str =>
            {
                StringId sd = st.AddString(str);
                map.TryAdd(sd, str);
                stringIds.Add(sd);
            });

            stringIds.CompleteAdding();

            await createStringsTask;
            await validateStringsTask;
        }
コード例 #48
0
 public IEnumerable <TNotification> TakeMany()
 {
     return(notifications.GetConsumingEnumerable());
 }
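Usage sketch below; the subscriber type and instance are hypothetical, and the backing `notifications` collection is assumed to be completed elsewhere once publishing stops.

 // Hypothetical consumer loop (the subscriber type and instance are assumptions):
 public Task PumpNotifications(NotificationSubscriber<TNotification> subscriber)
 {
     return Task.Run(() =>
     {
         foreach (var notification in subscriber.TakeMany())
         {
             // Blocks until a notification arrives; ends after CompleteAdding() is called on the backing collection.
             Console.WriteLine($"Handling notification: {notification}");
         }
     });
 }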
コード例 #49
0
        static void Main(string[] args)
        {
            int       n         = 10000;
            Stopwatch stopWatch = new Stopwatch();
            CancellationTokenSource cancelTokenSource = new CancellationTokenSource();
            CancellationToken       token             = cancelTokenSource.Token;
            Task task1 = new Task(() => {
                for (int i = 0; i < n; i++)
                {
                    if (token.IsCancellationRequested)
                    {
                        Console.WriteLine("Операция прервана");
                        return;
                    }
                    if (isSimple(i))
                    {
                        Console.WriteLine(i);
                    }
                }
            });

            stopWatch.Start();
            task1.Start();
            Thread.Sleep(1000);
            cancelTokenSource.Cancel(); //2
            Console.WriteLine("Status: " + task1.Status);
            Console.WriteLine("isComplited: " + task1.IsCompleted);
            Console.WriteLine("Id: " + task1.Id);
            Task.WaitAll(task1);
            stopWatch.Stop();
            TimeSpan ts          = stopWatch.Elapsed;
            string   elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}",
                                                 ts.Hours, ts.Minutes, ts.Seconds,
                                                 ts.Milliseconds / 10);

            Console.WriteLine("RunTime " + elapsedTime);

            Task <string> tsk1 = new Task <string>(() => { return("London is"); });
            Task <string> tsk2 = new Task <string>(() => { return("the capital"); });
            Task <string> tsk3 = new Task <string>(() => { return("of Great Britain"); });

            tsk1.Start();
            tsk2.Start();
            tsk3.Start();
            Task.WaitAll(tsk1, tsk2, tsk3);
            //Task tsk4 = Task.WhenAll(tsk1, tsk2, tsk3).ContinueWith(t => { Console.WriteLine(tsk1.Result + " " + tsk2.Result + " " + tsk3.Result); }); //4
            Task tsk4 = Task.Run(() => { Console.WriteLine(tsk1.Result + " " + tsk2.Result + " " + tsk3.Result); }); //3

            Task <int> what    = Task.Run(() => { return(5); });
            var        awaiter = what.GetAwaiter();

            awaiter.OnCompleted(() => { Console.WriteLine("Task completed with result: " + what.Result); }); //4

            Parallel.For(1, 10, sum);                                                                        //5
            ParallelLoopResult result = Parallel.ForEach <int>(new List <int>()
            {
                1, 3, 5, 8
            }, sum);                                                                               //5

            Parallel.Invoke(() => sum(100),
                            () => {
                int[] mass = new int[n];
                for (int i = 0; i < n; i++)
                {
                    mass[i] = i;
                }
            }
                            );

            BlockingCollection <int> blockcoll = new BlockingCollection <int>();

            for (int producer = 0; producer < 5; producer++)
            {
                Thread.Sleep(producer * 100);
                int id = producer; // capture a copy: the for-loop variable is shared by every closure
                Task.Factory.StartNew(() =>
                {
                    blockcoll.Add(id);

                    Console.WriteLine("Producer " + id);
                });
            }
            Task consumer = Task.Factory.StartNew(
                () =>
            {
                foreach (var item in blockcoll.GetConsumingEnumerable())
                {
                    Console.WriteLine(" Покупатель взял " + item);
                }
            });

            SumAsync();

            Console.Read();
        }
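In the example above the blocking collection is never marked complete, so the consumer task only ends when the process exits at Console.Read(). A minimal, illustrative producer/consumer with a clean shutdown looks like this:

        // Illustrative producer/consumer with a clean shutdown (not part of the original example):
        static void ProducerConsumerDemo()
        {
            var queue = new BlockingCollection<int>(boundedCapacity: 100);

            Task producer = Task.Run(() =>
            {
                for (int i = 0; i < 5; i++)
                {
                    queue.Add(i);
                    Console.WriteLine("Producer added " + i);
                }
                queue.CompleteAdding(); // lets GetConsumingEnumerable() finish
            });

            Task consumer = Task.Run(() =>
            {
                foreach (int item in queue.GetConsumingEnumerable())
                {
                    Console.WriteLine("Consumer took " + item);
                }
            });

            Task.WaitAll(producer, consumer);
        }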
コード例 #50
0
        public MainWindowViewModel()
        {
            this.Alpha = new PlotModel()
            {
                Title = "Pressure Readback (MKS)"
            };
            DateTimeAxis xAxis = new DateTimeAxis
            {
                Position     = AxisPosition.Bottom,
                StringFormat = "HH:mm:ss:fff",

                Title              = "Time",
                MinorIntervalType  = DateTimeIntervalType.Seconds,
                IntervalType       = DateTimeIntervalType.Seconds,
                MajorGridlineStyle = LineStyle.Solid,
                MinorGridlineStyle = LineStyle.None,
            };

            this.Alpha.Axes.Add(xAxis);
            this.Alpha.Axes.Add(new LinearAxis()
            {
                AbsoluteMinimum = 0, Position = AxisPosition.Left, Title = "milli-torr"
            });
            var lineSeries = new LineSeries()
            {
                Color = OxyColors.DeepSkyBlue, Title = "SLIM"
            };

            this.Alpha.Series.Add(lineSeries);

            var betaLineSeries = new LineSeries()
            {
                Color = OxyColors.ForestGreen, Title = "IFT"
            };

            this.Alpha.Series.Add(betaLineSeries);

            this.PressureSettings = new PressureSettingsViewModel();

            this.WhenAnyValue(x => x.SaveToFile).Where(x => x).Select(async b =>
            {
                dataToSave = new BlockingCollection <ChannelViewModel>();

                this.StartingDateTime = DateTime.Now;

                await Task.Run(async() =>
                {
                    foreach (var analogWaveform in dataToSave.GetConsumingEnumerable())
                    {
                        var filePath = Path.Combine(PressureSettings.Directory, $"{this.PressureSettings.FileName}{analogWaveform.Address.Replace('/', '_')}.csv");
                        using (var filestream = File.Open(filePath, FileMode.Append))
                        {
                            using (var streamWriter = new StreamWriter(filestream))
                            {
                                foreach (var analogWaveformSample in analogWaveform.Samples)
                                {
                                    await streamWriter.WriteLineAsync(
                                        $"{analogWaveformSample.PrecisionTimeStamp.ToString("HH:mm:ss:fff")}\t{analogWaveform.ConvertPressure(analogWaveformSample.Value)}");
                                }
                            }
                        }
                    }
                    this.SaveToFile = false;
                });

                return(b);
            }).Subscribe();

            this.Stop = ReactiveCommand.Create(() =>
            {
                this.tokenSource.Cancel();
            });
            this.Start = ReactiveCommand.CreateFromTask(async() =>
            {
                tokenSource = new CancellationTokenSource();
                await Acquire(tokenSource.Token);
            }, this.WhenAnyValue(x => x.PressureSettings.Directory).Select(x => x != null));

            this.Start.ThrownExceptions.Subscribe(exception =>
            {
                TaskDialog dialog      = new TaskDialog();
                dialog.Icon            = TaskDialogStandardIcon.Error;
                dialog.Text            = exception.Message;
                dialog.StandardButtons = TaskDialogStandardButtons.Ok;
                dialog.Show();
            });

            this.OpenSettings = ReactiveCommand.CreateFromTask(async() =>
            {
                await Application.Current.MainWindow.ShowChildWindowAsync(new ChildWindow()
                {
                    Content = new PressureSettingsView()
                    {
                        ViewModel = this.PressureSettings
                    },
                    IsModal         = true,
                    CloseOnOverlay  = true,
                    ShowCloseButton = true
                });
            });

            this.PressureSettings.AiPressureChannels.ItemsAdded.Subscribe(model =>
            {
                using (var streamWriter = new StreamWriter("channels.txt"))
                {
                    foreach (var pressureSettingsAiPressureChannel in PressureSettings.AiPressureChannels)
                    {
                        streamWriter.WriteLine($"{pressureSettingsAiPressureChannel.Address},{pressureSettingsAiPressureChannel.MultiplierFactor}");
                    }
                }
            });

            this.PressureSettings.AiPressureChannels.ItemsRemoved.Subscribe(model =>
            {
                using (var streamWriter = new StreamWriter("channels.txt"))
                {
                    foreach (var pressureSettingsAiPressureChannel in PressureSettings.AiPressureChannels)
                    {
                        streamWriter.WriteLine($"{pressureSettingsAiPressureChannel.Address},{pressureSettingsAiPressureChannel.MultiplierFactor}");
                    }
                }
            });

            this.PressureSettings.AiPressureChannels.ItemChanged.Subscribe(args =>
            {
                using (var streamWriter = new StreamWriter("channels.txt"))
                {
                    foreach (var pressureSettingsAiPressureChannel in PressureSettings.AiPressureChannels)
                    {
                        streamWriter.WriteLine($"{pressureSettingsAiPressureChannel.Address},{pressureSettingsAiPressureChannel.MultiplierFactor}");
                    }
                }
            });
        }
コード例 #51
0
        /// <summary>
        /// Connect to a given COM port and set up handlers.
        /// </summary>
        /// <param name="port">COM port identifier</param>
        public void Run(string port)
        {
            if (_serialPort == null)
            {
                _serialPort = new SerialPort(port, 3600, Parity.None, 8, StopBits.One);
            }
            if (!_serialPort.IsOpen)
            {
                _serialPort.Open();
            }


            var    buffer      = new byte[256];
            Action startListen = null;
            var    onResult    = new AsyncCallback(result => OnResult(result, startListen, _serialPort, buffer));

            Task.Run(() =>
            {
                foreach (var item in Queue.GetConsumingEnumerable())
                {
                    if (item == null || item.Length == 0)
                    {
                        return;
                    }

                    if (item[item.Length - 1] == 12)
                    {
                        var message = Encoding.UTF8.GetString(_bytes.Concat(item).ToArray());

                        Console.WriteLine($@"Message Received: {Environment.NewLine}{message}{Environment.NewLine}");

                        _bytes.Clear();
                        _serialPort.Close();
                        OnComplete?.Invoke(this, message);
                    }
                    else
                    {
                        _bytes.AddRange(item);
                    }
                }
            });

            startListen = () =>
            {
                _serialPort.BaseStream.BeginRead(buffer, 0, buffer.Length, onResult, null);
            };

            startListen();

            while (_serialPort.IsOpen)
            {
                // handle user's console window interaction.
            }

            Queue.CompleteAdding();

            if (_serialPort.IsOpen)
            {
                _serialPort.Close();
            }
        }
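OnResult is not shown in this example. A hedged sketch of what it might do, assuming it copies the bytes that were read, posts them to Queue, and then restarts the asynchronous read, is:

        // Hypothetical OnResult implementation (the original is not shown; this is an assumption):
        private void OnResult(IAsyncResult result, Action startListen, SerialPort port, byte[] buffer)
        {
            if (!port.IsOpen)
            {
                return;
            }

            int read = port.BaseStream.EndRead(result);
            if (read > 0)
            {
                var chunk = new byte[read];
                Array.Copy(buffer, chunk, read);
                Queue.Add(chunk); // hand the chunk to the consuming task above
            }

            startListen(); // queue up the next asynchronous read
        }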
コード例 #52
0
        public async Task ProcessAsync(MessageContext superContext, HttpApiClient api)
        {
            var context      = superContext as GroupMessage;
            var groupMembers = await api.GetGroupMemberListAsync(context.GroupId);

            Logger.Debug($"群 {context.GroupId} 开启今日高光,成员共 {groupMembers.Length} 名。");

            var stopwatch = Stopwatch.StartNew();

            await using var dbContext = _dbContextFactory.CreateDbContext();
            //var bindings = await (from b in dbContext.Bindings
            //                      join mi in groupMembers on b.UserId equals mi.UserId
            //                      select new { Info = mi, Binding = b.OsuId }).ToListAsync();
            ////var history = (from bi in bindings.AsQueryable()
            ////               join ui in motherShip.Userinfo on bi.Binding equals ui.UserId into histories
            ////               select new { bi.Info, bi.Binding, History = histories.OrderByDescending(ui => ui.QueryDate).First() }).ToList();
            //var osuIds = bindings.Select(b => b.Binding).Distinct().ToList();
            var qqs    = groupMembers.Select(mi => mi.UserId).ToList();
            var osuIds = await dbContext.Bindings.Where(bi => qqs.Contains(bi.UserId)).Select(bi => bi.OsuId).Distinct().ToListAsync();

            Logger.Debug($"找到 {osuIds.Count} 个绑定信息,耗时 {stopwatch.ElapsedMilliseconds}ms。");

            Bleatingsheep.Osu.Mode mode = 0;
            if (!string.IsNullOrEmpty(ModeString))
            {
                try
                {
                    mode = Bleatingsheep.Osu.ModeExtensions.Parse(ModeString);
                }
                catch (FormatException)
                {
                    // ignore
                }
            }

            stopwatch = Stopwatch.StartNew();
            List <UserSnapshot> history = await GetHistories(osuIds, mode).ConfigureAwait(false);

            Logger.Debug($"找到 {history.Count} 个历史信息,耗时 {stopwatch.ElapsedMilliseconds}ms。");

            // A ConcurrentBag would be sufficient here; the ConcurrentDictionary below is heavier than strictly needed.
            var nowInfos = new ConcurrentDictionary <int, UserInfo>(10, history.Count);
            var fails    = new BlockingCollection <int>();

            stopwatch = Stopwatch.StartNew();
            var fetchIds          = history.Select(h => (int)h.UserId).Distinct().ToList();
            int completes         = 0;
            var cancellationToken = new CancellationTokenSource(TimeSpan.FromMinutes(1)).Token;
            var tasks             = fetchIds.Concat(fails.GetConsumingEnumerable()).Select(async bi =>
            {
                var (success, userInfo) = await OsuApi.GetCachedUserInfo(bi, (Bleatingsheep.Osu.Mode)mode).ConfigureAwait(false);
                if (!success)
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        fails.CompleteAdding();
                    }
                    if (!fails.IsAddingCompleted)
                    {
                        try
                        {
                            fails.Add(bi);
                        }
                        catch (InvalidOperationException)
                        {
                        }
                    }
                }
                else
                {
                    Interlocked.Increment(ref completes);
                    if (completes == fetchIds.Count)
                    {
                        fails.CompleteAdding();
                    }
                    if (userInfo != null)
                    {
                        nowInfos[bi] = userInfo;
                    }
                }
            }).ToArray();
            await Task.WhenAll(tasks).ConfigureAwait(false);

            Logger.Debug($"查询 API 花费 {stopwatch.ElapsedMilliseconds}ms,失败 {fails.Count} 个。");

            var cps = (from his in history
                       join now in nowInfos on his.UserId equals now.Key
                       where his.UserInfo.PlayCount != now.Value.PlayCount
                       orderby now.Value.Performance - his.UserInfo.Performance descending
                       select new { Old = his.UserInfo, New = now.Value, Meta = his }).ToList();

            if (fails.Count > 0)
            {
                await api.SendGroupMessageAsync(context.GroupId, $"失败了 {fails.Count} 人。");
            }
            if (cps.Count == 0)
            {
                await api.SendMessageAsync(context.Endpoint, "你群根本没有人屙屎。");

                return;
            }
            else
            {
                var increase = cps.Find(cp => cp.Old.Performance != 0 && cp.New.Performance != cp.Old.Performance);
                var mostPlay = cps.OrderByDescending(cp => cp.New.TotalHits - cp.Old.TotalHits).First();
                var sb       = new StringBuilder(100);
                sb.AppendLine("最飞升:");
                if (increase != null)
                {
                    // sb.AppendLine($"{increase.New.Name} 增加了 {increase.New.Performance - increase.Old.Performance:#.##} PP。")
                    sb.Append(increase.New.Name).Append(" 增加了 ").AppendFormat("{0:#.##}", increase.New.Performance - increase.Old.Performance).AppendLine(" PP。")
                    // .AppendLine($"({increase.Old.Performance:#.##} -> {increase.New.Performance:#.##})");
                    .Append('(').AppendFormat("{0:#.##}", increase.Old.Performance).Append(" -> ").AppendFormat("{0:#.##}", increase.New.Performance).AppendLine(")");
                }
                else
                {
                    sb.AppendLine("你群没有人飞升。");
                }
                sb.AppendLine("最肝:")
                // .Append($"{mostPlay.New.Name} 打了 {mostPlay.New.TotalHits - mostPlay.Old.TotalHits} 下。");
                .Append(mostPlay.New.Name).Append(" 打了 ").Append(mostPlay.New.TotalHits - mostPlay.Old.TotalHits).Append(" 下。");


                await api.SendMessageAsync(context.Endpoint, sb.ToString());
            }
        }
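The retry trick in this example — appending fails.GetConsumingEnumerable() to the original ID list so that failed lookups are re-enqueued and retried until everything succeeds or a timeout fires — can be isolated into a small sketch. The FetchAllAsync name, the fetchAsync delegate and the time budget below are placeholders, not part of the original bot:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

public static class RetryViaBlockingCollection
{
    // Fetch each id once; ids whose fetch fails are fed back through 'fails'
    // and retried until all succeed or the time budget runs out.
    public static async Task<IDictionary<int, string>> FetchAllAsync(
        IReadOnlyList<int> ids,
        Func<int, Task<(bool success, string value)>> fetchAsync,
        TimeSpan budget)
    {
        var results = new ConcurrentDictionary<int, string>();
        var fails = new BlockingCollection<int>();
        var completes = 0;
        var cancellation = new CancellationTokenSource(budget).Token;

        var tasks = ids.Concat(fails.GetConsumingEnumerable()).Select(async id =>
        {
            var (success, value) = await fetchAsync(id).ConfigureAwait(false);
            if (!success)
            {
                if (cancellation.IsCancellationRequested)
                {
                    fails.CompleteAdding();                   // give up: stop the retry stream
                }
                if (!fails.IsAddingCompleted)
                {
                    try { fails.Add(id); }                    // schedule a retry
                    catch (InvalidOperationException) { }     // racing with CompleteAdding
                }
            }
            else
            {
                results[id] = value;
                if (Interlocked.Increment(ref completes) == ids.Count)
                {
                    fails.CompleteAdding();                   // everything succeeded: stop the stream
                }
            }
        }).ToArray();

        await Task.WhenAll(tasks).ConfigureAwait(false);
        return results;
    }
}

Note that .ToArray() forces the enumeration, so the calling thread blocks on that line until CompleteAdding is reached — exactly as in the original method.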
コード例 #53
0
        private void ReplicationRun()
        {
            try
            {
                foreach (var item in _replicationQueue.GetConsumingEnumerable(_cts.Token))
                {
                    ReplicationItem currentItem = item;

                    ScriptScope scope = _scriptEngine.CreateScope();
                    scope.SetVariable("fileInfo", currentItem);
                    scope.SetVariable("fileset", currentItem.Fileset);
                    scope.SetVariable("retryCopy", new Action(() =>
                    {
                        currentItem.IncrementRetries();
                        _replicationQueue.Add(currentItem);
                    }));
                    scope.SetVariable("log", new Action <string>(message =>
                    {
                        _activityLogService.Log(currentItem.Fileset.Id, message);
                    }));

                    if (File.Exists(_copyEventScriptFile))
                    {
                        try
                        {
                            ExecuteScripts(_scriptEngine, scope, currentItem.Fileset.OnCopyScripts);
                            ExecuteScripts(_scriptEngine, scope, currentItem.Fileset.OnCopySuccessScripts);

                            _eventAggregator.GetEvent <CopiedEvent>().Publish(currentItem);
                        }
                        catch (Exception e)
                        {
                            try
                            {
                                scope.SetVariable("exception", e);
                                ExecuteScripts(_scriptEngine, scope, currentItem.Fileset.OnCopyErrorScripts);
                            }
                            catch (Exception exception)
                            {
                                Trace.WriteLine(exception);
                            }

                            _eventAggregator.GetEvent <CopyErrorEvent>().Publish(Tuple.Create(currentItem, e));
                        }
                        finally
                        {
                            try
                            {
                                ExecuteScripts(_scriptEngine, scope, currentItem.Fileset.OnCopyFinallyScripts);
                            }
                            catch (Exception e)
                            {
                                Trace.WriteLine(e);
                            }
                        }
                    }
                }
            }
            catch (OperationCanceledException oce)
            {
                Trace.WriteLine(oce);
            }
        }
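A detail worth noting here is the retryCopy action: it re-adds the current item to the same _replicationQueue that is being consumed, so a failed copy simply reappears later in the consuming loop. The mechanism in isolation looks like the following sketch (the item type, retry limit and "two failures then success" behaviour are invented for the demo):

using System;
using System.Collections.Concurrent;

public static class SelfRequeueDemo
{
    private sealed class WorkItem
    {
        public string Name;
        public int Retries;
    }

    public static void Demo()
    {
        var queue = new BlockingCollection<WorkItem>();
        queue.Add(new WorkItem { Name = "fileset-1" });

        foreach (var item in queue.GetConsumingEnumerable())
        {
            bool failed = item.Retries < 2;            // pretend the first two attempts fail
            if (failed)
            {
                item.Retries++;
                queue.Add(item);                       // re-enqueue onto the very queue we are consuming
            }
            else
            {
                Console.WriteLine($"{item.Name} replicated after {item.Retries} retries");
                queue.CompleteAdding();                // demo only: stop once the single item succeeds
            }
        }
    }
}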
コード例 #54
0
 public IEnumerator <T> GetEnumerator()
 {
     return(_collection.GetConsumingEnumerable().GetEnumerator());
 }
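This one-liner turns a wrapper class into a blocking stream: any foreach over it consumes items as they arrive and only finishes when CompleteAdding() has been called on the underlying collection. A minimal, self-contained illustration of that behaviour (the BlockingStream name is made up for the sketch):

using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

public sealed class BlockingStream<T> : IEnumerable<T>
{
    private readonly BlockingCollection<T> _collection = new BlockingCollection<T>();

    public void Add(T item) => _collection.Add(item);
    public void Complete() => _collection.CompleteAdding();

    public IEnumerator<T> GetEnumerator()
    {
        // Each enumeration consumes items; it blocks while empty and ends after Complete().
        return _collection.GetConsumingEnumerable().GetEnumerator();
    }

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}

public static class BlockingStreamDemo
{
    public static void Demo()
    {
        var stream = new BlockingStream<int>();

        var producer = Task.Run(() =>
        {
            for (int i = 0; i < 3; i++) stream.Add(i);
            stream.Complete();
        });

        foreach (var item in stream)   // blocks until items arrive, exits after Complete()
        {
            Console.WriteLine(item);
        }

        producer.Wait();
    }
}

Because GetConsumingEnumerable removes items, two simultaneous foreach loops over such a wrapper would split the items between them rather than each seeing all of them.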
コード例 #55
0
        private void Worker()
        {
            var  flushMessages  = new Queue <WorkerMessage>();
            var  cts            = new CancellationTokenSource();
            bool changed        = false;
            var  lastUpdateTime = DateTime.Now;

            try {
                // First time through, we don't want to abort the queue. There
                // should be at least one message or the worker would not have
                // been started.

                foreach (var msg in _workerQueue.GetConsumingEnumerable(cts.Token))
                {
                    // Prevent timeouts while processing the message
                    cts.CancelAfter(-1);

                    if (msg is WorkerMessage.FlushMessage)
                    {
                        // Keep flush messages until we've exited the loop
                        flushMessages.Enqueue(msg);
                    }
                    else
                    {
                        // Apply the message to our collection
                        changed |= msg.Apply(_items, _itemsLock);
                    }

                    // Every second, we want to force another update
                    if (changed)
                    {
                        var currentTime = DateTime.Now;
                        if ((currentTime - lastUpdateTime).TotalMilliseconds > 1000)
                        {
                            Refresh();
                            lastUpdateTime = currentTime;
                            changed        = false;
                        }
                    }

                    // Reset the timeout back to 1 second
                    cts.CancelAfter(1000);
                }
            } catch (OperationCanceledException) {
                // Expected when the timeout expires
            } catch (ObjectDisposedException ex) {
                // We have been disposed.
                Debug.Assert(
                    ex.ObjectName == "BlockingCollection",
                    "Handled ObjectDisposedException for the wrong type"
                    );
                return;
            } finally {
                lock (_workerQueue) {
                    _hasWorker = false;
                }
            }

            // Handle any changes that weren't handled in the loop
            if (changed)
            {
                Refresh();
            }

            // Notify all the flush messages we received
            while (flushMessages.Any())
            {
                var msg = flushMessages.Dequeue();
                msg.Apply(_items, _itemsLock);
            }

            try {
                if (_workerQueue.IsCompleted)
                {
                    _workerQueue.Dispose();
                }
            } catch (ObjectDisposedException) {
            }
        }
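The worker above uses a pattern worth isolating: a CancellationTokenSource whose timer is disabled (CancelAfter(-1)) while a message is being processed and re-armed (CancelAfter(1000)) afterwards, so that GetConsumingEnumerable throws OperationCanceledException — and the worker thread exits — after a second of inactivity. A stripped-down sketch of just that idle-timeout mechanism (queue, handler and the Drain name are illustrative):

using System;
using System.Collections.Concurrent;
using System.Threading;

public static class IdleTimeoutWorker
{
    // Consume items until the queue has been idle for 'idle'; then return.
    public static void Drain<T>(BlockingCollection<T> queue, Action<T> handle, TimeSpan idle)
    {
        using (var cts = new CancellationTokenSource())
        {
            cts.CancelAfter(idle);                     // arm the idle timer before the first wait
            try
            {
                foreach (var item in queue.GetConsumingEnumerable(cts.Token))
                {
                    cts.CancelAfter(Timeout.Infinite); // suspend the timer while working
                    handle(item);
                    cts.CancelAfter(idle);             // re-arm: cancel if nothing arrives in time
                }
            }
            catch (OperationCanceledException)
            {
                // Idle timeout expired; the worker simply stops until new work restarts it.
            }
        }
    }
}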
コード例 #56
0
        private static IEnumerable <PSObject> InvokeTopLevelPowerShell(
            PowerShell powerShell,
            CancellationToken cancellationToken,
            PSCmdlet cmdlet,
            PSInvocationSettings invocationSettings,
            string errorMessageTemplate)
        {
            using (var mergedOutput = new BlockingCollection <Func <PSCmdlet, IEnumerable <PSObject> > >(s_blockingCollectionCapacity))
            {
                var asyncOutput = new PSDataCollection <PSObject>();
                EventHandler <DataAddedEventArgs> outputHandler = GetStreamForwarder <PSObject>(
                    output => mergedOutput.Add(_ => new[] { output }),
                    swallowInvalidOperationExceptions: true);

                EventHandler <DataAddedEventArgs> errorHandler = GetStreamForwarder <ErrorRecord>(
                    errorRecord => mergedOutput.Add(
                        delegate(PSCmdlet c)
                {
                    errorRecord = GetErrorRecordForRemotePipelineInvocation(errorRecord, errorMessageTemplate);
                    HandleErrorFromPipeline(c, errorRecord, powerShell);
                    return(Enumerable.Empty <PSObject>());
                }),
                    swallowInvalidOperationExceptions: true);

                EventHandler <DataAddedEventArgs> warningHandler = GetStreamForwarder <WarningRecord>(
                    warningRecord => mergedOutput.Add(
                        delegate(PSCmdlet c)
                {
                    c.WriteWarning(warningRecord.Message);
                    return(Enumerable.Empty <PSObject>());
                }),
                    swallowInvalidOperationExceptions: true);

                EventHandler <DataAddedEventArgs> verboseHandler = GetStreamForwarder <VerboseRecord>(
                    verboseRecord => mergedOutput.Add(
                        delegate(PSCmdlet c)
                {
                    c.WriteVerbose(verboseRecord.Message);
                    return(Enumerable.Empty <PSObject>());
                }),
                    swallowInvalidOperationExceptions: true);

                EventHandler <DataAddedEventArgs> debugHandler = GetStreamForwarder <DebugRecord>(
                    debugRecord => mergedOutput.Add(
                        delegate(PSCmdlet c)
                {
                    c.WriteDebug(debugRecord.Message);
                    return(Enumerable.Empty <PSObject>());
                }),
                    swallowInvalidOperationExceptions: true);

                EventHandler <DataAddedEventArgs> informationHandler = GetStreamForwarder <InformationRecord>(
                    informationRecord => mergedOutput.Add(
                        delegate(PSCmdlet c)
                {
                    c.WriteInformation(informationRecord);
                    return(Enumerable.Empty <PSObject>());
                }),
                    swallowInvalidOperationExceptions: true);

                asyncOutput.DataAdded += outputHandler;
                powerShell.Streams.Error.DataAdded       += errorHandler;
                powerShell.Streams.Warning.DataAdded     += warningHandler;
                powerShell.Streams.Verbose.DataAdded     += verboseHandler;
                powerShell.Streams.Debug.DataAdded       += debugHandler;
                powerShell.Streams.Information.DataAdded += informationHandler;

                try
                {
                    // TODO/FIXME: ETW event for PowerShell invocation

                    var asyncResult = powerShell.BeginInvoke <PSObject, PSObject>(
                        input: null,
                        output: asyncOutput,
                        settings: invocationSettings,
                        callback: delegate
                    {
                        try
                        {
                            mergedOutput.CompleteAdding();
                        }
                        catch (InvalidOperationException)
                        // ignore exceptions thrown because mergedOutput.CompleteAdding was called
                        {
                        }
                    },
                        state: null);

                    using (cancellationToken.Register(powerShell.Stop))
                    {
                        try
                        {
                            foreach (Func <PSCmdlet, IEnumerable <PSObject> > mergedOutputItem in mergedOutput.GetConsumingEnumerable())
                            {
                                foreach (PSObject outputObject in mergedOutputItem(cmdlet))
                                {
                                    yield return(outputObject);
                                }
                            }
                        }
                        finally
                        {
                            mergedOutput.CompleteAdding();
                            powerShell.EndInvoke(asyncResult);
                        }
                    }
                }
                finally
                {
                    asyncOutput.DataAdded -= outputHandler;
                    powerShell.Streams.Error.DataAdded       -= errorHandler;
                    powerShell.Streams.Warning.DataAdded     -= warningHandler;
                    powerShell.Streams.Verbose.DataAdded     -= verboseHandler;
                    powerShell.Streams.Debug.DataAdded       -= debugHandler;
                    powerShell.Streams.Information.DataAdded -= informationHandler;
                }
            }
        }
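In this example several PowerShell streams (output, error, warning, verbose, debug, information) all funnel delegates into a single BlockingCollection, and the enumerating thread executes every delegate itself, so all cmdlet interaction stays on one thread. Stripped of the PowerShell specifics, the pattern reduces to roughly this sketch (the producer contents and the SingleThreadMerger name are invented; the original stores Func<PSCmdlet, IEnumerable<PSObject>> instead of plain Action):

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

public static class SingleThreadMerger
{
    public static void Demo()
    {
        var merged = new BlockingCollection<Action>();

        // Each producer posts a delegate; only the consumer thread ever runs them.
        var producers = Task.WhenAll(
            Task.Run(() =>
            {
                for (int i = 0; i < 3; i++)
                {
                    int n = i;   // capture a copy, not the loop variable
                    merged.Add(() => Console.WriteLine($"output {n}"));
                }
            }),
            Task.Run(() => merged.Add(() => Console.WriteLine("warning: something odd"))));

        // Close the collection once every producer has finished, so the consumer loop ends.
        producers.ContinueWith(_ => merged.CompleteAdding());

        foreach (var action in merged.GetConsumingEnumerable())
        {
            action();   // all delegates run here, on the single consuming thread, in arrival order
        }
    }
}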
コード例 #57
0
        /// <summary>
        ///   Process files (get fingerprint signatures, hash them into storage)
        /// </summary>
        /// <param name = "files">List of files to be hashed</param>
        /// <param name = "processed">Callback invoked once 1 track is processed</param>
        /// <returns>List of processed tracks</returns>
        private List <TrackData> ProcessFiles(IEnumerable <string> files, Action <TrackData> processed)
        {
            /*preprocessing stage ended, now make sure to do the actual job*/

            int numProcs = Environment.ProcessorCount;

            // 1024 (Kb) * BufferSize / SampleRate * SecondsRead * 4 (1 float = 4 bytes) / 1024 (Kb)
            const int Buffersize =
                (int)((1024.0 * BufferSize) / ((double)SampleRate * SecondsToProcess / 1000 * 4 / 1024));

            // ~317 songs are allowed for 15 seconds snippet at 5512 Hz sample rate
            var buffer              = new BlockingCollection <Tuple <TrackData, float[]> >(Buffersize);
            var processedtracks     = new List <TrackData>();
            var consumers           = new List <Task>();
            var producers           = new List <Task>();
            CancellationToken token = cts.Token;
            var bag = new ConcurrentBag <string>(files);

            int maxprod = numProcs > 2 ? 2 : numProcs;

            for (var i = 0; i < maxprod; i++)
            {
                /*producers*/
                producers.Add(Task.Factory.StartNew(
                                  () =>
                {
                    while (!bag.IsEmpty)
                    {
                        if (token.IsCancellationRequested)
                        {
                            return;
                        }

                        string file;
                        if (!bag.TryTake(out file))
                        {
                            return;
                        }

                        TrackData track;
                        float[] samples;
                        try
                        {
                            track   = trackHelper.GetTrack(MinTrackLength, MaxTrackLength, file);       // lame casting I know
                            samples = trackHelper.GetTrackSamples(track, SampleRate, SecondsToProcess, StartProcessingAtSecond);
                        }
                        catch
                        {
                            continue;
                            /*Continue processing even if getting samples failed*/
                            /*the failing might be caused by a bunch of File I/O factors, that cannot be considered critical*/
                        }

                        try
                        {
                            // TryAdd with a 1 ms timeout: if the bounded buffer stays full, the sample is dropped silently.  /*producer*/
                            buffer.TryAdd(new Tuple <TrackData, float[]>(track, samples), 1, token);
                        }
                        catch (OperationCanceledException)
                        {
                            /*it is safe to break here, operation was canceled*/
                            break;
                        }
                    }
                },
                                  token));
            }

            /*When all producers ended with their operations, call the CompleteAdding() to tell Consumers no more items are available*/
            Task.Factory.ContinueWhenAll(producers.ToArray(), p => buffer.CompleteAdding());

            for (int i = 0; i < numProcs * 4; i++)
            {
                /*consumer*/
                consumers.Add(Task.Factory.StartNew(
                                  () =>
                {
                    foreach (Tuple <TrackData, float[]> tuple in buffer.GetConsumingEnumerable())    /*If OCE is thrown it will be caught in the caller's AggregateException*/
                    {
                        if (tuple != null)
                        {
                            /*Long running procedure*/
                            duplicatesDetectorService.CreateInsertFingerprints(tuple.Item2, tuple.Item1);

                            // NOTE: List<T> is not thread-safe; with several consumer tasks these Adds would be safer through a ConcurrentBag or a lock.
                            processedtracks.Add(tuple.Item1);
                            if (processed != null)
                            {
                                processed.Invoke(tuple.Item1);
                            }
                        }
                    }
                },
                                  token));
            }

            Task.WaitAll(consumers.ToArray()); /*wait for all consumers to end*/
            return(processedtracks);
        }
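The skeleton of this example — a bounded BlockingCollection between a fixed number of producer tasks and a larger pool of consumer tasks, with CompleteAdding() fired once all producers are done — is reusable on its own. A rough sketch of that skeleton, using a thread-safe result bag instead of the shared List<T> above (all names and the generic shape are placeholders, not the fingerprinting code itself):

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class BoundedPipeline
{
    // A few producers load work items into a bounded buffer; a larger pool of consumers
    // processes them.  CompleteAdding() is called once every producer has finished, which
    // lets the consumers' GetConsumingEnumerable() loops terminate.
    public static List<TResult> Run<TInput, TWork, TResult>(
        IEnumerable<TInput> inputs,
        Func<TInput, TWork> load,
        Func<TWork, TResult> process,
        int boundedCapacity,
        int producerCount,
        int consumerCount)
    {
        var pending = new ConcurrentBag<TInput>(inputs);
        var buffer = new BlockingCollection<TWork>(boundedCapacity);
        var results = new ConcurrentBag<TResult>();   // thread-safe, unlike a shared List<T>

        var producers = Enumerable.Range(0, producerCount)
            .Select(_ => Task.Run(() =>
            {
                while (pending.TryTake(out var input))
                {
                    buffer.Add(load(input));          // blocks while the bounded buffer is full
                }
            }))
            .ToArray();

        // Signal "no more items" only after all producers are done.
        Task.WhenAll(producers).ContinueWith(_ => buffer.CompleteAdding());

        var consumers = Enumerable.Range(0, consumerCount)
            .Select(_ => Task.Run(() =>
            {
                foreach (var work in buffer.GetConsumingEnumerable())
                {
                    results.Add(process(work));
                }
            }))
            .ToArray();

        Task.WaitAll(consumers);
        return results.ToList();
    }
}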
コード例 #58
0
        private static async Task InitializeInstance(ITestOutputHelper output)
        {
            var port = FindAvailablePort();
            var uri  = new UriBuilder("http", "localhost", port, "/wd/hub").Uri;

            var psi = new ProcessStartInfo
            {
                FileName  = "npm",
                Arguments = $"run selenium-standalone start -- -- -port {port}",
                RedirectStandardOutput = true,
                RedirectStandardError  = true,
            };

            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                psi.FileName  = "cmd";
                psi.Arguments = $"/c npm {psi.Arguments}";
            }

            // It's important that we get the folder value before we start the process to prevent
            // untracked processes when the tracking folder is not correctly configure.
            var trackingFolder = GetProcessTrackingFolder();

            if (!Directory.Exists(trackingFolder))
            {
                throw new InvalidOperationException($"Invalid tracking folder. Set the 'SeleniumProcessTrackingFolder' MSBuild property to a valid folder.");
            }

            Process process     = null;
            Process sentinel    = null;
            string  pidFilePath = null;

            try
            {
                process     = Process.Start(psi);
                pidFilePath = await WriteTrackingFileAsync(output, trackingFolder, process);

                sentinel = StartSentinelProcess(process, pidFilePath, SeleniumProcessTimeout);
            }
            catch
            {
                ProcessCleanup(process, pidFilePath);
                ProcessCleanup(sentinel, pidFilePath: null);
                throw;
            }

            // Log output for selenium standalone process.
            // This is for the case where the server fails to launch.
            var logOutput = new BlockingCollection <string>();

            process.OutputDataReceived += LogOutput;
            process.ErrorDataReceived  += LogOutput;

            process.BeginOutputReadLine();
            process.BeginErrorReadLine();

            // The Selenium sever has to be up for the entirety of the tests and is only shutdown when the application (i.e. the test) exits.
            // AppDomain.CurrentDomain.ProcessExit += (sender, args) => ProcessCleanup(process, pidFilePath);

            // Log
            void LogOutput(object sender, DataReceivedEventArgs e)
            {
                logOutput.TryAdd(e.Data);

                // We avoid logging on the output here because it is unreliable. We can only log in the diagnostics sink.
                lock (_diagnosticsMessageSink)
                {
                    _diagnosticsMessageSink.OnMessage(new DiagnosticMessage(e.Data));
                }
            }

            var httpClient = new HttpClient
            {
                Timeout = TimeSpan.FromSeconds(1),
            };

            var retries = 0;

            do
            {
                await Task.Delay(1000);

                try
                {
                    var response = await httpClient.GetAsync(uri);

                    if (response.StatusCode == HttpStatusCode.OK)
                    {
                        output = null;
                        Instance.Initialize(uri, process, pidFilePath, sentinel);
                        return;
                    }
                }
                catch (OperationCanceledException)
                {
                }

                retries++;
            } while (retries < 30);

            // Make output null so that we stop logging to it.
            output = null;
            logOutput.CompleteAdding();
            var exitCodeString = process.HasExited ? process.ExitCode.ToString() : "Process has not yet exited.";
            var message        = $@"Failed to launch the server.
ExitCode: {exitCodeString}
Captured output lines:
{string.Join(Environment.NewLine, logOutput.GetConsumingEnumerable())}.";

            // If we got here, we couldn't launch Selenium or get it to respond. So shut it down.
            ProcessCleanup(process, pidFilePath);
            throw new InvalidOperationException(message);
        }
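The diagnostic message at the end relies on a detail that is easy to miss: string.Join over GetConsumingEnumerable() only terminates because CompleteAdding() was called first, which turns the blocking stream into a simple drain of whatever lines remain. In isolation (the demo class and the sample lines are placeholders):

using System;
using System.Collections.Concurrent;

public static class CapturedOutputDemo
{
    public static void Demo()
    {
        var logOutput = new BlockingCollection<string>();

        // Handlers (e.g. Process.OutputDataReceived) would call TryAdd as lines arrive.
        logOutput.TryAdd("line 1");
        logOutput.TryAdd("line 2");

        // Without this call, the GetConsumingEnumerable below would block forever
        // once the buffered lines were exhausted.
        logOutput.CompleteAdding();

        var message = $@"Captured output lines:
{string.Join(Environment.NewLine, logOutput.GetConsumingEnumerable())}";
        Console.WriteLine(message);
    }
}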
コード例 #59
0
        private void Process()
        {
            try
            {
                // Don't use cancellation token for GetConsumingEnumerable, causes some strange exceptions
                // The call to _configQueue.CompleteAdding will exit the loop instead
                foreach (var newMap in _configQueue.GetConsumingEnumerable())
                {
                    try
                    {
                        _logger.LogDebug(LoggingEvents.ConfigEvent, "Receiving new map revision {revision}", newMap.Rev);
                        var isNewOrUpdate = false;
                        var stored        = _configs.AddOrUpdate(newMap.Name, key =>
                        {
                            _logger.LogDebug(LoggingEvents.ConfigEvent, "Storing new map revision {revision}", newMap.Rev);
                            isNewOrUpdate = true;
                            return(newMap);
                        },
                                                                 (key, map) =>
                        {
                            _logger.LogDebug(LoggingEvents.ConfigEvent, "Updating new map revision {revision}", newMap.Rev);
                            if (newMap.Equals(map))
                            {
                                return(map);
                            }

                            isNewOrUpdate = true;
                            return(newMap.Rev > map.Rev ? newMap : map);
                        });

                        if (isNewOrUpdate)
                        {
                            _logger.LogDebug("Publishing config revision {revision} to subscribers for processing.", stored.Rev);
                            List <IConfigUpdateEventSink> subscribers;
                            lock (_configChangedSubscribers)
                            {
                                subscribers = _configChangedSubscribers.ToList();
                            }

                            var tasks = subscribers.Select(p => p.ConfigUpdatedAsync(stored));
                            Task.WhenAll(tasks).GetAwaiter().GetResult();
                        }
                    }
                    catch (Exception e)
                    {
                        _logger.LogWarning(e, "Error processing new clusterOptions");
                    }

                    if (_tokenSource.IsCancellationRequested)
                    {
                        break;
                    }
                }
            }
            catch (Exception ex)
            {
                // There is a problem in older versions of SemaphoreSlim used by BlockingCollection than can
                // cause an NRE if GetConsumingEnumerable is still enumerating when BlockingCollection is
                // disposed. We need to eat that error to prevent crashes. https://github.com/dotnet/coreclr/pull/24776
                _logger.LogDebug(ex, "Ignoring unhandled exception in ConfigHandler.");
            }
        }
コード例 #60
0
        /// <summary>
        /// This will go till the token is canceled
        /// </summary>
        /// <typeparam name="TBatchItem"></typeparam>
        /// <param name="token"></param>
        /// <param name="workQueue"></param>
        /// <param name="log"></param>
        /// <param name="listOperation"></param>
        /// <param name="maxRetryReachedExceptionHandler"></param>
        /// <param name="fatalExceptionHandler"></param>
        /// <param name="maxSaveDelay"></param>
        /// <param name="maxRecords"></param>
        /// <param name="maxRetries"></param>
        /// <param name="retryWaitSeconds"></param>
        /// <param name="tryTakeWaitSeconds"></param>
        public static void BatchingConsumer <TBatchItem>(
            CancellationToken token,
            BlockingCollection <TBatchItem> workQueue,
            Action <string, Exception> log,
            Action <IEnumerable <TBatchItem> > listOperation,
            Action <Exception, IEnumerable <TBatchItem> > maxRetryReachedExceptionHandler,
            Action <Exception, IEnumerable <TBatchItem> > fatalExceptionHandler,
            int maxSaveDelay,
            int maxRecords         = 1000,
            int maxRetries         = 600,
            int retryWaitSeconds   = 1,
            int tryTakeWaitSeconds = 1)
        {
            var operatingListSize = maxRecords + 5;

            var list = new List <TBatchItem>(operatingListSize);

            try
            {
                var lastSave = DateTime.Now;
                var counter  = 0;

                while (!token.IsCancellationRequested)
                {
                    try
                    {
                        TBatchItem outVar;

                        while (!token.IsCancellationRequested &&
                               //stop once maxRecords items have been collected (post-increment check)
                               counter++ < maxRecords &&
                               //Make sure we save every X seconds no matter what
                               lastSave.AddSeconds(maxSaveDelay) > DateTime.Now &&
                               //Make sure we take something
                               //TryTake blocks till the wait time passes or the token is canceled
                               workQueue.TryTake(out outVar, tryTakeWaitSeconds * 1000, token))
                        {
                            list.Add(outVar);
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        //The Token is canceled while waiting.  Dont care since we expect this to happen every now and then when stopping.
                    }

                    if (list.Count > 0)
                    {
                        var retry      = true;
                        var retryCount = 0;

                        do
                        {
                            try
                            {
                                listOperation(list);

                                retry = false;
                            }
                            catch (Exception ex)
                            {
                                retryCount++;

                                if (retryCount >= maxRetries)
                                {
                                    maxRetryReachedExceptionHandler(ex,
                                                                    list);

                                    retry = false;
                                }
                                else if (token.IsCancellationRequested)
                                {
                                    throw;
                                }
                                else
                                {
                                    log("Error occurred in batching, retrying operation after wait delay.", ex);

                                    token.WaitHandle.WaitOne(retryWaitSeconds * 1000);
                                }
                            }
                        } while (retry);
                    }

                    //reset our counters.
                    list     = new List <TBatchItem>(operatingListSize);
                    counter  = 0;
                    lastSave = DateTime.Now;
                }

                //Token is canceled
                //clean up the loop

                //Make sure we get all the items.
                //NOTE: this only returns once CompleteAdding() has been called on workQueue; otherwise it blocks here.
                foreach (var item in workQueue.GetConsumingEnumerable())
                {
                    list.Add(item);
                }

                //save all the changes.
                listOperation(list);
            }
            catch (Exception ex)
            {
                fatalExceptionHandler(ex,
                                      list);
            }
        }
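Since BatchingConsumer is generic and takes several delegates, a short usage sketch may help. This assumes the method above is reachable from the calling code (its containing class is not shown); the queue contents, batch size and delays below are made up:

using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

public static class BatchingConsumerUsage
{
    public static void Demo()
    {
        var queue = new BlockingCollection<string>();
        var cts = new CancellationTokenSource();

        // Assumes BatchingConsumer (shown above) is in scope here.
        var consumer = Task.Run(() => BatchingConsumer(
            cts.Token,
            queue,
            log: (msg, ex) => Console.WriteLine($"{msg} ({ex.Message})"),
            listOperation: batch => Console.WriteLine($"Saving {batch.Count()} records"),
            maxRetryReachedExceptionHandler: (ex, batch) => Console.WriteLine("Giving up on a batch: " + ex.Message),
            fatalExceptionHandler: (ex, batch) => Console.WriteLine("Fatal: " + ex),
            maxSaveDelay: 5,            // flush at least every 5 seconds
            maxRecords: 100));          // or as soon as 100 records have been taken

        for (var i = 0; i < 250; i++)
        {
            queue.Add($"record {i}");
        }

        Thread.Sleep(TimeSpan.FromSeconds(10));   // let a few batches flush

        // Complete the queue *before* cancelling, so the final drain inside
        // BatchingConsumer (GetConsumingEnumerable) can finish instead of blocking.
        queue.CompleteAdding();
        cts.Cancel();
        consumer.Wait();
    }
}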