public async Task SerializeToXml(ISourceBlock<Person> source)
{
    var settings = new XmlWriterSettings { Indent = true };

    using (FileStream fs = File.Create(XmlPath))
    using (StreamWriter sw = new StreamWriter(fs))
    using (XmlWriter xw = XmlWriter.Create(sw, settings))
    {
        xw.WriteStartDocument();
        xw.WriteStartElement("People");

        // Read from the source buffer until no more output is available.
        while (await source.OutputAvailableAsync())
        {
            var person = source.Receive();
            xw.WriteStartElement("Person");
            xw.WriteElementString("Name", person.Name);
            xw.WriteElementString("Surname", person.Surname);
            xw.WriteElementString("Country", person.Country);
            xw.WriteElementString("Email", person.Email);
            xw.WriteElementString("IpAddress", person.IpAddress);
            xw.WriteEndElement();
        }

        xw.WriteEndElement();
        xw.WriteEndDocument();
    }
}
static async Task<int> ConsumeAsync(ISourceBlock<List<Vertex>> source)
{
    // Track the number of nodes that are processed.
    int nodesProcessed = 0;
    int batch = 0;
    ElasticClient client = new ElasticClient(new Uri(AppSettings.Current.ElasticServerUrl));
    Console.WriteLine("Indexing documents into elasticsearch...");

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        Stopwatch sw = Stopwatch.StartNew();
        List<Vertex> nodes = source.Receive();

        var sb = new StringBuilder();
        foreach (var item in nodes)
        {
            sb.AppendLine(JsonConvert.SerializeObject(item, Formatting.None));
        }

        File.AppendAllText("/Users/chinkit/00D2D-CRC/04-BigData/stackoverflow/step2/full-graph.json", sb.ToString());

        // Increment the count of nodes received.
        nodesProcessed += nodes.Count;
        Console.WriteLine($"Batch {batch++}: documented {nodesProcessed} nodes, elapsed {sw.Elapsed.TotalSeconds:F1} seconds");
    }

    return nodesProcessed;
}
// Demonstrates the consumption end of the producer and consumer pattern.
public async Task<int> ConsumeAsync(ISourceBlock<byte[]> source)
{
    // Initialize a counter to track the number of bytes that are processed.
    int bytesProcessed = 0;

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        byte[] data = source.Receive();

        // Increment the count of bytes received.
        bytesProcessed += data.Length;

        // My code for debugging.
        lock (consumerLocker)
        {
            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine("The processed bytes so far: " + bytesProcessed);
            Console.ForegroundColor = ConsoleColor.White;
        }
    }

    return bytesProcessed;
}
// TODO: Wait until this becomes a part of the API
// https://github.com/dotnet/corefx/issues/41125
public static async IAsyncEnumerable<TOutput> ReceiveAllAsync<TOutput>(
    this ISourceBlock<TOutput> source,
    [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    while (await source.OutputAvailableAsync(cancellationToken))
    {
        yield return source.Receive();
    }
}
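A minimal usage sketch for the extension above. The block name `buffer`, the item count, and the wiring are illustrative assumptions, not taken from the snippets here; the consumer simply iterates the source block with `await foreach`.

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

// Hypothetical usage of the ReceiveAllAsync extension above.
// A producer posts integers to `buffer` and calls Complete() when done,
// which ends the await foreach loop in the consumer.
var buffer = new BufferBlock<int>();

var consumer = Task.Run(async () =>
{
    await foreach (int item in buffer.ReceiveAllAsync())
    {
        Console.WriteLine($"Received {item}");
    }
});

for (int i = 0; i < 10; i++)
{
    buffer.Post(i);
}

buffer.Complete();
await consumer;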
static async Task UserInputHandler(ISourceBlock<UserInputEvent> source)
{
    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        var input = source.Receive();

        if (input == UserInputEvent.ButtonPress)
        {
            if (boardState == clock)
            {
                boardState = countdown;
            }
            else
            {
                boardState = clock;
            }
        }

        if (input == UserInputEvent.RotateRight && boardState == countdown)
        {
            countdown.Increase();
        }
        else if (input == UserInputEvent.RotateLeft && boardState == countdown)
        {
            countdown.Decrease();
        }
    }
}
// This is the method that consumes the Matrix images and runs some OpenCV
// processing on each before saving them. The tasks are asynchronous,
// so processing can intentionally occur out of order to speed up the flow.
private static async void AsynchronousImageConv(ISourceBlock<Image> imageQueue)
{
    while (await imageQueue.OutputAvailableAsync())
    {
        Image producedResult = imageQueue.Receive();

        Task.Run(() =>
        {
            if (DateTime.Now.Ticks % 3 == 0)
            {
                Thread.Sleep(550);
            }

            Mat output = new Mat();
            Mat input = producedResult.mat;
            Mat flipped = input.Flip(FlipMode.Y);

            // Create an inversion effect, i.e. white becomes black.
            Cv2.BitwiseNot(flipped, output);

            string outputName = @"C:\Temp\MetroImgs\Output\" + producedResult.filename;
            output.ImWrite(outputName);

            Console.WriteLine("Processed Image {0} from the queue:", producedResult.filename);
            WriteToLog("Processed Image " + producedResult.filename + " from the queue: \n");
        });
    }
}
private async Task<bool> SaveAsync(ISourceBlock<System.Action> source)
{
    while (await source.OutputAvailableAsync())
    {
        var data = source.Receive();
        data.Invoke();
    }

    return true;
}
protected override async Task LogConsumerAsync(ISourceBlock<string> Source)
{
    while (await Source.OutputAvailableAsync())
    {
        await WriteToFile($"{DateTime.Now} {Source.Receive()}\n");
        LogWriteEvent?.Invoke(this, EventArgs.Empty);
    }
}
internal async Task<int> ConsumeBuffer(ISourceBlock<SyslogMessageInfo> source)
{
    int count = 0;

    using (var bucket = DbCluster.OpenBucket("BornToFail1"))
    {
        while (await source.OutputAvailableAsync())
        {
            var message = source.Receive();

            var document = new Document<dynamic>
            {
                Id = DocumentPrefix + "message::" + Guid.NewGuid().ToString(),
                Content = message
            };

            var insert = await bucket.InsertAsync(document);
            if (insert.Success)
            {
                Console.WriteLine(document.Id);
                count++;
            }
            else
            {
                System.Diagnostics.Debug.WriteLine(insert.Status.ToString());
            }

            if (message.Message != null &&
                message.Message.Header != null &&
                message.Message.Header.MessageType != null &&
                message.Message.Header.MessageType.Facility == "SYS" &&
                message.Message.Header.MessageType.Mnemonic == "CONFIG_I")
            {
                var oldColor = Console.ForegroundColor;
                Console.ForegroundColor = ConsoleColor.Green;
                Console.WriteLine("Received Configuration change notification from " + message.Sender.ToString());
                Console.ForegroundColor = oldColor;

                var client = new HttpClient();
                var response = await PostAsJsonAsync(
                    client,
                    "http://localhost:51954/api/values",
                    new ConfigRequestObject
                    {
                        ConfigurationId = Guid.NewGuid(),
                        DeviceId = message.Sender.Address.ToString()
                    });
            }
        }
    }

    return count;
}
// Demonstrates the consumption end of the producer and consumer pattern.
async Task ConsumeAsync(ISourceBlock<IMessage> source)
{
    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        var data = source.Receive();
        aggregateMessage.AddMessage(data);
    }
}
protected override async Task LogConsumerAsync(ISourceBlock<string> Source)
{
    while (await Source.OutputAvailableAsync())
    {
        LogEntry le = new LogEntry()
        {
            Timestamp = DateTimeOffset.Now,
            Message = Source.Receive()
        };
        _repoManager.LogEntryRepository.Add(le);
    }
}
protected override async Task ErrorConsumerAsync(ISourceBlock<Exception> Source)
{
    while (await Source.OutputAvailableAsync())
    {
        LogEntry le = new LogEntry()
        {
            Event = "Exception",
            Timestamp = DateTimeOffset.Now,
            Message = Source.Receive().ToString()
        };
        _repoManager.LogEntryRepository.Add(le);
    }
}
public async Task<dynamic> ConsumeAsync(ISourceBlock<object> source, Action<object> action)
{
    while (await source.OutputAvailableAsync())
    {
        object data = source.Receive();
        action(data);
    }

    return null;
}
private static async void AsynchronousConsumer(ISourceBlock<IList<int>> sourceBlock)
{
    while (await sourceBlock.OutputAvailableAsync())
    {
        var producedResult = sourceBlock.Receive();
        foreach (var result in producedResult)
        {
            Console.WriteLine("Receiver Received:" + result);
        }
    }
}
public async Task Sender(ISourceBlock<string> source)
{
    while (await source.OutputAvailableAsync())
    {
        try
        {
            string data = source.Receive();
            await Client.SendAsync(data);
        }
        catch (Exception)
        {
            // Swallow send failures and keep draining the source.
        }
    }
}
private async Task ConsumeObjects(ImportContext context, SchemaType type, ISourceBlock<IUser> source)
{
    long userHighestTicks = 0;

    while (await source.OutputAvailableAsync())
    {
        IUser user = source.Receive();

        try
        {
            if (user.LastUpdated.HasValue)
            {
                AsyncHelper.InterlockedMax(ref userHighestTicks, user.LastUpdated.Value.Ticks);
            }

            CSEntryChange c = await this.UserToCSEntryChange(context.InDelta, type, user, context).ConfigureAwait(false);

            if (c != null)
            {
                context.ImportItems.Add(c, context.CancellationTokenSource.Token);
            }
        }
        catch (Exception ex)
        {
            UserImportProvider.logger.Error(ex);
            CSEntryChange csentry = CSEntryChange.Create();
            csentry.DN = user.Id;
            csentry.ErrorCodeImport = MAImportError.ImportErrorCustomContinueRun;
            csentry.ErrorDetail = ex.StackTrace;
            csentry.ErrorName = ex.Message;
            context.ImportItems.Add(csentry, context.CancellationTokenSource.Token);
        }

        context.CancellationTokenSource.Token.ThrowIfCancellationRequested();
    }

    string wmv;

    if (userHighestTicks <= 0)
    {
        wmv = context.IncomingWatermark["users"].Value;
    }
    else
    {
        wmv = userHighestTicks.ToString();
    }

    context.OutgoingWatermark.Add(new Watermark("users", wmv, "DateTime"));
}
public static async Task<int> SaveFileAsync(ISourceBlock<string> source)
{
    while (await source.OutputAvailableAsync())
    {
        WebClient wc = new WebClient();
        if (!Directory.Exists("img"))
        {
            Directory.CreateDirectory("img");
        }

        var imageUrl = source.Receive();
        var savePath = GetCacheImageName(imageUrl);
        wc.DownloadFile(imageUrl, savePath);
    }

    return 1;
}
public static async Task<MeasurmentsData> MeasureDataAsync(this ISourceBlock<int> block, int[] keys)
{
    var stopwatches = keys.ToDictionary(x => x, x => new Stopwatch());
    var seq = new List<double>();
    var stats = keys.ToDictionary(x => x, x => new List<double>());

    while (await block.OutputAvailableAsync().ConfigureAwait(false))
    {
        var x = block.Receive();
        seq.Add(x);
        stopwatches[x].Stop();
        stats[x].Add(stopwatches[x].ElapsedMilliseconds);
        stopwatches[x].Restart();
    }

    return new MeasurmentsData(stats, seq);
}
// Demonstrates the consumption end of the producer and consumer pattern.
static async Task<int> ConsumeAsync(ISourceBlock<int> source)
{
    // Initialize a counter to track the sum of the ints that are processed.
    int intProcessed = 0;

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        int data = source.Receive();

        // Add the received int to the running sum.
        intProcessed += data;
    }

    return intProcessed;
}
static async Task<int> ConsumeAsync(ISourceBlock<int> source)
{
    // Initialize a counter to track the sum.
    int sumOfProcessed = 0;

    // Read from the source buffer until empty.
    while (await source.OutputAvailableAsync())
    {
        int data = source.Receive();

        // Calculate the sum.
        sumOfProcessed += data;
    }

    return sumOfProcessed;
}
static async Task<int> ConsumeAsync(ISourceBlock<byte[]> source)
{
    int chunksProcessed = 0;
    int byteProcessed = 0;

    // Keep reading until the source data is exhausted.
    while (await source.OutputAvailableAsync())
    {
        byte[] sourceChunkData = source.Receive();
        chunksProcessed++;
        byteProcessed += sourceChunkData.Length;
    }

    Console.WriteLine("Chunks: " + chunksProcessed);
    return byteProcessed;
}
// Demonstrates the consumption end of the producer and consumer pattern.
private static async Task<int> ConsumeAsync(ISourceBlock<byte[]> source)
{
    // Initialize a counter to track the number of bytes that are processed.
    int bytesProcessed = 0;

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync().ConfigureAwait(false))
    {
        byte[] data = source.Receive();

        // Increment the count of bytes received.
        Interlocked.Add(ref bytesProcessed, data.Length);
    }

    return bytesProcessed;
}
// Demonstrates the consumption end of the producer and consumer pattern.
public async Task<int> ConsumeAsync(ISourceBlock<byte[]> source)
{
    // Initialize a counter to track the number of bytes that are processed.
    int bytesProcessed = 0;

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        byte[] data = source.Receive();

        // Increment the count of bytes received.
        bytesProcessed += data.Length;
    }

    return bytesProcessed;
}
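For context, a minimal producer-side sketch that pairs with the consumer above. The block name `target`, the chunk size, and the iteration count are illustrative assumptions, not taken from the snippets here; the key point is that the producer must call Complete() so OutputAvailableAsync() eventually returns false and the consumer loops exit.

using System;
using System.Threading.Tasks.Dataflow;

// Illustrative producer (assumed, not from the snippets above): posts random
// byte[] chunks to the target block and signals completion when finished.
static void Produce(ITargetBlock<byte[]> target)
{
    var rand = new Random();

    for (int i = 0; i < 100; i++)
    {
        byte[] buffer = new byte[1024];
        rand.NextBytes(buffer);
        target.Post(buffer);
    }

    // Signal that no more data will arrive.
    target.Complete();
}

// Typical wiring:
// var buffer = new BufferBlock<byte[]>();
// var consumer = ConsumeAsync(buffer);
// Produce(buffer);
// int bytes = await consumer;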
/// <summary>
/// Prepares the match list for saving to the db - includes a duplicate check.
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
static async Task<List<Match>> FetchData(ISourceBlock<Match> source)
{
    List<Match> matches = new List<Match>();

    while (await source.OutputAvailableAsync())
    {
        Match match = source.Receive();

        // Skip matches that are already in the list.
        if (!matches.Any(m => m.Competition == match.Competition && m.TeamA == match.TeamA && m.TeamB == match.TeamB))
        {
            matches.Add(match);
        }
    }

    return matches;
}
// Demonstrates the consumption end of the producer and consumer pattern.
static async Task<int> ConsumeAsync(ISourceBlock<string> source)
{
    // Initialize a counter to track the number of strings that are processed.
    int stringsProcessed = 0;

    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        Console.WriteLine(source.Receive());
        stringsProcessed++;
    }

    return stringsProcessed;
}
public Task StartAsync(CancellationToken cancellationToken)
{
    return Task.Run(async () =>
    {
        try
        {
            while (await _source.OutputAvailableAsync(cancellationToken))
            {
                var channelMessage = _source.Receive();
                channelMessage.Send(_channel);
            }
        }
        catch (OperationCanceledException)
        {
            // All good, time to stop.
        }
    });
}
public static async Task<MeasurmentsData> MeasureDataAsync(this ISourceBlock<int> block)
{
    var stats = new List<double>();
    var seq = new List<double>();
    Stopwatch sw = Stopwatch.StartNew();

    while (await block.OutputAvailableAsync().ConfigureAwait(false))
    {
        var item = block.Receive();
        seq.Add(item);
        sw.Stop();
        stats.Add(sw.ElapsedMilliseconds);
        sw.Restart();
    }

    return new MeasurmentsData(new Dictionary<int, List<double>> { [-1] = stats }, seq);
}
static async Task ConsumeAsync(ISourceBlock<Pokemon> source)
{
    BlockingCollection<Pokemon> bag = new BlockingCollection<Pokemon>();

    while (await source.OutputAvailableAsync())
    {
        Pokemon data = source.Receive();
        bag.Add(data);

        if (!arquivosMultiplos)
        {
            await CriarArquivoUnico(bag);
        }
        else
        {
            await CriarArquivMultiplo(bag);
        }
    }
}
/// <summary>
/// Consumer. Reads tweets from the producer.
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public async Task<int> ConsumeAsync(ISourceBlock<ITweet> source)
{
    // Read from the source buffer until the source buffer has no
    // available output data.
    while (await source.OutputAvailableAsync())
    {
        ITweet tweet = source.Receive();

        if (tweet.IsRetweet)
        {
            // If the tweet is a retweet, use the original tweet.
            tweet = tweet.RetweetedTweet;
        }

        AddTweet(tweet);
    }

    return 0;
}
static async Task ConsumeAsync(ISourceBlock<Pokemon> source)
{
    BlockingCollection<Pokemon> bag = new BlockingCollection<Pokemon>();

    while (await source.OutputAvailableAsync())
    {
        Pokemon data = source.Receive();
        bag.Add(data);
    }

    if (!multipleFiles)
    {
        await CreateSingleFile(bag);
    }
    else
    {
        await CreateMultipleFile(bag);
    }
}
private async System.Threading.Tasks.Task WriteOutputAsync(ISourceBlock<string> source)
{
    while (await source.OutputAvailableAsync())
    {
        string data = source.Receive();

        if (OutputPane != null)
        {
            OutputPane.OutputStringThreadSafe(data);
            OutputPane.OutputStringThreadSafe("\r\n");
        }
    }
}