private void ExportResult(string exportPath)
{
    // Writes the collected log entries to a UTF-8 text file at exportPath,
    // optionally restricted to the configured (_startTime, _endTime) window,
    // ordered by timestamp with ties broken by original line number.
    Output("{0} started exporting result", GetCurrentTimeString());

    var selected = _logs.AsEnumerable();
    if (_startTime.HasValue)
    {
        selected = selected.Where(entry => entry.Time > _startTime.Value);
    }
    if (_endTime.HasValue)
    {
        selected = selected.Where(entry => entry.Time < _endTime.Value);
    }

    // Materialize once so the file write iterates a stable snapshot.
    var ordered = selected
        .OrderBy(entry => entry.Time)
        .ThenBy(entry => entry.LineNumber)
        .ToArray();

    Output("{0} Started exporting file", GetCurrentTimeString());
    using (var writer = new StreamWriter(exportPath, false, Encoding.UTF8))
    {
        foreach (var entry in ordered)
        {
            writer.WriteLine(entry.LogValue);
        }
    }
    Output("{0} Finished exporting file", GetCurrentTimeString());
}
public IEnumerable <string> SaveMseCalculations(Group group, string args)
{
    // Runs MSE calculations for every person in the group in parallel and
    // collects each person's output string. Order of results is not guaranteed.
    var bag = new ConcurrentBag <string>();
    Parallel.ForEach(
        group.People,
        person =>
    {
        // Fix: the original assigned back to the captured 'args' parameter
        // from multiple threads — a data race that also leaked one person's
        // derived default arguments to every other person. Use a per-person
        // local instead.
        string effectiveArgs = args;
        if (String.IsNullOrEmpty(effectiveArgs))
        {
            // No explicit arguments supplied: derive a default max-stop index
            // from this person's own snapshot count.
            var calculationService = new PersonCalculationService();
            calculationService.CalculateSnapshots(person);
            var length = person.Snapshots.Data.Count();
            var builder = new MseArgumentBuilder();
            effectiveArgs = builder
                            .WithMaxStopIndex(length - 1)
                            .Build();
        }
        // NOTE(review): the original also computed a "<Id>MseResult.txt" path
        // but never used it; the explicit null destination is preserved as-is.
        string output = _personService.SaveMseCalculations(person, null, effectiveArgs);
        bag.Add(output);
    }
        );
    return(bag.AsEnumerable());
}
private void SaveCustomSubcollection(ConcurrentBag <EPatientMedicationFrequency> dic)
{
    // Bulk-inserts the buffered patient-medication-frequency rows into the
    // RPT_CustomPatientMedFrequency reporting table.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            var columns = new[]
            {
                "MongoId", "Name", "MongoPatientId", "Version", "MongoUpdatedBy",
                "DeleteFlag", "TTLDate", "LastUpdatedOn", "MongoRecordCreatedBy", "RecordCreatedOn"
            };
            foreach (var column in columns)
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_CustomPatientMedFrequency";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private void SaveSubcollection(ConcurrentBag <ESystem> dic)
{
    // Bulk-inserts the buffered system rows into the RPT_System reporting table.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            var columns = new[]
            {
                "MongoId", "DisplayLabel", "Field", "Name", "Primary", "Status",
                "Version", "UpdatedBy", "RecordCreatedBy", "RecordCreatedOn",
                "DeleteFlag", "TTLDate", "LastUpdatedOn"
            };
            foreach (var column in columns)
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_System";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private static IEnumerable <string> GetTestProjectAssemblies(string projectDirectory)
{
    // Discovers unit-test projects under projectDirectory and resolves each
    // project's built assembly path (quoted, ready for a command line).
    _loggerCallback(LogType.Normal, "");
    _loggerCallback(LogType.Info, "Starting - Discovering Test Projects");
    var testProjectsAndAssemblyNames = DiscoverUnitTestProjectsAndAssemblyNames(projectDirectory);
    var concurrentBag = new ConcurrentBag <string>();
    Parallel.ForEach(testProjectsAndAssemblyNames, testProjectAndAssemblyName =>
    {
        var testProjectDirectory = Path.GetDirectoryName(testProjectAndAssemblyName.Key);
        // Fix: use Path.Combine instead of raw "\\bin" string concatenation so
        // the search path is always well-formed.
        var binDirectory = Path.Combine(testProjectDirectory, "bin");
        var testProjectAssemblies = Directory.EnumerateFiles(binDirectory, "*" + testProjectAndAssemblyName.Value + ".dll", SearchOption.AllDirectories);
        // Single(): exactly one matching build output is expected; duplicates
        // (stale configurations, multiple builds) should fail loudly here.
        var testProjectAssembly = testProjectAssemblies.Single();
        concurrentBag.Add("\"" + testProjectAssembly + "\"");
        _loggerCallback(LogType.Info, "Found Test Project: " + testProjectAssembly);
    });
    _loggerCallback(LogType.Normal, "");
    _loggerCallback(LogType.Info, concurrentBag.Count.ToString(CultureInfo.CurrentCulture) + " Test Project Found");
    _loggerCallback(LogType.Normal, "");
    _loggerCallback(LogType.Info, "Finished - Discovering Test Projects");
    _loggerCallback(LogType.Normal, "");
    return(concurrentBag.AsEnumerable());
}
public IEnumerable <string> SaveAutocorrelations(Group group, int shiftBy = 0, int degreeOfParallelism = 2, bool performDataRandomization = false)
{
    // Computes and saves autocorrelation results for every person in the
    // group under a bounded degree of parallelism; returns the per-person
    // save outputs in no particular order.
    var outputs = new ConcurrentBag <string>();
    var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = degreeOfParallelism };
    Parallel.ForEach(group.People, parallelOptions, person =>
    {
        // The file name encodes the person, the shift size and the checkpoint rate.
        var destination = String.Concat(
            person.Id,
            "Autocorrelations_",
            shiftBy,
            "_shiftPoints_",
            person.CheckpointRate.ToReadableString(),
            ".csv");
        var correlations = _personCalcService.GetAutoCorrelationsForShifts(
            person,
            shiftBy,
            performDataRandomization);
        outputs.Add(_correlationService.SaveOne(correlations, destination));
    });
    return(outputs.AsEnumerable());
}
private void SaveSubcollection(ConcurrentBag <EContactTypeLookUp> dic)
{
    // Bulk-inserts the buffered contact-type lookup rows into RPT_ContactTypeLookUp.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            var columns = new[]
            {
                "MongoId", "Name", "Role", "ParentId", "Group", "Active",
                "Version", "DeleteFlag", "TTLDate", "UpdatedBy",
                "LastUpdatedOn", "RecordCreatedBy", "RecordCreatedOn"
            };
            foreach (var column in columns)
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_ContactTypeLookUp";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
public override IEnumerable <string> GetMarkets()
{
    // Lazily builds the distinct market list from the current backtesting
    // tickers; the result is cached in 'markets' after the first successful build.
    if (markets == null)
    {
        // Fix: fetch the tickers once instead of calling GetCurrentTickers()
        // twice (null check + use).
        var tickers = backtestingService.GetCurrentTickers();
        if (tickers != null)
        {
            this.markets = new ConcurrentBag <string>(tickers.Keys
                                                      .Select(pair => GetPairMarket(pair)).Distinct().ToList());
        }
    }
    // Fix: the original "markets.AsEnumerable() ?? new List<string>()" only
    // avoided a crash because AsEnumerable() passes a null receiver straight
    // through; make the empty-fallback explicit instead of relying on that quirk.
    return(markets != null?markets.AsEnumerable() : Enumerable.Empty <string>());
}
public static async Task <IQueueMessage <T>[]> DequeueManyAsync <T>(this IQueue <T> queue, int limit = Size) where T : IMessageData
{
    // Starts 'limit' dequeue operations concurrently and awaits them all.
    // Fix: a plain list replaces the original ConcurrentBag — the tasks were
    // created sequentially on one thread, and the bag's LIFO enumeration
    // reversed the result order for no benefit. Results now come back in
    // dequeue-start order.
    var pending = new List <Task <IQueueMessage <T> > >(limit);
    for (var i = 0; i < limit; i++)
    {
        pending.Add(queue.DequeueAsync());
    }
    return(await Task.WhenAll(pending).ConfigureAwait(false));
}
// Fetches the ProviderResult documents for the given specification, issuing
// one partitioned Cosmos query per provider id, throttled by the configured
// degree of parallelism. Results are returned in no particular order.
public async Task <IEnumerable <ProviderResult> > GetProviderResultsBySpecificationIdAndProviders(IEnumerable <string> providerIds, string specificationId)
{
    Guard.ArgumentNotNull(providerIds, nameof(providerIds));
    Guard.IsNullOrWhiteSpace(specificationId, nameof(specificationId));
    if (providerIds.IsNullOrEmpty())
    {
        return(Enumerable.Empty <ProviderResult>());
    }
    List <Task> allTasks = new List <Task>(providerIds.Count());
    ConcurrentBag <ProviderResult> results = new ConcurrentBag <ProviderResult>();
    // Bounds how many provider queries run concurrently.
    SemaphoreSlim throttler = new SemaphoreSlim(_engineSettings.CalculateProviderResultsDegreeOfParallelism);
    foreach (string providerId in providerIds)
    {
        // Acquire a slot before scheduling the next query task.
        await throttler.WaitAsync();
        allTasks.Add(
            Task.Run(async() =>
        {
            try
            {
                CosmosDbQuery cosmosDbQuery = new CosmosDbQuery
                {
                    QueryText = @"SELECT * FROM Root r WHERE r.documentType = @DocumentType AND r.content.specificationId = @SpecificationId AND r.deleted = false",
                    Parameters = new[] { new CosmosDbQueryParameter("@DocumentType", nameof(ProviderResult)), new CosmosDbQueryParameter("@SpecificationId", specificationId) }
                };
                // The provider scopes the query through the partition key
                // rather than a WHERE clause.
                IEnumerable <ProviderResult> providerResults = await _cosmosRepository.QueryPartitionedEntity <ProviderResult>(cosmosDbQuery, partitionKey: providerId);
                foreach (ProviderResult providerResult in providerResults)
                {
                    results.Add(providerResult);
                }
            }
            finally
            {
                // Always free the slot, even if the query throws.
                throttler.Release();
            }
        }));
    }
    await TaskHelper.WhenAllAndThrow(allTasks.ToArray());
    return(results.AsEnumerable());
}
// Fetches the ProviderSourceDataset documents for the given specification,
// issuing one partitioned query per provider id, throttled by the configured
// degree of parallelism. Results are returned in no particular order.
public async Task <IEnumerable <ProviderSourceDataset> > GetProviderSourceDatasetsByProviderIdsAndSpecificationId(IEnumerable <string> providerIds, string specificationId)
{
    if (providerIds.IsNullOrEmpty())
    {
        return(Enumerable.Empty <ProviderSourceDataset>());
    }
    Guard.IsNullOrWhiteSpace(specificationId, nameof(specificationId));
    ConcurrentBag <ProviderSourceDataset> results = new ConcurrentBag <ProviderSourceDataset>();
    List <Task> allTasks = new List <Task>();
    // Bounds how many provider queries run concurrently.
    SemaphoreSlim throttler = new SemaphoreSlim(initialCount: _engineSettings.GetProviderSourceDatasetsDegreeOfParallelism);
    foreach (string providerId in providerIds)
    {
        // Acquire a slot before scheduling the next query task.
        await throttler.WaitAsync();
        allTasks.Add(
            Task.Run(async() =>
        {
            try
            {
                SqlQuerySpec sqlQuerySpec = new SqlQuerySpec
                {
                    QueryText = @"SELECT * FROM Root r WHERE r.documentType = @DocumentType AND r.content.specificationId = @SpecificationId AND r.deleted = false",
                    Parameters = new SqlParameterCollection
                    {
                        new SqlParameter("@DocumentType", nameof(ProviderSourceDataset)),
                        new SqlParameter("@SpecificationId", specificationId)
                    }
                };
                // The provider scopes the query through the partition entity id
                // rather than a WHERE clause.
                IEnumerable <ProviderSourceDataset> providerSourceDatasetResults = await _cosmosRepository.QueryPartitionedEntity <ProviderSourceDataset>(sqlQuerySpec, partitionEntityId: providerId);
                foreach (ProviderSourceDataset repoResult in providerSourceDatasetResults)
                {
                    results.Add(repoResult);
                }
            }
            finally
            {
                // Always free the slot, even if the query throws.
                throttler.Release();
            }
        }));
    }
    await TaskHelper.WhenAllAndThrow(allTasks.ToArray());
    return(results.AsEnumerable());
}
public async Task <IEnumerable <TestScenarioResult> > GetCurrentTestResults(IEnumerable <string> providerIds, string specificationId)
{
    // Fetches the current TestScenarioResult documents for the given
    // specification, one partitioned query per provider id.
    //
    // Fix: the original passed an async lambda to Parallel.ForEach. Such
    // lambdas compile to async void, so the loop returned before the queries
    // finished and exceptions were unobservable; completion was then polled
    // via a non-interlocked counter — a data race that could hang the method
    // forever. Replaced with throttled tasks awaited via WhenAll, matching
    // the sibling provider-results repository method.
    Guard.ArgumentNotNull(providerIds, nameof(providerIds));
    Guard.IsNullOrWhiteSpace(specificationId, nameof(specificationId));
    if (providerIds.IsNullOrEmpty())
    {
        return(Enumerable.Empty <TestScenarioResult>());
    }
    ConcurrentBag <TestScenarioResult> results = new ConcurrentBag <TestScenarioResult>();
    List <Task> allTasks = new List <Task>();
    // Same bound the original requested through MaxDegreeOfParallelism.
    SemaphoreSlim throttler = new SemaphoreSlim(_engineSettings.GetCurrentProviderTestResultsDegreeOfParallelism);
    foreach (string providerId in providerIds)
    {
        await throttler.WaitAsync();
        allTasks.Add(Task.Run(async() =>
        {
            try
            {
                CosmosDbQuery cosmosDbQuery = new CosmosDbQuery
                {
                    QueryText = @"SELECT * FROM Root r WHERE r.documentType = @DocumentType AND r.content.specification.id = @SpecificationId AND r.deleted = false",
                    Parameters = new[] { new CosmosDbQueryParameter("@DocumentType", nameof(TestScenarioResult)), new CosmosDbQueryParameter("@SpecificationId", specificationId) }
                };
                IEnumerable <TestScenarioResult> testScenarioResults = await _cosmosRepository.QueryPartitionedEntity <TestScenarioResult>(cosmosDbQuery, partitionKey: providerId);
                foreach (TestScenarioResult testScenarioResult in testScenarioResults)
                {
                    results.Add(testScenarioResult);
                }
            }
            finally
            {
                // Always free the slot, even if the query throws.
                throttler.Release();
            }
        }));
    }
    await Task.WhenAll(allTasks.ToArray());
    return(results.AsEnumerable());
}
// Produces offspring chromosomes through parallel selection, crossover and
// mutation until at least childCount children have been generated.
private IEnumerable <Chromosome <T> > DoCrossover(Population <T> generationPopulation, SecureRandom random, int childCount)
{
    const int ParentCount = 2;
    // Selection sample size: 5% of the population, rounded up.
    var RouletteSelection = (uint)Math.Ceiling((double)generationPopulation.Chromosomes.Count * 0.05);
    var crossovers = new ConcurrentBag <Chromosome <T> >();
    var tasks = new Task[this.Parameters.TaskCount];
    for (var c = 0; c < this.Parameters.TaskCount; c++)
    {
        tasks[c] = Task.Factory.StartNew(() =>
        {
            do
            {
                var parents = new List <Chromosome <T> >();
                // Sample a set of unique population indexes for this round.
                var indexes = random.GetInt32Values(RouletteSelection, new Range <int>(0, generationPopulation.Chromosomes.Count), ValueGeneration.UniqueValuesOnly);
                for (var i = 0; i < ParentCount; i++)
                {
                    // Take the fittest chromosome among the sampled indexes.
                    // NOTE(review): 'indexes' is not re-sampled per parent, so
                    // both loop iterations resolve to the same fittest
                    // chromosome — confirm whether self-crossover is intended.
                    var parent = (from chromosome in (from index in indexes select generationPopulation.Chromosomes[index]) orderby chromosome.Fitness descending select chromosome).Take(1).First();
                    parents.Add(parent);
                }
                // Either recombine the parents or fall back to plain copies,
                // depending on the crossover probability.
                var children = random.NextDouble() < this.Parameters.CrossoverProbability ? this.Parameters.Crossover(parents.AsReadOnly()) : new List <T>(from parent in parents select this.Parameters.Copy(parent.Value)).AsReadOnly();
                foreach (var child in children)
                {
                    // The mutator may veto a child by returning null.
                    var mutatedChild = this.Parameters.Mutator(child);
                    if (mutatedChild != null)
                    {
                        crossovers.Add(new Chromosome <T>(
                                           mutatedChild,
                                           this.Parameters.FitnessEvaluator(mutatedChild)));
                    }
                }
                // All tasks share this count, so the final bag may slightly
                // overshoot childCount.
            } while (crossovers.Count < childCount);
        });
    }
    Task.WaitAll(tasks);
    return(crossovers.AsEnumerable());
}
public async Task <IEnumerable <ProviderResult> > GetProviderResultsBySpecificationIdAndProviders(IEnumerable <string> providerIds, string specificationId)
{
    // Fetches the ProviderResult documents for the given specification, one
    // partitioned query per provider id, all queries running concurrently.
    //
    // Fix: the original passed an async lambda to Parallel.ForEach. Such
    // lambdas compile to async void, so the loop returned before the queries
    // finished and exceptions were unobservable; completion was then polled
    // via a non-interlocked counter — a data race that could hang the method
    // forever. Replaced with tasks awaited via WhenAll.
    Guard.ArgumentNotNull(providerIds, nameof(providerIds));
    Guard.IsNullOrWhiteSpace(specificationId, nameof(specificationId));
    if (providerIds.IsNullOrEmpty())
    {
        return(Enumerable.Empty <ProviderResult>());
    }
    ConcurrentBag <ProviderResult> results = new ConcurrentBag <ProviderResult>();
    List <Task> allTasks = new List <Task>();
    foreach (string providerId in providerIds)
    {
        allTasks.Add(Task.Run(async() =>
        {
            SqlQuerySpec sqlQuerySpec = new SqlQuerySpec
            {
                QueryText = @"SELECT * FROM Root r WHERE r.documentType = @DocumentType AND r.content.specificationId = @SpecificationId AND r.deleted = false",
                Parameters = new SqlParameterCollection
                {
                    new SqlParameter("@DocumentType", nameof(ProviderResult)),
                    new SqlParameter("@SpecificationId", specificationId)
                }
            };
            // The provider scopes the query through the partition entity id
            // rather than a WHERE clause.
            IEnumerable <ProviderResult> providerResults = await _cosmosRepository.QueryPartitionedEntity <ProviderResult>(sqlQuerySpec, partitionEntityId: providerId);
            foreach (ProviderResult providerResult in providerResults)
            {
                results.Add(providerResult);
            }
        }));
    }
    await Task.WhenAll(allTasks.ToArray());
    return(results.AsEnumerable());
}
public async Task <IList <TOut> > Execute <TIn, TOut>(string queueName, long qty, int batchSize, Func <int, Task <IList <TIn> > > getMessages, Func <TIn, Task <TOut> > processMessage)
{
    // Reads up to 'qty' messages in batches of 'batchSize', processes each
    // message, and returns the processed results. Stops when the target
    // quantity is reached, the 10-minute budget elapses, or a batch read
    // returns null (the "breaker").
    var peekedMessages = new ConcurrentBag <TOut>();
    var tasks = new List <Task>();
    // At most five batch reads in flight at once.
    var throttler = new SemaphoreSlim(5);
    var maxWaitTime = TimeSpan.FromMinutes(10);
    var runtime = new Stopwatch();
    var isComplete = false;
    var breaker = 0;
    runtime.Start();
    while (!isComplete)
    {
        var remainingQty = qty - peekedMessages.Count;
        var batches = Batches(batchSize, remainingQty);
        tasks.AddRange(batches.Select(size => Task.Run(async() =>
        {
            // Fix: the original awaited the semaphore ONCE per while-iteration
            // but released it once per batch task, inflating the semaphore's
            // count every round and silently disabling the throttle. Each task
            // now acquires and releases its own slot.
            await throttler.WaitAsync();
            try
            {
                var read = await getMessages(size);
                // A null read trips the breaker; a non-null read resets it.
                // NOTE(review): concurrent batches race on this flag — the last
                // writer wins, same as the original code.
                Interlocked.Exchange(ref breaker, read == null ? 1 : 0);
                if (read != null && read.Any())
                {
                    Parallel.ForEach(read, (msg) =>
                    {
                        // Synchronous wait on the per-message processor, as the
                        // original did, to stay inside Parallel.ForEach.
                        var formattedMsg = processMessage(msg).GetAwaiter().GetResult();
                        peekedMessages.Add(formattedMsg);
                    });
                }
            }
            finally
            {
                throttler.Release();
            }
        })));
        await Task.WhenAll(tasks.ToArray());
        isComplete = (peekedMessages.Count >= qty || runtime.Elapsed > maxWaitTime) || breaker == 1;
    }
    return(peekedMessages.AsEnumerable().ToList());
}
protected virtual IEnumerable <Entity> GetEntities(CrmDbContext context, SolutionDefinition solution, IDictionary <string, object> parameters)
{
    // Runs every non-null query produced by the solution definition in
    // parallel and gathers the fetched entities into one collection.
    ConcurrentBag <Entity> fetched = new ConcurrentBag <Entity>();
    var definedQueries = solution.GetQueries(parameters).Where(query => query != null);
    Parallel.ForEach(definedQueries, query =>
    {
        // Register content-map entities explicitly in the portal-used list:
        // the cache skips them, so dependency calculation would never record
        // them on its own.
        ADXTrace.Instance.TraceVerbose(TraceCategory.Application, $"Entity {query.Entity.Name} is added to Enabled Entity List for Portal Cache ");
        WebAppConfigurationProvider.PortalUsedEntities.TryAdd(query.Entity.Name, true);
        Fetch(context.Service, query, fetched);
    });
    return(fetched.AsEnumerable());
}
public IEnumerable <string> SaveCompressionRates(Group group, TimeSpan start, TimeSpan stop, string toFile = null, CompressionType type = CompressionType.Gzip)
{
    // Saves compression rates for each person in the group concurrently and
    // returns the per-person results (unordered).
    var outcomes = new ConcurrentBag <string>();
    Parallel.ForEach(group.People, person =>
    {
        var saved = _personService.SaveCompressionRates(person, toFile, start, stop, type);
        outcomes.Add(saved);
    });
    return(outcomes.AsEnumerable());
}
private void SaveSubcollection(ConcurrentBag <EToDoProgram> dic)
{
    // Bulk-inserts the buffered to-do/program link rows into RPT_ToDoProgram.
    var rSeries = dic.AsEnumerable();
    using (var bcc = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var objRdr = ObjectReader.Create(rSeries))
    {
        try
        {
            bcc.BulkCopyTimeout = 580;
            bcc.ColumnMappings.Add("MongoToDoId", "MongoToDoId");
            bcc.ColumnMappings.Add("MongoProgramId", "MongoProgramId");
            bcc.DestinationTableName = "RPT_ToDoProgram";
            bcc.WriteToServer(objRdr);
        }
        catch (Exception ex)
        {
            if (ex.Message.Contains("Received an invalid column length from the bcp client for colid"))
            {
                // SqlBulkCopy only reports the 1-based column ordinal; dig the
                // offending column name and maximum length out of its private
                // metadata via reflection so the error is actionable.
                var pattern = @"\d+";
                Match match = Regex.Match(ex.Message, pattern);
                var index = Convert.ToInt32(match.Value) - 1;
                FieldInfo fi = typeof(SqlBulkCopy).GetField("_sortedColumnMappings", BindingFlags.NonPublic | BindingFlags.Instance);
                var sortedColumns = fi.GetValue(bcc);
                var items = (Object[])sortedColumns.GetType().GetField("_items", BindingFlags.NonPublic | BindingFlags.Instance).GetValue(sortedColumns);
                FieldInfo itemdata = items[index].GetType().GetField("_metadata", BindingFlags.NonPublic | BindingFlags.Instance);
                var metadata = itemdata.GetValue(items[index]);
                var column = metadata.GetType().GetField("column", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).GetValue(metadata);
                var length = metadata.GetType().GetField("length", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).GetValue(metadata);
                OnDocColEvent(new ETLEventArgs { Message = "[" + Contract + "] ToDo():SqlBulkCopy process failure: " + ex.Message + String.Format("Column: {0} contains data with a length greater than: {1}", column, length) + " : " + ex.InnerException, IsError = true });
            }
            else
            {
                // Fix: every other exception type was previously swallowed
                // silently; route it through the shared error formatter like
                // the sibling SaveSubcollection overloads do.
                FormatError(ex, bcc);
            }
        }
    }
}
// Pings every configured IP/URL in parallel and builds a human-readable
// (Chinese) summary of failures and slow responses; optionally appends DNS
// resolution results. Returns null when nothing was reportable.
public static async Task <string> ToPingStr(RobotSetting setting)
{
    int failedNum = 0;      // pings that failed outright
    int warningNum = 0;     // pings slower than the configured warning time
    ConcurrentBag <string> pingBag = new ConcurrentBag <string>();
    ConcurrentBag <string> DNSBag = new ConcurrentBag <string>();
    await Parallel.ForEachAsync(setting.IpSettings, async(item, cancellationToken) =>
    {
        switch (await PingIp(item.Url, setting))
        {
        // Complete failure: count it and record its report line.
        case var s when s.Success == 0:
            Interlocked.Increment(ref failedNum);
            pingBag.Add(s.ToString() + "\r\n");
            break;

        // Succeeded but slower than the warning threshold: count as warning.
        case var s when s.Max > setting.PingWarningTime:
            Interlocked.Increment(ref warningNum);
            pingBag.Add(s.ToString() + "\r\n");
            break;
        }
        if (setting.IsAnalyzeUrl) // whether to also resolve the URL via DNS
        {
            var(result, resStr) = await item.DNSAnalyze();
            if (result)
            {
                DNSBag.Add(resStr);
            }
        }
    });
    string pingResult = null;
    if (pingBag.Count > 0)
    {
        // Header line is user-facing Chinese text; left untouched.
        pingResult = string.Concat($"ping {setting.PingTimes}次,响应超过{(double)setting.PingWarningTime / 1000}秒的有{warningNum}个,响应失败的有{failedNum}个\r\n", string.Join("", pingBag.AsEnumerable()), "\r\n");
    }
    if (DNSBag.Count > 0)
    {
        // Appending to a null string yields just the DNS text, which matches
        // the original behavior when no ping lines were produced.
        pingResult += string.Join("", DNSBag.AsEnumerable());
    }
    return(pingResult);
}
private void SaveSubcollection(ConcurrentBag <ECareTeam> dic)
{
    // Bulk-inserts the buffered care-team rows into the RPT_CareTeam table.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            var columns = new[]
            {
                "MongoCareTeamId", "MongoContactIdForPatient", "MongoCareMemberId",
                "MongoContactIdForCareMember", "RoleId", "CustomRoleName",
                "StartDate", "EndDate", "Core", "Notes", "FrequencyId",
                "Distance", "DistanceUnit", "DataSource", "ExternalRecordId",
                "Status", "Version", "DeleteFlag", "TTLDate", "UpdatedBy",
                "LastUpdatedOn", "RecordCreatedBy", "RecordCreatedOn"
            };
            foreach (var column in columns)
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_CareTeam";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private void SaveSubcollection(ConcurrentBag <EMedication> dic)
{
    // Bulk-inserts the buffered medication rows into the RPT_Medication table.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // {source property, destination column} pairs; a few columns are
            // renamed on the way into the reporting table.
            var mappings = new[]
            {
                new[] { "Id", "MongoId" },
                new[] { "ProductId", "ProductId" },
                new[] { "NDC", "NDC" },
                new[] { "FullName", "FullName" },
                new[] { "ProprietaryName", "ProprietaryName" },
                new[] { "ProprietaryNameSuffix", "ProprietaryNameSuffix" },
                new[] { "StartDate", "StartDate" },
                new[] { "EndDate", "EndDate" },
                new[] { "SubstanceName", "SubstanceName" },
                new[] { "Route", "Route" },
                new[] { "Form", "Form" },
                new[] { "FamilyId", "FamilyId" },
                new[] { "Unit", "Unit" },
                new[] { "Strength", "Strength" },
                new[] { "Version", "Version" },
                new[] { "DeleteFlag", "DeleteFlag" },
                new[] { "TTLDate", "TTLDate" },
                new[] { "LastUpdatedOn", "LastUpdatedOn" },
                new[] { "RecordCreatedBy", "MongoRecordCreatedBy" },
                new[] { "RecordCreatedOn", "RecordCreatedOn" },
                new[] { "MongoUpdatedBy", "MongoUpdatedBy" }
            };
            foreach (var mapping in mappings)
            {
                bulkCopy.ColumnMappings.Add(mapping[0], mapping[1]);
            }
            bulkCopy.DestinationTableName = "RPT_Medication";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private void SavePatientUtilizationCollection(ConcurrentBag <EPatientUtilization> dic)
{
    // Bulk-inserts the buffered patient-utilization rows into RPT_PatientUtilization.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // {source property, destination column} pairs; Mongo id columns are
            // renamed on the way into the reporting table.
            var mappings = new[]
            {
                new[] { "Id", "MongoId" },
                new[] { "PatientId", "MongoPatientId" },
                new[] { "NoteTypeId", "MongoNoteTypeId" },
                new[] { "Reason", "Reason" },
                new[] { "VisitTypeId", "MongoVisitTypeId" },
                new[] { "OtherVisitType", "OtherVisitType" },
                new[] { "AdmitDate", "AdmitDate" },
                new[] { "Admitted", "Admitted" },
                new[] { "DischargeDate", "DischargeDate" },
                new[] { "LocationId", "MongoLocationId" },
                new[] { "OtherLocation", "OtherLocation" },
                new[] { "DispositionId", "MongoDispositionId" },
                new[] { "OtherDisposition", "OtherDisposition" },
                new[] { "UtilizationSourceId", "MongoUtilizationSourceId" },
                new[] { "DataSource", "DataSource" },
                new[] { "UpdatedBy", "MongoUpdatedBy" },
                new[] { "LastUpdatedOn", "LastUpdatedOn" },
                new[] { "RecordCreatedBy", "MongoRecordCreatedBy" },
                new[] { "RecordCreatedOn", "RecordCreatedOn" },
                new[] { "Version", "Version" },
                new[] { "DeleteFlag", "Delete" }
            };
            foreach (var mapping in mappings)
            {
                bulkCopy.ColumnMappings.Add(mapping[0], mapping[1]);
            }
            bulkCopy.DestinationTableName = "RPT_PatientUtilization";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
// Classifies the sentiment of each tweet through a TPL Dataflow pipeline,
// feeds the labelled sentences back into the learning service, and returns
// the rebuilt tweets wrapped in a Result.
public async Task <Result <IEnumerable <Tweet> > > AnalyzeAsync(IEnumerable <Tweet> tweets)
{
    // Bounded buffer provides back-pressure between producer and classifier.
    var buffer = new BufferBlock <Tweet>(new DataflowBlockOptions { BoundedCapacity = Environment.ProcessorCount * 5 });
    var learnerState = _learningService.Get();
    var result = new ConcurrentBag <Tweet>();
    var classifier = new ActionBlock <Tweet>(x =>
    {
        var res = _tweetClassifier.Classify(x.Text, learnerState);
        var tweetBuilder = x.Builder;
        tweetBuilder.Sentiment = res.Sentence.Category;
        result.Add(tweetBuilder.Build());
    }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
    // NOTE(review): Range(...).Select(x => classifier) repeats the SAME block
    // instance ProcessorCount times — it does not create independent workers,
    // and linking the buffer to one block several times adds no parallelism.
    // Presumably the intent was one ActionBlock per core; confirm before
    // relying on this for throughput.
    var classifiers = Enumerable.Range(0, Environment.ProcessorCount).Select(x => classifier).ToList();
    var linkToOptions = new DataflowLinkOptions { PropagateCompletion = true };
    classifiers.ForEach(x => buffer.LinkTo(x, linkToOptions));
    using (tweets.ToObservable()
           .Retry(5)
           .SubscribeOn(Scheduler.Default)
           // NOTE(review): the async onNext lambda is fire-and-forget here;
           // an exception thrown by SendAsync would be unobservable.
           .Subscribe(onNext: async tweet => { await buffer.SendAsync(tweet); }, onError: ex => { ((IDataflowBlock)buffer).Fault(ex); }, onCompleted: () => { buffer.Complete(); }))
    {
        // Wait for the buffer to drain, then for the classifier(s) to finish.
        await Task.WhenAll(buffer.Completion);
        await Task.WhenAll(classifiers.Select(x => x.Completion).ToArray());
        // Feed the classified sentences back to the learner before returning.
        _learningService.Learn(result.Select(x => new Sentence(x.Text, x.Sentiment)));
        return(Result <IEnumerable <Tweet> > .Wrap(result.AsEnumerable()));
    }
}
private void SaveSubcollection(ConcurrentBag <ECareTeamFrequency> dic)
{
    // Bulk-inserts the buffered care-team frequency rows into RPT_CareTeamFrequency.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            foreach (var column in new[] { "MongoId", "Name" })
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_CareTeamFrequency";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private static IEnumerable <string> SuperConcat(int curChar, List <List <char?> > charss, Validator f = null)
{
    // Recursively builds every string formed by choosing one (possibly null)
    // character per position from charss[curChar..], keeping only the strings
    // the optional validator accepts. The current level fans out in parallel.
    ConcurrentBag <string> combinations = new ConcurrentBag <string>();
    if (curChar == charss.Count)
    {
        // Base case: a single empty suffix.
        combinations.Add("");
        return(combinations);
    }
    Parallel.ForEach <char?>(charss[curChar], candidate =>
    {
        foreach (var suffix in SuperConcat(curChar + 1, charss, f))
        {
            // A null candidate contributes nothing to the concatenation.
            var built = candidate + suffix;
            if (f == null || f(built))
            {
                combinations.Add(built);
            }
        }
    });
    return(combinations.AsEnumerable());
}
private void SavePharmClasses(ConcurrentBag <EPharmClass> dic)
{
    // Bulk-inserts the buffered pharmacological-class rows into RPT_MedPharmClass.
    var rows = dic.AsEnumerable();
    using (var bulkCopy = new SqlBulkCopy(ConnectionString, SqlBulkCopyOptions.Default))
    using (var reader = ObjectReader.Create(rows))
    {
        try
        {
            bulkCopy.BulkCopyTimeout = 580;
            // Source and destination column names are identical for this table.
            foreach (var column in new[] { "MedMongoId", "PharmClass" })
            {
                bulkCopy.ColumnMappings.Add(column, column);
            }
            bulkCopy.DestinationTableName = "RPT_MedPharmClass";
            bulkCopy.WriteToServer(reader);
        }
        catch (Exception ex)
        {
            FormatError(ex, bulkCopy);
        }
    }
}
private async void BatchQuery_Click(object sender, RoutedEventArgs e)
{
    // Queries every student's grade page in parallel, then rebinds the grid.
    // async void is acceptable here only because this is a UI event handler.
    LoadLbl.Content = "Loading..."; // fix: label was misspelled "Loding..."
    BatchQuery.IsEnabled = false;
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    var list = new ConcurrentBag <GradeInfo>();
    // Fix: the original appended to a shared string from parallel threads — a
    // data race that could drop failure messages. Collect them in a
    // thread-safe bag and join afterwards.
    var failures = new ConcurrentBag <string>();
    await Task.Factory.StartNew(() =>
    {
        Parallel.ForEach(StuList, item =>
        {
            RequestHelper helper = new RequestHelper();
            try
            {
                string gradeHtmlString = helper.Request(item.UserName, item.ID);
                var grade = new StringExtract().ExtractGradeStr(gradeHtmlString);
                grade.Order = item.Order;
                list.Add(grade);
            }
            catch (Exception)
            {
                failures.Add($"{item.Order}查询失败\n");
            }
        });
    });
    stopwatch.Stop();
    string error = string.Concat(failures);
    GradeList.Clear();
    GradeList = list.AsEnumerable().ToList();
    LoadLbl.Content = "";
    BatchQuery.IsEnabled = true;
    MessageBox.Show("耗时:" + stopwatch.ElapsedMilliseconds.ToString() + "\n" + "查询条数:" + GradeList.Count.ToString() + "\n" + error);
    // Restore the stable on-screen ordering before rebinding the table.
    GradeList.Sort((x, y) => x.Order.CompareTo(y.Order));
    GradeTable.ItemsSource = null;
    GradeTable.ItemsSource = GradeList;
}
public virtual async Task <IEnumerable <IUserCreditModel> > FetchUserCredits(Guid userId)
{
    // Loads the stored credits for a user and projects them into credit
    // models; returns null when the user has no credit rows.
    var credits = await this._userCreditsRepository.GetUserCreditsById(userId);
    if (credits == null || !credits.Any())
    {
        return(null);
    }
    var models = new ConcurrentBag <IUserCreditModel>();
    foreach (var credit in credits)
    {
        models.Add(new UserCreditModel
        {
            Id = credit.Id,
            Action = DbOperation.NoAction,
            WorkYear = credit.Year,
            WorkPlace = credit.WorkPlace,
            WorkDetail = credit.WorkDetail
        });
    }
    return(models.AsEnumerable());
}
static void Main(string[] args)
{
    // Reads ticker symbols from the file given as args[0], downloads company
    // info for each in parallel, dumps the raw JSON to Results_Json.csv, then
    // flattens the data into Results_Cleaned.csv. Pass "skip" to reuse a
    // previously saved Results_Json.csv instead of re-downloading.
    if (args.Length < 1)
    {
        Console.WriteLine("You must supply a file path as the first argument.");
        return;
    }
    IEnumerable <StockInfo> stockInfos;
    if (!args.Contains("skip"))
    {
        var path = args[0];
        var fileExists = File.Exists(path);
        if (!fileExists)
        {
            Console.WriteLine("The specified file could not be found.");
            return;
        }
        var tickers = File.ReadAllLines(path);
        stockInfos = new ConcurrentBag <StockInfo>();
        var count = 0;
        // One HttpClient per worker thread via the localInit overload.
        Parallel.ForEach(tickers, () => new HttpClient(), (ticker, loopState, httpClient) =>
        {
            HttpResponseMessage response;
            // Retry until success; 404 means the ticker has no data, so bail out.
            do
            {
                response = httpClient.GetAsync($"{ApiPath}?identifier={ticker}&api_key={ApiKey}").GetAwaiter().GetResult();
                if (response.StatusCode == HttpStatusCode.NotFound)
                {
                    break;
                }
                Console.WriteLine(response.StatusCode);
            }while (!response.IsSuccessStatusCode);
            if (response.StatusCode == HttpStatusCode.NotFound)
            {
                return(httpClient);
            }
            string body = response.Content.ReadAsStringAsync().GetAwaiter().GetResult();
            try
            {
                var stockInfo = JsonConvert.DeserializeObject <StockInfo>(body);
                ((ConcurrentBag <StockInfo>)stockInfos).Add(stockInfo);
            }
            catch (JsonReaderException e)
            {
                // Log and skip malformed payloads rather than aborting the run.
                Console.WriteLine(e.Message);
                Console.WriteLine(body);
            }
            // Fix: ++count from parallel workers was a data race; use an
            // interlocked increment for an accurate progress counter.
            Console.WriteLine(Interlocked.Increment(ref count));
            return(httpClient);
        }, _ => { });
        // Fix: dispose the writer deterministically with 'using'.
        using (var resultsFile = File.CreateText("Results_Json.csv"))
        {
            resultsFile.Write(JsonConvert.SerializeObject(stockInfos.AsEnumerable()));
            resultsFile.Flush();
        }
    }
    else
    {
        var jsonDoc = File.ReadAllLines("Results_Json.csv")[0];
        stockInfos = JsonConvert.DeserializeObject <IEnumerable <StockInfo> >(jsonDoc);
    }
    var cleanedInfos = stockInfos.AsParallel()
                       .Select(stockInfo =>
    {
        return($"{stockInfo.ticker},{stockInfo.name},{stockInfo.legalName},{stockInfo.sic},{stockInfo.stockExchange},{stockInfo.shortDescription},{stockInfo.longDescription}");
    }).ToList();
    using (var file = File.CreateText("Results_Cleaned.csv"))
    {
        file.WriteLine("ticker,name,legalname,sic,stockexchange,shortDescription,longDescription");
        foreach (var line in cleanedInfos)
        {
            file.WriteLine(line);
        }
        file.Flush();
    }
    Console.WriteLine(stockInfos.Count());
}
/// <summary>Exposes the cached active collections as a read-only sequence.</summary>
public IEnumerable <ActiveCollection> GetAllActiveCollections() => _collectionsCacheAside.AsEnumerable();
public void ShouldReturnProperSubsetOfNumberArray()
{
    // Drawing fewer unique random numbers than the source pool holds must
    // produce a proper subset of that pool.
    int[] sourceNumbers = new int[] { 1, 2, 3, 4, 3, 6, 4, 8, 17, 42, 6 };
    ConcurrentBag<int> sourceBag = new ConcurrentBag<int>(sourceNumbers);
    UniqueRandomNumberGenerator generator = new UniqueRandomNumberGenerator(sourceNumbers);
    const int GeneratedRandomNumberCount = 3;

    // Sanity check: the draw count must be strictly below the pool size,
    // otherwise a *proper* subset is impossible.
    Debug.Assert(
        GeneratedRandomNumberCount < sourceBag.Count,
        String.Format("The generated random number count {0} must be less than the count of initial numbers {1} for this test.", GeneratedRandomNumberCount, sourceBag.Count));

    Debug.WriteLine("Random Numbers");
    ConcurrentBag<int> drawnBag = new ConcurrentBag<int>();
    for (int draw = 0; draw < GeneratedRandomNumberCount; draw++)
    {
        drawnBag.Add(generator.NewRandomNumber());
    }

    HashSet<int> sourceSet = new HashSet<int>(sourceBag.AsEnumerable());
    HashSet<int> drawnSet = new HashSet<int>(drawnBag.AsEnumerable());
    Assert.IsTrue(
        drawnSet.IsProperSubsetOf(sourceSet),
        "Generated numbers should be a subset of the initial numbers.");
}