/// <summary>
/// Removes contained paths and merges overlapping paths.
/// </summary>
/// <param name="scaffoldPaths">Input paths/scaffolds.</param>
public void PurgePath(IList<ScaffoldPath> scaffoldPaths)
{
    if (scaffoldPaths != null && scaffoldPaths.Count != 0)
    {
        this.internalScaffoldPaths = scaffoldPaths.AsParallel().OrderBy(t => t.Count).ToList();
        bool isUpdated = true;
        bool[] isConsumed = new bool[this.internalScaffoldPaths.Count];

        while (isUpdated)
        {
            isUpdated = false;
            for (int index = 0; index < this.internalScaffoldPaths.Count; index++)
            {
                if (this.internalScaffoldPaths[index] != null &&
                    this.internalScaffoldPaths[index].Count != 0 &&
                    !isConsumed[index])
                {
                    isUpdated |= this.SearchContainingAndOverlappingPaths(this.internalScaffoldPaths[index], isConsumed);
                }
                else
                {
                    isConsumed[index] = true;
                }
            }
        }

        this.UpdatePath(isConsumed);
        scaffoldPaths.Clear();
        ((List<ScaffoldPath>)scaffoldPaths).AddRange(this.internalScaffoldPaths);
    }
}
public ActionResult List()
{
    List<Environment> environments = this.Entities.Environment
        .Include("Properties")
        .Include("Machines.MachineRoles")
        .ToList();
    IList<Machine> machines = environments.SelectMany(e => e.Machines).Distinct().ToList();

    Task<Dictionary<Machine, SatelliteState>> isAliveTask =
        new TaskFactory<Dictionary<Machine, SatelliteState>>().StartNew(() =>
            machines
                .AsParallel()
                .Select(m => new { m, alive = this.satelliteMonitor.IsAlive(m) })
                .ToDictionary(k => k.m, k => k.alive));
    Task<Dictionary<Machine, IServerSummary>> getSummaryTask =
        new TaskFactory<Dictionary<Machine, IServerSummary>>().StartNew(() =>
            machines
                .AsParallel()
                .Select(m => new { m, summary = this.satelliteMonitor.GetServerSummary(m) })
                .ToDictionary(k => k.m, k => k.summary));
    Task.WaitAll(isAliveTask, getSummaryTask);

    this.ViewBag.MachineStates = isAliveTask.Result;
    this.ViewBag.MachineSummaries = getSummaryTask.Result;
    this.ViewBag.Environments = environments;
    return this.View();
}
public double PredictLegit(double penaltyLegit, double penaltySpam, double windowWidth)
{
    var legitCorrectForecast = _testLegit
        .AsParallel()
        .Select(msg => Predict(msg, _trainLegit, penaltyLegit, windowWidth))
        .ToArray();
    var legitIncorrectForecast = _testLegit
        .AsParallel()
        .Select(msg => Predict(msg, _trainSpam, penaltySpam, windowWidth))
        .ToArray();

    return legitCorrectForecast.GetTP(legitIncorrectForecast) / (double)_testLegit.Count * 100;
}
private bool ColumnIsEmpty(string val, string column)
{
    var reportPage = new ReportPage(_driver);
    if (reportPage.NoData.Displayed)
    {
        return false;
    }

    // Find the 1-based index of the requested column header.
    var headers = _driver.FindElements(By.TagName("th"));
    int colNumber = 0;
    for (int i = 1; i <= headers.Count; i++)
    {
        if (headers[i - 1].Text == column)
        {
            colNumber = i;
            break;
        }
    }

    Console.WriteLine("*//table/tbody/tr/th[contains(., '{0}')]/../../tr/td[{1}]", column, colNumber);
    IList<IWebElement> list = _driver.FindElements(
        By.XPath(String.Format("*//table/tbody/tr/th[contains(., '{0}')]/../../tr/td[{1}]", column, colNumber)));
    IList<string> names = new List<string>();
    // Note: AsParallel as a foreach source is still drained sequentially.
    foreach (var el in list.AsParallel())
    {
        Console.WriteLine(el.Text);
        names.Add(el.Text);
    }

    // IsNullOrWhiteSpace already subsumes IsNullOrEmpty.
    return names.AsParallel().All(el => String.IsNullOrWhiteSpace(el));
}
private void InitHistoryDatas()
{
    var numberHistoryDatas = new ConcurrentDictionary<int, PeriodNumberInfo>();
    _threeRegionDatas = new ConcurrentDictionary<ThreeRegion, ThreeRegionDataInfo>();

    _lotteryDataPackage.AsParallel().ForAll(lotteryData =>
    {
        var lotteryNum = lotteryData.Data.Split(',').Select(p => Convert.ToInt32(p)).ToList()[_numberInfo.KeyNumber - 1];
        var periodNumberInfo = new PeriodNumberInfo(lotteryData.Period, lotteryNum, _numberInfo);
        numberHistoryDatas[lotteryData.Period] = periodNumberInfo;

        // Caution: these unsynchronized increments race under ForAll;
        // Interlocked.Increment would make the counts reliable.
        if (lotteryNum <= NumberInfo.SizeCriticalValue)
        {
            smallSizeCount++;
        }
        if (lotteryNum % 2 == 1)
        {
            oddCount++;
        }

        // GetOrAdd avoids the check-then-add race of Keys.Contains on a ConcurrentDictionary.
        _threeRegionDatas
            .GetOrAdd(
                periodNumberInfo.ThreeRegionShape,
                shape => new ThreeRegionDataInfo(shape, _lotteryDataPackage.Count, _numberInfo))
            .PutThisRegionData(new KeyValuePair<int, PeriodNumberInfo>(lotteryData.Period, periodNumberInfo));
    });

    _numberHistoryDatas = numberHistoryDatas.OrderByDescending(p => p.Key).ToDictionary(p => p.Key, p => p.Value);
}
public static void GenerateMarketableItemJSON(IList<Item> items, IList<ItemSearchCategory> categories, string outputPath)
{
    var mieBaseTop = Console.CursorTop;
    dynamic itemJSONOutput = new JObject();

    var itemID = new List<int>();
    foreach (var category in categories)
    {
        if (category.RowId < 9)
        {
            goto console_update;
        }

        var itemSet = items
            .AsParallel()
            .Where(item => item.ItemSearchCategory.Value.RowId == category.RowId)
            .Select(item => item.RowId)
            .Select(Convert.ToInt32)
            .ToList();
        if (!itemSet.Any())
        {
            goto console_update;
        }

        itemID = itemID.Concat(itemSet).ToList();

console_update:
        Console.CursorLeft = 0;
        Console.CursorTop = mieBaseTop;
        Console.Write($"cat: [{category.RowId}/{categories.Count - 1}]");
    }

    itemID.Sort();
    itemJSONOutput.itemID = JToken.FromObject(itemID);
    File.WriteAllText(Path.Combine(outputPath, "item.json"), JsonConvert.SerializeObject(itemJSONOutput));
    Console.WriteLine();
}
private IList<CloudQueueMessage> TransformInboundMessages(IList<CloudQueueMessage> inboundMessages, Stopwatch stopWatch)
{
    if (this.OptionalThreadSafeMessageTransformer == null)
    {
        return inboundMessages;
    }

    // Transform messages before requeueing them.
    if (inboundMessages.Count < 5)
    {
        // When we have a few messages, transform them sequentially.
        inboundMessages = inboundMessages.Select(msg => this.OptionalThreadSafeMessageTransformer(msg)).ToList();
    }
    else
    {
        // When there are many messages, transform them in parallel.
        inboundMessages = inboundMessages.AsParallel().AsOrdered().Select(msg => this.OptionalThreadSafeMessageTransformer(msg)).ToList();
    }

    this.LogInformation("Done transforming {0} messages. Elapsed {1}.", inboundMessages.Count, stopWatch.Elapsed);
    stopWatch.Reset();
    stopWatch.Start();
    return inboundMessages;
}
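// Usage sketch for the method above (not from the original source): wiring in a
// transformer. The delegate body is hypothetical; the only contract relied on is
// a thread-safe Func<CloudQueueMessage, CloudQueueMessage>, since the parallel
// branch may invoke it from several threads at once.
this.OptionalThreadSafeMessageTransformer =
    msg => new CloudQueueMessage(msg.AsString.ToUpperInvariant());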
public static IEnumerable<DataTable> ToDataTables(this IList<PerformanceAggregate> aggregates, bool executeForPrepare)
{
    // Note: PLINQ only accepts a degree of parallelism between 1 and 512, so an
    // empty or very large aggregate list would make this call throw.
    var performanceTables = aggregates
#if !DEBUG
        .AsParallel().WithDegreeOfParallelism(aggregates.Count)
#endif
        .Select(p =>
        {
            var counterTable = p.Counter.ToDataTable();
            if (executeForPrepare)
            {
                return counterTable;
            }

            foreach (var item in p.Values)
            {
                counterTable.Rows.Add(item.TraceEventTime, item.Value);
            }

            return counterTable;
        });

    return performanceTables;
}
/// <summary>
/// Sets up fields for the assembly process.
/// </summary>
/// <param name="sequenceReads">List of sequence reads.</param>
private void Initialize(IList<ISequence> sequenceReads)
{
    // Reset parameters not set by user, based on sequenceReads.
    if (AllowKmerLengthEstimation)
    {
        _kmerLength = EstimateKmerLength(sequenceReads);
    }

    if (_kmerLength <= 0)
    {
        throw new InvalidOperationException(Properties.Resource.KmerLength);
    }

    if (!sequenceReads.AsParallel().All(seq => seq.Count >= _kmerLength))
    {
        throw new InvalidOperationException(Properties.Resource.InappropriateKmerLength);
    }

    if (_dangleThreshold == -1)
    {
        _dangleThreshold = _kmerLength + 1;
    }

    if (_redundantPathLengthThreshold == -1)
    {
        // Reference for default threshold for redundant path purger:
        // ABySS Release Notes 1.1.2 - "Pop bubbles shorter than N bp. The default is b=3*(k + 1)."
        _redundantPathLengthThreshold = 3 * (_kmerLength + 1);
    }

    InitializeDefaultGraphModifiers();
    RemoveAmbiguousReads(sequenceReads);
}
private async Task<IList<Path<T>>> _fork(Node<T> destination, IList<Path<T>> pathes, Matrix<T> reachibilityMatrix, Matrix<T> adjacencyMatrix)
{
    var forked = new List<Path<T>>();
    // The lock must be shared by all workers; one instance per lambda would protect nothing.
    var rwLock = new AsyncReaderWriterLock();
    var uncompleted = pathes.AsParallel().Where(p => !p.Contains(destination));

    // ForAll cannot observe async lambdas, so materialize the tasks and await them all.
    var forkTasks = uncompleted.Select(async path =>
    {
        var lastStep = path.Last();
        var nextStepsCandidates = await adjacencyMatrix.GetRow(lastStep);
        var nextSteps = nextStepsCandidates.AsParallel()
            .Where(x => x.Value > 0)
            .Select(x => x.Key)
            .Where(candidate => reachibilityMatrix[candidate, destination] > 0 || candidate == destination);
        foreach (var nextStep in nextSteps.Where(c => !path.Contains(c)).AsEnumerable())
        {
            var pathCopy = await path.Clone();
            await pathCopy.Add(nextStep);
            using (await rwLock.WriterLockAsync())
            {
                forked.Add(pathCopy);
            }
        }
    }).ToArray();
    await Task.WhenAll(forkTasks);

    if (!forked.Any())
    {
        return pathes;
    }

    var completedPathes = pathes.Where(path => path.Contains(destination));
    forked.AddRange(completedPathes);
    return await _fork(destination, forked, reachibilityMatrix, adjacencyMatrix).ConfigureAwait(false); // tail-recursive
}
/// <summary>
/// An aggregate parallel query to return the minimum and the maximum of <paramref name="data"/> together,
/// faster than two successive parallel queries for the minimum and the maximum.
/// </summary>
/// <param name="data">The list whose extrema we are to find.</param>
/// <returns>A <see cref="Tuple{T1, T2}"/> instance whose <c>Item1</c> is the minimum and whose <c>Item2</c> is the maximum of <paramref name="data"/>.</returns>
public static Tuple<double, double> Extrema(this IList<double> data)
{
    ParallelQuery<double> query = data.AsParallel();
    return query.Aggregate(
        // Initialise accumulator:
        () => new ExtremumAccumulator() { Min = Double.MaxValue, Max = Double.MinValue },

        // Aggregate calculations:
        (accumulator, item) =>
        {
            if (item < accumulator.Min)
            {
                accumulator.Min = item;
            }

            if (item > accumulator.Max)
            {
                accumulator.Max = item;
            }

            return accumulator;
        },

        // Combine accumulators:
        (accumulator1, accumulator2) => new ExtremumAccumulator()
        {
            Min = Math.Min(accumulator1.Min, accumulator2.Min),
            Max = Math.Max(accumulator1.Max, accumulator2.Max)
        },

        // Get result:
        accumulator => new Tuple<double, double>(accumulator.Min, accumulator.Max));
}
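// A minimal sketch of calling the Extrema extension above, assuming its class is in scope:
IList<double> samples = new List<double> { 3.5, -1.2, 7.8, 0.0 };
Tuple<double, double> extrema = samples.Extrema();
Console.WriteLine($"min={extrema.Item1}, max={extrema.Item2}"); // min=-1.2, max=7.8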
/// <summary>
/// Combines the per-instrument return lists into a single date-wise summed list.
/// Note that only the returned enumerable is parallelised; the grouping and summing run sequentially.
/// </summary>
/// <param name="analyticCollections">One list of analytics per instrument.</param>
/// <returns>The date-wise sums of the combined analytics.</returns>
public static IEnumerable<Analytic> CombineAnalyticsInParallel(IList<IList<Analytic>> analyticCollections)
{
    AnalyticDates = new List<DateTime>();
    AnalyticValues = new List<decimal>();
    FinalAnalyticCollection = new List<Analytic>();

    IEnumerable<Analytic> combinedAnalyticCollection = new List<Analytic>();
    combinedAnalyticCollection = analyticCollections.Aggregate(combinedAnalyticCollection, (current, list) => current.Concat(list)).ToList();
    var combineByDate = combinedAnalyticCollection.GroupBy(d => d.Date);

    foreach (var date in combineByDate)
    {
        AnalyticDates.Add(date.Key);
        foreach (var value in date)
        {
            AnalyticValues.Add(value.CalculatedAnalytic);
        }

        Analytic summedAnalytic = new Analytic();
        summedAnalytic.Date = date.Key;
        summedAnalytic.CalculatedAnalytic = Math.Round(AnalyticValues.Sum(), 2);
        FinalAnalyticCollection.Add(summedAnalytic);
        AnalyticValues.Clear();
    }

    return FinalAnalyticCollection.AsParallel();
}
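// The same date-wise aggregation could be expressed as one GroupBy pipeline; a
// sketch only, assuming Analytic exposes settable Date and CalculatedAnalytic
// properties as the method above implies:
IEnumerable<Analytic> combined = analyticCollections
    .SelectMany(list => list)
    .AsParallel()
    .GroupBy(a => a.Date)
    .Select(g => new Analytic { Date = g.Key, CalculatedAnalytic = Math.Round(g.Sum(a => a.CalculatedAnalytic), 2) })
    .ToList();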
public void Notify()
{
    // AsParallel here only parallelises the (trivial) query production; foreach
    // still executes each command sequentially on the calling thread.
    foreach (Command command in _orders.AsParallel())
    {
        command.ExecuteCommand();
    }
}
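// If each command were independent and thread-safe, a variant like this sketch
// would actually execute them concurrently (ForAll, unlike foreach, runs the
// action on the PLINQ worker threads):
public void NotifyParallel()
{
    _orders.AsParallel().ForAll(command => command.ExecuteCommand());
}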
public static IList<DataElement> CreateDataElements(string datastr, string filename)
{
    IList<DataElement> elist = ReadDataElements(datastr);
    elist.AsParallel().ForAll(x => x.Filename = filename);
    return elist;
}
public static IEnumerable<IGrouping<TKey, TMapped>> Map<TSource, TKey, TMapped>(
    this IList<TSource> source,
    Func<TSource, IEnumerable<TMapped>> map,
    Func<TMapped, TKey> keySelector)
    => source.AsParallel()
        .WithExecutionMode(ParallelExecutionMode.ForceParallelism)
        .WithDegreeOfParallelism(Environment.ProcessorCount)
        .SelectMany(map)
        .GroupBy(keySelector)
        .ToList();
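// Hypothetical word-count usage of the Map extension above: map each line to its
// words, then group occurrences by the word itself.
IList<string> lines = new List<string> { "a b", "b c" };
foreach (var group in lines.Map(line => line.Split(' '), word => word))
{
    // Counts: a=1, b=2, c=1 (group order may vary).
    Console.WriteLine($"{group.Key}: {group.Count()}");
}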
public ActionResult List()
{
    LogWriter.Instance().LogWrite("/Board/List accessed");
    if (!SessionCheck(Define.Session.SESSION_CHECK))
    {
        LogWriter.Instance().LogWrite("/Board/List failed: session expired");
        return ErrorPage("/Home/Error");
    }

    if (!CheckAuth())
    {
        LogWriter.Instance().LogWrite(UserSession.UserId, "/Board/List authentication error");
        return base.Logout();
    }

    int count = Convert.ToInt32(Math.Ceiling((Double)boardDao.GetBoardCount() / (Double)PAGELIMIT));
    IList<Board> list = boardDao.SelectBoard(PAGELIMIT, 1);
    ViewBag.listcount = count;

    // Append each post's comment count to its title, in parallel.
    list.AsParallel().ForAll(board =>
    {
        board.Title += " (" + commentDao.GetCommentCount(board.Idx).ToString() + ")";
    });
    ViewBag.list = list;

    Session[Define.Session.CONTROLLER] = "Board";
    Session[Define.Session.ACTION] = "List";
    return View("~/Views/Board/Web/List.cshtml", Define.MASTER_VIEW);
}
private IList<HseReceiverUserEntity> ConvertSadToHseReceiverUserEntities(IList<HSELocalSADEntity> list, string createdLillyId, int hseMessageId)
{
    var topUsers = _hseReceiverUserService.GetTop30InDayUsers(list.Select(x => x.LillyID).ToList()).ToList();
    return list.AsParallel().Select(x =>
    {
        var user = topUsers.FirstOrDefault(y => y.LillyId.Equals(x.LillyID, StringComparison.OrdinalIgnoreCase));
        return new HseReceiverUserEntity
        {
            Tel = x.Phone,
            LillyId = x.LillyID,
            CreatedLillyId = createdLillyId,
            FristLevelDepartmentName = x.Company,
            SecondLevelDepartmentName = x.Department,
            ThirdLevelDepartmentName = x.SubDepartment,
            Name = x.ChineseName,
            HseMessageId = hseMessageId,
            ManagerName = x.ManagerName,
            ManagerLillyId = x.ManagerID,
            ManagerTel = x.ManagerTel,
            Location = x.BaseLocation,
            CreatedDateTime = user == null ? DateTime.Now : user.CreatedDateTime,
            Status = user == null ? Status.NoReplied.ToString() : user.Status,
        };
    }).ToList();
}
private static void PLLinqMerge(IList<ShoppingCart> shoppingCarts)
{
    var q = from cart in shoppingCarts.AsParallel()
            select cart;

    // Parallel.ForEach re-partitions the PLINQ output before processing it...
    Parallel.ForEach(q, item => { /* Process item. */ });

    // ...whereas ForAll runs the action inside the existing PLINQ pipeline,
    // skipping the merge step.
    q.ForAll(p => { });
}
private IEnumerable<IssueAndLogs> GetLogs(IList<Issue> issues)
{
    return issues.AsParallel()
        .WithDegreeOfParallelism(PARALLEL_DEGREE)
        .Select(issue => new IssueAndLogs
        {
            Issue = issue,
            Logs = _jiraLoader.GetLogs(issue)
        });
}
// Takes several search patterns and enumerates the matching files in parallel.
private static IEnumerable<string> GetFiles(string path, IList<string> searchPatterns, SearchOption searchOption = SearchOption.AllDirectories)
{
    return searchPatterns.AsParallel()
        .SelectMany(searchPattern => Directory.EnumerateFiles(path, searchPattern, searchOption));
}
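// Example invocation of GetFiles above; the path and patterns are illustrative only:
foreach (var file in GetFiles(@"C:\repo\src", new List<string> { "*.cs", "*.csproj" }))
{
    Console.WriteLine(file);
}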
protected virtual IList<Item<API_CLASS>> ToApiModel(IList<BLL_CLASS> searchResults)
{
    return searchResults.AsParallel().AsOrdered().Select(e => new Item<API_CLASS>(
        data: ToApiModel(e),
        links: GetLinks(BaseSearchPath, PrimaryEntityType, e),
        messages: e.Messages)).ToList();
}
private IList<Tuple<string, string>> CalculateBackupSet(IList<PackageRef> packages, ISet<string> backups)
{
    return packages
        .AsParallel()
        .Select(r => Tuple.Create(StorageHelpers.GetPackageBlobName(r), StorageHelpers.GetPackageBackupBlobName(r)))
        .Where(t => !backups.Contains(t.Item2))
        .ToList();
}
public void Run()
{
    OnStartProcessing?.Invoke(_dataSets.Count);
    // Process must be thread-safe: ForAll invokes it concurrently across partitions.
    _dataSets
        .AsParallel()
        .ForAll(Process);
}
public List<CdaDocument> Extract(IList<XmlDocument> cdaDocuments)
{
    if (cdaDocuments == null || cdaDocuments.Count == 0)
    {
        throw new ArgumentException("'cdaDocuments' cannot be null or empty");
    }

    return cdaDocuments.AsParallel().Select(Extract).ToList();
}
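// Illustrative call of Extract above; "extractor" and the one-element document
// are placeholders, and a real input would be a full CDA XML document:
var doc = new XmlDocument();
doc.LoadXml("<ClinicalDocument/>");
List<CdaDocument> extracted = extractor.Extract(new List<XmlDocument> { doc });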
public ValueTask<CSharpCompilation> EnrichAsync(CSharpCompilation target, CancellationToken cancellationToken = default)
    => new ValueTask<CSharpCompilation>(target
        .AddSyntaxTrees(_generators
            .AsParallel()
            .AsUnordered()
            .WithCancellation(cancellationToken)
            .SelectMany(p => p.Generate())
            .ToArray()));
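// A hypothetical call site for EnrichAsync above, assuming an async context and
// an "enricher" instance already constructed with some generator collection:
CSharpCompilation compilation = CSharpCompilation.Create("GeneratedAssembly");
compilation = await enricher.EnrichAsync(compilation, CancellationToken.None);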
/// <summary>
/// Removes input sequences that have ambiguous symbols.
/// Updates the field holding sequence reads.
/// </summary>
/// <param name="inputSequences">List of input sequences.</param>
protected void RemoveAmbiguousReads(IList<ISequence> inputSequences)
{
    _sequenceReads = new List<ISequence>(
        inputSequences.AsParallel().Where(s => s.All(c => !c.IsAmbiguous && !c.IsGap)));
    if (_sequenceReads.Count == 0)
    {
        throw new InvalidOperationException(Properties.Resource.AmbiguousCharacter);
    }
}
private static List<T> Sort(IList<T> items, PropertyDescriptor prop, ListSortDirection direction)
{
    PropertyInfo propertyInfo = typeof(T).GetProperty(prop.Name);
    object keySelector(T i) => propertyInfo.GetValue(i);

    // Only large lists amortise PLINQ's overhead; smaller ones sort sequentially.
    if (items.Count > 10000)
    {
        return (direction == ListSortDirection.Ascending
                ? items.AsParallel().OrderBy(keySelector)
                : items.AsParallel().OrderByDescending(keySelector))
            .ToList();
    }

    return (direction == ListSortDirection.Ascending
            ? items.OrderBy(keySelector)
            : items.OrderByDescending(keySelector))
        .ToList();
}
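// Sketch of invoking Sort above: the PropertyDescriptor usually comes from
// TypeDescriptor, so this assumes T exposes a property named "Name".
PropertyDescriptor nameProp = TypeDescriptor.GetProperties(typeof(T))["Name"];
List<T> sorted = Sort(items, nameProp, ListSortDirection.Ascending);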
private IEnumerable<IssueAndLogs> GetLogs(IList<Issue> issues)
{
    return issues.AsParallel()
        .WithDegreeOfParallelism(PARALLEL_DEGREE)
        .Select(issue => new IssueAndLogs
        {
            Issue = issue,
            Logs = issue.GetChangeLogsAsync().Result.ToList()
        })
        .ToList();
}
private static bool CalculatePixelOrder()
{
    IEnumerable<Pixel> relativePixelsToBuild;
    IEnumerable<int> allY = Enumerable.Range(0, height);
    IEnumerable<int> allX = Enumerable.Range(0, width);
    IList<Pixel> nonEmptyPixels = allX
        .SelectMany(X => allY.Select(Y => ((short)X, (short)Y, C: imagePixels[X, Y])))
        .Where(xyc => xyc.C != EarthPixelColor.None)
        .ToList();
    try
    {
        switch (options.PlacingOrderMode)
        {
            case PlacingOrderMode.Left:
                relativePixelsToBuild = nonEmptyPixels.OrderBy(xy => xy.Item1).ThenBy(e => Guid.NewGuid());
                break;
            case PlacingOrderMode.Right:
                relativePixelsToBuild = nonEmptyPixels.OrderByDescending(xy => xy.Item1).ThenBy(e => Guid.NewGuid());
                break;
            case PlacingOrderMode.Top:
                relativePixelsToBuild = nonEmptyPixels.OrderBy(xy => xy.Item2).ThenBy(e => Guid.NewGuid());
                break;
            case PlacingOrderMode.Bottom:
                relativePixelsToBuild = nonEmptyPixels.OrderByDescending(xy => xy.Item2).ThenBy(e => Guid.NewGuid());
                break;
            case PlacingOrderMode.Outline:
                relativePixelsToBuild = nonEmptyPixels.AsParallel().OrderByDescending(OutlineCriteria);
                break;
            default:
                Random rnd = new Random();
                for (int i = 0; i < nonEmptyPixels.Count; i++)
                {
                    int r = rnd.Next(i, nonEmptyPixels.Count);
                    Pixel tmp = nonEmptyPixels[r];
                    nonEmptyPixels[r] = nonEmptyPixels[i];
                    nonEmptyPixels[i] = tmp;
                }
                relativePixelsToBuild = nonEmptyPixels;
                break;
        }
        pixelsToBuild = relativePixelsToBuild
            .Select(p => ((short)(p.Item1 + options.LeftX), (short)(p.Item2 + options.TopY), p.Item3))
            .ToList();
    }
    catch (Exception ex)
    {
        logger.LogError($"Unhandled exception while calculating pixel order: {ex.Message}");
        return false;
    }
    return true;
}
// Computes the dot (inner) product of two equal-length vectors in parallel.
private static long CrossProductParallel(IList<long> vector1, IList<long> vector2)
{
    if (vector1.Count != vector2.Count)
    {
        return 0;
    }

    // The indexed Select pairs each element with its counterpart at the same position.
    var sum = vector1.AsParallel().Select((a, i) => a * vector2[i]).Sum();
    return sum;
}
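// Quick check of the parallel dot product above:
long dot = CrossProductParallel(new long[] { 1, 2, 3 }, new long[] { 4, 5, 6 });
Console.WriteLine(dot); // 32 = 1*4 + 2*5 + 3*6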
public IEnumerable<File> IsFileExists(string filename, IList<string> folderPaths)
{
    var files = folderPaths
        .AsParallel()
        .WithDegreeOfParallelism(Math.Min(MaxInnerParallelRequests, folderPaths.Count))
        .Select(async path => (Folder)await GetItemAsync(path, ItemType.Folder, false))
        // Blocking on Result here serialises each worker on its own request.
        .SelectMany(fld => fld.Result.Files.Where(file => WebDavPath.PathEquals(file.Name, filename)));
    return files;
}
/// <summary>
/// For optimal graph formation, k-mer length should not be less
/// than half the length of the longest input sequence and
/// cannot be more than the length of the shortest input sequence.
/// Reference for estimating k-mer length from reads: supplementary material of the
/// publication "ABySS: A parallel assembler for short read sequence data".
/// </summary>
/// <param name="sequences">List of input sequences.</param>
/// <returns>Estimated optimal k-mer length.</returns>
public static int EstimateKmerLength(IList<ISequence> sequences)
{
    // Upper bound: the k-mer length cannot exceed the shortest input sequence.
    float maxLength = sequences.AsParallel().Min(s => s.Count);

    // Lower bound: for optimal assembly, the k-mer length should exceed half of the longest sequence.
    float minLength = (float)sequences.AsParallel().Max(s => s.Count) / 2;

    if (minLength < maxLength)
    {
        // Choose the median value between the end-points.
        return (int)Math.Ceiling((minLength + maxLength) / 2);
    }
    else
    {
        // In this case pick maxLength, since this is a hard limit.
        return (int)Math.Floor(maxLength);
    }
}
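// Worked example of the estimate above: for read lengths {36, 40, 50},
// maxLength = 36 (the shortest read) and minLength = 50 / 2 = 25; since
// 25 < 36, the estimated k-mer length is Ceiling((25 + 36) / 2) = 31.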
public IList<TaskSharingViewModel> CreateTaskSharing(IList<IFormFile> file, Guid taskId, Guid staffId)
{
    Args.NotNull(file, nameof(file));

    var taskSharings = new List<TaskSharingViewModel>();
    // Note: AsParallel().ToList() materialises the query, so this ForEach still
    // runs sequentially; that also keeps the List.Add below safe.
    file.AsParallel().ToList().ForEach(p =>
    {
        using (var reader = new StreamReader(p.OpenReadStream()))
        {
            reader.BaseStream.Position = 0;
            var fileName = p.ContentDisposition.Split(';')[2].Split('=')[1].Replace("\"", "");
            var taskSharing = m_TaskSharingManager.CreateTaskSharing(taskId, staffId, fileName, p.ContentType, reader.BaseStream);
            taskSharings.Add(taskSharing.ToViewModel());
        }
    });
    return taskSharings;
}
public ActionResult IndexCompleted(int offSetX, int offSetY, int canvasWidth, int canvasHeight, string errMsg, IList<LogQueryResultDetail> details)
{
    if (string.IsNullOrWhiteSpace(errMsg))
    {
        var list = new List<int[]>();
        //list.Add(new int[] { offSetX, offSetY, offSetX, offSetY + canvasHeight, 255, 0, 0 });
        //list.Add(new int[] { offSetX, offSetY + canvasHeight, offSetX + canvasWidth, offSetY + canvasHeight, 255, 0, 0 });
        //list.Add(new int[] { offSetX + canvasWidth, offSetY + canvasHeight, offSetX + canvasWidth, offSetY, 255, 0, 0 });
        //list.Add(new int[] { offSetX + canvasWidth, offSetY, offSetX, offSetY, 255, 0, 0 });

        var curDT = DateTime.MinValue;
        var curElapsed = -1L;
        foreach (var cur in details.AsParallel().OrderBy(i => i.Elapsed).ThenBy(i => i.CreatedDateTime))
        {
        }

        return Json(new { points = list, errmsg = "" }, JsonRequestBehavior.AllowGet);
    }

    return Json(new { errMsg = errMsg }, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Generate sequences from a list of contig nodes.
/// </summary>
/// <param name="contigGraph">Contig overlap graph.</param>
/// <param name="paths">Scaffold paths.</param>
/// <returns>List of scaffold sequences.</returns>
protected IList<ISequence> GenerateScaffold(ContigGraph contigGraph, IList<ScaffoldPath> paths)
{
    if (contigGraph == null)
    {
        throw new ArgumentNullException("contigGraph");
    }

    if (paths == null)
    {
        throw new ArgumentNullException("paths");
    }

    List<ISequence> scaffolds = paths.AsParallel()
        .Select(t => t.BuildSequenceFromPath(contigGraph, this.kmerLength))
        .ToList();
    // Nodes not consumed by any scaffold path contribute their own sequences.
    IEnumerable<Node> unmarkedNodes = contigGraph.Nodes.AsParallel().Where(t => !t.IsMarked());
    scaffolds.AddRange(unmarkedNodes.AsParallel().Select(t => contigGraph.GetNodeSequence(t)));
    contigGraph.Dispose();
    return scaffolds;
}
/// <summary>
/// Aligns reads to contigs using the k-mer method of alignment.
/// </summary>
/// <param name="contigs">List of contig sequences.</param>
/// <param name="reads">List of read sequences.</param>
/// <param name="kmerLength">K-mer length.</param>
/// <returns>List of contigs.</returns>
public static IList<Contig> ReadContigAlignment(IList<ISequence> contigs, IList<ISequence> reads, int kmerLength)
{
    KmerIndexerDictionary map = SequenceToKmerBuilder.BuildKmerDictionary(reads, kmerLength);
    IList<ContigIndex> contigDatas;

    contigDatas = contigs.AsParallel().Select(contig =>
    {
        IEnumerable<ISequence> kmers = SequenceToKmerBuilder.GetKmerSequences(contig, kmerLength);
        ContigIndex index = new ContigIndex(contig);
        foreach (ISequence kmer in kmers)
        {
            IList<KmerIndexer> positions;
            if (map.TryGetValue(kmer, out positions) ||
                map.TryGetValue(kmer.GetReverseComplementedSequence(), out positions))
            {
                index.ContigReadMatchIndexes.Add(positions);
            }
            else
            {
                index.ContigReadMatchIndexes.Add(new List<KmerIndexer>());
            }
        }

        return index;
    }).ToList();

    return contigDatas.Select(contigData =>
    {
        IList<Task<IList<ReadMap>>> tasks = new List<Task<IList<ReadMap>>>();

        // Stores the indexes of reads for which tasks have been generated.
        IList<long> visitedReads = new List<long>();

        // Creates a task for every read in nodes for a given contig.
        for (int index = 0; index < contigData.ContigReadMatchIndexes.Count; index++)
        {
            int readPosition = index;
            foreach (KmerIndexer kmer in contigData.ContigReadMatchIndexes[index])
            {
                long contigIndex = kmer.SequenceIndex;
                if (!visitedReads.Contains(contigIndex))
                {
                    visitedReads.Add(contigIndex);
                    tasks.Add(
                        Task<IList<ReadMap>>.Factory.StartNew(
                            t => MapRead(readPosition, contigData.ContigReadMatchIndexes, contigIndex, kmerLength),
                            TaskCreationOptions.AttachedToParent));
                }
            }
        }

        Contig contigOutputStructure = new Contig();
        contigOutputStructure.Consensus = contigData.ContigSequence;

        for (int index = 0; index < visitedReads.Count; index++)
        {
            foreach (ReadMap maps in tasks[index].Result)
            {
                Contig.AssembledSequence assembledSeq = new Contig.AssembledSequence()
                {
                    Length = maps.Length,
                    Position = maps.StartPositionOfContig,
                    ReadPosition = maps.StartPositionOfRead,
                    Sequence = reads.ElementAt(visitedReads[index])
                };

                // Compare the contig window with the read window to decide orientation.
                if (new string(
                        contigOutputStructure.Consensus.GetSubSequence(
                            assembledSeq.Position, assembledSeq.Length).Select(a => (char)a).ToArray())
                    .Equals(new string(
                        assembledSeq.Sequence.GetSubSequence(
                            assembledSeq.ReadPosition, assembledSeq.Length).Select(a => (char)a).ToArray())))
                {
                    assembledSeq.IsComplemented = false;
                    assembledSeq.IsReversed = false;
                }
                else
                {
                    assembledSeq.IsComplemented = true;
                    assembledSeq.IsReversed = true;
                }

                contigOutputStructure.Sequences.Add(assembledSeq);
            }
        }

        return contigOutputStructure;
    }).ToList();
}
public ReportingResult ComputeReport(
    IList<FrameworkName> targets,
    string submissionId,
    AnalyzeRequestFlags requestFlags,
    IDictionary<MemberInfo, ICollection<AssemblyInfo>> allDependencies,
    IList<MemberInfo> missingDependencies,
    IDictionary<string, ICollection<string>> unresolvedAssemblies,
    IList<string> unresolvedUserAssemblies,
    IEnumerable<string> assembliesWithErrors)
{
    var types = allDependencies.Keys.Where(dep => dep.TypeDocId == null);
    ReportingResult result = new ReportingResult(targets, types, submissionId, requestFlags);

    missingDependencies
        .AsParallel()
        .ForAll((Action<MemberInfo>)((item) =>
        {
            // The calling assemblies are in Finder...
            if (allDependencies == null)
            {
                lock (result)
                {
                    result.AddMissingDependency(null, item, item.RecommendedChanges);
                }
            }
            else
            {
                ICollection<AssemblyInfo> calledIn;
                if (!allDependencies.TryGetValue(item, out calledIn))
                {
                    return;
                }

                foreach (var callingAsm in calledIn)
                {
                    lock (result)
                    {
                        result.AddMissingDependency(callingAsm, item, item.RecommendedChanges);
                    }
                }
            }
        }));

    if (assembliesWithErrors != null)
    {
        foreach (var error in assembliesWithErrors)
        {
            result.AddAssemblyWithError(error);
        }
    }

    foreach (var unresolvedAssembly in unresolvedUserAssemblies)
    {
        result.AddUnresolvedUserAssembly(
            unresolvedAssembly,
            unresolvedAssemblies == null ? Enumerable.Empty<string>() : unresolvedAssemblies[unresolvedAssembly]);
    }

    // Compute per-assembly report.
    if (allDependencies != null)
    {
        var perAssemblyUsage = ComputePerAssemblyUsage(targets, missingDependencies, allDependencies);
        result.SetAssemblyUsageInfo(perAssemblyUsage);

        // Compute the map of assemblyInfo to name.
        var assemblyNameMap = ComputeAssemblyNames(perAssemblyUsage);
        result.SetAssemblyNameMap(assemblyNameMap);
    }

    return result;
}