/// <summary>
/// Runs the Cover or Summit operation over every chromosome/strand held in
/// <c>chrs</c>, in parallel across chromosomes.
/// </summary>
/// <param name="coverVariation">Selects the Cover or the Summit algorithm.</param>
/// <param name="strand">Strand selector (currently not consulted by this body — kept for interface compatibility).</param>
/// <param name="minAcc">Minimum accumulation threshold.</param>
/// <param name="maxAcc">Maximum accumulation threshold.</param>
/// <param name="aggregate">Aggregate function used to build the per-strand output strategy.</param>
/// <param name="result">Receives the per-chromosome / per-strand outputs.</param>
/// <param name="maxDegreeOfParallelism">Degrees of parallelism for the chromosome loop and the inner Di4 call.</param>
/// <returns>An <see cref="ExecutionReport"/> with the total bookmark count and elapsed time.</returns>
internal ExecutionReport Cover(
    CoverVariation coverVariation,
    char strand,
    int minAcc,
    int maxAcc,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    MaxDegreeOfParallelism maxDegreeOfParallelism)
{
    int totalBookmarks = 0;
    var tmpResult = new FunctionOutput<Output<C, I, M>>();

    // Pre-create every result slot up front so the parallel loop below only
    // overwrites existing entries and never mutates dictionary structure
    // concurrently.
    foreach (var chr in chrs)
    {
        foreach (var sDi4 in chr.Value)
        {
            if (!tmpResult.Chrs.ContainsKey(chr.Key))
                tmpResult.Chrs.TryAdd(chr.Key, new ConcurrentDictionary<char, List<Output<C, I, M>>>());
            if (!tmpResult.Chrs[chr.Key].ContainsKey(sDi4.Key))
                tmpResult.Chrs[chr.Key].TryAdd(sDi4.Key, new List<Output<C, I, M>>());
        }
    }

    _stpWtch.Restart();
    Parallel.ForEach(
        chrs,
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism.chrDegree },
        chr =>
        {
            // One strategy instance per chromosome partition (matches original behavior).
            IOutput<C, I, M, Output<C, I, M>> outputStrategy =
                new AggregateFactory<C, I, M>().GetAggregateFunction(aggregate);

            foreach (var sDi4 in chr.Value)
            {
                // BUGFIX: `totalBookmarks += ...` was a plain, non-atomic
                // read-modify-write performed concurrently by multiple threads,
                // silently losing updates. Atomic add fixes the race.
                System.Threading.Interlocked.Add(ref totalBookmarks, sDi4.Value.bookmarkCount);

                switch (coverVariation)
                {
                    case CoverVariation.Cover:
                        sDi4.Value.Cover<Output<C, I, M>>(outputStrategy, minAcc, maxAcc, maxDegreeOfParallelism.di4Degree);
                        break;

                    case CoverVariation.Summit:
                        sDi4.Value.Summit<Output<C, I, M>>(outputStrategy, minAcc, maxAcc, maxDegreeOfParallelism.di4Degree);
                        break;
                }

                tmpResult.Chrs[chr.Key][sDi4.Key] = outputStrategy.output;
            }
        });
    _stpWtch.Stop();

    result = tmpResult;
    return new ExecutionReport(totalBookmarks, _stpWtch.Elapsed);
}
/// <summary>
/// Public entry point for variant analysis; forwards the call to the
/// underlying <c>genome</c> instance (note the argument-order swap between
/// <paramref name="strand"/> and <paramref name="references"/>).
/// </summary>
public ExecutionReport VariantAnalysis(
    char strand,
    Dictionary<string, Dictionary<char, List<I>>> references,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    MaxDegreeOfParallelism maxDegreeOfParallelism,
    out Dictionary<uint, int> newRes)
    => genome.VariantAnalysis(references, strand, aggregate, out result, out newRes, maxDegreeOfParallelism);
/// <summary>
/// Public entry point for the map operation; forwards the call to the
/// underlying <c>genome</c> instance (note the argument-order swap between
/// <paramref name="strand"/> and <paramref name="references"/>).
/// </summary>
public ExecutionReport Map(
    char strand,
    Dictionary<string, Dictionary<char, List<I>>> references,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    MaxDegreeOfParallelism maxDegreeOfParallelism)
    => genome.Map(references, strand, aggregate, out result, maxDegreeOfParallelism);
/// <summary>
/// Public entry point for the cover/summit operation; forwards the call
/// unchanged to the underlying <c>genome</c> instance.
/// </summary>
public ExecutionReport Cover(
    CoverVariation coverVariation,
    char strand,
    int minAcc,
    int maxAcc,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    MaxDegreeOfParallelism maxDegreeOfParallelism)
    => genome.Cover(coverVariation, strand, minAcc, maxAcc, aggregate, out result, maxDegreeOfParallelism);
/// <summary>
/// Runs variant analysis of the stored intervals against the given reference
/// intervals, in parallel across reference chromosomes, and aggregates the
/// per-sample coverage counts (<c>samplesCV</c>) into a single dictionary.
/// </summary>
/// <param name="references">Reference intervals keyed by chromosome, then strand.</param>
/// <param name="strand">Strand selector (currently not consulted by this body — kept for interface compatibility).</param>
/// <param name="aggregate">Aggregate selector (currently not consulted by this body — kept for interface compatibility).</param>
/// <param name="result">Receives the per-chromosome / per-strand outputs.</param>
/// <param name="newRes">Receives the merged per-sample counts accumulated across all chromosomes/strands.</param>
/// <param name="maxDegreeOfParallelism">Degrees of parallelism for the chromosome loop and the inner Di4 call.</param>
/// <returns>An <see cref="ExecutionReport"/> with the total reference interval count and elapsed time.</returns>
internal ExecutionReport VariantAnalysis(
    Dictionary<string, Dictionary<char, List<I>>> references,
    char strand,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    out Dictionary<uint, int> newRes,
    MaxDegreeOfParallelism maxDegreeOfParallelism)
{
    int totalIntervals = 0;
    var tmpResults = new FunctionOutput<Output<C, I, M>>();

    // Pre-create every result slot up front so the parallel loop below only
    // overwrites existing entries and never mutates dictionary structure
    // concurrently.
    foreach (var refChr in references)
    {
        if (!chrs.ContainsKey(refChr.Key))
            continue;

        foreach (var refStrand in refChr.Value)
        {
            if (!chrs[refChr.Key].ContainsKey(refStrand.Key))
                continue;
            if (!tmpResults.Chrs.ContainsKey(refChr.Key))
                tmpResults.Chrs.TryAdd(refChr.Key, new ConcurrentDictionary<char, List<Output<C, I, M>>>());
            if (!tmpResults.Chrs[refChr.Key].ContainsKey(refStrand.Key))
                tmpResults.Chrs[refChr.Key].TryAdd(refStrand.Key, new List<Output<C, I, M>>());
        }
    }

    // BUGFIX: the original locked on an interned string literal ("Vahid").
    // String literals are shared process-wide, so any unrelated code locking
    // the same literal would contend (or deadlock) with this method. A
    // dedicated private lock object has no such aliasing.
    var syncRoot = new object();
    var tmpRes = new Dictionary<uint, int>();

    _stpWtch.Restart();
    Parallel.ForEach(
        references,
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism.chrDegree },
        refChr =>
        {
            IOutput<C, I, M, Output<C, I, M>> outputStrategy = new VariantAnalysisOutputStrategy<C, I, M>();

            if (chrs.ContainsKey(refChr.Key))
            {
                foreach (var refStrand in refChr.Value)
                {
                    if (!chrs[refChr.Key].ContainsKey(refStrand.Key))
                        continue;

                    chrs[refChr.Key][refStrand.Key].VariantAnalysis<Output<C, I, M>>(
                        ref outputStrategy, refStrand.Value, maxDegreeOfParallelism.di4Degree);
                    tmpResults.Chrs[refChr.Key][refStrand.Key] = outputStrategy.output;

                    // BUGFIX: `totalIntervals += ...` was a non-atomic
                    // read-modify-write racing across threads; atomic add
                    // prevents lost updates.
                    System.Threading.Interlocked.Add(ref totalIntervals, refStrand.Value.Count);

                    // Merge this strand's per-sample counts into the shared
                    // accumulator under the lock.
                    lock (syncRoot)
                    {
                        foreach (var item in ((VariantAnalysisOutputStrategy<C, I, M>)outputStrategy).samplesCV)
                        {
                            if (tmpRes.ContainsKey(item.Key))
                                tmpRes[item.Key] += item.Value;
                            else
                                tmpRes.Add(item.Key, item.Value);
                        }
                    }
                }
            }
        });
    _stpWtch.Stop();

    result = tmpResults;
    newRes = tmpRes;
    return new ExecutionReport(totalIntervals, _stpWtch.Elapsed);
}
/// <summary>
/// Maps the given reference intervals onto the stored intervals, in parallel
/// across reference chromosomes, writing one output list per
/// chromosome/strand pair that exists in both <paramref name="references"/>
/// and <c>chrs</c>.
/// </summary>
/// <param name="references">Reference intervals keyed by chromosome, then strand.</param>
/// <param name="strand">Strand selector (currently not consulted by this body — kept for interface compatibility).</param>
/// <param name="aggregate">Aggregate function used to build the per-chromosome output strategy.</param>
/// <param name="result">Receives the per-chromosome / per-strand outputs.</param>
/// <param name="maxDegreeOfParallelism">Degrees of parallelism for the chromosome loop and the inner Di4 call.</param>
/// <returns>An <see cref="ExecutionReport"/> with the total reference interval count and elapsed time.</returns>
internal ExecutionReport Map(
    Dictionary<string, Dictionary<char, List<I>>> references,
    char strand,
    Aggregate aggregate,
    out FunctionOutput<Output<C, I, M>> result,
    MaxDegreeOfParallelism maxDegreeOfParallelism)
{
    int totalIntervals = 0;
    var tmpResults = new FunctionOutput<Output<C, I, M>>();

    // Pre-create every result slot up front so the parallel loop below only
    // overwrites existing entries and never mutates dictionary structure
    // concurrently.
    foreach (var refChr in references)
    {
        if (!chrs.ContainsKey(refChr.Key))
            continue;

        foreach (var refStrand in refChr.Value)
        {
            if (!chrs[refChr.Key].ContainsKey(refStrand.Key))
                continue;
            if (!tmpResults.Chrs.ContainsKey(refChr.Key))
                tmpResults.Chrs.TryAdd(refChr.Key, new ConcurrentDictionary<char, List<Output<C, I, M>>>());
            if (!tmpResults.Chrs[refChr.Key].ContainsKey(refStrand.Key))
                tmpResults.Chrs[refChr.Key].TryAdd(refStrand.Key, new List<Output<C, I, M>>());
        }
    }

    _stpWtch.Restart();
    Parallel.ForEach(
        references,
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism.chrDegree },
        refChr =>
        {
            IOutput<C, I, M, Output<C, I, M>> outputStrategy =
                new AggregateFactory<C, I, M>().GetAggregateFunction(aggregate);

            if (chrs.ContainsKey(refChr.Key))
            {
                foreach (var refStrand in refChr.Value)
                {
                    if (!chrs[refChr.Key].ContainsKey(refStrand.Key))
                        continue;

                    chrs[refChr.Key][refStrand.Key].Map<Output<C, I, M>>(
                        ref outputStrategy, refStrand.Value, maxDegreeOfParallelism.di4Degree);
                    tmpResults.Chrs[refChr.Key][refStrand.Key] = outputStrategy.output;

                    // BUGFIX: `totalIntervals += ...` was a non-atomic
                    // read-modify-write racing across threads; atomic add
                    // prevents lost updates.
                    System.Threading.Interlocked.Add(ref totalIntervals, refStrand.Value.Count);
                }
            }
        });
    _stpWtch.Stop();

    result = tmpResults;
    return new ExecutionReport(totalIntervals, _stpWtch.Elapsed);
}