/// <summary>
/// Entry point. Parses the command line, expands the reads file patterns into a
/// distinct set of files, probes the first reads file for its format (FASTA/FASTQ)
/// and longest read, loads the k-mer table from the .cbt file, runs PairWorker
/// threads over every reads file, then sorts/merges the collected pairs and
/// writes them to a .prs file (same base name as the .cbt file).
/// Usage: GenerateMerPairs [-m min] [-t threads] cbtFN readsPattern-or-file-names
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("usage: GenerateMerPairs [-m min] [-t threads] cbtFN readsPattern or file names (" + version + ")");
        return;
    }

    List<string> FNParams = new List<string>(); // the .cbt name and the set of file names or patterns
    int noThreads = 1;                          // no. of healing threads to run in parallel (1 thread is default)
    int minLoadReps = 3;                        // min rep count needed before mer will be loaded into uniqueMers table or saved as a pair

    // scan the args: options starting with '-' are consumed here (with their values);
    // everything else is collected into FNParams as a file name or pattern
    for (int p = 0; p < args.Length; p++)
    {
        if (args[p][0] == '-')
        {
            args[p] = args[p].ToLower();

            // -m | -min : minimum rep count for a mer to be loaded/saved
            if (args[p] == "-m" || args[p] == "-min")
            {
                if (!CheckForParamValue(p, args.Length, "minReps number expected after -m|-min"))
                {
                    return;
                }
                try
                {
                    minLoadReps = Convert.ToInt32(args[p + 1]);
                }
                catch
                {
                    Console.WriteLine("expected a number for the -m|-min parameter: " + args[p + 1]);
                    return;
                }
                p++;        // skip over the option's value
                continue;
            }

            // -t | -threads : number of pairing worker threads
            if (args[p] == "-t" || args[p] == "-threads")
            {
                if (!CheckForParamValue(p, args.Length, "number expected after -t|-threads"))
                {
                    return;
                }
                try
                {
                    noThreads = Convert.ToInt32(args[p + 1]);
                }
                catch
                {
                    Console.WriteLine("expected a number for the -t|-threads parameter: " + args[p + 1]);
                    return;
                }
                p++;        // skip over the option's value
                continue;
            }

            // anything else starting with '-' is an error
            Console.WriteLine("unrecognised option: " + args[p]);
            Console.WriteLine("usage: generateMerPairs [-m min] [-t threads] cbtFN readsPattern or file names (" + version + ")");
            return;
        }

        FNParams.Add(args[p]);
    }

    // need at least the .cbt file name plus one reads file name/pattern
    if (FNParams.Count < 2)
    {
        Console.WriteLine("expected a cbt file name and at least one reads file name or pattern");
        return;
    }

    // take the cbt file name from the start of the non-option list
    string cbtFN = FNParams[0];
    FNParams.RemoveAt(0);

    if (FNParams.Count == 0)
    {
        Console.WriteLine("did not find any reads file names or patterns");
        return;
    }

    // output pairs file: same name as the .cbt file with a .prs extension
    string pairsFN = cbtFN.Replace(".cbt", ".prs");

    // split each reads name/pattern into its directory part and file-name part
    // so the patterns can be expanded with Directory.GetFiles below
    List<string> readsFileNames = new List<string>(FNParams.Count);
    List<string> readsFilePaths = new List<string>(FNParams.Count);
    foreach (string readsFNP in FNParams)
    {
        string readsFileName;
        string readsFilePath;
        GetPathFN(readsFNP, out readsFilePath, out readsFileName);
        readsFilePaths.Add(readsFilePath);
        readsFileNames.Add(readsFileName);
    }

    // expand any wildcard patterns into concrete file names
    List<string> expandedReadsFNs = new List<string>();
    for (int f = 0; f < FNParams.Count; f++)
    {
        string[] matchedReadsFNs = Directory.GetFiles(readsFilePaths[f], readsFileNames[f], SearchOption.TopDirectoryOnly);
        foreach (string matchedReadsFN in matchedReadsFNs)
        {
            expandedReadsFNs.Add(matchedReadsFN);
        }
    }

    // make sure there aren't any duplicates in the file list (seems to be a bug on the Cherax SGI HPC system and it returns each file name twice)
    List<string> distinctReadsFNs = new List<string>();
    foreach (string fn in expandedReadsFNs)
    {
        if (!distinctReadsFNs.Contains(fn))
        {
            distinctReadsFNs.Add(fn);
        }
    }

    // finally... the set of fully qualified, distinct reads files
    string[] readsFNs;
    readsFNs = distinctReadsFNs.ToArray();
    if (readsFNs.Length == 0)
    {
        Console.WriteLine("No matching read files found");
        return;
    }

    // open a StreamReader for every reads file
    // NOTE(review): these readers are never read from or closed in this method — the
    // pairing loop below opens its own StreamReader per file. Looks like a leak of
    // file handles (or the array is consumed elsewhere via the static field) — verify.
    int noOfReadsFiles = readsFNs.Length;
    readsFiles = new StreamReader[noOfReadsFiles];
    for (int f = 0; f < noOfReadsFiles; f++)
    {
        string readsFN = readsFNs[f];
        readsFiles[f] = new StreamReader(readsFN);
    }

    // look at the first file to determine the file format and possible read length
    StreamReader testReader = new StreamReader(readsFNs[0]);
    char headerChar = (char)testReader.Peek();
    if (headerChar == '>')
    {
        readsFormat = MerStrings.formatFNA;       // '>' header --> FASTA
    }
    if (headerChar == '@')
    {
        readsFormat = MerStrings.formatFASTQ;     // '@' header --> FASTQ
    }

    // sample up to 20 reads to find the longest read length
    int readLength = 0;
    for (int i = 0; i < 20; i++)
    {
        string nextRead = MerStrings.ReadRead(testReader, readsFormat);
        if (nextRead == null)
        {
            break;
        }
        int nextLength = nextRead.Length;
        if (nextLength > readLength)
        {
            readLength = nextLength;
        }
    }
    testReader.Close();

    // have to be able to fit at least two full mers into the read (no overlaps)
    if (readLength < 2 * merSize)
    {
        Console.WriteLine("reads too short to generate pairs: " + readLength);
        return;
    }

    if (!File.Exists(cbtFN))
    {
        Console.WriteLine(".cbt file not found: " + cbtFN);
        return;
    }

    // disabled debugging code: pre-load a known-good pairs file for comparison
    //string knownPairsFN = "C.sporogenesRaw_25_Copy_1.prs";
    //BinaryReader knownPairs = new BinaryReader(File.Open(knownPairsFN, FileMode.Open, FileAccess.Read));
    //knownPairs.ReadInt32();
    //while (true)
    //{
    //    ulong mer = 0;
    //    int count = 0;
    //    try
    //    {
    //        mer = knownPairs.ReadUInt64();
    //        count = knownPairs.ReadInt32();
    //        goodPairs.Add(mer, count);
    //    }
    //    catch
    //    {
    //        break;
    //    }
    //}
    //knownPairs.Close();
    //Console.WriteLine("loaded " + goodPairs.Count + " good mers from " + knownPairsFN);

    long loadedUniqueMers = 0;
    long loadedTotalMers = 0;

    // load the .cbt file into a merTable (either a hash table (small) or a sorted array (large));
    // this also sets the static merSize and averageDepth from the file contents
    MerStrings.LoadCBTFile(cbtFN, minLoadReps, 0, 0, minLoadReps,
                           out uniqueMers, out merSize, out averageDepth,
                           out loadedUniqueMers, out loadedTotalMers);

    if (merSize < merStubSize)
    {
        Console.WriteLine("mers in .cbt file are shorter than merStub size: " + merSize + " < " + merStubSize);
        return;
    }

    // per-thread partitioned tables that will accumulate the generated pairs
    uniquePairs = new MerCollections.MerTables(loadedUniqueMers, noThreads);

    // calculate a gap size based on the first read, then clamp it to [minGap, maxGap];
    // a pair is two merStub-sized stubs separated by 'gap' bases
    gap = (readLength - endGuard) / 2 - (merStubSize * 2);
    if (gap < minGap)
    {
        gap = minGap;
    }
    if (gap > maxGap)
    {
        gap = maxGap;
    }
    pairStride = merStubSize + gap + merStubSize;

    // start the monitor/synchronising thread
    Thread monitorProgress = new Thread(RateReporter);
    monitorProgress.Priority = ThreadPriority.AboveNormal;
    monitorProgress.Start();

    DateTime pairingStart = DateTime.Now;

    // process each reads file in turn: noThreads PairWorkers share one buffered reader,
    // and Main blocks until every worker has signalled completion before moving on
    foreach (string readsFN in readsFNs)
    {
        Console.WriteLine("Generating pairs from " + readsFN);
        StreamReader reads = new StreamReader(readsFN, Encoding.ASCII, false, 1000000);
        BufferedReader bufferedReads = new BufferedReader(readsFormat, reads, null);

        threadFinished = new EventWaitHandle[noThreads];
        int threadNo = 0;
        for (int i = 0; i < noThreads; i++)
        {
            threadFinished[i] = new EventWaitHandle(false, EventResetMode.AutoReset);
        }
        for (int t = 0; t < noThreads; t++)
        {
            threadParams workerParam = new threadParams();
            workerParam.threadNo = threadNo;
            workerParam.bufferedReadsFile = bufferedReads;
            ThreadPool.QueueUserWorkItem(new WaitCallback(PairWorker), workerParam);
            threadNo++;
        }
        // and wait for them all to finish
        for (int t = 0; t < noThreads; t++)
        {
            threadFinished[t].WaitOne();
        }
    }

    // all pairs collected: write the gap size, sort every partition (and any
    // per-thread overflow tables), then merge the sorted tables into the .prs file
    BinaryWriter pairsFile = new BinaryWriter(File.Open(pairsFN, FileMode.Create, FileAccess.Write));
    pairsFile.Write(gap);

    for (int pi = 0; pi < uniquePairs.noOfPartitions; pi++)
    {
        totalPairsGenerated += uniquePairs.repeatedMers[pi].Sort();
    }
    for (int ti = 0; ti < noThreads; ti++)
    {
        if (uniquePairs.overflowMers[ti] != null)
        {
            totalPairsGenerated += uniquePairs.overflowMers[ti].Sort();
        }
    }

    MergeAndWrite(pairsFile, uniquePairs.repeatedMers, uniquePairs.overflowMers);
    pairsFile.Close();
    StopMonitorThread(monitorProgress);

    //Console.WriteLine(totalDeepUnbalancedReads + " deep unbalanced reads");
    //Console.WriteLine(totalReadsProcessed + " reads processed");
    Console.WriteLine("wrote " + totalPairsWritten + " pairs from " + totalReadsRead + " reads in " +
                      (DateTime.Now - pairingStart).TotalSeconds.ToString("#.0") + "s");
}
/// <summary>
/// Thread-pool worker: reads batches of reads from the shared BufferedReader,
/// tiles each read into k-mers, canonicalises them (min of mer and its reverse
/// complement), looks up their depths, skips reads flagged as deeply unbalanced,
/// and generates/stores canonical pairs into the shared uniquePairs tables.
/// Signals threadFinished[threadNo] when the file is exhausted.
/// </summary>
/// <param name="param">a threadParams carrying this worker's threadNo and the shared bufferedReadsFile</param>
static void PairWorker(object param)
{
    threadParams threadParam = (threadParams)param;
    int threadNo = (int)threadParam.threadNo;
    BufferedReader readsFile = threadParam.bufferedReadsFile;
    bool EOF = false;

    // pre-allocate reusable Sequence buffers for one batch of reads/headers
    Sequence[] readHeaderBatch = new Sequence[batchSize];
    Sequence[] readBatch = new Sequence[batchSize];
    for (int i = 0; i < batchSize; i++)
    {
        readHeaderBatch[i] = new Sequence(defaultHeaderLength);
        readBatch[i] = new Sequence(defaultReadLength);
    }
    int readsInBatch = 0;

    // per-thread tallies, folded into the shared totals via Interlocked at the end
    long threadReadsRead = 0;
    long threadReadsProcessed = 0;

    // reusable per-read scratch arrays (one slot per mer position in the read)
    ulong[] mersFromRead = new ulong[1000];
    bool[] merValid = new bool[1000];
    ulong[] canonicalMersFromRead = new ulong[1000];
    int[] plusDepths = new int[1000];
    int[] rcDepths = new int[1000];

    bool deepUnbalanced = false;
    long threadDeepUnbalancedCount = 0;
    int minDepth = averageDepth / 20;   // depth floor: 5% of the average mer depth

    while (!EOF)
    {
        // only the batch fetch is serialised; each worker then processes its batch independently
        lock (readsFile)
        {
            readsInBatch = readsFile.ReadReads(batchSize, readHeaderBatch, readBatch, null, null);
            if (readsInBatch != batchSize)
            {
                EOF = true;   // short batch --> end of file
            }
            threadReadsRead += readsInBatch;
        }

        // NOTE(review): this shared counter is updated outside the lock without
        // Interlocked — looks racy; presumably tolerable for progress reporting, but verify.
        progressReadsProcessed += readsInBatch;

        for (int r = 0; r < readsInBatch; r++)
        {
            threadReadsProcessed++;
            Sequence read = readBatch[r];
            int readLength = read.Length;

            // need room for at least two non-overlapping mers
            if (readLength < 2 * merSize)
            {
                continue;
            }

            // short reads are scanned at every position, long reads at every second position.
            // NOTE(review): stepSize is not declared locally, so it appears to be a shared
            // static field written by every worker concurrently — a data race if threads see
            // mixed read lengths; confirm whether it should be a local variable.
            if (readLength < 200)
            {
                stepSize = 1;
            }
            else
            {
                stepSize = 2;
            }

            // disabled debugging code for trapping a specific read
            //string target = "GTATATAATAAAGTTTTTTATAAAATTTTAAAAGATCATTATAAAAATATAATAACAATTAATATAATATTAATATACTTTAGTTATAGCTATAAATCTTT";
            //if (read.ToString() == target)
            //    Debugger.Break();

            // tile the read into mers; merValid[i] is false where the window held non-ACGT bases
            int merCount = MerStrings.GenerateMersFromRead(read, merSize, ref mersFromRead, ref merValid);

            // canonical form of each mer = min(mer, reverse-complement(mer))
            for (int i = 0; i < merCount; i++)
            {
                if (merValid[i])
                {
                    ulong rcMer = MerStrings.ReverseComplement(mersFromRead[i], merSize);
                    if (rcMer < mersFromRead[i])
                    {
                        canonicalMersFromRead[i] = rcMer;
                    }
                    else
                    {
                        canonicalMersFromRead[i] = mersFromRead[i];
                    }
                }
            }

            // fetch +/rc strand depths for every mer and flag deep strand-unbalanced reads
            GetDepthsForRead(merCount, mersFromRead, canonicalMersFromRead, merValid,
                             plusDepths, rcDepths, minDepth, out deepUnbalanced);
            if (deepUnbalanced)
            {
                threadDeepUnbalancedCount++;
                continue;
            }

            ulong pair;
            int pairDepth;
            bool gotPair;
            int startingM = 0;
            int lastM = read.Length - pairStride;   // last start position that still fits a whole pair

            // generate pairs up to the end of the read (used to only generate from first part)
            while (startingM < lastM)
            {
                if (merValid[startingM])
                {
                    gotPair = GeneratePairFromRead(mersFromRead, merValid, plusDepths, rcDepths,
                                                   startingM, merCount, minDepth, out pair, out pairDepth);
                    if (gotPair)
                    {
                        // store the canonical form of the pair (min of pair and its 32-base reverse complement)
                        ulong rcPair = MerStrings.ReverseComplement(pair, 32);
                        if (rcPair < pair)
                        {
                            pair = rcPair;
                        }
                        //if (pair == 0x054A0985B90B34D1)
                        //    Debugger.Break();
                        uniquePairs.AddIfNotPresent(pair, pairDepth, threadNo);
                        //lock (pairDictionary)
                        //{
                        //    if (!pairDictionary.ContainsKey(pair))
                        //        pairDictionary.Add(pair, pairDepth);
                        //}
                        //Interlocked.Increment(ref GPTrue);
                        //gotPairFromRead = true;
                    }
                    //else
                    //    Interlocked.Increment(ref GPfalse);
                }
                startingM += stepSize;
            }
            //if (!gotPairFromRead)
            //    threadReadsWithNoPairs++;
        }
    }

    // fold this thread's tallies into the shared totals, then signal completion to Main
    Interlocked.Add(ref totalReadsProcessed, threadReadsProcessed);
    Interlocked.Add(ref totalReadsRead, threadReadsRead);
    Interlocked.Add(ref totalDeepUnbalancedReads, threadDeepUnbalancedCount);
    threadFinished[threadNo].Set();
}