public GenotypeCalls(BinaryReader gtc_br, bool ignore_version = false, bool check_write_complete = true)
{
    /*
     * Constructor: copies the GTC content into an in-memory stream, then
     * parses the header and table of contents.
     *
     * Args:
     *   gtc_br(BinaryReader): reader positioned on GTC file content
     *   ignore_version(bool): skip the automated check on the file
     *       version, not recommended (default: false)
     *   check_write_complete(bool): verify the file's write-complete
     *       marker and throw if the file is truncated (default: true)
     *
     * Throws:
     *   Exception: bad format identifier, unsupported version, or an
     *       incomplete file.
     */
    this.gtc_stream = new MemoryStreamNotDispose();
    gtc_br.BaseStream.CopyTo(this.gtc_stream);
    BinaryReader gtc_handle = new BinaryReader(this.gtc_stream);
    gtc_handle.BaseStream.Seek(0, SeekOrigin.Begin);

    // A GTC file starts with the three ASCII bytes "gtc".
    string identifier = new string(gtc_handle.ReadChars(3));
    if (identifier != "gtc")
    {
        throw new Exception("GTC format error: bad format identifier");
    }

    this.version = (int)(byte)read_byte(gtc_handle);
    // BUGFIX: ignore_version was accepted but never honored — the version
    // check ran unconditionally. It is now skipped when the caller opts out.
    if (!ignore_version && !new List <int>(GenotypeCalls.supported_version).Contains(version))
    {
        throw new Exception("Unsupported GTC File version (" + this.version.ToString() + ")");
    }
    this.number_toc_entries = (int)read_int(gtc_handle);

    // Parse the table of contents and map each toc entry id to its byte
    // offset within the stream.
    this.toc_table = new Dictionary <short, int>();
    for (int toc_idx = 0; toc_idx < this.number_toc_entries; ++toc_idx)
    {
        short id = gtc_handle.ReadInt16();
        int offset = (int)gtc_handle.ReadUInt32();
        this.toc_table[id] = offset;
    }

    if (check_write_complete && !this.is_write_complete())
    {
        throw new Exception("GTC file is incomplete");
    }
}
public static MemoryStreamNotDispose beadPoolManifestWriter(BeadPoolManifest bpm)
{
    /*
     * Serializes a BeadPoolManifest to an in-memory stream in BPM binary
     * format (identifier "BPM", format byte 1, header fields, per-locus
     * names, normalization ids, and locus entries).
     *
     * Args:
     *   bpm(BeadPoolManifest): manifest to serialize; not modified.
     *
     * Returns:
     *   MemoryStreamNotDispose: stream containing the serialized manifest
     *   (position is left at the end of the written data).
     */
    MemoryStreamNotDispose fs = new MemoryStreamNotDispose();
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        bw.Write(System.Text.Encoding.ASCII.GetBytes("BPM"));
        bw.Write((byte)1);
        bw.Write(bpm.version);
        bw.Write(bpm.manifest_name);
        bw.Write(bpm.control_config);
        bw.Write(bpm.num_loci);
        bw.Write(bpm.after_num_loci);
        for (int idx = 0; idx < bpm.num_loci; ++idx)
        {
            bw.Write(bpm.names[idx]);
        }
        // On disk the normalization id is stored without the assay-type
        // component (the reader presumably adds 100 * assay_type back when
        // loading — verify against the matching reader).
        // BUGFIX: the on-disk value is now computed locally instead of
        // subtracting into bpm.normalization_ids in place, which corrupted
        // the manifest object for any use after writing.
        for (int idx = 0; idx < bpm.num_loci; ++idx)
        {
            bw.Write((byte)(bpm.normalization_ids[idx] - 100 * bpm.assay_types[idx]));
        }
        for (int idx = 0; idx < bpm.num_loci; ++idx)
        {
            LocusEntryWriter.Writer(bw, bpm.locus_entries[idx]);
        }
    }
    return(fs);
}
async Task <APIGatewayProxyResponse> CreateResponse(JObject input, ILambdaContext context)
{
    /*
     * Lifts a plink BIM file over to another build using a gzip-compressed
     * chain file. Both inputs are read from S3 (locations taken from the
     * request payload, falling back to stored preferences) and the
     * converted BIM is written back to S3 under bimKeyNew.
     *
     * Args:
     *   input(JObject): request payload; chainKey, bimKey, bimKeyNew are
     *       required; *BucketRegion / *BucketName keys optionally override
     *       the preference defaults.
     *   context(ILambdaContext): Lambda context, used for logging.
     *
     * Returns:
     *   APIGatewayProxyResponse with JSON/CORS headers and an empty body;
     *   StatusCode is 500 when input is null, 200 otherwise.
     */
    JObject preference = new Preference().preference;
    int statusCode = (input != null) ? (int)HttpStatusCode.OK : (int)HttpStatusCode.InternalServerError;
    string body = "";

    // BUGFIX: the original computed a 500 status for a null input but then
    // dereferenced input anyway and crashed with a NullReferenceException;
    // return the 500 response immediately instead.
    if (input == null)
    {
        return new APIGatewayProxyResponse
        {
            StatusCode = statusCode,
            Body = body,
            Headers = new Dictionary <string, string> { { "Content-Type", "application/json" }, { "Access-Control-Allow-Origin", "*" } }
        };
    }

    string chainBucketRegion = input.ContainsKey("chainBucketRegion") ? input["chainBucketRegion"].ToString() : preference["S3"]["Chain"]["Region"].ToString();
    string chainBucketName = input.ContainsKey("chainBucketName") ? input["chainBucketName"].ToString() : preference["S3"]["Chain"]["Name"].ToString();
    string chainKey = input["chainKey"].ToString();
    string bimBucketRegion = input.ContainsKey("bimBucketRegion") ? input["bimBucketRegion"].ToString() : preference["S3"]["BPM"]["Region"].ToString();
    string bimBucketName = input.ContainsKey("bimBucketName") ? input["bimBucketName"].ToString() : preference["S3"]["BPM"]["Name"].ToString();
    string bimKey = input["bimKey"].ToString();
    string bimKeyNew = input["bimKeyNew"].ToString();

    // BUGFIX: await the S3 tasks instead of blocking on .Result/.Wait();
    // blocking inside an async method wastes a thread-pool thread and risks
    // deadlock. Also fixed the "laoding"/"laoded" typos in log messages.
    context.Logger.LogLine("chain file loading");
    Chain chain;
    using (Stream stream = await readStreamFromS3ToMemory(bucketRegion: chainBucketRegion, bucketName: chainBucketName, keyName: chainKey))
    using (GZipStream gs = new GZipStream(stream, CompressionMode.Decompress))
    {
        // The chain file is stored gzip-compressed.
        chain = new Chain(stream: gs);
    }
    context.Logger.LogLine("chain file loaded");

    context.Logger.LogLine("bim file loading");
    List <BIM> bims;
    using (Stream stream = await readStreamFromS3ToMemory(bucketRegion: bimBucketRegion, bucketName: bimBucketName, keyName: bimKey))
    using (StreamReader sr = new StreamReader(stream))
    {
        // BUGFIX: PLINQ is unordered by default; AsOrdered() keeps the
        // lifted records in the same order as the source BIM file, which
        // must stay aligned with the companion .bed data.
        bims = Plink.Plink.readBIMFromStream(sr).AsParallel().AsOrdered().Select((bim, bim_idx) =>
        {
            return(bimLiftOver(chain: chain, input: bim));
        }).ToList();
    }
    context.Logger.LogLine("bim file loaded");

    context.Logger.LogLine("bim file writing");
    using (MemoryStreamNotDispose ms = new MemoryStreamNotDispose())
    using (StreamWriter sw = new StreamWriter(ms))
    {
        Plink.Plink.writeBIMToStream(sw_bim: sw, bims: bims);
        // BUGFIX: flush the StreamWriter before uploading, otherwise the
        // tail of the BIM data may still sit in the writer's buffer.
        sw.Flush();
        await writeStreamFromStreamToS3(bucketRegion: bimBucketRegion, bucketName: bimBucketName, keyName: bimKeyNew, stream: ms);
    }
    context.Logger.LogLine("bim file written");

    var response = new APIGatewayProxyResponse
    {
        StatusCode = statusCode,
        Body = body,
        Headers = new Dictionary <string, string> { { "Content-Type", "application/json" }, { "Access-Control-Allow-Origin", "*" } }
    };
    return(response);
}
public void change_sample_name(string sample_name)
{
    /*
     * Rewrites the in-memory GTC stream with a new sample name: copies the
     * header, emits an adjusted table of contents, writes the new name, and
     * appends the remainder of the original stream, then replaces
     * this.gtc_stream with the rebuilt stream. Every toc offset located
     * after the old sample name is shifted by the size difference between
     * the old and new serialized names.
     *
     * Args:
     *   sample_name(string): replacement sample name.
     *
     * NOTE(review): this assumes the sample-name string sits immediately
     * after the table of contents in the original stream (the rebuilt
     * stream writes it there) — confirm against the GTC writer's layout.
     */
    MemoryStreamNotDispose new_stream = new MemoryStreamNotDispose();
    using (BinaryReader br_origin = new BinaryReader(this.gtc_stream))
    using (BinaryWriter bw_new = new BinaryWriter(new_stream))
    // Scratch stream used only to measure the serialized size of the new
    // sample name (BinaryWriter.Write(string) is length-prefixed).
    using (MemoryStream temp = new MemoryStream())
    using (BinaryWriter bw_temp = new BinaryWriter(temp))
    {
        Console.WriteLine("Length of original stream: " + br_origin.BaseStream.Length);
        br_origin.BaseStream.Seek(0, SeekOrigin.Begin);
        int sample_name_position = this.toc_table[GenotypeCalls.__ID_SAMPLE_NAME];
        Console.WriteLine("Sample name position: " + sample_name_position);
        // identifier
        bw_new.Write(br_origin.ReadChars(3));
        // version
        bw_new.Write(br_origin.ReadByte());
        // number of toc entries
        bw_new.Write(br_origin.ReadInt32());
        // Skip past the original toc entries; the updated table is written
        // later from this.toc_table.
        for (int toc_idx = 0; toc_idx < this.number_toc_entries; ++toc_idx)
        {
            br_origin.ReadInt16();
            br_origin.ReadUInt32();
        }
        long position_before_sample_name = br_origin.BaseStream.Position;
        Console.WriteLine("Position before sample name: " + position_before_sample_name);
        // Consume the old (length-prefixed) sample name so the reader ends
        // up positioned at the data that follows it.
        br_origin.ReadString();
        long position_after_sample_name = br_origin.BaseStream.Position;
        Console.WriteLine("Position after sample name: " + position_after_sample_name);
        long length_after_sample_name = this.gtc_stream.Length - br_origin.BaseStream.Position;
        Console.WriteLine("Length after sample name: " + length_after_sample_name);
        // Measure the byte length of the new name as serialized by
        // BinaryWriter.Write(string) (7-bit-encoded length prefix + text).
        temp.Seek(0, SeekOrigin.Begin);
        bw_temp.Write(sample_name);
        bw_temp.Flush();
        long length_new_sample_name = temp.Position;
        Console.WriteLine("Length new sample name: " + length_new_sample_name);
        // Copy the toc so this.toc_table can be mutated while iterating.
        Dictionary <short, int> toc = new Dictionary <short, int>();
        foreach (var keyValue in this.toc_table)
        {
            toc[keyValue.Key] = keyValue.Value;
        }
        // Shift every offset located after the sample name by the size
        // delta; the sample name's own offset is left unchanged.
        foreach (var keyValue in toc)
        {
            if (keyValue.Value > sample_name_position)
            {
                this.toc_table[keyValue.Key] += (int)(length_new_sample_name - (position_after_sample_name - position_before_sample_name));
            }
        }
        // Emit the adjusted toc, the new sample name, and the remainder of
        // the original stream.
        foreach (var keyValue in this.toc_table)
        {
            bw_new.Write(keyValue.Key);
            bw_new.Write(keyValue.Value);
        }
        bw_new.Write(sample_name);
        bw_new.Write(br_origin.ReadBytes((int)length_after_sample_name));
        bw_new.Flush();
        Console.WriteLine("Length of new stream: " + new_stream.Length);
    }
    this.gtc_stream = new_stream;
}
public static List <string> ConvertGTCToPlinkS3(string bucketNameGTC, List <string> keyNameGTCs, string bucketRegionGTC, string bucketNameBPM, string bucketRegionBPM, string bucketNamePlink, string bucketRegionPlink, string keyNamePlinkBasedir, string tableRegion, string tableName, string credential = @"embeded", AmazonS3Client s3client = null)
{
    /*
     * Converts each GTC file listed in keyNameGTCs (downloaded from S3)
     * into plink .bed/.fam plus a per-sample "_bim.txt", uploads the
     * results to S3, and reports progress through ProgressWriter.
     *
     * Returns:
     *   List<string>: one entry per input GTC — the plink base key on
     *   success, or null when no manifest matched or conversion failed.
     *
     * NOTE(review): tableRegion, tableName and bucketNameBPM are accepted
     * but never used in this body — confirm whether they are still needed.
     */
    Console.WriteLine("Conversion function invoked");
    return(keyNameGTCs.Select((keyNameGTC, gtc_idx) =>
    {
        string gtc_filename = keyNameGTC.Split('/').Last();
        // Download the GTC into memory and parse it.
        List <GenotypeCalls> gtcs = new List <GenotypeCalls> {
            new GenotypeCalls(new BinaryReader(readStreamFromS3ToMemory(bucketName: bucketNameGTC, keyName: keyNameGTC, bucketRegion: bucketRegionGTC, credential: credential, s3client: s3client).Result))
        };
        // Output keys are named after the sample, optionally nested under a
        // base directory. The literal "\"\"" (a two-character string of
        // quote characters) is the sentinel for "no base directory".
        string plink_filename_base = keyNamePlinkBasedir != "\"\"" ? Path.Combine(keyNamePlinkBasedir, gtcs[0].get_sample_name()) : gtcs[0].get_sample_name();//Regex.Replace(gtc_filename, "\\.gtc$", ""));
        Console.WriteLine("sample name: " + gtcs[0].get_sample_name());
        ProgressWriter.writer(sn: gtcs[0].get_sample_name(), stage: "Plink_conversion", status: "start").Wait();
        try
        {
            // Look up the mapping entry whose "bpm" matches the manifest
            // name recorded inside the GTC.
            string bpm_in_gtc = gtcs[0].get_snp_manifest();
            var bpm_mapping = new Preference().bpm_mapping;
            for (int mapping_idx = 0; mapping_idx < bpm_mapping.Count; ++mapping_idx)
            {
                if (bpm_in_gtc == bpm_mapping[mapping_idx]["bpm"].ToString())
                {
                    Plink.Plink plink = GTCToPlink.ConvertGTCToPlink(gtcs: gtcs, bpm: null);
                    // Record the name of the .bim associated with the
                    // matched manifest (the .bim itself is not regenerated
                    // here; sw_bim is passed as null below).
                    plink.bimFile = bpm_mapping[mapping_idx]["manifest"].ToString() + ".bim";
                    Console.WriteLine("Plink conversion completed");
                    // var sw_bim_stream = new GZipStream(new MemoryStreamNotDispose(), CompressionMode.Compress);
                    // var sw_bim_stream = new MemoryStream();
                    var sw_bim_file_stream = new MemoryStreamNotDispose();
                    var sw_fam_stream = new MemoryStreamNotDispose();
                    var bw_bed_stream = new MemoryStreamNotDispose();
                    // var bw_strand_stream = new MemoryStream();
                    // NOTE(review): the StreamWriters/BinaryWriter created
                    // inline here are never flushed or disposed in this
                    // method — presumably writePlinkToStream flushes before
                    // returning; verify, or buffered data may be lost.
                    plink.writePlinkToStream(sw_bim: null, sw_fam: new StreamWriter(sw_fam_stream), bw_bed: new BinaryWriter(bw_bed_stream), sw_bim_file: new StreamWriter(sw_bim_file_stream), bw_strand: null);
                    writeStreamFromStreamToS3(bucketName: bucketNamePlink, keyName: plink_filename_base + ".bed", bucketRegion: bucketRegionPlink, stream: bw_bed_stream, credential: credential, s3client: s3client).Wait();
                    // writeStreamFromStreamToS3(bucketName: bucketNamePlink, keyName: plink_filename_base + ".bim.gz", bucketRegion: bucketRegionPlink, stream: sw_bim_stream.BaseStream, credential: credential, s3client: s3client).Wait();
                    // NOTE(review): "_bim.txt" is uploaded to bucketNamePlink
                    // but with bucketRegionBPM — confirm this bucket/region
                    // pairing is intentional.
                    writeStreamFromStreamToS3(bucketName: bucketNamePlink, keyName: plink_filename_base + "_bim.txt", bucketRegion: bucketRegionBPM, stream: sw_bim_file_stream, credential: credential, s3client: s3client).Wait();
                    writeStreamFromStreamToS3(bucketName: bucketNamePlink, keyName: plink_filename_base + ".fam", bucketRegion: bucketRegionPlink, stream: sw_fam_stream, credential: credential, s3client: s3client).Wait();
                    // writeStreamFromStreamToS3(bucketName: bucketNamePlink, keyName: plink_filename_base + ".strand", bucketRegion: bucketRegionPlink, stream: bw_strand_stream, credential: credential, s3client: s3client).Wait();
                    ProgressWriter.writer(sn: gtcs[0].get_sample_name(), stage: "Plink_conversion", status: "end").Wait();
                    return plink_filename_base;
                }
            }
        }
        catch (Exception e)
        {
            // Failures are logged and reported via ProgressWriter rather
            // than rethrown, so the remaining GTCs are still processed.
            Console.WriteLine(e.Message);
            Console.WriteLine(e.StackTrace);
            ProgressWriter.writer(sn: gtcs[0].get_sample_name(), stage: "Plink_conversion", status: "failed").Wait();
        }
        // No manifest match (or conversion failure): a null placeholder
        // keeps the output list aligned with keyNameGTCs.
        return null;
    }).ToList());
}