/// <summary>
/// Reads a single trace header from the file
/// </summary>
/// <param name="traceIndex">The trace index as it appears in sequence in the file</param>
/// <returns>The segy trace header</returns>
public SegyTraceHeader ReadTraceHeader(long traceIndex)
{
    CodeContract.Requires(traceIndex < (long)TraceCount, "Trace index to read must be less than the number of traces in the file.");
    CodeContract.Requires(traceIndex >= 0, "Trace index must be greater than or equal to 0");

    var littleEndian = IsLittleEndian;
    _stream.Seek(0, SeekOrigin.Begin);

    // Trace data begins after the binary header plus every textual header
    // (the mandatory first one and any extended ones).
    var extendedTextHeaderCount = FileTextualHeaders.Length - 1;
    var binHeader = FileBinaryHeader;
    var dataStart = TextHeaderBytesCount + BinaryHeaderBytesCount + TextHeaderBytesCount * extendedTextHeaderCount;

    // Per-sample byte size follows from the file-wide format code.
    var format = (FormatCode)binHeader.DataSampleFormatCode;
    var bytesPerSample = SizeFrom(format); // as per rev 1, all data values are assumed big endian

    // Each trace occupies a fixed-size header plus its sample block.
    var bytesPerTrace = TraceHeaderBytesCount + bytesPerSample * binHeader.SamplesPerTraceOfFile;
    var headerPosition = dataStart + traceIndex * bytesPerTrace;

    CodeContract.Assume(headerPosition <= _stream.Length, "initial trace index exceeds file length.");
    _stream.Seek(headerPosition, SeekOrigin.Begin);

    var headerBytes = _reader.ReadBytes(TraceHeaderBytesCount);
    return SegyTraceHeader.From(headerBytes, littleEndian);
}
/// <summary>
/// Reads a single trace from the file.
/// </summary>
/// <param name="traceIndex">The trace index as it appears in sequence in the file</param>
/// <returns>The segy trace</returns>
public SegyTrace ReadTrace(long traceIndex)
{
    CodeContract.Requires(traceIndex < TraceCount, "Trace index to read must be less than the number of traces in the file.");
    CodeContract.Requires(traceIndex >= 0, "Trace index must be greater than or equal to 0");

    var islilEndian = IsLittleEndian;
    _stream.Seek(0, SeekOrigin.Begin);

    // Trace data begins after the binary header plus every textual header
    // (the mandatory first one and any extended ones).
    var textFileHeadersCount = FileTextualHeaders.Length;
    var binaryHeader = FileBinaryHeader;
    var dataStartIndex = TextHeaderBytesCount + BinaryHeaderBytesCount + TextHeaderBytesCount * (textFileHeadersCount - 1);

    var sampleFormat = (FormatCode)binaryHeader.DataSampleFormatCode;
    int sampleSz = SizeFrom(sampleFormat); // as per rev 1, all data values are assumed big endian

    // Use the named constant for the trace-header size (previously a magic 240,
    // inconsistent with ReadTraceHeader which uses TraceHeaderBytesCount for the
    // same offset computation).
    var ns = binaryHeader.SamplesPerTraceOfFile;
    var initStreamPosition = dataStartIndex + (TraceHeaderBytesCount + ns * sampleSz) * traceIndex;
    CodeContract.Assume(initStreamPosition <= _stream.Length, "initial trace index exceeds file length.");
    _stream.Seek(initStreamPosition, SeekOrigin.Begin);

    var traceHeaderByteArr = _reader.ReadBytes(TraceHeaderBytesCount);
    var trHeader = SegyTraceHeader.From(traceHeaderByteArr, islilEndian);

    // The data block size for this trace comes from the trace's own header.
    var traceDataBytesSz = trHeader.SampleCount * sampleSz;
    var traceDataBytes = _reader.ReadBytes(traceDataBytesSz);
    var traceData = GetData(traceDataBytes, sampleFormat, trHeader.SampleCount);

    var seismicTrace = new SegyTrace
    {
        ComponentAxis = 0,
        Data = traceData,
        Header = trHeader
    };
    return seismicTrace;
}
/// <summary>
/// Appends a set of traces to the end of the file or trace series.
/// </summary>
/// <param name="traces">Traces to write</param>
/// <param name="progress">A progress handler</param>
/// <param name="ct">Cancellation token</param>
public void Write(IEnumerable<SegyTrace> traces, IProgress<int> progress = null, CancellationToken ct = default(CancellationToken))
{
    CodeContract.Requires<NullReferenceException>(traces != null, "Traces cannot be null.");
    if (ct.IsCancellationRequested)
    {
        return;
    }

    // Materialize the source exactly once; all subsequent passes use the
    // materialized collection (the original re-enumerated the raw source for
    // both the statistics pass and the write loop, which re-executes a lazy
    // producer and defeats the point of ToBigArray()).
    BigArray<SegyTrace> segyTraces = traces as BigArray<SegyTrace> ?? traces.ToBigArray();
    CodeContract.Assume(segyTraces.Any(), "There must be at least one trace to write.");

    // All traces must share a single sample count.
    var distinctTraceSampleCounts = segyTraces.Select(tr => tr.Data.Length).Distinct();
    int numTraceLengths = distinctTraceSampleCounts.Count();
    CodeContract.Assume(numTraceLengths == 1, "There are traces to write with inconsistent lengths. All traces must have the same length");

    _stream.Seek(0, SeekOrigin.End);
    if (TraceSampleCount == 0)
    {
        // First write to this file fixes the per-trace sample count.
        TraceSampleCount = distinctTraceSampleCounts.FirstOrDefault();
    }
    else
    {
        CodeContract.Assume(TraceSampleCount == distinctTraceSampleCounts.FirstOrDefault(), "Trace lengths to write is not consistent with the rest of the trace lengths in this file.");
    }

    var currProgress = 0;
    long traceCount = segyTraces.LongCount();
    long ctr = 0;
    foreach (var sgyTrace in segyTraces)
    {
        _writer.Write(sgyTrace.GetBytes());
        ctr++;

        // Report progress and cancel if requested.
        if (ct.IsCancellationRequested)
        {
            break;
        }
        if (progress == null)
        {
            continue;
        }
        var progPercent = (int)(100 * (double)ctr / traceCount);
        if (currProgress == progPercent)
        {
            continue;
        }
        progress.Report(progPercent);
        // Remember the last reported percentage. The original did currProgress++,
        // which fell out of sync whenever progress jumped by more than one percent
        // and caused duplicate reports of the same value.
        currProgress = progPercent;
    }
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="traceHeader">The trace header to use for this trace</param>
/// <param name="data">Trace data for this trace</param>
/// <param name="componentAxis">The cartesian coordinate direction in which this trace exists</param>
public SegyTrace(SegyTraceHeader traceHeader, float[] data, int componentAxis = 0)
{
    // Validate before any dereference or field assignment. The original assigned
    // the fields first and only then read data.Length, so a null argument failed
    // with a bare NullReferenceException; the Requires below preserves that
    // exception type (matching the style used elsewhere in this file) while
    // giving it a meaningful message.
    CodeContract.Requires<NullReferenceException>(data != null, "Trace data cannot be null.");
    CodeContract.Assume(data.Length <= ushort.MaxValue, "The length of the data array for this trace must not exceed ushort.MaxValue due to type definitions of the sample count property in the trace header.");

    Header = traceHeader;
    Data = data;
    ComponentAxis = componentAxis;
    // Safe narrowing cast: length was checked against ushort.MaxValue above.
    Header.SampleCount = (ushort)data.Length;
}
/// <summary>
/// Appends a single segy trace to the end of the file or trace series.
/// </summary>
/// <param name="sgyTrace">The trace to write.</param>
public void Write(SegyTrace sgyTrace)
{
    CodeContract.Requires(sgyTrace.Data.Length != 0);

    // The first trace written fixes the per-trace sample count for the file.
    var sampleCount = sgyTrace.Data.Length;
    if (TraceCount == 0)
    {
        TraceSampleCount = sampleCount;
    }
    CodeContract.Assume(sgyTrace.Data.Length == TraceSampleCount);

    // Always append: position the underlying stream at its end before writing.
    var baseStream = _writer.BaseStream;
    baseStream.Position = baseStream.Length;
    _writer.Write(sgyTrace.GetBytes());
    TraceCount++;
}
public void ReadTeapotDome3D()
{
    using (var reader = new SegyReader(teapotDome3DFileInfo))
    {
        // Sanity-check the file-level headers before touching trace data.
        var textHeaders = reader.FileTextualHeaders;
        CodeContract.Assume(textHeaders.Length == 1, "There should be at least 1 text file header in the file.");
        CodeContract.Assume(reader.TraceCount > 0);
        CodeContract.Assume(reader.FileBinaryHeader.SamplesPerTraceOfFile > 0);

        // Read every trace and scan the amplitude range, then confirm the
        // enumerated trace count matches the reader's reported count.
        var enumeratedTraceCount = reader.ReadTraces().Count();
        var amplitudeRange = reader.GetAmplitudeRange();
        CodeContract.Assume(enumeratedTraceCount == reader.TraceCount);
    }
}
public void CopySegyFileAndCompareToOriginal(FileInfo oldFileInfo)
{
    // Copy the original file header-by-header and trace-by-trace into a
    // sibling "_tmp" file.
    var newFilePath = oldFileInfo.FullName.Replace(".sgy", "_tmp.sgy");
    var newFileInfo = new FileInfo(newFilePath);
    using (var reader = new SegyReader(oldFileInfo))
    using (var writer = new SegyWriter(newFileInfo))
    {
        writer.Write(reader.FileTextualHeaders[0]);
        writer.Write(reader.FileBinaryHeader);
        writer.Write(reader.ReadTraces());
    }

    // Re-open both files and verify the headers and every trace are equal.
    using (var oldFileReader = new SegyReader(oldFileInfo))
    using (var newFileReader = new SegyReader(newFilePath))
    {
        CodeContract.Assume(oldFileReader.FileTextualHeaders[0] == newFileReader.FileTextualHeaders[0]);
        CodeContract.Assume(oldFileReader.FileBinaryHeader.Equals(newFileReader.FileBinaryHeader));

        var oldTraces = oldFileReader.ReadTraces().ToArray();
        var newTraces = newFileReader.ReadTraces().ToArray();
        CodeContract.Assume(oldTraces.Length == newTraces.Length);
        for (var i = 0; i < oldTraces.Length; i++)
        {
            CodeContract.Assume(oldTraces[i].Equals(newTraces[i]));
        }
    }

    // Clean up the temporary copy.
    newFileInfo.Delete();
}
/// <summary>
/// Other Ctor
/// </summary>
/// <param name="fileinfo">The FileInfo of the file intended to be written</param>
public SegyWriter(FileInfo fileinfo) : this(fileinfo.FullName)
{
    // NOTE(review): this null check is effectively dead — the chained
    // constructor call above dereferences fileinfo.FullName first, so a null
    // argument throws NullReferenceException before this Assume ever runs.
    // Consider validating in the chained call instead; left unchanged here
    // because the semantics of this(string) are not visible in this file.
    CodeContract.Assume(fileinfo != null, "The file info used to create a new segy file must not be null.");
}
/// <summary>
/// Reads a set of segy trace headers in the file within a defined range
/// <remarks>
/// if startTrace and nTraces are default for this method, all trace headers in the file will be read</remarks>
/// </summary>
/// <param name="progress">A progress handler</param>
/// <param name="ct">A cancellation token</param>
/// <param name="startTrace">The 0 based starting trace index to read</param>
/// <param name="nTraces">The number of trace headers to read (defaults to all remaining traces)</param>
/// <returns>A collection of segy trace headers</returns>
public IEnumerable<SegyTraceHeader> ReadTraceHeaders(IProgress<int> progress = null, CancellationToken ct = default(CancellationToken), long startTrace = 0, long nTraces = Int64.MaxValue)
{
    // The original had two identical startTrace checks; the first one's message
    // clearly intended to validate nTraces.
    CodeContract.Requires(nTraces >= 0, "Cannot read a negative number of traces.");
    CodeContract.Requires(startTrace >= 0, "Starting trace index must be greater than 0.");

    // Compute the exclusive end index without overflowing: startTrace + nTraces
    // wraps past Int64.MaxValue when the caller uses the "all traces" default
    // with a non-zero startTrace (the original then read zero traces).
    var lastTraceToReadIndex = nTraces > TraceCount - startTrace
        ? TraceCount
        : startTrace + nTraces;
    CodeContract.Requires(lastTraceToReadIndex <= TraceCount, "Ending trace index must be less than the number of traces in the file.");

    var islilEndian = IsLittleEndian;

    // Trace data begins after the binary header plus every textual header
    // (named constants instead of the magic 3600/3200 used previously).
    var dataStartIndex = TextHeaderBytesCount + BinaryHeaderBytesCount + TextHeaderBytesCount * (FileTextualHeaders.Length - 1);
    var sampleFormat = (FormatCode)FileBinaryHeader.DataSampleFormatCode;
    int sampleSz = SizeFrom(sampleFormat); // as per rev 1, all data values are assumed big endian

    // Size the result to exactly the number of headers that will be read
    // (the original over-allocated when a finite nTraces exceeded the remainder).
    var trHArr = new BigArray<SegyTraceHeader>(lastTraceToReadIndex - startTrace);

    var streamLen = _stream.Length;
    var streamPos = dataStartIndex + startTrace * (TraceHeaderBytesCount + sampleSz * FileBinaryHeader.SamplesPerTraceOfFile);
    CodeContract.Assume(streamPos <= _stream.Length, "initial trace index exceeds file length.");
    _stream.Seek(streamPos, SeekOrigin.Begin);

    // Assume that the binary header has num samples per trace; don't trust the
    // individual trace headers.
    int ns = FileBinaryHeader.SamplesPerTraceOfFile;
    var traceDataBytesSz = ns * sampleSz;
    int progPercent = 0;
    for (long i = startTrace; (i < lastTraceToReadIndex) && (streamPos < streamLen); i++)
    {
        var traceHeaderByteArr = _reader.ReadBytes(TraceHeaderBytesCount);
        trHArr[i - startTrace] = SegyTraceHeader.From(traceHeaderByteArr, islilEndian);

        // Skip the trace's sample block; only headers are wanted here.
        _stream.Seek(traceDataBytesSz, SeekOrigin.Current);
        streamPos += TraceHeaderBytesCount + traceDataBytesSz;

        if (ct.IsCancellationRequested)
        {
            break;
        }
        if (progress == null)
        {
            continue;
        }
        var percent = (int)(100 * (double)_stream.Position / _stream.Length);
        if (progPercent == percent)
        {
            continue;
        }
        progress.Report(percent);
        progPercent = percent;
    }
    return trHArr;
}
/// <summary>
/// Scans all sample values and evaluates the min and max amplitude of the sample values
/// </summary>
/// <param name="progress">progress notifier</param>
/// <param name="ct">cancellation token</param>
/// <returns>A pair of floats, the first of which is min, second is max.</returns>
public Tuple<float, float> GetAmplitudeRange(IProgress<int> progress = null, CancellationToken ct = default(CancellationToken))
{
    var lastTraceToReadIndex = TraceCount;

    var islilEndian = IsLittleEndian;
    // Trace data begins after the binary header plus every textual header
    // (named constants instead of the magic 3600/3200 used previously).
    var dataStartIndex = TextHeaderBytesCount + BinaryHeaderBytesCount + TextHeaderBytesCount * (FileTextualHeaders.Length - 1);
    var sampleFormat = (FormatCode)FileBinaryHeader.DataSampleFormatCode;
    int sampleSz = SizeFrom(sampleFormat); // as per rev 1, all data values are assumed big endian

    var streamLen = _stream.Length;
    // The scan always starts at the first trace.
    long streamPos = dataStartIndex;
    CodeContract.Assume(streamPos <= _stream.Length, "initial trace index exceeds file length.");
    _stream.Seek(streamPos, SeekOrigin.Begin);

    // Assume that the binary header has num samples per trace; don't trust the
    // individual trace headers.
    int ns = FileBinaryHeader.SamplesPerTraceOfFile;
    var traceDataBytesSz = ns * sampleSz;
    int progPercent = 0;
    float min = Single.PositiveInfinity;
    float max = Single.NegativeInfinity;
    for (long i = 0; (i < lastTraceToReadIndex) && (streamPos < streamLen); i++)
    {
        _stream.Seek(TraceHeaderBytesCount, SeekOrigin.Current);
        var trDataBytes = _reader.ReadBytes(traceDataBytesSz);
        var trData = GetData(trDataBytes, sampleFormat, ns);

        // Sequential scan. The original used Parallel.For with unsynchronized
        // read-compare-write on the shared min/max captures — a data race that
        // could silently drop extrema; per-trace arrays are far too small for
        // parallelism to pay for itself anyway.
        for (int k = 0; k < trData.Length; k++)
        {
            if (trData[k] < min)
            {
                min = trData[k];
            }
            if (trData[k] > max)
            {
                max = trData[k];
            }
        }

        streamPos += TraceHeaderBytesCount + traceDataBytesSz;
        if (ct.IsCancellationRequested)
        {
            break;
        }
        if (progress == null)
        {
            continue;
        }
        var percent = (int)(100 * (double)_stream.Position / _stream.Length);
        if (progPercent == percent)
        {
            continue;
        }
        progress.Report(percent);
        progPercent = percent;
    }
    return new Tuple<float, float>(min, max);
}