/// <summary>
/// Scans forward from the current stream position for the first header record
/// carrying a valid 7-byte date-time stamp.
/// </summary>
/// <param name="firstValidStamp">Receives the stamp that was found.</param>
/// <returns>
/// The number of data records passed over before the valid stamp was found,
/// or -1 when the stream is exhausted without finding one (in which case
/// <paramref name="firstValidStamp"/> is default(DateTime)).
/// </returns>
/// <remarks>
/// Header records whose stamp fails conversion are skipped without being counted.
/// </remarks>
public int ReadNextValidStamp(out DateTime firstValidStamp) {
    // TODO: if the stream position is not record-aligned (multiple of the record size), scoot it back
    if (_isClosed || _stream.Position >= _stream.Length) {
        // Nothing left to scan; the reader is done for good.
        _isClosed = true;
        firstValidStamp = default(DateTime);
        return -1;
    }
    var skippedDataRecords = 0;
    var record = new byte[DaqDataFileInfo.RecordSize];
    for (;;) {
        // A short read means we ran off the end of the file.
        var bytesRead = _stream.Read(record, 0, DaqDataFileInfo.RecordSize);
        if (bytesRead != DaqDataFileInfo.RecordSize) {
            firstValidStamp = default(DateTime);
            return -1;
        }
        if (DaqDataFileInfo.HeaderCodeByte != record[LastRecordIndex]) {
            // Data record: count it so the caller can back-date the stamp.
            skippedDataRecords++;
            continue;
        }
        // Header record: succeed only when the stamp converts cleanly;
        // a corrupt header is silently skipped (not counted).
        if (DaqDataFileInfo.TryConvert7ByteDateTime(record, 0, out firstValidStamp)) {
            return skippedDataRecords;
        }
    }
}
/// <summary>
/// Appends one reading to the DAQ file, emitting a new chunk header first
/// when the stamp breaks the one-second-per-record sequence, when the
/// current chunk is empty, or when the chunk is full (255 records).
/// </summary>
/// <param name="stamp">The date-time of the reading.</param>
/// <param name="values">The packed reading values to write.</param>
/// <exception cref="InvalidOperationException">The writer is closed or has no stream.</exception>
public void Write(DateTime stamp, PackedReadingValues values) {
    if (_isClosed || null == _stream) {
        throw new InvalidOperationException("Writer is not valid.");
    }

    var buffer = new byte[DaqDataFileInfo.RecordSize];

    // Records within a chunk are implicitly spaced one second apart,
    // so a stamp that doesn't match the projected time forces a new chunk.
    var projectedStamp = _chunkStamp.Add(new TimeSpan(TimeSpan.TicksPerSecond * _recordsInThisChunk));
    var startNewChunk =
        projectedStamp != stamp
        || _recordsInThisChunk == 0
        || _recordsInThisChunk >= 255;

    if (startNewChunk) {
        // Chunk header: 7-byte stamp followed by the header code byte in the last slot.
        DaqDataFileInfo.ConvertDateTimeTo7ByteForm(stamp, buffer, 0);
        buffer[DaqDataFileInfo.RecordSize - 1] = DaqDataFileInfo.HeaderCodeByte;
        _stream.Write(buffer, 0, buffer.Length);
        _chunkStamp = stamp;
        _recordsInThisChunk = 0;
    }

    // Write the data record itself.
    PackedReadingValues.ToDeviceBytes(values, buffer, 0);
    _stream.Write(buffer, 0, buffer.Length);
    _recordsInThisChunk++;
}
/// <summary>
/// Enumerates every packed reading in the given DAQ file, disposing the
/// underlying reader when enumeration completes or is abandoned.
/// </summary>
/// <param name="fileInfo">The file to read from.</param>
/// <returns>A lazily-evaluated sequence of readings.</returns>
private static IEnumerable<PackedReading> GetPackedReadings(DaqDataFileInfo fileInfo) {
    using (var reader = fileInfo.CreateReader()) {
        for (;;) {
            if (!reader.MoveNext()) {
                yield break;
            }
            yield return reader.Current;
        }
    }
}
/// <summary>
/// Moves to the next record in the DAQ file.
/// </summary>
/// <returns>true when the move was successful.</returns>
/// <remarks>
/// Skips header records (they update the chunk base time instead of producing a
/// reading) and invalid data records. Each data record within a chunk is assumed
/// to be one second after the previous — matching the writer's TicksPerSecond spacing.
/// </remarks>
public bool MoveNext() {
    // Once closed, or once the stream is exhausted, the reader stays closed.
    if (_isClosed || _stream.Position >= _stream.Length) {
        _isClosed = true;
        return(false);
    }
    var recordData = new byte[DaqDataFileInfo.RecordSize];
    while (true) {
        // A short or failed read ends enumeration permanently and clears the current reading.
        if (_stream.Position >= _stream.Length || DaqDataFileInfo.RecordSize != _stream.Read(recordData, 0, DaqDataFileInfo.RecordSize)) {
            _current = new PackedReading();
            _isClosed = true;
            return(false);
        }
        if (DaqDataFileInfo.HeaderCodeByte == recordData[DaqDataFileInfo.RecordSize - 1]) {
            // a header record: it carries the base stamp for the records that follow
            if (DaqDataFileInfo.TryConvert7ByteDateTime(recordData, 0, out _currentBaseTime)) {
                // Valid stamp: start a real chunk; any earlier read-ahead result is superseded.
                _timeBaseFromReadAhead = false;
                _chunkRecordCounter = 0;
            }
            else {
                // invalid time stamp in the header
                if (!_timeBaseFromReadAhead) {
                    // if we haven't already read ahead for a good stamp we
                    // need to read ahead to get the next valid time stamp
                    var positionRestore = _stream.Position;
                    var dataRecordsRead = ReadNextValidStamp(out _currentBaseTime);
                    if (dataRecordsRead < 0) {
                        // NOTE(review): unlike the other failure paths this does not set
                        // _isClosed or reset _current — confirm that is intended.
                        return(false); // failed to find any more time stamp records, quit
                    }
                    // skipped over some data records while searching
                    if (0 != dataRecordsRead) {
                        // need to adjust the stamp we got for those records:
                        // back-date by one second per skipped data record
                        _currentBaseTime = _currentBaseTime.Subtract(
                            new TimeSpan(0, 0, dataRecordsRead)
                        );
                        // also need to put the stream back how we found it so those
                        // records are re-read and yielded on subsequent calls
                        _stream.Seek(positionRestore, SeekOrigin.Begin);
                    }
                    // this is the start of a new "virtual" chunk
                    _chunkRecordCounter = 0;
                    // virtual because invalid headers immediately following this will be ignored
                    _timeBaseFromReadAhead = true;
                }
                else {
                    ; // just skip it, pretend it doesn't exist because we already read ahead
                }
            }
        }
        else {
            // a data record
            var values = PackedReadingValues.FromDeviceBytes(recordData, 0);
            if (!values.IsValid) {
                // Invalid data still occupies a one-second slot in the chunk.
                _chunkRecordCounter++;
                continue;
            }
            // Stamp = chunk base time + one second per preceding record in the chunk.
            _current = new PackedReading(
                _currentBaseTime.Add(new TimeSpan(0, 0, _chunkRecordCounter)),
                values
            );
            _chunkRecordCounter++;
            return(true);
        }
    }
}