/// <summary>
/// Verifies that raw device bytes decode to the expected engineering-unit
/// values (wind speed, direction, temperature, humidity, pressure).
/// </summary>
public void FromDeviceRawBytesValuesTest() {
    var reading = new PackedReading(
        DateTimeSample,
        PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0)
    );
    // Compare floating-point results with a tolerance instead of exact
    // equality; the values are reconstructed from fixed-point device fields.
    Assert.AreEqual(18.51, reading.WindSpeed, 0.0001);
    // NaN marks "no reading"; test for it explicitly rather than relying on
    // AreEqual's NaN semantics.
    Assert.IsTrue(Double.IsNaN(reading.WindDirection));
    Assert.AreEqual(24.5, reading.Temperature, 0.0001);
    Assert.AreEqual(.505, reading.Humidity, 0.0001);
    Assert.AreEqual(98000, reading.Pressure, 0.0001);
}
/// <summary>
/// Verifies that decoding raw device bytes preserves the packed field values
/// exactly as they appeared in the sample record.
/// </summary>
public void FromDeviceRawBytesRawValuesTest() {
    var decoded = PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0);
    var sample = new PackedReading(DateTimeSample, decoded);

    var raw = sample.Values;
    Assert.AreEqual(0x73B, raw.RawWindSpeed);
    Assert.AreEqual(0x1ff, raw.RawWindDirection);
    Assert.AreEqual(0x285, raw.RawTemperature);
    Assert.AreEqual(0x1f9, raw.RawHumidity);
    Assert.AreEqual(0xbf68, raw.RawPressure);
}
/// <summary>
/// Creates a reader positioned at the start of the given DAQ data stream.
/// </summary>
/// <param name="stream">The stream containing DAQ records; must not be null.</param>
/// <param name="ownsStream">True when this reader is responsible for disposing the stream.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
private DaqDataFileReader(Stream stream, bool ownsStream) {
    // Guard first so a failed construction assigns no state.
    if (null == stream) {
        throw new ArgumentNullException("stream");
    }
    _stream = stream;
    _ownsStream = ownsStream;
    _isClosed = false;
    _currentBaseTime = default(DateTime);
    _current = default(PackedReading);
    _timeBaseFromReadAhead = false;
    _chunkRecordCounter = 0;
}
/// <summary>
/// Creates a reader positioned at the start of the given DAQ data stream.
/// </summary>
/// <param name="stream">The stream containing DAQ records; must not be null.</param>
/// <param name="ownsStream">True when this reader is responsible for disposing the stream.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
private DaqDataFileReader(Stream stream, bool ownsStream) {
    // Guard first so a failed construction assigns no state.
    if (null == stream) {
        throw new ArgumentNullException("stream");
    }
    _stream = stream;
    _ownsStream = ownsStream;
    _isClosed = false;
    _currentBaseTime = default(DateTime);
    _current = default(PackedReading);
    _timeBaseFromReadAhead = false;
    _chunkRecordCounter = 0;
}
/// <summary>
/// Verifies that engineering-unit doubles pack down to the expected raw
/// fixed-point field values, with NaN wind direction packing to zero.
/// </summary>
public void FromDoubleValuesToRawValues() {
    // ReSharper disable RedundantArgumentName
    var packedValues = new PackedReadingValues(
        temperature: 24.5,
        pressure: 98000.0,
        humidity: 0.505,
        windDirection: Double.NaN,
        windSpeed: 18.51
    );
    // ReSharper restore RedundantArgumentName
    var reading = new PackedReading(DateTimeSample, packedValues);

    var raw = reading.Values;
    Assert.AreEqual(0x73B, raw.RawWindSpeed);
    Assert.AreEqual(0x0, raw.RawWindDirection);
    Assert.AreEqual(0x285, raw.RawTemperature);
    Assert.AreEqual(0x1f9, raw.RawHumidity);
    Assert.AreEqual(0xbf68, raw.RawPressure);
}
/// <summary>
/// Moves to the next record in the DAQ file.
/// </summary>
/// <returns>true when the move was successful.</returns>
public bool MoveNext() {
    // Once closed (or the stream is exhausted) the reader stays closed.
    if (_isClosed || _stream.Position >= _stream.Length) {
        _isClosed = true;
        return false;
    }
    var recordData = new byte[DaqDataFileInfo.RecordSize];
    // Loop until a valid data record is produced or the stream runs out;
    // header records and invalid records are consumed without returning.
    while(true) {
        // A short or failed read means no more complete records: close out.
        if (_stream.Position >= _stream.Length || DaqDataFileInfo.RecordSize != _stream.Read(recordData, 0, DaqDataFileInfo.RecordSize)) {
            _current = new PackedReading();
            _isClosed = true;
            return false;
        }
        // The last byte of the record distinguishes header from data records.
        if (DaqDataFileInfo.HeaderCodeByte == recordData[DaqDataFileInfo.RecordSize - 1]) {
            // a header record: carries the base time stamp for the chunk of
            // data records that follows it
            if (DaqDataFileInfo.TryConvert7ByteDateTime(recordData, 0, out _currentBaseTime)) {
                // valid stamp: start a new chunk from this real base time
                _timeBaseFromReadAhead = false;
                _chunkRecordCounter = 0;
            } else {
                // invalid time stamp
                if (!_timeBaseFromReadAhead) {
                    // if we haven't already read ahead for a good stamp we
                    // need to read ahead to get the next valid time stamp
                    var positionRestore = _stream.Position;
                    var dataRecordsRead = ReadNextValidStamp(out _currentBaseTime);
                    if (dataRecordsRead < 0) {
                        return false; // failed to find any more time stamp records, quit
                    }
                    // skipped over some data records
                    if (0 != dataRecordsRead) {
                        // need to adjust the stamp we got for those records:
                        // one record per second, so back up one second each
                        _currentBaseTime = _currentBaseTime.Subtract(
                            new TimeSpan(0, 0, dataRecordsRead)
                        );
                        // also need to put the stream back how we found it
                        _stream.Seek(positionRestore, SeekOrigin.Begin);
                    }
                    // this is the start of a new "virtual" chunk
                    _chunkRecordCounter = 0;
                    // virtual because invalid headers immediately following this will be ignored
                    _timeBaseFromReadAhead = true;
                } else {
                    ; // just skip it, pretend it doesn't exist because we already read ahead
                }
            }
        } else {
            // a data record: decode it and stamp it relative to the chunk base
            var values = PackedReadingValues.FromDeviceBytes(recordData, 0);
            if (!values.IsValid) {
                // still advance the counter so later records keep correct offsets
                _chunkRecordCounter++;
                continue;
            }
            // each record in a chunk is one second after the previous one
            _current = new PackedReading(
                _currentBaseTime.Add(new TimeSpan(0, 0, _chunkRecordCounter)),
                values
            );
            _chunkRecordCounter++;
            return true;
        }
    }
}
/// <summary>
/// Moves to the next record in the DAQ file.
/// </summary>
/// <returns>true when the move was successful.</returns>
public bool MoveNext() {
    // Once closed (or the stream is exhausted) the reader stays closed.
    if (_isClosed || _stream.Position >= _stream.Length) {
        _isClosed = true;
        return(false);
    }
    var recordData = new byte[DaqDataFileInfo.RecordSize];
    // Loop until a valid data record is produced or the stream runs out;
    // header records and invalid records are consumed without returning.
    while (true) {
        // A short or failed read means no more complete records: close out.
        if (_stream.Position >= _stream.Length || DaqDataFileInfo.RecordSize != _stream.Read(recordData, 0, DaqDataFileInfo.RecordSize)) {
            _current = new PackedReading();
            _isClosed = true;
            return(false);
        }
        // The last byte of the record distinguishes header from data records.
        if (DaqDataFileInfo.HeaderCodeByte == recordData[DaqDataFileInfo.RecordSize - 1]) {
            // a header record: carries the base time stamp for the chunk of
            // data records that follows it
            if (DaqDataFileInfo.TryConvert7ByteDateTime(recordData, 0, out _currentBaseTime)) {
                // valid stamp: start a new chunk from this real base time
                _timeBaseFromReadAhead = false;
                _chunkRecordCounter = 0;
            } else {
                // invalid time stamp
                if (!_timeBaseFromReadAhead) {
                    // if we haven't already read ahead for a good stamp we
                    // need to read ahead to get the next valid time stamp
                    var positionRestore = _stream.Position;
                    var dataRecordsRead = ReadNextValidStamp(out _currentBaseTime);
                    if (dataRecordsRead < 0) {
                        return(false); // failed to find any more time stamp records, quit
                    }
                    // skipped over some data records
                    if (0 != dataRecordsRead) {
                        // need to adjust the stamp we got for those records:
                        // one record per second, so back up one second each
                        _currentBaseTime = _currentBaseTime.Subtract(
                            new TimeSpan(0, 0, dataRecordsRead)
                        );
                        // also need to put the stream back how we found it
                        _stream.Seek(positionRestore, SeekOrigin.Begin);
                    }
                    // this is the start of a new "virtual" chunk
                    _chunkRecordCounter = 0;
                    // virtual because invalid headers immediately following this will be ignored
                    _timeBaseFromReadAhead = true;
                } else {
                    ; // just skip it, pretend it doesn't exist because we already read ahead
                }
            }
        } else {
            // a data record: decode it and stamp it relative to the chunk base
            var values = PackedReadingValues.FromDeviceBytes(recordData, 0);
            if (!values.IsValid) {
                // still advance the counter so later records keep correct offsets
                _chunkRecordCounter++;
                continue;
            }
            // each record in a chunk is one second after the previous one
            _current = new PackedReading(
                _currentBaseTime.Add(new TimeSpan(0, 0, _chunkRecordCounter)),
                values
            );
            _chunkRecordCounter++;
            return(true);
        }
    }
}