/// <summary>
/// Queries the device for the current reading values of one network node.
/// </summary>
/// <param name="nid">The network node ID to query; must be in the range [0,3].</param>
/// <returns>
/// The parsed reading values, or <c>PackedReadingValues.Invalid</c> when no
/// response packet arrives after the retry attempts.
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">When <paramref name="nid"/> is outside [0,3].</exception>
public PackedReadingValues QueryValues(int nid) {
    if (nid < 0 || nid > 3) {
        throw new ArgumentOutOfRangeException("nid");
    }
    using (IsolateConnection()) {
        System.Diagnostics.Debug.Write(String.Format("Query {0}", nid));
        ConnectIfRequired();
        var queryPacket = GenereateQueryPacketData(nid);
        if (WritePacket(queryPacket)) {
            // the device may need a few short read attempts before a packet arrives
            for (int i = 0; i < 3; i++) {
                queryPacket = UsbConn.ReadPacket(TenthSecond);
                if (null != queryPacket) {
                    // payload starts at offset 1 of the response packet
                    var result = PackedReadingValues.FromDeviceBytes(queryPacket, 1);
                    System.Diagnostics.Debug.WriteLine(String.Format(", {0}: {1}", nid, result));
                    return result;
                }
            }
        }
    }
    // BUG FIX: the original format string ": FAIL!" contained no {0} placeholder,
    // so the node ID passed as an argument was silently dropped from the trace.
    System.Diagnostics.Debug.WriteLine(String.Format("{0}: FAIL!", nid));
    return PackedReadingValues.Invalid;
}
/// <summary>
/// Records a new observation for this sensor, back-dating the stamp by one
/// second per node in the network, and updates the validity flag.
/// </summary>
/// <param name="values">The observed values; invalid values only clear the valid flag.</param>
/// <param name="networkSize">The number of nodes in the network.</param>
/// <param name="daqRecordedTime">The time the DAQ recorded the observation.</param>
public void HandleObservation(PackedReadingValues values, int networkSize, DateTime daqRecordedTime) {
    lock (_stateMutex) {
        if (!values.IsValid) {
            _valid = false;
            return;
        }
        // back-date by one second per network node
        var adjustedStamp = daqRecordedTime.Subtract(new TimeSpan(0, 0, networkSize));
        if (_latestReadings.Count == 0) {
            _latestReadings.Add(new PackedReading(adjustedStamp, values));
        }
        else {
            var newest = _latestReadings.FirstOrDefault();
            // keep at most one reading per time stamp
            if (newest.TimeStamp != adjustedStamp) {
                _latestReadings.Insert(0, new PackedReading(adjustedStamp, values));
            }
            // drop the oldest readings beyond the retention limit
            while (_latestReadings.Count > _maxReadings) {
                _latestReadings.RemoveAt(_maxReadings - 1);
            }
        }
        _valid = true;
    }
}
// Polls every sensor node once, derives the active network size from which
// nodes answered, and publishes the readings to the per-sensor handlers.
private void QueryThreadBody() {
    if (!_queryActive) {
        return;
    }
    // assume the maximum of 4 nodes until the responses say otherwise
    int networkSize = 4;
    var values = new PackedReadingValues[networkSize];
    bool? usingDaqTemp = null;
    int highestValid = -1;
    var now = DateTime.Now;
    // readings are stamped at whole-second resolution
    var daqSafeTime = UnitUtility.StripToSecond(now);
    for (int i = 0; i < values.Length; i++) {
        values[i] = QueryValues(i);
    }
    for (int i = 0; i < values.Length; i++) {
        if (!values[i].IsValid) {
            continue;
        }
        // any valid node lacking the anemometer-temperature-source flag means
        // the DAQ's own temperature sensor is in use
        if (PackedValuesFlags.AnemTemperatureSource != (values[i].RawFlags & PackedValuesFlags.AnemTemperatureSource)) {
            usingDaqTemp = true;
        }
        else if (!usingDaqTemp.HasValue) {
            usingDaqTemp = false;
        }
        highestValid = Math.Max(highestValid, i);
    }
    if (highestValid < 0) {
        highestValid = 3; // no node answered; fall back to the full network size
    }
    networkSize = highestValid + 1;
    _lastClock = QueryAdjustedClock(0);
    // NOTE(review): this only queries device status when _lastDaqStatusQuery is
    // NOT older than "now" — confirm the comparison is not inverted.
    _daqStat = _lastDaqStatusQuery >= now ? QueryStatus() : DaqStatusValues.Default;
    for (int i = 0; i < values.Length; i++) {
        _sensors[i].HandleObservation(values[i], networkSize, daqSafeTime);
    }
    _networkSize = networkSize;
    // only refresh the DAQ-temperature flag once any hold period has elapsed
    if (!_usingDaqTempUntil.HasValue || _usingDaqTempUntil.Value <= DateTime.Now) {
        _usingDaqTemp = usingDaqTemp;
        _usingDaqTempUntil = null;
    }
}
/// <summary>
/// Streams the stored readings for a sensor over the requested time span.
/// A negative span selects the range ending at <paramref name="from"/> and
/// returns the rows in descending stamp order.
/// </summary>
/// <param name="sensor">The sensor name key to query for.</param>
/// <param name="from">One end of the time range.</param>
/// <param name="span">The length (possibly negative) of the time range.</param>
/// <returns>The matching readings, lazily enumerated.</returns>
public IEnumerable<PackedReading> GetReadings(string sensor, DateTime from, TimeSpan span) {
    var to = from.Add(span);
    if (to < from) {
        // a negative span means "from" is really the end of the range
        var swap = to;
        to = from;
        from = swap;
    }
    if (!ForceConnectionOpen()) {
        throw new Exception("Could not open database.");
    }
    using (var command = _connection.CreateTextCommand(
        "SELECT stamp,[values] FROM Record"
        + " INNER JOIN Sensor ON (Sensor.sensorId = Record.sensorId)"
        + " WHERE Sensor.nameKey = @sensorNameKey"
        + " AND Record.stamp >= @minPosixStamp"
        + " AND Record.stamp <= @maxPosixStamp"
        + " ORDER BY stamp " + ((span < TimeSpan.Zero) ? "DESC" : "ASC")
    )) {
        command.AddParameter("minPosixStamp", DbType.Int32, UnitUtility.ConvertToPosixTime(from));
        command.AddParameter("maxPosixStamp", DbType.Int32, UnitUtility.ConvertToPosixTime(to));
        command.AddParameter("sensorNameKey", DbType.String, sensor);
        using (var reader = command.ExecuteReader()) {
            var ordStamp = reader.GetOrdinal("stamp");
            var ordValues = reader.GetOrdinal("values");
            // reusable buffer for each 8-byte packed values blob
            var rawValues = new byte[8];
            const int chunkSize = 256;
            var buffer = new List<PackedReading>(chunkSize);
            var moreRows = true;
            while (moreRows) {
                // materialize rows in chunks, then yield the chunk
                buffer.Clear();
                for (var i = 0; i < chunkSize; i++) {
                    if (!reader.Read()) {
                        moreRows = false;
                        break;
                    }
                    reader.GetBytes(ordValues, 0, rawValues, 0, rawValues.Length);
                    buffer.Add(new PackedReading(
                        UnitUtility.ConvertFromPosixTime(reader.GetInt32(ordStamp)),
                        PackedReadingValues.ConvertFromPackedBytes(rawValues, 0)
                    ));
                }
                foreach (var reading in buffer) {
                    yield return reading;
                }
            }
        }
    }
}
/// <summary>
/// Inserts (or, when <paramref name="replace"/> is set, replaces) the given
/// readings for a sensor, then updates the affected summary records.
/// </summary>
/// <param name="sensor">The sensor name key the readings belong to.</param>
/// <param name="readings">The readings to store.</param>
/// <param name="replace">When true, conflicting rows are replaced; otherwise they are ignored.</param>
/// <returns>false when the sensor is unknown; otherwise the result of the summary update.</returns>
/// <exception cref="ArgumentOutOfRangeException">When <paramref name="sensor"/> is null or empty.</exception>
public bool Push<T>(string sensor, IEnumerable<T> readings, bool replace) where T : IReading {
    if (String.IsNullOrEmpty(sensor)) {
        throw new ArgumentOutOfRangeException("sensor");
    }
    if (!ForceConnectionOpen()) {
        throw new Exception("Could not open database.");
    }
    int? sensorRecordId = null;
    // SECURITY FIX: the sensor name was previously concatenated directly into
    // the SQL text ("... nameKey = '" + sensor + "'"), allowing SQL injection;
    // it is now passed as a bound parameter.
    using (var getSensorCmd = _connection.CreateTextCommand("SELECT sensorId FROM Sensor WHERE nameKey = @sensorNameKey")) {
        getSensorCmd.AddParameter("sensorNameKey", DbType.String, sensor);
        using (var getSensorReader = getSensorCmd.ExecuteReader()) {
            if (getSensorReader.Read()) {
                sensorRecordId = getSensorReader.GetInt32(getSensorReader.GetOrdinal("sensorId"));
            }
        }
    }
    if (!sensorRecordId.HasValue) {
        return false; // unknown sensor
    }
    TimeRange insertTimeRange = default(TimeRange);
    // insert the initial values
    using (var pushRecordCommand = _connection.CreateTextCommand(
        "INSERT OR " + (replace ? "REPLACE" : "IGNORE") + " INTO Record (sensorId,stamp,[values])"
        + " VALUES (@sensorId,@stamp,@values)"
    )) {
        pushRecordCommand.AddParameter("sensorId", DbType.Int32, sensorRecordId.Value);
        DbParameter stampParam = pushRecordCommand.AddParameter("stamp", DbType.Int32, null);
        DbParameter valuesParam = pushRecordCommand.AddParameter("values", DbType.Binary, null);
        // NOTE(review): both branches cast to PackedReading, so any other T will
        // throw InvalidCastException during enumeration — confirm whether the
        // second branch was meant to handle other IReading implementations.
        insertTimeRange = typeof(T) == typeof(PackedReading)
            ? PushReadings(
                pushRecordCommand,
                readings.Cast<PackedReading>(),
                r => PackedReadingValues.ConvertToPackedBytes(r.Values),
                stampParam,
                valuesParam
            )
            : PushReadings(
                pushRecordCommand,
                readings.Cast<PackedReading>(),
                r => PackedReadingValues.ConvertToPackedBytes(r),
                stampParam,
                valuesParam
            );
    }
    return UpdateSummaryRecords(insertTimeRange, sensor, sensorRecordId.Value);
}
/// <summary>Verifies the raw field values of a reading built from a sample device byte record.</summary>
public void FromDeviceRawBytesRawValuesTest() {
    var decoded = PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0);
    var reading = new PackedReading(DateTimeSample, decoded);

    Assert.AreEqual(0x73B, reading.Values.RawWindSpeed);
    Assert.AreEqual(0x1ff, reading.Values.RawWindDirection);
    Assert.AreEqual(0x285, reading.Values.RawTemperature);
    Assert.AreEqual(0x1f9, reading.Values.RawHumidity);
    Assert.AreEqual(0xbf68, reading.Values.RawPressure);
}
/// <summary>Verifies the converted (unit) values of a reading built from a sample device byte record.</summary>
public void FromDeviceRawBytesValuesTest() {
    var decoded = PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0);
    var reading = new PackedReading(DateTimeSample, decoded);

    Assert.AreEqual(18.51, reading.WindSpeed);
    Assert.AreEqual(Double.NaN, reading.WindDirection);
    Assert.AreEqual(24.5, reading.Temperature);
    Assert.AreEqual(.505, reading.Humidity);
    Assert.AreEqual(98000, reading.Pressure);
}
/// <summary>
/// Creates an hourly summary of packed readings.
/// </summary>
/// <param name="beginStamp">The start of the hour being summarized.</param>
/// <param name="min">The minimum observed values.</param>
/// <param name="max">The maximum observed values.</param>
/// <param name="mean">The mean of the observed values.</param>
/// <param name="stddev">The sample standard deviation of the observed values.</param>
/// <param name="count">The number of readings summarized.</param>
public PackedReadingsHourSummary(
    DateTime beginStamp,
    PackedReadingValues min,
    PackedReadingValues max,
    PackedReadingValues mean,
    PackedReadingValues stddev,
    int count
) : base(beginStamp, min, max, mean, stddev, count) { }
/// <summary>Verifies converted values and per-field validity flags decoded from a sample device byte record.</summary>
public void FromDeviceRawBytesValuesTest() {
    var decoded = PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0);

    Assert.AreEqual(18.51, decoded.WindSpeed);
    Assert.AreEqual(Double.NaN, decoded.WindDirection);
    Assert.AreEqual(24.5, decoded.Temperature);
    Assert.AreEqual(.505, decoded.Humidity);
    Assert.AreEqual(98000, decoded.Pressure);

    Assert.IsTrue(decoded.IsValid);
    Assert.IsTrue(decoded.IsHumidityValid);
    Assert.IsTrue(decoded.IsTemperatureValid);
    Assert.IsTrue(decoded.IsPressureValid);
    Assert.IsTrue(decoded.IsWindSpeedValid);
    Assert.IsFalse(decoded.IsWindDirectionValid);
}
/// <summary>Verifies raw field values and flag bits decoded from a sample device byte record.</summary>
public void FromDeviceRawBytesRawValuesTest() {
    var decoded = PackedReadingValues.FromDeviceBytes(DeviceSampleData, 0);

    Assert.AreEqual(0x73B, decoded.RawWindSpeed);
    Assert.AreEqual(0x1ff, decoded.RawWindDirection);
    Assert.AreEqual(0x285, decoded.RawTemperature);
    Assert.AreEqual(0x1f9, decoded.RawHumidity);
    Assert.AreEqual(0xbf68, decoded.RawPressure);

    var expectedFlags = PackedValuesFlags.AnemTemperatureSource
        | PackedValuesFlags.Humidity
        | PackedValuesFlags.Pressure
        | PackedValuesFlags.WindSpeed;
    Assert.AreEqual(expectedFlags, decoded.RawFlags);
}
/// <summary>
/// Creates a daily summary of packed readings.
/// </summary>
/// <param name="beginStamp">The start of the day being summarized.</param>
/// <param name="min">The minimum observed values.</param>
/// <param name="max">The maximum observed values.</param>
/// <param name="mean">The mean of the observed values.</param>
/// <param name="stddev">The sample standard deviation of the observed values.</param>
/// <param name="count">The number of readings summarized.</param>
public PackedReadingsDaySummary(
    DateTime beginStamp,
    PackedReadingValues min,
    PackedReadingValues max,
    PackedReadingValues mean,
    PackedReadingValues stddev,
    int count
) : base(beginStamp, min, max, mean, stddev, count) { }
/// <summary>
/// Appends one reading record to the stream, first emitting a chunk header
/// record when the reading does not continue the current chunk.
/// </summary>
/// <param name="stamp">The time stamp of the reading.</param>
/// <param name="values">The values to write.</param>
/// <exception cref="InvalidOperationException">When the writer is closed or has no stream.</exception>
public void Write(DateTime stamp, PackedReadingValues values) {
    if (_isClosed || null == _stream) {
        throw new InvalidOperationException("Writer is not valid.");
    }
    var record = new byte[DaqDataFileInfo.RecordSize];
    // a new chunk header is needed when the stamp breaks the one-second-per-record
    // sequence, when the chunk is empty, or when the chunk is full (255 records)
    var expectedStamp = _chunkStamp.Add(new TimeSpan(TimeSpan.TicksPerSecond * _recordsInThisChunk));
    var startNewChunk = expectedStamp != stamp
        || _recordsInThisChunk == 0
        || _recordsInThisChunk >= 255;
    if (startNewChunk) {
        DaqDataFileInfo.ConvertDateTimeTo7ByteForm(stamp, record, 0);
        record[DaqDataFileInfo.RecordSize - 1] = DaqDataFileInfo.HeaderCodeByte;
        _stream.Write(record, 0, record.Length);
        _chunkStamp = stamp;
        _recordsInThisChunk = 0;
    }
    // write the data record itself
    PackedReadingValues.ToDeviceBytes(values, record, 0);
    _stream.Write(record, 0, record.Length);
    _recordsInThisChunk++;
}
/// <summary>
/// Initializes a summary of packed readings over a time period.
/// </summary>
/// <param name="beginStamp">The start of the period being summarized.</param>
/// <param name="min">The minimum observed values.</param>
/// <param name="max">The maximum observed values.</param>
/// <param name="mean">The mean of the observed values.</param>
/// <param name="sampleStandardDeviation">The sample standard deviation of the observed values.</param>
/// <param name="count">The number of readings summarized.</param>
protected PackedReadingsSummary(
    DateTime beginStamp,
    PackedReadingValues min,
    PackedReadingValues max,
    PackedReadingValues mean,
    PackedReadingValues sampleStandardDeviation,
    int count
) {
    BeginStamp = beginStamp;
    Min = min;
    Max = max;
    Mean = mean;
    SampleStandardDeviation = sampleStandardDeviation;
    Count = count;
    // histogram buckets start empty
    TemperatureCounts = new Dictionary<ushort, int>();
    PressureCounts = new Dictionary<ushort, int>();
    HumidityCounts = new Dictionary<ushort, int>();
    WindSpeedCounts = new Dictionary<ushort, int>();
    WindDirectionCounts = new Dictionary<ushort, int>();
}
/// <summary>
/// Initializes a summary of packed readings over a time period.
/// </summary>
/// <param name="beginStamp">The start of the period being summarized.</param>
/// <param name="min">The minimum observed values.</param>
/// <param name="max">The maximum observed values.</param>
/// <param name="mean">The mean of the observed values.</param>
/// <param name="sampleStandardDeviation">The sample standard deviation of the observed values.</param>
/// <param name="count">The number of readings summarized.</param>
protected PackedReadingsSummary(
    DateTime beginStamp,
    PackedReadingValues min,
    PackedReadingValues max,
    PackedReadingValues mean,
    PackedReadingValues sampleStandardDeviation,
    int count
) {
    BeginStamp = beginStamp;
    Min = min;
    Max = max;
    Mean = mean;
    SampleStandardDeviation = sampleStandardDeviation;
    Count = count;
    // histogram buckets start empty
    TemperatureCounts = new Dictionary<ushort, int>();
    PressureCounts = new Dictionary<ushort, int>();
    HumidityCounts = new Dictionary<ushort, int>();
    WindSpeedCounts = new Dictionary<ushort, int>();
    WindDirectionCounts = new Dictionary<ushort, int>();
}
/// <summary>Verifies the first three records read from the sample DAQ file stream.</summary>
public void ReadFirstThreeRecordsTest() {
    DateTime stamp;
    // decode the expected base stamp from its 7-byte on-disk form
    Assert.True(DaqDataFileInfo.TryConvert7ByteDateTime(
        new byte[] { 0x07, 0xda, 0x03, 0x0f, 0x13, 0x1e, 0x17, 0xa5 }, 0, out stamp
    ));
    // device-byte form of each of the first three expected records, in order
    var expectedRecords = new[] {
        new byte[] { 0x00, 0x01, 0x09, 0x41, 0x33, 0x60, 0x00, 0x1f },
        new byte[] { 0x00, 0x01, 0x05, 0x41, 0x33, 0x60, 0x00, 0x1f },
        new byte[] { 0x00, 0x01, 0x09, 0x41, 0x33, 0x60, 0x00, 0x1f }
    };
    using (var stream = DaqDataFileTestUtility.CreateSampleDaqFileStream()) {
        using (var reader = new DaqDataFileReader(stream)) {
            for (int recordIndex = 0; recordIndex < expectedRecords.Length; recordIndex++) {
                Assert.True(reader.MoveNext());
                Assert.AreEqual(
                    PackedReadingValues.FromDeviceBytes(expectedRecords[recordIndex], 0),
                    reader.Current.Values
                );
                // each record is stamped one second after the previous
                Assert.AreEqual(
                    stamp.Add(new TimeSpan(0, 0, 0, recordIndex + 1)),
                    reader.Current.TimeStamp
                );
            }
        }
    }
}
/// <summary>Verifies that constructing from unit (double) values produces the expected raw fields and validity flags.</summary>
public void FromDoubleValuesToRawValues() {
    var packed = new PackedReadingValues(
        // ReSharper disable RedundantArgumentName
        temperature: 24.5,
        pressure: 98000.0,
        humidity: 0.505,
        windDirection: Double.NaN,
        windSpeed: 18.51
        // ReSharper restore RedundantArgumentName
    );

    Assert.AreEqual(0x73B, packed.RawWindSpeed);
    Assert.AreEqual(0, packed.RawWindDirection);
    Assert.AreEqual(0x285, packed.RawTemperature);
    Assert.AreEqual(0x1f9, packed.RawHumidity);
    Assert.AreEqual(0xbf68, packed.RawPressure);

    Assert.IsTrue(packed.IsValid);
    Assert.IsTrue(packed.IsHumidityValid);
    Assert.IsTrue(packed.IsTemperatureValid);
    Assert.IsTrue(packed.IsPressureValid);
    Assert.IsTrue(packed.IsWindSpeedValid);
    Assert.IsFalse(packed.IsWindDirectionValid);
}
// Deletes the overlapping summary rows for a sensor from the given summary
// table, then inserts the new summaries, committing in batches of
// RecordBatchQuantity rows per transaction.
private bool PushSummaries(int sensorId, string tableName, IEnumerable<ReadingsSummary> summaries, PosixTimeRange totalRange) {
    string insertCommandText = String.Format(
        "INSERT INTO {0} (sensorId,stamp,minValues,maxValues,meanValues,stddevValues,recordCount,tempCount,pressCount,humCount,speedCount,dirCount)"
        + " VALUES (@sensorId,@stamp,@minValues,@maxValues,@meanValues,@stddevValues,@recordCount,@tempCount,@pressCount,@humCount,@speedCount,@dirCount)",
        tableName
    );
    string deleteCommandText = String.Format(
        "DELETE FROM {0} WHERE stamp >= @minStamp AND stamp <= @maxStamp AND sensorId = @sensorId",
        tableName
    );
    using (var command = _connection.CreateTextCommand(deleteCommandText)) {
        // NOTE(review): transactions here are committed but never disposed, and no
        // rollback occurs if an exception escapes this block — confirm acceptable.
        command.Transaction = _connection.BeginTransaction();
        var sensorIdParam = command.AddParameter("sensorId", DbType.Int32, sensorId);
        // only delete when there is an actual range to replace
        if (totalRange.Span != 0) {
            command.AddParameter("minStamp", DbType.Int32, totalRange.Low);
            command.AddParameter("maxStamp", DbType.Int32, totalRange.High);
            command.ExecuteNonQuery();
        }
        // reuse the same command (and its sensorId parameter) for the inserts
        command.CommandText = insertCommandText;
        var stampParam = command.AddParameter("stamp", DbType.Int32, null);
        // the four packed-values blobs are a fixed 8 bytes each
        var minValuesParam = command.AddParameter("minValues", DbType.Binary, null);
        minValuesParam.Size = 8;
        var maxValuesParam = command.AddParameter("maxValues", DbType.Binary, null);
        maxValuesParam.Size = 8;
        var meanValuesParam = command.AddParameter("meanValues", DbType.Binary, null);
        meanValuesParam.Size = 8;
        var stddevValuesParam = command.AddParameter("stddevValues", DbType.Binary, null);
        stddevValuesParam.Size = 8;
        var recordCountParam = command.AddParameter("recordCount", DbType.Int32, null);
        recordCountParam.ParameterName = "recordCount";
        // the histogram blobs are variable length; their sizes are set per row below
        var tempCountParam = command.AddParameter("tempCount", DbType.Binary, null);
        tempCountParam.Size = 0;
        var pressCountParam = command.AddParameter("pressCount", DbType.Binary, null);
        pressCountParam.Size = 0;
        var humCountParam = command.AddParameter("humCount", DbType.Binary, null);
        humCountParam.Size = 0;
        var speedCountParam = command.AddParameter("speedCount", DbType.Binary, null);
        speedCountParam.Size = 0;
        var dirCountParam = command.AddParameter("dirCount", DbType.Binary, null);
        dirCountParam.Size = 0;
        int counter = 0;
        foreach (var summary in summaries) {
            stampParam.Value = UnitUtility.ConvertToPosixTime(summary.BeginStamp);
            minValuesParam.Value = PackedReadingValues.ConvertToPackedBytes(summary.Min);
            maxValuesParam.Value = PackedReadingValues.ConvertToPackedBytes(summary.Max);
            meanValuesParam.Value = PackedReadingValues.ConvertToPackedBytes(summary.Mean);
            stddevValuesParam.Value = PackedReadingValues.ConvertToPackedBytes(summary.SampleStandardDeviation);
            recordCountParam.Value = summary.Count;
            byte[] data;
            // each histogram is packed to bytes; the parameter size tracks the blob length
            tempCountParam.Value = data = PackedReadingValues.ConvertTemperatureCountsToPackedBytes(summary.TemperatureCounts);
            tempCountParam.Size = data.Length;
            pressCountParam.Value = data = PackedReadingValues.ConvertPressureCountsToPackedBytes(summary.PressureCounts);
            pressCountParam.Size = data.Length;
            humCountParam.Value = data = PackedReadingValues.ConvertHumidityCountsToPackedBytes(summary.HumidityCounts);
            humCountParam.Size = data.Length;
            speedCountParam.Value = data = PackedReadingValues.ConvertWindSpeedCountsToPackedBytes(summary.WindSpeedCounts);
            speedCountParam.Size = data.Length;
            dirCountParam.Value = data = PackedReadingValues.ConvertWindDirectionCountsToPackedBytes(summary.WindDirectionCounts);
            dirCountParam.Size = data.Length;
            command.ExecuteNonQuery();
            counter++;
            // commit in batches so a single transaction stays bounded
            if (counter >= RecordBatchQuantity) {
                counter = 0;
                command.Transaction.Commit();
                command.Transaction = _connection.BeginTransaction();
            }
        }
        // commit whatever remains in the final partial batch
        command.Transaction.Commit();
    }
    return(true);
}
/// <summary>
/// Moves to the next record in the DAQ file.
/// </summary>
/// <returns>true when the move was successful.</returns>
public bool MoveNext() {
    if (_isClosed || _stream.Position >= _stream.Length) {
        _isClosed = true;
        return(false);
    }
    var recordData = new byte[DaqDataFileInfo.RecordSize];
    while (true) {
        // read one fixed-size record; stop permanently on EOF or a short read
        if (_stream.Position >= _stream.Length || DaqDataFileInfo.RecordSize != _stream.Read(recordData, 0, DaqDataFileInfo.RecordSize)) {
            _current = new PackedReading();
            _isClosed = true;
            return(false);
        }
        if (DaqDataFileInfo.HeaderCodeByte == recordData[DaqDataFileInfo.RecordSize - 1]) {
            // a header record: establishes the base time stamp for the chunk
            if (DaqDataFileInfo.TryConvert7ByteDateTime(recordData, 0, out _currentBaseTime)) {
                _timeBaseFromReadAhead = false;
                _chunkRecordCounter = 0;
            }
            else {
                // the header's time stamp is invalid
                if (!_timeBaseFromReadAhead) {
                    // we haven't already read ahead for a good stamp, so we
                    // need to read ahead to find the next valid time stamp
                    var positionRestore = _stream.Position;
                    var dataRecordsRead = ReadNextValidStamp(out _currentBaseTime);
                    if (dataRecordsRead < 0) {
                        return(false); // failed to find any more time stamp records, quit
                    }
                    // skipped over some data records while searching
                    if (0 != dataRecordsRead) {
                        // back-date the found stamp by one second per skipped record
                        _currentBaseTime = _currentBaseTime.Subtract(
                            new TimeSpan(0, 0, dataRecordsRead)
                        );
                        // also need to put the stream back how we found it
                        _stream.Seek(positionRestore, SeekOrigin.Begin);
                    }
                    // this is the start of a new "virtual" chunk
                    _chunkRecordCounter = 0; // virtual because invalid headers immediately following this will be ignored
                    _timeBaseFromReadAhead = true;
                }
                else {
                    ; // just skip it; we already have a read-ahead time base
                }
            }
        }
        else {
            // a data record: stamped one second per record after the chunk base
            var values = PackedReadingValues.FromDeviceBytes(recordData, 0);
            if (!values.IsValid) {
                // an invalid record still consumes its one-second slot in the chunk
                _chunkRecordCounter++;
                continue;
            }
            _current = new PackedReading(
                _currentBaseTime.Add(new TimeSpan(0, 0, _chunkRecordCounter)),
                values
            );
            _chunkRecordCounter++;
            return(true);
        }
    }
}
/// <summary>
/// Creates a reading pairing a time stamp with its packed values.
/// </summary>
/// <param name="stamp">The time the values were recorded.</param>
/// <param name="values">The packed reading values.</param>
public PackedReading(DateTime stamp, PackedReadingValues values) {
    TimeStamp = stamp;
    Values = values;
}
/// <summary>
/// Streams stored summaries of type <typeparamref name="T"/> for a sensor from
/// the given summary table over the requested time span. A negative span selects
/// the range ending at <paramref name="from"/> and returns rows in descending order.
/// </summary>
private IEnumerable<T> GetPackedSummaries<T>(string sensor, DateTime from, TimeSpan span, string tableName) where T : PackedReadingsSummary, new() {
    DateTime to = from.Add(span);
    // normalize so that from <= to
    if (to < from) {
        DateTime s = to;
        to = from;
        from = s;
    }
    if (!ForceConnectionOpen()) {
        throw new Exception("Could not open database.");
    }
    // NOTE(review): tableName is formatted directly into the SQL text; callers
    // must only pass trusted, fixed table names.
    using (var command = _connection.CreateTextCommand(String.Format(
        "SELECT stamp,minValues,maxValues,meanValues,stddevValues,recordCount"
        + ",tempCount,pressCount,humCount,speedCount,dirCount"
        + " FROM [{0}]"
        + " INNER JOIN Sensor ON (Sensor.sensorId = [{0}].sensorId)"
        + " WHERE Sensor.nameKey = @sensorNameKey"
        + " AND [{0}].stamp >= @minPosixStamp"
        + " AND [{0}].stamp < @maxPosixStamp"
        + " ORDER BY stamp " + ((span < TimeSpan.Zero) ? "DESC" : "ASC"),
        tableName
    ))) {
        command.AddParameter("minPosixStamp", DbType.Int32, UnitUtility.ConvertToPosixTime(from));
        command.AddParameter("maxPosixStamp", DbType.Int32, UnitUtility.ConvertToPosixTime(to));
        command.AddParameter("sensorNameKey", DbType.String, sensor);
        using (IDataReader reader = command.ExecuteReader()) {
            int ordStamp = reader.GetOrdinal("stamp");
            int ordMinValues = reader.GetOrdinal("minValues");
            int ordMaxValues = reader.GetOrdinal("maxValues");
            int ordMeanValues = reader.GetOrdinal("meanValues");
            int ordStddevValues = reader.GetOrdinal("stddevValues");
            int ordRecordCount = reader.GetOrdinal("recordCount");
            int ordTempCount = reader.GetOrdinal("tempCount");
            int ordPressCount = reader.GetOrdinal("pressCount");
            int ordHumCount = reader.GetOrdinal("humCount");
            int ordSpeedCount = reader.GetOrdinal("speedCount");
            int ordDirCount = reader.GetOrdinal("dirCount");
            // reusable buffer for each fixed 8-byte packed values blob
            byte[] values = new byte[8];
            while (reader.Read()) {
                reader.GetBytes(ordMinValues, 0, values, 0, values.Length);
                PackedReadingValues minValues = PackedReadingValues.ConvertFromPackedBytes(values);
                reader.GetBytes(ordMaxValues, 0, values, 0, values.Length);
                PackedReadingValues maxValues = PackedReadingValues.ConvertFromPackedBytes(values);
                reader.GetBytes(ordMeanValues, 0, values, 0, values.Length);
                PackedReadingValues meanValues = PackedReadingValues.ConvertFromPackedBytes(values);
                reader.GetBytes(ordStddevValues, 0, values, 0, values.Length);
                PackedReadingValues stddevValues = PackedReadingValues.ConvertFromPackedBytes(values);
                var summary = new T {
                    BeginStamp = UnitUtility.ConvertFromPosixTime(reader.GetInt32(ordStamp)),
                    Min = minValues,
                    Max = maxValues,
                    Mean = meanValues,
                    SampleStandardDeviation = stddevValues,
                    Count = reader.GetInt32(ordRecordCount),
                    // the histogram columns are variable-length blobs
                    TemperatureCounts = PackedReadingValues.PackedCountsToHashUnsigned16(reader.GetValue(ordTempCount) as byte[]),
                    PressureCounts = PackedReadingValues.PackedCountsToHashUnsigned16(reader.GetValue(ordPressCount) as byte[]),
                    HumidityCounts = PackedReadingValues.PackedCountsToHashUnsigned16(reader.GetValue(ordHumCount) as byte[]),
                    WindSpeedCounts = PackedReadingValues.PackedCountsToHashUnsigned16(reader.GetValue(ordSpeedCount) as byte[]),
                    WindDirectionCounts = PackedReadingValues.PackedCountsToHashUnsigned16(reader.GetValue(ordDirCount) as byte[])
                };
                yield return(summary);
            }
        }
    }
}