private static bool Propagate(FileStream fh, EndianBinaryReader reader, EndianBinaryWriter writer, Header header, long timestamp, ArchiveInfo higher, ArchiveInfo lower)
{
    var aggregationType = header.AggregationType;
    var xff = header.xFilesFactor;

    var lowerIntervalStart = timestamp - timestamp.Mod(lower.SecondsPerPoint);
    var lowerIntervalEnd = lowerIntervalStart + lower.SecondsPerPoint;

    fh.Seek(higher.Offset, SeekOrigin.Begin);
    var higherBaseInterval = reader.ReadInt64(); // timestamp
    var higherBaseValue = reader.ReadDouble();   // value

    long higherFirstOffset;
    if (higherBaseInterval == 0)
    {
        higherFirstOffset = higher.Offset;
    }
    else
    {
        var timeDistance = lowerIntervalStart - higherBaseInterval;
        var pointDistance = timeDistance / higher.SecondsPerPoint;
        var byteDistance = pointDistance * PointSize;
        higherFirstOffset = higher.Offset + byteDistance.Mod(higher.Size);
    }

    var higherPoints = lower.SecondsPerPoint / higher.SecondsPerPoint;
    var higherSize = higherPoints * PointSize;
    var relativeFirstOffset = higherFirstOffset - higher.Offset;
    var relativeLastOffset = (relativeFirstOffset + higherSize).Mod(higher.Size);
    var higherLastOffset = relativeLastOffset + higher.Offset;

    fh.Seek(higherFirstOffset, SeekOrigin.Begin);
    byte[] seriesBuffer;
    int bytesRead = 0;
    if (higherFirstOffset < higherLastOffset)
    {
        // We don't wrap around the archive
        seriesBuffer = new byte[(int)(higherLastOffset - higherFirstOffset)];
        bytesRead = fh.Read(seriesBuffer, 0, seriesBuffer.Length);
    }
    else
    {
        // We do wrap around the archive, so we need two reads
        var higherEnd = higher.Offset + higher.Size;
        var firstPart = (int)(higherEnd - higherFirstOffset);
        var secondPart = (int)(higherLastOffset - higher.Offset);
        seriesBuffer = new byte[firstPart + secondPart];
        bytesRead += fh.Read(seriesBuffer, 0, firstPart);
        fh.Seek(higher.Offset, SeekOrigin.Begin);
        bytesRead += fh.Read(seriesBuffer, firstPart, secondPart);
    }

    var neighborValues = UnpackSeriesBuffer(seriesBuffer, bytesRead);

    // Propagate an aggregate of neighborValues only if we have enough known points
    var knownValues = neighborValues.Where(x => !x.Equals(default(PointPair)) && x.Timestamp != default(long)).Select(x => x.value);
    if (knownValues.Count() == 0)
    {
        return false;
    }

    var knownPercent = (double)knownValues.Count() / (double)neighborValues.Length;
    Debug.WriteLine(string.Format("Calculate Aggregate xff = {0} for {1} points", knownPercent, knownValues.Count()));
    if (knownPercent >= xff)
    {
        // We have enough data to propagate a value
        var aggregateValue = Aggregate(aggregationType, knownValues);

        fh.Seek(lower.Offset, SeekOrigin.Begin);
        var lowerBaseInterval = reader.ReadInt64(); // timestamp
        var lowerBaseValue = reader.ReadDouble();   // value

        if (lowerBaseInterval == 0)
        {
            // First propagated update to this lower archive
            fh.Seek(lower.Offset, SeekOrigin.Begin);
            writer.Write(lowerIntervalStart);
            writer.Write(aggregateValue);
            Debug.WriteLine(string.Format("writing aggregate point ({0},{1}) to position {2} - first update", lowerIntervalStart, aggregateValue, lower.Offset));
        }
        else
        {
            // Not our first propagated update to this lower archive
            var timeDistance = lowerIntervalStart - lowerBaseInterval;
            var pointDistance = timeDistance / lower.SecondsPerPoint;
            var byteDistance = pointDistance * PointSize;
            var lowerOffset = lower.Offset + (byteDistance.Mod(lower.Size));
            Debug.WriteLine(string.Format("calculating aggregate offset int = {0} base = {1} td = {2} pd = {3} bd = {4} offset = {5}", lowerIntervalStart, lowerBaseInterval, timeDistance, pointDistance, byteDistance, lowerOffset));
            fh.Seek(lowerOffset, SeekOrigin.Begin);
            writer.Write(lowerIntervalStart);
            writer.Write(aggregateValue);
            Debug.WriteLine(string.Format("writing aggregate point ({0},{1}) to position {2}", lowerIntervalStart, aggregateValue, lowerOffset));
        }
        return true;
    }
    else
    {
        return false;
    }
}
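// Illustrative sketch, not part of the original source: the interval-to-offset arithmetic
// repeated in Propagate, FileUpdate and ArchiveFetch, factored into a hypothetical helper
// for clarity. It assumes the same PointSize constant and long Mod() extension used above.
// For example, if PointSize is 16 (the 8-byte timestamp plus 8-byte value read above) and
// SecondsPerPoint is 60, an interval one minute past the base point lands 16 bytes past
// archive.Offset, wrapping back to the start of the circular archive via Mod when needed.
private static long IntervalToOffset(long interval, long baseInterval, ArchiveInfo archive)
{
    var timeDistance = interval - baseInterval;                  // seconds past the base point
    var pointDistance = timeDistance / archive.SecondsPerPoint;  // whole points past the base point
    var byteDistance = pointDistance * PointSize;                // bytes past the base point
    return archive.Offset + byteDistance.Mod(archive.Size);      // wrap within the circular archive
}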
private static void FileUpdate(FileStream fh, double value, long? timestamp, long? now)
{
    var header = ReadHeader(fh);
    now = now ?? DateTime.UtcNow.ToUnixTime();
    if (!timestamp.HasValue)
    {
        timestamp = now.Value;
    }

    var diff = now - timestamp;
    if (!(diff < header.MaxRetention && diff >= 0))
    {
        throw new TimestampNotCoveredException("Timestamp not covered by any archives in this database.");
    }

    List<ArchiveInfo> lowerArchives = null;
    ArchiveInfo archive = new ArchiveInfo();
    for (int i = 0; i < header.ArchiveList.Count; i++)
    {
        archive = header.ArchiveList[i];
        // Find the highest-precision archive that covers timestamp
        if (archive.Retention < diff)
        {
            continue;
        }
        // We'll pass on the update to these lower-precision archives later
        lowerArchives = header.ArchiveList.Skip(i + 1).ToList();
        break;
    }

    using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
    using (var writer = new EndianBinaryWriter(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
    {
        // First we update the highest-precision archive
        var myInterval = timestamp.Value - (timestamp.Mod(archive.SecondsPerPoint));
        fh.Seek(archive.Offset, SeekOrigin.Begin);
        var baseInterval = reader.ReadInt64(); // timestamp
        var baseValue = reader.ReadDouble();   // value

        if (baseInterval == 0)
        {
            // This file's first update
            fh.Seek(archive.Offset, SeekOrigin.Begin);
            writer.Write(myInterval);
            writer.Write(value);
            baseInterval = myInterval;
            baseValue = value;
            Debug.WriteLine(string.Format("writing point ({0},{1}) to position {2} - first update", myInterval, value, archive.Offset));
        }
        else
        {
            // Not our first update
            var timeDistance = myInterval - baseInterval;
            var pointDistance = timeDistance / archive.SecondsPerPoint;
            var byteDistance = pointDistance * PointSize;
            var myOffset = archive.Offset + (byteDistance.Mod(archive.Size));
            Debug.WriteLine(string.Format("calculating offset int = {0} base = {1} td = {2} pd = {3} bd = {4} offset = {5}", myInterval, baseInterval, timeDistance, pointDistance, byteDistance, myOffset));
            fh.Seek(myOffset, SeekOrigin.Begin);
            writer.Write(myInterval);
            writer.Write(value);
            Debug.WriteLine(string.Format("writing point ({0},{1}) to position {2}", myInterval, value, myOffset));
        }

        // Now we propagate the update to lower-precision archives
        var higher = archive;
        foreach (var lower in lowerArchives)
        {
            if (!Propagate(fh, reader, writer, header, myInterval, higher, lower))
            {
                break;
            }
            higher = lower;
        }
    }
    fh.Flush(AutoFlush);
}
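// Illustrative sketch, not part of the original source: one way a public Update entry point
// might drive FileUpdate. The path-based signature and the FileMode/FileShare choices are
// assumptions for the example only; FileUpdate itself defaults a null 'now' (and a missing
// timestamp) to the current UTC time via ToUnixTime(), as seen above.
public static void Update(string path, double value, long? timestamp = null)
{
    using (var fh = new FileStream(path, FileMode.Open, FileAccess.ReadWrite, FileShare.Read))
    {
        FileUpdate(fh, value, timestamp, null);
    }
}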
/// <summary>
/// Fetch data from a single archive. Note that checks for validity of the time
/// period requested happen above this level, so it's possible to wrap around the
/// archive on a read and request data older than the archive's retention.
/// </summary>
private static ArchiveFetch ArchiveFetch(FileStream fh, ArchiveInfo archive, long fromTime, long untilTime)
{
    Debug.WriteLine(string.Format("ArchiveFetch from {0} to {1} in archive [{2},{3}]", fromTime, untilTime, archive.SecondsPerPoint, archive.Points));

    var fromInterval = (fromTime - (fromTime.Mod(archive.SecondsPerPoint))) + (int)archive.SecondsPerPoint;
    var untilInterval = (untilTime - (untilTime.Mod(archive.SecondsPerPoint))) + (int)archive.SecondsPerPoint;

    fh.Seek(archive.Offset, SeekOrigin.Begin);
    using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
    {
        var baseInterval = reader.ReadInt64(); // timestamp
        var baseValue = reader.ReadDouble();   // value

        if (baseInterval == 0)
        {
            // The archive has never been written to; return a list of zeroed points
            var step = archive.SecondsPerPoint;
            var points = (int)((untilInterval - fromInterval) / step);
            var _timeInfo = new TimeInfo(fromInterval, untilInterval, archive.SecondsPerPoint);
            var _valueList = Enumerable.Repeat(new PointPair(0, 0), points).ToList();
            return new ArchiveFetch(_timeInfo, _valueList);
        }

        // Determine fromOffset
        var timeDistance = fromInterval - baseInterval;
        var pointDistance = timeDistance / archive.SecondsPerPoint;
        var byteDistance = pointDistance * PointSize;
        var fromOffset = (int)(archive.Offset + (byteDistance.Mod(archive.Size)));

        // Determine untilOffset
        timeDistance = untilInterval - baseInterval;
        pointDistance = timeDistance / archive.SecondsPerPoint;
        byteDistance = pointDistance * PointSize;
        var untilOffset = (int)(archive.Offset + (byteDistance.Mod(archive.Size)));

        // Read all the points in the interval
        fh.Seek(fromOffset, SeekOrigin.Begin);
        byte[] seriesBuffer;
        int bytesRead = 0;
        if (fromOffset < untilOffset)
        {
            // We don't wrap around the archive
            seriesBuffer = new byte[(int)(untilOffset - fromOffset)];
            bytesRead += fh.Read(seriesBuffer, 0, seriesBuffer.Length);
            if (bytesRead != seriesBuffer.Length)
            {
                throw new CorruptWhisperFileException(string.Format("read: {0} != {1}", bytesRead, seriesBuffer.Length));
            }
            Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (bytesRead / PointSize), fromOffset));
        }
        else
        {
            // We do wrap around the archive, so we need two reads
            var archiveEnd = archive.Offset + archive.Size;
            var firstPart = (int)(archiveEnd - fromOffset);
            var secondPart = (int)(untilOffset - archive.Offset);
            seriesBuffer = new byte[firstPart + secondPart];
            bytesRead += fh.Read(seriesBuffer, 0, firstPart);
            Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (firstPart / PointSize), fromOffset));
            fh.Seek(archive.Offset, SeekOrigin.Begin);
            bytesRead += fh.Read(seriesBuffer, firstPart, secondPart);
            Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (secondPart / PointSize), archive.Offset));
        }

        var valueList = UnpackSeriesBuffer(seriesBuffer, bytesRead);
        var timeInfo = new TimeInfo(fromInterval, untilInterval, archive.SecondsPerPoint);
        return new ArchiveFetch(timeInfo, valueList.Where(x => !x.Equals(default(PointPair)) && x.Timestamp != default(long)).ToList());
    }
}
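// Illustrative sketch, not part of the original source: choosing which archive to read for a
// fetch, mirroring the archive-selection loop in FileUpdate. The method name and the 'now'
// parameter are hypothetical; ArchiveList is assumed (as in FileUpdate) to be ordered from
// highest to lowest precision.
private static ArchiveFetch FetchFromBestArchive(FileStream fh, Header header, long fromTime, long untilTime, long now)
{
    var diff = now - fromTime;
    foreach (var archive in header.ArchiveList)
    {
        // Use the highest-precision archive whose retention reaches back to fromTime
        if (archive.Retention >= diff)
        {
            return ArchiveFetch(fh, archive, fromTime, untilTime);
        }
    }
    // No archive reaches back far enough; callers above this level are expected to have
    // clamped fromTime to the database's maximum retention
    return null;
}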