private static bool Propagate(FileStream fh, EndianBinaryReader reader, EndianBinaryWriter writer, Header header, long timestamp, ArchiveInfo higher, ArchiveInfo lower)
{
    var aggregationType = header.AggregationType;
    var xff = header.xFilesFactor;

    // Align the timestamp to the lower archive's interval boundaries.
    var lowerIntervalStart = timestamp - timestamp.Mod(lower.SecondsPerPoint);
    var lowerIntervalEnd = lowerIntervalStart + lower.SecondsPerPoint;

    // Read the higher archive's base point; it anchors the ring-buffer arithmetic.
    fh.Seek(higher.Offset, SeekOrigin.Begin);
    var higherBaseInterval = reader.ReadInt64(); // timestamp
    var higherBaseValue = reader.ReadDouble();   // value

    long higherFirstOffset;
    if (higherBaseInterval == 0)
    {
        // Empty archive: the first point lands at the start of the archive.
        higherFirstOffset = higher.Offset;
    }
    else
    {
        var timeDistance = lowerIntervalStart - higherBaseInterval;
        var pointDistance = timeDistance / higher.SecondsPerPoint;
        var byteDistance = pointDistance * PointSize;
        higherFirstOffset = higher.Offset + byteDistance.Mod(higher.Size);
    }

    // One lower-archive interval spans this many points in the higher archive.
    var higherPoints = lower.SecondsPerPoint / higher.SecondsPerPoint;
    var higherSize = higherPoints * PointSize;
    var relativeFirstOffset = higherFirstOffset - higher.Offset;
    var relativeLastOffset = (relativeFirstOffset + higherSize).Mod(higher.Size);
    var higherLastOffset = relativeLastOffset + higher.Offset;

    fh.Seek(higherFirstOffset, SeekOrigin.Begin);
    byte[] seriesBuffer;
    int bytesRead = 0;
    if (higherFirstOffset < higherLastOffset)
    {
        // The range does not wrap the archive, so a single contiguous read suffices.
        seriesBuffer = new byte[(int)(higherLastOffset - higherFirstOffset)];
        bytesRead = fh.Read(seriesBuffer, 0, seriesBuffer.Length);
    }
    else
    {
        // The range wraps past the end of the archive: read the tail, then the head.
        var higherEnd = higher.Offset + higher.Size;
        var firstPart = (int)(higherEnd - higherFirstOffset);
        var secondPart = (int)(higherLastOffset - higher.Offset);
        seriesBuffer = new byte[firstPart + secondPart];
        bytesRead += fh.Read(seriesBuffer, 0, firstPart);
        fh.Seek(higher.Offset, SeekOrigin.Begin);
        bytesRead += fh.Read(seriesBuffer, firstPart, secondPart);
    }

    var neighborValues = UnpackSeriesBuffer(seriesBuffer, bytesRead);

    // Aggregate neighborValues into a single lower-archive point, but only if
    // enough of them are known to satisfy the x-files factor.
    var knownValues = neighborValues
        .Where(x => !x.Equals(default(PointPair)) && x.Timestamp != default(long))
        .Select(x => x.value)
        .ToList(); // materialize once; the sequence is consumed several times below
    if (knownValues.Count == 0)
    {
        return false;
    }

    var knownPercent = (double)knownValues.Count / (double)neighborValues.Length;
    Debug.WriteLine(string.Format("Calculate aggregate: knownPercent = {0} for {1} points", knownPercent, knownValues.Count));
    if (knownPercent < xff)
    {
        return false;
    }

    // We have enough data to propagate a value.
    var aggregateValue = Aggregate(aggregationType, knownValues);
    fh.Seek(lower.Offset, SeekOrigin.Begin);
    var lowerBaseInterval = reader.ReadInt64(); // timestamp
    var lowerBaseValue = reader.ReadDouble();   // value
    if (lowerBaseInterval == 0)
    {
        // First propagated update to this lower archive.
        fh.Seek(lower.Offset, SeekOrigin.Begin);
        writer.Write(lowerIntervalStart);
        writer.Write(aggregateValue);
        Debug.WriteLine(string.Format("writing aggregate point ({0},{1}) to position {2} - first update", lowerIntervalStart, aggregateValue, lower.Offset));
    }
    else
    {
        // Not our first propagated update to this lower archive.
        var timeDistance = lowerIntervalStart - lowerBaseInterval;
        var pointDistance = timeDistance / lower.SecondsPerPoint;
        var byteDistance = pointDistance * PointSize;
        var lowerOffset = lower.Offset + byteDistance.Mod(lower.Size);
        Debug.WriteLine(string.Format("calculating aggregate offset int = {0} base = {1} td = {2} pd = {3} bd = {4} offset = {5}", lowerIntervalStart, lowerBaseInterval, timeDistance, pointDistance, byteDistance, lowerOffset));
        fh.Seek(lowerOffset, SeekOrigin.Begin);
        writer.Write(lowerIntervalStart);
        writer.Write(aggregateValue);
        Debug.WriteLine(string.Format("writing aggregate point ({0},{1}) to position {2}", lowerIntervalStart, aggregateValue, lowerOffset));
    }
    return true;
}
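// The base-point arithmetic appears twice in Propagate (once per archive), so a
// minimal sketch of it is isolated here for reference. SlotOffsetSketch is a
// hypothetical helper, not part of the original code path; it assumes Mod is the
// non-negative modulus extension used throughout this file.
private static long SlotOffsetSketch(ArchiveInfo archive, long baseInterval, long interval)
{
    var timeDistance = interval - baseInterval;                 // seconds since the base point
    var pointDistance = timeDistance / archive.SecondsPerPoint; // whole points since the base
    var byteDistance = pointDistance * PointSize;               // bytes from the base slot
    return archive.Offset + byteDistance.Mod(archive.Size);     // wrap within the ring buffer
}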
private static Header ReadHeader(FileStream fh)
{
    if (cachedHeaders.ContainsKey(fh.Name))
    {
        return cachedHeaders[fh.Name];
    }

    // Remember the caller's position so it can be restored after parsing.
    var originalOffset = fh.Position;
    fh.Seek(0, SeekOrigin.Begin);
    Header header;
    using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
    {
        long aggregationType;
        long maxRetention;
        double xff;
        long archiveCount;
        try
        {
            aggregationType = reader.ReadInt64();
            maxRetention = reader.ReadInt64();
            xff = reader.ReadDouble();
            archiveCount = reader.ReadInt64();
        }
        catch (Exception e)
        {
            throw new CorruptWhisperFileException("Unable to read header", fh.Name, e);
        }

        var archives = new List<ArchiveInfo>();
        for (int i = 0; i < archiveCount; i++)
        {
            try
            {
                var offset = reader.ReadInt64();
                var secondsPerPoint = reader.ReadInt64();
                var points = reader.ReadInt64();
                archives.Add(new ArchiveInfo(secondsPerPoint, points, offset));
            }
            catch (Exception e)
            {
                throw new CorruptWhisperFileException(string.Format("Unable to read archive {0} metadata", i), fh.Name, e);
            }
        }

        header = new Header((AggregationType)aggregationType, maxRetention, xff, archives);
    }

    // Restore the caller's stream position.
    fh.Seek(originalOffset, SeekOrigin.Begin);

    if (CacheHeaders)
    {
        cachedHeaders.TryAdd(fh.Name, header);
    }
    return header;
}
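// A minimal usage sketch for ReadHeader (the file name is hypothetical): the
// first call parses the on-disk header; while CacheHeaders is enabled,
// subsequent calls for the same file name are served from cachedHeaders.
//
//   using (var fh = new FileStream("metric.wsp", FileMode.Open, FileAccess.Read))
//   {
//       var header = ReadHeader(fh); // parsed from disk
//       var cached = ReadHeader(fh); // returned from the cache
//   }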