/// <summary>
/// Creates a data bucket from an existing stored artifact (local file or remote blob).
/// </summary>
/// <param name="dimensionSet">Dimension set for the bucket's data.</param>
/// <param name="fullFilenameOrBlobName">Can be a full filename or Blobname. Set the other param accordingly</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
/// <param name="storagePath">Null if fullFilename given, otherwise, carries a value</param>
public DataBucket(DimensionSet dimensionSet, string fullFilenameOrBlobName,
                  RecyclableMemoryStreamManager memoryStreamManager, string storagePath)
{
    DateTime bucketStart;
    DateTime bucketEnd;

    var haveFullFilename = (storagePath == null);
    if (haveFullFilename)
    {
        // Caller handed us a full path; derive the blob name from it.
        if (!GetTimestampsFromFullFilename(fullFilenameOrBlobName, out bucketStart, out bucketEnd))
        {
            throw new ArgumentException("Invalid filename", "fullFilenameOrBlobName");
        }

        this.Filename = fullFilenameOrBlobName;
        this.Blobname = GetBlobnameFromFilename(fullFilenameOrBlobName);
    }
    else
    {
        // Caller handed us a blob name; derive the on-disk filename from it.
        if (!GetTimestampsFromFilename(fullFilenameOrBlobName, out bucketStart, out bucketEnd))
        {
            throw new ArgumentException("Invalid blobname", "fullFilenameOrBlobName");
        }

        this.Blobname = fullFilenameOrBlobName;
        this.Filename = GetFilenameFromBlobName(fullFilenameOrBlobName, storagePath);
    }

    this.DimensionSet = dimensionSet;
    this.memoryStreamManager = memoryStreamManager;
    this.StartTime = bucketStart;
    this.TimeSpan = bucketEnd - bucketStart;
    this.Sealed = false;
}
/// <summary>
/// Initializes a keyed data store, optionally hydrating it from a serialized stream.
/// </summary>
/// <param name="dimensionSet">Dimension set used to interpret keys.</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
/// <param name="initialData">Optional serialized data; disposed here if it carries no usable payload.</param>
/// <param name="dataKeyCount">Number of keys in <paramref name="initialData"/>; data is only loaded when positive.</param>
/// <param name="initialDataType">Persisted type of the serialized data.</param>
/// <param name="sourceTag">Diagnostic tag; only recorded when COLLECT_STACKS builds are used.</param>
public KeyedDataStore(DimensionSet dimensionSet, RecyclableMemoryStreamManager memoryStreamManager,
                      MemoryStream initialData, int dataKeyCount, PersistedDataType initialDataType,
                      string sourceTag = "unknown with data")
{
    // Capture the creation stack only in diagnostic builds (used to hunt down leaked stores);
    // release builds pay nothing.
    this.allocationStack =
#if COLLECT_STACKS
        sourceTag + " " + Environment.StackTrace;
#else
        string.Empty;
#endif
    this.DimensionSet = dimensionSet;
    this.memoryStreamManager = memoryStreamManager;

    if (initialData != null && dataKeyCount > 0)
    {
        this.mergedData = this.CreateDataFromStream(initialData, initialDataType, dataKeyCount);
    }
    else
    {
        // No usable payload: start with an empty single-value view and release any provided stream.
        this.mergedData = new QueryableSingleValueData(null, DimensionSet.Empty);
        if (initialData != null)
        {
            initialData.Dispose();
        }
    }

    this.multiValue = (new TInternal()).MultiValue;
}
/// <summary>
/// Creates a named data set backed by the given storage path.
/// </summary>
/// <param name="name">Name of the data set.</param>
/// <param name="storagePath">Directory for persisted buckets (may be null per callers' usage).</param>
/// <param name="dimensionSet">Dimension set for the data.</param>
/// <param name="properties">Shared configuration for all data sets.</param>
public DataSet(string name, string storagePath, DimensionSet dimensionSet,
               ISharedDataSetProperties properties)
{
    this.Name = name;
    this.storagePath = storagePath;
    this.DimensionSet = dimensionSet;
    this.properties = properties;

    // Buckets are kept in descending timestamp order (newest first).
    this.data = new SortedList<DateTime, DataBucket<TInternal>>(
        Comparer<DateTime>.Create((left, right) => right.CompareTo(left)));
}
/// <summary>
/// Base initialization for queryable data; records the dimension set used to interpret keys.
/// </summary>
/// <param name="dimensionSet">Dimension set used to interpret this data's keys.</param>
protected QueryableData(DimensionSet dimensionSet)
{
    // Allocation stack is captured only in diagnostic builds to help find undisposed instances.
    this.allocationStack =
#if COLLECT_STACKS
        Environment.StackTrace;
#else
        string.Empty;
#endif
    this.DimensionSet = dimensionSet;
}
/// <summary>
/// Creates an aggregator which collects persisted data of the given name from the supplied
/// source machines over the window [startTime, endTime).
/// </summary>
/// <param name="name">Name of the data to aggregate; must not be null or whitespace.</param>
/// <param name="dimensionSet">Optional dimension set; copied when supplied.</param>
/// <param name="sources">Source machine names; at least one valid name is required.</param>
/// <param name="startTime">Start of the aggregation window; must precede <paramref name="endTime"/>.</param>
/// <param name="endTime">End of the aggregation window.</param>
/// <param name="streamManager">Manager used to obtain recyclable memory streams; must not be null.</param>
public PersistedDataAggregator(string name, DimensionSet dimensionSet, IEnumerable<string> sources,
                               DateTime startTime, DateTime endTime,
                               RecyclableMemoryStreamManager streamManager)
{
    if (string.IsNullOrWhiteSpace(name))
    {
        // BUGFIX: previously the invalid *value* of 'name' was passed as the ArgumentException
        // paramName argument; pass the parameter's name instead (consistent with the other throws).
        throw new ArgumentException("Data name is invalid", "name");
    }

    this.Name = name;
    this.Port = Protocol.DefaultServerPort; // TODO: move to ServerInfo

    if (dimensionSet != null)
    {
        // Copy to insulate this aggregator from later mutation of the caller's dimension set.
        this.DimensionSet = new DimensionSet(dimensionSet);
    }

    this.Sources = new List<PersistedDataSource>();
    foreach (var machine in sources)
    {
        if (string.IsNullOrWhiteSpace(machine))
        {
            throw new ArgumentException("Invalid source name supplied", "sources");
        }

        this.Sources.Add(new PersistedDataSource(machine, PersistedDataSourceStatus.Unknown));
    }

    if (this.Sources.Count == 0)
    {
        throw new ArgumentException("Must supply one or more source", "sources");
    }

    if (startTime >= endTime)
    {
        throw new ArgumentException("startTime must be less than endTime", "startTime");
    }

    if (streamManager == null)
    {
        throw new ArgumentNullException("streamManager");
    }

    this.StartTime = startTime;
    this.EndTime = endTime;
    this.memoryStreamManager = streamManager;

    // Single shared client for this aggregator's lifetime; redirects are surfaced, payloads
    // may arrive deflate/gzip compressed.
    this.httpClient = new HttpClient(
        new WebRequestHandler
        {
            AllowAutoRedirect = false,
            AllowPipelining = true,
            AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip,
        });

    this.MaxFanout = DefaultMaxFanout;
}
/// <summary>
/// Validates the arguments shared by the counter-creation entry points.
/// </summary>
/// <param name="name">Proposed counter name; must satisfy Protocol.IsValidCounterName.</param>
/// <param name="dimensionSet">Dimension set for the counter; must not be null.</param>
private static void CheckCreateCounter(string name, DimensionSet dimensionSet)
{
    var nameIsValid = Protocol.IsValidCounterName(name);
    if (!nameIsValid)
    {
        throw new ArgumentException("Counter name is invalid.", "name");
    }

    if (dimensionSet == null)
    {
        throw new ArgumentNullException("dimensionSet");
    }
}
/// <summary>
/// Builds a header describing a persisted data payload.
/// </summary>
/// <param name="name">Name of the data.</param>
/// <param name="start">Start time; treated as having a zero UTC offset.</param>
/// <param name="end">End time; treated as having a zero UTC offset.</param>
/// <param name="dataType">Persisted type of the payload.</param>
/// <param name="sources">Sources contributing to the payload; snapshotted into a new list.</param>
/// <param name="dimensionSet">Dimension set describing the payload's keys.</param>
/// <param name="dataCount">Number of data objects in the payload.</param>
public PersistedDataHeader(string name, DateTime start, DateTime end, PersistedDataType dataType,
                           IEnumerable<PersistedDataSource> sources, DimensionSet dimensionSet,
                           uint dataCount)
{
    this.Name = name;
    this.DataType = dataType;
    this.DimensionSet = dimensionSet;
    this.DataCount = dataCount;

    // Both endpoints are stamped with a zero offset.
    this.StartTime = new DateTimeOffset(start, TimeSpan.Zero);
    this.EndTime = new DateTimeOffset(end, TimeSpan.Zero);

    // Snapshot so later changes to the caller's sequence are not observed here.
    this.Sources = new List<PersistedDataSource>(sources);
}
/// <summary>
/// Wraps a serialized stream of single-value keyed data for querying.
/// </summary>
/// <param name="source">Serialized data, or null for an empty data set.</param>
/// <param name="dimensionSet">Dimension set used to interpret keys.</param>
/// <param name="writable">Whether the underlying buffer may be modified.</param>
public QueryableSingleValueData(MemoryStream source, DimensionSet dimensionSet, bool writable = false)
    : base(dimensionSet)
{
    this.sourceStream = source;

    // A null source yields an empty (zero-length) view over no buffer.
    this.Data = source == null
                    ? new BufferedKeyedData<long>(null, 0, 0, dimensionSet)
                    : new BufferedKeyedData<long>(source.GetBuffer(), 0, (int)source.Length,
                                                  dimensionSet, writable);
}
/// <summary>
/// Creates and registers a histogram counter, loading any previously stored data for it.
/// </summary>
/// <param name="name">Counter name; must be a valid counter name.</param>
/// <param name="dimensions">Dimensions for the counter; must not be null.</param>
/// <returns>The newly created counter.</returns>
public async Task<HistogramCounter> CreateHistogramCounter(string name, DimensionSet dimensions)
{
    CheckCreateCounter(name, dimensions);
    var path = this.CreateDirectoryForDataSet(name);
    var dataSet = new DataSet<InternalHistogram>(name, path, dimensions, this);
    if (path != null)
    {
        // Task.Run instead of Task.Factory.StartNew: StartNew schedules onto
        // TaskScheduler.Current, which may be a non-default scheduler at this call site;
        // Task.Run always offloads to the default thread pool scheduler.
        await Task.Run((Action)dataSet.LoadStoredData);
    }

    var counter = new HistogramCounter(dataSet);
    this.AddCounter(counter);
    return counter;
}
/// <summary>
/// Creates a fresh, empty bucket covering the quantum which contains the given timestamp.
/// </summary>
/// <param name="dimensionSet">Dimension set for the bucket's data.</param>
/// <param name="timestamp">Any timestamp inside the desired bucket; converted to UTC.</param>
/// <param name="timeSpanInTicks">Width of the bucket's quantum in ticks.</param>
/// <param name="storagePath">Optional directory for the bucket's backing file.</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
public DataBucket(DimensionSet dimensionSet, DateTime timestamp, long timeSpanInTicks,
                  string storagePath, RecyclableMemoryStreamManager memoryStreamManager)
{
    var utcTimestamp = timestamp.ToUniversalTime();

    this.DimensionSet = dimensionSet;
    this.memoryStreamManager = memoryStreamManager;
    this.TimeSpan = TimeSpan.FromTicks(timeSpanInTicks);
    // Snap the start time to the containing bucket boundary.
    this.StartTime = RoundTimeStampToBucketKey(utcTimestamp, timeSpanInTicks);

    this.Blobname = GenerateBlobname(this.StartTicks, this.EndTicks);
    if (storagePath != null)
    {
        this.Filename = Path.Combine(storagePath, GenerateFilename(this.StartTicks, this.EndTicks));
    }

    this.data = new KeyedDataStore<TInternal>(this.DimensionSet, this.memoryStreamManager, "db ctor");
}
/// <summary>
/// Merges data received from one or more remote sources into this bucket.
/// </summary>
/// <param name="sourceList">Names of the sources which produced this data; each must have been pre-declared and not yet ingested.</param>
/// <param name="sourceDimensions">Dimension set describing the incoming data.</param>
/// <param name="sourceData">Incoming data; ownership is taken (it is either adopted or merged-and-disposed).</param>
/// <exception cref="InvalidOperationException">The bucket is sealed, a source is unknown, or a source was already ingested.</exception>
public void UpdateDataFromSources(IList<string> sourceList, DimensionSet sourceDimensions,
                                  KeyedDataStore<TInternal> sourceData)
{
    if (this.Sealed)
    {
        throw new InvalidOperationException("Attempt to write to sealed bucket.");
    }

    foreach (var s in sourceList)
    {
        // Below we do some sanity checking to make sure that we're not ingesting data we were already given,
        // or ingesting data from a source that wasn't pre-declared as an input. Either of these would indicate
        // an upstream logic fault.
        var source = this.FindSource(s);
        if (source == null)
        {
            throw new InvalidOperationException("Adding data from previously unknown source " + s);
        }

        if (source.Status != PersistedDataSourceStatus.Unknown)
        {
            throw new InvalidOperationException("Double adding data from source " + s);
        }

        source.Status = PersistedDataSourceStatus.Available;
    }

    using (SharedLock.OpenExclusive(this.dataAccessLock))
    {
        // Ensure any persisted data is resident before deciding whether to adopt or merge.
        this.Load();
        if (this.data == null || this.data.Empty)
        {
            // Adopt the incoming store wholesale.
            // NOTE(review): a non-null-but-empty this.data is replaced without Dispose here --
            // confirm an empty KeyedDataStore holds no resources needing disposal.
            this.DimensionSet = sourceDimensions;
            this.data = sourceData;
        }
        else
        {
            this.data.TakeData(sourceData);
            sourceData.Dispose();
        }

        this.dirty = true;
    }
}
/// <summary>
/// Factory producing the aggregator implementation appropriate for the given sample type.
/// </summary>
/// <param name="dataType">Persisted sample type; only HitCount and VariableEncodedHistogram are supported.</param>
/// <param name="name">Name of the data to aggregate.</param>
/// <param name="dimensionSet">Dimension set for the data.</param>
/// <param name="sources">Source machines to aggregate from.</param>
/// <param name="startTime">Start of the aggregation window.</param>
/// <param name="endTime">End of the aggregation window.</param>
/// <param name="streamManager">Manager used to obtain recyclable memory streams.</param>
/// <returns>An aggregator specialized for the sample type.</returns>
public static IPersistedDataAggregator CreateAggregatorForSampleType(
    MetricSystem.PersistedDataType dataType, string name, DimensionSet dimensionSet,
    IEnumerable<string> sources, DateTime startTime, DateTime endTime,
    RecyclableMemoryStreamManager streamManager)
{
    if (dataType == MetricSystem.PersistedDataType.HitCount)
    {
        return new PersistedDataAggregator<InternalHitCount>(
            name, dimensionSet, sources, startTime, endTime, streamManager);
    }

    if (dataType == MetricSystem.PersistedDataType.VariableEncodedHistogram)
    {
        return new PersistedDataAggregator<InternalHistogram>(
            name, dimensionSet, sources, startTime, endTime, streamManager);
    }

    throw new ArgumentException("Invalid data type", "dataType");
}
/// <summary>
/// Ctor.
/// </summary>
/// <param name="stream">Stream to read from; must support seeking.</param>
/// <param name="memoryStreamManager">RecyclableMemoryStream manager to use for getting memory streams.</param>
/// <param name="targetDimensionSet">Target dimension set to convert keys to (may be null for no conversion).</param>
public PersistedDataReader(Stream stream, RecyclableMemoryStreamManager memoryStreamManager,
                           DimensionSet targetDimensionSet)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    if (memoryStreamManager == null)
    {
        throw new ArgumentNullException("memoryStreamManager");
    }

    // Reading repositions within the payload, so a forward-only stream cannot be used.
    if (!stream.CanSeek)
    {
        throw new NotSupportedException("Must provide a Stream which supports seek operations.");
    }

    this.sourceStream = stream;
    this.memoryStreamManager = memoryStreamManager;
    this.TargetDimensionSet = targetDimensionSet;
}
/// <summary>
/// Returns a key whose every dimension holds the wildcard value for the given dimension set.
/// </summary>
/// <param name="dimensionSet">Dimension set determining the key depth; must not be null.</param>
/// <returns>A fully wildcarded key of matching depth.</returns>
public static Key GetWildcardKey(DimensionSet dimensionSet)
{
    if (dimensionSet == null)
    {
        throw new ArgumentNullException("dimensionSet");
    }

    var depth = dimensionSet.dimensions.Length;
    if (depth < WildcardKeys.Length)
    {
        // Shallow keys come from the pre-built cache.
        return WildcardKeys[depth];
    }

    // Deeper than the cache: build one on demand.
    var key = new Key(new uint[depth]);
    for (var dimension = 0; dimension < depth; ++dimension)
    {
        key[dimension] = WildcardDimensionValue;
    }

    return key;
}
/// <summary>
/// Creates a writer which emits persisted data to the given seekable stream, writing the
/// header immediately.
/// </summary>
/// <param name="stream">Destination stream; must support seeking.</param>
/// <param name="dimensionSet">Dimension set describing the data to be written.</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
public PersistedDataWriter(Stream stream, DimensionSet dimensionSet,
                           RecyclableMemoryStreamManager memoryStreamManager)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    if (dimensionSet == null)
    {
        throw new ArgumentNullException("dimensionSet");
    }

    if (!stream.CanSeek)
    {
        throw new NotSupportedException("Stream must be able to seek.");
    }

    this.sourceStream = stream;
    this.dimensionSet = dimensionSet;
    this.memoryStreamManager = memoryStreamManager;
    this.sourceStreamWriter = new WriterStream(this.sourceStream, this.memoryStreamManager);

    // Header goes out eagerly so data can be appended immediately afterwards.
    this.WriteHeader();
}
/// <summary>
/// Creates a compacted bucket by folding several finer-grained, sealed buckets into a single
/// bucket covering a larger quantum. The resulting bucket is sealed.
/// </summary>
/// <param name="buckets">Sealed buckets to compact; each must cover a strictly smaller quantum.</param>
/// <param name="dimensionSet">Dimension set for the new bucket.</param>
/// <param name="timestamp">Any timestamp inside the new bucket's quantum.</param>
/// <param name="timeSpanInTicks">Width of the new bucket's quantum in ticks.</param>
/// <param name="storagePath">Optional directory for the bucket's backing file.</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
public DataBucket(IEnumerable<DataBucket<TInternal>> buckets, DimensionSet dimensionSet,
                  DateTime timestamp, long timeSpanInTicks, string storagePath,
                  RecyclableMemoryStreamManager memoryStreamManager)
    : this(dimensionSet, timestamp, timeSpanInTicks, storagePath, memoryStreamManager)
{
    foreach (var sourceBucket in buckets)
    {
        // Only strictly smaller quanta may be folded in.
        if (this.TimeSpan <= sourceBucket.TimeSpan)
        {
            Events.Write.AttemptToCompactIncompatibleQuanta(sourceBucket.TimeSpan.Ticks,
                                                            this.TimeSpan.Ticks);
            throw new ArgumentException("Cannot compact bucket of equal or smaller quantum");
        }

        if (!sourceBucket.Sealed)
        {
            throw new ArgumentException("Attempted to compact data from unsealed bucket " + sourceBucket);
        }

        this.UpdateFromDataBucket(sourceBucket);
    }

    this.Seal();
}
/// <summary>
/// Wraps separate key and value streams of multi-value data for querying. Takes ownership of
/// both streams (they are disposed immediately when there are no keys).
/// </summary>
/// <param name="keyStream">Serialized keys.</param>
/// <param name="valueStream">Serialized values.</param>
/// <param name="dimensionSet">Dimension set used to interpret keys.</param>
public QueryableMultiValueData(MemoryStream keyStream, MemoryStream valueStream, DimensionSet dimensionSet)
    : base(dimensionSet)
{
    if (keyStream.Length == 0)
    {
        // No keys at all: expose an empty view and release both streams now.
        this.keys = new BufferedKeyedData<uint>(null, 0, 0, dimensionSet);
        keyStream.Dispose();
        valueStream.Dispose();
        return;
    }

    this.keyStream = keyStream;
    this.valueStream = valueStream;
    this.keys = new BufferedKeyedData<uint>(keyStream.GetBuffer(), 0, (int)keyStream.Length, dimensionSet);
    this.values = BufferedValueArray.Create(valueStream.GetBuffer(), 0, (int)valueStream.Length);
}
/// <summary>
/// Prepares this data for merging; concrete implementations convert their keys to the target
/// dimension set and sort them.
/// </summary>
/// <param name="targetDimensionSet">Dimension set to convert keys to prior to the merge.</param>
public abstract void PrepareForMerge(DimensionSet targetDimensionSet);
/// <summary>
/// Converts this data's keys to the target dimension set and sorts them so a linear merge can
/// be performed.
/// </summary>
/// <param name="targetDimensionSet">Dimension set to convert keys to prior to the merge.</param>
public override void PrepareForMerge(DimensionSet targetDimensionSet)
{
    // Parameter renamed from 'desiredDimensionSet' to match the abstract base declaration and
    // the sibling override (CA1725); named-argument callers use the base parameter name.
    this.keys.Convert(targetDimensionSet);
    this.keys.Sort();
}
/// <summary>
/// Converts this data's keys to the target dimension set and sorts them so a linear merge can
/// be performed.
/// </summary>
/// <param name="targetDimensionSet">Dimension set to convert keys to prior to the merge.</param>
public override void PrepareForMerge(DimensionSet targetDimensionSet)
{
    this.Data.Convert(targetDimensionSet);
    this.Data.Sort();
}
/// <summary>
/// Wraps a single combined stream laid out as [keys][values]. Takes ownership of the stream
/// (it is disposed immediately when empty).
/// </summary>
/// <param name="type">Persisted data type used to decode the value portion.</param>
/// <param name="source">Combined key+value stream.</param>
/// <param name="dimensionSet">Dimension set used to interpret keys.</param>
/// <param name="keyCount">Number of keys stored at the front of the stream.</param>
public QueryableMultiValueData(PersistedDataType type, MemoryStream source, DimensionSet dimensionSet,
                               int keyCount)
    : base(dimensionSet)
{
    if (source.Length == 0)
    {
        this.keys = new BufferedKeyedData<uint>(null, 0, 0, dimensionSet);
        source.Dispose();
        return;
    }

    this.keyStream = source;
    this.valueStream = null;

    // Compute where the key portion ends; everything after it is value data.
    var keyBytes = (int)BufferedKeyedData<uint>.GetBufferSizeForKeyCount(keyCount, dimensionSet);
    var buffer = source.GetBuffer();
    var totalBytes = (int)source.Length;

    this.keys = new BufferedKeyedData<uint>(buffer, 0, keyBytes, dimensionSet);
    this.values = BufferedValueArray.Create(type, buffer, keyBytes, totalBytes - keyBytes);
}
/// <summary>
/// Reads the next data header from the stream, positioning the reader to consume that block's
/// data objects. Returns false when the stream is exhausted.
/// </summary>
/// <returns>True if a header was read; false at end of stream.</returns>
/// <exception cref="PersistedDataException">
/// Pending data objects were not consumed, the protocol version is unsupported, or the stream
/// data appears truncated.
/// </exception>
public bool ReadDataHeader()
{
    Events.Write.BeginReadPersistedDataHeader();
    if (this.pendingObjects != 0)
    {
        throw new PersistedDataException("Attempted to read data header without reading pending data objects");
    }

    this.usePreviousProtocol = false;
    if (this.nextHeaderOffset > 0)
    {
        // A previously read block told us where the next one starts; seek there (or stop at EOF).
        if (this.nextHeaderOffset >= this.sourceStream.Length)
        {
            return(false);
        }

        this.sourceStream.Position = this.nextHeaderOffset;
    }

    using (var readerStream = new ReaderStream(this.sourceStream, this.memoryStreamManager, false))
    {
        try
        {
            var version = readerStream.ReadUInt16();
            if (version != PersistedDataProtocol.ProtocolVersion)
            {
                if (version == PersistedDataProtocol.PreviousProtocolVersion)
                {
                    // One version of backwards compatibility is handled via the legacy header path below.
                    this.usePreviousProtocol = true;
                }
                else
                {
                    throw new PersistedDataException("Attempted to read protocol data of unsupported version.");
                }
            }

            this.Version = version;
        }
        catch (EndOfStreamException)
        {
            // This is dumb but we don't have a better way to handle this condition. It's okay if we're at the end
            // of the stream when reading the version header, it just means we don't have more data in the payload.
            return(false);
        }

        try
        {
            var blockLength = readerStream.ReadUInt64();
            // Remember where the following block begins so the next call can seek past this one.
            this.nextHeaderOffset = this.sourceStream.Position + (long)blockLength;
            if (this.usePreviousProtocol)
            {
                this.Header = this.LoadLegacyHeader();
            }
            else
            {
                this.Header = this.LoadHeader();
            }

            if (this.TargetDimensionSet == null)
            {
                // No conversion target was requested; use the header's own dimension set as-is.
                this.TargetDimensionSet = this.DimensionSet;
            }

            this.pendingObjects = this.Header.DataCount;
            Events.Write.EndReadPersistedDataHeader();
            return(true);
        }
        catch (Exception ex)
        {
            if (ex is EndOfStreamException || ex is InvalidDataException)
            {
                throw new PersistedDataException("Stream data may be truncated", ex);
            }

            throw;
        }
    }
}
/// <summary>
/// Creates an empty keyed data store (no initial serialized data).
/// </summary>
/// <param name="dimensionSet">Dimension set used to interpret keys.</param>
/// <param name="memoryStreamManager">Manager used to obtain recyclable memory streams.</param>
/// <param name="sourceTag">Diagnostic tag; only recorded when COLLECT_STACKS builds are used.</param>
public KeyedDataStore(DimensionSet dimensionSet, RecyclableMemoryStreamManager memoryStreamManager,
                      string sourceTag = "unknown")
    : this(dimensionSet, memoryStreamManager, null, 0, PersistedDataType.Unknown, sourceTag)
{
}