public ExtendedBuffer(int length) : base(length)
{
    _buffer = MemoryPool<T>.Shared.Rent(length);
    MemoryMarshal.AsBytes(_buffer.Memory.Span).Fill(0);

    this.ElementSize = NexusUtilities.SizeOf(typeof(T));
}
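// Hypothetical standalone sketch (not part of the original source): MemoryPool<T>.Shared.Rent
// may hand out a recycled buffer that still contains stale data and whose length is only
// guaranteed to be *at least* the requested size, which is presumably why the constructor
// above clears the rented memory via MemoryMarshal.AsBytes(...).Fill(0).
using System;
using System.Buffers;
using System.Runtime.InteropServices;

public static class RentAndClearDemo
{
    public static void Main()
    {
        const int length = 1000;

        using IMemoryOwner<double> owner = MemoryPool<double>.Shared.Rent(length);

        // reinterpret the double span as raw bytes and zero it
        MemoryMarshal.AsBytes(owner.Memory.Span).Fill(0);

        Console.WriteLine($"requested: {length}, actually rented: {owner.Memory.Length}");
    }
}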
public long GetByteCount()
{
    var totalDays = (this.DateTimeEnd - this.DateTimeBegin).TotalDays;
    var frequency = string.IsNullOrWhiteSpace(this.SampleRate)
        ? 0
        : new SampleRateContainer(this.SampleRate).SamplesPerDay;

    return (long)this.GetSelectedDatasets().Sum(dataset =>
    {
        var elementSize = NexusUtilities.SizeOf(dataset.DataType);
        return frequency * totalDays * elementSize;
    });
}
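// Hypothetical worked example (not from the original source): the byte count above is
// samplesPerDay * totalDays * elementSize per selected dataset. For a single FLOAT64
// dataset sampled at 100 Hz over one day this gives 8,640,000 * 1 * 8 = 69,120,000 bytes.
using System;

public static class ByteCountExample
{
    public static void Main()
    {
        var samplesPerDay = 100.0 * 86400;   // 100 Hz
        var totalDays = 1.0;
        var elementSize = 8;                 // sizeof(double), i.e. NexusDataType.FLOAT64

        var byteCount = (long)(samplesPerDay * totalDays * elementSize);

        Console.WriteLine(byteCount);        // 69120000
    }
}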
public DataReaderDoubleStream ReadAsDoubleStream(
    DatasetInfo dataset,
    DateTime begin,
    DateTime end,
    ulong upperBlockSize,
    CancellationToken cancellationToken)
{
    var progressRecords = this.Read(new List<DatasetInfo>() { dataset }, begin, end, upperBlockSize, TimeSpan.FromMinutes(1), cancellationToken);
    var samplesPerSecond = new SampleRateContainer(dataset.Id).SamplesPerSecond;
    var length = (long)Math.Round(samplesPerSecond * (decimal)(end - begin).TotalSeconds, MidpointRounding.AwayFromZero)
        * NexusUtilities.SizeOf(NexusDataType.FLOAT64);

    return new DataReaderDoubleStream(length, progressRecords);
}
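// Hypothetical worked example (not from the original source): the stream length above is
// round(samplesPerSecond * totalSeconds, away from zero) * sizeof(double). For an assumed
// 25 Hz dataset and a 10-minute window this yields 25 * 600 * 8 = 120,000 bytes.
using System;

public static class DoubleStreamLengthExample
{
    public static void Main()
    {
        var samplesPerSecond = 25m;                       // assumed sample rate
        var begin = new DateTime(2020, 01, 01, 0, 0, 0);
        var end = begin.AddMinutes(10);

        var length = (long)Math.Round(samplesPerSecond * (decimal)(end - begin).TotalSeconds,
                                      MidpointRounding.AwayFromZero) * sizeof(double);

        Console.WriteLine(length);                        // 120000
    }
}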
private IEnumerable<DataReaderProgressRecord> InternalRead(
    List<DatasetInfo> datasets,
    DateTime begin,
    DateTime end,
    ulong blockSizeLimit,
    TimeSpan basePeriod,
    TimeSpan fundamentalPeriod,
    CancellationToken cancellationToken)
{
    /*
     *    |....................|
     *    |
     *    |
     *    |....................
     *    |
     *    |
     *    |....................
     *    |
     *    |====================
     *    |....................
     *    |
     *    |
     *    |....................|
     *
     *    |     = base period (1 minute)
     *    ...   = fundamental period (e.g. 10 minutes)
     *    |...| = begin & end markers
     *    ===   = block period
     */

    if (cancellationToken.IsCancellationRequested)
    {
        yield break;
    }

    if (!datasets.Any() || begin == end)
    {
        yield break;
    }

    // calculation
    var minutesPerFP = fundamentalPeriod.Ticks / basePeriod.Ticks;

    var bytesPerFP = datasets.Sum(dataset =>
    {
        var bytesPerSample = NexusUtilities.SizeOf(dataset.DataType);
        var samplesPerMinute = dataset.GetSampleRate().SamplesPerSecond * 60;
        var bytesPerFP = bytesPerSample * samplesPerMinute * minutesPerFP;

        return bytesPerFP;
    });

    var FPCountPerBlock = blockSizeLimit / bytesPerFP;
    var roundedFPCount = (long)Math.Floor(FPCountPerBlock);

    if (roundedFPCount < 1)
    {
        throw new Exception("The block size limit is too small.");
    }

    var maxPeriodPerRequest = TimeSpan.FromTicks(fundamentalPeriod.Ticks * roundedFPCount);

    // load data
    var period = end - begin;
    var currentBegin = begin;
    var remainingPeriod = end - currentBegin;

    while (remainingPeriod > TimeSpan.Zero)
    {
        var datasetToRecordMap = new Dictionary<DatasetInfo, DataRecord>();
        var currentPeriod = TimeSpan.FromTicks(Math.Min(remainingPeriod.Ticks, maxPeriodPerRequest.Ticks));
        var currentEnd = currentBegin + currentPeriod;
        var index = 1;
        var count = datasets.Count;

        foreach (var dataset in datasets)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                yield break;
            }

            (var data, var status) = this.ReadSingle(dataset, currentBegin, currentEnd);
            datasetToRecordMap[dataset] = new DataRecord(data, status);

            // update progress
            var localProgress = TimeSpan.FromTicks(currentPeriod.Ticks * index / count);
            var currentProgress = (currentBegin + localProgress - begin).Ticks / (double)period.Ticks;

            ((IProgress<double>)this.Progress).Report(currentProgress);
            index++;
        }

        // notify about new data
        yield return new DataReaderProgressRecord(datasetToRecordMap, currentBegin, currentEnd);

        // continue in time
        currentBegin += currentPeriod;
        remainingPeriod = end - currentBegin;
    }
}
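// Hypothetical worked example (not from the original source): how InternalRead derives
// maxPeriodPerRequest. With one FLOAT64 dataset at 100 Hz, a 10-minute fundamental period
// costs 8 * 6000 * 10 = 480,000 bytes; a 5 MB block limit therefore covers
// floor(5,000,000 / 480,000) = 10 fundamental periods, i.e. 100 minutes per request.
using System;

public static class BlockSizingExample
{
    public static void Main()
    {
        var basePeriod = TimeSpan.FromMinutes(1);
        var fundamentalPeriod = TimeSpan.FromMinutes(10);
        var blockSizeLimit = 5_000_000UL;                 // bytes

        var bytesPerSample = 8;                           // FLOAT64
        var samplesPerMinute = 100 * 60;                  // 100 Hz
        var minutesPerFP = fundamentalPeriod.Ticks / basePeriod.Ticks;

        var bytesPerFP = bytesPerSample * samplesPerMinute * minutesPerFP;
        var roundedFPCount = (long)Math.Floor(blockSizeLimit / (double)bytesPerFP);
        var maxPeriodPerRequest = TimeSpan.FromTicks(fundamentalPeriod.Ticks * roundedFPCount);

        Console.WriteLine(maxPeriodPerRequest);           // 01:40:00
    }
}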
private void OpenFile(string dataFilePath, DateTime startDateTime, List<ChannelContextGroup> channelContextGroupSet)
{
    if (File.Exists(dataFilePath))
        throw new Exception($"The file {dataFilePath} already exists. Extending an already existing file with additional channels is not supported.");

    var famosFile = new FamosFileHeader();

    // file
    var metadataGroup = new FamosFileGroup("Metadata");

    metadataGroup.PropertyInfo = new FamosFilePropertyInfo(new List<FamosFileProperty>()
    {
        new FamosFileProperty("format_version", this.FormatVersion),
        new FamosFileProperty("system_name", this.DataWriterContext.SystemName),
        new FamosFileProperty("date_time", startDateTime),
    });

    foreach (var customMetadataEntry in this.DataWriterContext.CustomMetadataEntrySet.Where(customMetadataEntry => customMetadataEntry.CustomMetadataEntryLevel == CustomMetadataEntryLevel.File))
    {
        metadataGroup.PropertyInfo.Properties.Add(new FamosFileProperty(customMetadataEntry.Key, customMetadataEntry.Value));
    }

    famosFile.Groups.Add(metadataGroup);

    // file -> project
    var projectGroup = new FamosFileGroup($"{this.DataWriterContext.ProjectDescription.PrimaryGroupName} / {this.DataWriterContext.ProjectDescription.SecondaryGroupName} / {this.DataWriterContext.ProjectDescription.ProjectName}");

    projectGroup.PropertyInfo = new FamosFilePropertyInfo(new List<FamosFileProperty>()
    {
        new FamosFileProperty("project_version", this.DataWriterContext.ProjectDescription.Version)
    });

    foreach (var customMetadataEntry in this.DataWriterContext.CustomMetadataEntrySet.Where(customMetadataEntry => customMetadataEntry.CustomMetadataEntryLevel == CustomMetadataEntryLevel.Project))
    {
        projectGroup.PropertyInfo.Properties.Add(new FamosFileProperty(customMetadataEntry.Key, customMetadataEntry.Value));
    }

    famosFile.Groups.Add(projectGroup);

    // for each context group
    foreach (var contextGroup in channelContextGroupSet)
    {
        var totalSeconds = (int)Math.Round(_settings.FilePeriod.TotalSeconds, MidpointRounding.AwayFromZero);
        var totalLength = (int)(totalSeconds * contextGroup.SampleRate.SamplesPerSecond);

        if (totalLength * (double)NexusUtilities.SizeOf(NexusDataType.FLOAT64) > 2 * Math.Pow(10, 9))
            throw new Exception(ErrorMessage.FamosWriter_DataSizeExceedsLimit);

        // file -> project -> channels
        var field = new FamosFileField(FamosFileFieldType.MultipleYToSingleEquidistantTime);

        foreach (ChannelContext channelContext in contextGroup.ChannelContextSet)
        {
            var dx = contextGroup.SampleRate.Period.TotalSeconds;
            var channel = this.PrepareChannel(field, channelContext.ChannelDescription, (int)totalLength, startDateTime, dx);

            projectGroup.Channels.Add(channel);
        }

        famosFile.Fields.Add(field);
        _spdToFieldIndexMap[contextGroup.SampleRate.SamplesPerDay] = famosFile.Fields.Count - 1;
    }

    //
    famosFile.Save(dataFilePath, _ => { });
    _famosFile = FamosFile.OpenEditable(dataFilePath);
}
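// Hypothetical worked example (not from the original source): the size guard in OpenFile
// rejects any context group whose per-file data would exceed 2 * 10^9 bytes. With a one-day
// file period, an assumed 5 kHz FLOAT64 channel needs 86400 * 5000 * 8 = 3,456,000,000 bytes
// and is rejected, while lower sample rates fit into a single file.
using System;

public static class FamosSizeLimitExample
{
    public static void Main()
    {
        var filePeriod = TimeSpan.FromDays(1);
        var samplesPerSecond = 5000.0;                    // assumed sample rate

        var totalSeconds = (int)Math.Round(filePeriod.TotalSeconds, MidpointRounding.AwayFromZero);
        var totalLength = (long)(totalSeconds * samplesPerSecond);
        var totalBytes = totalLength * 8.0;               // FLOAT64

        Console.WriteLine(totalBytes > 2 * Math.Pow(10, 9)
            ? "exceeds the writer's size limit"
            : "fits into a single file");
    }
}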