Example #1
        public bool CanDownload()
        {
            if (this.SampleRate != null)
            {
                var samplePeriod = new SampleRateContainer(this.SampleRate).Period.TotalSeconds;

                return(this.DateTimeBegin < this.DateTimeEnd &&
                       this.SelectedDatasets.Count > 0 &&
                       (ulong)this.FileGranularity >= samplePeriod);
            }
            else
            {
                return(false);
            }
        }
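As a minimal sketch, the guard above can be read as "the selected file granularity must span at least one sample period". The helper name below and the assumption that FileGranularity casts to a length in seconds (implied by the cast in the original) are not part of the source.

        // Sketch of the rate check above, assuming FileGranularity converts to whole seconds.
        private static bool IsGranularityCompatible(SampleRateContainer sampleRate, ulong fileGranularityInSeconds)
        {
            // Period is shown above to be a TimeSpan derived from the sample rate.
            return(fileGranularityInSeconds >= sampleRate.Period.TotalSeconds);
        }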
Example #2
 public ChannelDescription(Guid guid,
                           string channelName,
                           string datasetName,
                           string group,
                           NexusDataType dataType,
                           SampleRateContainer sampleRate,
                           string unit,
                           BufferType bufferType)
 {
     this.Guid        = guid;
     this.ChannelName = channelName;
     this.DatasetName = datasetName;
     this.Group       = group;
     this.DataType    = dataType;
     this.SampleRate  = sampleRate;
     this.Unit        = unit;
     this.BufferType  = bufferType;
 }
Example #3
        public DataReaderDoubleStream ReadAsDoubleStream(
            DatasetInfo dataset,
            DateTime begin,
            DateTime end,
            ulong upperBlockSize,
            CancellationToken cancellationToken)
        {
            var progressRecords = this.Read(new List<DatasetInfo>()
            {
                dataset
            }, begin, end, upperBlockSize, TimeSpan.FromMinutes(1), cancellationToken);
            var samplesPerSecond = new SampleRateContainer(dataset.Id).SamplesPerSecond;
            var length           = (long)Math.Round(samplesPerSecond *
                                                    (decimal)(end - begin).TotalSeconds, MidpointRounding.AwayFromZero) *
                                   NexusUtilities.SizeOf(NexusDataType.FLOAT64);

            return(new DataReaderDoubleStream(length, progressRecords));
        }
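The stream length above is simply the rounded sample count times eight bytes per FLOAT64 value. A standalone sketch of that arithmetic follows; the helper name is hypothetical, and sizeof(double) stands in for NexusUtilities.SizeOf(NexusDataType.FLOAT64).

        // Sketch of the stream-length calculation used above.
        private static long GetDoubleStreamLength(SampleRateContainer sampleRate, DateTime begin, DateTime end)
        {
            // SamplesPerSecond appears to be a decimal (see the casts in Examples #3 and #5).
            var sampleCount = (long)Math.Round(sampleRate.SamplesPerSecond * (decimal)(end - begin).TotalSeconds,
                                               MidpointRounding.AwayFromZero);

            return(sampleCount * sizeof(double)); // FLOAT64 = 8 bytes per sample
        }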
Example #4
        private void OrchestrateAggregation<T>(string targetDirectoryPath,
                                               DataReaderExtensionBase dataReader,
                                               DatasetInfo dataset,
                                               List<Aggregation> aggregations,
                                               DateTime date,
                                               bool force,
                                               CancellationToken cancellationToken) where T : unmanaged
        {
            // check source sample rate
            var sampleRate = new SampleRateContainer(dataset.Id, ensureNonZeroIntegerHz: true);

            // prepare variables
            var units   = new List<AggregationUnit>();
            var channel = (ChannelInfo)dataset.Parent;

            // prepare buffers
            foreach (var aggregation in aggregations)
            {
                var periodsToSkip = new List<int>();

                foreach (var period in aggregation.Periods)
                {
#warning Ensure that period is a sensible value

                    foreach (var entry in aggregation.Methods)
                    {
                        var method    = entry.Key;
                        var arguments = entry.Value;

                        // translate method name
                        var methodIdentifier = method switch
                        {
                            AggregationMethod.Mean => "mean",
                            AggregationMethod.MeanPolar => "mean_polar",
                            AggregationMethod.Min => "min",
                            AggregationMethod.Max => "max",
                            AggregationMethod.Std => "std",
                            AggregationMethod.Rms => "rms",
                            AggregationMethod.MinBitwise => "min_bitwise",
                            AggregationMethod.MaxBitwise => "max_bitwise",
                            AggregationMethod.SampleAndHold => "sample_and_hold",
                            AggregationMethod.Sum => "sum",
                            _ => throw new Exception($"The aggregation method '{method}' is unknown.")
                        };

                        var targetFileName = $"{channel.Id}_{period}_s_{methodIdentifier}.nex";
                        var targetFilePath = Path.Combine(targetDirectoryPath, targetFileName);

                        if (force || !File.Exists(targetFilePath))
                        {
                            var buffer = new double[86400 / period];

                            var unit = new AggregationUnit()
                            {
                                Aggregation    = aggregation,
                                Period         = period,
                                Method         = method,
                                Argument       = arguments,
                                Buffer         = buffer,
                                TargetFilePath = targetFilePath
                            };

                            units.Add(unit);
                        }
                        else
                        {
                            // skip period / method combination
                        }
                    }
                }
            }

            if (!units.Any())
            {
                return;
            }

            // process data
            var fundamentalPeriod = TimeSpan.FromMinutes(10); // ensures that the aggregation functions receive data whose length is a multiple of 10 minutes
            var endDate           = date.AddDays(1);
            var blockSizeLimit    = _aggregationChunkSizeMb * 1000 * 1000;

            // read raw data
            foreach (var progressRecord in dataReader.Read(dataset, date, endDate, blockSizeLimit, fundamentalPeriod, cancellationToken))
            {
                var dataRecord = progressRecord.DatasetToRecordMap.First().Value;

                // aggregate data
                var partialBuffersMap = this.ApplyAggregationFunction(dataset, (T[])dataRecord.Dataset, dataRecord.Status, units);

                foreach (var entry in partialBuffersMap)
                {
                    // copy aggregated data to target buffer
                    var partialBuffer = entry.Value;
                    var unit          = entry.Key;

                    Array.Copy(partialBuffer, 0, unit.Buffer, unit.BufferPosition, partialBuffer.Length);
                    unit.BufferPosition += partialBuffer.Length;
                }
            }

            // write data to file
            foreach (var unit in units)
            {
                try
                {
                    _fileAccessManager.Register(unit.TargetFilePath, cancellationToken);

                    if (File.Exists(unit.TargetFilePath))
                    {
                        File.Delete(unit.TargetFilePath);
                    }

                    // create data file
                    AggregationFile.Create<double>(unit.TargetFilePath, unit.Buffer);
                }
                finally
                {
                    _fileAccessManager.Unregister(unit.TargetFilePath);
                }
            }
        }
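The daily buffers above are sized as 86400 / period, i.e. one aggregated value per period across one day. A quick worked sketch of that sizing; the period values and helper name are hypothetical.

        // One aggregated value per period over a full day of 86400 seconds:
        //   period =  60 s -> 86400 /  60 = 1440 values per daily file
        //   period = 600 s -> 86400 / 600 =  144 values per daily file
        private static int GetDailyBufferLength(int periodInSeconds)
        {
            return(86400 / periodInSeconds);
        }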
Example #5
 private ulong TimeSpanToIndex(TimeSpan timeSpan, SampleRateContainer sampleRate)
 {
     return((ulong)(timeSpan.TotalSeconds * (double)sampleRate.SamplesPerSecond));
 }
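A hedged usage sketch of TimeSpanToIndex; the 25 Hz figure and the helper name are assumptions used only to show the arithmetic.

 // Hypothetical usage: for a container reporting 25 samples per second, a
 // 10-minute offset maps to index 600 * 25 = 15000.
 private ulong GetTenMinuteOffsetIndex(SampleRateContainer sampleRate)
 {
     return(this.TimeSpanToIndex(TimeSpan.FromMinutes(10), sampleRate));
 }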
Example #6
 public ChannelContextGroup(SampleRateContainer sampleRate, IList<ChannelContext> channelContextSet)
 {
     this.SampleRate        = sampleRate;
     this.ChannelContextSet = channelContextSet.ToList();
 }
        private ChannelDescription CreateChannelDescription(string channelName, string group, NexusDataType dataType, SampleRateContainer sampleRate, string unit)
        {
            var guid        = Guid.NewGuid();
            var datasetName = sampleRate.ToUnitString();

            return(new ChannelDescription(guid, channelName, datasetName, group, dataType, sampleRate, unit, BufferType.Simple));
        }
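A hedged usage sketch of CreateChannelDescription; the channel name, group, and unit literals are made up, and the only point is that the dataset name is derived from the sample rate via ToUnitString().

        // Hypothetical caller: all literal values below are examples only.
        private ChannelDescription DescribeTemperatureChannel(SampleRateContainer sampleRate)
        {
            return(this.CreateChannelDescription("temperature", "environment", NexusDataType.FLOAT64, sampleRate, "°C"));
        }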