Example #1
        public MainViewModel(IRuntimeService runtime)
        {
            _log.Debug("Creating view model instance");

            Runtime     = runtime;
            DisplayName = Settings.Default.PluginDisplayName;
            DataObjects = new BindableCollection<string>()
            {
                QueryTemplateText
            };
            DataObject = QueryTemplateText;

            // Create the model for our witsml settings
            Model = new WitsmlSettings();

            // Create documents used by the Avalon editors on the query/result tabs.
            XmlQuery = new TextEditorViewModel(runtime, "XML")
            {
                IsPrettyPrintAllowed = true
            };
            QueryResults = new TextEditorViewModel(runtime, "XML", true)
            {
                IsPrettyPrintAllowed = true
            };
            Messages     = new TextDocument();
            SoapMessages = new TextDocument();

            // Create a default client proxy object.
            Proxy = CreateProxy();

            // Create view models displayed within this view model.
            RequestControl = new RequestViewModel(Runtime, XmlQuery);
            ResultControl  = new ResultViewModel(Runtime, QueryResults, Messages, SoapMessages);

            // Handle notifications for our witsml settings model changes
            Model.PropertyChanged += Model_PropertyChanged;
        }
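        // A minimal, hypothetical sketch of the Model_PropertyChanged handler wired up above.
        // It is not part of the source; the real handler may rebuild the proxy or revalidate
        // connection settings when a WitsmlSettings property changes.
        private void Model_PropertyChanged(object sender, System.ComponentModel.PropertyChangedEventArgs e)
        {
            // Log which settings property changed so the view model can react to it.
            _log.Debug($"WitsmlSettings property changed: {e.PropertyName}");
        }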
        /// <summary>
        /// Streams the channel data.
        /// </summary>
        /// <param name="contextList">The context list.</param>
        /// <param name="token">The token.</param>
        /// <returns>A <see cref="Task"/> representing the streaming operation.</returns>
        protected virtual async Task StreamChannelData(IList<ChannelStreamingContext> contextList, CancellationToken token)
        {
            _channelStreamingContextLists.Add(contextList);

            // These values can be set outside of our processing loop as they won't change
            //... as context is processed and completed.
            var firstContext         = contextList.First();
            var channelStreamingType = firstContext.ChannelStreamingType;
            var parentUri            = firstContext.ParentUri;
            var indexes             = firstContext.ChannelMetadata.Indexes.Cast<IIndexMetadataRecord>().ToList();
            var primaryIndex        = indexes[0];
            var isTimeIndex         = indexes.Select(i => i.IndexKind == (int)ChannelIndexTypes.Time).ToArray();
            var requestLatestValues =
                channelStreamingType == ChannelStreamingTypes.IndexCount
                    ? firstContext.IndexCount
                    : channelStreamingType == ChannelStreamingTypes.LatestValue
                        ? 1
                        : (int?)null;
            var   increasing = primaryIndex.Direction == (int)IndexDirections.Increasing;
            bool? firstStart = null;

            // Loop until there is a cancellation or all channels have been removed
            while (!IsStreamingStopped(contextList, ref token))
            {
                firstStart = !firstStart.HasValue;

                var channelIds = contextList.Select(i => i.ChannelId).Distinct().ToArray();
                Logger.Debug($"Streaming data for parentUri {parentUri.Uri} and channelIds {string.Join(",", channelIds)}");

                // We only need a start index value for IndexValue and RangeRequest or if we're streaming
                //... IndexCount or LatestValue and requestLatestValues is no longer set.
                var minStart =
                    (channelStreamingType == ChannelStreamingTypes.IndexValue || channelStreamingType == ChannelStreamingTypes.RangeRequest) ||
                    ((channelStreamingType == ChannelStreamingTypes.IndexCount || channelStreamingType == ChannelStreamingTypes.LatestValue) &&
                     !requestLatestValues.HasValue)
                        ? contextList.Min(x => Convert.ToInt64(x.StartIndex))
                        : (long?)null;

                // Only need an end index value for a range request
                var maxEnd = channelStreamingType == ChannelStreamingTypes.RangeRequest
                    ? contextList.Max(x => Convert.ToInt64(x.EndIndex))
                    : (long?)null;

                //var isTimeIndex = primaryIndex.IndexType == ChannelIndexTypes.Time;
                var rangeSize = WitsmlSettings.GetRangeSize(isTimeIndex[0]);

                // Convert indexes from scaled values
                var minStartIndex = minStart?.IndexFromScale(primaryIndex.Scale, isTimeIndex[0]);
                var maxEndIndex   = channelStreamingType == ChannelStreamingTypes.IndexValue
                    ? (increasing ? minStartIndex + rangeSize : minStartIndex - rangeSize)
                    : maxEnd?.IndexFromScale(primaryIndex.Scale, isTimeIndex[0]);

                // Get channel data
                var mnemonics     = contextList.Select(c => c.ChannelMetadata.ChannelName).ToList();
                var dataProvider  = GetDataProvider(parentUri);
                var optimiseStart = channelStreamingType == ChannelStreamingTypes.IndexValue;
                var channelData   = dataProvider.GetChannelData(parentUri, new Range<double?>(minStartIndex, maxEndIndex), mnemonics, requestLatestValues, optimiseStart);

                // Stream the channel data
                await StreamChannelData(contextList, channelData, mnemonics.ToArray(), increasing, isTimeIndex, primaryIndex.Scale, firstStart.Value, token);

                // If we have processed an IndexCount or LatestValue query, clear requestLatestValues so we can
                //... keep streaming new data as long as the channel is active.
                if (channelStreamingType == ChannelStreamingTypes.IndexCount ||
                    channelStreamingType == ChannelStreamingTypes.LatestValue)
                {
                    requestLatestValues = null;
                }

                // Check each context to see if all the data has been streamed.
                var completedContexts = contextList
                    .Where(c =>
                        (c.ChannelStreamingType != ChannelStreamingTypes.RangeRequest &&
                         c.ChannelMetadata.Status != (int)ChannelStatuses.Active && c.ChannelMetadata.EndIndex.HasValue &&
                         c.StartIndex >= c.ChannelMetadata.EndIndex.Value) ||
                        (c.ChannelStreamingType == ChannelStreamingTypes.RangeRequest &&
                         c.StartIndex >= c.EndIndex))
                    .ToArray();

                // Remove any contexts from the list that have completed returning all data
                completedContexts.ForEach(c =>
                {
                    // Notify consumer if the ReceiveChangeNotification field is true
                    if (c.ChannelMetadata.Status != (int)ChannelStatuses.Active && c.ReceiveChangeNotification)
                    {
                        // TODO: Decide which message should be sent...
                        // ChannelStatusChange(c.ChannelId, c.ChannelMetadata.Status);
                        // ChannelRemove(c.ChannelId);
                    }

                    contextList.Remove(c);
                });

                // Delay to prevent CPU overhead
                await Task.Delay(WitsmlSettings.StreamChannelDataDelayMilliseconds, token);
            }
        }
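        // StreamChannelData loops until IsStreamingStopped returns true. A minimal sketch of what that
        // guard might check is shown below; this is an assumption, not the source implementation, which
        // may also prune invalid channels before deciding to stop.
        protected virtual bool IsStreamingStopped(IList<ChannelStreamingContext> contextList, ref CancellationToken token)
        {
            // Stop when the consumer cancels the token or when every context has completed and been removed.
            return token.IsCancellationRequested || contextList.Count == 0;
        }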
        /// <summary>
        /// Gets the channel data records for the specified data object URI and range.
        /// </summary>
        /// <param name="uri">The parent data object URI.</param>
        /// <param name="range">The data range to retrieve.</param>
        /// <param name="mnemonics">The mnemonics to fetch channel data for.
        /// This list will be modified to contain only those mnemonics that data was returned for.</param>
        /// <param name="requestLatestValues">The total number of requested latest values.</param>
        /// <param name="optimizeStart">if set to <c>true</c> start range can be optimized.</param>
        /// <returns>A collection of channel data.</returns>
        public List<List<List<object>>> GetChannelData(EtpUri uri, Range<double?> range, List<string> mnemonics, int? requestLatestValues, bool optimizeStart = false)
        {
            Logger.Debug($"Getting channel data for URI: {uri}");
            List<List<List<object>>> logData;

            var entity          = GetEntity(uri);
            var queryMnemonics  = mnemonics.ToArray();
            var allMnemonics    = GetAllMnemonics(entity);
            var mnemonicIndexes = ComputeMnemonicIndexes(entity, allMnemonics, queryMnemonics);
            var keys            = mnemonicIndexes.Keys.ToArray();
            var units           = GetUnitList(entity, keys);
            var dataTypes       = GetDataTypeList(entity, keys);
            var nullValues      = GetNullValueList(entity, keys);

            // Create a context to pass information required by the ChannelDataReader.
            var context = new ResponseContext()
            {
                RequestLatestValues = requestLatestValues,
                MaxDataNodes        = WitsmlSettings.LogMaxDataNodesGet,
                MaxDataPoints       = WitsmlSettings.LogMaxDataPointsGet
            };

            // Get the ranges for the query mnemonics
            var curveRanges =
                GetCurrentIndexRange(entity)
                .Where(c => queryMnemonics.Contains(c.Key))
                .Select(r => r.Value).ToList();

            var indexChannel       = entity.Index.FirstOrDefault();
            var increasing         = indexChannel.IsIncreasing();
            var isTimeIndex        = entity.IsTimeIndex();
            var rangeStart         = curveRanges.GetMinRangeStart(increasing);
            var optimizeRangeStart = curveRanges.GetOptimizeRangeStart(increasing);
            var rangeEnd           = curveRanges.GetMaxRangeEnd(increasing);
            var rangeStepSize      = WitsmlSettings.GetRangeStepSize(isTimeIndex);

            bool      finished;
            const int maxRequestFactor = 3;
            var       requestFactor    = 1;

            // Try an initial optimization for non-latest values and latest values.
            if (!requestLatestValues.HasValue && optimizeStart)
            {
                // Reset the start if specified start is before the minStart
                if (rangeStart.HasValue && range.StartsBefore(rangeStart.Value, increasing))
                {
                    range = new Range<double?>(rangeStart, range.End);
                }
            }
            else if (requestLatestValues.HasValue)
            {
                range = range.OptimizeLatestValuesRange(requestLatestValues, isTimeIndex, increasing, rangeStart, optimizeRangeStart, rangeEnd, requestFactor, rangeStepSize);
            }

            do // until finished
            {
                // Retrieve the data from the database
                var records = GetChannelData(uri, indexChannel?.Mnemonic, range, increasing, requestLatestValues);

                // Get a reader to process the log's channel data records
                var reader = records.GetReader();

                // Get the data from the reader based on the context and mnemonicIndexes (slices)
                Dictionary<string, Range<double?>> ranges;
                logData = reader.GetData(context, mnemonicIndexes, queryMnemonics, units, dataTypes, nullValues, out ranges);

                // Test if we're finished reading data
                finished =                                  // Finished if...
                           !requestLatestValues.HasValue || // not request latest values
                           context.HasAllRequestedValues || // request latest values and all values returned
                           (rangeStart.HasValue &&          // query range is at start of all channel data
                            range.StartsBefore(rangeStart.Value, increasing, true)) ||
                           !range.Start.HasValue;           // query was for all data

                // If we're not finished try a bigger range
                if (!finished)
                {
                    requestFactor += 1;
                    if (requestFactor < maxRequestFactor)
                    {
                        range = range.OptimizeLatestValuesRange(requestLatestValues, isTimeIndex, increasing, rangeStart, optimizeRangeStart, rangeEnd, requestFactor, rangeStepSize);
                    }
                    else
                    {
                        // This is the final optimization and will stop the iterations after the next pass
                        range = new Range<double?>(null, null);
                    }
                }
            } while (!finished);

            return logData;
        }
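        // Hypothetical usage sketch of GetChannelData above (logUri and the channel names are
        // illustrative assumptions, not from the source): request the latest 10 values for two
        // channels across the full index range.
        private void GetLatestValuesExample(EtpUri logUri)
        {
            var mnemonics = new List<string> { "ROP", "HKLD" };
            var range     = new Range<double?>(null, null);

            // Per the XML docs, mnemonics is trimmed to the channels that actually returned data.
            var data = GetChannelData(logUri, range, mnemonics, requestLatestValues: 10);

            Logger.Debug($"Returned {data.Count} rows for {mnemonics.Count} channels.");
        }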
Example #4
        /// <summary>
        /// Combines <see cref="IEnumerable{IChannelDataRecord}"/> data into RangeSize chunks for storage into the database
        /// </summary>
        /// <param name="records">The <see cref="IEnumerable{IChannelDataRecord}"/> records to be chunked.</param>
        /// <returns>An <see cref="IEnumerable{ChannelDataChunk}"/> of channel data.</returns>
        private IEnumerable<ChannelDataChunk> ToChunks(IEnumerable<IChannelDataRecord> records)
        {
            Logger.Debug("Converting ChannelDataRecords to ChannelDataChunks.");

            var data = new List<string>();
            var id   = string.Empty;
            List<ChannelIndexInfo> indexes      = null;
            ChannelIndexInfo       indexChannel = null;
            Range<long>?           plannedRange = null;
            double  startIndex    = 0;
            double  endIndex      = 0;
            double? previousIndex = null;

            string[] chunkMnemonics   = null;
            string[] chunkUnits       = null;
            string[] chunkNullValues  = null;
            var      chunkSettingsSet = false;

            foreach (var record in records)
            {
                indexChannel = record.GetIndex();
                indexes      = record.Indices.Select(x => x.Clone()).ToList();
                var increasing = indexChannel.Increasing;
                var index      = record.GetIndexValue();
                var rangeSize  = WitsmlSettings.GetRangeSize(indexChannel.IsTimeIndex);

                if (previousIndex.HasValue)
                {
                    if (previousIndex.Value == index)
                    {
                        Logger.ErrorFormat("Data node index repeated for uri: {0}; channel {1}; index: {2}", record.Uri, indexChannel.Mnemonic, index);
                        throw new WitsmlException(ErrorCodes.NodesWithSameIndex);
                    }
                    if (increasing && previousIndex.Value > index || !increasing && previousIndex.Value < index)
                    {
                        var error = $"Data node index not in sequence for uri: {record.Uri}: channel: {indexChannel.Mnemonic}; index: {index}";
                        Logger.Error(error);
                        throw new InvalidOperationException(error);
                    }
                }

                previousIndex = index;

                if (!plannedRange.HasValue)
                {
                    plannedRange = Range.ComputeRange(index, rangeSize, increasing);
                    id           = record.Id;
                    startIndex   = index;
                }

                // TODO: Can we use this instead? plannedRange.Value.Contains(index, increasing) or a new method?
                if (WithinRange(index, plannedRange.Value.End, increasing, false))
                {
                    id = string.IsNullOrEmpty(id) ? record.Id : id;
                    data.Add(record.GetJson());
                    endIndex = index;

                    if (chunkSettingsSet)
                    {
                        continue;
                    }

                    chunkMnemonics   = record.Mnemonics;
                    chunkUnits       = record.Units;
                    chunkNullValues  = record.NullValues;
                    chunkSettingsSet = true;
                }
                else
                {
                    //var newIndex = indexChannel.Clone();
                    //newIndex.Start = startIndex;
                    //newIndex.End = endIndex;
                    indexes[0].Start = startIndex;
                    indexes[0].End   = endIndex;

                    Logger.DebugFormat("ChannelDataChunk created with id '{0}', startIndex '{1}' and endIndex '{2}'.", id, startIndex, endIndex);

                    yield return new ChannelDataChunk()
                    {
                        Uid = id,
                        Data = "[" + string.Join(",", data) + "]",
                        //Indices = new List<ChannelIndexInfo> { newIndex },
                        Indices = indexes,
                        RecordCount = data.Count,
                        MnemonicList = string.Join(",", chunkMnemonics),
                        UnitList = string.Join(",", chunkUnits),
                        NullValueList = string.Join(",", chunkNullValues)
                    };

                    plannedRange = Range.ComputeRange(index, rangeSize, increasing);
                    data         = new List<string>()
                    {
                        record.GetJson()
                    };
                    startIndex      = index;
                    endIndex        = index;
                    id              = record.Id;
                    chunkMnemonics  = record.Mnemonics;
                    chunkUnits      = record.Units;
                    chunkNullValues = record.NullValues;
                }
            }

            if (data.Count > 0 && indexes != null)
            {
                //var newIndex = indexChannel.Clone();
                //newIndex.Start = startIndex;
                //newIndex.End = endIndex;
                indexes          = indexes.Select(x => x.Clone()).ToList();
                indexes[0].Start = startIndex;
                indexes[0].End   = endIndex;

                Logger.DebugFormat("ChannelDataChunk created with id '{0}', startIndex '{1}' and endIndex '{2}'.", id, startIndex, endIndex);

                var chunk = new ChannelDataChunk()
                {
                    Uid  = id,
                    Data = "[" + string.Join(",", data) + "]",
                    //Indices = new List<ChannelIndexInfo> { newIndex },
                    Indices     = indexes,
                    RecordCount = data.Count
                };

                if (chunkMnemonics != null)
                {
                    chunk.MnemonicList  = string.Join(",", chunkMnemonics);
                    chunk.UnitList      = string.Join(",", chunkUnits);
                    chunk.NullValueList = string.Join(",", chunkNullValues);
                }

                yield return chunk;
            }
        }
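        // ToChunks relies on Range.ComputeRange to snap an index to a rangeSize-aligned bucket before
        // accumulating records into that chunk. A minimal sketch of the idea is shown below, assuming an
        // increasing index; it is an illustration only, not the library's Range.ComputeRange implementation.
        private static Range<long> ComputeRangeSketch(double index, long rangeSize)
        {
            // e.g. index = 2347.5 with rangeSize = 1000 falls into the bucket [2000, 3000).
            var start = (long)Math.Floor(index / rangeSize) * rangeSize;
            return new Range<long>(start, start + rangeSize);
        }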
Example #5
        /// <summary>
        /// Merges <see cref="ChannelDataChunk" /> data for updates.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <exception cref="WitsmlException"></exception>
        public void Merge(ChannelDataReader reader)
        {
            if (reader == null || reader.RecordsAffected <= 0)
            {
                return;
            }

            Logger.Debug("Merging records in ChannelDataReader.");

            try
            {
                // Get the full range of the reader.
                //... This is the range that we need to select existing ChannelDataChunks from the database to update
                var updateRange = reader.GetIndexRange();

                // Make sure we have a valid index range; otherwise, nothing to do
                if (!updateRange.Start.HasValue || !updateRange.End.HasValue)
                {
                    return;
                }

                var indexChannel = reader.GetIndex();
                var increasing   = indexChannel.Increasing;
                var rangeSize    = WitsmlSettings.GetRangeSize(indexChannel.IsTimeIndex);

                // Based on the range of the updates, compute the range of the data chunk(s)
                //... so we can merge updates with existing data.
                var existingRange = new Range<double?>(
                    Range.ComputeRange(updateRange.Start.Value, rangeSize, increasing).Start,
                    Range.ComputeRange(updateRange.End.Value, rangeSize, increasing).End
                    );

                // Get the ChannelDataChunk list from the database for the computed range and URI
                var filter  = BuildDataFilter(reader.Uri, indexChannel.Mnemonic, existingRange, increasing);
                var results = GetData(filter, increasing);

                // Backup existing chunks for the transaction
                AttachChunks(results);

                try
                {
                    BulkWriteChunks(
                        ToChunks(
                            MergeSequence(results.GetRecords(), reader.AsEnumerable(), updateRange, rangeSize)),
                        reader.Uri,
                        string.Join(",", reader.Mnemonics),
                        string.Join(",", reader.Units),
                        string.Join(",", reader.NullValues)
                        );

                    CreateChannelDataChunkIndex();
                }
                catch (FormatException ex)
                {
                    Logger.ErrorFormat("Error when merging data: {0}", ex);
                    throw new WitsmlException(ErrorCodes.ErrorMaxDocumentSizeExceeded, ex);
                }
            }
            catch (MongoException ex)
            {
                Logger.ErrorFormat("Error when merging data: {0}", ex);
                throw new WitsmlException(ErrorCodes.ErrorUpdatingInDataStore, ex);
            }
        }
Example #6
        /// <summary>
        /// Merges <see cref="ChannelDataChunk" /> data for updates.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <exception cref="WitsmlException"></exception>
        public void Merge(ChannelDataReader reader)
        {
            if (reader == null || reader.RecordsAffected <= 0)
            {
                return;
            }

            Logger.Debug("Merging records in ChannelDataReader.");

            try
            {
                // Get the full range of the reader.
                //... This is the range that we need to select existing ChannelDataChunks from the database to update
                var updateRange = reader.GetIndexRange();

                // Make sure we have a valid index range; otherwise, nothing to do
                if (!updateRange.Start.HasValue || !updateRange.End.HasValue)
                {
                    return;
                }

                var indexChannel = reader.GetIndex();
                var increasing   = indexChannel.Increasing;
                var rangeSize    = WitsmlSettings.GetRangeSize(indexChannel.IsTimeIndex);

                // Based on the range of the updates, compute the range of the data chunk(s)
                //... so we can merge updates with existing data.
                var chunkRange = new Range<double?>(
                    Range.ComputeRange(updateRange.Start.Value, rangeSize, increasing).Start,
                    Range.ComputeRange(updateRange.End.Value, rangeSize, increasing).End
                    );

                // Get the ChannelDataChunk list from the database for the computed range and URI,
                //... specifically using a chunk limiter that will seek until the end of the range is found, regardless of the default read limit in the config file
                var results = GetData(reader.Uri, indexChannel.Mnemonic, chunkRange, increasing, false, GetDataSearchUntilFoundOrEndChunkLimiter);

                // Backup existing chunks for the transaction
                AttachChunks(results);

                // Check if reader overlaps existing data
                var hasOverlap        = false;
                var existingRange     = new Range<double?>();
                var existingMnemonics = results.Count > 0 ? results[0]?.MnemonicList.Split(',') : new string[0];

                if (results.Count > 0)
                {
                    existingRange = new Range<double?>(results.Min(x => x.Indices[0].Start), results.Max(x => x.Indices[0].End));
                    hasOverlap    = updateRange.Overlaps(existingRange, increasing);
                }

                try
                {
                    if (hasOverlap)
                    {
                        WriteRecordsToChunks(reader, MergeSequence(results.GetRecords(), reader.AsEnumerable(), updateRange, rangeSize));
                    }
                    else
                    {
                        // If there is no existing data add reader records only
                        if (results.Count == 0)
                        {
                            WriteRecordsToChunks(reader, reader.AsEnumerable());
                        }
                        // If there is only one chunk and the mnemonics match
                        else if (existingMnemonics != null && existingMnemonics.OrderBy(t => t).SequenceEqual(reader.Mnemonics.OrderBy(t => t)) && results.Count == 1)
                        {
                            // If the update is before the existing range
                            if (updateRange.EndsBefore(existingRange.Start.GetValueOrDefault(), increasing, true))
                            {
                                WriteRecordsToChunks(reader, reader.AsEnumerable().Concat(results.GetRecords()));
                            }
                            // If the update is after the existing range
                            else if (updateRange.StartsAfter(existingRange.End.GetValueOrDefault(), increasing, true))
                            {
                                WriteRecordsToChunks(reader, results.GetRecords().Concat(reader.AsEnumerable()));
                            }
                        }
                        // Resort to merging the records
                        else
                        {
                            WriteRecordsToChunks(reader, MergeSequence(results.GetRecords(), reader.AsEnumerable(), updateRange, rangeSize));
                        }
                    }
                    CreateChannelDataChunkIndex();
                }
                catch (FormatException ex)
                {
                    Logger.ErrorFormat("Error when merging data: {0}", ex);
                    throw new WitsmlException(ErrorCodes.ErrorMaxDocumentSizeExceeded, ex);
                }
            }
            catch (MongoException ex)
            {
                Logger.ErrorFormat("Error when merging data: {0}", ex);
                throw new WitsmlException(ErrorCodes.ErrorUpdatingInDataStore, ex);
            }
        }
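        // Hypothetical usage sketch of Merge above (the reader construction is an assumption, not from
        // the source): merge an incoming log-data update into the stored chunks. Merge decides internally
        // whether to append, prepend or fully merge, based on how the update range relates to the
        // range already stored in existing chunks.
        private void MergeUpdateExample(ChannelDataReader updateReader)
        {
            // Null or empty readers are ignored by Merge itself.
            Merge(updateReader);
        }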