/// <summary>
/// Adds <see cref="ChannelSet" /> to the data store.
/// </summary>
/// <param name="parser">The input template parser.</param>
/// <param name="dataObject">The <see cref="ChannelSet" /> to be added.</param>
public override void Add(WitsmlQueryParser parser, ChannelSet dataObject)
{
    // Extract the channel data before persisting the header entity.
    var dataReader = ExtractDataReader(dataObject);

    InsertEntity(dataObject);

    // Header-only adds carry no data reader; nothing further to do.
    if (dataReader == null)
        return;

    Logger.DebugFormat("Adding ChannelSet data with uid '{0}' and name '{1}'", dataObject.Uuid, dataObject.Citation.Title);

    var isIncreasing = dataObject.IsIncreasing();
    var mnemonics = dataReader.Indices
        .Select(x => x.Mnemonic)
        .Concat(dataReader.Mnemonics)
        .ToArray();

    // Current index range information from the header.
    var indexRanges = GetCurrentIndexRange(dataObject);
    var primaryIndex = dataObject.Index[0];
    Logger.DebugFormat("Index curve mnemonic: {0}.", primaryIndex.Mnemonic);

    GetUpdatedLogHeaderIndexRange(dataReader, mnemonics, indexRanges, isIncreasing);

    // Persist the channel data chunks.
    ChannelDataChunkAdapter.Add(dataReader);

    // Update the header index range to reflect the stored data.
    UpdateIndexRange(dataObject.GetUri(), dataObject, indexRanges, mnemonics);
}
// Builds an ETP channel metadata record for a channel within the given channel set.
private ChannelMetadataRecord ToChannelMetadataRecord(ChannelSet entity, Channel channel, IList<IndexMetadataRecord> indexMetadata)
{
    var channelUri = channel.GetUri(entity);
    var primaryIndex = indexMetadata.FirstOrDefault();
    var isTimeLog = primaryIndex != null && primaryIndex.IndexType == ChannelIndexTypes.Time;
    var curveRanges = GetCurrentIndexRange(entity);

    var dataObject = new DataObject();
    StoreStoreProvider.SetDataObject(dataObject, channel, channelUri, channel.Mnemonic, 0);

    // Resolve the ETP data type name, stripping the '@' used to escape C# keywords (e.g. @double).
    var dataType = channel.DataType.GetValueOrDefault(EtpDataType.@double)
        .ToString()
        .Replace("@", string.Empty);

    return new ChannelMetadataRecord()
    {
        ChannelUri = channelUri,
        ContentType = channelUri.ContentType,
        DataType = dataType,
        Description = channel.Citation?.Description ?? channel.Mnemonic,
        ChannelName = channel.Mnemonic,
        Uom = Units.GetUnit(channel.Uom),
        MeasureClass = channel.ChannelClass?.Title ?? ObjectTypes.Unknown,
        Source = channel.Source ?? ObjectTypes.Unknown,
        Uuid = channel.Mnemonic,
        DomainObject = dataObject,
        // NOTE(review): status was previously mapped from channel.GrowingStatus; currently fixed to Active.
        Status = ChannelStatuses.Active,
        // Start/end indexes are only meaningful when a primary index exists; scale them per its metadata.
        StartIndex = primaryIndex == null ? null : curveRanges[channel.Mnemonic].Start.IndexToScale(primaryIndex.Scale, isTimeLog),
        EndIndex = primaryIndex == null ? null : curveRanges[channel.Mnemonic].End.IndexToScale(primaryIndex.Scale, isTimeLog),
        Indexes = indexMetadata,
        CustomData = new Dictionary<string, DataValue>()
    };
}
// Verifies that a ChannelSet can be added via ETP PutObject and read back with GetObject.
public async Task ChannelSet200_PutObject_Can_Add_ChannelSet()
{
    AddParents();
    await RequestSessionAndAssert();

    var handler = _client.Handler<IStoreCustomer>();
    var uri = ChannelSet.GetUri();
    var dataObject = CreateDataObject(uri, ChannelSet);

    // The object must not exist before the put.
    await GetAndAssert(handler, uri, Energistics.Etp.EtpErrorCodes.NotFound);

    // Add the object.
    await PutAndAssert(handler, dataObject);

    // Read it back and verify the returned XML parses as a ChannelSet.
    var args = await GetAndAssert(handler, uri);
    Assert.IsNotNull(args?.Message.DataObject);

    var channelSetXml = args.Message.DataObject.GetString();
    var parsed = Parse<ChannelSet>(channelSetXml);
    Assert.IsNotNull(parsed);
}
/// <summary>
/// Gets the collection of all channels, optionally bypassing or refreshing the cache.
/// </summary>
/// <param name="noCache">When true (and a refresh was not requested), load directly from the database and bypass the cache.</param>
/// <param name="refreshCache">Force the cache entry to be reloaded from the database.</param>
/// <returns>The set of channels.</returns>
public ChannelSet GetChannels(bool noCache, bool refreshCache)
{
    // Bypass the cache entirely when asked to, unless a refresh was also requested.
    if (noCache && !refreshCache)
        return LoadChannelSet();

    var cacheManager = CacheManagerProvider.GetCacheManagerInstance();
    string cacheKey = ChannelSet.StaticGetCacheKey();

    // Serve from the cache when a cached entry exists and no refresh was requested.
    if (cacheManager.Contains<ChannelSet>(cacheKey) && !refreshCache)
        return cacheManager.Get<ChannelSet>(cacheKey);

    // Load from the database and (re)populate the cache.
    var channelSet = LoadChannelSet();

    if (channelSet != null)
    {
        // Cache expiration is read from configuration by entity type.
        cacheManager.Insert(cacheKey, channelSet,
            ConfigurationManager.GetCacheExpirationByType(channelSet.GetType()));
    }

    return channelSet;
}
/// <summary>
/// Initializes the channel set for the given log with generated channels and data.
/// </summary>
/// <param name="log">The log.</param>
/// <param name="indexList">The index list.</param>
/// <param name="loggingMethod">The logging method.</param>
/// <param name="numDataValue">The number of data values to generate.</param>
public void InitChannelSet(Log log, List<ChannelIndex> indexList, LoggingMethod loggingMethod = LoggingMethod.computed, int numDataValue = 150)
{
    var channelSet = LogGenerator.CreateChannelSet(log);
    channelSet.Index = indexList;

    // Depth logs get ROP (with boolean "Quality" point metadata) plus a hookload channel;
    // time logs get a single ROP channel with float "Confidence" point metadata.
    if (log.TimeDepth.EqualsIgnoreCase(ObjectFolders.Depth))
    {
        var pointMetadataList = List(LogGenerator.CreatePointMetadata("Quality", "Quality", EtpDataType.boolean));
        channelSet.Channel.Add(LogGenerator.CreateChannel(log, indexList, "Rate of Penetration", "ROP", UnitOfMeasure.mh, "Velocity", EtpDataType.@double, pointMetadataList: pointMetadataList));
        channelSet.Channel.Add(LogGenerator.CreateChannel(log, indexList, "Hookload", "HKLD", UnitOfMeasure.klbf, "Force", EtpDataType.@double, null));
    }
    else
    {
        var pointMetadataList = List(LogGenerator.CreatePointMetadata("Confidence", "Confidence", EtpDataType.@float));
        channelSet.Channel.Add(LogGenerator.CreateChannel(log, indexList, "Rate of Penetration", "ROP", UnitOfMeasure.mh, "Velocity", EtpDataType.@double, pointMetadataList: pointMetadataList));
    }

    // Attach the channel set to the log and generate its data rows.
    log.ChannelSet = new List<ChannelSet> { channelSet };
    LogGenerator.GenerateChannelData(log.ChannelSet, numDataValue: numDataValue);
}
// Prepares the parent chain of test data objects: Well -> Wellbore -> ChannelSet.
protected override void PrepareData()
{
    Well = new Well
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("Well"),
        GeographicLocationWGS84 = DevKit.Location(),
        SchemaVersion = "2.0",
        TimeZone = DevKit.TimeZone
    };

    Wellbore = new Wellbore
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("Wellbore"),
        // Reference back to the parent well created above.
        Well = DevKit.DataObjectReference(Well),
        SchemaVersion = "2.0"
    };

    ChannelSet = new ChannelSet
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("ChannelSet"),
        Wellbore = DevKit.DataObjectReference(Wellbore),
        // Schema version is resolved from the ETP URI family for ChannelSet.
        SchemaVersion = EtpUris.GetUriFamily(typeof(ChannelSet)).Version,
    };
}
// Creates the shared 2.0 log fixtures used across tests: a wellbore reference,
// a time log and a depth log, index/point-metadata definitions, and channel sets.
public void TestSetUp()
{
    _logGenerator = new Log200Generator();

    // Shared wellbore reference used by both logs.
    _wellboreReference = new DataObjectReference
    {
        ContentType = EtpContentTypes.Witsml200.For(ObjectTypes.Wellbore),
        Title = _logGenerator.Name("Wellbore"),
        Uuid = _logGenerator.Uid()
    };

    _timeLog = new Log()
    {
        TimeDepth = "Time",
        Citation = _logGenerator.CreateCitation(_logGenerator.Name("Citation")),
        Wellbore = _wellboreReference,
        Uuid = _logGenerator.Uid()
    };

    _depthLog = new Log()
    {
        TimeDepth = "Depth",
        Citation = _logGenerator.CreateCitation(_logGenerator.Name("Citation")),
        Wellbore = _wellboreReference,
        Uuid = _logGenerator.Uid()
    };

    // Index definitions (depth and elapsed-time both increasing).
    _measuredDepthIndex = _logGenerator.CreateMeasuredDepthIndex(IndexDirection.increasing);
    _dateTimeIndex = _logGenerator.CreateDateTimeIndex();
    _elapseTimeIndex = _logGenerator.CreateElapsedTimeIndex(IndexDirection.increasing);

    // NOTE(review): the two point-metadata names deliberately differ only by casing
    // ("confidence" vs "Confidence") — confirm this is intended by the tests.
    _booleanPointMetadata = _logGenerator.CreatePointMetadata("confidence", "confidence", EtpDataType.boolean);
    _floatPointMetadata = _logGenerator.CreatePointMetadata("Confidence", "Confidence", EtpDataType.@float);

    // Depth channel set: MD + date-time indices; ROP (boolean point metadata) and HKLD channels.
    _depthLogChannelSet = _logGenerator.CreateChannelSet(_depthLog);
    _depthLogChannelSet.Index.Add(_measuredDepthIndex);
    _depthLogChannelSet.Index.Add(_dateTimeIndex);
    _depthLogChannelSet.Channel.Add(_logGenerator.CreateChannel(_depthLog, _depthLogChannelSet.Index, "Rate of Penetration", "ROP", UnitOfMeasure.mh, "Velocity", EtpDataType.@double, pointMetadataList: _logGenerator.List(_booleanPointMetadata)));
    _depthLogChannelSet.Channel.Add(_logGenerator.CreateChannel(_depthLog, _depthLogChannelSet.Index, "Hookload", "HKLD", UnitOfMeasure.klbf, "Force", EtpDataType.@double, null));

    // Time channel set: elapsed-time index and a single ROP channel with float point metadata.
    _timeLogChannelSet = _logGenerator.CreateChannelSet(_timeLog);
    _timeLogChannelSet.Index.Add(_elapseTimeIndex);
    _timeLogChannelSet.Channel.Add(_logGenerator.CreateChannel(_timeLog, _timeLogChannelSet.Index, "Rate of Penetration", "ROP", UnitOfMeasure.mh, "Velocity", EtpDataType.@double, pointMetadataList: _logGenerator.List(_floatPointMetadata)));
}
// Verifies that a ChannelSet can be added via ETP PutObject after the initial
// GetObject responds with the NoData message flag.
public async Task ChannelSet200_PutObject_Can_Add_ChannelSet()
{
    AddParents();
    await RequestSessionAndAssert();

    var handler = _client.Handler<IStoreCustomer>();
    var uri = ChannelSet.GetUri();
    var dataObject = CreateDataObject(uri, ChannelSet);

    // Before the put, the store responds with the NoData message flag.
    var args = await GetAndAssert(handler, uri);
    Assert.IsNotNull(args?.Header);
    Assert.AreEqual((int)MessageFlags.NoData, args.Header.MessageFlags);

    // Add the object.
    await PutAndAssert(handler, dataObject);

    // Read it back and verify the returned XML parses as a ChannelSet.
    args = await GetAndAssert(handler, uri);
    Assert.IsNotNull(args?.Message.DataObject);

    var channelSetXml = args.Message.DataObject.GetString();
    var parsed = Parse<ChannelSet>(channelSetXml);
    Assert.IsNotNull(parsed);
}
// Wires up the client manager with its server, channel set and login performer,
// and creates a typed logger from the supplied factory.
public ClientManager(ILoggerFactory loggerFactory, TcpServer server, ChannelSet channels, ILoginPerformer<ClientHandle> clientFactory)
{
    Logger = loggerFactory.CreateLogger<ClientManager>();

    _server = server;
    _channels = channels;
    _clientFactory = clientFactory;
}
// Per-test setup: logs the running test, creates the DevKit aspect and the
// Well -> Wellbore -> ChannelSet parent chain, then runs the per-test hooks.
public void TestSetUp()
{
    Logger.Debug($"Executing {TestContext.TestName}");
    DevKit = new DevKit200Aspect(TestContext);

    var well = new Well
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("Well"),
        GeographicLocationWGS84 = DevKit.Location(),
        SchemaVersion = "2.0",
        TimeZone = DevKit.TimeZone
    };
    Well = well;

    var wellbore = new Wellbore
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("Wellbore"),
        Well = DevKit.DataObjectReference(well),
        SchemaVersion = "2.0"
    };
    Wellbore = wellbore;

    ChannelSet = new ChannelSet
    {
        Uuid = DevKit.Uid(),
        Citation = DevKit.Citation("ChannelSet"),
        Wellbore = DevKit.DataObjectReference(wellbore),
        // Schema version is resolved from the ETP URI family for ChannelSet.
        SchemaVersion = EtpUris.GetUriFamily(typeof(ChannelSet)).Version
    };

    BeforeEachTest();
    OnTestSetUp();
}
// Populates the shared ChannelSet with the given name, hole size, index type and
// (unused, runNumber, startDepth, endDepth) tuple values, then stores it via ETP PutObject.
private async Task CreateChannelSet(string name, double holeSize, string timeDepth, Tuple<double, int, double, double> tuple)
{
    ChannelSet.Uuid = DevKit.Uid();
    ChannelSet.Citation.Title = name;
    ChannelSet.RunNumber = tuple.Item2.ToString();
    ChannelSet.StartIndex = new DepthIndexValue { Depth = (float)tuple.Item3 };
    ChannelSet.EndIndex = new DepthIndexValue { Depth = (float)tuple.Item4 };
    ChannelSet.NominalHoleSize = new LengthMeasureExt(holeSize, "in");
    ChannelSet.TimeDepth = timeDepth;

    var handler = _client.Handler<IStoreCustomer>();
    var uri = ChannelSet.GetUri();
    var dataObject = CreateDataObject(uri, ChannelSet);

    // NOTE(review): pre/post GET verification is currently disabled:
    //await GetAndAssert(handler, uri, Energistics.EtpErrorCodes.NotFound);

    // Put Object
    await PutAndAssert(handler, dataObject);

    //var args = await GetAndAssert(handler, uri);
    //Assert.IsNotNull(args?.Message.DataObject);
    //var xml = args.Message.DataObject.GetString();
    //var result = Parse<ChannelSet>(xml);
    //Assert.IsNotNull(result);
}
// Verifies that putting an existing ChannelSet overwrites it: an extension
// name/value added on the initial put must be gone after the update put.
public async Task ChannelSet200_PutObject_Can_Update_ChannelSet()
{
    AddParents();
    await RequestSessionAndAssert();

    var handler = _client.Handler<IStoreCustomer>();
    var uri = ChannelSet.GetUri();

    // Attach an extension name/value so the add can be distinguished from the update.
    var envName = "TestPutObject";
    var env = DevKit.ExtensionNameValue(envName, envName);
    ChannelSet.ExtensionNameValue = new List<ExtensionNameValue>() { env };

    var dataObject = CreateDataObject(uri, ChannelSet);

    // The object must not exist before the first put.
    await GetAndAssert(handler, uri, Energistics.Etp.EtpErrorCodes.NotFound);

    // Add the object.
    await PutAndAssert(handler, dataObject);

    // Read back the added object and confirm the extension value is present.
    var args = await GetAndAssert(handler, uri);
    Assert.IsNotNull(args?.Message.DataObject);
    var addedXml = args.Message.DataObject.GetString();
    var result = Parse<ChannelSet>(addedXml);
    Assert.IsNotNull(result);
    Assert.IsNotNull(result.ExtensionNameValue.FirstOrDefault(e => e.Name.Equals(envName)));

    // Strip the extension values and put again to exercise the update path.
    result.ExtensionNameValue.Clear();
    var updateDataObject = CreateDataObject(uri, result);
    await PutAndAssert(handler, updateDataObject);

    // Read back the updated object: the put must have overwritten the extension values.
    args = await GetAndAssert(handler, uri);
    Assert.IsNotNull(args?.Message.DataObject);
    var updatedXml = args.Message.DataObject.GetString();
    result = Parse<ChannelSet>(updatedXml);
    Assert.IsNotNull(result);
    Assert.IsNull(result.ExtensionNameValue.FirstOrDefault(e => e.Name.Equals(envName)));
}
// Verifies that updating a ChannelSet with a row inserted between existing depth
// rows merges the data correctly (5 rows total, new value 3.13 at depth 120).
public void ChannelSet200DataAdapter_Can_Update_ChannelSet_With_Middle_Depth_Data()
{
    var dataGenerator = new DataGenerator();

    // Single increasing measured-depth index "MD" in metres.
    var channelIndex = new ChannelIndex { Direction = IndexDirection.increasing, IndexType = ChannelIndexType.measureddepth, Mnemonic = "MD", Uom = UnitOfMeasure.m };
    ChannelSet.Index = dataGenerator.List(channelIndex);

    // One growing "MSG" channel with no unit of measure.
    ChannelSet.Channel = new List <Channel>
    {
        new Channel()
        {
            Uuid = dataGenerator.Uid(),
            Citation = new Citation { Title = dataGenerator.Name("ChannelSetTest") },
            Mnemonic = "MSG",
            Uom = null,
            ChannelClass = dataGenerator.ToPropertyKindReference("velocity"),
            DataType = EtpDataType.@long,
            GrowingStatus = ChannelStatus.active,
            Index = ChannelSet.Index,
            StartIndex = new DepthIndexValue(),
            EndIndex = new DepthIndexValue(),
            SchemaVersion = OptionsIn.DataVersion.Version200.Value
        }
    };

    // Initial data: depths 0, 100, 150, 200.
    ChannelSet.Data = new ChannelData();
    ChannelSet.SetData(@"[ [ [0 ], [ 3.11 ] ], [ [100 ], [ 3.12 ] ], [ [150 ], [ 3.14 ] ], [ [200 ], [ 3.15 ] ], ]");
    DevKit.AddAndAssert(ChannelSet);

    // Update with the same rows plus a new row at depth 120.
    ChannelSet.Data = new ChannelData();
    ChannelSet.SetData(@"[ [ [0 ], [ 3.11 ] ], [ [100 ], [ 3.12 ] ], [ [120 ], [ 3.13 ] ], [ [150 ], [ 3.14 ] ], [ [200 ], [ 3.15 ] ], ]");
    DevKit.UpdateAndAssert(ChannelSet);

    // Read back all data from depth 0 and verify the merged result.
    var mnemonics = ChannelSet.Index.Select(i => i.Mnemonic).Concat(ChannelSet.Channel.Select(c => c.Mnemonic)).ToList();
    var dataOut = _channelDataProvider.GetChannelData(ChannelSet.GetUri(), new Range <double?>(0, null), mnemonics, null);

    Assert.AreEqual(5, dataOut.Count);
    Assert.AreEqual(2, dataOut[1].Count);
    Assert.AreEqual(3.13, dataOut[2][1][0]);
}
/// <summary>
/// Gets the <see cref="ChannelSet"/> data.
/// </summary>
/// <param name="channelSet">The channel set.</param>
/// <returns>The channel data blob string, or <c>null</c> when the channel set or its data is missing.</returns>
public static string GetData(this ChannelSet channelSet)
{
    // Null-conditional chain replaces the nested null checks; semantics are identical.
    return channelSet?.Data?.Data?.Value;
}
// Maps a Channel within the given ChannelSet to an ETP discovery Resource.
private Resource ToResource(ChannelSet channelSet, Channel entity)
{
    // The channel URI is scoped by its parent channel set.
    var channelUri = entity.GetUri(channelSet);

    return DiscoveryStoreProvider.New(
        uuid: entity.Uuid,
        uri: channelUri,
        resourceType: ResourceTypes.DataObject,
        name: entity.Mnemonic,
        lastChanged: GetLastChanged(entity));
}
// Maps every column ordinal (indices first, then channels) to the literal string "null".
// Returns a sorted index-to-null-value dictionary used when serializing channel data.
private IDictionary<int, string> GetNullValuesByColumnIndex(ChannelSet entity)
{
    Logger.Debug("Getting ChannelSet Channel null values by column index.");

    // Every column gets the same constant value, so build the map directly
    // instead of projecting constant strings through Concat/ToArray/Select.
    var columnCount = entity.Index.Count + entity.Channel.Count;

    return new SortedDictionary<int, string>(
        Enumerable.Range(0, columnCount).ToDictionary(i => i, i => "null"));
}
// Maps column ordinals (indices first, then channels) to their data type names.
// Channels with no declared data type map to null.
private IDictionary<int, string> GetDataTypesByColumnIndex(ChannelSet entity)
{
    Logger.Debug("Getting ChannelSet Channel data types by column index.");

    return new SortedDictionary<int, string>(entity.Index.Select(ToDataType)
        .Concat(entity.Channel.Select(c => c.DataType?.ToString()))
        .Select((dataType, index) => new { DataType = dataType, Index = index })
        // Values are already strings; the original's second ?.ToString() was redundant
        // (and the intermediate ToArray() materialization unnecessary).
        .ToDictionary(x => x.Index, x => x.DataType));
}
// Maps column ordinals (indices first, then channels) to their unit-of-measure strings.
private IDictionary<int, string> GetUnitsByColumnIndex(ChannelSet entity)
{
    Logger.Debug("Getting ChannelSet Channel units by column index.");

    return new SortedDictionary<int, string>(entity.Index.Select(i => i.Uom)
        .Concat(entity.Channel.Select(c => c.Uom))
        .ToArray()
        // A channel Uom may be null (e.g. message channels created with Uom = null);
        // use ?.ToString() so a null unit yields a null entry instead of throwing
        // NullReferenceException as the original unit.ToString() did.
        .Select((unit, index) => new { Unit = unit?.ToString(), Index = index })
        .ToDictionary(x => x.Index, x => x.Unit));
}
/// <summary>
/// Updates the specified <see cref="ChannelSet" /> instance in the store.
/// </summary>
/// <param name="parser">The update parser.</param>
/// <param name="dataObject">The data object to be updated.</param>
public override void Update(WitsmlQueryParser parser, ChannelSet dataObject)
{
    var uri = dataObject.GetUri();

    // Apply the header/metadata update first.
    UpdateEntity(parser, uri);

    // Then merge the incoming channel data against the stored entity
    // and refresh the data chunks and index ranges.
    var dataReader = ExtractDataReader(dataObject, GetEntity(uri));
    UpdateChannelDataAndIndexRange(uri, dataReader);
}
/// <summary>
/// Sets additional default values for the specified data object and URI.
/// </summary>
/// <param name="dataObject">The data object.</param>
/// <param name="uri">The data object URI.</param>
partial void SetAdditionalDefaultValues(ChannelSet dataObject, EtpUri uri)
{
    // Ensure the collection properties are never null so downstream code can enumerate safely.
    dataObject.Index = dataObject.Index ?? new List<ChannelIndex>();
    dataObject.Channel = dataObject.Channel ?? new List<Channel>();
}
// Populates the channel set with a fixed 4-row mock data blob whose layout matches
// the supplied indices: one or two index columns, depth-first or time-first.
// Each row holds three channel values; the first channel carries point metadata
// (value + boolean), and some entries are null to exercise sparse data handling.
public void CreateMockChannelSetData(ChannelSet channelSet, List <ChannelIndex> indices)
{
    channelSet.Data = new ChannelData()
    {
        // Placeholder file URI; the data itself is set inline below.
        FileUri = "file://",
    };

    if (indices.Count == 1)
    {
        if (indices[0].IndexType == ChannelIndexType.measureddepth)
        {
            // Single depth index.
            channelSet.SetData(@"[ [ [0.0 ], [ [ 1.0, true ], 2.0, 3.0 ] ], [ [0.1 ], [ [ 1.1, false ], null, 3.1 ] ], [ [0.2 ], [ null, null, 3.2 ] ], [ [0.3 ], [ [ 1.3, true ], 2.3, 3.3 ] ] ]");
        }
        else if (indices[0].IndexType == ChannelIndexType.datetime)
        {
            // Single date-time index.
            channelSet.SetData(@"[ [ [ ""2016-01-01T00:00:00.0000Z"" ], [ [ 1.0, true ], 2.0, 3.0 ] ], [ [ ""2016-01-01T00:00:01.0000Z"" ], [ [ 1.1, false ], null, 3.1 ] ], [ [ ""2016-01-01T00:00:02.0000Z"" ], [ null, null, 3.2 ] ], [ [ ""2016-01-01T00:00:03.0000Z"" ], [ [ 1.3, true ], 2.3, 3.3 ] ] ]");
        }
    }
    else if (indices.Count == 2)
    {
        if (indices[0].IndexType == ChannelIndexType.measureddepth)
        {
            // Depth primary index with a secondary date-time index.
            channelSet.SetData(@"[ [ [0.0, ""2016-01-01T00:00:00.0000Z"" ], [ [1.0, true ], 2.0, 3.0 ] ], [ [0.1, ""2016-01-01T00:00:01.0000Z"" ], [ [1.1, false ], null, 3.1 ] ], [ [0.2, ""2016-01-01T00:00:02.0000Z"" ], [ null, null, 3.2 ] ], [ [0.3, ""2016-01-01T00:00:03.0000Z"" ], [ [1.3, true ], 2.3, 3.3 ] ] ]");
        }
        else if (indices[0].IndexType == ChannelIndexType.datetime)
        {
            // Date-time primary index with a secondary depth index.
            channelSet.SetData(@"[ [ [ ""2016-01-01T00:00:00.0000Z"", 0.0 ], [ [ 1.0, true ], 2.0, 3.0 ] ], [ [ ""2016-01-01T00:00:01.0000Z"", 0.1 ], [ [ 1.1, false ], null, 3.1 ] ], [ [ ""2016-01-01T00:00:02.0000Z"", 0.2 ], [ null, null, 3.2 ] ], [ [ ""2016-01-01T00:00:03.0000Z"", 0.3 ], [ [ 1.3, true ], 2.3, 3.3 ] ] ]");
        }
    }
}
// Builds an ETP index metadata record for a channel-set index via the protocol-specific adapter.
private IIndexMetadataRecord ToIndexMetadataRecord(IEtpAdapter etpAdapter, ChannelSet entity, ChannelIndex indexChannel, int scale = 3)
{
    var record = etpAdapter.CreateIndexMetadata(
        uri: indexChannel.GetUri(entity),
        isTimeIndex: indexChannel.IsTimeIndex(true),
        isIncreasing: indexChannel.IsIncreasing());

    // The index mnemonic doubles as the description.
    record.Mnemonic = indexChannel.Mnemonic;
    record.Description = indexChannel.Mnemonic;
    record.Uom = Units.GetUnit(indexChannel.Uom);
    record.Scale = scale;

    return record;
}
// Verifies that ChannelSet resources are discoverable both under the parent
// wellbore URI and under the object-type folder URI.
public async Task ChannelSet200_GetResources_Can_Get_All_ChannelSet_Resources()
{
    AddParents();
    DevKit.AddAndAssert(ChannelSet);
    await RequestSessionAndAssert();

    var uri = ChannelSet.GetUri();

    // Discover via the parent wellbore.
    await GetResourcesAndAssert(ChannelSet.Wellbore.GetUri());

    // Discover via the object-type folder under the parent URI.
    var folderUri = uri.Parent.Append(uri.ObjectType);
    await GetResourcesAndAssert(folderUri);
}
// Builds a channel data reader for the incoming entity. When an existing stored
// entity is supplied, the incoming data is merged into it and read from there.
internal ChannelDataReader ExtractDataReader(ChannelSet entity, ChannelSet existing = null)
{
    // TODO: Handle: if (!string.IsNullOrEmpty(entity.Data.FileUri))
    // return null;

    if (existing != null)
    {
        // Update path: carry the incoming data over to the stored entity.
        existing.Data = entity.Data;
        return existing.GetReader();
    }

    // Add path: read directly, then detach the data blob from the entity
    // so the header can be persisted without it.
    var reader = entity.GetReader();
    entity.Data = null;
    return reader;
}
// Collects the current index range for every curve in the channel set,
// keyed by mnemonic.
private Dictionary<string, Range<double?>> GetCurrentIndexRange(ChannelSet entity)
{
    var ranges = new Dictionary<string, Range<double?>>();

    // Index curves share the channel set's overall start/end range.
    foreach (var indexCurve in entity.Index)
    {
        AddIndexRange(indexCurve.Mnemonic, entity.StartIndex, entity.EndIndex, ranges);
    }

    // Each channel carries its own start/end range.
    foreach (var channelCurve in entity.Channel)
    {
        AddIndexRange(channelCurve.Mnemonic, channelCurve.StartIndex, channelCurve.EndIndex, ranges);
    }

    return ranges;
}
/// <summary>
/// Updates the specified <see cref="ChannelSet" /> instance in the store.
/// </summary>
/// <param name="parser">The update parser.</param>
/// <param name="dataObject">The data object to be updated.</param>
public override void Update(WitsmlQueryParser parser, ChannelSet dataObject)
{
    using (var transaction = GetTransaction())
    {
        // Compute the URI once (the original called GetUri() twice).
        var uri = dataObject.GetUri();
        transaction.SetContext(uri);

        // Apply the header/metadata update first.
        UpdateEntity(parser, uri);

        // Extract and merge the channel data against the stored entity.
        var reader = ExtractDataReader(dataObject, GetEntity(uri));
        UpdateChannelDataAndIndexRange(uri, reader);

        // Commit transaction
        transaction.Commit();
    }
}
// Builds an IndexMetadataRecord for a channel-set index curve.
private IndexMetadataRecord ToIndexMetadataRecord(ChannelSet entity, ChannelIndex indexChannel, int scale = 3)
{
    var isTime = indexChannel.IsTimeIndex(true);
    var isIncreasing = indexChannel.IsIncreasing();

    return new IndexMetadataRecord
    {
        Uri = indexChannel.GetUri(entity),
        // The index mnemonic doubles as the description.
        Mnemonic = indexChannel.Mnemonic,
        Description = indexChannel.Mnemonic,
        Uom = Units.GetUnit(indexChannel.Uom),
        Scale = scale,
        IndexType = isTime ? ChannelIndexTypes.Time : ChannelIndexTypes.Depth,
        Direction = isIncreasing ? IndexDirections.Increasing : IndexDirections.Decreasing,
        CustomData = new Dictionary<string, DataValue>(0),
    };
}
// Maps column ordinals to mnemonics, filtering to the queried mnemonics
// while always keeping the index columns.
private IDictionary<int, string> ComputeMnemonicIndexes(ChannelSet entity, string[] allMnemonics, string[] queryMnemonics)
{
    Logger.DebugFormat("Computing mnemonic indexes for ChannelSet.");

    // Pair each mnemonic with its column ordinal.
    var pairs = allMnemonics
        .Select((mnemonic, ordinal) => new { Mnemonic = mnemonic, Index = ordinal });

    // When the query names specific mnemonics, keep only those — but never drop an index column.
    if (queryMnemonics.Any())
    {
        pairs = pairs.Where(p => p.Index < entity.Index.Count || queryMnemonics.Contains(p.Mnemonic));
    }

    // Produce the sorted index-to-mnemonic map.
    return new SortedDictionary<int, string>(pairs.ToDictionary(p => p.Index, p => p.Mnemonic));
}
// Generates one row of random channel values as a JSON-array fragment, e.g.
// " [null, 1.23, [4.56, true]]". Roughly one in five channels is set to null,
// but never all channels in the same row. Channels with point metadata emit a
// nested array of [value, metadata...].
private string GenerateChannelValues(Random random, ChannelSet channelSet)
{
    // StringBuilder replaces repeated string concatenation in the loop;
    // the emitted text is identical to the original implementation.
    var builder = new StringBuilder();
    var nullCount = 0;

    foreach (Channel channel in channelSet.Channel)
    {
        // First channel opens the array; subsequent channels are comma-separated.
        builder.Append(builder.Length == 0 ? " [" : ", ");

        bool setToNull = (random.Next() % 5 == 0);

        // Don't allow all channels to have a null value
        if (setToNull && nullCount < (channelSet.Channel.Count - 1))
        {
            builder.Append("null");
            nullCount++;
        }
        else
        {
            var columnValue = GenerateValuesByType(random, channel.DataType, true);

            if (channel.PointMetadata == null)
            {
                builder.Append(columnValue);
            }
            else
            {
                // Nest the value with one extra random value per point-metadata entry.
                builder.Append("[").Append(columnValue);

                foreach (PointMetadata pointMetaData in channel.PointMetadata)
                {
                    // (The original's "EtpDataType ?? null" was a no-op; pass the nullable through.)
                    builder.Append(", ").Append(GenerateValuesByType(random, pointMetaData.EtpDataType, false));
                }

                builder.Append("]");
            }
        }
    }

    builder.Append("]");
    return builder.ToString();
}
// Appends the next set of index values (one per channel-set index) as a JSON-array
// fragment, advancing each entry of indexesStart in place.
// NOTE(review): indexesStart entries are boxed as DateTimeOffset (datetime),
// long (elapsedtime) or double (depth) — the casts below rely on the caller
// seeding the array with exactly those types.
private string GenerateIndexValues(Random random, ChannelSet channelSet, object[] indexesStart)
{
    var indexValues = string.Empty;

    for (int idx = 0; idx < channelSet.Index.Count; idx++)
    {
        var index = channelSet.Index[idx];
        ChannelIndexType indexValue;

        // Skip indexes with no declared type.
        if (index.IndexType.HasValue)
        {
            indexValue = index.IndexType.Value;
        }
        else
        {
            continue;
        }

        // First emitted index opens the array; subsequent ones are comma-separated.
        indexValues = indexValues == string.Empty ? "[ " : indexValues + ", ";

        // Direction defaults to increasing when unspecified.
        bool isIncreasing = index.Direction.HasValue ? index.Direction.Value == IndexDirection.increasing : true;

        if (indexValue.Equals(ChannelIndexType.datetime))
        {
            // Date-time indexes always advance forward by 1-4 seconds, emitted as quoted ISO-8601 UTC.
            indexesStart[idx] = ((DateTimeOffset)indexesStart[idx]).AddSeconds(random.Next(1, 5));
            indexValues += "\"" + ((DateTimeOffset)indexesStart[idx]).UtcDateTime.ToString("o") + "\"";
        }
        else if (indexValue.Equals(ChannelIndexType.elapsedtime))
        {
            // Elapsed time steps by a fixed 4 units in the index direction.
            indexesStart[idx] = isIncreasing ? (long)indexesStart[idx] + 4 : (long)indexesStart[idx] - 4;
            indexValues += string.Format(" {0:0}", (long)indexesStart[idx]);
        }
        else if (DepthIndexTypes.Contains(indexValue))
        {
            // Depth indexes step by a random 0.1-0.9 in the index direction.
            indexesStart[idx] = isIncreasing ? (double)indexesStart[idx] + random.Next(1, 10) / 10.0 : (double)indexesStart[idx] - random.Next(1, 10) / 10.0;
            indexValues += string.Format(" {0:0.###}", (double)indexesStart[idx]);
        }
    }

    indexValues += " ]";
    return indexValues;
}
// Fits a channel block to the given set of points using the supplied options.
// NOTE(review): not yet implemented — callers currently receive NotImplementedException.
public static ChannelBlock Fit(ChannelSet points, Options options)
{
    throw new NotImplementedException();
}