public void Log141DataAdapter_AddToStore_With_Custom_Data_Delimiter()
{
    const string delimiter = "|";
    AddParents();

    // Use a non-default delimiter in place of the standard ","
    Log.DataDelimiter = delimiter;

    DevKit.InitHeader(Log, LogIndexType.measureddepth);
    DevKit.InitDataMany(Log, DevKit.Mnemonics(Log), DevKit.Units(Log), 10, hasEmptyChannel: false);

    var addResponse = DevKit.Add<LogList, Log>(Log);
    Assert.AreEqual((short)ErrorCodes.Success, addResponse.Result);

    var savedLog = GetLog(Log);

    // The custom delimiter must round-trip through the store
    Assert.AreEqual(delimiter, savedLog.DataDelimiter);

    var rows = savedLog.LogData.FirstOrDefault()?.Data;
    Assert.IsNotNull(rows);

    var expectedPointCount = Log.LogCurveInfo.Count;

    // Every returned row must split into exactly one point per channel
    // when tokenized with the custom delimiter
    foreach (var dataRow in rows)
    {
        var rowPoints = ChannelDataReader.Split(dataRow, delimiter);
        Assert.AreEqual(expectedPointCount, rowPoints.Length);
    }
}
/// <summary>
/// Initializes the null-value array and a dictionary keyed by column index.
/// </summary>
/// <param name="nullValues">The parsed null values.</param>
/// <param name="nullValueDictionary">The null values keyed by their column position.</param>
private static void InitNullValues(out string[] nullValues, out SortedDictionary<int, string> nullValueDictionary)
{
    var parsed = ChannelDataReader.Split("-999.25,,-1000.1");
    var byIndex = new SortedDictionary<int, string>();

    for (var i = 0; i < parsed.Length; i++)
    {
        byIndex.Add(i, parsed[i]);
    }

    nullValues = parsed;
    nullValueDictionary = byIndex;
}
/// <summary>
/// Initializes the unit array and a dictionary keyed by column index.
/// </summary>
/// <param name="units">The parsed units.</param>
/// <param name="unitDictionary">The units keyed by their column position.</param>
private static void InitUnits(out string[] units, out SortedDictionary<int, string> unitDictionary)
{
    var parsed = ChannelDataReader.Split("unitless,m,m,m");
    var byIndex = new SortedDictionary<int, string>();

    for (var i = 0; i < parsed.Length; i++)
    {
        byIndex.Add(i, parsed[i]);
    }

    units = parsed;
    unitDictionary = byIndex;
}
/// <summary>
/// Checks the log data for duplicate indexes.
/// </summary>
/// <param name="logData">The log data.</param>
/// <param name="function">The context function.</param>
/// <param name="delimiter">The data delimiter.</param>
/// <param name="isTimeLog">Is the log a time log.</param>
/// <param name="mnemonicCount">The count of mnemonics.</param>
/// <returns><c>true</c> if Log data has duplicates; otherwise, <c>false</c>.</returns>
public static bool HasDuplicateIndexes(this List<string> logData, Functions function, string delimiter, bool isTimeLog, int mnemonicCount)
{
    var warnings = new List<WitsmlValidationResult>();
    var indexValues = new HashSet<double>();

    foreach (var row in logData)
    {
        var values = ChannelDataReader.Split(row, delimiter, mnemonicCount, warnings);
        var value = values.FirstOrDefault();

        if (isTimeLog)
        {
            DateTimeOffset dto;
            if (!DateTimeOffset.TryParse(value, out dto))
            {
                // Unparseable index: record a warning and keep scanning remaining rows.
                var error = new WitsmlException(function.GetNonConformingErrorCode());
                ChannelDataExtensions.HandleInvalidDataRow(error, warnings);
                continue;
            }

            // TODO: Add compatibility option for DuplicateIndexSetting
            // HashSet<T>.Add returns false when the value is already present,
            // so one call both tests for and records the index (was Contains + Add).
            if (!indexValues.Add(dto.UtcTicks))
            {
                return true;
            }
        }
        else
        {
            double doubleValue;
            if (!double.TryParse(value, out doubleValue))
            {
                var error = new WitsmlException(function.GetNonConformingErrorCode());
                ChannelDataExtensions.HandleInvalidDataRow(error, warnings);
                continue;
            }

            // TODO: Add compatibility option for DuplicateIndexSetting
            if (!indexValues.Add(doubleValue))
            {
                return true;
            }
        }
    }

    // Surface any accumulated row warnings on the operation context.
    if (warnings.Any())
    {
        WitsmlOperationContext.Current.Warnings.AddRange(warnings);
    }

    return false;
}
/// <summary>
/// Gets a <see cref="ChannelDataReader"/> that can be used to process the <see cref="ChannelDataChunk"/> data.
/// </summary>
/// <param name="channelDataChunk">The channel data chunk.</param>
/// <param name="reverse">if set to <c>true</c> the primary index should be reversed.</param>
/// <returns>A reader over the chunk's data with its indices attached.</returns>
public static ChannelDataReader GetReader(this ChannelDataChunk channelDataChunk, bool reverse = false)
{
    _log.DebugFormat("Creating a ChannelDataReader for a ChannelDataChunk. Reverse: {0}", reverse);

    // The chunk has no data-type metadata, so pass an empty array.
    var reader = new ChannelDataReader(
        channelDataChunk.Data,
        ChannelDataReader.Split(channelDataChunk.MnemonicList),
        ChannelDataReader.Split(channelDataChunk.UnitList),
        new string[0],
        ChannelDataReader.Split(channelDataChunk.NullValueList),
        channelDataChunk.Uri,
        channelDataChunk.Uid);

    return reader.WithIndices(channelDataChunk.Indices, calculate: reverse, reverse: reverse);
}
/// <summary>
/// Initializes the mnemonic array, a mnemonic filter, and a dictionary of
/// filtered mnemonics keyed by their original column index.
/// </summary>
/// <param name="mnemonics">All parsed mnemonics.</param>
/// <param name="mnemonicFilter">The filter applied to the mnemonics.</param>
/// <param name="mnemonicFilterDictionary">Mnemonics passing the filter, keyed by column position.</param>
private static void InitMnemonics(out string[] mnemonics, out string[] mnemonicFilter, out SortedDictionary<int, string> mnemonicFilterDictionary)
{
    var allMnemonics = ChannelDataReader.Split("DEPTH,A,B,C");
    var filter = new[] { "MD", "A", "B" };
    var filtered = new SortedDictionary<int, string>();

    for (var i = 0; i < allMnemonics.Length; i++)
    {
        if (filter.ContainsIgnoreCase(allMnemonics[i]))
        {
            filtered.Add(i, allMnemonics[i]);
        }
    }

    mnemonics = allMnemonics;
    mnemonicFilter = filter;
    mnemonicFilterDictionary = filtered;
}
/// <summary>
/// Gets multiple readers for each LogData from a <see cref="Witsml141.Log"/> instance.
/// </summary>
/// <param name="log">The log.</param>
/// <returns>An <see cref="IEnumerable{ChannelDataReader}"/>.</returns>
public static IEnumerable <ChannelDataReader> GetReaders(this Witsml141.Log log)
{
    // Nothing to enumerate when the log or its data sections are absent.
    if (log?.LogData == null) { yield break; }

    _log.DebugFormat("Creating ChannelDataReaders for {0}", log.GetType().FullName);

    var isTimeIndex = log.IsTimeLog();
    var increasing = log.IsIncreasing();

    foreach (var logData in log.LogData)
    {
        // Skip sections with no data rows.
        if (logData?.Data == null || !logData.Data.Any()) { continue; }

        var mnemonics = ChannelDataReader.Split(logData.MnemonicList);
        var units = ChannelDataReader.Split(logData.UnitList);
        // Data types come from the curve metadata rather than the mnemonic list.
        var dataTypes = log.LogCurveInfo.Select(x => x.TypeLogData?.ToString()).ToArray();
        var nullValues = log.GetNullValues(mnemonics).ToArray();

        // Split index curve from other value curves
        // NOTE(review): falls back to the first mnemonic/unit when the declared
        // index curve is not found in LogCurveInfo — this (and the Skip(1) calls
        // below) assumes the index is the first column; TODO confirm with callers.
        var indexCurve = log.LogCurveInfo.GetByMnemonic(log.IndexCurve) ?? new Witsml141.ComponentSchemas.LogCurveInfo
        {
            Mnemonic = new Witsml141.ComponentSchemas.ShortNameStruct(mnemonics.FirstOrDefault()),
            Unit = units.FirstOrDefault()
        };

        // Skip index curve when passing mnemonics to reader
        mnemonics = mnemonics.Skip(1).ToArray();
        units = units.Skip(1).ToArray();
        dataTypes = dataTypes.Skip(1).ToArray();
        nullValues = nullValues.Skip(1).ToArray();

        // mnemonics.Length + 1 restores the index column in the reader's channel count.
        yield return(new ChannelDataReader(logData.Data, mnemonics.Length + 1, mnemonics, units, dataTypes, nullValues, log.GetUri(), dataDelimiter: log.GetDataDelimiterOrDefault())
            // Add index curve to separate collection
            .WithIndex(indexCurve.Mnemonic.Value, indexCurve.Unit, increasing, isTimeIndex));
    }
/// <summary>
/// Determines whether every mnemonic that requires a unit has one specified
/// in the log data's unit list.
/// </summary>
/// <param name="logCurves">The log curves declaring the expected units.</param>
/// <param name="logData">The log data containing the mnemonic and unit lists.</param>
/// <returns><c>true</c> when all required units are specified; otherwise, <c>false</c>.</returns>
private bool UnitSpecified(List<LogCurveInfo> logCurves, LogData logData)
{
    var mnemonics = ChannelDataReader.Split(logData.MnemonicList);
    var units = ChannelDataReader.Split(logData.UnitList);

    for (var i = 0; i < mnemonics.Length; i++)
    {
        // Fewer units than mnemonics means at least one unit is missing.
        if (units.Length <= i)
            return false;

        var curve = logCurves.FirstOrDefault(l => l.Mnemonic.Value.EqualsIgnoreCase(mnemonics[i]));

        // A curve that declares a unit must have a non-blank entry in the unit list.
        if (!string.IsNullOrEmpty(curve?.Unit) && string.IsNullOrEmpty(units[i].Trim()))
            return false;
    }

    return true;
}
/// <summary>
/// Gets the log data mnemonics.
/// </summary>
/// <param name="parser">The parser.</param>
/// <returns>The mnemonics from the logData element, or <c>null</c> when the element is absent.</returns>
public static IEnumerable<string> GetLogDataMnemonics(this WitsmlQueryParser parser)
{
    _log.Debug("Getting logData mnemonics from parser.");

    var logData = parser.Property("logData");

    if (logData == null)
        return null;

    var mnemonicList = parser.Properties(logData, "mnemonicList").FirstOrDefault();

    if (mnemonicList == null)
        return null;

    // A present-but-blank mnemonicList yields an empty sequence rather than null.
    if (string.IsNullOrWhiteSpace(mnemonicList.Value))
        return Enumerable.Empty<string>();

    return ChannelDataReader.Split(mnemonicList.Value);
}
/// <summary>
/// Determines whether the units in the log data's unit list match the units
/// declared on the corresponding log curves.
/// </summary>
/// <param name="logCurves">The log curves declaring the expected units.</param>
/// <param name="logData">The log data containing the mnemonic and unit lists.</param>
/// <returns><c>true</c> if every mnemonic's unit matches its curve's unit; otherwise, <c>false</c>.</returns>
private bool UnitsMatch(List<LogCurveInfo> logCurves, LogData logData)
{
    var mnemonics = ChannelDataReader.Split(logData.MnemonicList);
    var units = ChannelDataReader.Split(logData.UnitList);

    for (var i = 0; i < mnemonics.Length; i++)
    {
        var mnemonic = mnemonics[i];
        var logCurve = logCurves.FirstOrDefault(l => l.Mnemonic.Value.EqualsIgnoreCase(mnemonic));

        // Mnemonics without a matching curve have nothing to compare against.
        if (logCurve == null)
        {
            continue;
        }

        // Guard against a unit list shorter than the mnemonic list; previously
        // units[i] was indexed unconditionally and could throw
        // IndexOutOfRangeException (sibling UnitSpecified already guards this case).
        var unit = units.Length > i ? units[i].Trim() : string.Empty;

        // A blank unit matches a curve with no declared unit; otherwise compare
        // the unit strings case-insensitively.
        if (string.IsNullOrEmpty(unit) && string.IsNullOrEmpty(logCurve.Unit) || unit.EqualsIgnoreCase(logCurve.Unit))
        {
            continue;
        }

        return false;
    }

    return true;
}
/// <summary>
/// Determines if the first value in the data row is a type of datetime.
/// </summary>
/// <param name="dataRow">A row of data from log data.</param>
/// <param name="delimiter">The delimiter of the log data row.</param>
/// <returns>
/// <c>true</c> if the first value of the row is a type of date time; otherwise, false.
/// </returns>
public static bool IsFirstValueDateTime(this string dataRow, string delimiter = ",")
{
    // Anything that does not parse as a number is treated as a date/time index.
    // NOTE(review): double.TryParse uses the current culture here — presumably
    // consistent with the rest of the codebase; verify if locale issues arise.
    var firstValue = ChannelDataReader.Split(dataRow, delimiter).FirstOrDefault();

    double parsed;
    return !double.TryParse(firstValue, out parsed);
}
/// <summary>
/// Validates the log data sections against the log header: size limits, mnemonic
/// uniqueness and legality, unit presence and matching, and index-curve placement.
/// The order of the checks below is significant — each error code is reported only
/// when all earlier checks pass.
/// </summary>
/// <param name="indexCurve">The mnemonic of the index curve.</param>
/// <param name="logCurves">The log curves declared in the header (may be null).</param>
/// <param name="logDatas">The log data sections to validate.</param>
/// <param name="mergedLogCurveInfoMnemonics">The merged set of known curve mnemonics.</param>
/// <param name="delimiter">The data delimiter.</param>
/// <param name="function">The context function.</param>
/// <param name="insert">If <c>true</c>, apply insert-only constraints (no more mnemonics than curves).</param>
/// <returns>A <see cref="ValidationResult"/> describing the first failure, or <c>null</c> when valid.</returns>
private ValidationResult ValidateLogData(string indexCurve, List <LogCurveInfo> logCurves, List <LogData> logDatas, List <string> mergedLogCurveInfoMnemonics, string delimiter, Functions function, bool insert = true)
{
    var totalPoints = 0;

    // NOTE(review): despite the name, a true result from IsDataNodesValid triggers
    // the MaxDataExceeded error here — confirm the helper's sense with its definition.
    if (Context.Function.IsDataNodesValid(ObjectTypes.GetObjectType(DataObject), logDatas.Sum(x => x.Data.Count)))
    {
        return(new ValidationResult(ErrorCodes.MaxDataExceeded.ToString(), new[] { "LogData", "Data" }));
    }
    else
    {
        foreach (var logData in logDatas)
        {
            // Every section must declare its column identifiers.
            if (string.IsNullOrWhiteSpace(logData.MnemonicList))
            {
                return(new ValidationResult(ErrorCodes.MissingColumnIdentifiers.ToString(), new[] { "LogData", "MnemonicList" }));
            }
            else
            {
                var mnemonics = ChannelDataReader.Split(logData.MnemonicList);

                if (logData.Data != null && logData.Data.Count > 0)
                {
                    // TODO: Optimize use of IsFirstValueDateTime inside of HasDuplicateIndexes (e.g. multiple calls to Split)
                    // Reject sections containing two rows with the same index value.
                    if (logData.Data.HasDuplicateIndexes(function, delimiter, logData.Data[0].IsFirstValueDateTime(delimiter), mnemonics.Length))
                    {
                        return(new ValidationResult(ErrorCodes.NodesWithSameIndex.ToString(), new[] { "LogData", "Data" }));
                    }

                    // TODO: Can we use mnemonics.Length instead of calling Split again here?
                    // Estimate total points from the first row's width times the row count.
                    totalPoints += logData.Data.Count * ChannelDataReader.Split(logData.Data[0], delimiter).Length;
                }

                // NOTE(review): as above, a true IsTotalDataPointsValid result maps to
                // MaxDataExceeded — verify the helper's sense.
                if (function.IsTotalDataPointsValid(totalPoints))
                {
                    return(new ValidationResult(ErrorCodes.MaxDataExceeded.ToString(), new[] { "LogData", "Data" }));
                }
                // Duplicate mnemonics in the list.
                else if (mnemonics.Distinct().Count() < mnemonics.Length)
                {
                    return(new ValidationResult(ErrorCodes.MnemonicsNotUnique.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // Mnemonics containing reserved/illegal characters.
                else if (mnemonics.Any(m => _illegalColumnIdentifiers.Any(c => m.Contains(c))))
                {
                    return(new ValidationResult(ErrorCodes.BadColumnIdentifier.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // Mnemonics must be drawn from the merged set of known curves.
                else if (!IsValidLogDataMnemonics(mergedLogCurveInfoMnemonics, mnemonics))
                {
                    return(new ValidationResult(ErrorCodes.MissingColumnIdentifiers.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // On insert, the data cannot reference more columns than declared curves.
                else if (insert && logCurves != null && mnemonics.Length > logCurves.Count)
                {
                    return(new ValidationResult(ErrorCodes.BadColumnIdentifier.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // A unit list is mandatory.
                else if (string.IsNullOrWhiteSpace(logData.UnitList))
                {
                    return(new ValidationResult(ErrorCodes.MissingUnitList.ToString(), new[] { "LogData", "UnitList" }));
                }
                // Each curve that declares a unit must have one in the unit list.
                else if (!UnitSpecified(logCurves, logData))
                {
                    return(new ValidationResult(ErrorCodes.MissingUnitForMeasureData.ToString(), new[] { "LogData", "UnitList" }));
                }
                // The declared index curve must appear among the mnemonics.
                else if (!string.IsNullOrEmpty(indexCurve) && mnemonics.All(m => m != indexCurve))
                {
                    return(new ValidationResult(ErrorCodes.IndexCurveNotFound.ToString(), new[] { "IndexCurve" }));
                }
                // ... and it must be the first column.
                else if (!mnemonics[0].EqualsIgnoreCase(indexCurve))
                {
                    return(new ValidationResult(ErrorCodes.IndexNotFirstInDataColumnList.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // Case-insensitive / uid-level duplicate check.
                else if (DuplicateUid(mnemonics))
                {
                    return(new ValidationResult(ErrorCodes.MnemonicsNotUnique.ToString(), new[] { "LogData", "MnemonicList" }));
                }
                // Units must agree with the header's curve units.
                else if (logCurves != null && !UnitsMatch(logCurves, logData))
                {
                    return(new ValidationResult(ErrorCodes.UnitListNotMatch.ToString(), new[] { "LogData", "UnitList" }));
                }
            }
        }
    }

    // All sections passed every check.
    return(null);
}