public static ActivityUpsertTO CreateEntriesFromOutputTOs(IList<OutputTO> outputToList, IDataListCompiler compiler, Guid dlId, out ErrorResultTO errors) { errors = new ErrorResultTO(); ActivityUpsertTO result = new ActivityUpsertTO(); foreach(OutputTO outputTo in outputToList) { // First detect whether the entry is a recordset; if it is a scalar, upsert a scalar, otherwise upsert a recordset. // Break the value into parts so the required entry can be created correctly. IBinaryDataListEntry entry = Dev2BinaryDataListFactory.CreateEntry(RecsetName, string.Empty, dlId); int idx = 1; foreach(string output in outputTo.OutputStrings) { IBinaryDataListItem itemToAdd = Dev2BinaryDataListFactory.CreateBinaryItem(output, RecsetName, FieldName, idx); idx++; string error; entry.TryAppendRecordItem(itemToAdd, out error); if(error != string.Empty) { errors.AddError(error); } } // Push the entry once; no further looping is required. result.AddEntry(entry, outputTo.OutPutDescription); } return result; }
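// Hedged usage sketch (not from the original source): how the upsert helper above might be driven by a caller
// that already has a compiler instance and a data list ID. The expressions, values and the outer allErrors
// collection are illustrative assumptions; only the CreateOutputTO and CreateEntriesFromOutputTOs signatures
// are taken from this file.
IList<OutputTO> outputTos = new List<OutputTO>
{
    DataListFactory.CreateOutputTO("[[Files().Name]]", "a.txt"),
    DataListFactory.CreateOutputTO("[[Files().Name]]", "b.txt")
};
ErrorResultTO upsertErrors;
ActivityUpsertTO upsert = CreateEntriesFromOutputTOs(outputTos, compiler, dlId, out upsertErrors);
// accumulate into the caller's error collection, mirroring the pattern used throughout this file
allErrors.MergeErrors(upsertErrors);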
Guid TryExecuteWf(ErrorResultTO to, IServiceTestModelTO test) { var result = new Guid(); var wfappUtils = new WfApplicationUtils(); var invokeErrors = new ErrorResultTO(); var resourceId = DataObject.ResourceID; if (test?.Inputs != null) { AddTestInputsToJsonOrRecordset(test); } var serializer = new Dev2JsonSerializer(); try { result = _inner.ExecuteWf(new TestExecutionContext { _test = test, _wfappUtils = wfappUtils, _invokeErrors = invokeErrors, _serializer = serializer, }); } catch (InvalidWorkflowException iwe) { Dev2Logger.Error(iwe, DataObject.ExecutionID.ToString()); var msg = iwe.Message; var start = msg.IndexOf("Flowchart ", StringComparison.Ordinal); to?.AddError(start > 0 ? GlobalConstants.NoStartNodeError : iwe.Message); var failureMessage = DataObject.Environment.FetchErrors(); wfappUtils.DispatchDebugState(DataObject, StateType.End, out invokeErrors); SetTestRunResultAfterInvalidWorkflowException(test, resourceId, serializer, failureMessage); } catch (Exception ex) { Dev2Logger.Error(ex, GlobalConstants.WarewolfError); to.AddError(ex.Message); wfappUtils.DispatchDebugState(DataObject, StateType.End, out invokeErrors); SetTestRunResultAfterException(test, resourceId, serializer, ex); } return(result); }
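// Hedged usage sketch (illustrative only): the caller owns the ErrorResultTO and inspects it after
// TryExecuteWf returns, since the method records failures rather than rethrowing them. The test instance
// and the surrounding execution context are assumed to exist on the hosting class.
var executionErrors = new ErrorResultTO();
Guid executionId = TryExecuteWf(executionErrors, test);
if (executionErrors.HasErrors())
{
    // push the collected messages into the environment, as the other activities in this file do
    DataObject.Environment.AddError(executionErrors.MakeDisplayReady());
}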
public string GenerateUserFriendlyModel(IExecutionEnvironment env, Dev2DecisionMode mode, out ErrorResultTO errors) { errors = new ErrorResultTO(); ErrorResultTO allErrors = new ErrorResultTO(); string fn = DecisionDisplayHelper.GetDisplayValue(EvaluationFn); if (PopulatedColumnCount == 0) { return("If " + fn + " "); } if (PopulatedColumnCount == 1) { if (DataListUtil.GetRecordsetIndexType(Col1) == enRecordsetIndexType.Star) { var allValues = DataListUtil.GetAllPossibleExpressionsForFunctionOperations(Col1, env, out errors); allErrors.MergeErrors(errors); StringBuilder expandStarredIndex = new StringBuilder(); expandStarredIndex.Append(allValues[0] + " " + fn); allValues.RemoveAt(0); foreach (var value in allValues) { expandStarredIndex.Append(" " + mode + " " + value + " " + fn); } errors = allErrors; return("If " + expandStarredIndex); } errors = allErrors; return("If " + Col1 + " " + fn + " "); } if (PopulatedColumnCount == 2) { StringBuilder expandStarredIndices = new StringBuilder(); if (DataListUtil.GetRecordsetIndexType(Col1) != enRecordsetIndexType.Star && DataListUtil.GetRecordsetIndexType(Col2) == enRecordsetIndexType.Star) { var allCol2Values = DataListUtil.GetAllPossibleExpressionsForFunctionOperations(Col2, env, out errors); allErrors.MergeErrors(errors); expandStarredIndices.Append(Col1 + " " + fn + " " + allCol2Values[0]); allCol2Values.RemoveAt(0); foreach (var value in allCol2Values) { expandStarredIndices.Append(" " + mode + " " + Col1 + " " + fn + " " + value); } errors = allErrors; return("If " + expandStarredIndices); } if (DataListUtil.GetRecordsetIndexType(Col1) == enRecordsetIndexType.Star && DataListUtil.GetRecordsetIndexType(Col2) != enRecordsetIndexType.Star) { var allCol1Values = DataListUtil.GetAllPossibleExpressionsForFunctionOperations(Col1, env, out errors); allErrors.MergeErrors(errors); expandStarredIndices.Append(allCol1Values[0] + " " + fn + " " + Col2); allCol1Values.RemoveAt(0); foreach (var value in allCol1Values) { expandStarredIndices.Append(" " + mode + " " + value + " " + fn + " " + Col2); } errors = allErrors; return("If " + expandStarredIndices); } if ((DataListUtil.GetRecordsetIndexType(Col1) == enRecordsetIndexType.Star && DataListUtil.GetRecordsetIndexType(Col2) == enRecordsetIndexType.Star) || (DataListUtil.GetRecordsetIndexType(Col1) != enRecordsetIndexType.Star && DataListUtil.GetRecordsetIndexType(Col2) != enRecordsetIndexType.Star)) { var allCol1Values = DataListUtil.GetAllPossibleExpressionsForFunctionOperations(Col1, env, out errors); allErrors.MergeErrors(errors); var allCol2Values = DataListUtil.GetAllPossibleExpressionsForFunctionOperations(Col2, env, out errors); allErrors.MergeErrors(errors); expandStarredIndices.Append(allCol1Values[0] + " " + fn + " " + allCol2Values[0]); allCol1Values.RemoveAt(0); allCol2Values.RemoveAt(0); for (var i = 0; i < Math.Max(allCol1Values.Count, allCol2Values.Count); i++) { if (i >= allCol1Values.Count) { allCol1Values.Add(null); } if (i >= allCol2Values.Count) { allCol2Values.Add(null); } try { expandStarredIndices.Append(" " + mode + " " + allCol1Values[i] + " " + fn + " " + allCol2Values[i]); } catch (IndexOutOfRangeException) { errors.AddError("You appear to have recordsets of different sizes"); allErrors.MergeErrors(errors); } } errors = allErrors; return("If " + expandStarredIndices); } errors = allErrors; return("If " + Col1 + " " + fn + " " + Col2 + " "); } if (PopulatedColumnCount == 3) { var expandStarredIndices = ResolveStarredIndices(env, mode.ToString(), out errors); allErrors.MergeErrors(errors); if (!string.IsNullOrEmpty(expandStarredIndices)) { errors = allErrors; return(expandStarredIndices); } errors = allErrors; return("If " + Col1 + " " + fn + " " + Col2 + " and " + Col3); } errors = allErrors; return("<< Internal Error Generating Decision Model: Populated Column Count Cannot Exceed 3 >>"); }
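// Hedged usage sketch: generating the human-readable decision text defined above and folding its errors
// into a caller-level collection. "decision" stands for an instance of the class that owns
// GenerateUserFriendlyModel; env, Dev2DecisionMode.AND and allErrors are assumed to be supplied by the caller.
ErrorResultTO modelErrors;
string friendlyModel = decision.GenerateUserFriendlyModel(env, Dev2DecisionMode.AND, out modelErrors);
allErrors.MergeErrors(modelErrors);
// friendlyModel is the "If <column> <operation> ..." text that the debug code further down displays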
/// <summary> /// Merges the into instance. /// </summary> /// <param name="obj">The obj.</param> /// <param name="typeOf">The type of.</param> /// <param name="depth">The depth.</param> /// <param name="errorResult">The error result.</param> private void MergeIntoInstance(IBinaryDataList obj, enDataListMergeTypes typeOf, enTranslationDepth depth, out ErrorResultTO errorResult) { errorResult = new ErrorResultTO(); BinaryDataList toClone = (BinaryDataList)obj; if(obj.ParentUID != UID) { ParentUID = toClone.ParentUID; } IList<string> lamdaErrors = new List<string>(); IList<string> errorList = new List<string>(); IList<string> unionKeyHits = new List<string>(); // clone the dictionary IList<string> tmp = _templateDict.Keys.ToList(); // must be this way since we modify the collection... foreach(string e in tmp) { string error; IBinaryDataListEntry cloned; if(typeOf == enDataListMergeTypes.Union) { // fetch this instance via clone, fetch toClone instance and merge the data IBinaryDataListEntry fetchTmp; if(toClone._templateDict.TryGetValue(e, out fetchTmp)) { unionKeyHits.Add(e); cloned = fetchTmp.Clone(depth, UID, out error); if(error != string.Empty) { lamdaErrors.Add(error); } else { DepthMerge(depth, cloned, e, out lamdaErrors); } // We need to ensure that the intellisense dictionary is populated with this key ;) } } else if(typeOf == enDataListMergeTypes.Intersection) { IBinaryDataListEntry toFetch; if(toClone.TryGetEntry(e, out toFetch, out error)) { cloned = toClone._templateDict[e].Clone(depth, UID, out error); if(error != string.Empty) { lamdaErrors.Add(error); } else { DepthMerge(depth, cloned, e, out lamdaErrors); } } else { lamdaErrors.Add("Missing DataList item [ " + e + " ] "); } } // compile error list ?! foreach(string err in lamdaErrors) { errorList.Add(err); } lamdaErrors.Clear(); } // now process key misses for union if(typeOf == enDataListMergeTypes.Union) { //toClone._templateDict.Keys foreach(string k in (toClone._templateDict.Keys.ToArray().Except(unionKeyHits))) { string error; IBinaryDataListEntry cloned = toClone._templateDict[k].Clone(depth, UID, out error); if(error != string.Empty) { lamdaErrors.Add(error); } else { DepthMerge(depth, cloned, k, out lamdaErrors); } } } // now build the silly composite object since lamba is an daft construct // how about proper exception handling MS?! foreach(string err in errorList) { errorResult.AddError(err); } }
public IBinaryDataList ConvertTo(byte[] input, StringBuilder targetShape, out ErrorResultTO errors) { errors = new ErrorResultTO(); var payload = Encoding.UTF8.GetString(input); IBinaryDataList result = null; // build shape if (String.IsNullOrEmpty(targetShape.ToString())) { errors.AddError("Null payload shape"); } else { string error; result = BuildTargetShape(targetShape, out error); if (!string.IsNullOrEmpty(error)) { errors.AddError(error); } // populate the shape if (payload != string.Empty) { try { XmlDocument xDoc = new XmlDocument(); xDoc.LoadXml(payload); if (xDoc.DocumentElement != null) { var children = xDoc.DocumentElement.ChildNodes; IDictionary <string, int> indexCache = new Dictionary <string, int>(); { // spin through each element in the XML foreach (XmlNode c in children) { if (!DataListUtil.IsSystemTag(c.Name)) { // scalars and recordset fetch IBinaryDataListEntry entry; if (result.TryGetEntry(c.Name, out entry, out error)) { if (entry.IsRecordset) { // fetch recordset index int fetchIdx; int idx; // recset index if (indexCache.TryGetValue(c.Name, out fetchIdx)) { idx = fetchIdx; } else { // 28-02-2013 - Sashen.Naidoo // BUG 9144 // A cache miss does not necessary mean there is nothing in the record set, // it just means the value isn't in the record set. idx = indexCache.Count == 0 ? 1 : indexCache.Count; } // process recordset var nl = c.ChildNodes; foreach (XmlNode subc in nl) { entry.TryPutRecordItemAtIndex(Dev2BinaryDataListFactory.CreateBinaryItem(subc.InnerXml, c.Name, subc.Name, idx), idx, out error); if (!string.IsNullOrEmpty(error)) { errors.AddError(error); } // update this recordset index } indexCache[c.Name] = ++idx; } else { // process scalar entry.TryPutScalar(Dev2BinaryDataListFactory.CreateBinaryItem(c.InnerXml, c.Name), out error); if (!string.IsNullOrEmpty(error)) { errors.AddError(error); } } } else { errors.AddError(error); } } } } } // Transfer System Tags for (var i = 0; i < TranslationConstants.systemTags.Length; i++) { var key = TranslationConstants.systemTags.GetValue(i).ToString(); var query = String.Concat("//", key); var n = xDoc.SelectSingleNode(query); if (n != null && !string.IsNullOrEmpty(n.InnerXml)) { var bkey = GlobalConstants.SystemTagNamespace + "." + key; IBinaryDataListEntry sysEntry; if (result.TryGetEntry(bkey, out sysEntry, out error)) { sysEntry.TryPutScalar(Dev2BinaryDataListFactory.CreateBinaryItem(n.InnerXml, bkey), out error); } } } } catch (Exception e) { // if use passed in empty input they only wanted the shape ;) if (input.Length > 0) { errors.AddError(e.Message); } } } } return(result); }
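// Hedged usage sketch: feeding an XML payload and a target shape through the translator above. The
// translator instance name and the sample XML are illustrative; the byte[]/StringBuilder/out-errors
// signature is taken from the method itself.
ErrorResultTO translateErrors;
byte[] payloadBytes = Encoding.UTF8.GetBytes("<DataList><Name>Bob</Name></DataList>");
var shape = new StringBuilder("<DataList><Name/></DataList>");
IBinaryDataList binaryList = translator.ConvertTo(payloadBytes, shape, out translateErrors);
if (translateErrors.HasErrors())
{
    // a null or partially populated result is possible; callers are expected to check the errors first
}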
public List<DebugItem> CreateDebugInputs(IExecutionEnvironment env) { List<IDebugItem> result = new List<IDebugItem>(); var allErrors = new ErrorResultTO(); try { Dev2DecisionStack dds = Conditions; ErrorResultTO error; string userModel = dds.GenerateUserFriendlyModel(env, dds.Mode, out error); allErrors.MergeErrors(error); foreach (Dev2Decision dev2Decision in dds.TheStack) { AddInputDebugItemResultsAfterEvaluate(result, ref userModel, env, dds.Mode, dev2Decision.Col1, out error); allErrors.MergeErrors(error); AddInputDebugItemResultsAfterEvaluate(result, ref userModel, env, dds.Mode, dev2Decision.Col2, out error); allErrors.MergeErrors(error); AddInputDebugItemResultsAfterEvaluate(result, ref userModel, env, dds.Mode, dev2Decision.Col3, out error); allErrors.MergeErrors(error); } var itemToAdd = new DebugItem(); userModel = userModel.Replace("OR", " OR\r\n") .Replace("AND", " AND\r\n") .Replace("\r\n ", "\r\n") .Replace("\r\n\r\n", "\r\n") .Replace("  ", " "); AddDebugItem(new DebugItemStaticDataParams(userModel, "Statement"), itemToAdd); result.Add(itemToAdd); itemToAdd = new DebugItem(); AddDebugItem(new DebugItemStaticDataParams(dds.Mode == Dev2DecisionMode.AND ? "YES" : "NO", "Require All decisions to be True"), itemToAdd); result.Add(itemToAdd); } catch (JsonSerializationException) { } catch (Exception e) { allErrors.AddError(e.Message); } finally { if (allErrors.HasErrors()) { var serviceName = GetType().Name; DisplayAndWriteError(serviceName, allErrors); } } var val = result.Select(a => a as DebugItem).ToList(); _inner.SetDebugInputs(val); return(val); }
/// <summary> /// Checks the validity of the input argument and returns the fields in a list of strings /// </summary> /// <param name="to">To.</param> /// <param name="bdl">The BDL.</param> /// <param name="errors">The errors.</param> /// <returns></returns> public Func <IList <RecordSetSearchPayload> > GenerateInputRange(IRecsetSearch to, IBinaryDataList bdl, out ErrorResultTO errors) { errors = new ErrorResultTO(); ErrorResultTO allErrors = new ErrorResultTO(); Func <IList <RecordSetSearchPayload> > result = () => { IList <RecordSetSearchPayload> fieldList = new List <RecordSetSearchPayload>(); string InputField = to.FieldsToSearch; string recSet = DataListUtil.ExtractRecordsetNameFromValue(DataListUtil.StripLeadingAndTrailingBracketsFromValue(InputField)); IBinaryDataListEntry bdle; string error; bdl.TryGetEntry(recSet, out bdle, out error); allErrors.AddError(error); if (bdle == null) { throw new RecordsetNotFoundException("Could not find Recordset [ " + recSet + " ]"); } IList <Dev2Column> realCols = bdle.Columns; string[] tmpCols = InputField.Replace(" ", "").Split(','); // Travis.Frisinger : 09.25.2012 // we need to adjust the tmpCols to avoid * causing crap with the match int loc = 0; foreach (string tc in tmpCols) { string recset = DataListUtil.ExtractRecordsetNameFromValue(tc); string field = DataListUtil.ExtractFieldNameFromValue(tc); string myNewSearch = DataListUtil.AddBracketsToValueIfNotExist(DataListUtil.MakeValueIntoHighLevelRecordset(recset)); if (field != string.Empty) { myNewSearch = DataListUtil.MakeValueIntoHighLevelRecordset(recset) + "." + field; } tmpCols[loc] = DataListUtil.AddBracketsToValueIfNotExist(myNewSearch); loc++; } int pos = 0; bool found = true; int start; Int32.TryParse(to.StartIndex, out start); if (start == 0) { start = 1; } while (pos < tmpCols.Length && found) { int innerPos; if (IsMatch(tmpCols[pos], recSet, realCols, out innerPos)) { for (int i = start; i <= bdle.FetchLastRecordsetIndex(); i++) { IBinaryDataListItem tmp = bdle.TryFetchRecordsetColumnAtIndex(realCols[innerPos].ColumnName, i, out error); if (error != string.Empty) { allErrors.AddError(error); } RecordSetSearchPayload p = new RecordSetSearchPayload { Index = i, Payload = tmp.TheValue }; fieldList.Add(p); } } else { if (IsRecorsetWithoutField(tmpCols[pos], recSet)) { IIndexIterator ixItr = bdle.FetchRecordsetIndexes(); while (ixItr.HasMore()) { int next = ixItr.FetchNextIndex(); foreach (Dev2Column col in realCols) { IBinaryDataListItem tmp = bdle.TryFetchRecordsetColumnAtIndex(col.ColumnName, next, out error); RecordSetSearchPayload p = new RecordSetSearchPayload { Index = next, Payload = tmp.TheValue }; fieldList.Add(p); } } } else { found = false; } } pos++; } if (!found) { fieldList.Clear(); } return(fieldList); }; return(result); }
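// Hedged usage sketch: the method above returns a deferred Func rather than the list itself, so the
// (potentially expensive) recordset walk only happens when the caller invokes it, and failures such as
// RecordsetNotFoundException surface at invocation time. The IRecsetSearch and IBinaryDataList instances
// are assumed to come from the find-records activity that owns this helper.
ErrorResultTO rangeErrors;
Func<IList<RecordSetSearchPayload>> fetchRange = GenerateInputRange(searchTo, binaryDataList, out rangeErrors);
if (!rangeErrors.HasErrors())
{
    IList<RecordSetSearchPayload> rows = fetchRange();
    // each payload carries the recordset index and the value found at that index
}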
/// <summary> /// Executes the sub request. /// </summary> /// <param name="dataObject">The data object.</param> /// <param name="workspaceId">The workspace unique identifier.</param> /// <param name="inputDefs">The input defs.</param> /// <param name="outputDefs">The output defs.</param> /// <param name="errors">The errors.</param> /// <param name="update"></param> /// <param name="handleErrors"> buble up errors or not</param> /// <returns></returns> public IExecutionEnvironment ExecuteSubRequest(IDSFDataObject dataObject, Guid workspaceId, string inputDefs, string outputDefs, out ErrorResultTO errors, int update, bool handleErrors) { var wasTestExecution = dataObject.IsServiceTestExecution; try { dataObject.IsServiceTestExecution = false; var theWorkspace = wRepository.Get(workspaceId); var invoker = CreateEsbServicesInvoker(theWorkspace); ErrorResultTO invokeErrors; var oldID = dataObject.DataListID; errors = new ErrorResultTO(); // local non-scoped execution ;) var isLocal = !dataObject.IsRemoteWorkflow(); var principle = Thread.CurrentPrincipal; Dev2Logger.Info("SUB-EXECUTION USER CONTEXT IS [ " + principle.Identity.Name + " ] FOR SERVICE [ " + dataObject.ServiceName + " ]"); var oldStartTime = dataObject.StartTime; dataObject.StartTime = DateTime.Now; if (dataObject.RunWorkflowAsync) { ExecuteRequestAsync(dataObject, inputDefs, invoker, isLocal, oldID, out invokeErrors, update); dataObject.StartTime = oldStartTime; errors.MergeErrors(invokeErrors); } else { if (isLocal) { if (GetResource(workspaceId, dataObject.ResourceID) == null && GetResource(workspaceId, dataObject.ServiceName) == null) { errors.AddError(string.Format(ErrorResource.ResourceNotFound, dataObject.ServiceName)); dataObject.StartTime = oldStartTime; return(null); } } var executionContainer = invoker.GenerateInvokeContainer(dataObject, dataObject.ServiceName, isLocal, oldID); dataObject.IsServiceTestExecution = wasTestExecution; if (executionContainer != null) { CreateNewEnvironmentFromInputMappings(dataObject, inputDefs, update); if (!isLocal) { SetRemoteExecutionDataList(dataObject, executionContainer, errors); } if (!errors.HasErrors()) { executionContainer.InstanceInputDefinition = inputDefs; executionContainer.InstanceOutputDefinition = outputDefs; executionContainer.Execute(out invokeErrors, update); var env = UpdatePreviousEnvironmentWithSubExecutionResultUsingOutputMappings(dataObject, outputDefs, update, handleErrors, errors); errors.MergeErrors(invokeErrors); string errorString = dataObject.Environment.FetchErrors(); invokeErrors = ErrorResultTO.MakeErrorResultFromDataListString(errorString); errors.MergeErrors(invokeErrors); dataObject.StartTime = oldStartTime; return(env); } errors.AddError(string.Format(ErrorResource.ResourceNotFound, dataObject.ServiceName)); } } dataObject.StartTime = oldStartTime; return(new ExecutionEnvironment()); } finally { dataObject.IsServiceTestExecution = wasTestExecution; } }
private IDev2Tokenizer CreateSplitPattern(ref string stringToSplit, IEnumerable <DataSplitDTO> args, IDataListCompiler compiler, Guid dlId, out ErrorResultTO errors) { Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder { ToTokenize = stringToSplit, ReverseOrder = ReverseOrder }; errors = new ErrorResultTO(); foreach (DataSplitDTO t in args) { var fieldName = t.OutputVariable; t.At = t.At ?? ""; if (!string.IsNullOrEmpty(_datalistString)) { var isValidExpr = new IsValidExpressionRule(() => fieldName, _datalistString) { LabelText = fieldName }; var errorInfo = isValidExpr.Check(); if (errorInfo != null) { errors.AddError(errorInfo.Message); continue; } } IBinaryDataListEntry entry; string error; switch (t.SplitType) { case "Index": try { entry = compiler.Evaluate(dlId, enActionType.User, t.At, false, out errors); string index = DataListUtil.GetValueAtIndex(entry, 1, out error); int indexNum = Convert.ToInt32(index); if (indexNum > 0) { dtb.AddIndexOp(indexNum); } } catch (Exception ex) { errors.AddError(ex.Message); } break; case "End": dtb.AddEoFOp(); break; case "Space": dtb.AddTokenOp(" ", t.Include); break; case "Tab": dtb.AddTokenOp("\t", t.Include); break; case "New Line": if (stringToSplit.Contains("\r\n")) { dtb.AddTokenOp("\r\n", t.Include); } else if (stringToSplit.Contains("\n")) { dtb.AddTokenOp("\n", t.Include); } else if (stringToSplit.Contains("\r")) { dtb.AddTokenOp("\r", t.Include); } break; case "Chars": if (!string.IsNullOrEmpty(t.At)) { entry = compiler.Evaluate(dlId, enActionType.User, t.At, false, out errors); string val = DataListUtil.GetValueAtIndex(entry, 1, out error); string escape = t.EscapeChar; if (!String.IsNullOrEmpty(escape)) { entry = compiler.Evaluate(dlId, enActionType.User, t.EscapeChar, false, out errors); escape = DataListUtil.GetValueAtIndex(entry, 1, out error); } dtb.AddTokenOp(val, t.Include, escape); } break; } _indexCounter++; } return(string.IsNullOrEmpty(dtb.ToTokenize) || errors.HasErrors() ? null : dtb.Generate()); }
//MO - Changed : new ctor that accepts the new arguments public ForEachBootstrapTO(enForEachType forEachType, string from, string to, string csvNumbers, string numberOfExecutes, string recordsetName, IExecutionEnvironment compiler, out ErrorResultTO errors) { errors = new ErrorResultTO(); ForEachType = forEachType; IDev2IteratorCollection colItr = Dev2ValueObjectFactory.CreateIteratorCollection(); IIndexIterator localIndexIterator; IndexList indexList; switch (forEachType) { case enForEachType.InRecordset: var records = compiler.EvalRecordSetIndexes(recordsetName); if (!compiler.HasRecordSet(recordsetName)) { errors.AddError("When selecting a recordset only valid recordsets can be used"); break; } var isEmpty = !records.Any(); if (isEmpty) { localIndexIterator = new IndexListIndexIterator(records); } else { localIndexIterator = new IndexListIndexIterator(records); } IndexIterator = localIndexIterator; break; case enForEachType.InRange: if (string.IsNullOrWhiteSpace(@from)) { errors.AddError("The from field can not be left empty."); break; } if (string.IsNullOrWhiteSpace(to)) { errors.AddError("The to field can not be left empty."); break; } if (@from.Contains("(*)")) { errors.AddError("The Star notation is not accepted in the From field."); break; } var evalledFrom = Warewolf.Storage.ExecutionEnvironment.WarewolfEvalResultToString(compiler.Eval(@from)); int intFrom; if (!int.TryParse(evalledFrom, out intFrom) || intFrom < 1) { errors.AddError("From range must be a whole number from 1 onwards."); break; } if (to.Contains("(*)")) { errors.AddError("The Star notation is not accepted in the To field."); break; } var evalledTo = Warewolf.Storage.ExecutionEnvironment.WarewolfEvalResultToString(compiler.Eval(@to)); int intTo; if (!int.TryParse(evalledTo, out intTo) || intTo < 1) { errors.AddError("To range must be a whole number from 1 onwards."); break; } if (intFrom > intTo) { indexList = new IndexList(new HashSet <int>(), 0) { MinValue = intFrom, MaxValue = intTo }; ReverseIndexIterator revIdxItr = new ReverseIndexIterator(new HashSet <int>(), 0) { IndexList = indexList }; IndexIterator = revIdxItr; } else { indexList = new IndexList(new HashSet <int>(), 0) { MinValue = intFrom, MaxValue = intTo }; localIndexIterator = new IndexIterator(new HashSet <int>(), 0) { IndexList = indexList }; IndexIterator = localIndexIterator; } break; case enForEachType.InCSV: var csvIndexedsItr = Warewolf.Storage.ExecutionEnvironment.WarewolfEvalResultToString(compiler.Eval(csvNumbers)); ErrorResultTO allErrors; List <int> listOfIndexes = SplitOutCsvIndexes(csvIndexedsItr, out allErrors); if (allErrors.HasErrors()) { errors.MergeErrors(allErrors); break; } ListIndexIterator listLocalIndexIterator = new ListIndexIterator(listOfIndexes); ListOfIndex listOfIndex = new ListOfIndex(listOfIndexes); listLocalIndexIterator.IndexList = listOfIndex; IndexIterator = listLocalIndexIterator; break; default: if (numberOfExecutes != null && numberOfExecutes.Contains("(*)")) { errors.AddError("The Star notation is not accepted in the Numbers field."); break; } int intExNum; var numOfExItr = Warewolf.Storage.ExecutionEnvironment.WarewolfEvalResultToString(compiler.Eval(numberOfExecutes)); if (!int.TryParse(numOfExItr, out intExNum)) { errors.AddError("Number of executes must be a whole number from 1 onwards."); } IndexIterator = new IndexIterator(new HashSet <int>(), intExNum); break; } }
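// Hedged usage sketch: constructing the bootstrap TO for a CSV-driven for-each and walking its index
// iterator. The InCSV case and the HasMore()/FetchNextIndex() calls are taken from this file; the
// dataObject and the literal index list are illustrative assumptions.
ErrorResultTO bootstrapErrors;
var bootstrap = new ForEachBootstrapTO(enForEachType.InCSV, null, null, "1,2,3", null, null, dataObject.Environment, out bootstrapErrors);
if (!bootstrapErrors.HasErrors())
{
    while (bootstrap.IndexIterator.HasMore())
    {
        int index = bootstrap.IndexIterator.FetchNextIndex();
        // execute the inner activity for this iteration index
    }
}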
protected override IList <OutputTO> ExecuteConcreteAction(IDSFDataObject dataObject, out ErrorResultTO allErrors, int update) { IList <OutputTO> outputs = new List <OutputTO>(); allErrors = new ErrorResultTO(); var colItr = new WarewolfListIterator(); //get all the possible paths for all the string variables var outputItr = new WarewolfIterator(dataObject.Environment.Eval(OutputPath, update)); colItr.AddVariableToIterateOn(outputItr); var unameItr = new WarewolfIterator(dataObject.Environment.Eval(Username, update)); colItr.AddVariableToIterateOn(unameItr); var passItr = new WarewolfIterator(dataObject.Environment.Eval(DecryptedPassword, update)); colItr.AddVariableToIterateOn(passItr); var privateKeyItr = new WarewolfIterator(dataObject.Environment.Eval(PrivateKeyFile, update)); colItr.AddVariableToIterateOn(privateKeyItr); if (dataObject.IsDebugMode()) { AddDebugInputItem(new DebugEvalResult(OutputPath, "File or Folder", dataObject.Environment, update)); AddDebugInputItem(new DebugItemStaticDataParams(Overwrite.ToString(), "Overwrite")); AddDebugInputItemUserNamePassword(dataObject.Environment, update); if (!string.IsNullOrEmpty(PrivateKeyFile)) { AddDebugInputItem(PrivateKeyFile, "Destination Private Key File", dataObject.Environment, update); } } while (colItr.HasMoreData()) { IActivityOperationsBroker broker = ActivityIOFactory.CreateOperationsBroker(); Dev2CRUDOperationTO opTo = new Dev2CRUDOperationTO(Overwrite); try { IActivityIOPath dst = ActivityIOFactory.CreatePathFromString(colItr.FetchNextValue(outputItr), colItr.FetchNextValue(unameItr), colItr.FetchNextValue(passItr), true, colItr.FetchNextValue(privateKeyItr)); IActivityIOOperationsEndPoint dstEndPoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(dst); string result = broker.Create(dstEndPoint, opTo, true); outputs.Add(DataListFactory.CreateOutputTO(Result, result)); } catch (Exception e) { outputs.Add(DataListFactory.CreateOutputTO(Result, "Failure")); allErrors.AddError(e.Message); break; } } return(outputs); }
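// Hedged sketch of the lock-step iteration pattern used by this and the other file activities: each
// expression is wrapped in a WarewolfIterator, registered with a WarewolfListIterator, and FetchNextValue()
// advances them together so the matching row of every input is consumed in the same pass. The variable
// names in the expressions are illustrative; dataObject is assumed to come from the hosting activity.
var iterators = new WarewolfListIterator();
var pathIterator = new WarewolfIterator(dataObject.Environment.Eval("[[Paths(*).Target]]", 0));
var userIterator = new WarewolfIterator(dataObject.Environment.Eval("[[User]]", 0));
iterators.AddVariableToIterateOn(pathIterator);
iterators.AddVariableToIterateOn(userIterator);
while (iterators.HasMoreData())
{
    var path = iterators.FetchNextValue(pathIterator);
    var user = iterators.FetchNextValue(userIterator);
    // operate on the matched pair of values for this row
}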
public System.Guid MockExecutionImpl(IEsbChannel esbChannel, IDSFDataObject dataObject, string inputs, string outputs, out DataList.Contract.ErrorResultTO tmpErrors) { tmpErrors = new ErrorResultTO(); tmpErrors.AddError("Something bad happened"); return(Guid.Empty); }
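// Hedged test sketch: the mock above always records a single error and returns Guid.Empty, so a test only
// needs to assert those two facts. The MSTest attributes and the CreateMockContainer helper are assumptions,
// not part of the original source.
[TestMethod]
public void MockExecutionImpl_AddsErrorAndReturnsEmptyGuid()
{
    var mock = CreateMockContainer(); // assumed helper: builds an instance of the class declaring MockExecutionImpl
    ErrorResultTO tmpErrors;
    var result = mock.MockExecutionImpl(null, null, string.Empty, string.Empty, out tmpErrors);
    Assert.AreEqual(Guid.Empty, result);
    Assert.IsTrue(tmpErrors.HasErrors());
}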
protected override void ExecuteTool(IDSFDataObject dataObject, int update) { AddScriptSourcePathsToList(); var allErrors = new ErrorResultTO(); var errors = new ErrorResultTO(); allErrors.MergeErrors(errors); var env = dataObject.Environment; InitializeDebug(dataObject); try { if (!errors.HasErrors()) { if (dataObject.IsDebugMode()) { var language = ScriptType.GetDescription(); AddDebugInputItem(new DebugItemStaticDataParams(language, "Language")); AddDebugInputItem(new DebugEvalResult(Script, "Script", env, update)); } allErrors.MergeErrors(errors); if (allErrors.HasErrors()) { return; } TryExecute(dataObject, update, allErrors, env); } } catch (NullReferenceException) { allErrors.AddError(ErrorResource.ScriptingErrorReturningValue); } catch (RuntimeBinderException e) { allErrors.AddError(e.Message.Replace(" for main:Object", string.Empty)); } catch (MissingMemberException e) { allErrors.AddError(e.Message); } finally { if (allErrors.HasErrors()) { var errorString = allErrors.MakeDisplayReady(); dataObject.Environment.AddError(errorString); DisplayAndWriteError(dataObject, DisplayName, allErrors); } if (dataObject.IsDebugMode()) { if (allErrors.HasErrors()) { AddDebugOutputItem(new DebugItemStaticDataParams("", Result, "")); } DispatchDebugState(dataObject, StateType.Before, update); DispatchDebugState(dataObject, StateType.After, update); } } }
protected override IList <OutputTO> ExecuteConcreteAction(IDSFDataObject dataObject, out ErrorResultTO allErrors, int update) { _debugInputs = new List <DebugItem>(); allErrors = new ErrorResultTO(); IList <OutputTO> outputs = new List <OutputTO>(); var colItr = new WarewolfListIterator(); var sharepointSource = ResourceCatalog.GetResource <SharepointSource>(dataObject.WorkspaceID, SharepointServerResourceId); if (sharepointSource == null) { sharepointSource = SharepointSource; SharepointServerResourceId = sharepointSource.ResourceID; } ValidateRequest(); var serverInputFromItr = new WarewolfIterator(dataObject.Environment.Eval(ServerInputPathFrom, update)); colItr.AddVariableToIterateOn(serverInputFromItr); var serverInputFromTo = new WarewolfIterator(dataObject.Environment.Eval(ServerInputPathTo, update)); colItr.AddVariableToIterateOn(serverInputFromTo); if (dataObject.IsDebugMode()) { AddDebugInputItem(ServerInputPathFrom, "ServerInput Path From", dataObject.Environment, update); AddDebugInputItem(ServerInputPathTo, "ServerInput Path To", dataObject.Environment, update); } while (colItr.HasMoreData()) { try { var serverPath = colItr.FetchNextValue(serverInputFromItr); var localPath = colItr.FetchNextValue(serverInputFromTo); if (DataListUtil.IsValueRecordset(Result) && DataListUtil.GetRecordsetIndexType(Result) != enRecordsetIndexType.Numeric) { if (DataListUtil.GetRecordsetIndexType(Result) == enRecordsetIndexType.Star) { string recsetName = DataListUtil.ExtractRecordsetNameFromValue(Result); string fieldName = DataListUtil.ExtractFieldNameFromValue(Result); var newPath = MoveFile(sharepointSource, serverPath, localPath); int indexToUpsertTo = 1; foreach (var file in newPath) { string fullRecsetName = DataListUtil.CreateRecordsetDisplayValue(recsetName, fieldName, indexToUpsertTo.ToString(CultureInfo.InvariantCulture)); outputs.Add(DataListFactory.CreateOutputTO(DataListUtil.AddBracketsToValueIfNotExist(fullRecsetName), file)); indexToUpsertTo++; } } else if (DataListUtil.GetRecordsetIndexType(Result) == enRecordsetIndexType.Blank) { var newPath = MoveFile(sharepointSource, serverPath, localPath); foreach (var folder in newPath) { outputs.Add(DataListFactory.CreateOutputTO(Result, folder)); } } } else { var newPath = MoveFile(sharepointSource, serverPath, localPath); string xmlList = string.Join(",", newPath.Select(c => c)); outputs.Add(DataListFactory.CreateOutputTO(Result)); outputs.Last().OutputStrings.Add(xmlList); } } catch (Exception e) { outputs.Add(DataListFactory.CreateOutputTO(null)); allErrors.AddError(e.Message); break; } } return(outputs); }
protected override IList <OutputTO> TryExecuteConcreteAction(IDSFDataObject context, out ErrorResultTO error, int update) { IList <OutputTO> outputs = new List <OutputTO>(); error = new ErrorResultTO(); var colItr = new WarewolfListIterator(); //get all the possible paths for all the string variables var inputItr = new WarewolfIterator(context.Environment.Eval(InputPath, update)); colItr.AddVariableToIterateOn(inputItr); var userItr = new WarewolfIterator(context.Environment.Eval(Username, update)); colItr.AddVariableToIterateOn(userItr); var passItr = new WarewolfIterator(context.Environment.Eval(DecryptedPassword, update)); colItr.AddVariableToIterateOn(passItr); var privateKeyItr = new WarewolfIterator(context.Environment.Eval(PrivateKeyFile, update)); colItr.AddVariableToIterateOn(privateKeyItr); outputs.Add(DataListFactory.CreateOutputTO(Result)); if (context.IsDebugMode()) { AddDebugInputItem(InputPath, "Input Path", context.Environment, update); AddDebugInputItemUserNamePassword(context.Environment, update); if (!string.IsNullOrEmpty(PrivateKeyFile)) { AddDebugInputItem(PrivateKeyFile, "Private Key File", context.Environment, update); } } while (colItr.HasMoreData()) { var broker = ActivityIOFactory.CreateOperationsBroker(); try { var dst = ActivityIOFactory.CreatePathFromString(colItr.FetchNextValue(inputItr), colItr.FetchNextValue(userItr), colItr.FetchNextValue(passItr), true, colItr.FetchNextValue(privateKeyItr)); var dstEndPoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(dst); var result = broker.Delete(dstEndPoint); outputs[0].OutputStrings.Add(result); outputs.Add(DataListFactory.CreateOutputTO($"Username [ {dstEndPoint.IOPath.Username} ]")); } catch (Exception e) { outputs.Add(DataListFactory.CreateOutputTO(Result, "Failure")); error.AddError(e.Message); break; } } return(outputs); }
public DataListTranslatedPayloadTO ConvertFrom(IBinaryDataList payload, out ErrorResultTO errors) { if (payload == null) { throw new ArgumentNullException("payload"); } TranslatorUtils tu = new TranslatorUtils(); StringBuilder result = new StringBuilder("<" + RootTag + ">"); errors = new ErrorResultTO(); var itemKeys = payload.FetchAllKeys(); foreach (string key in itemKeys) { IBinaryDataListEntry entry; string error; if (payload.TryGetEntry(key, out entry, out error)) { if (entry.IsRecordset) { var idxItr = entry.FetchRecordsetIndexes(); while (idxItr.HasMore() && !entry.IsEmpty()) { while (idxItr.HasMore()) { int i = idxItr.FetchNextIndex(); IList <IBinaryDataListItem> rowData = entry.FetchRecordAt(i, out error); errors.AddError(error); result.Append("<"); result.Append(entry.Namespace); result.Append(">"); foreach (IBinaryDataListItem col in rowData) { string fName = col.FieldName; result.Append("<"); result.Append(fName); result.Append(">"); // Travis.Frisinger 04.02.2013 if (!col.IsDeferredRead) { try { result.Append(tu.CleanForEmit(col.TheValue)); } catch (Exception e) { Dev2Logger.Log.Error(e); } } else { // deferred read, just print the location result.Append(!string.IsNullOrEmpty(col.TheValue) ? col.FetchDeferredLocation() : string.Empty); } result.Append("</"); result.Append(fName); result.Append(">"); } result.Append("</"); result.Append(entry.Namespace); result.Append(">"); } } } else { string fName = entry.Namespace; IBinaryDataListItem val = entry.FetchScalar(); if (val != null) { result.Append("<"); result.Append(fName); result.Append(">"); // Travis.Frisinger 04.02.2013 if (!val.IsDeferredRead) { // Dev2System.FormView is our html region, pass it by ;) try { result.Append(!entry.IsManagmentServicePayload ? tu.CleanForEmit(val.TheValue) : val.TheValue); } catch (Exception e) { Dev2Logger.Log.Error(e); } } else { // deferred read, just print the location result.Append(val.FetchDeferredLocation()); } result.Append("</"); result.Append(fName); result.Append(">"); } } } } result.Append("</" + RootTag + ">"); DataListTranslatedPayloadTO tmp = new DataListTranslatedPayloadTO(result.ToString()); return(tmp); }
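// Hedged usage sketch: serialising a binary data list back to XML with the translator above and checking
// the accumulated errors before using the payload. The translator and data list instances are assumed to
// be provided by the surrounding translation layer.
ErrorResultTO convertErrors;
DataListTranslatedPayloadTO xmlPayload = translator.ConvertFrom(binaryDataList, out convertErrors);
if (!convertErrors.HasErrors())
{
    // xmlPayload wraps the <RootTag>...</RootTag> document assembled by ConvertFrom
}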
// BUG 9626 - 2013.06.11 - TWR: refactored for recursion static void TryConvert(XmlNodeList children, IBinaryDataList result, IDictionary <string, int> indexCache, ErrorResultTO errors, bool onlyMapInputs, int level = 0) { // spin through each element in the XML foreach (XmlNode c in children) { if (!DataListUtil.IsSystemTag(c.Name) && c.Name != GlobalConstants.NaughtyTextNode) { // scalars and recordset fetch IBinaryDataListEntry entry; string error; if (result.TryGetEntry(c.Name, out entry, out error)) { if (entry.IsRecordset) { // fetch recordset index int fetchIdx; var idx = indexCache.TryGetValue(c.Name, out fetchIdx) ? fetchIdx : 1; // process recordset var nl = c.ChildNodes; foreach (XmlNode subc in nl) { // Extract column being mapped to ;) var theCol = entry.Columns.FirstOrDefault(col => col.ColumnName == subc.Name); var dir = enDev2ColumnArgumentDirection.None; if (theCol != null) { dir = theCol.ColumnIODirection; } if (CanMapValue(onlyMapInputs, dir)) { entry.TryPutRecordItemAtIndex(Dev2BinaryDataListFactory.CreateBinaryItem(subc.InnerXml, c.Name, subc.Name, idx), idx, out error); } errors.AddError(error); } // update this recordset index indexCache[c.Name] = ++idx; } else if (CanMapValue(onlyMapInputs, entry.ColumnIODirection)) { // process scalar entry.TryPutScalar(Dev2BinaryDataListFactory.CreateBinaryItem(c.InnerXml, c.Name), out error); if (!string.IsNullOrEmpty(error)) { errors.AddError(error); } } } else { if (level == 0) { // Only recurse if we're at the first level!! TryConvert(c.ChildNodes, result, indexCache, errors, onlyMapInputs, ++level); } else { errors.AddError(error); } } } } }
protected override IList <OutputTO> TryExecuteConcreteAction(IDSFDataObject context, out ErrorResultTO error, int update) { IList <OutputTO> outputs = new List <OutputTO>(); error = new ErrorResultTO(); var colItr = new WarewolfListIterator(); //get all the possible paths for all the string variables var inputItr = new WarewolfIterator(context.Environment.Eval(InputPath, update)); colItr.AddVariableToIterateOn(inputItr); var passItr = new WarewolfIterator(context.Environment.Eval(DecryptedPassword, update)); colItr.AddVariableToIterateOn(passItr); var privateKeyItr = new WarewolfIterator(context.Environment.Eval(PrivateKeyFile ?? string.Empty, update)); colItr.AddVariableToIterateOn(privateKeyItr); outputs.Add(DataListFactory.CreateOutputTO(Result)); if (context.IsDebugMode()) { AddDebugInputItem(InputPath, "Input Path", context.Environment, update); AddDebugInputItemUserNamePassword(context.Environment, update); if (!string.IsNullOrEmpty(PrivateKeyFile)) { AddDebugInputItem(PrivateKeyFile, "Private Key File", context.Environment, update); } if (IsResultBase64) { AddDebugInputItem(IsResultBase64.ToString(), "Result As Base64", context.Environment, update); } } while (colItr.HasMoreData()) { var broker = ActivityIOFactory.CreateOperationsBroker(); var ioPath = ActivityIOFactory.CreatePathFromString(colItr.FetchNextValue(inputItr), Username, colItr.FetchNextValue(passItr), true, colItr.FetchNextValue(privateKeyItr)); var endpoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(ioPath); try { if (IsResultBase64) { var result = broker.GetBytes(endpoint); outputs[0].OutputStrings.Add(result.ToBase64String()); } else { var result = broker.Get(endpoint); outputs[0].OutputStrings.Add(result); } } catch (Exception e) { outputs[0].OutputStrings.Add(null); error.AddError(e.Message); break; } } return(outputs); }
private ErrorResultTO InvokeService(IDSFDataObject dataObject, ErrorResultTO errors, Guid serviceId, string serviceName) { Dev2Logger.Debug("Finding service", dataObject.ExecutionID.ToString()); var theService = serviceId == Guid.Empty ? _serviceLocator.FindService(serviceName, _workspace.ID) : _serviceLocator.FindService(serviceId, _workspace.ID); if (theService == null) { if (!dataObject.IsServiceTestExecution) { theService = _serviceLocator.FindService(serviceName, GlobalConstants.ServerWorkspaceID); } if (theService == null) { if (dataObject.IsServiceTestExecution) { var testResult = new ServiceTestModelTO { Result = new TestRunResult { RunTestResult = RunResult.TestResourceDeleted, Message = "Resource has been deleted", DebugForTest = new List <IDebugState>(), TestName = dataObject.TestName }, TestPassed = false, TestInvalid = true, FailureMessage = "Resource has been deleted", TestName = dataObject.TestName, }; var ser = new Dev2JsonSerializer(); _request.ExecuteResult = ser.SerializeToBuilder(testResult); } errors.AddError(string.Format(ErrorResource.ServiceNotFound, serviceName)); } } else if (theService.Actions.Count <= 1) { #region Execute ESB container var theStart = theService.Actions.FirstOrDefault(); if (theStart != null && theStart.ActionType != enActionType.InvokeManagementDynamicService && theStart.ActionType != enActionType.Workflow && dataObject.IsFromWebServer) { throw new Exception(ErrorResource.CanOnlyExecuteWorkflowsFromWebBrowser); } Dev2Logger.Debug("Mapping Action Dependencies", dataObject.ExecutionID.ToString()); MapServiceActionDependencies(theStart); if (theStart != null) { theStart.Service = theService; theStart.DataListSpecification = theService.DataListSpecification; Dev2Logger.Debug("Getting container", dataObject.ExecutionID.ToString()); var container = GenerateContainer(theStart, dataObject, _workspace); container.Execute(out errors, 0); } #endregion Execute ESB container } else { errors.AddError(string.Format(ErrorResource.MalformedService, serviceId)); } return(errors); }
#pragma warning disable S1541 // Methods and properties should not be too complex #pragma warning disable S3776 // Cognitive Complexity of methods should not be too high protected override void ExecuteTool(IDSFDataObject dataObject, int update) #pragma warning restore S3776 // Cognitive Complexity of methods should not be too high #pragma warning restore S1541 // Methods and properties should not be too complex { var allErrors = new ErrorResultTO(); InitializeDebug(dataObject); if (string.IsNullOrEmpty(DataSource)) { allErrors.AddError(ErrorResource.DataSourceEmpty); } if (string.IsNullOrEmpty(Alias)) { allErrors.AddError(string.Format(ErrorResource.CanNotBeEmpty, "Alias")); } if (allErrors.HasErrors()) { DisplayAndWriteError("DsfSelectAndApplyActivity", allErrors); foreach (var fetchError in allErrors.FetchErrors()) { dataObject.Environment.AddError(fetchError); } } var startTime = DateTime.Now; _previousParentId = dataObject.ParentInstanceID; _debugInputs = new List <DebugItem>(); _debugOutputs = new List <DebugItem>(); dataObject.ForEachNestingLevel++; var expressions = new List <string>(); try { string ds; try { ds = dataObject.Environment.ToStar(DataSource); expressions = dataObject.Environment.GetIndexes(ds); if (expressions.Count == 0) { expressions.Add(ds); } } catch (NullReferenceException) { //Do nothing exception aleady added to errors throw new NullDataSource(); } if (dataObject.IsDebugMode()) { AddDebugInputItem(new DebugItemStaticDataParams(Alias, "As", DataSource)); } var scopedEnvironment = new ScopedEnvironment(dataObject.Environment, ds, Alias); //Push the new environment dataObject.PushEnvironment(scopedEnvironment); dataObject.ForEachNestingLevel++; if (dataObject.IsDebugMode()) { DispatchDebugState(dataObject, StateType.Before, update); } dataObject.ParentInstanceID = UniqueID; dataObject.IsDebugNested = true; if (dataObject.IsDebugMode()) { DispatchDebugState(dataObject, StateType.After, update); } foreach (var exp in expressions) { //Assign the warewolfAtom to Alias using new environment scopedEnvironment.SetDataSource(exp); if (ApplyActivityFunc.Handler is IDev2Activity exeAct) { _childUniqueID = exeAct.UniqueID; exeAct.Execute(dataObject, 0); } } } catch (NullDataSource e) { Dev2Logger.Error("DSFSelectAndApply", e, GlobalConstants.WarewolfError); } catch (Exception e) { Dev2Logger.Error("DSFSelectAndApply", e, GlobalConstants.WarewolfError); allErrors.AddError(e.Message); } finally { if (dataObject.IsServiceTestExecution) { if (dataObject.IsDebugMode()) { GetTestOurputResultForDebug(dataObject); } else { GetTestOutputForBrowserExecution(dataObject); } } dataObject.PopEnvironment(); dataObject.ForEachNestingLevel--; if (allErrors.HasErrors()) { DisplayAndWriteError("DsfSelectAndApplyActivity", allErrors); foreach (var fetchError in allErrors.FetchErrors()) { dataObject.Environment.AddError(fetchError); } } if (dataObject.IsDebugMode()) { foreach (var expression in expressions) { AddExpresionEvalOutputItem(dataObject, update, expression); } DispatchDebugState(dataObject, StateType.End, update, startTime, DateTime.Now); } OnCompleted(dataObject); } }
protected override void ExecuteTool(IDSFDataObject dataObject, int update) { IExecutionToken exeToken = dataObject.ExecutionToken; var allErrors = new ErrorResultTO(); InitializeDebug(dataObject); try { if (dataObject.IsDebugMode()) { AddDebugInputItem(new DebugEvalResult(CommandFileName, "Command", dataObject.Environment, update)); } var itr = new WarewolfIterator(dataObject.Environment.Eval(CommandFileName, update)); if (!allErrors.HasErrors()) { while (itr.HasMoreData()) { var val = itr.GetNextValue(); { if (string.IsNullOrEmpty(val)) { throw new Exception("Empty script to execute"); } StreamReader errorReader; StringBuilder outputReader; if (!ExecuteProcess(val, exeToken, out errorReader, out outputReader)) { return; } allErrors.AddError(errorReader.ReadToEnd()); var bytes = Encoding.Default.GetBytes(outputReader.ToString().Trim()); string readValue = Encoding.ASCII.GetString(bytes).Replace("?", " "); //2013.06.03: Ashley Lewis for bug 9498 - handle multiple regions in result foreach (var region in DataListCleaningUtils.SplitIntoRegions(CommandResult)) { if (dataObject.Environment != null) { dataObject.Environment.Assign(region, readValue, update); } } errorReader.Close(); } } if (dataObject.IsDebugMode() && !allErrors.HasErrors()) { if (!string.IsNullOrEmpty(CommandResult)) { AddDebugOutputItem(new DebugEvalResult(CommandResult, "", dataObject.Environment, update)); } } } } catch (Exception e) { Dev2Logger.Log.Error("DSFCommandLine", e); allErrors.AddError(e.Message); } finally { // Handle Errors var hasErrors = allErrors.HasErrors(); if (hasErrors) { DisplayAndWriteError("DsfExecuteCommandLineActivity", allErrors); if (dataObject.Environment != null) { var errorString = allErrors.MakeDisplayReady(); dataObject.Environment.AddError(errorString); dataObject.Environment.Assign(CommandResult, null, update); } } if (dataObject.IsDebugMode()) { if (hasErrors) { AddDebugOutputItem(new DebugItemStaticDataParams("", CommandResult, "")); } DispatchDebugState(dataObject, StateType.Before, update); DispatchDebugState(dataObject, StateType.After, update); } if (!string.IsNullOrEmpty(_fullPath)) { File.Delete(_fullPath); } } }
/// <summary> /// Executes the request. /// </summary> /// <param name="dataObject">The data object.</param> /// <param name="request"></param> /// <param name="workspaceId">The workspace ID.</param> /// <param name="errors">The errors.</param> /// <returns></returns> public Guid ExecuteRequest(IDSFDataObject dataObject, EsbExecuteRequest request, Guid workspaceId, out ErrorResultTO errors) { var resultID = GlobalConstants.NullDataListID; errors = new ErrorResultTO(); var theWorkspace = WorkspaceRepository.Instance.Get(workspaceId); var principle = Thread.CurrentPrincipal; var name = principle.Identity.Name; // If no DLID, we need to make it based upon the request ;) if (dataObject.DataListID == GlobalConstants.NullDataListID) { IResource resource; try { resource = dataObject.ResourceID == Guid.Empty ? GetResource(workspaceId, dataObject.ServiceName) : GetResource(workspaceId, dataObject.ResourceID); } catch (Exception ex) { Dev2Logger.Log.Error(ex); errors.AddError(string.Format("Service [ {0} ] not found.", dataObject.ServiceName)); return(resultID); } // TODO : Amend here to respect Inputs only when creating shape ;) if (resource != null) { if (resource.DataList != null) { Dev2Logger.Log.Debug("Mapping Inputs from Environment"); ExecutionEnvironmentUtils.UpdateEnvironmentFromInputPayload(dataObject, dataObject.RawPayload, resource.DataList.ToString(), 0); } } dataObject.RawPayload = new StringBuilder(); // We need to create the parentID around the system ;) dataObject.ParentThreadID = Thread.CurrentThread.ManagedThreadId; } try { // Setup the invoker endpoint ;) Dev2Logger.Log.Debug("Creating Invoker"); using (var invoker = new EsbServiceInvoker(this, this, theWorkspace, request)) { // Should return the top level DLID ErrorResultTO invokeErrors; resultID = invoker.Invoke(dataObject, out invokeErrors); errors.MergeErrors(invokeErrors); } } catch (Exception ex) { errors.AddError(ex.Message); } return(resultID); }
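// Hedged usage sketch: invoking the endpoint above and merging its errors into a caller-level collection,
// in the same style the other methods in this file use. The endpoint, data object and request instances
// are assumed to be provided by the hosting server component.
ErrorResultTO requestErrors;
Guid resultId = endpoint.ExecuteRequest(dataObject, request, workspaceId, out requestErrors);
allErrors.MergeErrors(requestErrors);
if (resultId == GlobalConstants.NullDataListID)
{
    // the service could not be resolved or executed; requestErrors carries the reason
}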
protected void ExecuteImpl(out ErrorResultTO errors, int update) { errors = new ErrorResultTO(); #region Create OutputFormatter IOutputFormatter outputFormatter = null; try { if (!string.IsNullOrEmpty(InstanceOutputDefintions)) { outputFormatter = GetOutputFormatter(Service); } } catch (Exception) { if (HandlesOutputFormatting) { errors.AddError( string.Format(ErrorResource.InvalidOutputFormat + "Please edit and remap.", Service.ResourceName)); return; } } if (HandlesOutputFormatting && outputFormatter == null && !string.IsNullOrEmpty(InstanceOutputDefintions)) { errors.AddError(string.Format(ErrorResource.InvalidOutputFormat, Service.ResourceName)); return; } #endregion try { var itrs = new List <IWarewolfIterator>(5); IWarewolfListIterator itrCollection = new WarewolfListIterator(); if (string.IsNullOrEmpty(InstanceInputDefinitions) && string.IsNullOrEmpty(InstanceOutputDefintions)) { MergeErrors(errors, update, outputFormatter, itrs, itrCollection); return; } var method = Service.Method; var inputs = method.Parameters; if (inputs.Count == 0) { ExecuteService(out ErrorResultTO invokeErrors, update, outputFormatter); errors.MergeErrors(invokeErrors); } else { BuildParameterIterators(update, inputs, itrCollection, itrs); while (itrCollection.HasMoreData()) { ExecuteService(itrCollection, itrs, out ErrorResultTO invokeErrors, update, outputFormatter); errors.MergeErrors(invokeErrors); } } } finally { var disposable = Service as IDisposable; disposable?.Dispose(); // ensure errors bubble up ;) errors.MergeErrors(_errorResult); } }
protected override void ExecuteTool(IDSFDataObject dataObject) { ErrorResultTO allErrors = new ErrorResultTO(); try { _debugOutputs.Clear(); _debugInputs.Clear(); if (dataObject.IsDebugMode()) { _debugInputs = CreateDebugInputs(dataObject.Environment); } var stack = Conditions.TheStack.Select(a => parseDecision(dataObject.Environment, a)); var factory = Dev2DecisionFactory.Instance(); var res = stack.SelectMany(a => { if (a.EvaluationFn == enDecisionType.IsError) { return(new [] { dataObject.Environment.Errors.Count > 0 }); } if (a.EvaluationFn == enDecisionType.IsNotError) { return(new[] { dataObject.Environment.Errors.Count == 0 }); } IList <bool> ret = new List <bool>(); var iter = new WarewolfListIterator(); var c1 = new WarewolfAtomIterator(a.Cols1); var c2 = new WarewolfAtomIterator(a.Cols2); var c3 = new WarewolfAtomIterator(a.Cols3); iter.AddVariableToIterateOn(c1); iter.AddVariableToIterateOn(c2); iter.AddVariableToIterateOn(c3); while (iter.HasMoreData()) { ret.Add(factory.FetchDecisionFunction(a.EvaluationFn).Invoke(new[] { iter.FetchNextValue(c1), iter.FetchNextValue(c2), iter.FetchNextValue(c3) })); } return(ret); }); var resultval = And ? res.Aggregate(true, (a, b) => a && b) : res.Any(a => a); if (dataObject.IsDebugMode()) { _debugOutputs = GetDebugOutputs(dataObject.Environment, resultval.ToString()); } if (dataObject.IsDebugMode()) { DispatchDebugState(dataObject, StateType.Before); DispatchDebugState(dataObject, StateType.After); } if (resultval) { if (TrueArm != null) { var activity = TrueArm.FirstOrDefault(); if (activity != null) { activity.Execute(dataObject); } } } else { if (FalseArm != null) { var activity = FalseArm.FirstOrDefault(); if (activity != null) { activity.Execute(dataObject); } } } } catch (Exception e) { allErrors.AddError(e.Message); } finally { // Handle Errors var hasErrors = allErrors.HasErrors(); if (hasErrors) { DisplayAndWriteError("DsfDeleteRecordsActivity", allErrors); var errorString = allErrors.MakeDisplayReady(); dataObject.Environment.AddError(errorString); } if (dataObject.IsDebugMode()) { //DispatchDebugState(dataObject, StateType.Before); //DispatchDebugState(dataObject, StateType.After); } } }
protected override void ExecuteTool(IDSFDataObject dataObject, int update) { ErrorResultTO allErrors = new ErrorResultTO(); ErrorResultTO errors = new ErrorResultTO(); allErrors.MergeErrors(errors); InitializeDebug(dataObject); // Process if no errors try { if (dataObject.IsDebugMode()) { if (string.IsNullOrEmpty(Input1)) { AddDebugInputItem(new DebugItemStaticDataParams(DateTime.Now.ToString(GlobalConstants.GlobalDefaultNowFormat), "now()", "Input 1", "=")); } else { AddDebugInputItem(Input1, "Input 1", dataObject.Environment, update); } if (string.IsNullOrEmpty(Input2)) { AddDebugInputItem(new DebugItemStaticDataParams(DateTime.Now.ToString(GlobalConstants.GlobalDefaultNowFormat), "now()", "Input 2", "=")); } else { AddDebugInputItem(Input2, "Input 2", dataObject.Environment, update); } AddDebugInputItem(InputFormat, "Input Format", dataObject.Environment, update); if (!String.IsNullOrEmpty(OutputType)) { AddDebugInputItem(new DebugItemStaticDataParams(OutputType, "Output In")); } } var colItr = new WarewolfListIterator(); var input1Itr = new WarewolfIterator(dataObject.Environment.EvalStrict(string.IsNullOrEmpty(Input1) ? GlobalConstants.CalcExpressionNow : Input1, update)); colItr.AddVariableToIterateOn(input1Itr); var evalInp2 = dataObject.Environment.EvalStrict(string.IsNullOrEmpty(Input2) ? GlobalConstants.CalcExpressionNow : Input2, update); var input2Itr = new WarewolfIterator(evalInp2); colItr.AddVariableToIterateOn(input2Itr); var ifItr = new WarewolfIterator(dataObject.Environment.Eval(InputFormat ?? string.Empty, update)); colItr.AddVariableToIterateOn(ifItr); int indexToUpsertTo = 1; while (colItr.HasMoreData()) { IDateTimeDiffTO transObj = ConvertToDateTimeDiffTo(colItr.FetchNextValue(input1Itr), colItr.FetchNextValue(input2Itr), colItr.FetchNextValue(ifItr), OutputType); //Create a DateTimeComparer using the DateTimeConverterFactory IDateTimeComparer comparer = DateTimeConverterFactory.CreateComparer(); //Call the TryComparer method on the DateTimeComparer and pass it the IDateTimeDiffTO created from the ConvertToDateTimeDiffTO Method string result; string error; string expression = Result; if (comparer.TryCompare(transObj, out result, out error)) { if (DataListUtil.IsValueRecordset(Result) && DataListUtil.GetRecordsetIndexType(Result) == enRecordsetIndexType.Star) { if (update == 0) { expression = Result.Replace(GlobalConstants.StarExpression, indexToUpsertTo.ToString(CultureInfo.InvariantCulture)); } } else { expression = Result; } var rule = new IsSingleValueRule(() => Result); var single = rule.Check(); if (single != null) { allErrors.AddError(single.Message); } else { dataObject.Environment.Assign(expression, result, update); } } else { DoDebugOutput(dataObject, expression, update); allErrors.AddError(error); } indexToUpsertTo++; } allErrors.MergeErrors(errors); if (dataObject.IsDebugMode() && !allErrors.HasErrors()) { AddDebugOutputItem(new DebugEvalResult(Result, null, dataObject.Environment, update)); } } catch (Exception e) { Dev2Logger.Error("DSFDateTime", e); allErrors.AddError(e.Message); } finally { // Handle Errors if (allErrors.HasErrors()) { DisplayAndWriteError("DsfDateTimeDifferenceActivity", allErrors); var errorString = allErrors.MakeDisplayReady(); dataObject.Environment.AddError(errorString); dataObject.Environment.Assign(Result, null, update); } if (dataObject.IsDebugMode()) { DispatchDebugState(dataObject, StateType.Before, update); DispatchDebugState(dataObject, StateType.After, update); } } }
/// <summary> /// Clones the specified type of. /// </summary> /// <param name="depth">The depth.</param> /// <param name="errorResult">The error result.</param> /// <param name="onlySystemTags">if set to <c>true</c> [only system tags].</param> /// <returns></returns> public IBinaryDataList Clone(enTranslationDepth depth, out ErrorResultTO errorResult, bool onlySystemTags) { // set parent child reference BinaryDataList result = new BinaryDataList { ParentUID = ParentUID }; errorResult = new ErrorResultTO(); // clone the dictionary foreach(string e in _templateDict.Keys) { if((onlySystemTags && e.IndexOf(GlobalConstants.SystemTagNamespaceSearch, StringComparison.Ordinal) >= 0) || !onlySystemTags) { string error; // fetch this instance via clone, fetch toClone instance and merge the data IBinaryDataListEntry cloned = _templateDict[e].Clone(depth, UID, out error); // Copy over the intellisesne parts ;) result._intellisenseParts = _intellisenseParts; errorResult.AddError(error); if(error == string.Empty) { // safe to add result._templateDict[e] = cloned; } } } // if only system tags, clean the intellisense parts out ;) if(onlySystemTags) { var parts = result._intellisenseParts.Where( c => c.Name.IndexOf(GlobalConstants.SystemTagNamespaceSearch, StringComparison.Ordinal) >= 0); result._intellisenseParts = parts.ToList(); } return result; }
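// Hedged usage sketch: cloning a full data list versus only its system tags. enTranslationDepth.Data is
// assumed to be the member name for a full-depth clone (only the enum type, not its members, appears in
// this file), and "dataList" is an illustrative instance name.
ErrorResultTO cloneErrors;
IBinaryDataList fullCopy = dataList.Clone(enTranslationDepth.Data, out cloneErrors, false);
IBinaryDataList systemTagsOnly = dataList.Clone(enTranslationDepth.Data, out cloneErrors, true);
if (cloneErrors.HasErrors())
{
    // entry-level clone failures are reported per key in the error collection rather than thrown
}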
protected override void ExecuteTool(IDSFDataObject dataObject)
{
    _debugInputs = new List<DebugItem>();
    _debugOutputs = new List<DebugItem>();
    ErrorResultTO allErrors = new ErrorResultTO();
    ErrorResultTO errors = new ErrorResultTO();
    var env = dataObject.Environment;
    InitializeDebug(dataObject);
    try
    {
        CleanArgs();
        allErrors.MergeErrors(errors);
        int inputIndex = 1;
        int outputIndex = 1;
        foreach (ICaseConvertTO item in ConvertCollection.Where(a => !String.IsNullOrEmpty(a.StringToConvert)))
        {
            IsSingleValueRule.ApplyIsSingleValueRule(item.ExpressionToConvert, allErrors);
            if (dataObject.IsDebugMode())
            {
                var debugItem = new DebugItem();
                AddDebugItem(new DebugItemStaticDataParams("", inputIndex.ToString(CultureInfo.InvariantCulture)), debugItem);
                AddDebugItem(new DebugEvalResult(item.StringToConvert, "Convert", env), debugItem);
                AddDebugItem(new DebugItemStaticDataParams(item.ConvertType, "To"), debugItem);
                _debugInputs.Add(debugItem);
                inputIndex++;
            }
            if (!allErrors.HasErrors())
            {
                try
                {
                    env.ApplyUpdate(item.StringToConvert, TryConvertFunc(item.ConvertType, env));
                }
                catch (Exception e)
                {
                    allErrors.AddError(e.Message);
                }
                if (!allErrors.HasErrors() && dataObject.IsDebugMode())
                {
                    var debugItem = new DebugItem();
                    AddDebugItem(new DebugItemStaticDataParams("", outputIndex.ToString(CultureInfo.InvariantCulture)), debugItem);
                    AddDebugItem(new DebugEvalResult(item.StringToConvert, "", env), debugItem);
                    _debugOutputs.Add(debugItem);
                    outputIndex++;
                }
            }
        }
    }
    catch (Exception e)
    {
        allErrors.AddError(e.Message);
    }
    finally
    {
        // Handle Errors
        var hasErrors = allErrors.HasErrors();
        if (hasErrors)
        {
            DisplayAndWriteError("DsfCaseConvertActivity", allErrors);
            var errorString = allErrors.MakeDisplayReady();
            dataObject.Environment.AddError(errorString);
        }
        if (dataObject.IsDebugMode())
        {
            DispatchDebugState(dataObject, StateType.Before);
            DispatchDebugState(dataObject, StateType.After);
        }
    }
}
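`TryConvertFunc` resolves a conversion by its `ConvertType` name before `ApplyUpdate` rewrites the variable in place. A standalone sketch of that lookup using plain delegates follows; the convert-type names here are illustrative assumptions, not necessarily the exact strings the activity accepts.

using System;
using System.Collections.Generic;
using System.Globalization;

static class CaseConvertSketch
{
    // Maps a convert-type name to a string transform; the names are assumed examples.
    static readonly Dictionary<string, Func<string, string>> Converters =
        new Dictionary<string, Func<string, string>>(StringComparer.OrdinalIgnoreCase)
        {
            ["UPPER"] = s => s.ToUpperInvariant(),
            ["lower"] = s => s.ToLowerInvariant(),
            ["Title Case"] = s => CultureInfo.InvariantCulture.TextInfo.ToTitleCase(s.ToLowerInvariant())
        };

    // Looks up the converter by name and applies it, analogous to TryConvertFunc + ApplyUpdate.
    static string Convert(string convertType, string value)
    {
        Func<string, string> converter;
        if (!Converters.TryGetValue(convertType, out converter))
        {
            throw new ArgumentException($"Unknown convert type: {convertType}");
        }
        return converter(value);
    }

    static void Main()
    {
        Console.WriteLine(Convert("UPPER", "warewolf"));           // WAREWOLF
        Console.WriteLine(Convert("Title Case", "case convert"));  // Case Convert
    }
}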
protected override IList<OutputTO> TryExecuteConcreteAction(IDSFDataObject context, out ErrorResultTO error, int update)
{
    IList<OutputTO> outputs = new List<OutputTO>();
    error = new ErrorResultTO();
    var colItr = new WarewolfListIterator();
    // get all the possible paths for all the string variables
    var inputItr = new WarewolfIterator(context.Environment.Eval(OutputPath, update));
    colItr.AddVariableToIterateOn(inputItr);
    var passItr = new WarewolfIterator(context.Environment.Eval(DecryptedPassword, update));
    colItr.AddVariableToIterateOn(passItr);
    var privateKeyItr = new WarewolfIterator(context.Environment.Eval(PrivateKeyFile, update));
    colItr.AddVariableToIterateOn(privateKeyItr);
    var contentItr = new WarewolfIterator(context.Environment.Eval(FileContents, update));
    colItr.AddVariableToIterateOn(contentItr);
    outputs.Add(DataListFactory.CreateOutputTO(Result));
    if (context.IsDebugMode())
    {
        AddDebugInputItem(OutputPath, "Output Path", context.Environment, update);
        AddDebugInputItem(new DebugItemStaticDataParams(GetMethod(), "Method"));
        AddDebugInputItemUserNamePassword(context.Environment, update);
        if (!string.IsNullOrEmpty(PrivateKeyFile))
        {
            AddDebugInputItem(PrivateKeyFile, "Private Key File", context.Environment, update);
        }
        AddDebugInputItem(FileContents, "File Contents", context.Environment, update);
    }
    while (colItr.HasMoreData())
    {
        var broker = ActivityIOFactory.CreateOperationsBroker();
        var writeType = GetCorrectWriteType();
        var putTo = ActivityIOFactory.CreatePutRawOperationTO(writeType, TextUtils.ReplaceWorkflowNewLinesWithEnvironmentNewLines(colItr.FetchNextValue(contentItr)));
        var opath = ActivityIOFactory.CreatePathFromString(colItr.FetchNextValue(inputItr), Username, colItr.FetchNextValue(passItr), true, colItr.FetchNextValue(privateKeyItr));
        var endPoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(opath);
        try
        {
            if (error.HasErrors())
            {
                outputs[0].OutputStrings.Add(null);
            }
            else
            {
                var result = broker.PutRaw(endPoint, putTo);
                outputs[0].OutputStrings.Add(result);
            }
        }
        catch (Exception e)
        {
            outputs[0].OutputStrings.Add(null);
            error.AddError(e.Message);
            break;
        }
    }
    return outputs;
}
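Each pass of the while loop above writes exactly one entry into `outputs[0].OutputStrings`: the broker result on success, `null` when an earlier error was recorded, and `null` plus an error followed by a break when an exception occurs. A plain, self-contained sketch of that accumulation pattern, independent of the Warewolf IO types:

using System;
using System.Collections.Generic;

static class OutputAccumulationSketch
{
    // Runs one operation per input; records null for a failed slot and stops on exception,
    // mirroring how the PutRaw results are collected above.
    static (List<string> Outputs, List<string> Errors) RunAll(IEnumerable<string> inputs, Func<string, string> operation)
    {
        var outputs = new List<string>();
        var errors = new List<string>();
        foreach (var input in inputs)
        {
            try
            {
                outputs.Add(errors.Count > 0 ? null : operation(input));
            }
            catch (Exception e)
            {
                outputs.Add(null);
                errors.Add(e.Message);
                break; // match the activity: abandon the remaining iterations
            }
        }
        return (outputs, errors);
    }

    static void Main()
    {
        var result = RunAll(new[] { "a", "boom", "c" },
            s => s == "boom" ? throw new InvalidOperationException("write failed") : s.ToUpperInvariant());
        Console.WriteLine(string.Join(", ", result.Outputs)); // "A, " (the failed slot prints as empty)
        Console.WriteLine(string.Join(", ", result.Errors));  // "write failed"
    }
}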
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
{
    ErrorResultTO allErrors = new ErrorResultTO();
    ErrorResultTO errors = new ErrorResultTO();
    allErrors.MergeErrors(errors);
    InitializeDebug(dataObject);
    try
    {
        IsSingleValueRule.ApplyIsSingleValueRule(Result, allErrors);
        if (dataObject.IsDebugMode())
        {
            AddDebugInputItem(dataObject.Environment, update);
        }
        string input = string.IsNullOrEmpty(Expression) ? Expression : Expression.Replace("\\r", string.Empty).Replace("\\n", string.Empty).Replace(Environment.NewLine, "");
        var warewolfListIterator = new WarewolfListIterator();
        var calc = String.Format(GlobalConstants.CalculateTextConvertFormat, input);
        var warewolfEvalResult = dataObject.Environment.Eval(calc, update);
        var scalarResult = warewolfEvalResult as WarewolfDataEvaluationCommon.WarewolfEvalResult.WarewolfAtomResult;
        if (scalarResult != null && scalarResult.Item.IsNothing)
        {
            throw new NullValueInVariableException("Error with variables in input.", input);
        }
        var inputIterator = new WarewolfIterator(warewolfEvalResult);
        warewolfListIterator.AddVariableToIterateOn(inputIterator);
        while (warewolfListIterator.HasMoreData())
        {
            var result = warewolfListIterator.FetchNextValue(inputIterator);
            dataObject.Environment.Assign(Result, result, update);
        }
        if (dataObject.IsDebugMode() && !allErrors.HasErrors())
        {
            AddDebugOutputItem(Result, dataObject.Environment, update);
        }
        allErrors.MergeErrors(errors);
    }
    catch (Exception ex)
    {
        Dev2Logger.Log.Error("Calculate Exception", ex);
        allErrors.AddError(ex.Message);
    }
    finally
    {
        // Handle Errors
        var hasErrors = allErrors.HasErrors();
        if (hasErrors)
        {
            DisplayAndWriteError("DsfCalculateActivity", allErrors);
            var errorString = allErrors.MakeDisplayReady();
            dataObject.Environment.AddError(errorString);
        }
        if (dataObject.IsDebugMode())
        {
            if (hasErrors)
            {
                AddDebugOutputItem(Result, dataObject.Environment, update);
            }
            DispatchDebugState(dataObject, StateType.Before, update);
            DispatchDebugState(dataObject, StateType.After, update);
        }
    }
}
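The calculate activity first strips literal escape sequences and newlines from the expression, then wraps it in `GlobalConstants.CalculateTextConvertFormat` so the evaluator treats it as a calculation rather than plain text. A sketch of that pre-processing step follows; the marker format constant here is a hypothetical stand-in, not necessarily the real value defined by the Warewolf runtime.

using System;

static class CalculatePreprocessSketch
{
    // Hypothetical stand-in for GlobalConstants.CalculateTextConvertFormat;
    // the real marker text is defined by the Warewolf runtime and may differ.
    const string CalculateTextConvertFormat = "!~calculation~!{0}!~~calculation~!";

    // Removes escaped and real newlines, then wraps the expression in the calculate marker,
    // mirroring the input/calc preparation in the activity above.
    static string PrepareCalculation(string expression)
    {
        var input = string.IsNullOrEmpty(expression)
            ? expression
            : expression.Replace("\\r", string.Empty)
                        .Replace("\\n", string.Empty)
                        .Replace(Environment.NewLine, string.Empty);
        return string.Format(CalculateTextConvertFormat, input);
    }

    static void Main()
    {
        Console.WriteLine(PrepareCalculation("SUM([[rec(*).value]])\\r\\n"));
    }
}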
Guid ExecuteWf(ErrorResultTO to, IServiceTestModelTO test) { Guid result = new Guid(); var wfappUtils = new WfApplicationUtils(); ErrorResultTO invokeErrors = new ErrorResultTO(); var resourceId = DataObject.ResourceID; if (test?.Inputs != null) { AddRecordsetsInputs(test.Inputs.Where(input => DataListUtil.IsValueRecordset(input.Variable) && !input.Variable.Contains("@")), DataObject.Environment); foreach (var input in test.Inputs) { var variable = DataListUtil.AddBracketsToValueIfNotExist(input.Variable); var value = input.Value; if (variable.StartsWith("[[@")) { var jContainer = JsonConvert.DeserializeObject(value) as JObject; DataObject.Environment.AddToJsonObjects(variable, jContainer); } else if (!DataListUtil.IsValueRecordset(input.Variable)) { string errorMessage; if (ExecutionEnvironment.IsValidVariableExpression(input.Value, out errorMessage, 0)) { DataObject.Environment.AllErrors.Add("Cannot use variables as input value."); } else { if (!input.EmptyIsNull || !string.IsNullOrEmpty(value)) { DataObject.Environment.Assign(variable, value, 0); } } } } } Dev2JsonSerializer serializer = new Dev2JsonSerializer(); try { IExecutionToken exeToken = new ExecutionToken { IsUserCanceled = false }; DataObject.ExecutionToken = exeToken; if (DataObject.IsDebugMode()) { var debugState = wfappUtils.GetDebugState(DataObject, StateType.Start, DataObject.Environment.HasErrors(), DataObject.Environment.FetchErrors(), invokeErrors, DateTime.Now, true, false, false); wfappUtils.WriteDebug(DataObject, debugState); } var testRunResult = Eval(resourceId, DataObject, test); if (DataObject.IsDebugMode()) { if (!DataObject.StopExecution) { var debugState = wfappUtils.GetDebugState(DataObject, StateType.End, DataObject.Environment.HasErrors(), DataObject.Environment.FetchErrors(), invokeErrors, DataObject.StartTime, false, true, true); DebugItem outputDebugItem = new DebugItem(); if (test != null) { var msg = test.FailureMessage; if (test.TestPassed) { msg = Warewolf.Resource.Messages.Messages.Test_PassedResult; } outputDebugItem.AddRange(new DebugItemServiceTestStaticDataParams(msg, test.TestFailing).GetDebugItemResult()); } debugState.AssertResultList.Add(outputDebugItem); wfappUtils.WriteDebug(DataObject, debugState); } var testAggregateDebugState = wfappUtils.GetDebugState(DataObject, StateType.TestAggregate, false, string.Empty, new ErrorResultTO(), DataObject.StartTime, false, false, false); AggregateTestResult(resourceId, test); DebugItem itemToAdd = new DebugItem(); if (test != null) { var msg = test.FailureMessage; if (test.TestPassed) { msg = Warewolf.Resource.Messages.Messages.Test_PassedResult; } itemToAdd.AddRange(new DebugItemServiceTestStaticDataParams(msg, test.TestFailing).GetDebugItemResult()); } testAggregateDebugState.AssertResultList.Add(itemToAdd); wfappUtils.WriteDebug(DataObject, testAggregateDebugState); if (testRunResult != null) { if (test != null) { test.Result.DebugForTest = TestDebugMessageRepo.Instance.FetchDebugItems(resourceId, test.TestName); } _request.ExecuteResult = serializer.SerializeToBuilder(testRunResult); } } else { AggregateTestResult(resourceId, test); if (test != null) { _request.ExecuteResult = serializer.SerializeToBuilder(test); } } result = DataObject.DataListID; } catch (InvalidWorkflowException iwe) { Dev2Logger.Error(iwe); var msg = iwe.Message; int start = msg.IndexOf("Flowchart ", StringComparison.Ordinal); to?.AddError(start > 0 ? 
GlobalConstants.NoStartNodeError : iwe.Message); var failureMessage = DataObject.Environment.FetchErrors(); wfappUtils.DispatchDebugState(DataObject, StateType.End, DataObject.Environment.HasErrors(), failureMessage, out invokeErrors, DataObject.StartTime, false, true); // ReSharper disable once PossibleNullReferenceException test.TestFailing = false; test.TestPassed = false; test.TestPending = false; test.TestInvalid = true; test.LastRunDate = DateTime.Now; Common.Utilities.PerformActionInsideImpersonatedContext(Common.Utilities.ServerUser, () => { TestCatalog.Instance.SaveTest(resourceId, test); }); var testRunResult = new TestRunResult { TestName = test.TestName }; if (test.TestInvalid) { testRunResult.RunTestResult = RunResult.TestInvalid; testRunResult.Message = failureMessage; Dev2Logger.Error($"Test {DataObject.TestName} for Resource {DataObject.ServiceName} ID {DataObject.ResourceID} marked invalid in exception for no start node"); } testRunResult.DebugForTest = TestDebugMessageRepo.Instance.FetchDebugItems(resourceId, test.TestName); if (_request != null) { _request.ExecuteResult = serializer.SerializeToBuilder(testRunResult); } } catch (Exception ex) { Dev2Logger.Error(ex); to.AddError(ex.Message); var failureMessage = DataObject.Environment.FetchErrors(); wfappUtils.DispatchDebugState(DataObject, StateType.End, DataObject.Environment.HasErrors(), failureMessage, out invokeErrors, DataObject.StartTime, false, true); // ReSharper disable once PossibleNullReferenceException test.TestFailing = false; test.TestPassed = false; test.TestPending = false; test.TestInvalid = true; test.LastRunDate = DateTime.Now; Common.Utilities.PerformActionInsideImpersonatedContext(Common.Utilities.ServerUser, () => { TestCatalog.Instance.SaveTest(resourceId, test); }); var testRunResult = new TestRunResult { TestName = test.TestName }; if (test.TestInvalid) { testRunResult.RunTestResult = RunResult.TestInvalid; testRunResult.Message = ex.Message; Dev2Logger.Error($"Test {DataObject.TestName} for Resource {DataObject.ServiceName} ID {DataObject.ResourceID} marked invalid in general exception"); } testRunResult.DebugForTest = TestDebugMessageRepo.Instance.FetchDebugItems(resourceId, test.TestName); _request.ExecuteResult = serializer.SerializeToBuilder(testRunResult); } return(result); }
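When the test inputs are applied in ExecuteWf above, each input falls into one of three buckets: JSON objects (variables starting with `[[@`), recordset columns (grouped and added separately), and plain scalars, with `EmptyIsNull` suppressing assignment of empty scalar values. Below is a simplified classifier sketch of that routing, independent of the Warewolf environment types; the recordset check is a crude illustration of what `DataListUtil.IsValueRecordset` does.

using System;

enum TestInputKind { JsonObject, RecordsetColumn, Scalar, SkippedNull }

static class TestInputRoutingSketch
{
    // Classifies a test input the way ExecuteWf routes it: JSON objects are deserialized,
    // recordset columns are grouped and appended, scalars are assigned unless EmptyIsNull applies.
    static TestInputKind Classify(string variable, string value, bool emptyIsNull)
    {
        var bracketed = variable.StartsWith("[[") ? variable : "[[" + variable + "]]";
        if (bracketed.StartsWith("[[@"))
        {
            return TestInputKind.JsonObject;
        }
        if (bracketed.Contains("().") || bracketed.Contains("(*)."))
        {
            // crude recordset check for illustration; the real code uses DataListUtil.IsValueRecordset
            return TestInputKind.RecordsetColumn;
        }
        if (emptyIsNull && string.IsNullOrEmpty(value))
        {
            return TestInputKind.SkippedNull;
        }
        return TestInputKind.Scalar;
    }

    static void Main()
    {
        Console.WriteLine(Classify("[[@Person]]", "{\"Name\":\"Bob\"}", false)); // JsonObject
        Console.WriteLine(Classify("[[rec().a]]", "1", false));                  // RecordsetColumn
        Console.WriteLine(Classify("[[name]]", "", true));                       // SkippedNull
    }
}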
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
{
    ErrorResultTO allErrors = new ErrorResultTO();
    ErrorResultTO errors = new ErrorResultTO();
    allErrors.MergeErrors(errors);
    InitializeDebug(dataObject);
    // Process if no errors
    try
    {
        ValidateRecordsetName(RecordsetName, errors);
        allErrors.MergeErrors(errors);
        if (!allErrors.HasErrors())
        {
            try
            {
                string rs = DataListUtil.ExtractRecordsetNameFromValue(RecordsetName);
                if (RecordsLength == string.Empty)
                {
                    allErrors.AddError(ErrorResource.BlankResultVariable);
                }
                if (dataObject.IsDebugMode())
                {
                    var warewolfEvalResult = dataObject.Environment.Eval(RecordsetName.Replace("()", "(*)"), update);
                    if (warewolfEvalResult.IsWarewolfRecordSetResult)
                    {
                        var recsetResult = warewolfEvalResult as CommonFunctions.WarewolfEvalResult.WarewolfRecordSetResult;
                        if (recsetResult != null)
                        {
                            AddDebugInputItem(new DebugItemWarewolfRecordset(recsetResult.Item, RecordsetName, "Recordset", "="));
                        }
                    }
                    if (warewolfEvalResult.IsWarewolfAtomListresult)
                    {
                        var recsetResult = warewolfEvalResult as CommonFunctions.WarewolfEvalResult.WarewolfAtomListresult;
                        if (recsetResult != null)
                        {
                            AddDebugInputItem(new DebugEvalResult(RecordsetName, "Recordset", dataObject.Environment, update));
                        }
                    }
                }
                var rule = new IsSingleValueRule(() => RecordsLength);
                var single = rule.Check();
                if (single != null)
                {
                    allErrors.AddError(single.Message);
                }
                else
                {
                    if (dataObject.Environment.HasRecordSet(RecordsetName))
                    {
                        var count = dataObject.Environment.GetLength(rs);
                        var value = count.ToString();
                        dataObject.Environment.Assign(RecordsLength, value, update);
                        AddDebugOutputItem(new DebugItemWarewolfAtomResult(value, RecordsLength, ""));
                    }
                    else
                    {
                        if (TreatNullAsZero)
                        {
                            dataObject.Environment.Assign(RecordsLength, 0.ToString(), update);
                            AddDebugOutputItem(new DebugItemWarewolfAtomResult(0.ToString(), RecordsLength, ""));
                        }
                        else
                        {
                            allErrors.AddError(string.Format(ErrorResource.NullRecordSet, RecordsetName));
                        }
                    }
                }
            }
            catch (Exception e)
            {
                allErrors.AddError(e.Message);
                dataObject.Environment.Assign(RecordsLength, "0", update);
                AddDebugOutputItem(new DebugItemStaticDataParams("0", RecordsLength, "", "="));
            }
        }
    }
    finally
    {
        // Handle Errors
        var hasErrors = allErrors.HasErrors();
        if (hasErrors)
        {
            DisplayAndWriteError("DsfRecordsetNullhandlerLengthActivity", allErrors);
            var errorString = allErrors.MakeDisplayReady();
            dataObject.Environment.AddError(errorString);
        }
        if (dataObject.IsDebugMode())
        {
            DispatchDebugState(dataObject, StateType.Before, update);
            DispatchDebugState(dataObject, StateType.After, update);
        }
    }
}
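The null-handling branch above is the whole point of this activity variant: a missing recordset yields `0` when `TreatNullAsZero` is set, and an error otherwise. A minimal sketch of that decision over a plain dictionary (the error text is illustrative, not the `ErrorResource.NullRecordSet` resource string):

using System;
using System.Collections.Generic;

static class RecordsetLengthSketch
{
    // Returns the recordset length, substituting zero for a missing recordset when
    // treatNullAsZero is set, otherwise reporting an error (as the activity above does).
    static bool TryGetLength(IDictionary<string, IList<string>> recordsets, string name, bool treatNullAsZero, out int length, out string error)
    {
        error = null;
        IList<string> rows;
        if (recordsets.TryGetValue(name, out rows))
        {
            length = rows.Count;
            return true;
        }
        if (treatNullAsZero)
        {
            length = 0;
            return true;
        }
        length = 0;
        error = $"The recordset {name} does not exist.";
        return false;
    }

    static void Main()
    {
        var data = new Dictionary<string, IList<string>> { ["rec"] = new List<string> { "a", "b" } };
        int length;
        string error;
        Console.WriteLine(TryGetLength(data, "rec", false, out length, out error) ? length.ToString() : error);   // 2
        Console.WriteLine(TryGetLength(data, "other", true, out length, out error) ? length.ToString() : error);  // 0
    }
}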
// ReSharper restore RedundantOverridenMember protected override void OnExecute(NativeActivityContext context) { _debugInputs = new List <DebugItem>(); _debugOutputs = new List <DebugItem>(); _indexCounter = 1; IDSFDataObject dataObject = context.GetExtension <IDSFDataObject>(); IDataListCompiler compiler = DataListFactory.CreateDataListCompiler(); Guid dlId = dataObject.DataListID; ErrorResultTO allErrors = new ErrorResultTO(); ErrorResultTO errors; _datalistString = compiler.ConvertFrom(dataObject.DataListID, DataListFormat.CreateFormat(GlobalConstants._Studio_XML), Dev2.DataList.Contract.enTranslationDepth.Shape, out errors).ToString(); InitializeDebug(dataObject); try { var sourceString = SourceString ?? ""; IBinaryDataListEntry expressionsEntry = compiler.Evaluate(dlId, enActionType.User, sourceString, false, out errors); if (dataObject.IsDebugMode()) { AddDebugInputItem(new DebugItemVariableParams(sourceString, "String to Split", expressionsEntry, dlId)); AddDebugInputItem(new DebugItemStaticDataParams(ReverseOrder ? "Backward" : "Forward", "Process Direction")); AddDebugInputItem(new DebugItemStaticDataParams(SkipBlankRows ? "Yes" : "No", "Skip blank rows")); } CleanArguments(ResultsCollection); ResultsCollection.ToList().ForEach(a => IsSingleValueRule.ApplyIsSingleValueRule(a.OutputVariable, allErrors)); if (ResultsCollection.Count > 0) { if (dataObject.IsDebugMode()) { AddDebug(ResultsCollection, compiler, dlId); } CheckIndex(sourceString); allErrors.MergeErrors(errors); IDev2DataListEvaluateIterator itr = Dev2ValueObjectFactory.CreateEvaluateIterator(expressionsEntry); IDev2DataListUpsertPayloadBuilder <string> toUpsert = Dev2DataListBuilderFactory.CreateStringDataListUpsertBuilder(true); bool singleInnerIteration = ArePureScalarTargets(ResultsCollection); bool exit = false; while (itr.HasMoreRecords()) { IList <IBinaryDataListItem> cols = itr.FetchNextRowData(); foreach (IBinaryDataListItem c in cols) { // set up live flushing iterator details toUpsert.HasLiveFlushing = true; toUpsert.LiveFlushingLocation = dlId; #pragma warning disable 219 int opCnt = 0; #pragma warning restore 219 if (!string.IsNullOrEmpty(c.TheValue)) { string val = c.TheValue; var blankRows = new List <int>(); if (SkipBlankRows) { var strings = val.Split(new[] { Environment.NewLine, "\r", "\n" }, StringSplitOptions.RemoveEmptyEntries); var newSourceString = string.Join(Environment.NewLine, strings); val = newSourceString; } else { var strings = val.Split(new[] { Environment.NewLine }, StringSplitOptions.None); for (int blankRow = 0; blankRow < strings.Length; blankRow++) { if (String.IsNullOrEmpty(strings[blankRow])) { blankRows.Add(blankRow); } } } IDev2Tokenizer tokenizer = CreateSplitPattern(ref val, ResultsCollection, compiler, dlId, out errors); allErrors.MergeErrors(errors); if (!allErrors.HasErrors()) { if (tokenizer != null) { int pos = 0; int end = (ResultsCollection.Count - 1); // track used tokens so we can adjust flushing ;) HashSet <string> usedTokens = new HashSet <string>(); while (tokenizer.HasMoreOps() && !exit) { string tmp = tokenizer.NextToken(); if (blankRows.Contains(opCnt) && blankRows.Count != 0) { tmp = tmp.Replace(Environment.NewLine, ""); while (pos != end + 1) { UpdateOutputVariableWithValue(pos, usedTokens, toUpsert, ""); pos++; } pos = CompletedRow(usedTokens, toUpsert, singleInnerIteration, ref opCnt, ref exit); } UpdateOutputVariableWithValue(pos, usedTokens, toUpsert, tmp); // Per pass if (pos == end) { //row has been processed pos = CompletedRow(usedTokens, toUpsert, 
singleInnerIteration, ref opCnt, ref exit); } else { pos++; } } // flush the final frame ;) toUpsert.FlushIterationFrame(); toUpsert = Dev2DataListBuilderFactory.CreateStringDataListUpsertBuilder(true); } } } } } if (dataObject.IsDebugMode() && !allErrors.HasErrors()) { AddResultToDebug(compiler, dlId); } } } catch (Exception e) { Dev2Logger.Log.Error("DSFDataSplit", e); allErrors.AddError(e.Message); } finally { // Handle Errors var hasErrors = allErrors.HasErrors(); if (hasErrors) { DisplayAndWriteError("DsfDataSplitActivity", allErrors); compiler.UpsertSystemTag(dlId, enSystemTag.Dev2Error, allErrors.MakeDataListReady(), out errors); } if (dataObject.IsDebugMode()) { if (hasErrors) { AddResultToDebug(compiler, dlId); } DispatchDebugState(context, StateType.Before); DispatchDebugState(context, StateType.After); } } }
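The `SkipBlankRows` handling in OnExecute above either collapses blank lines out of the source before tokenizing, or records their indices so the split can emit empty values for those rows. A standalone sketch of the two behaviours on a plain string:

using System;
using System.Collections.Generic;
using System.Linq;

static class BlankRowHandlingSketch
{
    // With skipBlankRows the source is rejoined without empty lines (as in the activity);
    // without it, the indexes of blank rows are collected so they can be emitted as empty values.
    static (string Source, List<int> BlankRows) PrepareSource(string value, bool skipBlankRows)
    {
        if (skipBlankRows)
        {
            var kept = value.Split(new[] { Environment.NewLine, "\r", "\n" }, StringSplitOptions.RemoveEmptyEntries);
            return (string.Join(Environment.NewLine, kept), new List<int>());
        }
        var rows = value.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
        var blankRows = rows.Select((row, index) => new { row, index })
                            .Where(x => string.IsNullOrEmpty(x.row))
                            .Select(x => x.index)
                            .ToList();
        return (value, blankRows);
    }

    static void Main()
    {
        var text = "a" + Environment.NewLine + Environment.NewLine + "b";
        Console.WriteLine(PrepareSource(text, skipBlankRows: true).Source.Replace(Environment.NewLine, "|"));  // a|b
        Console.WriteLine(string.Join(",", PrepareSource(text, skipBlankRows: false).BlankRows));              // 1
    }
}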
public void AddAlias(Guid dlId, string parentColumn, string parentNamespace, string childColumn, out ErrorResultTO errors)
{
    errors = new ErrorResultTO();
    // TODO : This needs to change so we can track at all levels what the root alias is ;)
    IDataListCompiler compiler = DataListFactory.CreateDataListCompiler();
    Guid masterId = dlId;
    string masterRs = parentNamespace;
    string masterCol = parentColumn;
    Guid searchId = dlId;
    IBinaryDataListEntry masterEntry = null;
    int aliasSearchRounds = 0;
    BinaryDataListAlias binaryDataListAlias = null;
    while(searchId != Guid.Empty)
    {
        ErrorResultTO invokeErrors;
        var bdl = compiler.FetchBinaryDataList(searchId, out invokeErrors);
        errors.MergeErrors(invokeErrors);
        if(bdl != null)
        {
            string error;
            bdl.TryGetEntry(masterRs, out masterEntry, out error);
            errors.AddError(error);
            if(masterEntry != null)
            {
                var aliases = masterEntry.FetchAlias();
                if(aliases.TryGetValue(masterCol, out binaryDataListAlias))
                {
                    // we have a hit ;)
                    masterId = binaryDataListAlias.MasterKeyID;
                    searchId = masterId;
                    masterRs = binaryDataListAlias.MasterNamespace;
                    masterCol = binaryDataListAlias.MasterColumn;
                    aliasSearchRounds++;
                }
                else
                {
                    // ensure we copy over the alias entry's keys ;)
                    if(IsEmtpy)
                    {
                        var keyItr = masterEntry.FetchRecordsetIndexes();
                        _myKeys = new IndexList(keyItr.FetchGaps(), keyItr.MaxIndex(), keyItr.MinIndex());
                        IsEmtpy = false;
                    }
                    searchId = Guid.Empty; // signal end ;)
                }
            }
            else
            {
                if(aliasSearchRounds == 0)
                {
                    throw new Exception("Missing Entry");
                }
                // we hit the bottom earlier, handle it ;)
                if(binaryDataListAlias != null)
                {
                    masterEntry = binaryDataListAlias.MasterEntry;
                }
                searchId = Guid.Empty; // signal end ;)
            }
        }
        else
        {
            throw new Exception("Missing DataList");
        }
    }
    // Check MasterKeyID to see if it contains an alias; if so, keep bubbling up until we reach the end ;)
    _keyToAliasMap[childColumn] = new BinaryDataListAlias
    {
        MasterKeyID = masterId,
        ChildKey = GenerateKeyPrefix(Namespace, DataListKey),
        MasterKey = GenerateKeyPrefix(masterRs, masterId),
        MasterColumn = masterCol,
        MasterNamespace = masterRs,
        MasterEntry = masterEntry
    };
}
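AddAlias keeps following `MasterKeyID` links until it finds an entry with no further alias, so the child column is always mapped to the root master rather than an intermediate alias. Here is a simplified sketch of that chain walk over a plain lookup table; the `AliasTarget` type is hypothetical, and the sketch assumes the chain is acyclic (the real code terminates by clearing the search id).

using System;
using System.Collections.Generic;

// Hypothetical stand-in for a (namespace, column) alias target.
sealed class AliasTarget
{
    public string Namespace;
    public string Column;
}

static class AliasChainSketch
{
    // Follows alias links until no further mapping exists, returning the root target,
    // mirroring the while loop in AddAlias above. Assumes the alias chain has no cycles.
    static AliasTarget ResolveRoot(IDictionary<string, AliasTarget> aliases, string ns, string column)
    {
        var current = new AliasTarget { Namespace = ns, Column = column };
        AliasTarget next;
        while (aliases.TryGetValue(current.Namespace + "." + current.Column, out next))
        {
            current = next;
        }
        return current;
    }

    static void Main()
    {
        var aliases = new Dictionary<string, AliasTarget>
        {
            ["child.name"] = new AliasTarget { Namespace = "middle", Column = "name" },
            ["middle.name"] = new AliasTarget { Namespace = "root", Column = "fullName" }
        };
        var root = ResolveRoot(aliases, "child", "name");
        Console.WriteLine(root.Namespace + "." + root.Column); // root.fullName
    }
}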