/// <summary>
/// Returns the row indexes whose values are distinct over the given columns.
/// </summary>
/// <param name="sk">Base storage key for the recordset.</param>
/// <param name="keys">Iterator over the row indexes to inspect.</param>
/// <param name="colIdx">Column positions that participate in the distinct comparison.</param>
/// <returns>The index of the first occurrence of each distinct row.</returns>
public List<int> DistinctGetRows(StorageKey sk, IIndexIterator keys, List<int> colIdx)
{
    // Materialise every referenced row together with its index, skipping nothing;
    // a missing row is a storage corruption and aborts the whole operation.
    var indexedRows = new List<IndexBasedBinaryDataListRow>();
    while (keys.HasMore())
    {
        var rowIndex = keys.FetchNextIndex();
        var rowKey = new StorageKey(sk.UID, rowIndex + sk.UniqueKey);
        var rowData = _levelZeroCache[rowKey];
        if (rowData == null)
        {
            throw new Exception(string.Format("Critical error. No value in storage for index {0}", rowIndex));
        }
        indexedRows.Add(new IndexBasedBinaryDataListRow { Row = rowData, Index = rowIndex });
    }

    // Collapse to the first occurrence of each distinct column-value combination.
    var comparer = new BinaryDataListRowEqualityComparer(colIdx);
    return indexedRows.Distinct(comparer).Select(r => r.Index).ToList();
}
/// <summary>
/// Emits debug results for the next row yielded by <paramref name="idxItr"/>.
/// Advances the iterator exactly once per call.
/// </summary>
/// <param name="fieldName">When null/empty, every column of the row is emitted; otherwise only that column.</param>
void GetValues(IBinaryDataListEntry dlEntry, string value, int iterCnt, IIndexIterator idxItr, enRecordsetIndexType indexType, IList<IDebugItemResult> results, string initExpression, string labelText, string fieldName = null)
{
    string error;
    int index = idxItr.FetchNextIndex();

    if (!string.IsNullOrEmpty(fieldName))
    {
        // Single-column fetch; for single-column recordsets fall back to the
        // indexed upsert payload and suppress the compare.
        var recordField = dlEntry.TryFetchRecordsetColumnAtIndex(fieldName, index, out error);
        var ignoreCompare = false;
        if (recordField == null && dlEntry.Columns.Count == 1)
        {
            recordField = dlEntry.TryFetchIndexedRecordsetUpsertPayload(index, out error);
            ignoreCompare = true;
        }
        GetValue(dlEntry, value, iterCnt, fieldName, indexType, results, initExpression, recordField, index, ignoreCompare, labelText);
        return;
    }

    // No field requested: emit a value for every column in the fetched row.
    foreach (var recordField in dlEntry.FetchRecordAt(index, out error))
    {
        GetValue(dlEntry, value, iterCnt, fieldName, indexType, results, initExpression, recordField, index, false, labelText);
    }
}
/// <summary>
/// Merges <paramref name="toMerge"/> into this entry: recordset rows are appended,
/// a scalar overwrites this scalar. Mixing the two kinds is an error.
/// </summary>
/// <param name="toMerge">The entry to merge in.</param>
/// <param name="error">Last error encountered, or empty.</param>
public void Merge(IBinaryDataListEntry toMerge, out string error)
{
    error = string.Empty;

    if (IsRecordset && toMerge.IsRecordset)
    {
        // Append every row of toMerge onto this recordset.
        var indexes = toMerge.FetchRecordsetIndexes();
        while (indexes.HasMore())
        {
            var next = indexes.FetchNextIndex();
            foreach (var item in toMerge.FetchRecordAt(next, out error))
            {
                TryAppendRecordItem(item, out error);
            }
        }
    }
    else if (!IsRecordset && !toMerge.IsRecordset)
    {
        // Scalar-on-scalar: toMerge wins.
        TryPutScalar(toMerge.FetchScalar(), out error);
    }
    else
    {
        error = "Type mis-match, one side is Recordset while the other is a scalar";
    }
}
/// <summary>
/// Builds an index → row-items map for sorting. Scalars yield an empty map;
/// rows missing from storage are silently skipped.
/// </summary>
/// <returns>Row index keyed dictionary of the row's items.</returns>
public IDictionary<int, IList<IBinaryDataListItem>> FetchSortData()
{
    IDictionary<int, IList<IBinaryDataListItem>> sortData = new Dictionary<int, IList<IBinaryDataListItem>>(Count);
    IIndexIterator indexes = Keys;

    if (IsRecordset)
    {
        // TODO : What if they are in different locations?
        var columnCount = (short)Columns.Count;
        while (indexes.HasMore())
        {
            int rowIndex = indexes.FetchNextIndex();
            // Hi-jack the storage lookup for this row.
            var storageKey = new StorageKey(DataListKey, rowIndex + GenerateKeyPrefix(Namespace, DataListKey));
            BinaryDataListRow row;
            if (!_itemStorage.TryGetValue(storageKey, columnCount, out row))
            {
                continue; // row not in storage — skip, matching the original lookup-miss behavior
            }
            IList<IBinaryDataListItem> rowItems = new List<IBinaryDataListItem>(Columns.Count);
            for (int col = 0; col < Columns.Count; col++)
            {
                rowItems.Add(new BinaryDataListItem(row.FetchValue(col, columnCount), Namespace, Columns[col].ColumnName, rowIndex));
            }
            sortData[rowIndex] = rowItems;
        }
    }

    return sortData;
}
/// <summary>
/// Blanks the named column at every populated index; when no (or an unknown)
/// column is given, removes all rows instead.
/// </summary>
/// <param name="colName">Column to blank, or null to clear everything.</param>
public void BlankRecordSetData(string colName)
{
    IIndexIterator indexes = _internalObj.Keys;

    if (colName == null)
    {
        ClearAll(indexes);
        return;
    }

    Dev2Column column = Columns.FirstOrDefault(c => c.ColumnName == colName);
    if (column == null)
    {
        // Unknown column — same treatment as no column: wipe the lot.
        ClearAll(indexes);
        return;
    }

    // Overwrite the named column with an empty value at every populated index.
    while (indexes.HasMore())
    {
        int next = indexes.FetchNextIndex();
        IBinaryDataListItem blankItem = Dev2BinaryDataListFactory.CreateBinaryItem(string.Empty, Namespace, column.ColumnName, next);
        string error;
        TryPutRecordItemAtIndex(blankItem, next, out error);
    }
}
/// <summary>
/// Wraps <paramref name="entry"/>; recordsets get an index iterator,
/// scalars leave the iterator field unset.
/// </summary>
internal Dev2DataListEvaluateIterator(IBinaryDataListEntry entry)
{
    _entry = entry;
    if (entry.IsRecordset)
    {
        _idxItr = entry.FetchRecordsetIndexes();
    }
}
/// <summary>
/// Captures the entry to iterate; only recordset entries expose indexes,
/// so scalars do not initialise the iterator.
/// </summary>
internal Dev2DataListEvaluateIterator(IBinaryDataListEntry entry)
{
    _entry = entry;
    var isRecordset = _entry.IsRecordset;
    if (isRecordset)
    {
        _idxItr = entry.FetchRecordsetIndexes();
    }
}
/// <summary>
/// Removes every index yielded by the iterator from the backing store.
/// </summary>
/// <param name="idxItr">Iterator over the indexes to remove.</param>
void ClearAll(IIndexIterator idxItr)
{
    // miss, clear it all out ;)
    while (idxItr.HasMore())
    {
        _internalObj.Remove(idxItr.FetchNextIndex());
    }
}
/// <summary>
/// Processes the record set: serialises <paramref name="entry"/> as a JSON-style
/// fragment of the form <c>"Namespace" : [{"field":"value",...}, ...]</c>.
/// NOTE(review): field values are appended verbatim — quotes/backslashes in
/// TheValue are not JSON-escaped; confirm callers only feed safe values.
/// </summary>
/// <param name="entry">The entry.</param>
/// <param name="error">The error.</param>
/// <returns>The serialised fragment.</returns>
private string ProcessRecordSet(IBinaryDataListEntry entry, out string error)
{
    StringBuilder result = new StringBuilder();
    error = string.Empty;
    // MAKE RS START ;)
    result.Append("\"");
    result.Append(entry.Namespace);
    result.Append("\" : [");
    IIndexIterator idxItr = entry.FetchRecordsetIndexes();
    int rsCnt = 0;
    // IsEmpty() is re-checked each pass; an empty entry emits no rows at all.
    while (idxItr.HasMore() && !entry.IsEmpty())
    {
        int idx = idxItr.FetchNextIndex();
        IList<IBinaryDataListItem> rowData = entry.FetchRecordAt(idx, out error);
        result.Append("{");
        int colIdx = 0;
        foreach (IBinaryDataListItem col in rowData)
        {
            result.Append("\"");
            result.Append(col.FieldName);
            result.Append("\":\"");
            result.Append(col.TheValue);
            result.Append("\"");
            // add , if need be ;) — comma between columns, not after the last one
            colIdx++;
            if (colIdx < rowData.Count)
            {
                result.Append(",");
            }
        }
        result.Append("}");
        // append , for row data ;) — comma between rows, not after the last one
        rsCnt++;
        if (rsCnt < idxItr.Count)
        {
            result.Append(", ");
        }
    }
    // END RS ;)
    result.Append("]");
    return (result.ToString());
}
public void CanIterateNormally()
{
    const int maxValue = 5;
    IIndexIterator iterator = Dev2BinaryDataListFactory.CreateLoopedIndexIterator(10, maxValue);

    // Drain the iterator, counting how many indexes it yields.
    var iterations = 0;
    while (iterator.HasMore())
    {
        iterator.FetchNextIndex();
        iterations++;
    }

    Assert.AreEqual(maxValue, iterations);
}
public void DistinctGetValuesWhenHasDistinctValuesShouldReturnOnlyDistinctRows1MilOfRows100Col()
{
    BinaryDataListStorage bdls = new BinaryDataListStorage("MySweetNamespace", Guid.NewGuid());
    // build insert value ;)
    IIndexIterator keys = AddLotsOfRows1Mil(bdls);
    // Insert information
    List<int> distinctCols = new List<int> { 2, 4, 5, 7, 120, 134, 99, 78, 34 };

    // FIX: measure elapsed time with Stopwatch instead of DateTime.Now tick
    // arithmetic — wall-clock subtraction is vulnerable to clock adjustments
    // and has coarse resolution, which made this timing assertion flaky.
    var timer = System.Diagnostics.Stopwatch.StartNew();
    List<int> rows = bdls.DistinctGetRows(keys, distinctCols);
    timer.Stop();
    double dif = timer.Elapsed.TotalSeconds;

    Assert.AreEqual(7, rows.Count);
    Assert.IsTrue(dif < 20, string.Format("Time taken: {0}", dif));
    Console.Write(dif);
}
/// <summary>
/// Fetches every value of one recordset column from the data list identified
/// by <paramref name="dataListId"/>.
/// </summary>
/// <param name="dataListId">Data list to read from.</param>
/// <param name="recordSet">Recordset name.</param>
/// <param name="fieldNameToRetrieve">Column whose values are returned.</param>
/// <param name="result">Cloned items, one per populated row (always non-null).</param>
/// <param name="error">Last error produced while fetching, or empty.</param>
/// <returns>false when the entry is missing or an error occurred; true otherwise.</returns>
public bool GetRecordSetFieldValueFromDataList(Guid dataListId, string recordSet, string fieldNameToRetrieve, out IList<IBinaryDataListItem> result, out string error)
{
    IList<IBinaryDataListItem> dLItems = new List<IBinaryDataListItem>();
    ErrorResultTO errorResult;
    IBinaryDataListEntry entry;
    IBinaryDataList bdl = Compiler.FetchBinaryDataList(dataListId, out errorResult);
    bdl.TryGetEntry(recordSet, out entry, out error);
    if (entry == null)
    {
        result = dLItems;
        return false;
    }
    if (entry.IsEmpty())
    {
        // Empty recordset is not an error — return an empty list.
        result = dLItems;
        return true;
    }
    IIndexIterator idxItr = entry.FetchRecordsetIndexes();
    while (idxItr.HasMore())
    {
        var fetchNextIndex = idxItr.FetchNextIndex();
        // BUG FIX: TryFetchRecordsetColumnAtIndex can return null for a missing
        // column (its result is null-checked at other call sites); calling
        // Clone() unconditionally threw NullReferenceException instead of
        // surfacing the error string.
        var item = entry.TryFetchRecordsetColumnAtIndex(fieldNameToRetrieve, fetchNextIndex, out error);
        if (item != null)
        {
            dLItems.Add(item.Clone());
        }
    }
    result = dLItems;
    return string.IsNullOrEmpty(error);
}
public void LoopedIndexIterator_UnitTest_CanDetectIsEmptyCorrectly()
{
    var iterator = Dev2BinaryDataListFactory.CreateLoopedIndexIterator(10, 0);

    // Even with a zero iteration count the looped iterator reports not-empty.
    Assert.IsFalse(iterator.IsEmpty);
}
/// <summary>
/// Writes debug results for the next index of <paramref name="idxItr"/>:
/// the whole row when no field name is given, otherwise just that column.
/// Consumes exactly one index per invocation.
/// </summary>
void GetValues(IBinaryDataListEntry dlEntry, string value, int iterCnt, IIndexIterator idxItr, enRecordsetIndexType indexType, IList<IDebugItemResult> results, string initExpression, string labelText, string fieldName = null)
{
    string error;
    int index = idxItr.FetchNextIndex();
    bool wholeRow = string.IsNullOrEmpty(fieldName);

    if (wholeRow)
    {
        IList<IBinaryDataListItem> record = dlEntry.FetchRecordAt(index, out error);
        foreach (IBinaryDataListItem recordField in record)
        {
            GetValue(dlEntry, value, iterCnt, fieldName, indexType, results, initExpression, recordField, index, false, labelText);
        }
    }
    else
    {
        IBinaryDataListItem recordField = dlEntry.TryFetchRecordsetColumnAtIndex(fieldName, index, out error);
        bool ignoreCompare = false;
        // Single-column recordsets fall back to the upsert payload when the
        // named column cannot be resolved.
        if (recordField == null && dlEntry.Columns.Count == 1)
        {
            recordField = dlEntry.TryFetchIndexedRecordsetUpsertPayload(index, out error);
            ignoreCompare = true;
        }
        GetValue(dlEntry, value, iterCnt, fieldName, indexType, results, initExpression, recordField, index, ignoreCompare, labelText);
    }
}
/// <summary>
/// Clones this entry at the requested depth. Shape is always cloned (columns,
/// namespace, description); row data is copied only for the Data depths.
/// When <paramref name="clonedStorageId"/> equals this entry's DataListKey the
/// clone shares the same storage view via CopyTo.
/// </summary>
/// <param name="depth">How much to clone: shape only, or shape plus data.</param>
/// <param name="clonedStorageId">Storage id for the clone; NullDataListID keeps this entry's key.</param>
/// <param name="error">Always set to empty by this implementation.</param>
/// <returns>The cloned entry.</returns>
public IBinaryDataListEntry Clone(enTranslationDepth depth, Guid clonedStorageId, out string error)
{
    error = string.Empty;
    BinaryDataListEntry result;
    Guid dlKey = DataListKey;
    if (clonedStorageId != GlobalConstants.NullDataListID)
    {
        dlKey = clonedStorageId;
    }
    if (Columns != null)
    {
        // clone the columns
        IList<Dev2Column> cols = new List<Dev2Column>(Columns.Count);
        foreach (Dev2Column c in Columns)
        {
            cols.Add(new Dev2Column(c.ColumnName, c.ColumnDescription));
        }
        result = new BinaryDataListEntry(Namespace, Description, cols, dlKey);
    }
    else
    {
        // no columns — scalar-style entry
        result = new BinaryDataListEntry(Namespace, Description, dlKey);
    }
    // 2013.09.09 - we're the same, just adjust the view and return
    if (clonedStorageId.Equals(DataListKey))
    {
        // manip result's _internalObj aka the view of the data ;)
        result._internalObj.CopyTo(_internalObj);
        // copy express auditing data too ;)
        result.ComplexExpressionAuditor = ComplexExpressionAuditor;
    }
    if (depth == enTranslationDepth.Data || depth == enTranslationDepth.Data_With_Blank_OverWrite)
    {
        // clone _items
        if (IsRecordset)
        {
            IIndexIterator ii = _internalObj.Keys;
            bool isEmtpy = _internalObj.IsEmtpy;
            result._internalObj.IsEmtpy = isEmtpy;
            while (ii.HasMore())
            {
                int next = ii.FetchNextIndex();
                // clone the data
                IList<IBinaryDataListItem> items = _internalObj[next];
                IList<IBinaryDataListItem> clone = new List<IBinaryDataListItem>();
                // Bug 8725 — a null row must still produce an (empty) cloned row
                if (items != null)
                {
                    foreach (IBinaryDataListItem itm in items)
                    {
                        clone.Add(itm.Clone());
                    }
                }
                // now push back clone
                result._internalObj[next] = clone;
            }
            // ensure we reset min index if not 1 ;)
            var keys = _internalObj.Keys;
            var min = keys.MinIndex();
            var max = keys.MaxIndex();
            var gaps = _internalObj.FetchGaps();
            result._internalObj.MoveIndexDataForClone(min, max, gaps, false);
        }
        else
        {
            // scalar: single row at index 0
            IList<IBinaryDataListItem> items = _internalObj[0];
            IList<IBinaryDataListItem> clone = items.Select(itm => itm.Clone()).ToList();
            // now push back clone
            result._internalObj[0] = clone;
            result._internalObj.IsEmtpy = false;
        }
    }
    else // only wanted the shape cloned
    {
        // carry across index bookkeeping (min/max/gaps) without row data
        var keys = _internalObj.Keys;
        var min = keys.MinIndex();
        var max = keys.MaxIndex();
        var gaps = _internalObj.FetchGaps();
        result._internalObj.MoveIndexDataForClone(min, max, gaps, false);
    }
    result.ComplexExpressionAuditor = ComplexExpressionAuditor;
    return (result);
}
/// <summary>
/// Depth-merges <paramref name="cloned"/> into the template dictionary under
/// <paramref name="key"/>. Recordsets are merged row-by-row (or replaced
/// wholesale for blank-overwrite); scalars replace the existing entry only when
/// the clone has data or blank-overwrite was requested. Shape depth replaces
/// the entry outright. A null key is silently ignored.
/// </summary>
/// <param name="depth">The depth.</param>
/// <param name="cloned">The cloned.</param>
/// <param name="key">Template dictionary key; null is a no-op.</param>
/// <param name="errors">The errors accumulated during the merge.</param>
private void DepthMerge(enTranslationDepth depth, IBinaryDataListEntry cloned, string key, out IList<string> errors)
{
    errors = new List<string>();
    if (key != null)
    {
        if (depth == enTranslationDepth.Data || depth == enTranslationDepth.Data_With_Blank_OverWrite)
        {
            // safe to add
            if (cloned.IsRecordset)
            {
                // Inject into the intellisense options...
                CreateIntelliseneResult(key, cloned.Columns);
                //Massimo.Guerrera - 21-01-2013 - Added for the DeleteRecordOperation, it need to over write the data with blank values.
                if (depth == enTranslationDepth.Data_With_Blank_OverWrite)
                {
                    _templateDict[key] = cloned;
                }
                else
                {
                    // merge all the cloned rows into this reference
                    // NOTE(review): insertIdx is incremented but never read —
                    // the pragma suppresses the unused-variable warning.
#pragma warning disable 219
                    int insertIdx = 1; // always default to start of recordset
#pragma warning restore 219
                    // fetch last row id and build from there
                    IBinaryDataListEntry tmpRec;
                    bool isFound = _templateDict.TryGetValue(key, out tmpRec); // verify that the key exist first ;)
                    IIndexIterator ii = cloned.FetchRecordsetIndexes();
                    while (ii.HasMore())
                    {
                        int next = ii.FetchNextIndex();
                        string error;
                        IList<IBinaryDataListItem> cols = cloned.FetchRecordAt(next, out error);
                        if (error != string.Empty)
                        {
                            errors.Add(error);
                        }
                        if (!isFound)
                        {
                            // we need to boot strap the recordset ;)
                            // intellisense takecare of with template method ;)
                            TryCreateRecordsetTemplate(cloned.Namespace, cloned.Description, cloned.Columns, true, out error);
                            if (error != string.Empty)
                            {
                                errors.Add(error);
                            }
                            isFound = true;
                        }
                        // copy every column of this row at its original index
                        foreach (IBinaryDataListItem itm in cols)
                        {
                            _templateDict[key].TryPutRecordItemAtIndex(itm, next, out error);
                            if (error != string.Empty)
                            {
                                errors.Add(error);
                            }
                        }
                        insertIdx++;
                    }
                }
            }
            else
            {
                IBinaryDataListEntry thisTmp;
                // we have an entry, better check clone for empty
                if (_templateDict.TryGetValue(key, out thisTmp))
                {
                    string theValue = null;
                    try
                    {
                        theValue = cloned.FetchScalar().TheValue;
                    }
                    catch (Exception e)
                    {
                        // best-effort: a scalar with no value just stays null
                        Dev2Logger.Log.Error(e);
                    }
                    if (theValue != string.Empty && depth == enTranslationDepth.Data)
                    {
                        // The clone has data, over write it on the merge ;)
                        _templateDict[key] = cloned;
                        // Inject into the intellisense options...
                        CreateIntelliseneResult(key);
                    }
                    else if (depth == enTranslationDepth.Data_With_Blank_OverWrite)
                    {
                        // The user wants to over-write Blank data on the right with existing data on the left ;)
                        _templateDict[key] = cloned;
                        // Inject into the intellisense options...
                        CreateIntelliseneResult(key);
                    }
                }
                else
                {
                    // no entry, just place it there as there is no harm ;)
                    _templateDict[key] = cloned;
                    // Inject into the intellisense options...
                    CreateIntelliseneResult(key);
                }
            }
        }
        else if (depth == enTranslationDepth.Shape)
        {
            _templateDict[key] = cloned; // set blank data ;)
            // Inject into the intellisense options...
            CreateIntelliseneResult(key);
        }
    }
}
/// <summary>
/// Asserts that every row of the named recordset contains exactly the expected
/// field/value pairs: each row must match <paramref name="expectedValues"/>.Length
/// field-name/value combinations.
/// </summary>
/// <param name="dataList">Data list under test.</param>
/// <param name="name">Recordset name to validate.</param>
/// <param name="expectedValues">Field name → acceptable values for that field.</param>
static void ValidateRecordSet(IBinaryDataList dataList, string name, KeyValuePair<string, string[]>[] expectedValues)
{
    string error;
    IBinaryDataListEntry entry;
    dataList.TryGetEntry(name, out entry, out error);
    if (!string.IsNullOrEmpty(error))
    {
        Assert.Fail("Error fetching RecordSet '{0}' from Binary DataList", name);
    }
    else
    {
        IIndexIterator idxItr = entry.FetchRecordsetIndexes();
        while (idxItr.HasMore())
        {
            var fields = entry.FetchRecordAt(idxItr.FetchNextIndex(), out error);
            if (!string.IsNullOrEmpty(error))
            {
                Assert.Fail("Error fetching RecordSet '{0}' fields", name);
            }
            else
            {
                // Count fields that match an expected name with an acceptable
                // value; every expected pair must be matched once in this row.
                var foundCount = 0;
                foreach (var field in fields)
                {
                    foreach (var expectedValue in expectedValues)
                    {
                        if (field.FieldName == expectedValue.Key && expectedValue.Value.Contains(field.TheValue))
                        {
                            foundCount++;
                        }
                    }
                }
                Assert.AreEqual(expectedValues.Length, foundCount);
            }
        }
        // CLEANUP: removed a large commented-out duplicate of the loop above
        // (dead code kept from an earlier foreach-based implementation).
    }
}
/// <summary>
/// Checks the validity of the input argument and returns a deferred producer of
/// the search payloads. The returned Func, when invoked, resolves the recordset
/// named in <paramref name="to"/>.FieldsToSearch and yields one payload per
/// matched column value; an unmatched column clears the whole list.
/// NOTE(review): errors raised inside the Func are collected into the local
/// allErrors, not the out parameter — confirm callers expect that.
/// </summary>
/// <param name="to">Search definition (fields, start index).</param>
/// <param name="bdl">The BDL to resolve the recordset against.</param>
/// <param name="errors">Errors found while setting up (not while executing) the Func.</param>
/// <returns>A deferred producer of the matching payloads.</returns>
public Func<IList<RecordSetSearchPayload>> GenerateInputRange(IRecsetSearch to, IBinaryDataList bdl, out ErrorResultTO errors)
{
    errors = new ErrorResultTO();
    ErrorResultTO allErrors = new ErrorResultTO();
    Func<IList<RecordSetSearchPayload>> result = () =>
    {
        IList<RecordSetSearchPayload> fieldList = new List<RecordSetSearchPayload>();
        string InputField = to.FieldsToSearch;
        string recSet = DataListUtil.ExtractRecordsetNameFromValue(DataListUtil.StripLeadingAndTrailingBracketsFromValue(InputField));
        IBinaryDataListEntry bdle;
        string error;
        bdl.TryGetEntry(recSet, out bdle, out error);
        allErrors.AddError(error);
        if (bdle == null)
        {
            throw new RecordsetNotFoundException("Could not find Recordset [ " + recSet + " ]");
        }
        IList<Dev2Column> realCols = bdle.Columns;
        string[] tmpCols = InputField.Replace(" ", "").Split(',');
        // Travis.Frisinger : 09.25.2012
        // we need to adjust the tmpCols to avoid * causing crap with the match
        int loc = 0;
        foreach (string tc in tmpCols)
        {
            string recset = DataListUtil.ExtractRecordsetNameFromValue(tc);
            string field = DataListUtil.ExtractFieldNameFromValue(tc);
            // normalise each search token to a bracketed high-level recordset,
            // re-appending the field when one was specified
            string myNewSearch = DataListUtil.AddBracketsToValueIfNotExist(DataListUtil.MakeValueIntoHighLevelRecordset(recset));
            if (field != string.Empty)
            {
                myNewSearch = DataListUtil.MakeValueIntoHighLevelRecordset(recset) + "." + field;
            }
            tmpCols[loc] = DataListUtil.AddBracketsToValueIfNotExist(myNewSearch);
            loc++;
        }
        int pos = 0;
        bool found = true;
        int start;
        Int32.TryParse(to.StartIndex, out start);
        // unparseable or zero start index defaults to the first row
        if (start == 0)
        {
            start = 1;
        }
        while (pos < tmpCols.Length && found)
        {
            int innerPos;
            if (IsMatch(tmpCols[pos], recSet, realCols, out innerPos))
            {
                // token named a specific column: emit that column from start..last
                for (int i = start; i <= bdle.FetchLastRecordsetIndex(); i++)
                {
                    IBinaryDataListItem tmp = bdle.TryFetchRecordsetColumnAtIndex(realCols[innerPos].ColumnName, i, out error);
                    if (error != string.Empty)
                    {
                        allErrors.AddError(error);
                    }
                    RecordSetSearchPayload p = new RecordSetSearchPayload { Index = i, Payload = tmp.TheValue };
                    fieldList.Add(p);
                }
            }
            else
            {
                if (IsRecorsetWithoutField(tmpCols[pos], recSet))
                {
                    // token named the whole recordset: emit every column of every row
                    IIndexIterator ixItr = bdle.FetchRecordsetIndexes();
                    while (ixItr.HasMore())
                    {
                        int next = ixItr.FetchNextIndex();
                        foreach (Dev2Column col in realCols)
                        {
                            IBinaryDataListItem tmp = bdle.TryFetchRecordsetColumnAtIndex(col.ColumnName, next, out error);
                            RecordSetSearchPayload p = new RecordSetSearchPayload { Index = next, Payload = tmp.TheValue };
                            fieldList.Add(p);
                        }
                    }
                }
                else
                {
                    found = false;
                }
            }
            pos++;
        }
        // any unmatched token invalidates the whole search
        if (!found)
        {
            fieldList.Clear();
        }
        return (fieldList);
    };
    return (result);
}
/// <summary>
/// Builds debug items for a recordset expression. Without an auditor, the
/// recordset is walked index-by-index via GetValues; with an auditor, each
/// audit item is emitted as a variable/label/value triple (star index only).
/// </summary>
/// <param name="expression">The recordset expression being debugged.</param>
/// <param name="dlEntry">Entry holding the data (and possibly an expression auditor).</param>
/// <param name="value">Raw value associated with the expression.</param>
/// <param name="iterCnt">Current iteration count.</param>
/// <param name="labelText">Label to attach to emitted results.</param>
/// <returns>The accumulated debug item results.</returns>
IList<IDebugItemResult> CreateRecordsetDebugItems(string expression, IBinaryDataListEntry dlEntry, string value, int iterCnt, string labelText)
{
    var results = new List<IDebugItemResult>();
    if (dlEntry.ComplexExpressionAuditor == null)
    {
        string initExpression = expression;
        string fieldName = DataListUtil.ExtractFieldNameFromValue(expression);
        enRecordsetIndexType indexType = DataListUtil.GetRecordsetIndexType(expression);
        // a blank index with no field behaves like a star (all rows)
        if (indexType == enRecordsetIndexType.Blank && string.IsNullOrEmpty(fieldName))
        {
            indexType = enRecordsetIndexType.Star;
        }
        if (indexType == enRecordsetIndexType.Star || indexType == enRecordsetIndexType.Numeric)
        {
            IIndexIterator idxItr = dlEntry.FetchRecordsetIndexes();
            // GetValues advances the iterator one index per call
            while (idxItr.HasMore())
            {
                GetValues(dlEntry, value, iterCnt, idxItr, indexType, results, initExpression, labelText, fieldName);
            }
        }
    }
    else
    {
        // Complex expressions are handled differently ;)
        ComplexExpressionAuditor auditor = dlEntry.ComplexExpressionAuditor;
        enRecordsetIndexType indexType = DataListUtil.GetRecordsetIndexType(expression);
        foreach (ComplexExpressionAuditItem item in auditor.FetchAuditItems())
        {
            // group index extraction is best-effort; -1 means "no group"
            int grpIdx = -1;
            try
            {
                grpIdx = Int32.Parse(DataListUtil.ExtractIndexRegionFromRecordset(item.TokenBinding));
            }
            // ReSharper disable EmptyGeneralCatchClause
            catch (Exception)
            // ReSharper restore EmptyGeneralCatchClause
            {
                // Best effort ;)
            }
            if (indexType == enRecordsetIndexType.Star)
            {
                string displayExpression = item.Expression.Replace(item.Token, item.RawExpression);
                results.Add(new DebugItemResult { Type = DebugItemResultType.Variable, Value = displayExpression, GroupName = displayExpression, GroupIndex = grpIdx });
                results.Add(new DebugItemResult { Type = DebugItemResultType.Label, Value = GlobalConstants.EqualsExpression, GroupName = displayExpression, GroupIndex = grpIdx });
                results.Add(new DebugItemResult { Type = DebugItemResultType.Value, Value = item.BoundValue, GroupName = displayExpression, GroupIndex = grpIdx });
            }
        }
    }
    return (results);
}
/// <summary>
/// Resolves each iterated index to its stored row, then returns the indexes
/// of rows that are unique with respect to the supplied column positions.
/// </summary>
/// <param name="sk">Base storage key for the recordset.</param>
/// <param name="keys">Iterator over candidate row indexes.</param>
/// <param name="colIdx">Columns used when comparing rows for equality.</param>
/// <returns>One row index per distinct column-value combination.</returns>
public List<int> DistinctGetRows(StorageKey sk, IIndexIterator keys, List<int> colIdx)
{
    var loadedRows = new List<IndexBasedBinaryDataListRow>();

    // Pull each row out of level-zero storage; absence is a hard failure.
    while (keys.HasMore())
    {
        var currentIndex = keys.FetchNextIndex();
        var lookupKey = new StorageKey(sk.UID, currentIndex + sk.UniqueKey);
        var storedRow = _levelZeroCache[lookupKey];
        if (storedRow == null)
        {
            throw new Exception(string.Format("Critical error. No value in storage for index {0}", currentIndex));
        }
        loadedRows.Add(new IndexBasedBinaryDataListRow { Row = storedRow, Index = currentIndex });
    }

    var equalityComparer = new BinaryDataListRowEqualityComparer(colIdx);
    var distinctIndexes = loadedRows.Distinct(equalityComparer).Select(row => row.Index);
    return distinctIndexes.ToList();
}
/// <summary>
/// Executes the for-each activity under a lock: bootstraps the execution
/// payload, runs the inner activity once per index yielded by the iterator,
/// then restores parent/nesting state and reports errors in the finally block.
/// </summary>
/// <param name="dataObject">Execution context; parent id and nesting level are mutated and restored.</param>
/// <param name="update">Update ordinal forwarded to inner executions.</param>
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
{
    lock (_forEachExecutionObject)
    {
        _previousParentId = dataObject.ParentInstanceID;
        _debugInputs = new List<DebugItem>();
        _debugOutputs = new List<DebugItem>();
        dataObject.ForEachNestingLevel++;
        ErrorResultTO allErrors = new ErrorResultTO();
        IIndexIterator itr = null;
        InitializeDebug(dataObject);
        try
        {
            ErrorResultTO errors;
            ForEachBootstrapTO exePayload = FetchExecutionType(dataObject, dataObject.Environment, out errors, update);
            foreach (var err in errors.FetchErrors())
            {
                dataObject.Environment.AddError(err);
            }
            itr = exePayload.IndexIterator;
            string error;
            ForEachInnerActivityTO innerA = GetInnerActivity(out error);
            var exeAct = innerA.InnerActivity;
            allErrors.AddError(error);
            if (dataObject.IsDebugMode())
            {
                DispatchDebugState(dataObject, StateType.Before, update);
            }
            dataObject.ParentInstanceID = UniqueID;
            dataObject.IsDebugNested = true;
            if (dataObject.IsDebugMode())
            {
                DispatchDebugState(dataObject, StateType.After, update);
            }
            exePayload.InnerActivity = innerA;
            // run the inner activity once per index; the index doubles as the
            // inner update ordinal except for fixed-count execution
            while (itr.HasMore())
            {
                operationalData = exePayload;
                int idx = exePayload.IndexIterator.FetchNextIndex();
                int innerupdate = 0;
                if (exePayload.ForEachType != enForEachType.NumOfExecution)
                {
                    innerupdate = idx;
                }
                exeAct.Execute(dataObject, innerupdate);
                operationalData.IncIterationCount();
            }
            if (errors.HasErrors())
            {
                allErrors.MergeErrors(errors);
            }
            if (dataObject.IsDebugMode())
            {
                // NOTE(review): _debugOutputs is assigned twice in a row here —
                // the second assignment is redundant dead code.
                _debugOutputs = new List<DebugItem>();
                _debugOutputs = new List<DebugItem>();
                DispatchDebugState(dataObject, StateType.Duration, 0);
            }
        }
        catch (Exception e)
        {
            Dev2Logger.Log.Error("DSFForEach", e);
            allErrors.AddError(e.Message);
        }
        finally
        {
            if (itr != null)
            {
                if (ForEachType != enForEachType.NumOfExecution)
                {
                    RestoreHandlerFn();
                }
            }
            dataObject.ParentInstanceID = _previousParentId;
            dataObject.ForEachNestingLevel--;
            dataObject.IsDebugNested = false;
            // Handle Errors
            // NOTE(review): the state resets below repeat the three lines above
            // inside the error branch, decrementing ForEachNestingLevel a second
            // time when errors occurred — looks unintentional; confirm.
            if (allErrors.HasErrors())
            {
                dataObject.ParentInstanceID = _previousParentId;
                dataObject.ForEachNestingLevel--;
                dataObject.IsDebugNested = false;
                // Handle Errors
                if (allErrors.HasErrors())
                {
                    DisplayAndWriteError("DsfForEachActivity", allErrors);
                    foreach (var fetchError in allErrors.FetchErrors())
                    {
                        dataObject.Environment.AddError(fetchError);
                    }
                    dataObject.ParentInstanceID = _previousParentId;
                }
            }
        }
    }
}
public void LoopedIndexIterator_UnitTest_ReturnsCorrectCount()
{
    var iterator = Dev2BinaryDataListFactory.CreateLoopedIndexIterator(10, 2);

    // Count reflects the requested number of iterations, not the looped value.
    Assert.AreEqual(2, iterator.Count);
}
/// <summary>
/// Executes the for-each activity: bootstraps the payload, runs the inner
/// activity per index with null-safe navigation, then — in the finally block —
/// restores handlers, records service-test assertion results, restores
/// parent/nesting state and reports accumulated errors.
/// </summary>
/// <param name="dataObject">Execution context; parent id and nesting level are mutated and restored.</param>
/// <param name="update">Update ordinal forwarded to inner executions.</param>
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
{
    _previousParentId = dataObject.ParentInstanceID;
    _debugInputs = new List<DebugItem>();
    _debugOutputs = new List<DebugItem>();
    ErrorResultTO allErrors = new ErrorResultTO();
    IIndexIterator itr = null;
    InitializeDebug(dataObject);
    dataObject.ForEachNestingLevel++;
    try
    {
        ErrorResultTO errors;
        ForEachBootstrapTO exePayload = FetchExecutionType(dataObject, dataObject.Environment, out errors, update);
        foreach (var err in errors.FetchErrors())
        {
            dataObject.Environment.AddError(err);
        }
        itr = exePayload.IndexIterator;
        string error;
        ForEachInnerActivityTO innerA = GetInnerActivity(out error);
        var exeAct = innerA?.InnerActivity;
        allErrors.AddError(error);
        if (dataObject.IsDebugMode())
        {
            DispatchDebugState(dataObject, StateType.Before, update);
        }
        dataObject.ParentInstanceID = UniqueID;
        dataObject.IsDebugNested = true;
        if (dataObject.IsDebugMode())
        {
            DispatchDebugState(dataObject, StateType.After, update);
        }
        exePayload.InnerActivity = innerA;
        // null-safe iteration: a missing iterator simply skips the loop
        while (itr?.HasMore() ?? false)
        {
            operationalData = exePayload;
            int idx = exePayload.IndexIterator.FetchNextIndex();
            int innerupdate = 0;
            if (exePayload.ForEachType != enForEachType.NumOfExecution)
            {
                innerupdate = idx;
            }
            _childUniqueID = exeAct?.UniqueID;
            exeAct?.Execute(dataObject, innerupdate);
            operationalData.IncIterationCount();
        }
        if (errors.HasErrors())
        {
            allErrors.MergeErrors(errors);
        }
    }
    catch (Exception e)
    {
        Dev2Logger.Error("DSFForEach", e);
        allErrors.AddError(e.Message);
    }
    finally
    {
        if (itr != null)
        {
            if (ForEachType != enForEachType.NumOfExecution)
            {
                RestoreHandlerFn();
            }
        }
        // locate this activity's step in the flattened service-test tree
        var serviceTestStep = dataObject.ServiceTest?.TestSteps?.Flatten(step => step.Children)?.FirstOrDefault(step => step.UniqueId == _originalUniqueID);
        if (dataObject.IsServiceTestExecution)
        {
            var serviceTestSteps = serviceTestStep?.Children;
            UpdateDebugStateWithAssertions(dataObject, serviceTestSteps?.ToList());
            if (serviceTestStep != null)
            {
                var testRunResult = new TestRunResult();
                GetFinalTestRunResult(serviceTestStep, testRunResult);
                serviceTestStep.Result = testRunResult;
            }
        }
        dataObject.ParentInstanceID = _previousParentId;
        dataObject.ForEachNestingLevel--;
        dataObject.IsDebugNested = false;
        if (dataObject.IsDebugMode())
        {
            if (dataObject.IsServiceTestExecution && serviceTestStep != null)
            {
                // attach the assertion outcome to the last debug state for this step
                var debugItems = TestDebugMessageRepo.Instance.GetDebugItems(dataObject.ResourceID, dataObject.TestName);
                debugItems = debugItems.Where(state => state.WorkSurfaceMappingId == serviceTestStep.UniqueId).ToList();
                var debugStates = debugItems.LastOrDefault();
                var debugItemStaticDataParams = new DebugItemServiceTestStaticDataParams(serviceTestStep.Result.Message, serviceTestStep.Result.RunTestResult == RunResult.TestFailed);
                DebugItem itemToAdd = new DebugItem();
                itemToAdd.AddRange(debugItemStaticDataParams.GetDebugItemResult());
                debugStates?.AssertResultList?.Add(itemToAdd);
            }
            DispatchDebugState(dataObject, StateType.Duration, 0);
        }
        // Handle Errors
        // NOTE(review): the resets below repeat the three state resets above
        // inside the error branch, decrementing ForEachNestingLevel a second
        // time when errors occurred — looks unintentional; confirm.
        if (allErrors.HasErrors())
        {
            dataObject.ParentInstanceID = _previousParentId;
            dataObject.ForEachNestingLevel--;
            dataObject.IsDebugNested = false;
            // Handle Errors
            if (allErrors.HasErrors())
            {
                DisplayAndWriteError("DsfForEachActivity", allErrors);
                foreach (var fetchError in allErrors.FetchErrors())
                {
                    dataObject.Environment.AddError(fetchError);
                }
                dataObject.ParentInstanceID = _previousParentId;
            }
        }
    }
}
public void LoopedIndexIterator_UnitTest_ReturnsCorrectMinIndex()
{
    var iterator = Dev2BinaryDataListFactory.CreateLoopedIndexIterator(10, 0);

    // The looped iterator pins every index to the single looped value.
    Assert.AreEqual(10, iterator.MinIndex());
}