        public void Eof_Op_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder
            {
                ToTokenize   = Search,
                ReverseOrder = true
            };


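            // The EoF op returns whatever remains of the string as a single token,
            // so even with ReverseOrder the whole input comes back in one NextToken() call.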
            dtb.AddEoFOp();

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            int cnt = 0;

            while (dt.HasMoreOps())
            {
                result += dt.NextToken();
                cnt++;
            }

            Assert.AreEqual("AB-CD-DE-FG-HI", result);
            Assert.AreEqual(1, cnt);
        }
        public void More_Than_One_Op_Fwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder {
                ToTokenize = Search
            };


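            // Ops apply in the order they were added: the index op returns the first
            // two characters, then the EoF op returns the remainder, giving two tokens.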
            dtb.AddIndexOp(2);
            dtb.AddEoFOp();

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            int cnt = 0;

            while (dt.HasMoreOps())
            {
                result += dt.NextToken();
                cnt++;
            }

            Assert.AreEqual("AB-CD-DE-FG-HI", result);
            Assert.AreEqual(2, cnt);
        }
        public void Single_Token_Performance_Op()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder {
                ToTokenize = TestStrings.tokenizerBase
            };


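            // Split on a single-character delimiter; the second argument (false) discards
            // the delimiter from the returned tokens. The loop below times 100 000 NextToken() calls.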
            dtb.AddTokenOp("-", false);

            IDev2Tokenizer dt = dtb.Generate();

            int       opCnt = 0;
            Stopwatch sw    = new Stopwatch();

            sw.Start();
            while (dt.HasMoreOps() && opCnt < 100000)
            {
                dt.NextToken();
                opCnt++;
            }
            sw.Stop();

            long exeTime = sw.ElapsedMilliseconds;

            // Can we do 100k ops in less than 1.2s?
            // I sure hope so ;)
            Console.WriteLine(@"Total Time : " + exeTime);
            Assert.IsTrue(opCnt == 100000 && exeTime < 1200, "Expected it to take less than 1200 ms but it took " + exeTime + " ms.");
        }
        private List <string> Split()
        {
            List <string> results = new List <string>();

            try
            {
                IDev2Tokenizer tokenizer = CreateSplitPattern(VariableListString, SplitType, SplitToken);

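                // Drain the tokenizer, keeping only non-empty tokens; a failure while
                // tokenizing is recorded as a critical error and disables further adds.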
                while (tokenizer.HasMoreOps())
                {
                    string tmp = tokenizer.NextToken();
                    if (!string.IsNullOrEmpty(tmp))
                    {
                        results.Add(tmp);
                    }
                }
            }
            catch (Exception e)
            {
                _errorColletion.Add(new KeyValuePair <ErrorType, string>(ErrorType.Critical, e.Message));
                CanAdd = false;
            }



            return results;
        }
        public void Three_Token_Performance_Op()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder {
                ToTokenize = TestStrings.tokenizerBase
            };


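            // Same timing harness as above, but splitting on the three-character
            // delimiter "AB-"; 35 000 NextToken() calls are timed.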
            dtb.AddTokenOp("AB-", false);

            IDev2Tokenizer dt = dtb.Generate();

            int       opCnt = 0;
            Stopwatch sw    = new Stopwatch();

            sw.Start();
            while (dt.HasMoreOps() && opCnt < 35000)
            {
                dt.NextToken();
                opCnt++;
            }
            sw.Stop();

            long exeTime = sw.ElapsedMilliseconds;

            // Can we do it in less than 2.5s?
            // I sure hope so ;)
            Console.WriteLine("Total Time : " + exeTime);
            Assert.IsTrue(opCnt == 35000 && exeTime < 2500, "Expected it to take less than 2500 ms but it took " + exeTime + " ms.");
        }
Example n. 6
        public void Empty_String_Error()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize = string.Empty;

            dtb.AddEoFOp();

            try
            {
                dtb.Generate();

                Assert.Fail("Expected Generate() to throw for an empty ToTokenize string.");
            }
            catch (Exception)
            {
                // Expected: an empty input string cannot be tokenized.
            }
        }
Example n. 7
        public void Token_Op_With_Token_Fwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize = search;

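            // The second argument (true) keeps the delimiter as part of each returned
            // token, so concatenating the tokens reproduces the original string.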
            dtb.AddTokenOp("-", true);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += dt.NextToken();
            }

            Assert.AreEqual("AB-CD-DE-FG-HI", result);
        }
Example n. 8
        public void Three_Token_Op_Fwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize = search2;

            dtb.AddTokenOp("AB-", false);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += " " + dt.NextToken();
            }

            Assert.AreEqual("  CD- CD", result);
        }
Example n. 9
        public void Index_Op_Fwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize = search;

            dtb.AddIndexOp(2);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += " " + dt.NextToken();
            }

            Assert.AreEqual(" AB -C D- DE -F G- HI", result);
        }
Example n. 10
        public void Index_Op_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize   = search;
            dtb.ReverseOrder = true;

            dtb.AddIndexOp(2);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += "." + dt.NextToken();
            }

            Assert.AreEqual(".HI.G-.-F.DE.D-.-C.AB", result);
        }
Example n. 11
        public void Token_Op_With_Token_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize   = search;
            dtb.ReverseOrder = true;

            dtb.AddTokenOp("-", true);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += "." + dt.NextToken();
            }

            Assert.AreEqual(".-HI.-FG.-DE.-CD.AB", result);
        }
Example n. 12
        public void Two_Token_Op_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();

            dtb.ToTokenize   = search2;
            dtb.ReverseOrder = true;

            dtb.AddTokenOp("B-", false);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += "." + dt.NextToken();
            }

            Assert.AreEqual(".CD.CD-A.A", result);
        }
Example n. 13
        public void Single_Token_Op_Fwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder {
                ToTokenize = Search
            };


            dtb.AddTokenOp("-", false);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += dt.NextToken();
            }

            Assert.AreEqual("ABCDDEFGHI", result);
        }
Example n. 14
        public void Single_Token_Op_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder
            {
                ReverseOrder = true,
                ToTokenize   = Search
            };


            dtb.AddTokenOp("-", false);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += "." + dt.NextToken();
            }

            Assert.AreEqual(".HI.FG.DE.CD.AB", result);
        }
Example n. 15
        public void Token_And_Index_Op_Bwd()
        {
            Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder
            {
                ToTokenize   = Search,
                ReverseOrder = true
            };


            dtb.AddTokenOp("-", false);
            dtb.AddIndexOp(3);

            IDev2Tokenizer dt = dtb.Generate();

            string result = string.Empty;

            while (dt.HasMoreOps())
            {
                result += "." + dt.NextToken();
            }

            Assert.AreEqual(".HI.-FG.DE.-CD.AB", result);
        }
Example n. 16
        public static void UpsertTokens(Collection <ObservablePair <string, string> > target, IDev2Tokenizer tokenizer) => UpsertTokens(target, tokenizer, null, null, true);
Example n. 17
        public static void UpsertTokens(Collection <ObservablePair <string, string> > target, IDev2Tokenizer tokenizer, string tokenPrefix, string tokenSuffix, bool removeEmptyEntries)
        {
            if (target == null)
            {
                throw new ArgumentNullException(nameof(target));
            }

            target.Clear();

            if (tokenizer == null)
            {
                return;
            }


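            // One pair per token: blank tokens are kept only when removeEmptyEntries is false;
            // non-blank tokens are stripped of existing brackets, wrapped with the optional
            // prefix/suffix, and re-bracketed before being added.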
            while (tokenizer.HasMoreOps())
            {
                var token = tokenizer.NextToken();
                if (string.IsNullOrEmpty(token))
                {
                    if (!removeEmptyEntries)
                    {
                        target.Add(new ObservablePair <string, string>(string.Empty, string.Empty));
                    }
                }
                else
                {
                    token = AddBracketsToValueIfNotExist($"{tokenPrefix}{StripLeadingAndTrailingBracketsFromValue(token)}{tokenSuffix}");

                    target.Add(new ObservablePair <string, string>(token, string.Empty));
                }
            }

            foreach (var observablePair in target)
            {
                observablePair.Key = observablePair.Key.Replace(" ", "");
            }
        }
Example n. 18
        protected override void ExecuteTool(IDSFDataObject dataObject, int update)
        {
            _indexCounter = 1;

            ErrorResultTO        allErrors = new ErrorResultTO();
            var                  env       = dataObject.Environment;
            WarewolfListIterator iter      = new WarewolfListIterator();

            InitializeDebug(dataObject);
            try
            {
                var sourceString = SourceString ?? "";
                if (dataObject.IsDebugMode())
                {
                    AddDebugInputItem(new DebugEvalResult(sourceString, "String to Split", env, update));
                    AddDebugInputItem(new DebugItemStaticDataParams(ReverseOrder ? "Backward" : "Forward", "Process Direction"));
                    AddDebugInputItem(new DebugItemStaticDataParams(SkipBlankRows ? "Yes" : "No", "Skip blank rows"));
                    AddDebug(ResultsCollection, dataObject.Environment, update);
                }
                var res = new WarewolfIterator(env.Eval(sourceString, update));
                iter.AddVariableToIterateOn(res);
                IDictionary <string, int> positions = new Dictionary <string, int>();
                CleanArguments(ResultsCollection);
                ResultsCollection.ToList().ForEach(a =>
                {
                    if (!positions.ContainsKey(a.OutputVariable))
                    {
                        positions.Add(a.OutputVariable, update == 0 ? 1 : update);
                    }
                    IsSingleValueRule.ApplyIsSingleValueRule(a.OutputVariable, allErrors);
                });
                bool singleInnerIteration = ArePureScalarTargets(ResultsCollection);
                var  resultsEnumerator    = ResultsCollection.GetEnumerator();
                var  debugDictionary      = new List <string>();
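                // Each evaluated source value is split independently; tokens are assigned to
                // the output variables in round-robin order via resultsEnumerator.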
                while (res.HasMoreData())
                {
                    const int OpCnt = 0; // note: OpCnt never changes, so the blank-row check below only matches row 0

                    var item = res.GetNextValue(); // item is the thing we split on
                    if (!string.IsNullOrEmpty(item))
                    {
                        string val = item;

                        var blankRows = new List <int>();
                        if (SkipBlankRows)
                        {
                            var strings         = val.Split(new[] { Environment.NewLine, "\r", "\n" }, StringSplitOptions.RemoveEmptyEntries);
                            var newSourceString = string.Join(Environment.NewLine, strings);
                            val = newSourceString;
                        }
                        else
                        {
                            var strings = val.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
                            for (int blankRow = 0; blankRow < strings.Length; blankRow++)
                            {
                                if (String.IsNullOrEmpty(strings[blankRow]))
                                {
                                    blankRows.Add(blankRow);
                                }
                            }
                        }

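                        // Build the tokenizer from the configured split definitions; any build
                        // errors are merged into allErrors and skip the assignment loop below.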
                        ErrorResultTO  errors;
                        IDev2Tokenizer tokenizer = CreateSplitPattern(ref val, ResultsCollection, env, out errors, update);
                        allErrors.MergeErrors(errors);

                        if (!allErrors.HasErrors())
                        {
                            if (tokenizer != null)
                            {
                                int pos = 0;
                                int end = ResultsCollection.Count - 1;

                                // track used tokens so we can adjust flushing ;)
                                while (tokenizer.HasMoreOps())
                                {
                                    var currentval = resultsEnumerator.MoveNext();
                                    if (!currentval)
                                    {
                                        if (singleInnerIteration)
                                        {
                                            break;
                                        }
                                        resultsEnumerator.Reset();
                                        resultsEnumerator.MoveNext();
                                    }
                                    string tmp = tokenizer.NextToken();

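                                    // A token that still starts with a newline marks a new source row:
                                    // pad every output variable with a blank value before carrying on.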
                                    if (tmp.StartsWith(Environment.NewLine) && !SkipBlankRows)
                                    {
                                        resultsEnumerator.Reset();
                                        while (resultsEnumerator.MoveNext())
                                        {
                                            var tovar = resultsEnumerator.Current.OutputVariable;
                                            if (!String.IsNullOrEmpty(tovar))
                                            {
                                                var assignToVar = ExecutionEnvironment.ConvertToIndex(tovar, positions[tovar]);
                                                env.AssignWithFrame(new AssignValue(assignToVar, ""), update);
                                                positions[tovar] = positions[tovar] + 1;
                                            }
                                        }
                                        resultsEnumerator.Reset();
                                        resultsEnumerator.MoveNext();
                                    }
                                    if (blankRows.Contains(OpCnt) && blankRows.Count != 0)
                                    {
                                        tmp = tmp.Replace(Environment.NewLine, "");
                                        while (pos != end + 1)
                                        {
                                            pos++;
                                        }
                                    }
                                    var outputVar = resultsEnumerator.Current.OutputVariable;

                                    if (!String.IsNullOrEmpty(outputVar))
                                    {
                                        var assignVar = ExecutionEnvironment.ConvertToIndex(outputVar, positions[outputVar]);
                                        // Note: all three branches below currently perform the same assignment.
                                        if (ExecutionEnvironment.IsRecordsetIdentifier(assignVar))
                                        {
                                            env.AssignWithFrame(new AssignValue(assignVar, tmp), update);
                                        }
                                        else if (ExecutionEnvironment.IsScalar(assignVar) && positions[outputVar] == 1)
                                        {
                                            env.AssignWithFrame(new AssignValue(assignVar, tmp), update);
                                        }
                                        else
                                        {
                                            env.AssignWithFrame(new AssignValue(assignVar, tmp), update);
                                        }
                                        positions[outputVar] = positions[outputVar] + 1;
                                    }
                                    if (dataObject.IsDebugMode())
                                    {
                                        var debugItem   = new DebugItem();
                                        var outputVarTo = resultsEnumerator.Current.OutputVariable;
                                        AddDebugItem(new DebugEvalResult(outputVarTo, "", env, update), debugItem);
                                        if (!debugDictionary.Contains(outputVarTo))
                                        {
                                            debugDictionary.Add(outputVarTo);
                                        }
                                    }
                                    if (pos != end)
                                    {
                                        pos++;
                                    }
                                }
                            }
                        }
                    }
                    env.CommitAssign();
                    if (singleInnerIteration)
                    {
                        break;
                    }
                }

                if (dataObject.IsDebugMode())
                {
                    var outputIndex = 1;
                    foreach (var varDebug in debugDictionary)
                    {
                        var debugItem = new DebugItem();
                        AddDebugItem(new DebugItemStaticDataParams("", outputIndex.ToString(CultureInfo.InvariantCulture)), debugItem);
                        var dataSplitUsesStarForOutput = varDebug.Replace("().", "(*).");
                        AddDebugItem(new DebugEvalResult(dataSplitUsesStarForOutput, "", env, update), debugItem);
                        _debugOutputs.Add(debugItem);
                        outputIndex++;
                    }
                }
            }
            catch (Exception e)
            {
                Dev2Logger.Error("DSFDataSplit", e);
                allErrors.AddError(e.Message);
            }
            finally
            {
                // Handle Errors
                var hasErrors = allErrors.HasErrors();
                if (hasErrors)
                {
                    DisplayAndWriteError("DsfDataSplitActivity", allErrors);
                    var errorString = allErrors.MakeDisplayReady();
                    dataObject.Environment.AddError(errorString);
                }

                if (dataObject.IsDebugMode())
                {
                    DispatchDebugState(dataObject, StateType.Before, update);
                    DispatchDebugState(dataObject, StateType.After, update);
                }
            }
        }
Example n. 19
        // ReSharper restore RedundantOverridenMember

        protected override void OnExecute(NativeActivityContext context)
        {
            _debugInputs  = new List <DebugItem>();
            _debugOutputs = new List <DebugItem>();
            _indexCounter = 1;
            IDSFDataObject    dataObject = context.GetExtension <IDSFDataObject>();
            IDataListCompiler compiler   = DataListFactory.CreateDataListCompiler();
            Guid          dlId           = dataObject.DataListID;
            ErrorResultTO allErrors      = new ErrorResultTO();
            ErrorResultTO errors;

            _datalistString = compiler.ConvertFrom(dataObject.DataListID, DataListFormat.CreateFormat(GlobalConstants._Studio_XML), Dev2.DataList.Contract.enTranslationDepth.Shape, out errors).ToString();

            InitializeDebug(dataObject);
            try
            {
                var sourceString = SourceString ?? "";
                IBinaryDataListEntry expressionsEntry = compiler.Evaluate(dlId, enActionType.User, sourceString, false, out errors);

                if (dataObject.IsDebugMode())
                {
                    AddDebugInputItem(new DebugItemVariableParams(sourceString, "String to Split", expressionsEntry, dlId));
                    AddDebugInputItem(new DebugItemStaticDataParams(ReverseOrder ? "Backward" : "Forward", "Process Direction"));
                    AddDebugInputItem(new DebugItemStaticDataParams(SkipBlankRows ? "Yes" : "No", "Skip blank rows"));
                }
                CleanArguments(ResultsCollection);
                ResultsCollection.ToList().ForEach(a => IsSingleValueRule.ApplyIsSingleValueRule(a.OutputVariable, allErrors));
                if (ResultsCollection.Count > 0)
                {
                    if (dataObject.IsDebugMode())
                    {
                        AddDebug(ResultsCollection, compiler, dlId);
                    }

                    CheckIndex(sourceString);
                    allErrors.MergeErrors(errors);
                    IDev2DataListEvaluateIterator itr = Dev2ValueObjectFactory.CreateEvaluateIterator(expressionsEntry);
                    IDev2DataListUpsertPayloadBuilder <string> toUpsert = Dev2DataListBuilderFactory.CreateStringDataListUpsertBuilder(true);
                    bool singleInnerIteration = ArePureScalarTargets(ResultsCollection);
                    bool exit = false;
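                    // Walk every row of the evaluated expression; each column value is
                    // tokenized and upserted on its own, with live flushing per frame.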
                    while (itr.HasMoreRecords())
                    {
                        IList <IBinaryDataListItem> cols = itr.FetchNextRowData();
                        foreach (IBinaryDataListItem c in cols)
                        {
                            // set up live flushing iterator details
                            toUpsert.HasLiveFlushing      = true;
                            toUpsert.LiveFlushingLocation = dlId;

#pragma warning disable 219
                            int opCnt = 0;
#pragma warning restore 219
                            if (!string.IsNullOrEmpty(c.TheValue))
                            {
                                string val       = c.TheValue;
                                var    blankRows = new List <int>();
                                if (SkipBlankRows)
                                {
                                    var strings         = val.Split(new[] { Environment.NewLine, "\r", "\n" }, StringSplitOptions.RemoveEmptyEntries);
                                    var newSourceString = string.Join(Environment.NewLine, strings);
                                    val = newSourceString;
                                }
                                else
                                {
                                    var strings = val.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
                                    for (int blankRow = 0; blankRow < strings.Length; blankRow++)
                                    {
                                        if (String.IsNullOrEmpty(strings[blankRow]))
                                        {
                                            blankRows.Add(blankRow);
                                        }
                                    }
                                }

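                                // Build the tokenizer for this value; build errors are merged
                                // into allErrors and skip the upsert below.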
                                IDev2Tokenizer tokenizer = CreateSplitPattern(ref val, ResultsCollection, compiler, dlId, out errors);
                                allErrors.MergeErrors(errors);

                                if (!allErrors.HasErrors())
                                {
                                    if (tokenizer != null)
                                    {
                                        int pos = 0;
                                        int end = (ResultsCollection.Count - 1);

                                        // track used tokens so we can adjust flushing ;)
                                        HashSet <string> usedTokens = new HashSet <string>();

                                        while (tokenizer.HasMoreOps() && !exit)
                                        {
                                            string tmp = tokenizer.NextToken();
                                            if (blankRows.Contains(opCnt) && blankRows.Count != 0)
                                            {
                                                tmp = tmp.Replace(Environment.NewLine, "");
                                                while (pos != end + 1)
                                                {
                                                    UpdateOutputVariableWithValue(pos, usedTokens, toUpsert, "");
                                                    pos++;
                                                }
                                                pos = CompletedRow(usedTokens, toUpsert, singleInnerIteration, ref opCnt, ref exit);
                                            }
                                            UpdateOutputVariableWithValue(pos, usedTokens, toUpsert, tmp);

                                            // Per pass
                                            if (pos == end)
                                            {
                                                //row has been processed
                                                pos = CompletedRow(usedTokens, toUpsert, singleInnerIteration, ref opCnt, ref exit);
                                            }
                                            else
                                            {
                                                pos++;
                                            }
                                        }

                                        // flush the final frame ;)

                                        toUpsert.FlushIterationFrame();
                                        toUpsert = Dev2DataListBuilderFactory.CreateStringDataListUpsertBuilder(true);
                                    }
                                }
                            }
                        }
                    }
                    if (dataObject.IsDebugMode() && !allErrors.HasErrors())
                    {
                        AddResultToDebug(compiler, dlId);
                    }
                }
            }
            catch (Exception e)
            {
                Dev2Logger.Log.Error("DSFDataSplit", e);
                allErrors.AddError(e.Message);
            }
            finally
            {
                // Handle Errors
                var hasErrors = allErrors.HasErrors();
                if (hasErrors)
                {
                    DisplayAndWriteError("DsfDataSplitActivity", allErrors);
                    compiler.UpsertSystemTag(dlId, enSystemTag.Dev2Error, allErrors.MakeDataListReady(), out errors);
                }

                if (dataObject.IsDebugMode())
                {
                    if (hasErrors)
                    {
                        AddResultToDebug(compiler, dlId);
                    }
                    DispatchDebugState(context, StateType.Before);
                    DispatchDebugState(context, StateType.After);
                }
            }
        }