public void Three_Token_Perfomance_Op()
{
    // Performance check: 35 000 NextToken() ops against the three-character
    // token "AB-" must finish within the 2.5 s budget.
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder { ToTokenize = TestStrings.tokenizerBase };
    dtb.AddTokenOp("AB-", false);
    IDev2Tokenizer dt = dtb.Generate();

    int opCnt = 0;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (dt.HasMoreOps() && opCnt < 35000)
    {
        dt.NextToken();
        opCnt++;
    }
    sw.Stop();

    long exeTime = sw.ElapsedMilliseconds;
    Console.WriteLine("Total Time : " + exeTime);

    // Split the original combined assert so a failure pinpoints whether the
    // op count fell short or the timing budget was exceeded.
    Assert.AreEqual(35000, opCnt, "Expected 35000 ops but performed " + opCnt);
    Assert.IsTrue(exeTime < 2500, "It took [ " + exeTime + " ]");
}
public void Single_Token_Perfomance_Op()
{
    // Performance check: 100 000 NextToken() ops on the single-character
    // token "-" must finish within the 1.2 s budget.
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder { ToTokenize = TestStrings.tokenizerBase };
    dtb.AddTokenOp("-", false);
    IDev2Tokenizer dt = dtb.Generate();

    int opCnt = 0;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (dt.HasMoreOps() && opCnt < 100000)
    {
        dt.NextToken();
        opCnt++;
    }
    sw.Stop();

    long exeTime = sw.ElapsedMilliseconds;
    Console.WriteLine(@"Total Time : " + exeTime);

    // Split the original combined assert so a failure pinpoints whether the
    // op count fell short or the timing budget was exceeded.
    Assert.AreEqual(100000, opCnt, "Expected 100000 ops but performed " + opCnt);
    Assert.IsTrue(exeTime < 1200, "Expecting it to take 1200 ms but it took " + exeTime + " ms.");
}
public void More_Then_One_Op_Fwd()
{
    // Two ops (an index op of 2 and an EOF op) applied forward must consume
    // the whole string in exactly two tokens.
    var dtb = new Dev2TokenizerBuilder { ToTokenize = Search.ToStringBuilder() };
    dtb.AddIndexOp(2);
    dtb.AddEoFOp();

    var dt = dtb.Generate();
    var result = string.Empty;
    var cnt = 0;
    while (dt.HasMoreOps())
    {
        result += dt.NextToken();
        cnt++;
    }

    Assert.AreEqual("AB-CD-DE-FG-HI", result);
    // AreEqual reports expected vs actual on failure, unlike IsTrue(cnt == 2).
    Assert.AreEqual(2, cnt);
}
public void Eof_Op_Bwd()
{
    // An EOF op in reverse order must still return the entire input as one token.
    var dtb = new Dev2TokenizerBuilder { ToTokenize = Search.ToStringBuilder(), ReverseOrder = true };
    dtb.AddEoFOp();

    var dt = dtb.Generate();
    var result = string.Empty;
    var cnt = 0;
    while (dt.HasMoreOps())
    {
        result += dt.NextToken();
        cnt++;
    }

    Assert.AreEqual("AB-CD-DE-FG-HI", result);
    // AreEqual reports expected vs actual on failure, unlike IsTrue(cnt == 1).
    Assert.AreEqual(1, cnt);
}
public void Single_Token_Perfomance_Op()
{
    // Performance check: 100 000 NextToken() ops on the single-character
    // token "-" must finish within the 1.3 s budget.
    var dtb = new Dev2TokenizerBuilder { ToTokenize = Properties.TestStrings.tokenizerBase.ToStringBuilder() };
    dtb.AddTokenOp("-", false);
    var dt = dtb.Generate();

    var opCnt = 0;
    var sw = new Stopwatch();
    sw.Start();
    while (dt.HasMoreOps() && opCnt < 100000)
    {
        dt.NextToken();
        opCnt++;
    }
    sw.Stop();

    var exeTime = sw.ElapsedMilliseconds;
    Console.WriteLine(@"Total Time : " + exeTime);

    // Split the original combined assert so a failure pinpoints whether the
    // op count fell short or the timing budget was exceeded.
    Assert.AreEqual(100000, opCnt, "Expected 100000 ops but performed " + opCnt);
    Assert.IsTrue(exeTime < 1300, "Expecting it to take 1300 ms but it took " + exeTime + " ms.");
}
public void Three_Token_Perfomance_Op()
{
    // Performance check: 35 000 NextToken() ops against the three-character
    // token "AB-" must finish within the 2.5 s budget.
    var dtb = new Dev2TokenizerBuilder { ToTokenize = Properties.TestStrings.tokenizerBase.ToStringBuilder() };
    dtb.AddTokenOp("AB-", false);
    var dt = dtb.Generate();

    var opCnt = 0;
    var sw = new Stopwatch();
    sw.Start();
    while (dt.HasMoreOps() && opCnt < 35000)
    {
        dt.NextToken();
        opCnt++;
    }
    sw.Stop();

    var exeTime = sw.ElapsedMilliseconds;
    Console.WriteLine("Total Time : " + exeTime);

    // Split the original combined assert so a failure pinpoints whether the
    // op count fell short or the timing budget was exceeded.
    Assert.AreEqual(35000, opCnt, "Expected 35000 ops but performed " + opCnt);
    Assert.IsTrue(exeTime < 2500, "It took [ " + exeTime + " ]");
}
IDev2Tokenizer CreateSplitPattern(string stringToSplit, string splitType, string at)
{
    // Translate a split-type selection into the matching tokenizer op and
    // generate the tokenizer over the given input.
    var builder = new Dev2TokenizerBuilder { ToTokenize = stringToSplit.ToStringBuilder() };

    switch (splitType)
    {
        case "Index":
            // Only a non-empty, parseable, positive index adds an op.
            if (!string.IsNullOrEmpty(at) && int.TryParse(at, out int indexNum) && indexNum > 0)
            {
                builder.AddIndexOp(indexNum);
            }
            break;

        case "Space":
            builder.AddTokenOp(" ", false);
            break;

        case "Tab":
            builder.AddTokenOp("\t", false);
            break;

        case "New Line":
            // Prefer the most specific newline sequence actually present.
            if (stringToSplit.Contains("\r\n"))
            {
                builder.AddTokenOp("\r\n", false);
            }
            else if (stringToSplit.Contains("\n"))
            {
                builder.AddTokenOp("\n", false);
            }
            else if (stringToSplit.Contains("\r"))
            {
                builder.AddTokenOp("\r", false);
            }
            break;

        case "Chars":
            if (!string.IsNullOrEmpty(at))
            {
                builder.AddTokenOp(at, false);
            }
            break;

        default:
            // Unknown split types add no ops.
            break;
    }

    return builder.Generate();
}
public void Empty_String_Error()
{
    // Generating a tokenizer over an empty string must throw.
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();
    dtb.ToTokenize = string.Empty;
    dtb.AddEoFOp();

    bool threw = false;
    try
    {
        // Result intentionally discarded; only the throw matters (the original
        // assigned it to an unused local).
        dtb.Generate();
    }
    catch (Exception)
    {
        threw = true;
    }

    // Assert OUTSIDE the try/catch: the original pattern caught its own
    // Assert.Fail() (AssertFailedException derives from Exception) and then
    // asserted 1 == 1, so the test could never actually fail.
    Assert.IsTrue(threw, "Expected Generate() to throw for an empty input string.");
}
public void Three_Token_Op_Fwd()
{
    // Forward split on the multi-character token "AB-"; the token itself is dropped.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search2 };
    builder.AddTokenOp("AB-", false);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + " " + tokenizer.NextToken();
    }

    Assert.AreEqual(" CD- CD", collected);
}
public void Index_Op_Fwd()
{
    // Forward split into fixed-width chunks of 2 characters.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search };
    builder.AddIndexOp(2);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + " " + tokenizer.NextToken();
    }

    Assert.AreEqual(" AB -C D- DE -F G- HI", collected);
}
public void Two_Token_Op_Fwd()
{
    // Forward split on the two-character token "AB" (token excluded from output).
    Dev2TokenizerBuilder builder = new Dev2TokenizerBuilder();
    builder.ToTokenize = Search2.ToStringBuilder();
    builder.AddTokenOp("AB", false);

    IDev2Tokenizer tokenizer = builder.Generate();
    string collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected += " " + tokenizer.NextToken();
    }

    Assert.AreEqual(" -CD- -CD", collected);
}
public void Token_Op_With_Token_Fwd()
{
    // Splitting on "-" with include=true keeps the delimiter, so concatenating
    // all tokens reproduces the original string.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search };
    builder.AddTokenOp("-", true);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + tokenizer.NextToken();
    }

    Assert.AreEqual("AB-CD-DE-FG-HI", collected);
}
public void Empty_String_Error()
{
    // Generating a tokenizer over an empty string must throw.
    var dtb = new Dev2TokenizerBuilder { ToTokenize = string.Empty.ToStringBuilder() };
    dtb.AddEoFOp();

    var threw = false;
    try
    {
        dtb.Generate();
    }
    catch (Exception)
    {
        threw = true;
    }

    // Assert OUTSIDE the try/catch: the original pattern caught its own
    // Assert.Fail() (AssertFailedException derives from Exception), so the
    // test could never actually fail.
    Assert.IsTrue(threw, "Expected Generate() to throw for an empty input string.");
}
public void Two_Token_Op_Bwd()
{
    // Reverse-order split on "B-"; tokens come back last-to-first.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search2, ReverseOrder = true };
    builder.AddTokenOp("B-", false);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".CD.CD-A.A", collected);
}
public void Token_Op_With_Token_Bwd()
{
    // Reverse-order split on "-" with include=true: each token (except the
    // final one) carries its delimiter.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search, ReverseOrder = true };
    builder.AddTokenOp("-", true);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".-HI.-FG.-DE.-CD.AB", collected);
}
public void Index_Op_Bwd()
{
    // Reverse-order split into fixed-width chunks of 2 characters.
    var builder = new Dev2TokenizerBuilder { ToTokenize = search, ReverseOrder = true };
    builder.AddIndexOp(2);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".HI.G-.-F.DE.D-.-C.AB", collected);
}
public void Single_Token_Op_Fwd()
{
    // Forward split on "-" with the delimiter discarded.
    var builder = new Dev2TokenizerBuilder { ToTokenize = Search };
    builder.AddTokenOp("-", false);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + tokenizer.NextToken();
    }

    Assert.AreEqual("ABCDDEFGHI", collected);
}
public void Token_And_Index_Op_Fwd()
{
    // Combines a token op ("-", excluded) with an index op (3); the expected
    // value pins how the two ops interleave when applied forward.
    Dev2TokenizerBuilder builder = new Dev2TokenizerBuilder();
    builder.ToTokenize = Search.ToStringBuilder();
    builder.AddTokenOp("-", false);
    builder.AddIndexOp(3);

    IDev2Tokenizer tokenizer = builder.Generate();
    string collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected += " " + tokenizer.NextToken();
    }

    Assert.AreEqual(" AB CD- DE FG- HI", collected);
}
public void Three_Token_Op_Bwd()
{
    // Reverse-order split on the multi-character token "AB-" (token excluded).
    var builder = new Dev2TokenizerBuilder { ToTokenize = Search2.ToStringBuilder(), ReverseOrder = true };
    builder.AddTokenOp("AB-", false);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".CD.CD-", collected);
}
public void Single_Token_Op_Bwd()
{
    // Reverse-order split on "-" with the delimiter discarded.
    var builder = new Dev2TokenizerBuilder { ToTokenize = Search.ToStringBuilder(), ReverseOrder = true };
    builder.AddTokenOp("-", false);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".HI.FG.DE.CD.AB", collected);
}
public void Eof_Op_Fwd()
{
    // A single EOF op must return the entire input as exactly one token.
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder();
    dtb.ToTokenize = search;
    dtb.AddEoFOp();
    IDev2Tokenizer dt = dtb.Generate();

    string result = string.Empty;
    int cnt = 0;
    while (dt.HasMoreOps())
    {
        result += dt.NextToken();
        cnt++;
    }

    Assert.AreEqual("AB-CD-DE-FG-HI", result);
    // AreEqual reports expected vs actual on failure, unlike IsTrue(cnt == 1).
    Assert.AreEqual(1, cnt);
}
public void Token_And_Index_Op_Bwd()
{
    // Combines a token op ("-", excluded) with an index op (3) in reverse
    // order; the expected value pins how the ops interleave.
    var builder = new Dev2TokenizerBuilder { ToTokenize = Search, ReverseOrder = true };
    builder.AddTokenOp("-", false);
    builder.AddIndexOp(3);

    var tokenizer = builder.Generate();
    var collected = string.Empty;
    while (tokenizer.HasMoreOps())
    {
        collected = collected + "." + tokenizer.NextToken();
    }

    Assert.AreEqual(".HI.-FG.DE.-CD.AB", collected);
}
// Builds the tokenizer for a data-split operation: each DataSplitDTO in `args`
// is translated into one tokenizer op. Evaluation failures are accumulated in
// `errors` instead of thrown. Returns null when there is nothing to tokenize
// or any error was recorded.
// NOTE(review): each compiler.Evaluate call passes `out errors`, replacing the
// accumulator built so far — earlier AddError messages may be lost; verify intended.
private IDev2Tokenizer CreateSplitPattern(ref string stringToSplit, IEnumerable<DataSplitDTO> args, IDataListCompiler compiler, Guid dlId, out ErrorResultTO errors)
{
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder { ToTokenize = stringToSplit, ReverseOrder = ReverseOrder };
    errors = new ErrorResultTO();
    foreach (DataSplitDTO t in args)
    {
        var fieldName = t.OutputVariable;
        t.At = t.At ?? "";
        // Validate the output expression against the datalist before building
        // the op; an invalid expression records an error and skips this DTO.
        if (!string.IsNullOrEmpty(_datalistString))
        {
            var isValidExpr = new IsValidExpressionRule(() => fieldName, _datalistString) { LabelText = fieldName };
            var errorInfo = isValidExpr.Check();
            if (errorInfo != null)
            {
                errors.AddError(errorInfo.Message);
                continue;
            }
        }
        IBinaryDataListEntry entry;
        string error;
        switch (t.SplitType)
        {
            case "Index":
                // Evaluate the index expression; Convert.ToInt32 throws on
                // non-numeric input, which is recorded as an error below.
                try
                {
                    entry = compiler.Evaluate(dlId, enActionType.User, t.At, false, out errors);
                    string index = DataListUtil.GetValueAtIndex(entry, 1, out error);
                    int indexNum = Convert.ToInt32(index);
                    // Non-positive indexes are silently ignored.
                    if (indexNum > 0)
                    {
                        dtb.AddIndexOp(indexNum);
                    }
                }
                catch (Exception ex)
                {
                    errors.AddError(ex.Message);
                }
                break;
            case "End":
                dtb.AddEoFOp();
                break;
            case "Space":
                dtb.AddTokenOp(" ", t.Include);
                break;
            case "Tab":
                dtb.AddTokenOp("\t", t.Include);
                break;
            case "New Line":
                // Prefer the most specific newline sequence present in the input.
                if (stringToSplit.Contains("\r\n"))
                {
                    dtb.AddTokenOp("\r\n", t.Include);
                }
                else if (stringToSplit.Contains("\n"))
                {
                    dtb.AddTokenOp("\n", t.Include);
                }
                else if (stringToSplit.Contains("\r"))
                {
                    dtb.AddTokenOp("\r", t.Include);
                }
                break;
            case "Chars":
                if (!string.IsNullOrEmpty(t.At))
                {
                    // Both the split characters and the optional escape
                    // character are datalist expressions that need evaluating.
                    entry = compiler.Evaluate(dlId, enActionType.User, t.At, false, out errors);
                    string val = DataListUtil.GetValueAtIndex(entry, 1, out error);
                    string escape = t.EscapeChar;
                    if (!String.IsNullOrEmpty(escape))
                    {
                        entry = compiler.Evaluate(dlId, enActionType.User, t.EscapeChar, false, out errors);
                        escape = DataListUtil.GetValueAtIndex(entry, 1, out error);
                    }
                    dtb.AddTokenOp(val, t.Include, escape);
                }
                break;
        }
        _indexCounter++;
    }
    // Any recorded error (or an empty input) suppresses tokenizer generation.
    return (string.IsNullOrEmpty(dtb.ToTokenize) || errors.HasErrors() ? null : dtb.Generate());
}
// Builds the tokenizer for a data-split operation against the execution
// environment: each DataSplitDTO in `args` becomes one tokenizer op.
// Evaluation failures are accumulated in `errors` instead of thrown.
// Returns null when there is nothing to tokenize or any error was recorded.
private IDev2Tokenizer CreateSplitPattern(ref string stringToSplit, IEnumerable<DataSplitDTO> args, IExecutionEnvironment compiler, out ErrorResultTO errors, int update)
{
    Dev2TokenizerBuilder dtb = new Dev2TokenizerBuilder { ToTokenize = stringToSplit, ReverseOrder = ReverseOrder };
    errors = new ErrorResultTO();
    foreach (DataSplitDTO t in args)
    {
        t.At = t.At ?? "";
        string entry;
        switch (t.SplitType)
        {
            case "Index":
                // Evaluate the index expression; a null evaluation result or a
                // non-numeric value surfaces as a recorded error.
                try
                {
                    entry = compiler.EvalAsListOfStrings(t.At, update).FirstOrDefault();
                    if (entry == null)
                    {
                        throw new Exception("null iterator expression");
                    }
                    string index = entry;
                    int indexNum = Convert.ToInt32(index);
                    // Non-positive indexes are silently ignored.
                    if (indexNum > 0)
                    {
                        dtb.AddIndexOp(indexNum);
                    }
                }
                catch (Exception ex)
                {
                    errors.AddError(ex.Message);
                }
                break;
            case "End":
                dtb.AddEoFOp();
                break;
            case "Space":
                dtb.AddTokenOp(" ", t.Include);
                break;
            case "Tab":
                dtb.AddTokenOp("\t", t.Include);
                break;
            case "New Line":
                // Prefer the most specific newline sequence present in the input.
                if (stringToSplit.Contains("\r\n"))
                {
                    dtb.AddTokenOp("\r\n", t.Include);
                }
                else if (stringToSplit.Contains("\n"))
                {
                    dtb.AddTokenOp("\n", t.Include);
                }
                else if (stringToSplit.Contains("\r"))
                {
                    dtb.AddTokenOp("\r", t.Include);
                }
                break;
            case "Chars":
                if (!string.IsNullOrEmpty(t.At))
                {
                    entry = compiler.EvalAsListOfStrings(t.At, update).FirstOrDefault();
                    // When the evaluated split token contains the LITERAL text
                    // "\r\n" or "\n" (verbatim strings here match backslash-r-
                    // backslash-n, not real newlines) and the input holds real
                    // newlines, the whole input is regex-escaped so those
                    // newlines become literal escape sequences.
                    // NOTE(review): Regex.Escape also escapes every other regex
                    // metacharacter in the data, mutating the ref parameter —
                    // verify this side effect is intended for callers.
                    if (entry != null && (entry.Contains(@"\r\n") || entry.Contains(@"\n")))
                    {
                        var match = Regex.Match(stringToSplit, @"[\r\n]+");
                        if (match.Success && !SkipBlankRows)
                        {
                            stringToSplit = Regex.Escape(stringToSplit);
                            dtb.ToTokenize = stringToSplit;
                        }
                    }
                    // The optional escape character is itself an expression.
                    string escape = t.EscapeChar;
                    if (!String.IsNullOrEmpty(escape))
                    {
                        escape = compiler.EvalAsListOfStrings(t.EscapeChar, update).FirstOrDefault();
                    }
                    dtb.AddTokenOp(entry, t.Include, escape);
                }
                break;
        }
        _indexCounter++;
    }
    // Any recorded error (or an empty input) suppresses tokenizer generation.
    return (string.IsNullOrEmpty(dtb.ToTokenize) || errors.HasErrors() ? null : dtb.Generate());
}
IDev2Tokenizer CreateTokenizer()
{
    // Build a tokenizer from the current view-model state. Returns null when
    // there is nothing to split or generation fails (the failure is recorded
    // in _tokenizerValidationErrors).
    _tokenizerValidationErrors.Clear();

    var stringToSplit = VariableListString;
    var splitType = SplitType;
    var at = SplitToken;

    if (string.IsNullOrWhiteSpace(stringToSplit))
    {
        return null;
    }

    var builder = new Dev2TokenizerBuilder { ToTokenize = stringToSplit };
    switch (splitType)
    {
        case "Index":
            if (!string.IsNullOrEmpty(at))
            {
                // No need for try..parse as ValidationErrors() function checks this!
                builder.AddIndexOp(int.Parse(at));
            }
            break;
        case "Space":
            builder.AddTokenOp(" ", false);
            break;
        case "Tab":
            builder.AddTokenOp("\t", false);
            break;
        case "New Line":
            // Prefer the most specific newline sequence present in the input.
            if (stringToSplit.Contains("\r\n"))
            {
                builder.AddTokenOp("\r\n", false);
            }
            else if (stringToSplit.Contains("\n"))
            {
                builder.AddTokenOp("\n", false);
            }
            else if (stringToSplit.Contains("\r"))
            {
                builder.AddTokenOp("\r", false);
            }
            else
            {
                // Assume environment
                builder.AddTokenOp(Environment.NewLine, false);
            }
            break;
        case "Chars":
            if (!string.IsNullOrEmpty(at))
            {
                builder.AddTokenOp(at, false);
            }
            break;
    }

    try
    {
        return builder.Generate();
    }
    catch (Exception ex)
    {
        _tokenizerValidationErrors.Add(new ErrorInfo { ErrorType = ErrorType.Critical, Message = ex.Message });
    }

    return null;
}