/// <summary>Determines whether the token immediately before <paramref name="token"/> contains only whitespace.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the previous token exists and is whitespace; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    if (token.PreviousToken == null)
    {
        return false;
    }

    return string.IsNullOrWhiteSpace(token.PreviousToken.TokenText);
}
/// <summary>Determines whether the token immediately after <paramref name="token"/> contains only whitespace.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the next token exists and is whitespace; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    if (token.NextToken == null)
    {
        return false;
    }

    return string.IsNullOrWhiteSpace(token.NextToken.TokenText);
}
/// <summary>Determines whether the token immediately after <paramref name="token"/> has the expected token type.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the next token's type equals <see cref="TokenType"/>; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    if (token == null)
    {
        return false;
    }

    return token.NextToken?.TokenType == TokenType;
}
/// <summary>Determines whether <paramref name="token"/> has the expected token type.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token's type equals <see cref="TokenType"/>; otherwise <see langword="false"/>.</returns>
protected bool InternalIsAllRight(TextTokenInfo token)
{
    if (token == null)
    {
        return false;
    }

    return token.TokenType == TokenType;
}
private void ProcessToken(TextSubstitutionProcessContext context, TextTokenInfo tokenInfo)
{
    switch (tokenInfo.Kind)
    {
        case TextTokenKind.Text:
            OnProcessText(context, tokenInfo);
            break;

        case TextTokenKind.DelimitedText:
            OnProcessDelimitedText(context, tokenInfo);
            break;
    }
}
/// <summary>Determines whether the token text is non-blank and entirely upper case.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token text is not blank and equals its upper-case form; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    if (string.IsNullOrWhiteSpace(token.TokenText))
    {
        return false;
    }

    return token.TokenText.ToUpper() == token.TokenText;
}
public void Parse_StartDelimiterAtEndOfInput()
{
    //Arrange
    var input = "Begin <";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "Begin <", 0)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_EndDelimiterWithNoStart()
{
    //Arrange
    var input = "Begin first> End";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, input, 0)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_NoDelimitedText()
{
    //Arrange
    var input = "Some text";
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "Some text", 0)
    };
    var target = new TextTokenParser("{", "}");

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_DelimiterHasMultipleCharacters()
{
    //Arrange
    var input = "Begin <!-- comment -->";
    var target = new TextTokenParser("<!--", "-->");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "Begin ", 0),
        new TextTokenInfo(TextTokenKind.DelimitedText, " comment ", 6)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_DelimiterAtStart()
{
    //Arrange
    var input = "<first> End";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.DelimitedText, "first", 0),
        new TextTokenInfo(TextTokenKind.Text, " End", 7)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_TooManyEndDelimiters()
{
    //Arrange
    var input = "Begin <4 > 5> End";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "Begin ", 0),
        new TextTokenInfo(TextTokenKind.DelimitedText, "4 ", 6),
        new TextTokenInfo(TextTokenKind.Text, " 5> End", 10)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_SpacesAtFront()
{
    //Arrange
    var input = "    Begin <target> End";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "    Begin ", 0),
        new TextTokenInfo(TextTokenKind.DelimitedText, "target", 10),
        new TextTokenInfo(TextTokenKind.Text, " End", 18)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
public void Parse_DelimitersSideBySide()
{
    //Arrange
    var input = "Begin <first><second> End";
    var target = new TextTokenParser("<", ">");
    var expected = new TextTokenInfo[] {
        new TextTokenInfo(TextTokenKind.Text, "Begin ", 0),
        new TextTokenInfo(TextTokenKind.DelimitedText, "first", 6),
        new TextTokenInfo(TextTokenKind.DelimitedText, "second", 13),
        new TextTokenInfo(TextTokenKind.Text, " End", 21)
    };

    //Act
    var actual = target.Parse(input).ToList();

    //Assert
    actual.Should().ContainInOrder(expected);
}
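//A minimal usage sketch (not part of the library) built only from the members the tests
//above exercise: the (startDelimiter, endDelimiter) constructor, Parse(string) returning a
//sequence of TextTokenInfo, and the Kind/OriginalText properties used elsewhere in this
//section. Any other names here are illustrative assumptions.
using System;

public static class TextTokenParserDemo
{
    public static void Main()
    {
        var parser = new TextTokenParser("<", ">");

        //"Hello " and "!" come back as Text tokens; "name" comes back as DelimitedText
        foreach (var token in parser.Parse("Hello <name>!"))
        {
            Console.WriteLine($"{token.Kind}: '{token.OriginalText}'");
        }
    }
}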
/// <summary>Determines whether <paramref name="token"/> is the first token.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token number is 1; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    return token.TokenNumber == 1;
}
/// <summary>Called to process text that is not delimited.</summary>
/// <param name="context">The processing context.</param>
/// <param name="tokenInfo">The token being processed.</param>
/// <remarks>
/// The default implementation returns the original text.
/// </remarks>
protected virtual void OnProcessText(TextSubstitutionProcessContext context, TextTokenInfo tokenInfo)
{
    //Just append the original text to the output
    context.Output.Append(tokenInfo.OriginalText);
}
/// <summary>Called to process delimited text.</summary>
/// <param name="context">The processing context.</param>
/// <param name="tokenInfo">The token being processed.</param>
/// <remarks>
/// The default implementation returns the replacement text from the corresponding substitution rule.
/// </remarks>
protected virtual void OnProcessDelimitedText(TextSubstitutionProcessContext context, TextTokenInfo tokenInfo)
{
    //Process the template and output whatever results
    var subContext = new TextSubstitutionContext(context.Options, tokenInfo.OriginalText);

    //Find the first applicable rule
    var rule = (from r in Rules
                where r.CanProcess(subContext)
                select r).FirstOrDefault();

    //Get the new text
    var text = (rule != null) ? rule.Process(subContext) : OnHandleRuleNotFound(context, tokenInfo);

    //Output
    if (!String.IsNullOrEmpty(text))
    {
        context.Output.Append(text);
    }
}
/// <summary>Called when no matching rule can be found for a token.</summary>
/// <param name="context">The processing context.</param>
/// <param name="tokenInfo">The token being processed.</param>
/// <returns>The text to use.</returns>
/// <remarks>
/// The default implementation returns the original token text, wrapped in the delimiters.
/// </remarks>
protected virtual string OnHandleRuleNotFound(TextSubstitutionProcessContext context, TextTokenInfo tokenInfo)
{
    return context.Options.StartDelimiter + tokenInfo.OriginalText + context.Options.EndDelimiter;
}
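//Hypothetical sketch: the derived-class and base-class names below (StrictSubstitutionEngine,
//TextSubstitutionEngine) are assumptions, but the override shows how the hook above can be
//used to drop unmatched tokens instead of echoing them back wrapped in their delimiters.
public class StrictSubstitutionEngine : TextSubstitutionEngine
{
    /// <summary>Drops tokens that no rule can handle.</summary>
    protected override string OnHandleRuleNotFound(TextSubstitutionProcessContext context, TextTokenInfo tokenInfo)
    {
        //Returning an empty string makes OnProcessDelimitedText append nothing
        return String.Empty;
    }
}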
/// <summary>Determines whether <paramref name="token"/> satisfies this filter's condition.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token passes the check; otherwise <see langword="false"/>.</returns>
public abstract bool IsAllRight(TextTokenInfo token);
/// <summary>Determines whether this filter can be applied to <paramref name="token"/>.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token can be filtered; otherwise <see langword="false"/>.</returns>
public abstract bool IsCanFilter(TextTokenInfo token);
/// <summary>Determines whether <paramref name="token"/> matches the expected part number and token number.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token's part number equals <see cref="PartNumber"/> and its token number equals <see cref="TokenNumber"/>; otherwise <see langword="false"/>.</returns>
protected bool InternalIsAllRight(TextTokenInfo token)
{
    return token != null &&
           token.TokenPartNumber == PartNumber &&
           token.TokenNumber == TokenNumber;
}
/// <summary>Determines whether <paramref name="token"/> matches the expected part number and token number.</summary>
/// <param name="token">The token to examine.</param>
/// <returns><see langword="true"/> if the token passes <see cref="InternalIsAllRight"/>; otherwise <see langword="false"/>.</returns>
public override bool IsAllRight(TextTokenInfo token)
{
    return InternalIsAllRight(token);
}
/// <summary>Performs any preparation needed before <paramref name="token"/> is filtered.</summary>
/// <param name="token">The token to prepare.</param>
public abstract void Prepare(TextTokenInfo token);
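//Hypothetical sketch of a concrete filter; the base-class name TextTokenFilter is an assumption,
//and the members simply illustrate how IsCanFilter, IsAllRight and Prepare fit together.
public class FirstTokenFilter : TextTokenFilter
{
    /// <summary>This filter can inspect any non-null token.</summary>
    public override bool IsCanFilter(TextTokenInfo token)
    {
        return token != null;
    }

    /// <summary>Accepts only the very first token, mirroring the TokenNumber check above.</summary>
    public override bool IsAllRight(TextTokenInfo token)
    {
        return token != null && token.TokenNumber == 1;
    }

    /// <summary>No preparation is needed for this simple filter.</summary>
    public override void Prepare(TextTokenInfo token)
    {
    }
}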