public void TokenDefinition_IsInteger2()
{
    // Arrange: an integer literal followed by trailing input.
    var source = "0115 -1";
    var definition = new TokenDefinition(TokenType.Integer, IntegerRegex);

    // Act
    var result = definition.Match(source);

    // Assert: the leading digits are consumed; the tail remains untouched.
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("0115", result.Value);
    Assert.AreEqual(" -1", result.RemainingText);
    Assert.AreEqual(TokenType.Integer, result.TokenType);
}
public void CheckBlockOutTokenType3()
{
    // Arrange: a closing brace should classify as BlockOut.
    var input = "}";

    // Act
    var tokenType = TokenDefinition.DefineTokenType(input);

    // Assert
    Assert.AreEqual(ClassLibrary.Enum.TokenType.BlockOut, tokenType);
}
public void CheckNumberValueTokenType2()
{
    // Arrange: a single digit should classify as NumberValue.
    var input = "9";

    // Act
    var tokenType = TokenDefinition.DefineTokenType(input);

    // Assert
    Assert.AreEqual(ClassLibrary.Enum.TokenType.NumberValue, tokenType);
}
public void CheckOperatorTokenType()
{
    // Arrange: a minus sign should classify as Operator.
    var input = "-";

    // Act
    var tokenType = TokenDefinition.DefineTokenType(input);

    // Assert
    Assert.AreEqual(ClassLibrary.Enum.TokenType.Operator, tokenType);
}
public void TokenDefinition_IsString1()
{
    // NOTE(review): despite the name, this exercises the Identifier token,
    // not String — consider renaming to TokenDefinition_IsIdentifier1.
    // Arrange: an identifier starting with an underscore.
    var source = "_asdf123";
    var definition = new TokenDefinition(TokenType.Identifier, IdentifierRegex);

    // Act
    var result = definition.Match(source);

    // Assert: the whole input is consumed as a single identifier.
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("_asdf123", result.Value);
    Assert.AreEqual(string.Empty, result.RemainingText);
    Assert.AreEqual(TokenType.Identifier, result.TokenType);
}
public void TokenDefinition_IsString1()
{
    // Arrange: a quoted string containing an escape sequence.
    var source = "\"asdf\\n\"";
    var definition = new TokenDefinition(TokenType.String, StringRegex);

    // Act
    var result = definition.Match(source);

    // Assert: the quoted literal is consumed in full, escapes included.
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("\"asdf\\n\"", result.Value);
    Assert.AreEqual(string.Empty, result.RemainingText);
    Assert.AreEqual(TokenType.String, result.TokenType);
}
public void CodeDetectionTest()
{
    // Arrange: input containing exactly one "<name> { ... }" block.
    string code = @"ASSET Test '1234' MY_FUNCTION { there is code in here } ";
    var definition = new TokenDefinition(TokenType.SUB, @"(\S+)\s*{([^}]*)}", 1);

    // Act
    var matches = new List<TokenMatch>(definition.FindMatches(code));

    // Assert: AreEqual reports the actual count on failure, unlike the
    // previous Assert.IsTrue(matches.Count == 1), which only said "false".
    Assert.AreEqual(1, matches.Count);
}
public void Remove_NonExistingTokenDefinition_ExpectsNoChange()
{
    // Arrange: a tokenizer holding exactly one registered definition.
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);
    var existing = new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, "");
    tokenizer.Add(existing);

    // Act: removing a definition that was never added must be a no-op.
    var missing = new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, "");
    tokenizer.Remove(missing);

    // Assert: the original definition is still the only one present.
    Assert.Single(tokenizer.TokenDefinitions);
}
public void SetTokens(TokenDefinition token, int playerId, int amount)
{
    // Sets the player's holding of this token to exactly `amount` by
    // delegating the delta (which may be negative) to AddTokens.
    // FirstOrDefault(predicate) replaces the Where(...).FirstOrDefault()
    // chain, and ?./?? collapses the duplicated if/else branches.
    TokenBunch bunch = Tokens.FirstOrDefault(t => t.PlayerId == playerId && t.Token.Id == token.Id);
    int current = bunch?.Amount ?? 0;
    AddTokens(token, playerId, amount - current);
}
public void AddTokens(TokenDefinition token, int playerId, int amount)
{
    // Adds `amount` tokens of the given definition to the player's pile,
    // creating a new bunch when the player holds none of this token yet.
    // FirstOrDefault(predicate) replaces the Where(...).FirstOrDefault() chain.
    TokenBunch bunch = Tokens.FirstOrDefault(t => t.PlayerId == playerId && t.Token.Id == token.Id);
    if (bunch == null)
    {
        Tokens.Add(new TokenBunch(token, amount, playerId));
    }
    else
    {
        bunch.Amount += amount;
    }
}
public int GetTokenAmount(TokenDefinition token, int playerId = -1)
{
    // With the default playerId (-1), returns the total held across all players.
    if (playerId == -1)
    {
        return Tokens.Where(t => t.Token.Id == token.Id).Sum(t => t.Amount);
    }

    // Otherwise return the single player's amount, or 0 when they hold none.
    // FirstOrDefault(predicate) replaces Where(...).FirstOrDefault(), and
    // ?./?? replaces the explicit null-check ternary.
    TokenBunch bunch = Tokens.FirstOrDefault(t => t.PlayerId == playerId && t.Token.Id == token.Id);
    return bunch?.Amount ?? 0;
}
public void RemoveWhitespaceCheck2()
{
    // Arrange: an expression with embedded spaces.
    var input = "(4 /3) +4";
    var expected = "(4/3)+4";

    // Act
    var sanitized = TokenDefinition.SanitizeString(input);

    // Assert: all whitespace is stripped.
    Assert.AreEqual(expected, sanitized);
}
public void FindMatches_TokenProperties_MatchDefinition(int precedence, bool hasVariableValue, string location)
{
    // Arrange: a catch-all (".+") definition carrying the parametrized properties.
    var definition = new TokenDefinition<TokenTypeTest>(TokenTypeTest.MyToken2, ".+", null, precedence, hasVariableValue, null, -1, true);

    // Act
    var tokens = definition.FindMatches("hello_world", 5);

    // Assert: every produced token reflects the definition's settings
    // and the supplied line number.
    foreach (Token<TokenTypeTest> token in tokens)
    {
        Assert.Equal(TokenTypeTest.MyToken2, token.TokenType);
        Assert.Equal(precedence, token.Precedence);
        Assert.Equal(hasVariableValue, token.HasVariableValue);
        Assert.Equal(5, token.LineNumber);
        Assert.Equal(location, token.Location);
    }
}
public void SetTokensToElement(TokenDefinition token, ElementInstance element, int amount)
{
    // Negative targets are rejected outright.
    if (amount < 0)
    {
        return;
    }

    // Move only the difference between the desired and current amounts,
    // and only when this player's own supply can cover the increase.
    // A negative delta returns tokens from the element to the player.
    int delta = amount - element.GetTokenAmount(token, Id);
    if (delta <= GetTokenAmount(token))
    {
        element.AddTokens(token, Id, delta);
        AddTokens(token, Id, -delta);
    }
}
public void TokenDefinition_IsReserved_ReturnWithNewline()
{
    // Arrange: the reserved word "return" followed by a newline and more text.
    var source = "return\nasdf";
    var pattern = RegexWrapper.DefaultWrap(ReservedProvider.GetPattern());
    var definition = new TokenDefinition(TokenType.Reserved, pattern);

    // Act
    var result = definition.Match(source);

    // Assert: the keyword alone matches; the newline stays in the remainder.
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("return", result.Value);
    Assert.AreEqual("\nasdf", result.RemainingText);
    Assert.AreEqual(TokenType.Reserved, result.TokenType);
}
public void set_escape()
{
    // A literal entry containing regex metacharacters must be escaped
    // when the definition is compiled into a pattern.
    var definition = new TokenDefinition(new[] { "[a-z]" });

    Assert.AreEqual(5, definition.MaximumLength);
    Assert.IsTrue(definition.StartsWith('['));
    Assert.IsFalse(definition.StartsWith('x'));
    Assert.AreEqual("\\G(\\[a-z])", definition.ToString());

    // The literal "[a-z]" matches only at offset 1 of "x[a-z]x".
    Assert.AreEqual(0, definition.MatchLength("x[a-z]x", 0));
    Assert.AreEqual(5, definition.MatchLength("x[a-z]x", 1));
    Assert.AreEqual(0, definition.MatchLength("x[a-z]x", 2));
}
public void SanitizeStringCheck2()
{
    // Arrange: an expression littered with non-token symbols.
    var input = "(4%/2$)^(1££/3@)";
    var expected = "(4/2)^(1/3)";

    // Act
    var sanitized = TokenDefinition.SanitizeString(input);

    // Assert: all invalid characters are stripped.
    Assert.AreEqual(expected, sanitized);
}
public void TokenDefinition_IsInvalid1()
{
    // Arrange: "fake_int" is not a reserved word, so no match is expected.
    var source = "fake_int a=32;";
    var pattern = RegexWrapper.DefaultWrap(ReservedProvider.GetPattern());
    var definition = new TokenDefinition(TokenType.Reserved, pattern);

    // Act
    var result = definition.Match(source);

    // Assert: a failed match carries no value/remainder and the Invalid type.
    Assert.IsFalse(result.IsMatch);
    Assert.IsNull(result.Value);
    Assert.IsNull(result.RemainingText);
    Assert.AreEqual(TokenType.Invalid, result.TokenType);
}
public void TokenDefinition_IsReserved_Integer()
{
    // Arrange: a declaration beginning with the reserved word "int".
    var source = "int a=32;";
    var pattern = RegexWrapper.DefaultWrap(ReservedProvider.GetPattern());
    var definition = new TokenDefinition(TokenType.Reserved, pattern);

    // Act
    var result = definition.Match(source);

    // Assert: only the keyword is consumed; the declaration tail remains.
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("int", result.Value);
    Assert.AreEqual(" a=32;", result.RemainingText);
    Assert.AreEqual(TokenType.Reserved, result.TokenType);
}
public void SanitizeStringCheck1()
{
    // Arrange: digits and operators interleaved with junk symbols.
    var input = "4$+£3:;;/2";
    var expected = "4+3/2";

    // Act
    var sanitized = TokenDefinition.SanitizeString(input);

    // Assert: only valid expression characters survive.
    Assert.AreEqual(expected, sanitized);
}
public Parser(IRobotInstructionHandler[] instructionHandlers)
{
    // The lexer recognizes signed integers (no leading zeros) and single letters.
    var definitions = new TokenDefinition[]
    {
        new TokenDefinition(@"(([-+]?[1-9][0-9]*)|0)", "int"),
        new TokenDefinition(@"[a-zA-Z]", "char"),
    };
    _lexer = new Lexer(definitions);

    // Route each instruction key character to its handler's argument parser.
    _dispatcher = new Dictionary<char, Func<IEnumerator<string[]>, Instruction>>();
    foreach (var handler in instructionHandlers)
    {
        _dispatcher.Add(handler.Key, handler.ParseArguments);
    }
}
public void regex()
{
    // A definition built from a raw Regex reports an unbounded maximum
    // length and cannot narrow its start characters.
    var pattern = new Regex("abc");
    var definition = new TokenDefinition(pattern);

    Assert.AreEqual(int.MaxValue, definition.MaximumLength);
    Assert.IsTrue(definition.StartsWith('a'));
    Assert.IsTrue(definition.StartsWith('x'));

    // "abc" matches only at offset 1 of "xabcx".
    Assert.AreEqual(0, definition.MatchLength("xabcx", 0));
    Assert.AreEqual(3, definition.MatchLength("xabcx", 1));
    Assert.AreEqual(0, definition.MatchLength("xabcx", 2));
    Assert.AreEqual("abc", definition.ToString());
}
private string getTokenRegex(TokenDefinition definition)
{
    // Anchor at the scan position, skip leading whitespace, and capture the
    // token; case-insensitivity, when requested, is scoped via inline
    // (?i)...(?-i) so it applies only to this definition's pattern.
    string body = definition.IgnoreCase
        ? "(?i)" + definition.Regex + "(?-i)"
        : definition.Regex;
    return @"\G\s*(?<Token>" + body + ")";
}
public void set_case_insensitive()
{
    // A case-insensitive set accepts either case at match time and
    // when probing start characters.
    var definition = new TokenDefinition(new[] { "ab", "abc", "bc" }, caseSensitive: false);

    Assert.AreEqual(3, definition.MaximumLength);
    Assert.IsTrue(definition.StartsWith('a'));
    Assert.IsTrue(definition.StartsWith('A'));
    Assert.IsFalse(definition.StartsWith('x'));

    // Longer alternatives are tried first so "abc" wins over "ab".
    Assert.AreEqual("\\G(abc|ab|bc)", definition.ToString());
    Assert.AreEqual(0, definition.MatchLength("xabcx", 0));
    Assert.AreEqual(3, definition.MatchLength("xabcx", 1));
    Assert.AreEqual(2, definition.MatchLength("xabcx", 2));

    // Mixed case still matches the full three characters.
    Assert.AreEqual(3, definition.MatchLength("xaBcx", 1));
}
/// <summary>
/// Transforms text into list of <see cref="Token"/> objects based on rules.
/// </summary>
/// <param name="source">Text to tokenize.</param>
/// <returns>Returns list of tokenized items.</returns>
public IEnumerable<Token> Tokenize(string source)
{
    int position = 0;
    while (position < source.Length)
    {
        // Skip plain spaces between tokens (only ' ', by design of the original).
        if (source[position] == ' ')
        {
            position++;
            continue;
        }

        // First definition whose pattern matches exactly at the current
        // position wins; definitions are tried in registration order.
        TokenDefinition matched = null;
        int length = 0;
        foreach (var definition in _tokenDefinitions)
        {
            var match = definition.Pattern.Match(source, position);
            if (match.Success && match.Index == position)
            {
                matched = definition;
                length = match.Length;
                break;
            }
        }

        if (matched == null)
        {
            throw new Exception(string.Format("Unrecognized symbol '{0}'.", source[position]));
        }

        // Ignored definitions (e.g. whitespace rules) consume input silently.
        if (!matched.IsIgnored)
        {
            yield return new Token(matched.Type, source.Substring(position, length));
        }
        position += length;
    }

    // Sentinel terminator so consumers can detect end-of-stream.
    yield return new Token(TokenType.LastToken, string.Empty);
}
public void set()
{
    // A case-sensitive set of literal alternatives.
    var definition = new TokenDefinition(new[] { "ab", "abc", "bc" });

    Assert.AreEqual(3, definition.MaximumLength);
    Assert.IsTrue(definition.StartsWith('a'));
    Assert.IsTrue(definition.StartsWith('A')); // Approximation, may change in the future
    Assert.IsFalse(definition.StartsWith('x'));

    // Longer alternatives are tried first so "abc" wins over "ab".
    Assert.AreEqual("\\G(abc|ab|bc)", definition.ToString());
    Assert.AreEqual(0, definition.MatchLength("xabcx", 0));
    Assert.AreEqual(3, definition.MatchLength("xabcx", 1));
    Assert.AreEqual(2, definition.MatchLength("xabcx", 2));

    // Case-sensitive matching rejects the mixed-case variant.
    Assert.AreEqual(0, definition.MatchLength("xaBcx", 1));
}
/// <summary>
/// Associates the given token name to the regular expression that
/// tokens of that type are expected to match.
/// </summary>
/// <param name="tokenName">The type of the token to associate the regular expression with.</param>
/// <param name="regex">The regular expression that the token is expected match.</param>
/// <param name="ignoreCase">Specifies whether the regex should be case-sensitive.</param>
/// <remarks>
/// Multiple regular expressions can be registered to the same token name.
/// They will be tried in the order that they are defined.
/// </remarks>
public void Define(string tokenName, string regex, bool ignoreCase = false)
{
    // Duplicate names are rejected up front so lookup order stays unambiguous.
    if (definitionLookup.ContainsKey(tokenName))
    {
        throw new SQLGenerationException(
            String.Format("An attempt was made to define two tokens with the same name: {0}.", tokenName));
    }

    tokenNames.Add(tokenName);
    var definition = new TokenDefinition()
    {
        Type = tokenName,
        Regex = regex,
        IgnoreCase = ignoreCase,
    };
    definitionLookup.Add(tokenName, definition);
}
/// <summary>
/// Associates the given token name to the regular expression that
/// tokens of that type are expected to match.
/// </summary>
/// <param name="tokenName">The type of the token to associate the regular expression with.</param>
/// <param name="regex">The regular expression that the token is expected match.</param>
/// <param name="ignoreCase">Specifies whether the regex should be case-sensitive.</param>
/// <remarks>
/// Multiple regular expressions can be registered to the same token name.
/// They will be tried in the order that they are defined.
/// </remarks>
public void Define(string tokenName, string regex, bool ignoreCase = false)
{
    // Duplicate names are rejected up front so lookup order stays unambiguous.
    if (definitionLookup.ContainsKey(tokenName))
    {
        throw new SQLGenerationException(
            String.Format(Resources.DuplicateTokenDefinition, tokenName));
    }

    tokenNames.Add(tokenName);
    var definition = new TokenDefinition()
    {
        Type = tokenName,
        Regex = regex,
        IgnoreCase = ignoreCase,
    };
    definitionLookup.Add(tokenName, definition);
}
private int countMatches(StringBuilder current)
{
    // Counts how many definitions match the current buffer. As a side
    // effect, records the LAST matching definition and its text in
    // currentMatch/currentContents for the caller to inspect.
    string candidate = current.ToString();
    int matchCount = 0;
    foreach (TokenDefinition definition in tokenDefinitions)
    {
        if (definition.Regex.IsMatch(candidate))
        {
            matchCount++;
            currentMatch = definition;
            currentContents = candidate;
        }
    }
    return matchCount;
}
public void TokenDefinition_IsOperator2()
{
    // Arrange: a block expression containing one opening and one closing brace.
    string[] input = { "{ 3 }" };
    // NOTE(review): this definition is never consumed by the assertions below;
    // the tokenizer is exercised directly. Kept in case the constructor
    // registers the definition somewhere — confirm and delete if not.
    var syntaxRegex = RegexWrapper.DefaultWrap(SyntaxProvider.GetPattern());
    var tokenDefinition = new TokenDefinition(TokenType.SyntaxOperator, syntaxRegex);

    // Act
    var tokensGenerated = _tokenizer.Tokenize(input).ToList();
    var operatorTokens = tokensGenerated.Where(t => t.TokenType == TokenType.SyntaxOperator).ToList();

    // Assert: exactly the two braces are classified as syntax operators.
    // Count property (not LINQ Count()) avoids re-enumeration, and AreEqual
    // reports the actual count on failure.
    Assert.IsTrue(operatorTokens.Any());
    Assert.AreEqual(2, operatorTokens.Count);
    Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == "{"));
    Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == "}"));
}
// Registers (or overwrites) the coloring definition for the given token id.
public static void ColorToken(int token, TokenType type, TokenColor color, TokenTriggers trigger)
    => definitions[token] = new TokenDefinition(type, color, trigger);
public void AddDefinition(TokenDefinition tokenDefinition)
{
    // Intentionally a no-op: this implementation silently ignores
    // registered definitions.
    // NOTE(review): presumably satisfies an interface contract where
    // callers expect silent acceptance — confirm against the interface.
}
// Registers (or overwrites) the coloring definition for the given token name.
public static void ColorToken(string tokenName, TokenType type, TokenColor color, TokenTriggers trigger)
    => definitions[tokenName] = new TokenDefinition(type, color, trigger);