Example #1
            public void Should_Parse_Right_Parenthesis_Correctly()
            {
                // Given
                var tokenizer = new ScriptTokenizer(" ) ");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.RightParenthesis, token.Type);
                Assert.Equal(1, token.Index);
                Assert.Equal(1, token.Length);
            }
Example #2
            public void Should_Parse_Left_Brace_Correctly()
            {
                // Given
                var tokenizer = new ScriptTokenizer(" { ");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.LeftBrace, token.Type);
                Assert.Equal(1, token.Index);
                Assert.Equal(1, token.Length);
            }
Example #3
            [Theory]
            [InlineData("\"Hello \\\"World\\\"\" ", 0, 17)]  // "Hello \"World\""
            public void Should_Parse_Strings_Correctly(string content, int start, int end)
            {
                // Given
                var tokenizer = new ScriptTokenizer(content);

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.String, token.Type);
                Assert.Equal(start, token.Index);
                Assert.Equal(end, token.Length);
            }
Example #4
            public void Should_Skip_New_Line()
            {
                // Given
                var tokenizer = new ScriptTokenizer(" \n Hello ");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.Word, token.Type);
                Assert.Equal(3, token.Index);
                Assert.Equal(5, token.Length);
            }
Example #5
            public void Should_Skip_Carriage_Return()
            {
                // Given
                var tokenizer = new ScriptTokenizer(" \r\n Hello ");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.Word, token.Type);
                Assert.Equal(4, token.Index);
                Assert.Equal(5, token.Length);
            }
Example #6
            public void Should_Skip_Single_Line_Comment()
            {
                // Given
                var tokenizer = new ScriptTokenizer("// This should be ignored \n {");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.LeftBrace, token.Type);
                Assert.Equal(28, token.Index);
                Assert.Equal(1, token.Length);
            }
Example #7
            public void Should_Skip_Multi_Line_Comment()
            {
                // Given
                var tokenizer = new ScriptTokenizer("/* This should be ignored \n   And this as well */ {");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.LeftBrace, token.Type);
                Assert.Equal(50, token.Index);
                Assert.Equal(1, token.Length);
            }
Example #8
            public void Should_Parse_Character_Correctly(string content, int index, int length)
            {
                // Given
                var tokenizer = new ScriptTokenizer(content);

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.Character, token.Type);
                Assert.Equal(index, token.Index);
                Assert.Equal(length, token.Length);
            }
Example #9
            public void Should_Parse_Semicolon_Correctly()
            {
                // Given
                var tokenizer = new ScriptTokenizer(" ; ");

                // When
                var token = tokenizer.GetNextToken();

                // Then
                Assert.Equal(ScriptTokenType.Semicolon, token.Type);
                Assert.Equal(1, token.Index);
                Assert.Equal(1, token.Length);
            }
Example #10
        //////////////////////////////////////////////////////////////////////////
        private void ScanScript(string ScriptFile)
        {
            try
            {
                // Dispose the file stream when the enclosing block exits (C# 8 using declaration).
                using FileStream fs = new FileStream(ScriptFile, FileMode.Open, FileAccess.Read, FileShare.Read);
                ScriptTokenizer Tokenizer = new ScriptTokenizer(fs, false);
                Tokenizer.GetTokens();

                for (int i = 0; i < Tokenizer.Tokens.Count; i++)
                {
                    ScriptToken PrevToken = i > 0 ? Tokenizer.Tokens[i - 1] : null;
                    ScriptToken Token     = Tokenizer.Tokens[i];

                    // Only collect string literal tokens.
                    if (!Token.IsString)
                    {
                        continue;
                    }
                    // Skip strings that follow the "on"/"extern" keywords or an "#include" directive.
                    if (PrevToken != null)
                    {
                        if ((PrevToken.IsKeyword && PrevToken.Value == "on") ||
                            (PrevToken.IsKeyword && PrevToken.Value == "extern") ||
                            (PrevToken.IsIdentifier && PrevToken.Value == "#include"))
                        {
                            continue;
                        }
                    }

                    // Strip the surrounding quote characters.
                    string Str = Token.Value.Substring(1, Token.Value.Length - 2);
                    if (Str == string.Empty)
                    {
                        continue;
                    }

                    Strings.Add(new StringLocation(Str, ScriptFile, Token.StartLine, StringLocation.StringType.ScriptFile));
                }
            }
            catch
            {
                AddLog(LogSeverity.Error, "Error parsing script '" + ScriptFile + "'");
            }
        }
Example #11
    static T TestParse<T>(string source, ICfgNode expected, TokenListParser<ScriptToken, T> parser) where T : ICfgNode
    {
        var tokens = ScriptTokenizer.Tokenize(source);

        if (!tokens.HasValue)
        {
            throw new InvalidOperationException(
                      $"Tokenization failure: {tokens.ErrorMessage} at {tokens.ErrorPosition}");
        }

        var filtered = ScriptParser.FilterTokens(tokens.Value);
        var parsed   = parser.TryParse(filtered);

        if (!parsed.HasValue)
        {
            throw new InvalidOperationException($"Parse failure: {parsed}");
        }

        Assert.Equal(expected, parsed.Value);
        return parsed.Value;
    }
Example #12
        public bool Parse()
        {
            if (mScript == null)
            {
                return false;
            }
            if (mScript.Count < 1)
            {
                mScript = null;
                return false;
            }
            if (!mScript.Next())
            {
                mScript = null;
                return false;
            }
            var head = mScript.Head;

            try {
                mChain.Invoke(mArgs);
            } catch (System.Exception e) {
                throw new ScriptUnparsable(head, mScript.Line, e);
            }
            return true;
        }
Example #13
 public void SetScript(string str)
 {
     mScript     = ScriptTokenizer.Load(str);
     mArgs.Value = mScript;
 }
Example #14
 public void Init(string src, string path)
 {
     TokenizerBase tkz = new ScriptTokenizer();
     LineBufferedReader r = LineBufferedReader.GetInstanceWithText(src, path);
     tkz.Init(r);
     Init(tkz);
 }