Example #1
File: FGParser.cs Project: Dronjak/Puzz
        public virtual void LexLine(int currentLine, FormatedLine formatedLine)
        {
            formatedLine.index = currentLine;

            if (parserThread != null)
            {
                parserThread.Join();
            }
            parserThread = null;

            string textLine   = textBuffer.lines[currentLine];
            var    lineTokens = formatedLine.tokens ?? new List <SyntaxToken>();

            lineTokens.Clear();
            formatedLine.tokens = lineTokens;

            if (!string.IsNullOrEmpty(textLine))
            {
                //Tokenize(lineTokens, textLine, ref formatedLine.blockState);
                lineTokens.Add(new SyntaxToken(SyntaxToken.Kind.Comment, textLine)
                {
                    style = textBuffer.styles.normalStyle, formatedLine = formatedLine
                });

                var lineWidth = textBuffer.CharIndexToColumn(textLine.Length, currentLine);
                if (lineWidth > textBuffer.longestLine)
                {
                    textBuffer.longestLine = lineWidth;
                }
            }
        }
Example #2
    public virtual void LexLine(string textLine, FormatedLine formatedLine)
    {
        var lineTokens = formatedLine.tokens = new List <LexerToken>();

        if (textLine.Length != 0)
        {
            lineTokens.Add(new LexerToken(LexerToken.Kind.Comment, textLine)
            {
                /*style = textBuffer.styles.normalStyle,*/ formatedLine = formatedLine
            });
        }
    }
Example #3
    public static FormatedLine[] Lex(List <string> lsLine)
    {
        if (lsLine.Count == 0)
        {
            return(new FormatedLine[0]);
        }
        var lexer = new Lexer_CSharp();
        var ret   = new FormatedLine[lsLine.Count];
        var prev  = ret[0] = new FormatedLine()
        {
            index = 0, regionTree = new RegionTree(), blockState = FormatedLine.BlockState.None
        };

        for (int i = 0; i < lsLine.Count; ++i)
        {
            prev = ret[i] = new FormatedLine()
            {
                index = i, regionTree = prev.regionTree, blockState = prev.blockState
            };
            lexer.Tokenize(lsLine[i], ret[i]);
        }
        return(ret);
    }
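Below is a minimal usage sketch (a hypothetical LexFile helper, not part of the project) showing how the lexer above might be driven; it assumes only the FormatedLine, RegionTree and Lexer_CSharp types from these examples.
    // Hypothetical driver: lex a file line by line the same way Lex() does,
    // then dump the tokens of the first line.
    public static void LexFile(string path)
    {
        var lexer = new Lexer_CSharp();
        var lines = System.IO.File.ReadAllLines(path);
        if (lines.Length == 0)
        {
            return;
        }

        var formatted = new FormatedLine[lines.Length];
        FormatedLine prev = null;
        for (int i = 0; i < lines.Length; ++i)
        {
            // Thread regionTree and blockState forward so block comments, verbatim
            // strings and #if regions carry over from one line to the next.
            prev = formatted[i] = new FormatedLine()
            {
                index      = i,
                regionTree = prev != null ? prev.regionTree : new RegionTree(),
                blockState = prev != null ? prev.blockState : FormatedLine.BlockState.None
            };
            lexer.Tokenize(lines[i], formatted[i]);
        }

        foreach (var token in formatted[0].tokens)
        {
            System.Console.WriteLine("{0} '{1}'", token.tokenKind, token.text);
        }
    }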
Example #4
	public virtual void CutParseTree(int fromLine, FormatedLine[] formatedLines)
	{
		if (parseTree == null)
			return;

		ParseTree.BaseNode cut = null;
		var prevLine = fromLine;
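		// Walk backwards from fromLine to find the last token still attached to an
		// error-free parse tree node; the tree is invalidated after that node below.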
		while (cut == null && prevLine-- > 0)
		{
			var tokens = textBuffer.formatedLines[prevLine].tokens;
			if (tokens != null)
			{
				for (var i = tokens.Count; i-- > 0; )
				{
					if (tokens[i].tokenKind > SyntaxToken.Kind.LastWSToken && tokens[i].parent != null &&
						tokens[i].parent.syntaxError == null)
					{
						cut = tokens[i].parent;
						break;
					}
				}
			}
		}

		var cutThis = false;
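		// No reusable node was found before fromLine: cut the whole tree, starting at the root's first child.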
		if (cut == null)
		{
			cut = parseTree.root.ChildAt(0);
			cutThis = true;
		}

		while (cut != null)
		{
			var cutParent = cut.parent;
			if (cutParent == null)
				break;
			var cutIndex = cutThis ? cut.childIndex : cut.childIndex + 1;
			while (cutIndex > 0)
			{
				var child = cutParent.ChildAt(cutIndex - 1);
				if (child != null && !child.HasLeafs())
					--cutIndex;
				else
					break;
			}
			cutThis = cutThis && cutIndex == 0;
			if (cutIndex < cutParent.numValidNodes)
			{
				cutParent.InvalidateFrom(cutIndex);
			}
			cut = cutParent;
			cut.syntaxError = null;
		}
	}
Example #5
	public virtual void LexLine(int currentLine, FormatedLine formatedLine)
	{
		formatedLine.index = currentLine;

		if (parserThread != null)
			parserThread.Join();
		parserThread = null;

		string textLine = textBuffer.lines[currentLine];
		var lineTokens = new List<SyntaxToken>();

		if (textLine.Length == 0)
		{
			formatedLine.tokens = lineTokens;
		}
		else
		{
			//Tokenize(lineTokens, textLine, ref formatedLine.blockState);
			lineTokens.Add(new SyntaxToken(SyntaxToken.Kind.Comment, textLine) { style = textBuffer.styles.normalStyle, formatedLine = formatedLine });

			formatedLine.tokens = lineTokens;

			var lineWidth = textBuffer.CharIndexToColumn(textLine.Length, currentLine);
			if (lineWidth > textBuffer.longestLine)
				textBuffer.longestLine = lineWidth;
		}
	}
Example #6
    public override void Tokenize(string sData, FormatedLine formatedLine)
    {
        var tokens = formatedLine.tokens = new List <LexerToken>();

        int        iIndex  = 0;
        int        iLength = sData.Length;
        LexerToken token;

        TryScan_Whitespace(sData, ref iIndex, formatedLine);

        while (iIndex < iLength)
        {
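            // Small state machine over blockState: None for ordinary code, CommentBlock
            // inside a /* ... */ comment, StringBlock inside a verbatim @"..." string.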
            switch (formatedLine.blockState)
            {
            case FormatedLine.BlockState.None:
                if (TryScan_Whitespace(sData, ref iIndex, formatedLine))
                {
                    continue;
                }

                if (formatedLine.regionTree.kind > RegionTree.Kind.LastActive)
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.Comment, sData.Substring(iIndex))
                    {
                        formatedLine = formatedLine
                    });
                    iIndex = iLength;
                    break;
                }

                if (sData[iIndex] == '/' && iIndex < iLength - 1)
                {
                    if (sData[iIndex + 1] == '/')
                    {
                        tokens.Add(new LexerToken(LexerToken.Kind.Comment, "//")
                        {
                            formatedLine = formatedLine
                        });
                        iIndex += 2;
                        tokens.Add(new LexerToken(LexerToken.Kind.Comment, sData.Substring(iIndex))
                        {
                            formatedLine = formatedLine
                        });
                        iIndex = iLength;
                        break;
                    }
                    else if (sData[iIndex + 1] == '*')
                    {
                        tokens.Add(new LexerToken(LexerToken.Kind.Comment, "/*")
                        {
                            formatedLine = formatedLine
                        });
                        iIndex += 2;
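                        // The rest of the /* ... */ comment (possibly spanning more lines)
                        // is consumed by the CommentBlock case below.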
                        formatedLine.blockState = FormatedLine.BlockState.CommentBlock;
                        break;
                    }
                }

                if (sData[iIndex] == '\'')
                {
                    token = ScanCharLiteral(sData, ref iIndex);
                    tokens.Add(token);
                    token.formatedLine = formatedLine;
                    break;
                }

                if (sData[iIndex] == '\"')
                {
                    token = ScanStringLiteral(sData, ref iIndex);
                    tokens.Add(token);
                    token.formatedLine = formatedLine;
                    break;
                }

                if (iIndex < iLength - 1 && sData[iIndex] == '@' && sData[iIndex + 1] == '\"')
                {
                    token = new LexerToken(LexerToken.Kind.VerbatimStringBegin, sData.Substring(iIndex, 2))
                    {
                        formatedLine = formatedLine
                    };
                    tokens.Add(token);
                    iIndex += 2;
                    formatedLine.blockState = FormatedLine.BlockState.StringBlock;
                    break;
                }

                if (sData[iIndex] >= '0' && sData[iIndex] <= '9' ||
                    iIndex < iLength - 1 && sData[iIndex] == '.' && sData[iIndex + 1] >= '0' && sData[iIndex + 1] <= '9')
                {
                    token = ScanNumericLiteral(sData, ref iIndex);
                    tokens.Add(token);
                    token.formatedLine = formatedLine;
                    break;
                }

                token = ScanIdentifierOrKeyword(sData, ref iIndex);
                if (token != null)
                {
                    tokens.Add(token);
                    token.formatedLine = formatedLine;
                    break;
                }

                // Multi-character operators / punctuators
                // "++", "--", "<<", ">>", "<=", ">=", "==", "!=", "&&", "||", "??", "+=", "-=", "*=", "/=", "%=",
                // "&=", "|=", "^=", "<<=", ">>=", "=>", "::"
                var punctuatorStart = iIndex++;
                if (iIndex < sData.Length)
                {
                    switch (sData[punctuatorStart])
                    {
                    case '?':
                        if (sData[iIndex] == '?')
                        {
                            ++iIndex;
                        }
                        break;

                    case '+':
                        if (sData[iIndex] == '+' || sData[iIndex] == '=')
                        {
                            ++iIndex;
                        }
                        break;

                    case '-':
                        if (sData[iIndex] == '-' || sData[iIndex] == '=')
                        {
                            ++iIndex;
                        }
                        break;

                    case '<':
                        if (sData[iIndex] == '=')
                        {
                            ++iIndex;
                        }
                        else if (sData[iIndex] == '<')
                        {
                            ++iIndex;
                            if (iIndex < sData.Length && sData[iIndex] == '=')
                            {
                                ++iIndex;
                            }
                        }
                        break;

                    case '>':
                        if (sData[iIndex] == '=')
                        {
                            ++iIndex;
                        }
                        break;

                    case '=':
                        if (sData[iIndex] == '=' || sData[iIndex] == '>')
                        {
                            ++iIndex;
                        }
                        break;

                    case '&':
                        if (sData[iIndex] == '=' || sData[iIndex] == '&')
                        {
                            ++iIndex;
                        }
                        break;

                    case '|':
                        if (sData[iIndex] == '=' || sData[iIndex] == '|')
                        {
                            ++iIndex;
                        }
                        break;

                    case '*':
                    case '/':
                    case '%':
                    case '^':
                    case '!':
                        if (sData[iIndex] == '=')
                        {
                            ++iIndex;
                        }
                        break;

                    case ':':
                        if (sData[iIndex] == ':')
                        {
                            ++iIndex;
                        }
                        break;
                    }
                }
                tokens.Add(new LexerToken(LexerToken.Kind.Punctuator, sData.Substring(punctuatorStart, iIndex - punctuatorStart))
                {
                    formatedLine = formatedLine
                });
                break;

            case FormatedLine.BlockState.CommentBlock:
                int commentBlockEnd = sData.IndexOf("*/", iIndex, StringComparison.Ordinal);
                if (commentBlockEnd == -1)
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.Comment, sData.Substring(iIndex))
                    {
                        formatedLine = formatedLine
                    });
                    iIndex = iLength;
                }
                else
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.Comment, sData.Substring(iIndex, commentBlockEnd + 2 - iIndex))
                    {
                        formatedLine = formatedLine
                    });
                    iIndex = commentBlockEnd + 2;
                    formatedLine.blockState = FormatedLine.BlockState.None;
                }
                break;

            case FormatedLine.BlockState.StringBlock:
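                // Inside a verbatim string a doubled quote ("") is an escaped quote,
                // so skip doubled quotes while searching for the real closing quote.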
                int i            = iIndex;
                int closingQuote = sData.IndexOf('\"', iIndex);
                while (closingQuote != -1 && closingQuote < iLength - 1 && sData[closingQuote + 1] == '\"')
                {
                    i            = closingQuote + 2;
                    closingQuote = sData.IndexOf('\"', i);
                }
                if (closingQuote == -1)
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.VerbatimStringLiteral, sData.Substring(iIndex))
                    {
                        formatedLine = formatedLine
                    });
                    iIndex = iLength;
                }
                else
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.VerbatimStringLiteral, sData.Substring(iIndex, closingQuote - iIndex))
                    {
                        formatedLine = formatedLine
                    });
                    iIndex = closingQuote;
                    tokens.Add(new LexerToken(LexerToken.Kind.VerbatimStringLiteral, sData.Substring(iIndex, 1))
                    {
                        formatedLine = formatedLine
                    });
                    ++iIndex;
                    formatedLine.blockState = FormatedLine.BlockState.None;
                }
                break;
            }
        }
    }
Example #7
    private void Preprocess(string sData, ref int iIndex, FormatedLine formatedLine)
    {
        TryScan_Whitespace(sData, ref iIndex, formatedLine);

        var error        = false;
        var commentsOnly = false;

        int iLength = sData.Length;
        List <LexerToken> tokens = formatedLine.tokens;
        LexerToken        token  = ScanWord(sData, ref iIndex);

        if (!preprocessorKeywords.Contains(token.text))
        {
            token.tokenKind = LexerToken.Kind.PreprocessorDirectiveExpected;
            tokens.Add(token);
            token.formatedLine = formatedLine;
            error = true;
        }
        else
        {
            token.tokenKind = LexerToken.Kind.Preprocessor;
            tokens.Add(token);
            token.formatedLine = formatedLine;
            TryScan_Whitespace(sData, ref iIndex, formatedLine);

            switch (token.text)
            {
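            // Conditional-compilation and region directives open or close nodes in the
            // line's regionTree; #define/#undef also update CompilationDefines when active.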
            case "if":
                if (ParsePPOrExpression(sData, formatedLine, ref iIndex) && formatedLine.regionTree.kind <= RegionTree.Kind.LastActive)
                {
                    OpenRegion(formatedLine, RegionTree.Kind.If);
                }
                else
                {
                    OpenRegion(formatedLine, RegionTree.Kind.InactiveIf);
                }
                commentsOnly = true;
                break;

            case "elif":
                bool active = ParsePPOrExpression(sData, formatedLine, ref iIndex);
                switch (formatedLine.regionTree.kind)
                {
                case RegionTree.Kind.If:
                case RegionTree.Kind.Elif:
                case RegionTree.Kind.InactiveElif:
                    OpenRegion(formatedLine, RegionTree.Kind.InactiveElif); break;

                case RegionTree.Kind.InactiveIf:
                    // Check the enclosing region: this #elif can only become active when
                    // the region around the whole #if chain is itself active.
                    if (active && formatedLine.regionTree.parent.kind <= RegionTree.Kind.LastActive)
                    {
                        OpenRegion(formatedLine, RegionTree.Kind.Elif);
                    }
                    else
                    {
                        OpenRegion(formatedLine, RegionTree.Kind.InactiveElif);
                    }
                    break;

                default:
                    token.tokenKind = LexerToken.Kind.PreprocessorUnexpectedDirective; break;
                }
                break;

            case "else":
                if (formatedLine.regionTree.kind == RegionTree.Kind.If ||
                    formatedLine.regionTree.kind == RegionTree.Kind.Elif)
                {
                    OpenRegion(formatedLine, RegionTree.Kind.InactiveElse);
                }
                else if (formatedLine.regionTree.kind == RegionTree.Kind.InactiveIf ||
                         formatedLine.regionTree.kind == RegionTree.Kind.InactiveElif)
                {
                    if (formatedLine.regionTree.parent.kind > RegionTree.Kind.LastActive)
                    {
                        OpenRegion(formatedLine, RegionTree.Kind.InactiveElse);
                    }
                    else
                    {
                        OpenRegion(formatedLine, RegionTree.Kind.Else);
                    }
                }
                else
                {
                    token.tokenKind = LexerToken.Kind.PreprocessorUnexpectedDirective;
                }
                break;

            case "endif":
                if (formatedLine.regionTree.kind == RegionTree.Kind.If ||
                    formatedLine.regionTree.kind == RegionTree.Kind.Elif ||
                    formatedLine.regionTree.kind == RegionTree.Kind.Else ||
                    formatedLine.regionTree.kind == RegionTree.Kind.InactiveIf ||
                    formatedLine.regionTree.kind == RegionTree.Kind.InactiveElif ||
                    formatedLine.regionTree.kind == RegionTree.Kind.InactiveElse)
                {
                    CloseRegion(formatedLine);
                }
                else
                {
                    token.tokenKind = LexerToken.Kind.PreprocessorUnexpectedDirective;
                }
                break;

            case "define":
            case "undef":
            {
                var symbol = Lexer_Base.ScanIdentifierOrKeyword(sData, ref iIndex);
                if (symbol != null && symbol.text != "true" && symbol.text != "false")
                {
                    symbol.tokenKind = LexerToken.Kind.PreprocessorSymbol;
                    formatedLine.tokens.Add(symbol);
                    symbol.formatedLine  = formatedLine;
                    scriptDefinesChanged = true;

                    var inactive = formatedLine.regionTree.kind > RegionTree.Kind.LastActive;
                    if (!inactive)
                    {
                        if (token.text == "define")
                        {
                            if (!CompilationDefines.Contains(symbol.text))
                            {
                                CompilationDefines.Add(symbol.text);
                            }
                        }
                        else if (CompilationDefines.Contains(symbol.text))
                        {
                            CompilationDefines.Remove(symbol.text);
                        }
                    }
                }
            }
            break;


            case "region":
                if (formatedLine.regionTree.kind > RegionTree.Kind.LastActive)
                {
                    OpenRegion(formatedLine, RegionTree.Kind.InactiveRegion);
                }
                else
                {
                    OpenRegion(formatedLine, RegionTree.Kind.Region);
                }
                break;

            case "endregion":
                if (formatedLine.regionTree.kind == RegionTree.Kind.Region ||
                    formatedLine.regionTree.kind == RegionTree.Kind.InactiveRegion)
                {
                    CloseRegion(formatedLine);
                }
                else
                {
                    token.tokenKind = LexerToken.Kind.PreprocessorUnexpectedDirective;
                }
                break;

            case "error":
            case "warning":
                break;
            }
        }

        switch (token.text)
        {
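        // #region, #endregion, #error and #warning take free-form text: emit the rest of
        // the line as a single PreprocessorArguments token (plus trailing whitespace) and return.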
        case "region":
        case "endregion":
        case "error":
        case "warning":
            TryScan_Whitespace(sData, ref iIndex, formatedLine);
            if (iIndex < iLength)
            {
                var textArgument = sData.Substring(iIndex);
                textArgument = textArgument.TrimEnd(' ', '\t');   // TrimEnd returns a new string; assign the result
                tokens.Add(new LexerToken(LexerToken.Kind.PreprocessorArguments, textArgument)
                {
                    formatedLine = formatedLine
                });
                iIndex += textArgument.Length;   // any trailing whitespace is emitted as its own token below
                if (iIndex < iLength)
                {
                    tokens.Add(new LexerToken(LexerToken.Kind.Whitespace, sData.Substring(iIndex))
                    {
                        formatedLine = formatedLine
                    });
                }
            }
            return;
        }

        while (iIndex < iLength)
        {
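            // Tokenize whatever follows the directive: after an #if expression only a
            // // comment may follow; after an unrecognized directive every remaining
            // token is re-marked as PreprocessorDirectiveExpected.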
            if (TryScan_Whitespace(sData, ref iIndex, formatedLine))
            {
                continue;
            }

            var firstChar = sData[iIndex];
            if (iIndex < iLength - 1 && firstChar == '/' && sData[iIndex + 1] == '/')
            {
                tokens.Add(new LexerToken(LexerToken.Kind.Comment, sData.Substring(iIndex))
                {
                    formatedLine = formatedLine
                });
                break;
            }
            else if (commentsOnly)
            {
                tokens.Add(new LexerToken(LexerToken.Kind.PreprocessorCommentExpected, sData.Substring(iIndex))
                {
                    formatedLine = formatedLine
                });
                break;
            }

            if (char.IsLetterOrDigit(firstChar) || firstChar == '_')
            {
                token           = ScanWord(sData, ref iIndex);
                token.tokenKind = LexerToken.Kind.PreprocessorArguments;
                tokens.Add(token);
                token.formatedLine = formatedLine;
            }
            else if (firstChar == '"')
            {
                token           = ScanStringLiteral(sData, ref iIndex);
                token.tokenKind = LexerToken.Kind.PreprocessorArguments;
                tokens.Add(token);
                token.formatedLine = formatedLine;
            }
            else if (firstChar == '\'')
            {
                token           = ScanCharLiteral(sData, ref iIndex);
                token.tokenKind = LexerToken.Kind.PreprocessorArguments;
                tokens.Add(token);
                token.formatedLine = formatedLine;
            }
            else
            {
                token = new LexerToken(LexerToken.Kind.PreprocessorArguments, firstChar.ToString())
                {
                    formatedLine = formatedLine
                };
                tokens.Add(token);
                ++iIndex;
            }

            if (error)
            {
                token.tokenKind = LexerToken.Kind.PreprocessorDirectiveExpected;
            }
        }
    }