Example #1
        private RuleMatchResult ValidateType(Lexer.Lexme lexme)
        {
            foreach (Lexer.LexmeType type in Types)
            {
                if (lexme.Type == type)
                {
                    StopAutoValidate = true;
                    return(RuleMatchResult.Match);
                }
            }

            if (this.Type == RuleType.ValidateAllUntilMatch && !StopAutoValidate)
            {
                return(RuleMatchResult.Waiting);
            }

            //no matches, is it still valid?
            if (this.Optional)
            {
                return(RuleMatchResult.Waiting);
            }
            else
            {
                return(RuleMatchResult.Invalid);
            }
        }
Example #2
        public Dictionary <string, Parser.Token> GetAllInherits()
        {
            //List<Parser.Token> calls = new List<Parser.Token>();
            Dictionary <string, Parser.Token> calls = new Dictionary <string, Parser.Token>();

            foreach (Parser.Token token in this.Tokens)
            {
                if (token.Type == Parser.TokenType.InheritCommand)
                {
                    string funcName = "";

                    //locate the 'inherit' instruction lexme; the token's code text
                    //tells us what is being inherited
                    for (int index = 0; index < token.Lexmes.Count; index++)
                    {
                        Lexer.Lexme lexme = token.Lexmes[index];

                        if (lexme.Type == Lexer.LexmeType.Instruction && lexme.Data.ToString() == "inherit")
                        {
                            funcName = token.Code;
                            break;
                        }
                    }

                    if (funcName.Length < 1)
                    {
                        throw new Exception("Unidentifiable form for inherit.");
                    }
                    calls.Add(funcName, token);
                }
            }

            return(calls);
        }
Example #3
        private RuleMatchResult ValidateValue(Lexer.Lexme lexme)
        {
            foreach (string value in Values)
            {
                if (lexme.Data.ToString() == value)
                {
                    StopAutoValidate = true;
                    return(RuleMatchResult.Match);
                }
            }

            if (this.Type == RuleType.ValidateAllUntilMatch && !StopAutoValidate)
            {
                return(RuleMatchResult.Waiting);
            }

            //no matches, is it still valid?
            if (this.Optional)
            {
                return(RuleMatchResult.Waiting);
            }
            else
            {
                return(RuleMatchResult.Invalid);
            }
        }
Example #4
        public Dictionary <string, Parser.Token> GetAllFunctionCalls(Parser.Token root)
        {
            //List<Parser.Token> calls = new List<Parser.Token>();
            Dictionary <string, Parser.Token> calls             = new Dictionary <string, Parser.Token>();
            Dictionary <string, int>          FuncInstanceCount = new Dictionary <string, int>();

            foreach (Parser.Token token in root.Children[1])
            {
                if (token.Type == Parser.TokenType.FunctionCall)
                {
                    string funcName = "";
                    //gotta find out the form of the function call with respect to
                    //the scope resolution operator
                    for (int index = 0; index < token.Lexmes.Count; index++)
                    {
                        Lexer.Lexme lexme = token.Lexmes[index];

                        if (lexme.Data.ToString() == "::")
                        {
                            funcName = token.Lexmes[index + 1].Data.ToString();
                            break;
                        }
                        if (lexme.Type == Lexer.LexmeType.Identifier)
                        {
                            //look ahead
                            if (token.Lexmes[index + 1].Data.ToString() == "::")
                            {
                                funcName = token.Lexmes[index + 2].Data.ToString();
                                break;
                            }
                            else
                            {
                                funcName = lexme.Data.ToString();
                                break;
                            }
                        }
                    }

                    if (funcName.Length < 1)
                    {
                        throw new Exception("Unidentifiable form for function name.");
                    }
                    if (calls.ContainsKey(funcName))
                    {
                        calls.Add(funcName + Globals.Generator.FunctionHashSymbol + FuncInstanceCount[funcName], token);
                        FuncInstanceCount[funcName] += 1;
                    }
                    else
                    {
                        calls.Add(funcName, token);
                        FuncInstanceCount.Add(funcName, 1);
                    }
                }
            }

            return(calls);
        }
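
A quick note on the duplicate handling above: the first occurrence of a function name keeps the bare name as its key, while each repeat is suffixed with Globals.Generator.FunctionHashSymbol and a running instance count. Below is a minimal, self-contained sketch of that keying scheme, using a hypothetical "#" in place of the project's actual symbol and an int in place of Parser.Token:

        using System;
        using System.Collections.Generic;

        static class CallKeyDemo
        {
            static void Main()
            {
                const string HashSymbol = "#";          //stand-in for Globals.Generator.FunctionHashSymbol

                string[] found = { "write", "read", "write", "write" };
                Dictionary <string, int> calls             = new Dictionary <string, int>();    //int stands in for Parser.Token
                Dictionary <string, int> funcInstanceCount = new Dictionary <string, int>();

                for (int position = 0; position < found.Length; position++)
                {
                    string funcName = found[position];

                    //same pattern as GetAllFunctionCalls: repeats get the hash symbol plus a count
                    if (calls.ContainsKey(funcName))
                    {
                        calls.Add(funcName + HashSymbol + funcInstanceCount[funcName], position);
                        funcInstanceCount[funcName] += 1;
                    }
                    else
                    {
                        calls.Add(funcName, position);
                        funcInstanceCount.Add(funcName, 1);
                    }
                }

                //keys now include: write, read, write#1, write#2 (enumeration order not guaranteed)
                Console.WriteLine(string.Join(", ", calls.Keys));
            }
        }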
Example #5
        public bool CombineGroupedTokens(Lexme lexme, List <Lexme> tokenList, int startLine)
        {
            if (ConsumingFlag)
            {
                //we previously started consuming, now we need to know if we can stop
                if (PairStateMachine.ValidMatchingCloseLexme(StartLexme.Type, lexme.Type))
                {
                    ConsumingFlag = false;

                    //HACK CITY!
                    if (StartLexme.Type == LexmeType.CommentLine)
                    {
                        tokenList.Add(StartLexme);
                        tokenList.Add(lexme);
                    }
                    else
                    {
                        if (StartLexme.Type == LexmeType.OpenComment)
                        {
                            StartLexme.Type = LexmeType.Comment;
                        }
                        else if (StartLexme.Type == LexmeType.Quote)
                        {
                            StartLexme.Type = LexmeType.StringLiteral;
                        }
                        StartLexme.Data.Append(lexme.Data);
                        tokenList.Add(StartLexme);
                    }
                    StartLexme = null;
                }
                else
                {
                    StartLexme.Data.Append(lexme.Data);
                }
            }
            else
            {
                //we have not opened consumption yet. Do we start?
                if (PairStateMachine.RequiresClosing(lexme.Type))
                {
                    //yes
                    ConsumingFlag = true;
                    Begin(lexme, startLine);
                }
                else
                {
                    tokenList.Add(lexme);
                }
            }


            return(ConsumingFlag);
        }
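
A hedged usage sketch of the grouping behaviour above. It assumes the fragment sits where Lexme, LexmeType and GreedyLexmePair are in scope, that Lexme.Data is a StringBuilder (as the Append/ToString calls suggest), that PairStateMachine.RequiresClosing returns true for LexmeType.Quote, and that a second Quote is accepted as the matching close; none of those details are confirmed by this listing.

        //fragment, not a full program: merges three lexmes into a single StringLiteral
        GreedyLexmePair greedy = new GreedyLexmePair();
        List <Lexme>    output = new List <Lexme>();

        Lexme open  = new Lexme { Type = LexmeType.Quote, Data = new StringBuilder("\"") };
        Lexme body  = new Lexme { Type = LexmeType.Identifier, Data = new StringBuilder("hi") };
        Lexme close = new Lexme { Type = LexmeType.Quote, Data = new StringBuilder("\"") };

        greedy.CombineGroupedTokens(open, output, 1);           //starts consuming, returns true
        greedy.CombineGroupedTokens(body, output, 1);           //appended onto the open lexme's Data
        greedy.CombineGroupedTokens(close, output, 1);          //closes the pair, returns false

        //if the assumptions hold, output now holds one lexme of type StringLiteral
        //whose Data reads "\"hi\""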
Example #6
        /// <summary>
        /// Checks each spawned grammar candidate against the active lexme: a completed candidate at the
        /// front of the list becomes an identified token, and candidates that are ruled out are removed.
        /// </summary>
        /// <param name="activeLexme">The lexme currently being examined.</param>
        /// <param name="spawned">The candidate grammars currently being validated.</param>
        /// <param name="identifiedTokens">Receives a token for each identified grammar.</param>
        private void ScanSpawnedGrammars(Lexer.Lexme activeLexme, ref LinkedList <Syntax> spawned, List <Token> identifiedTokens)
        {
            int spawnedCount = 0;
            LinkedList <Syntax> removalList = new LinkedList <Syntax>();

            foreach (Syntax spawn in spawned)
            {
                //if a spawned item is complete and it is the front of the list, we have our man
                if (spawn.IsComplete() && spawn == spawned.First.Value)
                {
                    //we got ooooooonnne!  *riiiiiiing*
                    int startIndex = spawn.StartIndex;
                    identifiedTokens.Add(new Token(spawn.LexmeToken.Lexmes, spawn.TokenID));


                    //ok, now every spawned syntax that overlaps with this one's lexme indices
                    //can also be ruled out. However, non-overlapping ones naturally come afterward so
                    //we just leave them alone. They may be validating our next token already.
                    //(Note this also removes the spawn we just identified)
                    foreach (Syntax de in spawned)
                    {
                        if (de.StartIndex <= startIndex)
                        {
                            removalList.AddLast(de);
                        }
                    }
                    spawnedCount = 0;
                }
                //nope, continue validating any noncompleted ones until they are ruled out or completed
                else
                {
                    if (!spawn.IsAcceptableInput(activeLexme) && !spawn.IsComplete())
                    {
                        //this puppy is ruled out, remove from list
                        //spawned.Remove(spawn);
                        removalList.AddLast(spawn);
                    }
                }
                spawnedCount++;
            }

            //finally, remove the unwanted fluff
            foreach (Syntax syn in removalList)
            {
                spawned.Remove(syn);
            }
        }
Example #7
        public Lexer(Stellarmass.LPC.Scanner.Scanner scannedData)
        {
            int             lineCount      = 0;
            int             tokenCount     = 0;
            GreedyLexmePair greedy         = new GreedyLexmePair();
            List <Lexme>    tempTokensList = new List <Lexme>();



            foreach (Scanner.CodeLine line in scannedData.Lines)
            {
                foreach (Scanner.Token scanned in line.Tokens)
                {
                    try{
                        Lexme lexme = new Lexme();
                        lexme.Data = scanned.Data;
                        lexme.Type = IdentifyType(scanned, lineCount + 1, greedy);
                        //this will automatically combine lexme with any previous lexmes if they are supposed to
                        //be grouped together as a string or comment
                        greedy.CombineGroupedTokens(lexme, tempTokensList, lineCount + 1);
                    }
                    catch (LexerException e)
                    {
                        throw new LexerException(e.Message, lineCount);
                    }


                    tokenCount++;
                }                        //end foreach

                //OLD POSITION


                lineCount++;
                tokenCount = 0;
            }
            //NEW POSITION
            //just in case the line ended before we could finish the token list
            var flushed = greedy.FlushGroup();          //flush once and reuse the result
            if (flushed != null)
            {
                tempTokensList.AddRange(flushed);
            }
            Lines.Add(new Line(tempTokensList, Lines.Count));
        }
Example #8
        public int SyntaxID = 0;         //0 means unknown token

        public LexmeContainer(Lexer.Lexme lexme, int index)
        {
            Lexme = lexme;
            Index = index;
        }
Example #9
        public LexmeHierarchy(List <Lexer.Lexme> lexmes, LexmeHierarchy parent, int index, int recursionDepth)
        {
            LexmeContainer ActiveContainer;

            LexmeContainers = new List <LexmeContainer>();
            Parent          = parent;


            //for(int index = 0; index < lexmes.Count; index++)
            while (index < lexmes.Count)
            {
                Lexer.Lexme lexme  = lexmes[index];                //shortcut to convert from a foreach loop that was previously used
                string      lexstr = lexme.Data.ToString();

                if (LPC.LPCKeywords.IsClosingPair(lexstr))
                {
                    if (recursionDepth < 1)
                    {
                        throw new ParserException("Unmatched " + lexstr + " found.");
                    }

                    recursionDepth--;
                    return;
                }

                LexmeContainers.Add(new LexmeContainer(lexme, index));
                ActiveContainer = LexmeContainers[LexmeContainers.Count - 1];
                this.LexmeCount++;

                if (LPC.LPCKeywords.IsOpenningPair(lexstr))
                {
                    //we have stepped into the realm of recursion, watch your step!
                    ActiveContainer.Children = new LexmeHierarchy(lexmes, this, index + 1, recursionDepth + 1);

                    //in order to track indexing correctly through recursion each hierarchy
                    //needs to store not only the number of lexmecontainers it has but the
                    //recursed number within each sub-hierarchy within each lexmecontainer.
                    //If there were no consumed lexmes in the recursion we can just remove the children stub.
                    //(UPDATE: Actually, we can't remove that stub, I need it XD )
                    this.LexmeCount += ActiveContainer.Children.LexmeCount;
                    index           += ActiveContainer.Children.LexmeCount;

                    //we know now that the next lexme is ending the recursion so we can just consume it now
                    this.LexmeCount++;
                    index++;
                    LexmeContainers.Add(new LexmeContainer(lexmes[index], index));

                    //remove empty children stub
                    //UPDATE: nevermind, I need it

                    /*if(ActiveContainer.Children.LexmeCount <= 0)
                     *      {
                     *      ActiveContainer.Children = null;
                     *      }*/
                }


                index++;
            }                    //end while

            return;
        }
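
For reference, the top-level call for a flat lexme list would presumably pass no parent, a start index of 0 and a recursion depth of 0; the lexme list itself is hypothetical here.

        //hypothetical, already-populated list of lexmes for one unit of code
        List <Lexer.Lexme> lexmes = CollectLexmes();            //CollectLexmes is a placeholder helper

        //nested opening/closing pairs (as defined by LPCKeywords) end up in the Children
        //of the container that holds the opening lexme; an unmatched closer at depth 0
        //throws a ParserException
        LexmeHierarchy root = new LexmeHierarchy(lexmes, null, 0, 0);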
Example #10
 private void Begin(Lexme start, int startLine)
 {
     StartLine  = startLine;
     StartLexme = start;
 }
Example #11
 public GreedyLexmePair()
 {
     StartLexme      = new Lexme();
     StartLexme.Type = LexmeType.none;
 }
Example #12
        /// <summary>
        /// Removes calls to the named function (in their simple, unscoped form) from the
        /// function body of the given root token.
        /// </summary>
        /// <param name="name">Name of the function whose calls should be removed.</param>
        /// <param name="root">Root token whose function-body children are searched.</param>
        private void RemoveFunctionCallFromCode(string name, Parser.Token root)
        {
            const int FUNC_BODY_TOKENS = 1;


            //foreach(Parser.Token token in root.Children[FUNC_BODY_TOKENS])
            for (int count = 0; count < root.Children[FUNC_BODY_TOKENS].Count; count++)
            {
                Parser.Token token = root.Children[FUNC_BODY_TOKENS][count];

                if (token.Type == Parser.TokenType.FunctionCall)
                {
                    string funcName = "";
                    //gotta find out the form of the function call with respect to
                    //the scope resolution operator
                    for (int index = 0; index < token.Lexmes.Count; index++)
                    {
                        Lexer.Lexme lexme = token.Lexmes[index];

                        if (lexme.Data.ToString() == "::")
                        {
                            funcName = token.Lexmes[index + 1].Data.ToString();
                            break;
                        }
                        if (lexme.Type == Lexer.LexmeType.Identifier)
                        {
                            //look ahead
                            if (token.Lexmes[index + 1].Data.ToString() == "::")
                            {
                                funcName = token.Lexmes[index + 2].Data.ToString();
                                break;
                            }
                            else
                            {
                                //found the name of the currently investigated grammar token.
                                //is it the one we are looking for?
                                funcName = lexme.Data.ToString();
                                if (name == funcName)
                                {
                                    Parser.Token funccall = root.Children[FUNC_BODY_TOKENS][count];
                                    Parser.Token pretab = null, newline = null;

                                    if (count > 0)
                                    {
                                        pretab = root.Children[FUNC_BODY_TOKENS][count - 1];
                                    }
                                    //guard the look-ahead: only peek when a token actually follows this call
                                    if (count + 1 <= root.Children[FUNC_BODY_TOKENS].Count - 1)
                                    {
                                        newline = root.Children[FUNC_BODY_TOKENS][count + 1];
                                    }


                                    //WARNING - hackish!
                                    //This parameter is used to remove the tab token and newline token that were
                                    //placed after the function back in 'PushDataToCode'. It should only be set
                                    //if you know for sure there is a tab before this function and a newline after.
                                    //It is used to remove all that extra space generated when a new function is created and then never used.

                                    //yup
                                    root.Children[FUNC_BODY_TOKENS].Remove(funccall);
                                    //if(pretab != null)	{root.Children[FUNC_BODY_TOKENS].Remove(pretab);}
                                    //if(newline != null)	{root.Children[FUNC_BODY_TOKENS].Remove(newline);}
                                }
                                break;
                            }
                        }
                    }
                }
            }
            return;
        }
Example #13
        public RuleMatchResult Validate(Lexer.Lexme lexme)
        {
            //check for the potential that the rule itself is whitespace!
            if (this.Types.Count == 1 && this.Types[0] == Lexer.LexmeType.Whitespace && lexme.Type == Lexer.LexmeType.Whitespace)
            {
                return(RuleMatchResult.Match);
            }

            //make sure any whitespace, line feeds and comments are handled properly.
            //They can never be a match but they can be flagged as valid or invalid depending
            //on the rule's settings.
            if (lexme.Type == Lexer.LexmeType.NewLine)
            {
                if ((Flags & (int)RuleFlags.DontAllowLinefeeds) != 0)
                {
                    return(RuleMatchResult.Invalid);
                }
                else
                {
                    return(RuleMatchResult.Valid);
                }
            }
            if (lexme.Type == Lexer.LexmeType.Whitespace)
            {
                if ((Flags & (int)RuleFlags.DontAllowSpaces) != 0)
                {
                    return(RuleMatchResult.Invalid);
                }
                else
                {
                    return(RuleMatchResult.Valid);
                }
            }
            if (lexme.Type == Lexer.LexmeType.Comment)
            {
                if ((Flags & (int)RuleFlags.DontAllowComments) != 0)
                {
                    return(RuleMatchResult.Invalid);
                }
                else
                {
                    return(RuleMatchResult.Valid);
                }
            }


            RuleMatchResult result = RuleMatchResult.Invalid;

            switch (MatchType)
            {
            case RuleMatchType.ByValue:
            {
                result = ValidateValue(lexme);
                break;
            }

            case RuleMatchType.ByType:
            {
                result = ValidateType(lexme);
                break;
            }
            }

            return(result);
        }
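
The linefeed, whitespace and comment checks above treat RuleFlags as combinable bit flags. Below is a small self-contained sketch of that idiom with stand-in values; the real enum is not shown in this listing and its values may differ.

        [System.Flags]
        enum RuleFlags
        {
            None               = 0,
            DontAllowLinefeeds = 1,
            DontAllowSpaces    = 2,
            DontAllowComments  = 4
        }

        static class RuleFlagsDemo
        {
            static void Main()
            {
                //combine two restrictions into the int that a rule stores in Flags
                int flags = (int)(RuleFlags.DontAllowSpaces | RuleFlags.DontAllowComments);

                //the same test Validate performs when it sees a whitespace lexme
                bool rejectsWhitespace = (flags & (int)RuleFlags.DontAllowSpaces) != 0;
                System.Console.WriteLine(rejectsWhitespace);    //True
            }
        }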
Example #14
        public bool IsAcceptableInput(Lexer.Lexme lexme)
        {
            if (Complete)
            {
                return(false);
            }
            Token.Lexmes.Add(lexme);

            //Here, we are systematically checking every rule until we have a match or an invalid lexme.
            //In the event of a match we can increment the rules-index so that we start with the next
            //rule the next time this function is called. If we have a match and it was the final rule
            //we set the completed flag.

            //Keep in mind that optional rules will still return valid if we don't have a match, this allows us
            //to keep moving forward until we match a rule or we come across a required rule that cannot be
            //matched at which point it is invalid. Thus, we only need to return a success or fail state based
            //on a found match or an invalid lexme.
            for (int i = RuleIndex; i < Rules.Count; i++)
            {
                //-slight hack-
                //we are going to remove any leading whitespace symbols by returning invalid
                if (lexme.Type == Lexer.LexmeType.Whitespace || lexme.Type == Lexer.LexmeType.NewLine)
                {
                    if (this.Length < 1)
                    {
                        return(false);
                    }
                }

                switch (Rules[i].Validate(lexme))
                {
                case RuleMatchResult.Match:
                {
                    //if we've matched all required rules, flag the construct as completed
                    if (i == Rules.Count - 1)
                    {
                        Complete = true;
                    }

                    //otherwise, we are at least one step closer
                    RuleIndex = i + 1;
                    Length++;
                    return(true);
                }

                case RuleMatchResult.Valid:
                {
                    //no match but we are still in the game
                    Length++;
                    return(true);
                }

                case RuleMatchResult.Waiting:
                {
                    RuleIndex++;
                    break;
                }

                case RuleMatchResult.Invalid:
                {
                    /*if(Rules[i].Optional)
                     *      {
                     *      RuleIndex = i + 1;
                     *      Length++;
                     *      return true;
                     *      }*/
                    ResetValidation(true);
                    return(false);
                }
                }
            }


            return(false);
        }
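
Finally, a sketch of how a caller might drive IsAcceptableInput, along the lines of ScanSpawnedGrammars in Example #6. The candidate syntax object and the lineLexmes list are assumed to exist already; this is illustrative only, not the project's actual driver.

        //feed lexmes one at a time; a false return means the candidate ruled itself out
        //(it has already called ResetValidation), IsComplete() means every rule matched
        foreach (Lexer.Lexme lexme in lineLexmes)
        {
            if (!candidate.IsAcceptableInput(lexme))
            {
                break;                                          //not this grammar
            }
            if (candidate.IsComplete())
            {
                //all rules matched; the consumed lexmes were accumulated on the
                //candidate's token (exposed as LexmeToken in Example #6)
                break;
            }
        }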