public static void Transform(OpenMarkdown doc, XmlNode context)
{
    List<Tokenizer.Token> tokens = new List<Tokenizer.Token>();

    foreach (XmlNode elem in context.ChildNodes) {
        XmlText textElem = elem as XmlText;
        if (textElem != null) {
            Tokenizer engine = new Tokenizer(textElem.Value);
            for (Tokenizer.Token tok = engine.NextToken();
                 tok != null;
                 tok = engine.NextToken()) {
                tokens.Add(tok);
            }
        } else {
            tokens.Add(new Tokenizer.Token(elem));
            if (OpenMarkdown.KindOfInline(elem) != OpenMarkdown.InlineKind.Literal) {
                Transform(doc, elem);
            }
        }
    }

    ProcessTokens(tokens, doc, context);
}
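// Hedged usage sketch (not part of the original source): one plausible way to
// drive Transform from the top of a document. doc.Document is referenced by
// ProcessTokens below; starting from DocumentElement is an assumption.
static void TransformWholeDocument(OpenMarkdown doc)
{
    // Transform recurses on its own into every non-literal inline element,
    // so a single call on the root context covers the whole tree.
    Transform(doc, doc.Document.DocumentElement);
}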
private void treeView1_AfterSelect(object sender, TreeViewEventArgs e)
{
    Tokenizer.Token x = (Tokenizer.Token)e.Node.Tag;
    this.textBox1.Text = x.GetNodeType().ToString();
    if (x.GetTopNodeType() != null) {
        this.textBox1.Text += "\r\nin\r\n" + x.GetTopNodeType().ToString();
    }
}
void TokensToTree(TreeNodeCollection Tree, Tokenizer.Token Tokens)
{
    Tree.Clear();
    TreeNodeBuilder visitor = new TreeNodeBuilder(Tree);
    Tokens.InspectNodes(visitor);
    /*
     * LinkedList<Tokenizer.Token>.Enumerator x = Tokens.GetEnumerator();
     * while (x.MoveNext()) {
     *     x.Current.InspectNodes(visitor);
     * }
     */
}
void UpdateTree()
{
    Tokenizer test = new Tokenizer();
    //LinkedList<Tokenizer.Token> Tokens =
    m_Tokens = test.Tokenize(this.textBox2.Text, "");
    /*
     * LinkedList<Tokenizer.Token>.Enumerator x = m_Tokens.GetEnumerator();
     * while (x.MoveNext()) {
     *     Console.WriteLine(x.Current.GetValue(true));
     * }
     */
    treeView1.BeginUpdate();
    TokensToTree(treeView1.Nodes, m_Tokens);
    treeView1.EndUpdate();
    treeView1.Nodes[0].Expand();
}
private static void AppendSentenceEnd(List<Tokenizer.Token> tokens, ref int index,
                                      ref Tokenizer.Token tok, OpenMarkdown doc,
                                      XmlNode context, ref StringBuilder accum)
{
    // Test whether this is a sentence-ending period.
    if (index + 1 == tokens.Count) {
        return;
    }
    else if (tokens[index + 1].TokenKind == Tokenizer.Token.Kind.Whitespace) {
        if (index + 2 == tokens.Count) {
            index++;
            return;
        }

        Tokenizer.Token ntok = tokens[index + 2];
        if (ntok.TokenKind == Tokenizer.Token.Kind.BackQuote ||
            ntok.TokenKind == Tokenizer.Token.Kind.SingleQuote ||
            ntok.TokenKind == Tokenizer.Token.Kind.DoubleQuote ||
            ntok.TokenKind == Tokenizer.Token.Kind.OpenDoubleQuote) {
            AppendSpecial(OpenMarkdown.SpecialKind.EndOfSentence, doc, context, ref accum);
            tok = ntok;
            index++;
            return;
        }

        if (capOrDigRe.IsMatch(ntok.Content) && !sentEndRe.IsMatch(accum.ToString())) {
            AppendSpecial(OpenMarkdown.SpecialKind.EndOfSentence, doc, context, ref accum);
            tok = ntok;
            index++;
        }
    }
}
override public void Visit(Tokenizer.Token token)
{
    // Show the full token value only at shallow depths (1 or 2 levels deep).
    TreeNode Node = new TreeNode(token.GetValue(m_Index.Count <= 2 && 1 <= m_Index.Count));
    Node.Tag = token;

    // Walk down the TreeView to the collection addressed by m_Index.
    LinkedList<int>.Enumerator y = m_Index.GetEnumerator();
    TreeNodeCollection trc = m_Tree;
    while (y.MoveNext()) {
        trc = trc[y.Current].Nodes;
    }

    int Index = trc.Add(Node);
    m_Index.AddLast(Index);

    // Recurse into the token's children, then pop this node off the path.
    LinkedList<Tokenizer.Token>.Enumerator x = token.GetEnumerator();
    while (x.MoveNext()) {
        x.Current.InspectNodes(this);
    }
    m_Index.RemoveLast();
}
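// Hedged sketch (not part of the original source): the shape the surrounding
// TreeNodeBuilder class might have, inferred from the constructor call in
// TokensToTree and the fields used by Visit. TokenVisitor is a hypothetical
// name; the real base class is not shown in these snippets.
public class TreeNodeBuilder : TokenVisitor // hypothetical base type
{
    private readonly TreeNodeCollection m_Tree;                       // root of the TreeView
    private readonly LinkedList<int> m_Index = new LinkedList<int>(); // index path to the current node

    public TreeNodeBuilder(TreeNodeCollection tree)
    {
        m_Tree = tree;
    }

    // Visit(...) as defined above.
}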
// Parse Vars and Labels
bool Pass1()
{
    Tokenizer.Token currentToken = tokenizer.GetNextToken();
    if (currentToken.Type == Tokenizer.TokenType.Empty) {
        return false;
    }

    int instrIdx = 0;

    while (currentToken.Type != Tokenizer.TokenType.EOF &&
           currentToken.Type != Tokenizer.TokenType.Unknown) {
        // ===================================================================
        // Skip end of lines
        if (currentToken.Type == Tokenizer.TokenType.EOL) {
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse variables
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Var) {
            currentToken = tokenizer.GetNextToken();
            if (currentToken.Type == Tokenizer.TokenType.Ident) {
                if (!tables.AddVar(currentToken.Lexeme, scope, false)) {
                    errorHandler.ParserLogError("Var Already Exists");
                    return false;
                }
                if (scope != -1) {
                    if (!tables.FuncIncrementFrameSize(scope)) {
                        errorHandler.ParserLogError("Could not Find Function that Corresponds to Scope");
                        return false;
                    }
                }
            } else {
                errorHandler.ParserLogError("Ident Expected");
                return false;
            }
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse arguments
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Arg) {
            currentToken = tokenizer.GetNextToken();
            if (currentToken.Type == Tokenizer.TokenType.Ident) {
                if (scope != -1) {
                    if (!tables.AddVar(currentToken.Lexeme, scope, true)) {
                        errorHandler.ParserLogError("Arg Already Exists");
                        return false;
                    }
                    if (!tables.FuncIncrementArgFrameSize(scope)) {
                        errorHandler.ParserLogError("Could not Find Function that Corresponds to Scope");
                        return false;
                    }
                } else {
                    errorHandler.ParserLogError("Declaring Arguments outside Functions is Illegal");
                    return false;
                }
            } else {
                errorHandler.ParserLogError("Ident Expected");
                return false;
            }
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse functions
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Func) {
            if (scope != -1) {
                errorHandler.ParserLogError("Declaring Functions in Functions is Illegal");
                return false;
            }
            currentToken = tokenizer.GetNextToken();
            if (currentToken.Type == Tokenizer.TokenType.Ident) {
                if (!tables.AddFunc(currentToken.Lexeme, instrIdx, out scope)) {
                    errorHandler.ParserLogError("Function Already Exists");
                    return false;
                }
            } else {
                errorHandler.ParserLogError("Ident Expected");
                return false;
            }
            currentToken = tokenizer.GetNextToken();
        }
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_EndFunc) {
            scope = -1;
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse instructions and labels
        else if (currentToken.Type == Tokenizer.TokenType.Ident) {
            string ident = currentToken.Lexeme;
            currentToken = tokenizer.GetNextToken();
            // ===================================================================
            // Is it a label?
            if (currentToken.Type == Tokenizer.TokenType.Colon) {
                tables.AddLabel(ident, instrIdx, scope);
                currentToken = tokenizer.GetNextToken();
            }
            // ===================================================================
            // It's an instruction
            else {
                if (scope == -1 && tables.GetStartPC() == -1) {
                    tables.SetStartPC(instrIdx);
                }
                instrIdx++; // Increment counter

                // Skip to next line
                if (currentToken.Type != Tokenizer.TokenType.EOL) {
                    currentToken = tokenizer.SkipToNextLine();
                }
            }
        } else {
            errorHandler.ParserLogError("Unexpected Token");
            return false;
        }
    }

    return true;
}
// Parse instructions
bool Pass2()
{
    scope = -1;
    Instruction currentInstruction;
    Tokenizer.Token currentToken = tokenizer.GetNextToken();
    if (currentToken.Type == Tokenizer.TokenType.Empty) {
        return false;
    }

    while (currentToken.Type != Tokenizer.TokenType.EOF &&
           currentToken.Type != Tokenizer.TokenType.Unknown) {
        // ===================================================================
        // Skip end of lines
        if (currentToken.Type == Tokenizer.TokenType.EOL) {
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Skip variable declarations
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Var) {
            currentToken = tokenizer.GetNextToken(); // Skip the VAR reserved word
            currentToken = tokenizer.GetNextToken(); // Skip VAR's identifier
        }
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Arg) {
            currentToken = tokenizer.GetNextToken(); // Skip the ARG reserved word
            currentToken = tokenizer.GetNextToken(); // Skip ARG's identifier
        }
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Func) {
            currentToken = tokenizer.GetNextToken(); // Skip the FUNC reserved word
            FuncDecl func;
            if (!tables.GetFuncByIdent(currentToken.Lexeme, out func)) {
                // Should be impossible: Pass1 already registered every function
                return false;
            }
            scope = func.scope;
            currentToken = tokenizer.GetNextToken(); // Skip FUNC's identifier
        }
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_EndFunc) {
            scope = -1;
            currentToken = tokenizer.GetNextToken(); // Skip the ENDFUNC reserved word
        }
        // ===================================================================
        // Parse instructions and labels
        else if (currentToken.Type == Tokenizer.TokenType.Ident) {
            string ident = currentToken.Lexeme;
            currentToken = tokenizer.GetNextToken();
            // ===================================================================
            // Is it a label? Skip it
            if (currentToken.Type == Tokenizer.TokenType.Colon) {
                currentToken = tokenizer.GetNextToken();
            }
            // ===================================================================
            // It's an instruction
            else {
                InstrDecl instr;
                if (!tables.GetInstrLookUp(ident, out instr)) {
                    errorHandler.ParserLogError("Syntax Error");
                    return false;
                }
                currentInstruction = new Instruction();
                currentInstruction.OpCode = instr.OpCode;
                if (instr.ParamsCount > 0) {
                    currentInstruction.Values = new Value[instr.ParamsCount];
                }

                // ===================================================================
                // Parse params
                for (int i = 0; i < instr.ParamsCount; i++) {
                    // We have to skip the ','
                    if (i > 0) {
                        currentToken = tokenizer.GetNextToken();
                        if (currentToken.Type != Tokenizer.TokenType.Comma) {
                            errorHandler.ParserLogError("Comma Expected");
                            return false;
                        }
                        currentToken = tokenizer.GetNextToken();
                    }

                    Tokenizer.TokenType t = currentToken.Type;
                    int flags = instr.ParamsFlags[i];

                    // ===================================================================
                    // Is it a variable or label?
                    if (t == Tokenizer.TokenType.Ident) {
                        if ((flags & OpFlags.MemIdx) != 0) {
                            VarDecl varDecl;
                            if (!tables.GetVarByIdent(currentToken.Lexeme, out varDecl, scope)) {
                                errorHandler.ParserLogError("Variable Doesn't Exist");
                                return false;
                            }
                            if (varDecl.scope == -1) {
                                currentInstruction.Values[i].Type = OpType.AbsMemIdx;
                            } else if (varDecl.isArg) {
                                currentInstruction.Values[i].Type = OpType.ArgMemIdx;
                            } else {
                                currentInstruction.Values[i].Type = OpType.RelMemIdx;
                            }
                            currentInstruction.Values[i].StackIndex = varDecl.Idx;
                        }
                        else if ((flags & OpFlags.InstrIdx) != 0) {
                            LabelDecl label;
                            if (!tables.GetLabelByName(currentToken.Lexeme, out label, scope)) {
                                errorHandler.ParserLogError("Label Doesn't Exist");
                                return false;
                            }
                            currentInstruction.Values[i].Type = OpType.InstrIdx;
                            currentInstruction.Values[i].InstrIndex = label.Idx;
                        }
                        else if ((flags & OpFlags.FuncIdx) != 0) {
                            FuncDecl func;
                            if (!tables.GetFuncByIdent(currentToken.Lexeme, out func)) {
                                errorHandler.ParserLogError("Function Doesn't Exist");
                                return false;
                            }
                            currentInstruction.Values[i].Type = OpType.FuncIdx;
                            currentInstruction.Values[i].FunctionIndex = func.scope;
                        }
                        else if ((flags & OpFlags.HostAPICallIdx) != 0) {
                            currentInstruction.Values[i].Type = OpType.HostAPICallString;
                            currentInstruction.Values[i].StringLiteral = currentToken.Lexeme;
                            // TODO: host api calls
                        }
                    }
                    // ===================================================================
                    // Is it a literal value?
                    else if (t == Tokenizer.TokenType.Number || t == Tokenizer.TokenType.String) {
                        if ((flags & OpFlags.Literal) == 0) {
                            errorHandler.ParserLogError("Doesn't Allow Literals");
                            return false;
                        }
                        if (t == Tokenizer.TokenType.Number) {
                            if (StringUtil.IsStringFloat(currentToken.Lexeme)) {
                                float val = 0;
                                currentInstruction.Values[i].Type = OpType.Float;
                                if (float.TryParse(currentToken.Lexeme, out val)) {
                                    currentInstruction.Values[i].FloatLiteral = val;
                                } else {
                                    errorHandler.ParserLogError("Error Parsing Float Value");
                                    return false;
                                }
                            }
                            else if (StringUtil.IsStringInt(currentToken.Lexeme)) {
                                int val = 0;
                                currentInstruction.Values[i].Type = OpType.Int;
                                if (int.TryParse(currentToken.Lexeme, out val)) {
                                    currentInstruction.Values[i].IntLiteral = val;
                                } else {
                                    errorHandler.ParserLogError("Error Parsing Int Value");
                                    return false;
                                }
                            }
                            else if (StringUtil.IsStringHex(currentToken.Lexeme)) {
                                currentInstruction.Values[i].Type = OpType.Int;
                                currentInstruction.Values[i].IntLiteral = StringUtil.StrHexToInt(currentToken.Lexeme);
                            }
                            else {
                                errorHandler.ParserLogError("Error Parsing Literal Value");
                                return false;
                            }
                        } else {
                            currentInstruction.Values[i].Type = OpType.String;
                            currentInstruction.Values[i].StringLiteral = currentToken.Lexeme;
                        }
                    } else {
                        errorHandler.ParserLogError("Unexpected Token");
                        return false;
                    }
                }

                // Add the instruction to the stream
                tables.AddInstrToStream(currentInstruction);

                // Skip to next token
                currentToken = tokenizer.GetNextToken();
            }
        } else {
            errorHandler.ParserLogError("Unexpected Token");
            return false;
        }
    }

    return true;
}
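// Hedged sketch (not part of the original source): the two passes above are
// presumably run back to back by some Parse() entry point. Rewinding the
// token stream between passes is required; the Reset() name is an assumption.
public bool Parse()
{
    scope = -1;
    if (!Pass1()) {         // collect vars, args, functions, and labels
        return false;
    }
    tokenizer.Reset();       // assumed: rewind the tokenizer to the first token
    return Pass2();          // emit instructions, resolving identifiers via the tables
}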
public ExprAST(Tokenizer.Token type, List<RootAST> children) : base(children)
{
    Type = type;
}
public ExprAST(Tokenizer.Token type) : base()
{
    Type = type;
}
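// Hedged usage sketch (not part of the original source): composing an ExprAST
// from previously parsed children. The two constructor overloads above are the
// only grounded API here; the helper name and binary-node framing are assumptions.
static ExprAST BuildBinaryNode(Tokenizer.Token op, RootAST lhs, RootAST rhs)
{
    // The operator token becomes the node's Type; the operands become children.
    List<RootAST> children = new List<RootAST> { lhs, rhs };
    return new ExprAST(op, children);
}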
private static void ProcessTokens(List<Tokenizer.Token> tokens, OpenMarkdown doc,
                                  XmlNode context)
{
    // Reset the elements list, and then restore what it should look
    // like from the token stream
    OpenMarkdown.StripChildNodes(context);

    StringBuilder accum = new StringBuilder();
    Tokenizer.Token lastToken = null;

    for (int i = 0; i < tokens.Count; i++) {
        Tokenizer.Token tok = tokens[i];
        switch (tok.TokenKind) {
        case Tokenizer.Token.Kind.Backslash:
            if (i + 1 < tokens.Count) {
                accum.Append(tokens[i + 1].Content);
                i++;
            }
            break;

        case Tokenizer.Token.Kind.Referral:
            AppendNode(tok.Referral, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.DoubleDash:
            switch (doc.Config.DashesStyle) {
            case Configuration.SmartyDashes.DoubleEmdashNoEndash:
            case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
                AppendSpecial(OpenMarkdown.SpecialKind.Emdash, doc, context, ref accum);
                break;
            case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
                AppendSpecial(OpenMarkdown.SpecialKind.Endash, doc, context, ref accum);
                break;
            }
            break;

        case Tokenizer.Token.Kind.TripleDash:
            switch (doc.Config.DashesStyle) {
            case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
                AppendSpecial(OpenMarkdown.SpecialKind.Endash, doc, context, ref accum);
                break;
            case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
                AppendSpecial(OpenMarkdown.SpecialKind.Emdash, doc, context, ref accum);
                break;
            }
            break;

        case Tokenizer.Token.Kind.Ellipsis:
            AppendSpecial(OpenMarkdown.SpecialKind.Ellipsis, doc, context, ref accum);
            AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.UnbreakableSpace:
            AppendSpecial(OpenMarkdown.SpecialKind.UnbreakableSpace, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.OpenDoubleQuote:
            AppendSpecial(OpenMarkdown.SpecialKind.OpenDoubleQuote, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.CloseDoubleQuote:
            AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote, doc, context, ref accum);
            AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.SingleQuote:
            if (lastToken == null ||
                lastToken.TokenKind == Tokenizer.Token.Kind.Whitespace) {
                AppendSpecial(OpenMarkdown.SpecialKind.OpenSingleQuote, doc, context, ref accum);
                break;
            }
            else if (i + 1 == tokens.Count) {
                AppendSpecial(OpenMarkdown.SpecialKind.CloseSingleQuote, doc, context, ref accum);
                AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
                break;
            }
            else {
                Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
                switch (kind) {
                case Tokenizer.Token.Kind.QuestionMark:
                case Tokenizer.Token.Kind.ExclamationMark:
                case Tokenizer.Token.Kind.Comma:
                case Tokenizer.Token.Kind.Period:
                case Tokenizer.Token.Kind.Semicolon:
                case Tokenizer.Token.Kind.Colon:
                case Tokenizer.Token.Kind.CloseParen:
                case Tokenizer.Token.Kind.Whitespace:
                    AppendSpecial(OpenMarkdown.SpecialKind.CloseSingleQuote, doc, context, ref accum);
                    AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
                    break;
                default:
                    accum.Append(tok.Content);
                    break;
                }
            }
            break;

        case Tokenizer.Token.Kind.DoubleQuote:
            if (lastToken == null ||
                lastToken.TokenKind == Tokenizer.Token.Kind.Whitespace) {
                AppendSpecial(OpenMarkdown.SpecialKind.OpenDoubleQuote, doc, context, ref accum);
                break;
            }
            else if (lastToken != null &&
                     (lastToken.TokenKind == Tokenizer.Token.Kind.QuestionMark ||
                      lastToken.TokenKind == Tokenizer.Token.Kind.ExclamationMark ||
                      lastToken.TokenKind == Tokenizer.Token.Kind.Ellipsis ||
                      lastToken.TokenKind == Tokenizer.Token.Kind.Period)) {
                AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote, doc, context, ref accum);
                AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
                break;
            }
            else if (i + 1 == tokens.Count) {
                AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote, doc, context, ref accum);
                AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
                break;
            }
            else {
                Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
                switch (kind) {
                case Tokenizer.Token.Kind.Whitespace:
                case Tokenizer.Token.Kind.UnbreakableSpace:
                case Tokenizer.Token.Kind.QuestionMark:
                case Tokenizer.Token.Kind.ExclamationMark:
                case Tokenizer.Token.Kind.Comma:
                case Tokenizer.Token.Kind.Period:
                case Tokenizer.Token.Kind.Semicolon:
                case Tokenizer.Token.Kind.Colon:
                case Tokenizer.Token.Kind.CloseParen:
                case Tokenizer.Token.Kind.SingleQuote:
                case Tokenizer.Token.Kind.SingleDash:
                case Tokenizer.Token.Kind.DoubleDash:
                case Tokenizer.Token.Kind.TripleDash:
                case Tokenizer.Token.Kind.Ellipsis:
                case Tokenizer.Token.Kind.Referral:
                    AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote, doc, context, ref accum);
                    AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
                    break;
                default:
                    accum.Append(tok.Content);
                    break;
                }
            }
            break;

        case Tokenizer.Token.Kind.SingleDash:
        case Tokenizer.Token.Kind.BackQuote:
        case Tokenizer.Token.Kind.Comma:
        case Tokenizer.Token.Kind.Semicolon:
        case Tokenizer.Token.Kind.Colon:
        case Tokenizer.Token.Kind.OpenParen:
            accum.Append(tok.Content);
            break;

        case Tokenizer.Token.Kind.QuestionMark:
        case Tokenizer.Token.Kind.ExclamationMark:
        case Tokenizer.Token.Kind.CloseParen:
        case Tokenizer.Token.Kind.Period:
            accum.Append(tok.Content);
            AppendSentenceEnd(tokens, ref i, ref tok, doc, context, ref accum);
            break;

        case Tokenizer.Token.Kind.Text:
            if (doc.Config.UseWikiLinks) {
                Match m = wikiLinkRe.Match(tok.Content);
                if (m.Success) {
                    AppendText(doc, context, ref accum);
                    XmlElement elem = doc.CreateElement("wikilink");
                    XmlText value = doc.Document.CreateTextNode(tok.Content);
                    elem.AppendChild(value);
                    context.AppendChild(elem);
                    break;
                }
            }
            accum.Append(tok.Content);
            break;

        case Tokenizer.Token.Kind.Whitespace:
            bool append = true;
            if (doc.Config.SpacesAroundDashes && i + 1 < tokens.Count) {
                Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
                switch (kind) {
                case Tokenizer.Token.Kind.DoubleDash:
                    switch (doc.Config.DashesStyle) {
                    case Configuration.SmartyDashes.DoubleEmdashNoEndash:
                    case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
                        AppendSpecial(OpenMarkdown.SpecialKind.Emdash, doc, context, ref accum);
                        break;
                    case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
                        AppendSpecial(OpenMarkdown.SpecialKind.Endash, doc, context, ref accum);
                        break;
                    }
                    append = false;
                    break;
                case Tokenizer.Token.Kind.TripleDash:
                    switch (doc.Config.DashesStyle) {
                    case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
                        AppendSpecial(OpenMarkdown.SpecialKind.Endash, doc, context, ref accum);
                        break;
                    case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
                        AppendSpecial(OpenMarkdown.SpecialKind.Emdash, doc, context, ref accum);
                        break;
                    }
                    append = false;
                    break;
                }
                if (!append) {
                    i++;
                    if (i + 1 < tokens.Count &&
                        tokens[i + 1].TokenKind == Tokenizer.Token.Kind.Whitespace) {
                        i++;
                    }
                }
            }
            if (append) {
                accum.Append(tok.Content);
            }
            break;
        }
        lastToken = tok;
    }

    AppendText(doc, context, ref accum);
}
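// Hedged sketch (not part of the original source): the configuration knobs that
// ProcessTokens consults. Enum values and property names are taken from the code
// above; whether Config is mutable like this is an assumption.
static void ConfigureSmartTypography(OpenMarkdown doc)
{
    doc.Config.DashesStyle = Configuration.SmartyDashes.DoubleEmdashTripleEndash;
    doc.Config.UseWikiLinks = true;        // let Text tokens matching wikiLinkRe become <wikilink> elements
    doc.Config.SpacesAroundDashes = true;  // dashes surrounded by spaces still collapse to a dash special
}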
// Parse Vars and Labels
bool Pass1()
{
    Tokenizer.Token currentToken = tokenizer.GetNextToken();
    if (currentToken.Type == Tokenizer.TokenType.Empty) {
        return false;
    }

    int instrIdx = 0;

    while (currentToken.Type != Tokenizer.TokenType.EOF &&
           currentToken.Type != Tokenizer.TokenType.Unknown) {
        // ===================================================================
        // Skip end of lines
        if (currentToken.Type == Tokenizer.TokenType.EOL) {
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse variables
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Var) {
            currentToken = tokenizer.GetNextToken();
            if (currentToken.Type == Tokenizer.TokenType.Ident) {
                if (!tables.AddVar(currentToken.Lexeme)) {
                    logger.Log("Parser Error: Var already exists");
                    return false;
                }
            } else {
                logger.Log("Parser Error: Ident expected");
                return false;
            }
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Parse instructions and labels
        else if (currentToken.Type == Tokenizer.TokenType.Ident) {
            string ident = currentToken.Lexeme;
            currentToken = tokenizer.GetNextToken();
            // ===================================================================
            // Is it a label?
            if (currentToken.Type == Tokenizer.TokenType.Colon) {
                tables.AddLabel(ident, instrIdx);
                currentToken = tokenizer.GetNextToken();
            }
            // ===================================================================
            // It's an instruction
            else {
                instrIdx++; // Increment counter

                // Skip to next line
                currentToken = tokenizer.SkipToNextLine();
            }
        } else {
            logger.Log("Parser Error: unexpected token");
            return false;
        }
    }

    return true;
}
// Parse instructions
bool Pass2()
{
    Instruction currentInstruction;
    Tokenizer.Token currentToken = tokenizer.GetNextToken();
    if (currentToken.Type == Tokenizer.TokenType.Empty) {
        return false;
    }

    while (currentToken.Type != Tokenizer.TokenType.EOF &&
           currentToken.Type != Tokenizer.TokenType.Unknown) {
        // ===================================================================
        // Skip end of lines
        if (currentToken.Type == Tokenizer.TokenType.EOL) {
            currentToken = tokenizer.GetNextToken();
        }
        // ===================================================================
        // Skip variable declarations
        else if (currentToken.Type == Tokenizer.TokenType.Rsvd_Var) {
            currentToken = tokenizer.GetNextToken(); // Skip the VAR reserved word
            currentToken = tokenizer.GetNextToken(); // Skip VAR's identifier
        }
        // ===================================================================
        // Parse instructions and labels
        else if (currentToken.Type == Tokenizer.TokenType.Ident) {
            string ident = currentToken.Lexeme;
            currentToken = tokenizer.GetNextToken();
            // ===================================================================
            // Is it a label? Skip it
            if (currentToken.Type == Tokenizer.TokenType.Colon) {
                currentToken = tokenizer.GetNextToken();
            }
            // ===================================================================
            // It's an instruction
            else {
                InstrDecl instr;
                if (!tables.GetInstrLookUp(ident, out instr)) {
                    logger.Log("Parser Error: syntax error");
                    return false;
                }
                currentInstruction = new Instruction();
                currentInstruction.OpCode = instr.OpCode;
                if (instr.ParamsCount > 0) {
                    currentInstruction.Values = new Value[instr.ParamsCount];
                }

                // ===================================================================
                // Parse params
                for (int i = 0; i < instr.ParamsCount; i++) {
                    // We have to skip the ','
                    if (i > 0) {
                        currentToken = tokenizer.GetNextToken();
                        if (currentToken.Type != Tokenizer.TokenType.Comma) {
                            logger.Log("Parser Error: Comma expected");
                            return false;
                        }
                        currentToken = tokenizer.GetNextToken();
                    }

                    Tokenizer.TokenType t = currentToken.Type;
                    int flags = instr.ParamsFlags[i];

                    // ===================================================================
                    // Is it a variable or label?
                    if (t == Tokenizer.TokenType.Ident) {
                        if ((flags & OpFlags.MemIdx) != 0) {
                            VarDecl varDecl;
                            if (!tables.GetVarByIdent(currentToken.Lexeme, out varDecl)) {
                                logger.Log("Parser Error: Variable doesn't exist");
                                return false;
                            }
                            currentInstruction.Values[i].Type = OpType.MemIdx;
                            currentInstruction.Values[i].StackIndex = varDecl.Idx;
                        }
                        else if ((flags & OpFlags.InstrIdx) != 0) {
                            LabelDecl label;
                            if (!tables.GetLabelByName(currentToken.Lexeme, out label)) {
                                logger.Log("Parser Error: Label doesn't exist");
                                return false;
                            }
                            currentInstruction.Values[i].Type = OpType.InstrIdx;
                            currentInstruction.Values[i].InstrIndex = label.Idx;
                        }
                        else if ((flags & OpFlags.HostAPICallIdx) != 0) {
                            // TODO: host api calls
                        }
                    }
                    // ===================================================================
                    // Is it a literal value?
                    else if (t == Tokenizer.TokenType.Number || t == Tokenizer.TokenType.String) {
                        if ((flags & OpFlags.Literal) == 0) {
                            // TODO: log error: doesn't allow literals
                            return false;
                        }
                        if (t == Tokenizer.TokenType.Number) {
                            if (StringUtil.IsStringFloat(currentToken.Lexeme)) {
                                float val = 0;
                                currentInstruction.Values[i].Type = OpType.Float;
                                if (float.TryParse(currentToken.Lexeme, out val)) {
                                    currentInstruction.Values[i].FloatLiteral = val;
                                } else {
                                    // TODO: log error: error parsing float value
                                    return false;
                                }
                            }
                            else if (StringUtil.IsStringInt(currentToken.Lexeme)) {
                                int val = 0;
                                currentInstruction.Values[i].Type = OpType.Int;
                                if (int.TryParse(currentToken.Lexeme, out val)) {
                                    currentInstruction.Values[i].IntLiteral = val;
                                } else {
                                    // TODO: log error: error parsing int value
                                    return false;
                                }
                            }
                            else if (StringUtil.IsStringHex(currentToken.Lexeme)) {
                                currentInstruction.Values[i].Type = OpType.Int;
                                currentInstruction.Values[i].IntLiteral = StringUtil.StrHexToInt(currentToken.Lexeme);
                            }
                            else {
                                // TODO: log error: error parsing literal value
                                return false;
                            }
                        } else {
                            currentInstruction.Values[i].Type = OpType.String;
                            currentInstruction.Values[i].StringLiteral = currentToken.Lexeme;
                        }
                    } else {
                        // TODO: log error: unexpected token
                        return false;
                    }
                }

                // Add the instruction to the stream
                tables.AddInstrToStream(currentInstruction);

                // Skip to next token
                currentToken = tokenizer.GetNextToken();
            }
        } else {
            // TODO: log error: unexpected token
            return false;
        }
    }

    return true;
}