/// <summary>
/// Parses a PDF array body: keeps reading objects until ParseAny reports
/// the closing "]" delimiter by returning null.
/// </summary>
/// <param name="lexer">Lexer positioned just after the array's "[".</param>
/// <returns>A PdfArray holding the parsed elements in order.</returns>
public static PdfArray Parse(Lexical.ILexer lexer)
{
    var elements = new List<IPdfObject>();
    for (PdfObject element = PdfObject.ParseAny(lexer, "]");
         element != null;
         element = PdfObject.ParseAny(lexer, "]"))
    {
        elements.Add(element);
    }
    return new PdfArray(elements);
}
/// <summary>
/// Type-checks a binary call. Assumes exactly two arguments. The call is
/// accepted when both arguments are numeric (Int or Float), or when their
/// types match the expected left/right types exactly.
/// </summary>
/// <param name="symbols">Symbol table forwarded to the evaluator.</param>
/// <param name="funcall">Call node whose first two arguments are checked.</param>
/// <param name="l">Expected type of the first argument.</param>
/// <param name="r">Expected type of the second argument.</param>
/// <param name="funcName">Function name used in the mismatch error message.</param>
/// <param name="f">Evaluator invoked when the check passes.</param>
/// <returns>The evaluator's result, or an error result on type mismatch.</returns>
public static EvalResult binary_typeCheck(SymbolTable symbols, FunCall funcall, Lexical l, Lexical r, string funcName, EvalFunc f)
{
    var first = funcall.args[0].value;
    var second = funcall.args[1].value;
    Func<Token, bool> numeric = t => t.type == Lexical.Int || t.type == Lexical.Float;
    bool bothNumeric = numeric(first) && numeric(second);
    bool exactMatch = first.type == l && second.type == r;
    if (!bothNumeric && !exactMatch)
    {
        return new EvalResult(new Error(0, "Non-matching types for " + funcName));
    }
    return f(symbols, funcall);
}
/// <summary>
/// Parses the xref table: consumes consecutive xref sections for as long as
/// the next peeked token starts with a digit (a subsection header).
/// </summary>
/// <param name="lexer">Lexer positioned at the first xref subsection.</param>
/// <returns>A PdfXRef aggregating all parsed sections.</returns>
public static PdfXRef Parse(Lexical.ILexer lexer)
{
    var sections = new List<PdfXRefSection>();
    string token = lexer.PeekToken1();
    // BUG FIX: token[0] threw on a null/empty peek (e.g. at end of input);
    // guard before indexing so the loop terminates cleanly instead.
    while (!string.IsNullOrEmpty(token) && char.IsDigit(token[0]))
    {
        sections.Add(PdfXRefSection.Parse(lexer));
        token = lexer.PeekToken1();
    }
    return new PdfXRef(sections);
}
/// <summary>
/// Parses a hexadecimal string body: accumulates tokens until the closing
/// ">" delimiter, padding to an even digit count with a trailing '0'.
/// </summary>
/// <param name="lexer">Lexer positioned inside the hex string.</param>
/// <returns>A PdfHexadecimalString over the collected hex digits.</returns>
public static PdfHexadecimalString Parse(Lexical.ILexer lexer)
{
    var digits = new StringBuilder();
    for (string token = lexer.ReadToken(); token != ">"; token = lexer.ReadToken())
    {
        digits.Append(token);
    }
    // An odd number of digits is completed with an implicit trailing zero.
    if (digits.Length % 2 != 0)
    {
        digits.Append('0');
    }
    return new PdfHexadecimalString(digits.ToString());
}
/// <summary>
/// Runs the lexical analyzer over the program and returns every lexeme,
/// one per CRLF-terminated line, until EOF.
/// </summary>
/// <param name="program">Source text to tokenize.</param>
/// <returns>All lexemes joined with "\r\n" (each lexeme is followed by one).</returns>
public String Compile(String program)
{
    // First, feed the input to the lexical analyzer.
    Lexical lex = new Lexical(program);
    // IMPROVEMENT: string += per token is O(n^2); accumulate in a builder.
    var result = new System.Text.StringBuilder();
    LexicalToken token = lex.NextToken();
    while (token.Tipo != TipoToken.EOF)
    {
        result.Append(token.Lexema).Append("\r\n");
        token = lex.NextToken();
    }
    return result.ToString();
}
/// <summary>
/// Parses a single xref table entry: a 10-digit byte offset, a 5-digit
/// generation number, and an 'n' (in use) / 'f' (free) flag.
/// </summary>
/// <param name="objectNumber">Object number this entry describes.</param>
/// <param name="lexer">Lexer positioned at the entry's offset field.</param>
/// <returns>The populated PdfXRefEntry.</returns>
/// <exception cref="Exception">When a field has the wrong width or flag value.</exception>
public static PdfXRefEntry Parse(int objectNumber, Lexical.ILexer lexer)
{
    string offsetToken = lexer.ReadToken();
    if (offsetToken.Length != 10)
        throw new Exception("Parser error: 10 digits expected for offset in xref");
    long byteOffset = long.Parse(offsetToken);

    string generationToken = lexer.ReadToken();
    if (generationToken.Length != 5)
        throw new Exception("Parser error: 5 digits expected for generation in xref");
    int generation = int.Parse(generationToken);

    string flag = lexer.ReadToken();
    if (flag != "f" && flag != "n")
        throw new Exception("Parser error: only 'f' and 'n' are valid flags in xref");

    return new PdfXRefEntry(objectNumber, generation, byteOffset, flag == "n");
}
/// <summary>
/// Builds a tree node for the given lexical rule.
/// NOTE(review): the terminal-matching logic is entirely commented out, so the
/// method currently iterates the rule's first right-hand side without effect
/// and returns a childless node — confirm whether this is intentionally unfinished.
/// </summary>
/// <param name="tokens">Input token stream (unused by the live code path).</param>
/// <param name="lexical">Grammar rule the node wraps.</param>
/// <returns>A new Node for <paramref name="lexical"/> with no children attached.</returns>
public static Node GetTree(IEnumerable <Token> tokens, Lexical lexical)
{
    var baseNode = new Node(lexical);
    foreach (var t in lexical.Right.First().Token)
    {
        if (Grammar.IsTermanl(t))
        {
            //var t2 = tokens.Take(t.Length);
            //var tokenString = t2.Aggregate<TokenBase, StringBuilder>(new StringBuilder(), (x, y) => x.Append(y.Ch)).ToString();
            //if (tokenString == t)
            //{
            //    // baseNode.
            //}
        }
    }
    return(baseNode);
}
/// <summary>
/// Runs the lexical and syntactic phases over <paramref name="code"/> and
/// returns a report: one line per error, or a success message, followed by
/// the elapsed compilation time in seconds.
/// </summary>
/// <param name="code">Source code to compile.</param>
/// <returns>The formatted compilation report.</returns>
public static string Run(string code)
{
    long start = DateTime.Now.Ticks;
    Compiler com = new Compiler();
    Reserved.Load();
    // IMPROVEMENT: accumulate report lines in a builder instead of string +=.
    var output = new System.Text.StringBuilder();
    Lexical lexical = new Lexical(code);
    lexical.Run();
    Compiler.LINE = lexical.row;
    Compiler.COLUMN = lexical.column;
    com.Tokens = lexical.Tokens;
    foreach (Token t in com.Tokens)
    {
        Console.WriteLine(t.ToString());
    }
    com.Errors.AddRange(lexical.Errors);
    Syntactic syntatic = new Syntactic(com.Tokens);
    syntatic.Run();
    //com.Errors.AddRange(syntatic.Errors);
    foreach (Error err in lexical.Errors)
    {
        output.Append($">> Lexical Error: {err}\n");
    }
    foreach (Error err in syntatic.Errors)
    {
        output.Append($">> Syntatic Error: {err}\n");
    }
    long stop = DateTime.Now.Ticks;
    string report = output.Length == 0 ? ">> Compilado com sucesso!" : output.ToString();
    // BUG FIX: the tick delta was squeezed through Convert.ToInt32, which
    // overflows for runs longer than ~214 s; keep the math in long/double.
    double seconds = (stop - start) / 10000000.0;
    report += "\n>> Tempo de compilação: " + seconds + "s";
    return report;
}
/// <summary>
/// Parses a stream object: validates the "stream" keyword and its EOL,
/// resolves the dictionary's Length entry (directly or via an indirect
/// reference), reads that many raw bytes, then expects "endstream".
/// </summary>
/// <param name="dictionary">The stream's dictionary; must contain "Length".</param>
/// <param name="lexer">Lexer positioned at the "stream" keyword.</param>
/// <returns>The parsed PdfStream.</returns>
/// <exception cref="Exception">On malformed EOL, missing dictionary, or a missing/invalid Length entry.</exception>
public static PdfStream Parse(PdfDictionary dictionary, Lexical.ILexer lexer)
{
    lexer.Expects("stream");
    // Per the keyword rule: "stream" must be followed by \r\n or a lone \n.
    char eol = lexer.ReadChar();
    if (eol == '\r')
        eol = lexer.ReadChar();
    if (eol != '\n')
        throw new Exception(@"Parser error: stream needs to be followed by either \r\n or \n alone");
    if (dictionary == null)
        throw new Exception("Parser error: stream needs a dictionary");
    IPdfObject lengthObject = dictionary["Length"];
    if (lengthObject == null)
        throw new Exception("Parser error: stream dictionary requires a Length entry");
    int length;
    PdfIndirectReference reference = lengthObject as PdfIndirectReference;
    if (reference != null)
    {
        // Length stored as an indirect object: resolve it through the lexer.
        PdfIndirectObject lenobj = lexer.IndirectReferenceResolver
            .GetObject(reference.ObjectNumber, reference.GenerationNumber);
        // BUG FIX: the 'as' result was dereferenced unchecked, turning a
        // malformed Length object into a NullReferenceException.
        PdfNumeric len = lenobj == null ? null : lenobj.Object as PdfNumeric;
        if (len == null)
            throw new Exception("Parser error: stream Length reference does not resolve to a numeric object");
        length = int.Parse(len.ToString());
    }
    else
    {
        length = int.Parse(lengthObject.ToString());
    }
    PdfData data = PdfData.Parse(lexer, length);
    lexer.Expects("endstream");
    return new PdfStream(dictionary, data);
}
/// <summary>
/// Tries to turn <paramref name="input"/> into a token of the given type:
/// succeeds only when the regex match consumes the entire input.
/// </summary>
/// <returns>The new token, or null when the pattern does not fully match.</returns>
static Token scan(Lexical type, string regex, int line_num, string input)
{
    bool fullMatch = Regex.Match(input, regex).Value == input;
    return fullMatch ? new Token(type, line_num, input) : null;
}
/// <summary>
/// Reads exactly <paramref name="length"/> raw bytes from the lexer and
/// wraps them as stream payload data.
/// </summary>
/// <param name="lexer">Lexer positioned at the start of the payload.</param>
/// <param name="length">Number of bytes to consume.</param>
/// <returns>A PdfData wrapping the bytes read.</returns>
public static PdfData Parse(Lexical.ILexer lexer, int length)
{
    var payload = lexer.ReadBytes(length);
    return new PdfData(payload);
}
/// <summary>
/// Registers a lexical rule in the grammar, keyed by its Name.
/// NOTE(review): Dictionary.Add throws on a duplicate Name — confirm rule
/// names are unique in the rules file.
/// </summary>
/// <param name="lexical">The rule to register.</param>
// IMPROVEMENT: idiomatic modifier order ("public static", not "static public").
public static void Add(Lexical lexical)
{
    Grammar._Lexicals.Add(lexical.Name, lexical);
}
/// <summary>
/// Binds the resolver to the given lexer and eagerly loads the document's
/// cross-reference table via RetrieveXRef() so later lookups can resolve
/// indirect references.
/// </summary>
/// <param name="lexer">Lexer over the PDF document being parsed.</param>
public IndirectReferenceResolver(Lexical.ILexer lexer) { _lexer = lexer; RetrieveXRef(); }
/// <summary>
/// Wraps a single identifier/value token as an argument-less FunCall node.
/// </summary>
/// <param name="type">Lexical category of the token.</param>
/// <param name="line_num">Source line the token came from.</param>
/// <param name="value">The token's text.</param>
/// <returns>A FunCall holding just the wrapped token.</returns>
public static FunCall id_val(Lexical type, int line_num, string value) => new FunCall(new Token(type, line_num, value));
/// <summary>
/// Creates a tree node for the given lexical rule with an empty child list.
/// </summary>
/// <param name="lexical">Rule this node represents.</param>
public Node(Lexical lexical)
{
    this.Nodes = new List<Node>();
    this.lexical = lexical;
}
/// <summary>
/// Creates an assembler bound to the given lexical analyzer.
/// </summary>
/// <param name="lexical">Lexical analyzer stored in the Lexical property.</param>
public Assembler(Lexical lexical) { Lexical = lexical; }
/// <summary>
/// Loads grammar rules from Rules\JSRules.txt. Each "name ::= ..." line
/// starts a rule; bare "::= ..." lines add alternatives to the current rule;
/// "::=>", "::=-" and "::==" start terminal, regex and code rules whose
/// right-hand symbols are each added as a single-element alternative.
/// Lines with fewer than two fields or starting with "///" are skipped.
/// (Name kept as "LoadRoules" — renaming would break existing callers.)
/// </summary>
public static void LoadRoules()
{
    Isloading = true;
    try
    {
        // BUG FIX: the FileStream leaked when the StreamReader constructor
        // threw; both are now in using blocks (the reader also disposes the stream).
        using (var fileStream = new FileStream(@"Rules\JSRules.txt", FileMode.Open, FileAccess.Read))
        using (var streamReader = new StreamReader(fileStream, Encoding.UTF8))
        {
            string line;
            Lexical lex = null;
            while ((line = streamReader.ReadLine()) != null)
            {
                var split = line.Split(new char[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
                // Skip blank/short lines and "///" comments.
                if (split.Length < 2 || split[0].StartsWith("///"))
                {
                    continue;
                }
                if (split[1] == "::=")
                {
                    if (lex != null) { Grammar.Add(lex); }
                    lex = new Lexical() { Name = split[0] };
                    lex.AddRight(split.Skip(2).ToList());
                    continue;
                }
                if (split[0] == "::=")
                {
                    // BUG FIX: a continuation line before any rule header used
                    // to throw a NullReferenceException; now it is ignored.
                    if (lex != null) { lex.AddRight(split.Skip(1).ToList()); }
                    continue;
                }
                if (split[1] == "::=>")
                {
                    if (lex != null) { Grammar.Add(lex); }
                    lex = new Lexical() { Name = split[0], HasTerminals = true };
                    split.Skip(2).ToList().ForEach(x => lex.AddRight(new List<string> { x }));
                    continue;
                }
                if (split[1] == "::=-")
                {
                    if (lex != null) { Grammar.Add(lex); }
                    lex = new Lexical() { Name = split[0], IsRexEx = true, HasTerminals = false };
                    split.Skip(2).ToList().ForEach(x => lex.AddRight(new List<string> { x }));
                    continue;
                }
                if (split[1] == "::==")
                {
                    if (lex != null) { Grammar.Add(lex); }
                    lex = new Lexical() { Name = split[0], IsCode = true, HasTerminals = true };
                    split.Skip(2).ToList().ForEach(x => lex.AddRight(new List<string> { x }));
                    continue;
                }
            }
            // BUG FIX: the last rule in the file was never registered because
            // Grammar.Add only ran when the NEXT rule header was seen.
            if (lex != null) { Grammar.Add(lex); }
        }
    }
    finally
    {
        // BUG FIX: Isloading stayed true forever if loading threw.
        Isloading = false;
    }
}
/// <summary>
/// Recursively builds one node of the math expression tree from a list of
/// lexical elements: a single Literal becomes a leaf; otherwise the list is
/// split at the least-prioritized rightmost operator and the two halves are
/// built recursively (a unary operator gets only a right child).
/// </summary>
/// <param name="lexicalList">Lexical elements to build the (sub)tree from.</param>
/// <param name="master">The tree's master, stored on every node.</param>
/// <returns>The root node of the constructed subtree.</returns>
/// <exception cref="ApplicationException">Empty input, or an operator that is neither unary nor binary.</exception>
/// <exception cref="ArgumentException">A lone non-literal, adjacent literals with no operator, or missing/extra operands around an operator.</exception>
private static MathTreeNode makeMathTreeNode(List <Lexical> lexicalList, ref MathTree master)
{
    if (lexicalList.Count == 0)
    {
        throw new ApplicationException("無効な式が指定されました。");
    }
    var node = new MathTreeNode();
    node.master = master;
    if (lexicalList.Count == 1)
    {
        // Base case: a single element must be a literal leaf.
        Lexical lex = lexicalList[0];
        if (!(lex is Literal))
        {
            throw new ArgumentException("最終評価が演算子となる品詞が存在します。");
        }
        node.lex = lex;
        return(node);
    }
    else
    {
        // Split at the operator that binds loosest (rightmost on ties).
        int operatorIndex = leastPrioritizedRightOperatorIndex(lexicalList);
        if (operatorIndex == -1) // two or more terms but no operator found
        {
            throw new ArgumentException("複数のリテラルからなる式が評価されました。");
        }
        Operator op = (Operator)lexicalList[operatorIndex];
        var leftLexicalList = new List <Lexical>();
        for (int i = 0; i < operatorIndex; i++)
        {
            leftLexicalList.Add(lexicalList[i]);
        }
        var rightLexicalList = new List <Lexical>();
        for (int i = operatorIndex + 1; i < lexicalList.Count; i++)
        {
            rightLexicalList.Add(lexicalList[i]);
        }
        if (op is UnaryOperator)
        {
            // Unary: no left operand allowed, right operand required.
            if (leftLexicalList.Count != 0)
            {
                throw new ArgumentException("単項演算子の左側にオペランドを持つことはできません。");
            }
            else if (rightLexicalList.Count == 0)
            {
                throw new ArgumentException("単項演算子の右側にオペランドが存在しません。");
            }
            node.lex = op;
            node.left = null;
            node.right = makeMathTreeNode(rightLexicalList, ref master);
            return(node);
        }
        else if (op is BinaryOperator)
        {
            // Binary: both operands required.
            if (leftLexicalList.Count == 0)
            {
                throw new ArgumentException("2項演算子の左側にオペランドが存在しません。");
            }
            else if (rightLexicalList.Count == 0)
            {
                throw new ArgumentException("2項演算子の右側にオペランドが存在しません。");
            }
            node.lex = op;
            node.left = makeMathTreeNode(leftLexicalList, ref master);
            node.right = makeMathTreeNode(rightLexicalList, ref master);
            return(node);
        }
        else
        {
            throw new ApplicationException("単項演算子、2項演算子以外の品詞が、演算子として評価されました。");
        }
    }
}
// Use this for initialization
void Start()
{
    // Cache the sibling Syntactic/Lexical components once at startup.
    // NOTE(review): assumes both components exist on this GameObject —
    // GetComponent returns null otherwise; confirm the prefab setup.
    syn = gameObject.GetComponent <Syntactic>();
    lex = gameObject.GetComponent <Lexical>();
}
/// <summary>
/// Creates a named assembler; delegates storing the lexical analyzer to the
/// single-argument constructor, then records the name.
/// </summary>
/// <param name="name">Name assigned to this assembler.</param>
/// <param name="lexical">Lexical analyzer to bind.</param>
public Assembler(string name, Lexical lexical) : this(lexical) { Name = name; }
/// <summary>
/// Parses a name object by consuming the next token from the lexer.
/// </summary>
/// <param name="lexer">Lexer positioned at the name token.</param>
/// <returns>A PdfName wrapping the token text.</returns>
public static PdfName Parse(Lexical.ILexer lexer)
{
    var token = lexer.ReadToken();
    return new PdfName(token);
}
/// <summary>
/// Creates a leaf node for the given lexical rule.
/// NOTE(review): <paramref name="text"/> is accepted but never stored or
/// passed to the base constructor — confirm whether the leaf should retain
/// its lexeme text.
/// </summary>
/// <param name="lexical">Rule this leaf represents; forwarded to the base.</param>
/// <param name="text">Lexeme text (currently ignored).</param>
public Leaf(Lexical lexical, string text) : base(lexical) { }
/// <summary>
/// Demo driver: lexes, parses, semantically analyzes and finally interprets
/// a hard-coded sample program, printing errors from whichever phase fails
/// first (lexer, then syntax, then semantics) or the program's output.
/// </summary>
static void Main(string[] args)
{
    Lexical lexical = new Lexical();
    Parsers parser = new Parsers();
    string code = "func main()\n" +
                  "{\n" +
                  "photo a;\n" +
                  "photo b;\n" +
                  "photo c;\n" +
                  "a = \"test1.png\";\n" +
                  "b = \"test2.png\";\n" +
                  "c = \"test1.png\";\n" +
                  "read(c);\n" +
                  "switch(c)\n" +
                  "{\n" +
                  "case a: print(a);\n" +
                  "case b: print(b);\n" +
                  "}\n" +
                  "}\n" +
                  "main();\n";
    Console.WriteLine(code);
    // Count error-kind tokens separately: some lexical failures surface as
    // Error tokens rather than entries in ErrorSink.
    int count = 0;
    var sourceCode = new SourceCode(code);
    var tokens = lexical.LexFile(sourceCode).ToArray();
    foreach (var token in tokens)
    {
        if (token.Kind == TokenKind.Error)
        {
            count++;
        }
    }
    if (lexical.ErrorSink.Any() || count > 0)
    {
        // IMPROVEMENT: dropped the pointless $-prefixes on strings with no
        // interpolation holes (output is byte-identical).
        Console.WriteLine("\nLexer\n");
        Console.WriteLine("Error");
        foreach (var error in lexical.ErrorSink)
        {
            Console.WriteLine(new string('-', Console.WindowWidth / 3));
            WriteError(error);
        }
        Console.WriteLine(new string('-', Console.WindowWidth / 2));
        lexical.ErrorSink.Clear();
    }
    else
    {
        parser.ParseFile(sourceCode, tokens);
        Semantic.Semantic semantic = new Semantic.Semantic(parser);
        semantic.AnalyzeFile();
        // NOTE(review): syntax errors are read from the lexer's ErrorSink —
        // presumably the parser reports into the shared sink; confirm.
        if (lexical.ErrorSink.Any())
        {
            Console.WriteLine("\nSyntax\n");
            foreach (var error in lexical.ErrorSink)
            {
                Console.WriteLine(new string('-', Console.WindowWidth / 3));
                WriteError(error);
            }
            Console.WriteLine(new string('-', Console.WindowWidth / 2));
            lexical.ErrorSink.Clear();
        }
        else if (semantic.errors.Count > 0)
        {
            Console.WriteLine("\nSemantic\n");
            foreach (var e in semantic.errors.Distinct())
            {
                Console.WriteLine($"{e}");
            }
            Console.WriteLine(new string('-', Console.WindowWidth / 2));
        }
        else
        {
            Interpreter interpreter = new Interpreter(parser);
            Console.WriteLine("\nOutput\n");
            interpreter.CreateCode();
        }
    }
    Console.ReadKey();
}