Example #1
 public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs, IParsingOptions options)
 {
     return new EcsLexer(text, fileName, msgs)
     {
         SpacesPerTab = options.SpacesPerTab
     };
 }
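A minimal usage sketch for the lexer that a Tokenize method like this returns. It assumes Loyc's UString can serve as the ICharSource, that ILexer<Token>.NextToken() returns a Maybe<Token>, and that the namespaces below are approximately right; treat it as an illustration, not the library's documented API.
// Sketch only: namespaces and NextToken()/Maybe<Token> are assumptions.
using System;
using Loyc;                // UString, MessageSink, Maybe<T>
using Loyc.Collections;    // ICharSource
using Loyc.Syntax.Lexing;  // ILexer<Token>, Token
using Loyc.Ecs.Parser;     // EcsLexer (namespace assumed)

class TokenizeDemo
{
    static void Main()
    {
        ICharSource text = (UString)"int x = 42; // demo";
        ILexer<Token> lexer = new EcsLexer(text, "demo.ecs", MessageSink.Trace);

        // Drain the lexer and print the raw source text of each token.
        for (var t = lexer.NextToken(); t.HasValue; t = lexer.NextToken())
            Console.WriteLine(t.Value.SourceText(text));
    }
}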
Example #2
		protected override ILexer<Token> PrepareLexer(ILexer<Token> lexer, ICharSource file, int position)
		{
			if (lexer == null)
				return new EcsLexer(file, "?", MessageSink.Trace, position);
			((EcsLexer)lexer).Reset(file, "?", position);
			return lexer;
		}
Example #3
 public TestCompiler(IMessageSink sink, ICharSource text, params string[] preOpenedNamespaces)
     : base(sink, text, preOpenedNamespaces)
 {
     MacroProcessor.PreOpenedNamespaces.Add(Loyc.LLPG.Macros.MacroNamespace);
     MacroProcessor.AbortTimeout = TimeSpan.Zero;
     AddMacros(Assembly.GetExecutingAssembly());
 }
Example #4
			public TestCompiler(IMessageSink sink, ICharSource text, string fileName = "")
				: base(sink, text, fileName)
			{
				MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude.Les"));
				MacroProcessor.PreOpenedNamespaces.Add(Loyc.LLPG.Macros.MacroNamespace);
				MacroProcessor.AbortTimeout = TimeSpan.Zero;
				AddMacros(Assembly.GetExecutingAssembly());
			}
Example #5
 public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs, IParsingOptions options)
 {
     return new Les2Lexer(text, fileName, msgs)
     {
         SkipValueParsing = options.SurfaceScanOnly,
         SpacesPerTab = options.SpacesPerTab
     };
 }
Example #6
 /// <summary>Gets the original source text for a token if available, under the
 /// assumption that the specified source file correctly specifies where the
 /// token came from. If the token is synthetic, returns <see cref="UString.Null"/>.</summary>
 public UString SourceText(ICharSource file)
 {
     if ((uint)StartIndex <= (uint)file.Count)
     {
         return file.Slice(StartIndex, Length);
     }
     return UString.Null;
 }
Example #7
 public TestCompiler(IMessageSink sink, ICharSource text, string fileName = "")
     : base(sink, text, fileName)
 {
     MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude.Les"));
     MacroProcessor.PreOpenedNamespaces.Add(Loyc.LLPG.Macros.MacroNamespace);
     MacroProcessor.AbortTimeout = TimeSpan.Zero;
     AddMacros(Assembly.GetExecutingAssembly());
 }
Example #8
 /// <summary>Gets the original source text for a token if available, under the
 /// assumption that the specified source file correctly specifies where the
 /// token came from. If the token is synthetic, returns <see cref="UString.Null"/>.</summary>
 public UString SourceText(ICharSource chars)
 {
     if ((uint)StartIndex <= (uint)chars.Count)
     {
         return chars.Slice(StartIndex, Length);
     }
     return UString.Null;
 }
Example #9
 /// <summary>Helps get the "text value" from tokens that used one of the
 /// constructors designed to support this use case, e.g.
 /// <see cref="Token(int, int, UString, NodeStyle, Symbol, int, int)"/>.
 /// If one of the other constructors was used, this function returns the same
 /// value as <see cref="SourceText(ICharSource)"/>.</summary>
 /// <param name="source">Original source code or lexer from which this token was derived.</param>
 public UString TextValue(ICharSource source)
 {
     if (SubstringOffset == 0xFF)
     {
         return ((Tuple<Symbol, UString>)_value).Item2;
     }
     return source.Slice(StartIndex + SubstringOffset, Length - SubstringOffset - SubstringOffsetFromEnd);
 }
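As the comment above says, TextValue only differs from SourceText when the token was built with the text-value constructor; otherwise it falls back to the same slice. A rough comparison sketch, written as C# top-level statements and reusing the usings (and the same assumptions) from the sketch under Example #1, plus Loyc.Syntax.Les for Les2Lexer:
// Sketch: compare SourceText and TextValue on the first token of a string literal.
// Whether they differ depends on which Token constructor the lexer used.
ICharSource src = (UString)"\"hi\\n\"";
var lexer = new Les2Lexer(src, "demo.les", MessageSink.Trace);
Token tok = lexer.NextToken().Value;      // assumes NextToken() returns Maybe<Token>
Console.WriteLine(tok.SourceText(src));   // the raw span, quotes and escape included
Console.WriteLine(tok.TextValue(src));    // the stored payload if available, else the same as SourceText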
Example #10
 protected override ILexer<Token> PrepareLexer(ILexer<Token> lexer, ICharSource file, int position)
 {
     if (lexer == null)
     {
         return new Les2Lexer(file, "?", _lexerMessageSink, position);
     }
     ((Les2Lexer)lexer).Reset(file, "?", position);
     return lexer;
 }
Example #11
 public InputOutput(ICharSource text, string fileName, IParsingService input = null, ILNodePrinter outPrinter = null, string outFileName = null)
 {
     Text                = text;
     FileName            = fileName ?? "";
     InputLang           = input;
     OutPrinter          = outPrinter;
     OutFileName         = outFileName;
     PreOpenedNamespaces = AutoPreOpenedNamespaces(FileName);
 }
Example #12
		public TestCompiler(IMessageSink sink, ICharSource text, string fileName = "")
			: base(sink, typeof(LeMP.Prelude.BuiltinMacros), new[] { new InputOutput(text, fileName) }) 
		{
			Parallel = false;
			MacroProcessor.AddMacros(typeof(LeMP.Prelude.Les.Macros));
			MacroProcessor.AddMacros(typeof(LeMP.Test.TestMacros));
			MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude"));
			MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude.Les"));
		}
Example #13
 protected override ILexer<Token> PrepareLexer(ILexer<Token> lexer, ICharSource file, int position)
 {
     if (lexer == null)
     {
         return new EcsLexer(file, "?", TraceMessageSink.Value, position);
     }
     ((EcsLexer)lexer).Reset(file, "?", position);
     return lexer;
 }
Example #14
 public TestCompiler(IMessageSink sink, ICharSource text, string fileName = "")
     : base(sink, typeof(LeMP.Prelude.BuiltinMacros), new[] { new InputOutput(text, fileName) })
 {
     Parallel = false;
     MacroProcessor.AddMacros(typeof(LeMP.Prelude.Les.Macros));
     MacroProcessor.AddMacros(typeof(LeMP.Test.TestMacros));
     MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude"));
     MacroProcessor.PreOpenedNamespaces.Add(GSymbol.Get("LeMP.Prelude.Les"));
 }
Example #15
        public void CharSourceTest()
        {
            ICharSource source = new StringCharSource(" \n\"Escaped characters: \\\"\\\\\\/\\b\\f\\n\\r\\t\\u03C0\"");
            var         param0 = new PrivateObject(JsonParser.CreateJsonParser(source));
            var         target = new JsonParser_Accessor(param0);

            ICharSource actual = target.CharSource;

            while (!actual.EndOfFile)
            {
                Assert.AreEqual(source.Current, actual.Next());
            }
        }
Example #16
 /// <summary>Reinitializes BaseLexer, as if you called the constructor again.</summary>
 protected virtual void Reset(ICharSource source, string fileName = "", int inputPosition = 0, bool newSourceFile = true)
 {
     CheckParam.IsNotNull("source", source);
     _source       = source;
     _fileName     = fileName;
     _block        = UString.Empty;
     InputPosition = inputPosition;
     _lineNumber   = 1;
     _lineStartAt  = inputPosition;
     if (newSourceFile)
     {
         _sourceFile = new LexerSourceFile(source, fileName);
     }
     else
     {
         _sourceFile = null;
     }
 }
Example #17
 public TestCompiler(IMessageSink sink, ICharSource text, params string[] preOpenedNamespaces)
     : base(sink, typeof(LeMP.Prelude.BuiltinMacros), new[] { new InputOutput(text, "TEST") })
 {
     Parallel = false;
     MacroProcessor.AddMacros(typeof(LeMP.les2.to.ecs.Macros));
     MacroProcessor.AddMacros(typeof(LeMP.les3.to.ecs.Macros));
     MacroProcessor.AddMacros(typeof(LeMP.Tests.TestMacros));
     MacroProcessor.AddMacros(typeof(LeMP.Tests.A.AliasTest));
     MacroProcessor.AddMacros(typeof(LeMP.Tests.B.AliasTest));
     // Register all the old names, as we would in production
     MacroProcessor.AddMacros(typeof(LeMP.Prelude.Les.Macros));
     MacroProcessor.AddMacros(typeof(LeMP.Prelude.Les3.Macros));
     MacroProcessor.AddMacros(typeof(LeMP.Les3.To.CSharp.Macros));
     MacroProcessor.PreOpenedNamespaces.Add((Symbol)"LeMP.Prelude");
     foreach (var ns in preOpenedNamespaces)
     {
         MacroProcessor.PreOpenedNamespaces.Add((Symbol)ns);
     }
 }
Example #18
		public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs)
		{
			var lexer = new LesLexer(text, fileName, msgs);
			return new LesIndentTokenGenerator(new WhitespaceFilter(lexer));
		}
Example #19
		public IListSource<LNode> Parse(ICharSource text, string fileName, IMessageSink msgs, ParsingMode inputType = null, bool preserveComments = true)
		{
			var lexer = Tokenize(text, fileName, msgs);
			return Parse(lexer, msgs, inputType, preserveComments);
		}
Example #20
		public IListSource<LNode> Parse(ICharSource text, string fileName, IMessageSink msgs, Symbol inputType = null)
		{
			var lexer = Tokenize(text, fileName, msgs);
			return Parse(lexer, msgs, inputType);
		}
Example #21
		public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs)
		{
			return new EcsLexer(text, fileName, msgs);
		}
Example #22
        public string ToString(Func<Token, ICharSource, string> toStringStrategy = null, ICharSource sourceCode = null)
        {
            StringBuilder sb = new StringBuilder();

            AppendTo(sb, toStringStrategy ?? Token.ToStringStrategy, sourceCode);
            return sb.ToString();
        }
Example #23
		public InputOutput(ICharSource text, string fileName, IParsingService input = null, LNodePrinter outPrinter = null, string outFileName = null)
		{
			Text = text;
			FileName = fileName ?? "";
			InputLang = input;
			OutPrinter = outPrinter;
			OutFileName = outFileName;
		}
Example #24
 public ILexer Tokenize(ICharSource text, string fileName, IMessageSink msgs)
 {
     return new LesLexer(text, fileName, msgs);
 }
Example #25
 public CalculatorLexer(ICharSource text, string fileName = "")
 {
     Src = new LexerSource(text);
 }
Example #26
        /// <summary>Expresses a token as a string, using LES printers for identifiers and literals.</summary>
        /// <remarks>Note that some Tokens do not contain enough information to
        /// reconstruct a useful token string, e.g. comment tokens do not store the
        /// comment but merely contain the location of the comment in the source code.
        /// For performance reasons, a <see cref="Token"/> does not have a reference
        /// to its source file, so this method cannot return the original string.
        /// </remarks>
        public static string ToString(Token t, ICharSource sourceCode)
        {
            if (sourceCode != null && t.EndIndex <= sourceCode.Count)
            {
                return sourceCode.Slice(t.StartIndex, t.Length).ToString();
            }

            StringBuilder sb = new StringBuilder();

            switch (t.Kind)
            {
            case TokenKind.Spaces: return (t.Value ?? " ").ToString();
            case TokenKind.Comment:
                if (t.Type() == TokenType.SLComment)
                {
                    return "// (comment)";
                }
                else
                {
                    return "/* (comment) */";
                }
            case TokenKind.Id:
                return Les2Printer.PrintId(t.Value as Symbol ?? GSymbol.Empty);
            case TokenKind.Literal:
                return Les2Printer.PrintLiteral(t.Value, t.Style);
            }
            if (t.Value != null)
            {
                if (t.Type() == TokenType.BQOperator)
                {
                    return Les2Printer.PrintString((t.Value ?? "").ToString(), '`', false);
                }
                else if (t.Type() == TokenType.Shebang)
                {
                    return "#!" + t.Value.ToString() + "\n";
                }
                return t.Value.ToString();
            }
            switch (t.Kind)
            {
            case TokenKind.LParen:       return "(";
            case TokenKind.RParen:       return ")";
            case TokenKind.LBrack:       return "[";
            case TokenKind.RBrack:       return "]";
            case TokenKind.LBrace:       return "{";
            case TokenKind.RBrace:       return "}";
            case TokenKind.Indent:       return "(Indent)";
            case TokenKind.Dedent:       return "(Dedent)";
            case TokenKind.Dot:          return "(Dot)";
            case TokenKind.Assignment:   return "(Assignment)";
            case TokenKind.Operator:     return "(Operator)";
            case TokenKind.Separator:    return "(Separator)";
            case TokenKind.AttrKeyword:  return "(AttrKeyword)";
            case TokenKind.TypeKeyword:  return "(TypeKeyword)";
            case TokenKind.OtherKeyword: return "(OtherKeyword)";
            }
            return "(Type " + t.TypeInt + ")";
        }
Example #27
        public IListSource<LNode> Parse(ICharSource text, string fileName, IMessageSink msgs, IParsingOptions options)
        {
            var lexer = Tokenize(text, fileName, msgs, options);

            return Parse(lexer, msgs, options);
        }
Example #28
        public IListSource<LNode> Parse(ICharSource text, string fileName, IMessageSink msgs, ParsingMode inputType = null, bool preserveComments = true)
        {
            var lexer = Tokenize(text, fileName, msgs);

            return Parse(lexer, msgs, inputType, preserveComments);
        }
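A hedged sketch of calling a Parse overload like this one through a concrete language service. EcsLanguageService.Value and the namespaces are assumptions drawn from memory, not from this page; substitute whatever IParsingService implementation you actually have.
// Sketch: parse EC# text into Loyc trees and print each node back out.
using System;
using Loyc;                // UString, MessageSink
using Loyc.Collections;    // ICharSource, IListSource<T>
using Loyc.Syntax;         // LNode, IParsingService
using Loyc.Ecs;            // EcsLanguageService (assumed)

class ParseDemo
{
    static void Main()
    {
        IParsingService parser = EcsLanguageService.Value;   // assumed service instance
        ICharSource text = (UString)"class Foo { int x; }";
        IListSource<LNode> nodes = parser.Parse(text, "demo.ecs", MessageSink.Trace);

        foreach (LNode node in nodes)    // IListSource<LNode> is enumerable
            Console.WriteLine(node);     // prints each node via LNode.ToString()
    }
}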
Example #29
 protected abstract ILexer<Token> PrepareLexer(ILexer<Token> oldLexer, ICharSource file, int position);
Example #30
		public static LNode ParseSingle(this IParsingService parser, ICharSource file, string fileName, IMessageSink msgs = null, Symbol inputType = null)
		{
			var e = parser.Parse(file, fileName, msgs, inputType);
			return Single(e);
		}
Example #31
 public void Reset(ICharSource source, string fileName = "", int inputPosition = 0)
 {
     base.Reset(source, fileName, inputPosition, true);
 }
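The point of exposing Reset publicly is lexer reuse, which is exactly what the PrepareLexer overrides elsewhere on this page do. A small sketch with the same assumptions and usings as the sketch under Example #1, written as top-level statements:
// Sketch: one lexer instance reused for two inputs via Reset, mirroring PrepareLexer above.
var lexer = new EcsLexer((UString)"a + b;", "first.ecs", MessageSink.Trace);
for (var t = lexer.NextToken(); t.HasValue; t = lexer.NextToken()) { /* lex the first input */ }

lexer.Reset((UString)"c * d;", "second.ecs", 0);   // rewind onto a new source
for (var t = lexer.NextToken(); t.HasValue; t = lexer.NextToken()) { /* lex the second input */ }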
Example #32
        public static JsonParser CreateJsonParser(ICharSource source)
        {
            var parser = new JsonParser(source);

            return parser;
        }
Example #33
        void AppendTo(StringBuilder sb, Func<Token, ICharSource, string> toStringStrategy, ICharSource sourceCode, int prevEndIndex = 0)
        {
            Token prev = new Token((ushort)0, prevEndIndex, 0);

            for (int i = 0; i < Count; i++)
            {
                Token t = this[i];
                if (t.StartIndex != prev.EndIndex || t.StartIndex <= 0)
                {
                    sb.Append(' ');
                }
                sb.Append(toStringStrategy(t, sourceCode));
                if (t.Value is TokenTree)
                {
                    var subtree = ((TokenTree)t.Value);
                    subtree.AppendTo(sb, toStringStrategy, sourceCode, t.EndIndex);
                    if (subtree.Count != 0)
                    {
                        t = t.WithRange(t.StartIndex, subtree.Last.EndIndex); // avoid printing unnecessary space before the closing ')' or '}'
                    }
                }
                prev = t;
            }
        }
Example #34
        public IListSource<LNode> Parse(ICharSource text, string fileName, IMessageSink msgs, Symbol inputType = null)
        {
            var lexer = Tokenize(text, fileName, msgs);

            return Parse(lexer, msgs, inputType);
        }
Example #35
 public InputOutput(ICharSource text, string fileName, IParsingService input = null, LNodePrinter outPrinter = null, string outFileName = null)
 {
     Text = text;
     FileName = fileName ?? "";
     InputLang = input;
     OutPrinter = outPrinter;
     OutFileName = outFileName;
 }
Example #36
 public EcsLexer(ICharSource text, string fileName, IMessageSink sink, int startPosition = 0) : base(text, fileName, startPosition)
 {
     ErrorSink = sink;
 }
Example #37
        /// <summary>Parses a string and expects exactly one output.</summary>
        /// <exception cref="InvalidOperationException">The output list was empty or contained multiple nodes.</exception>
        public static LNode ParseSingle(this IParsingService parser, ICharSource text, string fileName, IMessageSink msgs = null, ParsingMode inputType = null, bool preserveComments = true)
        {
            var e = parser.Parse(text, fileName, msgs, inputType, preserveComments);

            return Single(e);
        }
Example #38
		/// <summary>Parses a string and expects exactly one output.</summary>
		/// <exception cref="InvalidOperationException">The output list was empty or contained multiple nodes.</exception>
		public static LNode ParseSingle(this IParsingService parser, ICharSource text, string fileName, IMessageSink msgs = null, ParsingMode inputType = null, bool preserveComments = true)
		{
			var e = parser.Parse(text, fileName, msgs, inputType, preserveComments);
			return Single(e);
		}
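A usage sketch for this extension method, with the same caveats as the parsing sketch under Example #28 (the language-service instance and namespaces are assumptions):
// Sketch: parse exactly one statement; Single(e) throws if the parse yields zero or multiple nodes.
IParsingService parser = EcsLanguageService.Value;   // assumed service instance
LNode node = parser.ParseSingle((UString)"x * (y + 1);", "demo.ecs", MessageSink.Trace);
Console.WriteLine(node);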
Example #39
		protected abstract ILexer<Token> PrepareLexer(ILexer<Token> oldLexer, ICharSource file, int position);
Example #40
 protected JsonParser(ICharSource source)
 {
     _source = source;
 }
Example #41
        public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs)
        {
            var lexer = new LesLexer(text, fileName, msgs);

            return new LesIndentTokenGenerator(new WhitespaceFilter(lexer));
        }