// Parses the search string into NuGetSearchTerm entries; each entry holds an optional field name plus the term/phrase text
        public List<NuGetSearchTerm> Parse(string searchTerm)
        {
            var results = new List<NuGetSearchTerm>();
            _tokenizer = new Tokenizer(searchTerm);

            // Walk the token stream until EOF; each iteration parses either a
            // field-scoped term or a bare term/phrase.
            while (_tokenizer.Peek() != TokenType.Eof)
            {
                var term = new NuGetSearchTerm();
                var parsed = _tokenizer.Peek() == TokenType.Field
                    ? ParseField(term)
                    : ParseTermOrPhrase(term);

                // Only keep terms the sub-parsers accepted.
                if (parsed)
                {
                    results.Add(term);
                }
            }

            return results;
        }
			// Parses an amulet: "( skill,points[,skill,points] )" style integer lists
			// followed by a jewel-id list. Returns false on malformed input;
			// 'result' is always assigned (out-parameter contract).
			public static bool Parse(ref Tokenizer tokenizer, out Amulet result) {
				result = new Amulet();
				result.effects = new List < Effect > ();
				result.jewel_ids = new List < int > ();

				// BUGFIX: the return value of Expect was previously ignored, so a
				// missing '(' was silently accepted (the closing Expect below shows
				// the return value is meaningful).
				if (!tokenizer.Expect(TokenName.OPEN_PARENTHESIS)) {
					return false;
				}

				List < int > temp;
				if (!tokenizer.ExpectIntegerList(out temp)) {
					return false;
				}

				// Each effect is a (skill_id, points) pair; at most two effects are
				// read (indices 0-1 and 2-3), matching the original behavior.
				for (int i = 0; i + 1 < temp.Count && i < 4; i += 2) {
					Effect effect = new Effect();
					effect.skill_id = temp[i];
					effect.points = temp[i + 1];
					result.effects.Add(effect);
				}

				if (!tokenizer.ExpectIntegerList(out result.jewel_ids)) {
					return false;
				}
				return tokenizer.Expect(TokenName.CLOSE_PARENTHESIS);
			}
Example #3
0
        // Tokenizes a sample Lua source file and dumps every significant token
        // (symbol, span, lexeme) to a text file for manual inspection.
        public void FirstTest()
        {
            var luaFile = TestUtils.GetTestPath(@"lua-5.2.1-tests\literals.lua");

            var engine = Lua.CreateEngine();
            var context = Lua.GetLuaContext(engine);
            var unit = context.CreateFileUnit(luaFile);
            // Ignores (rather than fails) the test when the file is unavailable.
            var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader);
            Console.WriteLine("Reading data from {0}", new Uri(luaFile));

            var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });
            tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);

            // BUGFIX: write to the system temp directory instead of the hard-coded
            // (and possibly nonexistent) C:\tmp so the test runs on any machine.
            var fname = Path.Combine(Path.GetTempPath(), "tokenizer.txt");
            using (var fout = File.CreateText(fname))
            {
                // Skip whitespace/newline tokens; stop at EOF.
                foreach (var token in tokenizer.EnumerateTokens().TakeWhile(t => t.Symbol != Symbol.Eof))
                {
                    if (token.Symbol == Symbol.Whitespace)
                        continue;
                    if (token.Symbol == Symbol.EndOfLine)
                        continue;

                    fout.Write("{0,-12}", token.Symbol);
                    fout.Write("{0,-10}", token.Span.Start);
                    fout.Write("{0,-10}", token.Span.End);
                    fout.Write("{0}", token.Lexeme);

                    fout.WriteLine();
                }
            }
            Console.WriteLine("Written results to {0}", new Uri(fname));
        }
        // Tokenizer state handler (state names follow the HTML5 "script data
        // double escaped" family): consumes one character and dispatches on it.
        public override void Read(Tokenizer t)
        {
            // Null means end of input.
            char? c = t.ConsumeChar();

            switch(c){
                case Chars.HYPHEN_MINUS:
                    // '-' may start a "-->" sequence; track it in the dash state
                    // while still emitting the character.
                    t.ChangeTokenState<ScriptDataDoubleEscapedDashState>();
                    t.EmitToken(Chars.HYPHEN_MINUS);
                    return;
                case Chars.LESS_THAN_SIGN:
                    // '<' may start "</script"; dedicated state decides.
                    t.ChangeTokenState<ScriptDataDoubleEscapedLessThanSignState>();
                    t.EmitToken(Chars.LESS_THAN_SIGN);
                    return;
                case Chars.NULL:
                    // NUL is a parse error; emit U+FFFD in its place.
                    OnMessageRaised(new NullInScriptError());
                    t.EmitToken(Chars.REPLACEMENT_CHARACTER);
                    return;
                case null:
                    // End of input inside script data: report, rewind one char,
                    // and reprocess in the data state.
                    OnMessageRaised(new SuddenlyEndAtScriptError());

                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    return;
                default:
                    // Any other character passes through unchanged.
                    t.EmitToken(c);
                    return;
            }
        }
 // Reads all headers into a dictionary. Later duplicates of a key overwrite
 // earlier values; leading whitespace is trimmed from each value.
 internal static Dictionary<string, string> ReadHeaders(Tokenizer reader)
 {
     var s = BeginReadHeaders(reader);
     var result = new Dictionary<string, string>();
     // TrimStart() trims whitespace exactly like TrimStart(new char[0]) did,
     // without allocating an empty array per header.
     ReadHeaders(reader, s, (key, value) => result[key] = value.TrimStart());
     return result;
 }
示例#6
0
 // Once all-whitespace input is exhausted, NextToken must return Eof on
 // every subsequent call, not just the first.
 public void BlankStringReturnsEof()
 {
     var tokenizer = new Tokenizer("    ");
     var labels = new[] { "First Call", "Second Call", "Third Call" };

     foreach (var label in labels)
     {
         Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)), label);
     }
 }
示例#7
0
 // Tokenizes the raw tag text, primes a parse helper over it, and delegates
 // to the constructed tag parser.
 public ITag Parse(string tag)
 {
     var tokenizer = new Tokenizer(tag, true, true, null, TagLibConstants.SEPERATORS, TagLibConstants.LITERALS, null);
     var helper = new ParseHelper(tokenizer);
     helper.Init();

     var parser = Construct(helper, _factory.GetNewLocator());
     return parser.Parse();
 }
示例#8
0
        // Parses the given Lua file and asserts that a syntax error is reported.
        // 'useLua52' toggles Lua 5.2 language features in the compiler options.
        public void ParserErrorReportTests(string luaFile, bool useLua52)
        {
            var options = new LuaCompilerOptions()
            {
                SkipFirstLine = true,
                UseLua52Features = useLua52,
            };

            var engine = Lua.CreateEngine();
            var context = Lua.GetLuaContext(engine);
            var sourceUnit = context.CreateFileUnit(luaFile);

            // Ignores (rather than fails) the test when the file is unavailable.
            var reader = TestUtils.OpenReaderOrIgnoreTest(sourceUnit.GetReader);

            var tokenizer = new Tokenizer(ErrorSink.Default, options);
            tokenizer.Initialize(null, reader, sourceUnit, SourceLocation.MinValue);
            var parser = new Parser(tokenizer, tokenizer.ErrorSink, options);

            // The parse itself must raise the syntax error; the AST assertion only
            // runs if parsing unexpectedly succeeds.
            TestUtils.AssertSyntaxError(() =>
            {
                var ast = parser.Parse();
                Assert.That(ast, Is.Not.Null);
            });
        }
示例#9
0
        // Tokenizer state handler: one '-' has been seen inside a comment
        // (state names suggest the HTML5 "comment end dash" state).
        public override void Read(Tokenizer t)
        {
            char? c = t.ConsumeChar();
            switch(c){
                case Chars.HYPHEN_MINUS:
                    // Second consecutive '-': "--" may close the comment.
                    t.ChangeTokenState<CommentEndState>();
                    return;
                case Chars.NULL:
                    // NUL is an error: the pending '-' plus U+FFFD join the
                    // comment text, then resume normal comment scanning.
                    OnMessageRaised(new NullInCommentError());
                    t.CurrentCommentToken.Append(Chars.HYPHEN_MINUS);
                    t.CurrentCommentToken.Append(Chars.REPLACEMENT_CHARACTER);

                    t.ChangeTokenState<CommentState>();
                    return;
                case null:
                    // End of input: emit the unfinished comment, rewind, and
                    // reprocess in the data state.
                    OnMessageRaised(new SuddenlyEndAtCommentError());
                    t.EmitToken();
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    return;
                default:
                    // Any other char: the pending '-' and this char both belong
                    // to the comment text.
                    t.CurrentCommentToken.Append(Chars.HYPHEN_MINUS);
                    t.CurrentCommentToken.Append(c);
                    t.ChangeTokenState<CommentState>();
                    return;
            }
        }
示例#10
0
		// Opens 'sourceFile', runs the tokenizer to exhaustion and prints the
		// token and error-token counts. Reports error 2001 when the file
		// cannot be opened.
		void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
		{
			Stream input;

			try {
				input = File.OpenRead (sourceFile.Name);
			} catch {
				Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
				return;
			}

			using (input){
				SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
				var file = new CompilationSourceFile (module, sourceFile);

				Tokenizer lexer = new Tokenizer (reader, file, session, ctx.Report);
				int token, tokens = 0, errors = 0;

				// Pull tokens until EOF, counting total tokens and error tokens.
				while ((token = lexer.token ()) != Token.EOF){
					tokens++;
					if (token == Token.ERROR)
						errors++;
				}
				Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
			}
			// (redundant trailing 'return;' removed — method is void)
		}
        // Tokenizer state handler: accumulates tag-name letters into
        // TemporaryBuffer to decide whether the tag is "script" (entering the
        // double-escaped state) or not (falling back to the escaped state).
        public override void Read(Tokenizer t)
        {
            char? c = t.ConsumeChar();

            if(c.IsLatinCapitalLetter()){
                // Uppercase letters are lowercased for the buffer but emitted as-is.
                t.TemporaryBuffer += c.ToLower();
                t.EmitToken(c);
                return;
            } else if(c.IsLatinSmallLetter()){
                t.TemporaryBuffer += c;
                t.EmitToken(c);
                return;
            }

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                case Chars.SOLIDUS:
                case Chars.GREATER_THAN_SIGN:
                    // This state only appends ASCII letters to the buffer, so an
                    // ordinal comparison is the correct (and cheaper) choice for
                    // this non-linguistic check; InvariantCulture (CA1309) could
                    // in principle apply culture-invariant equivalences here.
                    if(t.TemporaryBuffer.Equals("script", StringComparison.Ordinal)){
                        t.ChangeTokenState<ScriptDataDoubleEscapedState>();
                    } else {
                        t.ChangeTokenState<ScriptDataEscapedState>();
                    }
                    t.EmitToken(c);
                    return;
                default:
                    // Anything else: rewind and reprocess in the escaped state.
                    t.UnConsume(1);
                    t.ChangeTokenState<ScriptDataEscapedState>();
                    return;
            }
        }
示例#12
0
        // Demo entry point: renders a small template twice with different models
        // and waits for a key press before exiting.
        static void Main(string[] args)
        {
            // Template source using IF/EVAL/FOREACH directives.
            string sampleText = @"<html>
            <head>
            <title>[% IF Title %][% EVAL Title %][% ELSE %] Some Stuff! [% END %]</title>
            </head>
            <body>
            <h1>[% EVAL content %]</h1>
            <ul>[% FOREACH Foos %]<li>[% Name %]</li>[% END %]</ul>
            </body>
            </html>";
            // NOTE(review): s, t and bpn are constructed but never used again —
            // presumably a demonstration of the low-level scan/tokenize/parse
            // pipeline; confirm whether they can be removed.
            Scanner s = new Scanner(sampleText);
            Tokenizer t = new Tokenizer(s);
            BlockParseNode bpn = new BlockParseNode(t);

            // Model without a title: exercises the ELSE branch.
            Program p1 = new Program()
            {
                HasTitle = false,
                Content = 3.1415,
                Foos = Enumerable.Range(0, 10).Select(i => new Foo() { Name = i.ToString() }).ToList()
            };

            // Model with a title: exercises the IF branch.
            Program p2 = new Program()
            {
                HasTitle = true,
                Title = "Example",
                Foos = Enumerable.Range(10, 10).Select(i => new Foo() { Name = i.ToString() }).ToList()
            };

            // Template re-parses sampleText itself and renders both models.
            Template template = new Template(new StringReader(sampleText));
            template.Render(p1, Console.Out);
            template.Render(p2, Console.Out);

            Console.ReadKey();
        }
示例#13
0
 // "--alpha 123" must split into a long-option token, its value token, and
 // then report end of input.
 public void TokenizeOneLongOption()
 {
     var tokenizer = new Tokenizer("--alpha 123");

     AssertTokenEqual(TokenKind.LongOption, "alpha", tokenizer.NextToken);
     AssertTokenEqual(TokenKind.Value, "123", tokenizer.NextToken);

     Assert.IsTrue(tokenizer.AtEnd);
 }
示例#14
0
        // Tokenizes code41.txt and verifies the exact token-type sequence.
        public void TokenizeCode3()
        {
            // Expected token types, in stream order.
            Token.TokenType[] expected =
            {
                Token.TokenType.BUILT_IN_TYPE_NAME,
                Token.TokenType.NAME,
                Token.TokenType.ASSIGNMENT,
                Token.TokenType.NUMBER,
                Token.TokenType.NEW_LINE,
                Token.TokenType.BUILT_IN_TYPE_NAME,
                Token.TokenType.NAME,
                Token.TokenType.ASSIGNMENT,
                Token.TokenType.QUOTED_STRING,
                Token.TokenType.NEW_LINE,
                Token.TokenType.BUILT_IN_TYPE_NAME,
                Token.TokenType.NAME,
                Token.TokenType.PARANTHESIS_LEFT,
                Token.TokenType.PARANTHESIS_RIGHT,
                Token.TokenType.NEW_LINE,
                Token.TokenType.NAME,
                Token.TokenType.ASSIGNMENT,
                Token.TokenType.NUMBER,
                Token.TokenType.EOF,
            };

            List<Token> tokens;
            // BUGFIX: dispose the reader (File.OpenText was previously leaked).
            using (TextReader reader = File.OpenText("code41.txt"))
            {
                Assert.IsNotNull(reader);
                Tokenizer t = new Tokenizer(s_errorHandler, true);
                tokens = t.process(reader);
            }

            // BUGFIX: Assert.AreEqual takes (expected, actual); the original had
            // the arguments reversed, which garbles failure messages.
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.AreEqual(expected[i], tokens[i].getTokenType());
            }
        }
示例#15
0
        // With DelimitAndInclude, tokens must alternate between whitespace and
        // non-whitespace, and concatenating every token must reproduce the
        // original input exactly.
        public void TestTokenizerBasicWhitespaceSeparatedStringsWithWhitespaceIncluded()
        {
            var input = "one two      three \n\n    \t   four\t\t\tfive\t\t\tsix";
            Tokenizer<Token> tokenizer = new Tokenizer<Token>();
            tokenizer.WhitespaceBehavior = WhitespaceBehavior.DelimitAndInclude;
            var tokens = tokenizer.Tokenize(input);

            // StringBuilder avoids O(n^2) string concatenation in the loop.
            var reconstructed = new System.Text.StringBuilder();
            // Null until the first token has been classified.
            bool? lastTokenWasWhitespace = null;
            foreach (var token in tokens)
            {
                if (token.Value == null)
                {
                    Assert.Fail("Unexpected null valued token");
                }
                else if (string.IsNullOrWhiteSpace(token.Value))
                {
                    lastTokenWasWhitespace = true;
                }
                else
                {
                    // Two non-whitespace tokens in a row means the tokenizer
                    // failed to delimit.
                    if (lastTokenWasWhitespace.HasValue && lastTokenWasWhitespace.Value == false)
                    {
                        Assert.Fail("2 consecutive non-whitespace tokens encountered.");
                    }
                    lastTokenWasWhitespace = false;
                }

                reconstructed.Append(token.Value);
                // (unused 'lastToken' accumulator removed)
            }

            Assert.AreEqual(input, reconstructed.ToString());
        }
示例#16
0
        // UnknownTokenException must report the correct line, column, and token
        // text for input the matchers cannot handle.
        public void BadTokenPosition()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            try
            {
                tokenizer.Tokenize("5 A");
                // BUGFIX: without this, the test silently passed when no
                // exception was thrown at all.
                Assert.Fail("Expected UnknownTokenException for input \"5 A\"");
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(3, ex.Position.Column);
                Assert.AreEqual(1, ex.Position.Line);
                Assert.AreEqual("A", ex.Token);
            }

            try
            {
                tokenizer.Tokenize("5 4\r\n2\r\n   X\r\n5");
                Assert.Fail("Expected UnknownTokenException for multi-line input");
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(4, ex.Position.Column);
                Assert.AreEqual(3, ex.Position.Line);
                Assert.AreEqual("X", ex.Token);
            }
        }
        // Integer and decimal matchers must capture numeric literals with
        // various suffixes; each input tokenizes to value, whitespace, value.
        public void NumericLiterals()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new DecimalLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            // Each pair is tokenized as "<first> <second>".
            string[][] cases =
            {
                new[] { "10", "10.0" },
                new[] { "10m", "10ul" },
                new[] { "10f", "10l" },
            };

            foreach (string[] pair in cases)
            {
                Token[] tokens = tokenizer.Tokenize(pair[0] + " " + pair[1]);

                // Middle token is the whitespace separator.
                Assert.AreEqual(3, tokens.Length);
                Assert.AreEqual(pair[0], tokens[0].Text);
                Assert.AreEqual(pair[1], tokens[2].Text);
            }
        }
示例#18
0
 // Initializes the designer-generated components, then the collaborators
 // stored in fields for later use.
 public Form1()
 {
     InitializeComponent();

     tokenizer = new Tokenizer();
     geocoder = new Geocoder();
     geocoderService = new GeocoderService();
 }
        // Tokenizer state handler: a '<' was seen inside escaped script data;
        // decide between a potential end tag ("</...") and the start of a
        // double-escape sequence ("<x...").
        public override void Read(Tokenizer t)
        {
            char? c = t.ConsumeChar();

            if(c.IsLatinCapitalLetter()){
                // Begin buffering a (lowercased) tag name while re-emitting the
                // raw characters unchanged.
                t.TemporaryBuffer = "";
                t.TemporaryBuffer += c.ToLower();
                t.EmitToken(Chars.LESS_THAN_SIGN);
                t.EmitToken(c);
                t.ChangeTokenState<ScriptDataDoubleEscapeStartState>();
                return;
            } else if(c.IsLatinSmallLetter()){
                t.TemporaryBuffer = "";
                t.TemporaryBuffer += c;
                t.EmitToken(Chars.LESS_THAN_SIGN);
                t.EmitToken(c);
                t.ChangeTokenState<ScriptDataDoubleEscapeStartState>();
                return;
            }

            switch(c){
                case Chars.SOLIDUS:
                    // "</" — possible end tag; start with an empty name buffer.
                    t.TemporaryBuffer = "";
                    t.ChangeTokenState<ScriptDataEscapedEndTagOpenState>();
                    return;
                default:
                    // Just a literal '<': emit it, rewind, and reprocess this
                    // character in the escaped state.
                    t.EmitToken(Chars.LESS_THAN_SIGN);
                    t.UnConsume(1);
                    t.ChangeTokenState<ScriptDataEscapedState>();
                    return;
            }
        }
        // Matches a delimiter-quoted string at the current position; returns a
        // QuotedString token holding the text between the delimiters, or null
        // when nothing matched.
        protected override Token IsMatchImpl( Tokenizer tokenizer )
        {
            var str = new StringBuilder();

            if( tokenizer.Current == StringDelim )
            {
                // Skip the opening delimiter.
                tokenizer.Consume();

                // Accumulate until the closing delimiter or end of stream.
                while( !tokenizer.EndOfStream() && tokenizer.Current != StringDelim )
                {
                    str.Append( tokenizer.Current );
                    tokenizer.Consume();
                }

                // Skip the closing delimiter when present (absent at stream end).
                if( tokenizer.Current == StringDelim )
                {
                    tokenizer.Consume();
                }
            }

            // NOTE(review): an empty quoted string ("") falls through to null
            // here, i.e. it is treated as "no match" — confirm this is intended.
            if( str.Length > 0 )
            {
                return new Token( TokenType.QuotedString, str.ToString() );
            }

            return null;
        }
示例#21
0
File: Lexer.cs  Project: cj525/yaclops
        // Tokenizes the whole input up front and appends a synthetic
        // end-of-input marker so consumers never run off the end of the list.
        public Lexer(string text)
        {
            var tokenizer = new Tokenizer(text);
            foreach (var token in tokenizer.Tokenize())
            {
                _tokenList.Add(token);
            }

            var endMarker = new Token { Kind = TokenKind.EndOfInput, RawInput = "<end>" };
            _tokenList.Add(endMarker);
        }
示例#22
0
        // End-to-end check: tokenize code4.txt, parse it, and verify the shape
        // of the AST (program root -> statement list -> operator with operands
        // "a" and "b").
        public void CreatingSimpleTree()
        {
            Tokenizer tokenizer = new Tokenizer(s_errorHandler, true);

            // BUGFIX: dispose the file reader (File.OpenText was previously leaked).
            List<Token> tokens;
            using (var sourceReader = File.OpenText("code4.txt"))
            {
                tokens = tokenizer.process(sourceReader);
            }

            Parser parser = new Parser(tokens, s_errorHandler);
            parser.process();

            // Debug aid: prints the tree to the console.
            ASTPainter p = new ASTPainter();
            p.PaintAST(parser.getAST());

            AST root = parser.getAST();
            Assert.AreEqual(Token.TokenType.PROGRAM_ROOT, root.getTokenType());

            AST statementList = root.getChild(0);
            Assert.AreEqual(Token.TokenType.STATEMENT_LIST, statementList.getTokenType());

            AST multiplicationTree = statementList.getChild(1);
            Assert.AreEqual(Token.TokenType.OPERATOR, multiplicationTree.getTokenType());

            AST operand1 = multiplicationTree.getChild(0);
            AST operand2 = multiplicationTree.getChild(1);
            Assert.AreEqual("a", operand1.getTokenString());
            Assert.AreEqual("b", operand2.getTokenString());
        }
示例#23
0
 // Character reference.
 // Consumes a character reference and returns the referenced-character token;
 // returns null on failure (comments translated from Japanese).
 protected ReferencedCharacterToken ConsumeCharacterReference(Tokenizer t)
 {
     char? c = t.ConsumeChar();
     // An "additional allowed character" (set by the caller's context) means
     // the ampersand is literal text here, not a reference.
     if(t.AdditionalAllowedCharacter != null && c == t.AdditionalAllowedCharacter){
         // Not a character reference. No characters are consumed, and nothing is returned. (This is not an error, either.)
         OnMessageRaised(new RawAmpersandWarning());
         t.UnConsume(1);
         return null;
     }
     switch(c){
         case Chars.AMPERSAND:
         case Chars.LINE_FEED:
         case Chars.FORM_FEED:
         case Chars.SPACE:
         case Chars.LESS_THAN_SIGN:
         case null:
             // Not a character reference. No characters are consumed, and nothing is returned. (This is not an error, either.)
             OnMessageRaised(new RawAmpersandWarning());
             t.UnConsume(1);
             return null;
         case Chars.NUMBER_SIGN:
             // "&#..." — numeric reference.
             return ConsumeNumericCharacterReference(t);
         default:
             // "&name;" — named reference.
             return ConsumeNamedCharacterReference(t);
     }
 }
示例#24
0
 // Drops a duplicate attribute from the current tag token and reports it.
 // NOTE(review): the attribute is dropped *before* the error is raised, so
 // the Name passed to DuplicateAttributeError is read after the drop —
 // confirm CurrentAttribute still refers to the duplicate at that point.
 private void CheckDuplicateAttribute(Tokenizer t)
 {
     if(t.CurrentTagToken.IsDuplicateAttribute){
         t.CurrentTagToken.DropAttribute();
         OnMessageRaised(new DuplicateAttributeError(t.CurrentTagToken.CurrentAttribute.Name));
     }
 }
示例#25
0
 // Tokenizer state handler: a '<' has been consumed; decide what kind of
 // markup follows (declaration, end tag, start tag, or plain text).
 public override void Read(Tokenizer t)
 {
     char? c = t.ConsumeChar();
     switch(c){
         case Chars.EXCLAMATION_MARK:
             // "<!" — markup declaration.
             t.ChangeTokenState<MarkupDeclarationOpenState>();
             return;
         case Chars.SOLIDUS:
             // "</" — end tag.
             t.ChangeTokenState<EndTagOpenState>();
             return;
         case Chars.QUESTION_MARK:
             // "<?" — processing instructions are an error; skip as bogus comment.
             OnMessageRaised(new ProcessingInstructionError());
             t.ChangeTokenState<BogusCommentState>();
             return;
     }
     if(c.IsLatinCapitalLetter()){
         // Tag names are stored lowercased.
         t.CurrentToken = new StartTagToken(){Name = c.ToLower().ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     } else if(c.IsLatinSmallLetter()){
         t.CurrentToken = new StartTagToken(){Name = c.ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     }
     // Not markup after all: report, rewind, and emit the '<' as literal text.
     OnMessageRaised(new UnknownMarkupError());
     t.UnConsume(1);
     t.ChangeTokenState<DataState>();
     t.EmitToken(Chars.LESS_THAN_SIGN);
     return;
 }
        // Matches the fixed keyword in 'Match' at the current position. Unless
        // AllowAsSubString is set, the keyword must be followed by whitespace,
        // end of input, or a special character (so "if" does not match "iffy").
        protected override Token IsMatchImpl( Tokenizer tokenizer )
        {
            // Consume the keyword character by character; bail on first mismatch.
            // NOTE(review): characters consumed before a mismatch are not
            // un-consumed here — presumably the caller resets the position on a
            // null return; confirm.
            foreach( var character in Match )
            {
                if( tokenizer.Current == character.ToString( CultureInfo.InvariantCulture ) )
                {
                    tokenizer.Consume();
                }
                else
                {
                    return null;
                }
            }

            bool found;

            if( !AllowAsSubString )
            {
                // Inspect what follows the keyword to reject sub-string matches.
                var next = tokenizer.Current;

                found = string.IsNullOrWhiteSpace( next ) || SpecialCharacters.Any( character => character.Match == next );
            }
            else
            {
                found = true;
            }

            if( found )
            {
                return new Token( myTokenType, Match );
            }

            return null;
        }
        // Tokenizer state handler for the region after a DOCTYPE identifier:
        // skips whitespace, closes the token on '>', and handles EOF/garbage.
        public override void Read(Tokenizer t)
        {
            char? c = t.ConsumeChar();

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                    // Whitespace is ignored here.
                    return;
                case Chars.GREATER_THAN_SIGN:{
                    // '>' completes the DOCTYPE; emit it and return to data state.
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                }
                case null:
                    // EOF: flag quirks mode on the DOCTYPE, emit it, and rewind.
                    OnMessageRaised(new SuddenlyEndAtDoctypeError());
                    ((DoctypeToken)t.CurrentToken).ForceQuirks = true;
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                default:
                    // Unexpected character: skip the rest as a bogus DOCTYPE.
                    OnMessageRaised(new UnknownIdentifierAfterDoctypeError());
                    t.ChangeTokenState<BogusDoctypeState>();
                    return;
            }
        }
示例#28
0
 // Tokenizer state handler: "</" has been consumed; read the end tag name or
 // recover from empty/invalid end tags.
 public override void Read(Tokenizer t)
 {
     char? c = t.ConsumeChar();
     switch(c){
         case Chars.GREATER_THAN_SIGN:
             // "</>" — empty end tag: report it and drop it entirely.
             OnMessageRaised(new EmptyEndTagError());
             t.ChangeTokenState<DataState>();
             return;
         case null:
             // EOF right after "</": emit both characters as literal text,
             // rewind, and reprocess in the data state.
             OnMessageRaised(new SuddenlyEndAtTagError());
             t.EmitToken(Chars.LESS_THAN_SIGN);
             t.EmitToken(Chars.SOLIDUS);
             t.UnConsume(1);
             t.ChangeTokenState<DataState>();
             return;
     }
     if(c.IsLatinCapitalLetter()){
         // Tag names are stored lowercased.
         t.CurrentToken = new EndTagToken(){Name = c.ToLower().ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     } else if(c.IsLatinSmallLetter()){
         t.CurrentToken = new EndTagToken(){Name = c.ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     }
     // Anything else: treat the remainder as a bogus comment.
     OnMessageRaised(new UnknownEndTagError());
     t.ChangeTokenState<BogusCommentState>();
     return;
 }
示例#29
0
        // Compiles 'sourceUnit' end to end (tokenize -> parse -> generate) and
        // expects a syntax error to be raised somewhere along the way.
        public void GeneratorTest(SourceUnit sourceUnit, bool useLua52)
        {
            var options = new LuaCompilerOptions()
            {
                SkipFirstLine = true,
                UseLua52Features = useLua52,
            };

            // Ignores (rather than fails) the test when the source is unavailable.
            var reader = TestUtils.OpenReaderOrIgnoreTest(sourceUnit.GetReader);

            TestUtils.AssertSyntaxError(() =>
            {
                var tokenizer = new Tokenizer(ErrorSink.Default, options);
                tokenizer.Initialize(null, reader, sourceUnit, SourceLocation.MinValue);

                var parser = new Parser(tokenizer, tokenizer.ErrorSink, options);
                var ast = parser.Parse();
                Assert.That(ast, Is.Not.Null);

                var codeContext = new CodeContext((LuaContext)sourceUnit.LanguageContext);
                var generator = new Generator(codeContext);
                var expression = generator.Compile(ast, sourceUnit);
                Assert.That(expression, Is.Not.Null);
            });
        }
        // Tokenizer state handler for just after a quoted attribute value:
        // expects whitespace, '/', or '>'; anything else is a missing-space error.
        public override void Read(Tokenizer t)
        {
            char? c = t.ConsumeChar();

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                    // Whitespace separates attributes.
                    t.ChangeTokenState<BeforeAttributeNameState>();
                    return;
                case Chars.SOLIDUS:
                    // '/' may begin a self-closing tag ("/>").
                    t.ChangeTokenState<SelfClosingStartTagState>();
                    return;
                case Chars.GREATER_THAN_SIGN:
                    // Tag finished: emit it and return to the data state.
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                case null:
                    // EOF inside a tag: report, rewind, resume in the data state.
                    OnMessageRaised(new SuddenlyEndAtAttributeError());
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    return;
                default:
                    // Missing whitespace between attributes: reprocess this char
                    // as the start of the next attribute name.
                    OnMessageRaised(new MissingSpaceAfterAttributeValueError(c));
                    t.UnConsume(1);
                    t.ChangeTokenState<BeforeAttributeNameState>();
                    return;
            }
        }
示例#31
0
        // Returns classification spans (syntax coloring) for 'span', incrementally
        // re-tokenizing only the part of the buffer beyond the last known-valid
        // position. Returns an empty list while classification is suspended or
        // for short, pure-whitespace requests.
        public virtual IList <ClassificationSpan> GetClassificationSpans(SnapshotSpan span)
        {
            List <ClassificationSpan> classifications = new List <ClassificationSpan>();

            if (_suspended)
            {
                return(classifications);
            }

            ITextSnapshot textSnapshot = TextBuffer.CurrentSnapshot;

            // Tiny spans that are only whitespace need no classification.
            if (span.Length <= 2)
            {
                string ws = textSnapshot.GetText(span);
                if (String.IsNullOrWhiteSpace(ws))
                {
                    return(classifications);
                }
            }

            // Token collection at this point contains valid tokens at least to a point
            // of the most recent change. We can reuse existing tokens but may also need
            // to tokenize to get tokens for the recently changed range.
            if (span.End > _lastValidPosition)
            {
                // Span is beyond the last position we know about. We need to tokenize new area.
                // tokenize from end of the last good token. If last token intersected last change
                // it would have been removed from the collection by now.

                int tokenizeFrom   = Tokens.Count > 0 ? Tokens[Tokens.Count - 1].End : new SnapshotPoint(textSnapshot, 0);
                var tokenizeAnchor = GetAnchorPosition(tokenizeFrom);

                // If the anchor moved back (e.g. into a multi-line construct),
                // discard tokens from the anchor onward and retokenize from there.
                if (tokenizeAnchor < tokenizeFrom)
                {
                    Tokens.RemoveInRange(TextRange.FromBounds(tokenizeAnchor, span.End));
                    RemoveSensitiveTokens(tokenizeAnchor, Tokens);

                    tokenizeFrom = tokenizeAnchor;
                    VerifyTokensSorted();
                }

                var newTokens = Tokenizer.Tokenize(new TextProvider(TextBuffer.CurrentSnapshot), tokenizeFrom, span.End - tokenizeFrom);
                if (newTokens.Count > 0)
                {
                    Tokens.Add(newTokens);
                    _lastValidPosition = newTokens[newTokens.Count - 1].End;
                }
            }

            var tokensInSpan = Tokens.ItemsInRange(TextRange.FromBounds(span.Start, span.End));

            // Composite tokens expand into multiple classifications.
            foreach (var token in tokensInSpan)
            {
                var compositeToken = token as ICompositeToken;

                if (compositeToken != null)
                {
                    AddClassificationFromCompositeToken(classifications, textSnapshot, compositeToken);
                }
                else
                {
                    AddClassificationFromToken(classifications, textSnapshot, token);
                }
            }

            return(classifications);
        }
示例#32
0
        // Group: Functions
        // __________________________________________________________________________


        /* Constructor: ParsedClassPrototype
         * Creates a new parsed prototype.
         *
         * Parameters:
         *    prototype - The tokenized prototype to wrap. Sections start out
         *                null and are computed later (lazily).
         */
        public ParsedClassPrototype(Tokenizer prototype)
        {
            tokenizer = prototype;
            sections  = null;
        }
示例#33
0
        // Exercises the query tokenizer: operators, single/double quoting,
        // escape sequences, whitespace handling, and a batch of malformed
        // inputs that must throw ParseException.
        public void Tokenizer()
        {
            // Basic fields, operators ($=, >, ^=), both quote styles.
            var tokenizer = new Tokenizer(@"Field=Value,AnotherField    $= 'AnotherValue'| NumericField> 5, DoubleQuoted ^= ""double quoted""");
            var tokens = tokenizer.Tokenize();
            string[] expected =
            {
                "Field", "=", "Value", ",", "AnotherField", "$=", "AnotherValue",
                "|", "NumericField", ">", "5", ",", "DoubleQuoted", "^=", "double quoted",
            };
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.Equal(expected[i], tokens[i]);
            }
            Assert.Equal(expected.Length, tokens.Count);

            // Escapes inside quotes, escaped quotes, and unquoted escapes.
            tokenizer = new Tokenizer(@"EscapedField='hoi\'test\\more'    ,ComplicatedField=""test\\\"""" | withoutQuotes=hello\,more");
            tokens = tokenizer.Tokenize();
            expected = new[]
            {
                "EscapedField", "=", @"hoi'test\more", ",",
                "ComplicatedField", "=", @"test\""", "|",
                "withoutQuotes", "=", "hello,more",
            };
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.Equal(expected[i], tokens[i]);
            }
            Assert.Equal(expected.Length, tokens.Count);

            // Unquoted values are trimmed but keep interior whitespace; quoted
            // values keep leading/trailing spaces.
            tokenizer = new Tokenizer(@"Field=Name With Whitespace,   AnotherField ^=  Prefixed and Suffixed space   | QuotedField='   Includes space  '");
            tokens = tokenizer.Tokenize();
            expected = new[]
            {
                "Field", "=", "Name With Whitespace", ",",
                "AnotherField", "^=", "Prefixed and Suffixed space", "|",
                "QuotedField", "=", "   Includes space  ",
            };
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.Equal(expected[i], tokens[i]);
            }
            Assert.Equal(expected.Length, tokens.Count);

            // Malformed inputs must throw ParseException.
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=test, NoValue=").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=test, Missing'Operator'").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=test, Missing='Operator', InSecond'Field'").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=test, Unexpected=End,").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=test, Unexpected=End,").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed='test', Invalid='Quoting").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed='test', Invalid='Quoting\'").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"Seed=""test"", Invalid='Quoting""").Tokenize());
            Assert.Throws <ParseException>(() => new Tokenizer(@"WithoutValue=     ").Tokenize());
            // A quoted all-whitespace value, by contrast, is legal.
            Assert.DoesNotThrow(() => new Tokenizer(@"WithoutValue='        '").Tokenize());
        }
示例#34
0
        public void Initialize()
        {
            // Build a fresh tokenizer from a default Setting for each run.
            tokenizer = new Tokenizer(new Setting());
        }
示例#35
0
        private static string Tokenize(string text)
        {
            // Lex the input, render every token via ToString(), and join the
            // results with ';' separators.
            var tokenized = new Tokenizer(text).Tokenize();
            var rendered  = AsEnumerable(tokenized)
                            .Select(token => token.ToString())
                            .ToArray();

            return string.Join(";", rendered);
        }
示例#36
0
 /// <summary>
 /// bas2prg entry point: prints a usage banner when fewer than two arguments
 /// are given; otherwise preprocesses args[0] (a BASIC source file) and writes
 /// the tokenized program bytes to args[1].
 /// </summary>
 static void Main(string[] args)
 {
     try
     {
         if (args.Length < 2)
         {
             Console.WriteLine("bas2prg");
             Console.WriteLine("A commandline CBM Basic compiler.");
             Console.WriteLine("By Six/Style 2016-2018");
             Console.WriteLine("usage: bas2prg basfile.bas prgfile.prg");
             Console.WriteLine();
         }
         else
         {
             //var basic = new BASIC();
             var pp               = new PreProcessor();
             var basfile          = File.ReadAllLines(args[0]).ToList();
             var compilerSettings = new CompilerSettings();
             var p = pp.PreProcess(basfile, ref compilerSettings);
             // FIX: the original "(true) ? SixBASICTokenizer.Tokenize(...) :
             // Tokenizer.Tokenize(...)" ternary was always true; the dead
             // legacy Tokenizer branch has been removed.
             var bytes = SixBASICTokenizer.Tokenize(p, ref compilerSettings);
             File.WriteAllBytes(args[1], bytes);
         }
     }
     catch (Exception e)
     {
         // Last-resort handler: report the failure and exit normally.
         Console.WriteLine(e);
     }
 }
示例#37
0
 /// <summary>
 /// Creates a reader over the given tokenizer and stores the token id
 /// for later use.
 /// </summary>
 public PropertyTokenReader(Tokenizer tokenizer, TokenId id)
     : base(tokenizer)
 {
     this._id = id;
 }
示例#38
0
        public void WhiteSpace_are_skipped(string text)
        {
            // Whitespace-only input should leave the tokenizer positioned at
            // end-of-stream immediately.
            var tokenizer = new Tokenizer(text);

            tokenizer.Current.Should().Be(TokenType.EOS);
        }
示例#39
0
文件: Engine.cs 项目: IGCBOOM/Skrypt
        //List<SkryptClass> Classes = new List<SkryptClass>();

        /// <summary>
        /// Wires up the engine pipeline (tokenizer, statement/expression/general/
        /// method parsers, analyzer, executor), registers the standard methods and
        /// the native System object into the global scope, and installs the token
        /// rules used for lexing.
        /// NOTE(review): whether an earlier rule takes precedence over a later one
        /// depends on Tokenizer.AddRule's matching strategy — confirm before
        /// reordering any of the rules below.
        /// </summary>
        public SkryptEngine()
        {
            tokenizer        = new Tokenizer(this);
            statementParser  = new StatementParser(this);
            expressionParser = new ExpressionParser(this);
            generalParser    = new GeneralParser(this);
            methodParser     = new MethodParser(this);
            analizer         = new Analizer(this);
            executor         = new Executor(this);
            standardMethods  = new StandardMethods(this);

            standardMethods.AddMethodsToEngine();

            //Constants["_PI"] = new Numeric(Math.PI);
            //Constants["_E"] = new Numeric(Math.E);

            // Expose every property of the native System object as a global
            // (constant) variable.
            SkryptObject SystemObject = ObjectGenerator.MakeObjectFromClass(typeof(Library.Native.System));

            foreach (SkryptProperty property in SystemObject.Properties)
            {
                GlobalScope.AddVariable(property.Name, property.Value, true);
            }

            // Tokens that are found using a token rule with type defined as 'null' won't get added to the token list.
            // This means you can ignore certain characters, like whitespace in this case, that way.
            tokenizer.AddRule(
                new Regex(@"\s"),
                TokenTypes.None
                );

            // Integer or decimal numeric literal (no sign, no exponent).
            tokenizer.AddRule(
                new Regex(@"\d+(\.\d+)?"),
                TokenTypes.NumericLiteral
                );

            // Identifiers: letter/underscore start, then letters/digits/underscores.
            tokenizer.AddRule(
                new Regex(@"[_a-zA-Z]+[_a-zA-Z0-9]*"),
                TokenTypes.Identifier
                );

            // NOTE(review): this pattern has no word boundaries, so whether e.g.
            // "iffy" lexes as a keyword or an identifier depends on the
            // tokenizer's rule-precedence/longest-match behavior — confirm.
            tokenizer.AddRule(
                new Regex(@"class|func|if|elseif|else|while"),
                TokenTypes.Keyword
                );

            tokenizer.AddRule(
                new Regex("true|false"),
                TokenTypes.BooleanLiteral
                );

            tokenizer.AddRule(
                new Regex("null"),
                TokenTypes.NullLiteral
                );

            // Statement terminator.
            tokenizer.AddRule(
                new Regex(@"[;]"),
                TokenTypes.EndOfExpression
                );

            // Multi-character operators listed before the single-character class
            // within the same alternation.
            tokenizer.AddRule(
                new Regex(@"(return)|(&&)|(\|\|)|(\|\|\|)|(==)|(!=)|(>=)|(<=)|(<<)|(>>)|(>>>)|(\+\+)|(--)|[~=:<>+\-*/%^&|!\[\]\(\)\.\,{}]"),
                TokenTypes.Punctuator
                );

            // Double-quoted string; (?<!\\) lets escaped quotes stay inside.
            tokenizer.AddRule(
                new Regex(@""".*?(?<!\\)"""),
                TokenTypes.StringLiteral
                );

            // Multi line comment
            tokenizer.AddRule(
                new Regex(@"\/\*(.|\n)*\*\/"),
                TokenTypes.None
                );

            // Single line comment
            // NOTE(review): this pattern requires a trailing '\n', so a //-comment
            // at end-of-input may not be matched — confirm against Tokenizer use.
            tokenizer.AddRule(
                new Regex(@"\/\/.*\n"),
                TokenTypes.None
                );
        }
示例#40
0
        /// <summary>
        /// Randomized test of SynonymFilter: builds a random synonym map over a
        /// small alphabet, analyzes a random document with it, and verifies the
        /// output against the SlowSynMatcher reference implementation.
        /// </summary>
        public virtual void TestRandom()
        {
            int alphabetSize = TestUtil.NextInt32(Random, 2, 7);

            int docLen = AtLeast(3000);
            //final int docLen = 50;

            string document = GetRandomString('a', alphabetSize, docLen);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: doc=" + document);
            }

            int numSyn = AtLeast(5);
            //final int numSyn = 2;

            IDictionary <string, OneSyn> synMap = new Dictionary <string, OneSyn>();
            IList <OneSyn> syns  = new List <OneSyn>();
            bool           dedup = Random.nextBoolean();

            if (VERBOSE)
            {
                Console.WriteLine("  dedup=" + dedup);
            }
            b = new SynonymMap.Builder(dedup);
            for (int synIDX = 0; synIDX < numSyn; synIDX++)
            {
                // Synonym inputs draw from the document alphabet ('a'...);
                // outputs draw from digits ('0'...) so they cannot collide.
                string synIn = GetRandomString('a', alphabetSize, TestUtil.NextInt32(Random, 1, 5)).Trim();
                if (!synMap.TryGetValue(synIn, out OneSyn s) || s == null)
                {
                    s     = new OneSyn();
                    s.@in = synIn;
                    syns.Add(s);
                    s.@out        = new List <string>();
                    synMap[synIn] = s;
                    s.keepOrig    = Random.nextBoolean();
                }
                string synOut = GetRandomString('0', 10, TestUtil.NextInt32(Random, 1, 5)).Trim();
                // FIX: was "[email protected](synOut);" — an email-obfuscation artifact;
                // restored to match the surrounding s.@out usage.
                s.@out.Add(synOut);
                Add(synIn, synOut, s.keepOrig);
                if (VERBOSE)
                {
                    Console.WriteLine("  syns[" + synIDX + "] = " + s.@in + " -> " + s.@out + " keepOrig=" + s.keepOrig);
                }
            }

            tokensIn = new MockTokenizer(new StringReader("a"), MockTokenizer.WHITESPACE, true);
            tokensIn.Reset();
            assertTrue(tokensIn.IncrementToken());
            assertFalse(tokensIn.IncrementToken());
            tokensIn.End();
            tokensIn.Dispose();

            // NOTE(review): tokensIn is disposed above yet wrapped below; this
            // mirrors the upstream Lucene test — confirm MockTokenizer tolerates
            // being reused after Dispose.
            tokensOut  = new SynonymFilter(tokensIn, b.Build(), true);
            termAtt    = tokensOut.AddAttribute <ICharTermAttribute>();
            posIncrAtt = tokensOut.AddAttribute <IPositionIncrementAttribute>();
            posLenAtt  = tokensOut.AddAttribute <IPositionLengthAttribute>();
            offsetAtt  = tokensOut.AddAttribute <IOffsetAttribute>();

            if (dedup)
            {
                PruneDups(syns);
            }

            // Reference implementation produces the expected token stream.
            string expected = SlowSynMatcher(document, syns, 5);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: expected=" + expected);
            }

            Verify(document, expected);
        }
示例#41
0
		//
		// This parses the -arg and /arg options to the compiler, even if the strings
		// in the following text use "/arg" on the strings.
		//
		/// <summary>
		/// Parses one compiler option (the text always uses the "/arg" spelling,
		/// though "-arg" is accepted upstream) and applies it to
		/// <paramref name="settings"/>. Returns Success, Error, Stop (for help
		/// style options) or UnknownOption. May append extra arguments to
		/// <paramref name="args"/> (see /pkg).
		/// </summary>
		ParseResult ParseOption (string option, ref string[] args, CompilerSettings settings)
		{
			// Split "name:value"; options without ':' get an empty value.
			int idx = option.IndexOf (':');
			string arg, value;

			if (idx == -1) {
				arg = option;
				value = "";
			} else {
				arg = option.Substring (0, idx);

				value = option.Substring (idx + 1);
			}

			switch (arg.ToLowerInvariant ()) {
			case "/nologo":
				return ParseResult.Success;

			case "/t":
			case "/target":
				switch (value) {
				case "exe":
					settings.Target = Target.Exe;
					break;

				case "winexe":
					settings.Target = Target.WinExe;
					break;

				case "library":
					settings.Target = Target.Library;
					settings.TargetExt = ".dll";
					break;

				case "module":
					settings.Target = Target.Module;
					settings.TargetExt = ".netmodule";
					break;

				default:
					report.Error (2019, "Invalid target type for -target. Valid options are `exe', `winexe', `library' or `module'");
					return ParseResult.Error;
				}
				return ParseResult.Success;

			case "/out":
				if (value.Length == 0) {
					Error_RequiresFileName (option);
					return ParseResult.Error;
				}
				settings.OutputFile = value;
				return ParseResult.Success;

			case "/o":
			case "/o+":
			case "/optimize":
			case "/optimize+":
				settings.Optimize = true;
				return ParseResult.Success;

			case "/o-":
			case "/optimize-":
				settings.Optimize = false;
				return ParseResult.Success;

			// TODO: Not supported by csc 3.5+
			case "/incremental":
			case "/incremental+":
			case "/incremental-":
				// nothing.
				return ParseResult.Success;

			case "/d":
			case "/define": {
					if (value.Length == 0) {
						Error_RequiresArgument (option);
						return ParseResult.Error;
					}

					// Invalid symbols only warn; the remaining ones still apply.
					foreach (string d in value.Split (argument_value_separator)) {
						string conditional = d.Trim ();
						if (!Tokenizer.IsValidIdentifier (conditional)) {
							report.Warning (2029, 1, "Invalid conditional define symbol `{0}'", conditional);
							continue;
						}

						settings.AddConditionalSymbol (conditional);
					}
					return ParseResult.Success;
				}

			case "/bugreport":
				//
				// We should collect data, runtime, etc and store in the file specified
				//
				output.WriteLine ("To file bug reports, please visit: http://www.mono-project.com/Bugs");
				return ParseResult.Success;

			case "/pkg": {
					string packages;

					if (value.Length == 0) {
						Error_RequiresArgument (option);
						return ParseResult.Error;
					}
					// Expand pkg-config flags into additional compiler arguments.
					packages = String.Join (" ", value.Split (new Char[] { ';', ',', '\n', '\r' }));
					string pkgout = Driver.GetPackageFlags (packages, report);

					if (pkgout == null)
						return ParseResult.Error;

					string[] xargs = pkgout.Trim (new Char[] { ' ', '\n', '\r', '\t' }).Split (new Char[] { ' ', '\t' });
					args = AddArgs (args, xargs);
					return ParseResult.Success;
				}

			case "/linkres":
			case "/linkresource":
			case "/res":
			case "/resource":
				AssemblyResource res = null;
				// value is "file[,name[,public|private]]".
				string[] s = value.Split (argument_value_separator, StringSplitOptions.RemoveEmptyEntries);
				switch (s.Length) {
				case 1:
					if (s[0].Length == 0)
						goto default;
					res = new AssemblyResource (s[0], Path.GetFileName (s[0]));
					break;
				case 2:
					res = new AssemblyResource (s[0], s[1]);
					break;
				case 3:
					if (s[2] != "public" && s[2] != "private") {
						report.Error (1906, "Invalid resource visibility option `{0}'. Use either `public' or `private' instead", s[2]);
						return ParseResult.Error;
					}
					res = new AssemblyResource (s[0], s[1], s[2] == "private");
					break;
				default:
					report.Error (-2005, "Wrong number of arguments for option `{0}'", option);
					return ParseResult.Error;
				}

				if (res != null) {
					// arg[1] is 'r' for /res|/resource (embedded), 'l' for
					// /linkres|/linkresource (linked only).
					res.IsEmbeded = arg[1] == 'r' || arg[1] == 'R';
					AddResource (res, settings);
				}

				return ParseResult.Success;

			case "/recurse":
				if (value.Length == 0) {
					Error_RequiresFileName (option);
					return ParseResult.Error;
				}
				ProcessSourceFiles (value, true, settings.SourceFiles);
				return ParseResult.Success;

			case "/r":
			case "/reference": {
					if (value.Length == 0) {
						Error_RequiresFileName (option);
						return ParseResult.Error;
					}

					string[] refs = value.Split (argument_value_separator);
					foreach (string r in refs) {
						if (r.Length == 0)
							continue;

						string val = r;
						// "alias=assembly" syntax; an alias is only valid when
						// it is the sole reference in this option.
						int index = val.IndexOf ('=');
						if (index > -1) {
							string alias = r.Substring (0, index);
							string assembly = r.Substring (index + 1);
							AddAssemblyReference (alias, assembly, settings);
							if (refs.Length != 1) {
								report.Error (2034, "Cannot specify multiple aliases using single /reference option");
								return ParseResult.Error;
							}
						} else {
							settings.AssemblyReferences.Add (val);
						}
					}
					return ParseResult.Success;
				}
			case "/addmodule": {
					if (value.Length == 0) {
						Error_RequiresFileName (option);
						return ParseResult.Error;
					}

					string[] refs = value.Split (argument_value_separator);
					foreach (string r in refs) {
						settings.Modules.Add (r);
					}
					return ParseResult.Success;
				}
			case "/win32res": {
					if (value.Length == 0) {
						Error_RequiresFileName (option);
						return ParseResult.Error;
					}

					if (settings.Win32IconFile != null)
						report.Error (1565, "Cannot specify the `win32res' and the `win32ico' compiler option at the same time");

					settings.Win32ResourceFile = value;
					return ParseResult.Success;
				}
			case "/win32icon": {
					if (value.Length == 0) {
						Error_RequiresFileName (option);
						return ParseResult.Error;
					}

					if (settings.Win32ResourceFile != null)
						report.Error (1565, "Cannot specify the `win32res' and the `win32ico' compiler option at the same time");

					settings.Win32IconFile = value;
					return ParseResult.Success;
				}
			case "/doc": {
					if (value.Length == 0) {
						Error_RequiresFileName (option);
						return ParseResult.Error;
					}

					settings.DocumentationFile = value;
					return ParseResult.Success;
				}
			case "/lib": {
					string[] libdirs;

					if (value.Length == 0) {
						return ParseResult.Error;
					}

					libdirs = value.Split (argument_value_separator);
					foreach (string dir in libdirs)
						settings.ReferencesLookupPaths.Add (dir);
					return ParseResult.Success;
				}

			case "/debug-":
				settings.GenerateDebugInfo = false;
				return ParseResult.Success;

			case "/debug":
				// Any other /debug:<value> is accepted but has no effect.
				if (value == "full" || value == "")
					settings.GenerateDebugInfo = true;

				return ParseResult.Success;

			case "/debug+":
				settings.GenerateDebugInfo = true;
				return ParseResult.Success;

			case "/checked":
			case "/checked+":
				settings.Checked = true;
				return ParseResult.Success;

			case "/checked-":
				settings.Checked = false;
				return ParseResult.Success;

			case "/clscheck":
			case "/clscheck+":
				settings.VerifyClsCompliance = true;
				return ParseResult.Success;

			case "/clscheck-":
				settings.VerifyClsCompliance = false;
				return ParseResult.Success;

			case "/unsafe":
			case "/unsafe+":
				settings.Unsafe = true;
				return ParseResult.Success;

			case "/unsafe-":
				settings.Unsafe = false;
				return ParseResult.Success;

			case "/warnaserror":
			case "/warnaserror+":
				// No value: all warnings become errors; otherwise only the
				// listed warning ids do.
				if (value.Length == 0) {
					report.WarningsAreErrors = true;
				} else {
					foreach (string wid in value.Split (numeric_value_separator))
						report.AddWarningAsError (wid);
				}
				return ParseResult.Success;

			case "/warnaserror-":
				if (value.Length == 0) {
					report.WarningsAreErrors = false;
				} else {
					foreach (string wid in value.Split (numeric_value_separator))
						report.RemoveWarningAsError (wid);
				}
				return ParseResult.Success;

			case "/warn":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				SetWarningLevel (value);
				return ParseResult.Success;

			case "/nowarn":
					if (value.Length == 0) {
						Error_RequiresArgument (option);
						return ParseResult.Error;
					}

					var warns = value.Split (numeric_value_separator);
					foreach (string wc in warns) {
						try {
							if (wc.Trim ().Length == 0)
								continue;

							// Warning numbers must be positive integers.
							int warn = Int32.Parse (wc);
							if (warn < 1) {
								throw new ArgumentOutOfRangeException ("warn");
							}
							report.SetIgnoreWarning (warn);
						} catch {
							report.Error (1904, "`{0}' is not a valid warning number", wc);
							return ParseResult.Error;
						}
					}
					return ParseResult.Success;

			case "/noconfig":
				settings.LoadDefaultReferences = false;
				return ParseResult.Success;

			case "/platform":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				switch (value.ToLower (CultureInfo.InvariantCulture)) {
				case "anycpu":
					settings.Platform = Platform.AnyCPU;
					break;
				case "x86":
					settings.Platform = Platform.X86;
					break;
				case "x64":
					settings.Platform = Platform.X64;
					break;
				case "itanium":
					settings.Platform = Platform.IA64;
					break;
				default:
					report.Error (1672, "Invalid platform type for -platform. Valid options are `anycpu', `x86', `x64' or `itanium'");
					return ParseResult.Error;
				}

				return ParseResult.Success;

			case "/sdk":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				settings.SdkVersion = value;
				return ParseResult.Success;

			// We just ignore this.
			case "/errorreport":
			case "/filealign":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				return ParseResult.Success;

			case "/helpinternal":
				OtherFlags ();
				return ParseResult.Stop;

			case "/help":
			case "/?":
				Usage ();
				return ParseResult.Stop;

			case "/main":
			case "/m":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}
				settings.MainClass = value;
				return ParseResult.Success;

			case "/nostdlib":
			case "/nostdlib+":
				settings.StdLib = false;
				return ParseResult.Success;

			case "/nostdlib-":
				settings.StdLib = true;
				return ParseResult.Success;

			case "/fullpaths":
				report.Printer.ShowFullPaths = true;
				return ParseResult.Success;

			case "/keyfile":
				if (value.Length == 0) {
					Error_RequiresFileName (option);
					return ParseResult.Error;
				}

				settings.StrongNameKeyFile = value;
				return ParseResult.Success;

			case "/keycontainer":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				settings.StrongNameKeyContainer = value;
				return ParseResult.Success;

			case "/delaysign+":
			case "/delaysign":
				settings.StrongNameDelaySign = true;
				return ParseResult.Success;

			case "/delaysign-":
				settings.StrongNameDelaySign = false;
				return ParseResult.Success;

			case "/langversion":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				switch (value.ToLowerInvariant ()) {
				case "iso-1":
					settings.Version = LanguageVersion.ISO_1;
					return ParseResult.Success;
				case "default":
					settings.Version = LanguageVersion.Default;
					return ParseResult.Success;
				case "iso-2":
					settings.Version = LanguageVersion.ISO_2;
					return ParseResult.Success;
				case "3":
					settings.Version = LanguageVersion.V_3;
					return ParseResult.Success;
				case "4":
					settings.Version = LanguageVersion.V_4;
					return ParseResult.Success;
				case "5":
					settings.Version = LanguageVersion.V_5;
					return ParseResult.Success;
				case "future":
					settings.Version = LanguageVersion.Future;
					return ParseResult.Success;
				}

				report.Error (1617, "Invalid -langversion option `{0}'. It must be `ISO-1', `ISO-2', `3', `4', `5', `Default' or `Future'", value);
				return ParseResult.Error;

			case "/codepage":
				if (value.Length == 0) {
					Error_RequiresArgument (option);
					return ParseResult.Error;
				}

				switch (value) {
				case "utf8":
					settings.Encoding = Encoding.UTF8;
					break;
				case "reset":
					settings.Encoding = Encoding.Default;
					break;
				default:
					try {
						settings.Encoding = Encoding.GetEncoding (int.Parse (value));
					} catch {
						// FIX: the error return previously sat *after* the
						// try/catch, so a valid numeric code page still
						// returned ParseResult.Error. It belongs here.
						report.Error (2016, "Code page `{0}' is invalid or not installed", value);
						return ParseResult.Error;
					}
					break;
				}
				return ParseResult.Success;

			default:
				return ParseResult.UnknownOption;
			}
		}
示例#42
0
        /// <summary>
        /// Re-renders the current input line to the console: expands tabs,
        /// caret-escapes control characters, optionally syntax-colors the text
        /// with the tokenizer, blanks out any leftover characters from the
        /// previous render, and repositions the cursor.
        /// _inputToOutputTable maps each input index to a pair of
        /// (output char offset, output byte offset in code page 932).
        /// </summary>
        void Render()
        {
            _inputToOutputTable = new Tuple <int, int> [_input.Length + 1];
            _cursor.Reset();
            StringBuilder output = new StringBuilder();

            // FIX: Encoding.GetEncoding(932) (Shift-JIS) was looked up inside
            // the per-character loop; it is loop-invariant, so hoist it.
            var sjis = Encoding.GetEncoding(932);

            for (int i = 0; i < _input.Length; i++)
            {
                _inputToOutputTable[i] = new Tuple <int, int>(output.Length, sjis.GetByteCount(output.ToString()));
                if (_input[i] == '\t')
                {
                    // Pad with spaces to the next tab stop.
                    for (int j = _tabWidth - output.Length % _tabWidth; j > 0; j--)
                    {
                        output.Append(' ');
                    }
                }
                else if (char.IsControl(_input[i]))
                {
                    // Caret notation: control codes 1..26 render as ^A..^Z,
                    // everything else as ^?.
                    if (_input[i] <= 26)
                    {
                        output.AppendFormat("^{0}", (char)(_input[i] + 'A' - 1));
                    }
                    else
                    {
                        output.Append("^?");
                    }
                }
                else
                {
                    output.Append(_input[i]);
                }
            }
            _inputToOutputTable[_input.Length] = new Tuple <int, int>(output.Length, sjis.GetByteCount(output.ToString()));
            var text = output.ToString();

            if (_coloringInput)
            {
                // Tokenize the raw input and color the corresponding rendered
                // spans; gaps between tokens are painted cyan.
                int end       = 0;
                var tokenizer = new Tokenizer();
                tokenizer.ErrorSink = Microsoft.Scripting.ErrorSink.Null;
                tokenizer.Initialize(null, new StringReader(_input.ToString()), null, Microsoft.Scripting.SourceLocation.MinValue);
                List <Token> tokens = new List <Token>();
                while (tokenizer.NextToken.Type != TokenType.EndOfStream)
                {
                    tokens.Add(tokenizer.Read());
                }
                foreach (var token in tokens)
                {
                    var start = _inputToOutputTable[token.Span.Start.Index].Item1;
                    if (end != start)
                    {
                        WriteColor(Output, text.Substring(end, start - end), ConsoleColor.Cyan);
                    }
                    end = _inputToOutputTable[token.Span.End.Index].Item1;
                    ConsoleColor color;
                    switch (token.Type)
                    {
                    // Declaration/type keywords: green.
                    case TokenType.KeywordClass:
                    case TokenType.KeywordConst:
                    case TokenType.KeywordEnum:
                    case TokenType.KeywordExtends:
                    case TokenType.KeywordFunction:
                    case TokenType.KeywordPrivate:
                    case TokenType.KeywordProperty:
                    case TokenType.KeywordProtected:
                    case TokenType.KeywordPublic:
                    case TokenType.KeywordStatic:
                    case TokenType.KeywordSynchronized:
                    case TokenType.KeywordInt:
                    case TokenType.KeywordOctet:
                    case TokenType.KeywordReal:
                    case TokenType.KeywordString:
                    case TokenType.KeywordVar:
                        color = ConsoleColor.Green;
                        break;

                    // Control-flow and statement keywords: yellow.
                    case TokenType.KeywordBreak:
                    case TokenType.KeywordCase:
                    case TokenType.KeywordCatch:
                    case TokenType.KeywordContinue:
                    case TokenType.KeywordDebugger:
                    case TokenType.KeywordDefault:
                    case TokenType.KeywordDelete:
                    case TokenType.KeywordDo:
                    case TokenType.KeywordElse:
                    case TokenType.KeywordExport:
                    case TokenType.KeywordFinally:
                    case TokenType.KeywordFor:
                    case TokenType.KeywordGetter:
                    case TokenType.KeywordGlobal:
                    case TokenType.KeywordGoTo:
                    case TokenType.KeywordIf:
                    case TokenType.KeywordImport:
                    case TokenType.KeywordIn:
                    case TokenType.KeywordInContextOf:
                    case TokenType.KeywordInstanceOf:
                    case TokenType.KeywordInvalidate:
                    case TokenType.KeywordIsValid:
                    case TokenType.KeywordNew:
                    case TokenType.KeywordReturn:
                    case TokenType.KeywordSetter:
                    case TokenType.KeywordSuper:
                    case TokenType.KeywordSwitch:
                    case TokenType.KeywordThis:
                    case TokenType.KeywordThrow:
                    case TokenType.KeywordTry:
                    case TokenType.KeywordTypeOf:
                    case TokenType.KeywordWhile:
                    case TokenType.KeywordWith:
                        color = ConsoleColor.Yellow;
                        break;

                    // Literals and literal-like keywords: magenta.
                    case TokenType.KeywordFalse:
                    case TokenType.KeywordInfinity:
                    case TokenType.KeywordNaN:
                    case TokenType.KeywordNull:
                    case TokenType.KeywordTrue:
                    case TokenType.KeywordVoid:
                    case TokenType.LiteralInteger:
                    case TokenType.LiteralReal:
                    case TokenType.LiteralString:
                        color = ConsoleColor.Magenta;
                        break;

                    case TokenType.Unknown:
                        color = ConsoleColor.Red;
                        break;

                    default:
                        color = ConsoleColor.White;
                        break;
                    }
                    WriteColor(Output, text.Substring(start, end - start), color);
                }
                if (end != text.Length)
                {
                    WriteColor(Output, text.Substring(end), ConsoleColor.Cyan);
                }
            }
            else
            {
                WriteColor(Output, text, ConsoleColor.White);
            }
            // Overwrite any trailing characters left from the previous render.
            if (text.Length < _rendered)
            {
                Output.Write(new string(' ', _rendered - text.Length));
            }
            _rendered = text.Length;
            _cursor.Place(_inputToOutputTable[_current].Item2);
        }
示例#43
0
 /// <summary>
 ///		Sets the current location to the given character offset and
 ///		recomputes the humanized (line-based) location from it.
 /// </summary>
 /// <param name="chars">Character offset into the source text.</param>
 public void SetLocation(int chars)
 {
     Character       = chars;
     CurrentLocation = Tokenizer.HumanizeCharacterLocation(Character, Lines);
 }
示例#44
0
        /// <summary>
        /// Creates the specified package on disk: a signed (optionally
        /// strong-named) assured stream containing multiple sub-streams — the
        /// XML package definition, a placeholder tblog stream, and a zip file
        /// holding the package's files.
        /// </summary>
        /// <param name="fileName">Name of the file to create.</param>
        /// <param name="definition">The package definition; its files' stream names
        /// may be rewritten to unique, forward-slash relative paths.</param>
        /// <returns>Always <c>null</c>; the result of this call is the file written
        /// to <paramref name="fileName"/>.</returns>
        /// <exception cref="ArgumentNullException">fileName or definition is null.</exception>
        public static TPack Create(string fileName, Pack definition)
        {
            if (fileName == null)
            {
                throw new ArgumentNullException("fileName");
            }

            if (definition == null)
            {
                throw new ArgumentNullException("definition");
            }

            AssuredStreamCreateArgs args = new AssuredStreamCreateArgs();

            // Sign the outer stream when the definition carries a strong-name key.
            if (definition.StrongNameKey != null)
            {
                args.StrongNameKey = definition.StrongNameKey;
            }

            args.FileType = PackageFileType;

            MultipleStreamCreateArgs msca = new MultipleStreamCreateArgs();

            msca.MaximumNumberOfStreams = 4;
            msca.VerificationMode       = VerificationMode.None;

            using (FileStream fs = File.Create(fileName, 65536))
                using (AssuredStream assurance = new AssuredStream(fs, args))
                    using (MultipleStreamWriter msw = new MultipleStreamWriter(assurance, msca))
                    {
                        // Stream 0x10: the package definition serialized as XML
                        // (individually assured and gzip-compressed).
                        MultipleStreamArgs msa = new MultipleStreamArgs();
                        msa.StreamType = 0x10;
                        msa.Assured    = true;
                        msa.GZipped    = true;
                        using (XmlWriter xw = new XmlTextWriter(msw.CreateStream(msa), Encoding.UTF8))
                        {
                            xw.WriteStartDocument();
                            xw.WriteStartElement("TurtlePackage", "http://schemas.qqn.nl/2007/TurtlePackage");
                            Tokenizer.TryWriteXml(xw, definition);
                            xw.WriteEndDocument();
                        }

                        // Stream 0x11: reserved for the tblog file; currently
                        // written empty (see TODO below).
                        msa            = new MultipleStreamArgs();
                        msa.StreamType = 0x11;

                        using (XmlWriter xw = new XmlTextWriter(msw.CreateStream(msa), Encoding.UTF8))
                        {
                            // TODO: Write tblog file
                        }


                        // Last stream: We add a zip file
                        msa            = new MultipleStreamArgs();
                        msa.StreamType = ZipFileId;  // Defined
                        msa.Assured    = false;      // Use the whole file assurance for the zip
                        msa.GZipped    = false;      // Don't compress again

                        using (Stream ms = msw.CreateStream(msa))
                            using (ZipFile zipFile = ZipFile.Create(ms))
                            {
                                zipFile.BeginUpdate();
                                zipFile.UseZip64 = UseZip64.Dynamic;

                                // setName maps each added file to the stream name
                                // chosen below (via zipFile.NameTransform).
                                SetName setName = new SetName();

                                zipFile.NameTransform = setName;

                                SortedFileList added = new SortedFileList();
                                added.BaseDirectory = "c:\\" + Guid.NewGuid();

                                foreach (PackContainer container in definition.Containers)
                                {
                                    foreach (PackFile file in container.Files)
                                    {
                                        // Non-relative or duplicate stream names are
                                        // renamed into the "_/" folder, appending a
                                        // counter until the name is unique.
                                        if (!QQnPath.IsRelativeSubPath(file.StreamName) || added.Contains(file.StreamName))
                                        {
                                            string name = Path.GetFileNameWithoutExtension(file.StreamName);
                                            string ext  = Path.GetExtension(file.StreamName);

                                            string attempt = "_/" + name + ext;
                                            int    n       = 0;
                                            do
                                            {
                                                if (!added.Contains(attempt))
                                                {
                                                    file.StreamName = attempt;
                                                    break;
                                                }

                                                attempt = string.Format("_/{0}.{1}.{2}", name, n++, ext);
                                            }while (true);
                                        }

                                        // Zip entries always use forward slashes.
                                        if (file.StreamName.Contains("\\"))
                                        {
                                            file.StreamName = file.StreamName.Replace('\\', '/');
                                        }

                                        added.Add(file.StreamName);
                                        setName.NextName = file.StreamName;

                                        zipFile.Add(file.FullName);
                                    }
                                }

                                zipFile.CommitUpdate();
                            }
                    }

            return(null);
        }
示例#45
0
        /// <summary>
        /// Parses a property string of "key=value" pairs with parenthesized
        /// nesting into a <see cref="Hashtable"/>; each nested scope becomes a
        /// nested Hashtable stored under the (lower-cased) scope name.
        /// </summary>
        /// <param name="propString">Raw property string to parse.</param>
        /// <returns>The root hashtable of parsed properties.</returns>
        /// <exception cref="ConfigurationException">On malformed input: misplaced or
        /// unbalanced parentheses, bad token sequences, or premature end of input.</exception>
        private Hashtable GetProperties(string propString)
        {
            // flag: true while inside a "parameters" scope, where keys keep
            // their original case (see the valNeeded branch below); everywhere
            // else keys are lower-cased.
            bool      flag       = false;
            Hashtable hashtable  = new Hashtable();
            Tokenizer tokenizer  = new Tokenizer(propString);
            string    tokenValue = "";
            // num tracks the current nesting depth; stack holds parent scopes.
            int       num        = 0;
            State     keyNeeded  = State.keyNeeded;
            Stack     stack      = new Stack();

            while (true)
            {
                switch (tokenizer.GetNextToken())
                {
                case Tokenizer.UNNEST:
                    // Closing parenthesis: pop back to the parent scope.
                    if (keyNeeded != State.keyNeeded)
                    {
                        throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_CloseParanthesisMisplaced).Format());
                    }
                    if (num < 1)
                    {
                        throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_CloseParenthesisUnexpected).Format());
                    }
                    if (flag)
                    {
                        flag = false;
                    }
                    hashtable = stack.Pop() as Hashtable;
                    num--;
                    continue;

                case Tokenizer.ID:
                    int num2;
                    switch (keyNeeded)
                    {
                    case State.keyNeeded:
                        // NOTE(review): this compares the PREVIOUS token's value —
                        // tokenValue is only refreshed on the next line. Verify
                        // this ordering is intentional.
                        if (tokenValue == "parameters")
                        {
                            flag = true;
                        }
                        tokenValue = tokenizer.TokenValue;
                        num2       = tokenizer.GetNextToken();
                        // A key must be followed by '=' (ASSIGN) or '(' (NEST).
                        if (((num2 == Tokenizer.CONTINUE) || (num2 == Tokenizer.UNNEST)) || ((num2 == Tokenizer.ID) || (num2 == Tokenizer.EOF)))
                        {
                            throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_KeyFollowingBadToken).Format());
                        }
                        switch (num2)
                        {
                        case Tokenizer.ASSIGN:
                            keyNeeded = State.valNeeded;
                            continue;

                        case Tokenizer.NEST:
                            // "key(" opens a nested scope stored under the key.
                            stack.Push(hashtable);
                            hashtable[tokenValue.ToLower()] = new Hashtable();
                            hashtable = hashtable[tokenValue.ToLower()] as Hashtable;
                            keyNeeded = State.keyNeeded;
                            num++;
                            break;
                        }
                        continue;

                    case State.valNeeded:
                    {
                        string str2 = tokenizer.TokenValue;
                        num2      = tokenizer.GetNextToken();
                        keyNeeded = State.keyNeeded;
                        // A value may not be followed by '=', another ID, or EOF.
                        switch (num2)
                        {
                        case Tokenizer.ASSIGN:
                        case Tokenizer.ID:
                        case Tokenizer.EOF:
                            throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_ValueFollowingBadToken).Format());
                        }
                        if (flag)
                        {
                            // Inside "parameters": preserve the key's case.
                            hashtable[tokenValue] = str2;
                        }
                        else
                        {
                            hashtable[tokenValue.ToLower()] = str2;
                        }
                        switch (num2)
                        {
                        case Tokenizer.NEST:
                            // "key=value(" opens a nested scope that records its
                            // own "id" and "type" entries.
                            stack.Push(hashtable);
                            hashtable[tokenValue.ToLower()] = new Hashtable();
                            hashtable = hashtable[tokenValue.ToLower()] as Hashtable;
                            hashtable.Add("id", tokenValue);
                            hashtable.Add("type", str2);
                            keyNeeded = State.keyNeeded;
                            num++;
                            break;

                        case Tokenizer.UNNEST:
                            // "key=value)" also closes the current scope.
                            if (num < 1)
                            {
                                throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_CloseParenthesisUnexpected).Format());
                            }
                            if (flag)
                            {
                                flag = false;
                            }
                            hashtable = stack.Pop() as Hashtable;
                            num--;
                            keyNeeded = State.keyNeeded;
                            break;
                        }
                        continue;
                    }
                    }
                    continue;

                case Tokenizer.EOF:
                    // End of input: no key may be pending and all scopes closed.
                    if (keyNeeded != State.keyNeeded)
                    {
                        throw new ConfigurationException(ResourceMessageFormatter.Create(() => Caching.Properties.Resources.ConfigurationException_PropsConfigReader_InvalidEOF).Format());
                    }
                    if (num > 0)
                    {
                        throw new ConfigurationException("Invalid property string, un-matched paranthesis");
                    }
                    return(hashtable);
                }
                throw new ConfigurationException("Invalid property string");
            }
        }
示例#46
0
        //
        // Decides whether the input should be treated as a complete
        // compilation unit or as a single statement/expression.
        //
        // Implemented as a small top-down predictive parser: the
        // yacc/jay grammar cannot disambiguate this with a single
        // lookahead token, and only a handful of cases are ambiguous.
        //
        InputKind ToplevelOrStatement(SeekableStreamReader seekable)
        {
            Tokenizer tokenizer = new Tokenizer(seekable, source_file, new ParserSession(), ctx.Report, false);

            // Prefer contextual block keywords over identifiers.
            tokenizer.parsing_block++;

            int token = tokenizer.token();

            switch (token)
            {
            case Token.EOF:
                return InputKind.EOF;

            // Keywords that can only begin a toplevel declaration.
            case Token.EXTERN:
            case Token.OPEN_BRACKET:
            case Token.ABSTRACT:
            case Token.CLASS:
            case Token.ENUM:
            case Token.INTERFACE:
            case Token.INTERNAL:
            case Token.NAMESPACE:
            case Token.PRIVATE:
            case Token.PROTECTED:
            case Token.PUBLIC:
            case Token.SEALED:
            case Token.STATIC:
            case Token.STRUCT:
                return InputKind.CompilationUnit;

            // Tokens that definitely begin an expression or statement.
            case Token.FIXED:
            case Token.BOOL:
            case Token.BYTE:
            case Token.CHAR:
            case Token.DECIMAL:
            case Token.DOUBLE:
            case Token.FLOAT:
            case Token.INT:
            case Token.LONG:
            case Token.NEW:
            case Token.OBJECT:
            case Token.SBYTE:
            case Token.SHORT:
            case Token.STRING:
            case Token.UINT:
            case Token.ULONG:
                return InputKind.StatementOrExpression;

            // 'using' needs one more token: 'using <identifier>' is a
            // directive, anything else is the using statement.
            case Token.USING:
            {
                int lookahead = tokenizer.token();
                if (lookahead == Token.EOF)
                {
                    return InputKind.EOF;
                }
                return lookahead == Token.IDENTIFIER
                    ? InputKind.CompilationUnit
                    : InputKind.StatementOrExpression;
            }

            // 'delegate' needs one more token to distinguish:
            //    delegate opt_anonymous_method_signature block  => expression
            //    delegate type                                  => declaration
            case Token.DELEGATE:
            {
                int lookahead = tokenizer.token();
                if (lookahead == Token.EOF)
                {
                    return InputKind.EOF;
                }
                return (lookahead == Token.OPEN_PARENS || lookahead == Token.OPEN_BRACE)
                    ? InputKind.StatementOrExpression
                    : InputKind.CompilationUnit;
            }

            // 'unsafe' needs one more token to distinguish:
            //    unsafe block                 => statement
            //    unsafe <type declaration>    => toplevel
            case Token.UNSAFE:
            {
                int lookahead = tokenizer.token();
                if (lookahead == Token.EOF)
                {
                    return InputKind.EOF;
                }
                return lookahead == Token.OPEN_PARENS
                    ? InputKind.StatementOrExpression
                    : InputKind.CompilationUnit;
            }

            // Tokens the grammar explicitly rejects in this position.
            case Token.READONLY:
            case Token.OVERRIDE:
            case Token.ERROR:
                return InputKind.Error;

            // Everything else is allowed by expressions; individual cases
            // could be added here if needed.
            default:
                return InputKind.StatementOrExpression;
            }
        }
示例#47
0
文件: Parser.cs 项目: hww/VARP
 /// <summary>
 /// Stub handler for a dot token; always yields null (dots inside lists are
 /// handled by the list parser).
 /// </summary>
 private static Syntax ParseDot(Token thisToken, Tokenizer moreTokens) => null;
示例#48
0
 /// <summary>
 /// Converts an absolute character offset into a human-readable location by
 /// delegating to <c>Tokenizer.HumanizeCharacterLocation</c> with this
 /// instance's <c>Lines</c>.
 /// </summary>
 internal CharacterLocation Location(int chars) => Tokenizer.HumanizeCharacterLocation(chars, Lines);
示例#49
0
 /// <summary>Forwards to <c>Tokenizer.Blank()</c> and reports its result.</summary>
 public bool Parse(Tokenizer tokenizer)
 {
     return tokenizer.Blank();
 }
示例#50
0
文件: Parser.cs 项目: hww/VARP
        /// <summary>
        /// Parses a list or vector whose opening bracket token
        /// (<paramref name="thisToken"/>) has already been consumed, reading
        /// elements from <paramref name="moreTokens"/> until the matching
        /// close bracket. Supports dotted-pair syntax: exactly one datum may
        /// follow a dot, and only in a list opened with an OpenBracket token.
        /// </summary>
        /// <param name="thisToken">The token that opened the list/vector.</param>
        /// <param name="moreTokens">Token stream positioned just after the opening bracket.</param>
        /// <returns>A Syntax node: a value pair for a dotted pair, Nil for an
        /// empty list, or a linked list of the parsed elements otherwise.</returns>
        private static Syntax ParseList(Token thisToken, Tokenizer moreTokens)
        {
            // Is a list/vector
            var   listContents = new List <Syntax>();
            Token dotToken     = null;

            var nextToken = moreTokens.ReadToken();

            while (nextToken != null && nextToken.Type != TokenType.CloseBracket)
            {
                // Parse this element.
                listContents.Add(ParseToken(nextToken, moreTokens));

                // Fetch the next token.
                nextToken = moreTokens.ReadToken();
                if (nextToken == null)
                {
                    // BUG FIX: report against the opening bracket; the original
                    // passed dotToken, which is always null at this point.
                    throw SchemeError.SyntaxError("parser", "Improperly formed list.", thisToken);
                }

                if (nextToken.Type == TokenType.Dot)
                {
                    // A dot is legal only once, and only in a bracketed list.
                    if (dotToken != null || thisToken.Type != TokenType.OpenBracket)
                    {
                        throw SchemeError.SyntaxError("parser", "Improperly formed dotted list", nextToken);
                    }
                    dotToken  = nextToken;
                    nextToken = moreTokens.ReadToken();
                    if (nextToken == null)
                    {
                        throw SchemeError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    if (nextToken.Type == TokenType.CloseBracket)
                    {
                        throw SchemeError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    listContents.Add(ParseToken(nextToken, moreTokens));
                    nextToken = moreTokens.ReadToken();
                    // BUG FIX: nextToken may be null here when the input ends
                    // right after the datum following the dot; the original
                    // dereferenced it unconditionally (NullReferenceException).
                    if (nextToken == null || nextToken.Type != TokenType.CloseBracket)
                    {
                        throw SchemeError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    break;
                }
            }

            if (nextToken == null)
            {
                // Missing ')'
                throw SchemeError.SyntaxError("parser", "missing close parenthesis", thisToken);
            }

            if (dotToken != null)
            {
                // A dotted list must contain exactly the head and the tail.
                if (listContents.Count == 2)
                {
                    return(new Syntax(new ValuePair(listContents[0], listContents[1]), thisToken));
                }
                else
                {
                    throw SchemeError.SyntaxError("parser", "improper dot syntax", thisToken);
                }
            }
            else
            {
                if (listContents.Count == 0)
                {
                    return(new Syntax(Value.Nil, thisToken));
                }
                else
                {
                    return(new Syntax(ValueLinkedList.FromList <Syntax>(listContents), thisToken));
                }
            }
        }
示例#51
0
        /// <summary>
        /// Assemble the code in the given file.
        /// Return true on no errors, else false.
        /// Inspect the State property to see returned items.
        /// </summary>
        /// <param name="filename">Path of the assembly source file.</param>
        /// <param name="textOut">Writer that receives info and error messages.</param>
        /// <returns>True when assembly and ROM creation succeed, else false.</returns>
        public bool Assemble(string filename, TextWriter textOut)
        {
            State        = new AsmState();
            State.Output = new Output(textOut);

            State.Output.Info("");
            State.Output.Info("");
            // BUG FIX: the interpolated strings below had lost their {filename}
            // hole (they read "(unknown)" despite the '$' prefix).
            State.Output.Info($"Assembling file {filename}");

            if (!File.Exists(filename))
            {
                State.Output.Error($"File {filename} does not exist.");
                return(false);
            }

            State.Lines = Tokenizer.GetLines(File.ReadLines(filename), State.Output);

            // BUG FIX: this null check used to come only after State.Lines had
            // already been enumerated twice, which would throw a
            // NullReferenceException; check before any use.
            if (State.Lines == null)
            {
                return(false);
            }
            State.Output.Info($"{State.Lines.Count} lines tokenized.");

            // Determine CPU from any '.cpu' directive.
            foreach (var line in State.Lines)
            {
                // BUG FIX: added '?.' before ToLower() so a null opcode text
                // cannot throw.
                if (line?.Opcode?.Text?.ToLower() == ".cpu")
                {
                    State.Cpu = MakeCpu(line?.Operand?.Text, State.Output);
                }
            }
            if (State.Cpu == null)
            {
                State.Output.Info("CPU not detected, assuming 6809. Use '.cpu' directive to set.");
                State.Cpu = new Cpu6809(false);
                State.Cpu.Initialize(State.Output);
            }

            // Normalize all opcode mnemonics for the selected CPU.
            foreach (var line in State.Lines)
            {
                if (line.Opcode != null)
                {
                    line.Opcode.Text = State.Cpu.FixupMnemonic(line);
                }
            }

            // Handle #ifdef, #undef, etc.
            Preprocess();

            // Gather struct definitions, fill in sizes, validate.
            if (!MakeStructs())
            {
                return(false);
            }

            State.Output.Info($"{State.Symbols.GetStructs().Count} structs degrizzled.");

            // Register every labeled line in the symbol table.
            var labelLines = State.Lines.Where(d => d.Label != null);

            foreach (var labelLine in labelLines)
            {
                State.Symbols.AddSymbol(new Label(labelLine), State.Output);
            }

            State.Output.Info($"{State.Symbols.Count} labels cryoambulated.");

            if (!Assemble())
            {
                return(false);
            }

            return(CreateRom());
        }
示例#52
0
        /// <summary>
        /// Scans the solution file for "Project(" entries and builds the list
        /// of external projects (currently *.vcproj files), then annotates
        /// each with its actual build configuration taken from the
        /// CurrentSolutionConfigurationContents build property.
        /// </summary>
        private SortedFileList <ExternalProject> CreateExternalProjectsList()
        {
            SortedFileList <ExternalProject> projects = new SortedFileList <ExternalProject>();

            using (StreamReader reader = File.OpenText(ProjectFile))
            {
                string rawLine;

                while ((rawLine = reader.ReadLine()) != null)
                {
                    if (!rawLine.StartsWith("Project("))
                    {
                        continue;
                    }

                    IList <string> parts = Tokenizer.GetCommandlineWords(rawLine);

                    if (parts.Count < 5 || parts[1] != "=")
                    {
                        continue;
                    }

                    Guid   typeGuid = new Guid(parts[0].Substring(8).TrimEnd(')').Trim('\"'));
                    string name     = FilterWord(parts[2]);
                    string path     = QQnPath.Combine(ProjectPath, FilterWord(parts[3]));
                    Guid   guid     = new Guid(FilterWord(parts[4]));

                    // Skip solution folders and projects that no longer exist.
                    if (typeGuid == solutionItem || !File.Exists(path))
                    {
                        continue;
                    }

                    if (QQnPath.ExtensionEquals(path, ".vcproj"))
                    {
                        projects.Add(path, new VCBuildProject(guid, path, name, Parameters));
                    }
                }
            }

            if (BuildProperties == null)
            {
                Refresh();
            }

            // The property CurrentSolutionConfigurationContents contains the
            // 'real' configuration of external projects.
            string configXml;

            if (BuildProperties.TryGetValue("CurrentSolutionConfigurationContents", out configXml))
            {
                XmlDocument configDoc = new XmlDocument();
                configDoc.LoadXml(configXml);

                foreach (ExternalProject project in projects)
                {
                    XmlNode match = configDoc.SelectSingleNode("//ProjectConfiguration[@Project='" + project.ProjectGuid.ToString("B").ToUpperInvariant() + "']");

                    if (match != null)
                    {
                        project.AddBuildConfiguration(match.InnerText);
                    }
                }
            }

            return(projects);
        }
示例#53
0
        /// <summary>
        /// Adds classification spans for <paramref name="span"/> to
        /// <paramref name="classifications"/>, tokenizing line by line and
        /// reusing cached per-line tokenizer state where available.
        /// Tokens of an incomplete multi-line string literal are stitched into
        /// a single string-literal classification span.
        /// </summary>
        /// <param name="tokenizer">Tokenizer used to (re)tokenize uncached lines.</param>
        /// <param name="classifications">Output collection that receives the spans.</param>
        /// <param name="span">Non-empty snapshot span to classify.</param>
        private void AddClassifications(Tokenizer tokenizer, List <ClassificationSpan> classifications, SnapshotSpan span)
        {
            Debug.Assert(span.Length > 0);

            var snapshot  = span.Snapshot;
            int firstLine = snapshot.GetLineNumberFromPosition(span.Start);
            int lastLine  = snapshot.GetLineNumberFromPosition(span.End - 1);

            Contract.Assert(firstLine >= 0);

            _tokenCache.EnsureCapacity(snapshot.LineCount);

            // find the closest line preceding firstLine for which we know categorizer state, stop at the codeStartLine:
            LineTokenization lineTokenization;
            int    currentLine = _tokenCache.IndexOfPreviousTokenization(firstLine, 0, out lineTokenization) + 1;
            object state       = lineTokenization.State;

            while (currentLine <= lastLine)
            {
                // Tokenize this line (or reuse the cached tokenization),
                // threading the tokenizer state forward line by line.
                if (!_tokenCache.TryGetTokenization(currentLine, out lineTokenization))
                {
                    lineTokenization         = TokenizeLine(tokenizer, snapshot, state, currentLine);
                    _tokenCache[currentLine] = lineTokenization;
                }

                state = lineTokenization.State;

                for (int i = 0; i < lineTokenization.Tokens.Length; i++)
                {
                    var token = lineTokenization.Tokens[i];
                    if (token.Category == TokenCategory.IncompleteMultiLineStringLiteral)
                    {
                        // we need to walk backwards to find the start of this multi-line string...

                        TokenInfo startToken = token;
                        int       validPrevLine;
                        int       length = startToken.SourceSpan.Length;
                        if (i == 0)
                        {
                            // First token on the line: the literal may have
                            // started on an earlier line — extend backwards.
                            length += GetLeadingMultiLineStrings(tokenizer, snapshot, firstLine, currentLine, out validPrevLine, ref startToken);
                        }
                        else
                        {
                            validPrevLine = currentLine;
                        }

                        if (i == lineTokenization.Tokens.Length - 1)
                        {
                            // Last token on the line: the literal may continue
                            // on later lines — extend forwards.
                            length += GetTrailingMultiLineStrings(tokenizer, snapshot, currentLine, state);
                        }

                        // Emit one span covering the entire multi-line literal.
                        var multiStrSpan = new Span(SnapshotSpanToSpan(snapshot, startToken, validPrevLine).Start, length);
                        classifications.Add(
                            new ClassificationSpan(
                                new SnapshotSpan(snapshot, multiStrSpan),
                                _provider.StringLiteral
                                )
                            );
                    }
                    else
                    {
                        var classification = ClassifyToken(span, token, currentLine);

                        if (classification != null)
                        {
                            classifications.Add(classification);
                        }
                    }
                }

                currentLine++;
            }
        }
        /// <summary>
        /// Builds 1/2/3-gram frequency lists from either the user name in
        /// textBox1 (validated against the database) or the first paragraph of
        /// richTextBox1, optionally stemming words, and fills the three
        /// checked list boxes with the n-grams ordered by descending count.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            utilizador = "";
            IStemmer  stemmer   = new EnglishStemmer();
            Tokenizer tokenizer = new Tokenizer();

            words.Clear();


            if (!String.IsNullOrEmpty(textBox1.Text))
            {
                // A user name was supplied: verify it exists before tokenizing.
                utilizador = textBox1.Text;
                Console.WriteLine(utilizador);
                if (!db.checkUserFile(utilizador, data.Replace('.', ':').Replace('-', '/')))
                {
                    MessageBox.Show("Este utilizador não existe", "Erro",
                                    MessageBoxButtons.OK, MessageBoxIcon.Error);
                    return;
                }
                // Strip punctuation, lower-case, and flatten line breaks.
                text  = Regex.Replace(text, @"[^\w\s]", "").ToLower().Replace("\r\n", " ");
                words = tokenizer.Tokenize(text);
                words.RemoveAll(word => word.Length == 0);
            }
            else if (!String.IsNullOrEmpty(richTextBox1.Text))
            {
                // No user given: take the text from the rich text box instead.
                documentos.Clear();
                string   cleaned    = Regex.Replace(richTextBox1.Text, @"[^\w\s]", "").ToLower();
                string[] paragraphs = cleaned.Split(new[] { '\r', '\n' });
                documentos = paragraphs.ToList();
                documentos.RemoveAt(0);

                // Only the first paragraph feeds the n-gram counters.
                words = tokenizer.Tokenize(paragraphs[0]);
                words.RemoveAll(word => word.Length == 0);
            }
            else
            {
                return;
            }

            stemm = StemmedcheckBox.Checked;

            int N = 3; // number of n-gram sizes (1..3)

            Dictionary <string, int>[] nGramCounts = new Dictionary <string, int> [4];
            for (int k = 0; k < nGramCounts.Length; k++)
            {
                nGramCounts[k] = new Dictionary <string, int>();
            }

            // Count every n-gram of each size; index 0 stays unused so the
            // array can be addressed directly by n-gram size.
            for (int size = 1; size <= N; size++)
            {
                for (int start = 0; start < (words.Count - size + 1); start++)
                {
                    string gram = "";
                    for (int w = start; w < start + size; w++)
                    {
                        gram = gram + " " + (stemm ? stemmer.Stem(words[w]) : words[w]);
                    }

                    int count;
                    if (nGramCounts[size].TryGetValue(gram, out count))
                    {
                        nGramCounts[size][gram] = count + 1;
                    }
                    else
                    {
                        nGramCounts[size].Add(gram, 1);
                    }
                }
            }

            checkedListBox1.Items.Clear();
            checkedListBox2.Items.Clear();
            checkedListBox3.Items.Clear();

            // Fill each list box with its n-gram size, most frequent first.
            for (int size = 1; size <= N; size++)
            {
                foreach (var entry in nGramCounts[size].OrderByDescending(pair => pair.Value))
                {
                    switch (size)
                    {
                    case 1:
                        checkedListBox1.Items.Add(entry.Key + " : " + entry.Value, false);
                        break;

                    case 2:
                        checkedListBox2.Items.Add(entry.Key + " : " + entry.Value, false);
                        break;

                    case 3:
                        checkedListBox3.Items.Add(entry.Key + " : " + entry.Value, false);
                        break;
                    }
                }
            }
        }
示例#55
0
        /// <summary>
        /// Parses the contents of a .con or .ai file, and converts the contents into C# objects
        /// </summary>
        /// <param name="fileContents">A dictionary of file contents [LineNumber => LineContents]</param>
        /// <param name="workingFile">A reference to the ConFile that contains the contents</param>
        internal static async Task ParseFileLines(
            Dictionary <int, string> fileContents,
            ConFile workingFile,
            ExecuteInstruction run)
        {
            // ============
            // First we convert our confile lines into parsed tokens
            // ============
            Token[]   fileTokens = Tokenizer.Tokenize(workingFile, ref fileContents);
            TokenArgs tokenArgs;
            Scope     currentScope = workingFile.Scope;

            // ============
            // Now we create an object reference of all objects in the Confile
            // before parsing the object properties, which **can** reference an object
            // in the same file before its defined
            // NOTE: Do not create object references for .Active and .safeActive
            // ============
            foreach (Token token in fileTokens.Where(x => x.Kind == TokenType.ObjectStart).OrderBy(x => x.Position))
            {
                // Create the object
                var           Method   = token.TokenArgs.ReferenceType.GetMethod(token.TokenArgs.PropertyName);
                ConFileObject template = Method.Invoke(token);

                // Finally, register the object with the ObjectManager
                currentScope.AddObject(template, token);
                Logger.Info($"Created {token.TokenArgs.ReferenceType} \"{template.Name}\"", workingFile, token.Position);
            }

            // ============
            // Finally, we load all of the object properties, and assign them to their
            // respective objects
            // ============

            // Create our needed objects
            RemComment    comment    = null;
            ConFileObject currentObj = null;
            ReferenceType type;
            var           builder = new StringBuilder();

            // We use a for loop here so we can skip rem blocks and statements
            for (int i = 0; i < fileTokens.Length; i++)
            {
                // Grab token value
                Token token = fileTokens[i];
                try
                {
                    switch (token.Kind)
                    {
                    case TokenType.ObjectStart:
                    case TokenType.ActiveSwitch:
                        // NOTE: the object was created before this loop!
                        currentObj = currentScope.GetObject(token);
                        currentScope.SetActiveObject(currentObj);

                        // === Objects are already added to the working file before hand === //
                        // Add object reference to file
                        workingFile.AddEntry(currentObj, token);

                        // Reset comment
                        comment = null;

                        // Log
                        Logger.Info($"Loading object properties for \"{currentObj.Name}\"",
                                    workingFile, token.Position
                                    );
                        break;

                    case TokenType.ObjectProperty:
                        // Convert args to an object
                        tokenArgs = token.TokenArgs;

                        // Get the last used object
                        type       = tokenArgs.ReferenceType;
                        currentObj = currentScope.GetActiveObject(type);

                        // Make sure we have an object to work with and the object
                        // reference matches our current working object
                        if (currentObj == null)
                        {
                            // If we are here, we have an issue...
                            string error = $"Failed to set property \"{token.TokenArgs.ReferenceType.Name}.\""
                                           + $"{token.TokenArgs.PropertyName}. No object reference set!";
                            throw new ParseException(error, token);
                        }

                        // Let the object parse its own lines...
                        try
                        {
                            currentObj.Parse(token);

                            // Ensure comment is null
                            comment = null;
                        }
                        catch (Exception e)
                        {
                            Logger.Error(e.Message, workingFile, token.Position, e);
                            throw;
                        }
                        break;

                    case TokenType.RemComment:
                        // Create a new comment if we need to
                        if (comment == null)
                        {
                            comment = new RemComment(token);
                        }

                        // Add comment to the current string
                        comment.AppendLine(token.Value);
                        break;

                    case TokenType.BeginRem:
                        RemComment rem = new RemComment(token);
                        rem.IsRemBlock = true;

                        // Skip every line until we get to the endRem
                        builder.AppendLine(token.Value);
                        i = ScopeUntil(TokenType.EndRem, fileTokens, i, builder);

                        // Set rem value
                        rem.Value = builder.ToString().TrimEnd();
                        workingFile.AddEntry(rem, rem.Token);

                        // Clear the string builder
                        builder.Clear();
                        break;

                    case TokenType.IfStart:
                        Statement statement = new Statement(token);
                        if (token.Kind == TokenType.IfStart)
                        {
                            // Skip every line until we get to the endIf
                            builder.AppendLine(token.Value);
                            i = ScopeUntil(TokenType.EndIf, fileTokens, i, builder);

                            // Clear the string builder
                            statement.Token.Value = builder.ToString().TrimEnd();
                            builder.Clear();
                        }

                        // Add entry
                        workingFile.AddEntry(statement, statement.Token);
                        break;

                    case TokenType.Run:
                    case TokenType.Include:
                        // Just add to as a string
                        RunStatement stmt = new RunStatement(token);
                        workingFile.AddEntry(stmt, stmt.Token);

                        // Do we execute the statement?
                        if (run == ExecuteInstruction.Skip)
                        {
                            continue;
                        }

                        // Create new scope for execution
                        Scope runScope = currentScope;

                        // Are we executing in a new scope?
                        if (run == ExecuteInstruction.ExecuteInNewScope)
                        {
                            // For now, we just inherit the parent scope type
                            runScope = new Scope(currentScope, currentScope.ScopeType);
                            runScope.MissingObjectHandling = MissingObjectHandling.CheckParent;
                        }

                        // Get the filepath
                        string filePath = Path.GetDirectoryName(workingFile.FilePath);
                        string fileName = Path.Combine(filePath, stmt.FileName);

                        // Define file arguments
                        runScope.SetArguments(stmt.Arguments);

                        // Load the file
                        try
                        {
                            ConFile include = await LoadFileAsync(fileName, runScope, run);

                            workingFile.ExecutedIncludes.Add(include);
                        }
                        catch (FileNotFoundException)     // Only acceptable exception
                        {
                            fileName = Path.GetFileName(fileName);
                            Logger.Warning($"Failed to run file \"{fileName}\". File Not Found",
                                           workingFile, token.Position);
                        }
                        break;

                    case TokenType.Constant:
                    case TokenType.Variable:
                        // Set the new expression reference in Scope
                        Expression exp = new Expression(token);
                        currentScope.Expressions[exp.Name] = exp;

                        // Add expression to the confile as well
                        workingFile.AddEntry(exp, exp.Token);
                        break;

                    case TokenType.None:
                        // Dont attach comment to a property if we have an empty line here
                        if (comment != null)
                        {
                            workingFile.AddEntry(comment, comment.Token);
                            comment = null;
                        }

                        // Throw error if the line is not empty
                        if (!String.IsNullOrWhiteSpace(token.Value))
                        {
                            string message = $"Unable to parse file entry \"{token.Value}\" on line {token.Position}";
                            throw new ParseException(message, token);
                        }
                        break;
                    }
                }
                catch (Exception e)
                {
                    Logger.Error(e.Message, token.File, token.Position, e);
                    throw;
                }
            }

            // Finalize this confile
            workingFile.Finish();
        }
示例#56
0
        /// <summary>
        /// Produces a result for every registered action that matches the query,
        /// scored by how it matched (command-name prefix and/or tags).
        /// </summary>
        /// <param name="query">The user query; its text is tokenized into a command name and argument text.</param>
        /// <returns>Results for all matching, visible actions; never null.</returns>
        protected override IEnumerable <IResult> GetResults(IQuery query)
        {
            // First token of the query text is the command name; the remainder
            // of the raw text (after the command name) becomes the argument string.
            var args        = Tokenizer.ToList(query.Text);
            var commandName = args.Count > 0 ? args.PopHead() : String.Empty;
            var argsText    = query.Text.Substring(commandName.Length);

            // Scores an action: it must match by command-name prefix and/or by tag,
            // and must be visible in the current context; otherwise Priority.None.
            Func <Sidi.CommandLine.Action, Priority> isMatch = a =>
            {
                var matchCommand = MatchLength(a.Name, commandName) > 0;
                var matchTags    = MatchesTags(query, $"#{a.Source.Instance.GetType().Name} #command");

                if (!matchCommand && !matchTags)
                {
                    return(Priority.None);
                }

                if (!IsVisible(context, a))
                {
                    return(Priority.None);
                }

                // Base priority, bumped once for each kind of match.
                return(Priority.Normal - 1 + (matchCommand ? 1 : 0) + (matchTags ? 1 : 0));
            };

            // NOTE: removed unused locals (parameterString, selectedPathCount) and the
            // dead isFileAction lambda that was only referenced from commented-out code.
            return(Actions.Select(pa =>
            {
                var priority = isMatch(pa);
                return priority > Priority.None
                    ? ToIAction(context, pa, argsText).ToResult(priority)
                    : null;
            }).Where(_ => _ != null));
        }
示例#57
0
 /// <summary>
 /// Builds the ArgumentError raised when a character cannot be encoded
 /// in the target Ruby encoding.
 /// </summary>
 /// <param name="e">The fallback exception describing the unencodable character(s).</param>
 /// <param name="encoding">The encoding that rejected the character.</param>
 /// <returns>An ArgumentError naming the offending code point and the encoding.</returns>
 public static Exception /*!*/ CreateArgumentError(EncoderFallbackException /*!*/ e, RubyEncoding /*!*/ encoding)
 {
     // A non-zero CharUnknownHigh means a surrogate pair failed to encode;
     // combine it into a single code point, otherwise use the lone character.
     int codePoint = e.CharUnknownHigh != '\0'
         ? Tokenizer.ToCodePoint(e.CharUnknownHigh, e.CharUnknownLow)
         : (int)e.CharUnknown;
     string message = String.Format("character U+{0:X4} can't be encoded in {1}", codePoint, encoding);
     return RubyExceptions.CreateArgumentError(message);
 }
示例#58
0
 public bool Parse(Tokenizer tokenizer) => tokenizer.Sym(Facts.Statement.Start);
示例#59
0
        /// <summary>
        /// Starts a PayU payment order: validates the model, persists the customer
        /// and a PENDING order inside a transaction, creates the order at PayU,
        /// and returns the redirect URL on success.
        /// </summary>
        /// <param name="model">Payment data (product, customer); RedirectUrl is set on success.</param>
        /// <returns>
        /// A successful result carrying the model with RedirectUrl set, a result with a
        /// validation-summary message if validation fails, or a result wrapping the
        /// exception if anything throws.
        /// </returns>
        public async Task <CommandResult <PaymentUpdateModel> > StartOrderAsync(PaymentUpdateModel model)
        {
            try
            {
                if (Validate(model))
                {
                    using (var transaction = transactionScope.BeginTransaction())
                    {
                        // Load the product being purchased and persist the customer first;
                        // the order row references both.
                        var productContract = query.Get <ProductEntity>().Where(x => x.Id == model.Product.Id).SelectProductContract().Single();
                        var customerEntity  = await _customerService.CreateAsync(model.Customer);

                        // NOTE(review): DateTime.Now is local time — confirm whether
                        // order timestamps should be UTC.
                        var orderEntity = new OrderEntity
                        {
                            Id            = Guid.NewGuid(),
                            CustomerId    = customerEntity.Id,
                            ProductId     = model.Product.Id,
                            Start         = DateTime.Now,
                            OrderStatusId = (int)OrderStatusEnum.PENDING,
                            Token         = Tokenizer.Token(),
                            OrderTypeId   = (int)OrderTypeEnum.PAYU,
                            Quantity      = 1,
                        };
                        store.Create(orderEntity);
                        // Send request to PayU
                        // Price is converted to minor currency units (grosz/cents).
                        var payuProduct = new PayuProductContract
                        {
                            Name      = productContract.FullName,
                            Quantity  = 1,
                            UnitPrice = (int)(productContract.GrossPrice * 100),
                        };
                        var payuBuyer = new PayuBuyerContract
                        {
                            FirstName = customerEntity.FirstName,
                            LastName  = customerEntity.LastName,
                            Email     = customerEntity.Email,
                            Phone     = customerEntity.Phone,
                            Language  = PayuConsts.Language,
                        };
                        var payuResult = await _payuManager.CreateOrderAsync(orderEntity.Token, payuProduct, payuBuyer);

                        if (payuResult.Succeeded)
                        {
                            // Caller redirects the user to PayU's payment page.
                            var payuOutput = payuResult.Value;
                            model.RedirectUrl = payuOutput.RedirectUri;
                        }
                        else
                        {
                            // NOTE(review): this re-assigns PENDING, which is already the
                            // initial status, despite the "cancel order" comment below —
                            // likely intended to be a cancelled/failed status. Confirm
                            // against OrderStatusEnum.
                            orderEntity.OrderStatusId = (int)OrderStatusEnum.PENDING;
                            store.Update(orderEntity);
                            // cancel order
                        }
                        // NOTE(review): the transaction commits on both the success and the
                        // PayU-failure path — confirm the failed order should be persisted.
                        transaction.Commit();
                        return(new CommandResult <PaymentUpdateModel>(model));
                    }
                }
                // Validation failed: return the model with a localized summary message.
                return(new CommandResult <PaymentUpdateModel>(model, Localization.Resource.Validation_Summary_Error));
            }
            catch (Exception e)
            {
                // Log and wrap the exception rather than letting it propagate.
                log.Error(nameof(StartOrderAsync), model, e);
                return(new CommandResult <PaymentUpdateModel>(model, e));
            }
        }
示例#60
0
 /// <summary>
 /// Decides whether this tokenizer can start a token at the current input
 /// position, based on the next (unconsumed) character.
 /// </summary>
 /// <param name="t">The tokenizer whose input is inspected (not advanced).</param>
 /// <returns>True if the peeked character is one this tokenizer recognizes.</returns>
 public override bool tokenizable(Tokenizer t)
 {
     var next = t.input.peek();
     return this.jsonCharacters.Contains(next);
 }