Code Example #1
File: driver.cs Project: nekresh/mono
		void tokenize_file (CompilationUnit file, CompilerContext ctx)
		{
			Stream input;

			try {
				input = File.OpenRead (file.Name);
			} catch {
				Report.Error (2001, "Source file `" + file.Name + "' could not be found");
				return;
			}

			using (input){
				SeekableStreamReader reader = new SeekableStreamReader (input, RootContext.Encoding);
				Tokenizer lexer = new Tokenizer (reader, file, ctx);
				int token, tokens = 0, errors = 0;

				while ((token = lexer.token ()) != Token.EOF){
					tokens++;
					if (token == Token.ERROR)
						errors++;
				}
				Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
			}
			
			return;
		}
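The method above drains the token stream purely for its side effects: it counts tokens and Token.ERROR occurrences, then reports the totals. Below is a minimal standalone sketch of the same drain-and-count loop that compiles against the BCL alone; SimpleLexer and its EOF/ERROR codes are hypothetical stand-ins for mcs's internal Tokenizer and Token types, not part of the project above.

using System;
using System.IO;

// Hypothetical stand-in for mcs's Tokenizer: returns one code per
// whitespace-delimited word, ERROR for words containing non-ASCII
// characters, and EOF once the input is exhausted.
class SimpleLexer
{
	public const int EOF = -1;
	public const int ERROR = -2;

	readonly string[] words;
	int index;

	public SimpleLexer (TextReader reader)
	{
		words = reader.ReadToEnd ().Split (
			new[] { ' ', '\t', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
	}

	public int token ()
	{
		if (index >= words.Length)
			return EOF;
		string word = words[index++];
		foreach (char c in word)
			if (c > 127)
				return ERROR;
		return 1;
	}
}

class TokenizeDemo
{
	static void Main (string[] args)
	{
		if (args.Length == 0) {
			Console.Error.WriteLine ("usage: tokenizedemo <file>");
			return;
		}

		Stream input;
		try {
			input = File.OpenRead (args[0]);
		} catch {
			Console.Error.WriteLine ("Source file `" + args[0] + "' could not be found");
			return;
		}

		// Same shape as tokenize_file: read every token, tally errors, report.
		using (input) {
			var lexer = new SimpleLexer (new StreamReader (input));
			int token, tokens = 0, errors = 0;

			while ((token = lexer.token ()) != SimpleLexer.EOF) {
				tokens++;
				if (token == SimpleLexer.ERROR)
					errors++;
			}
			Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
		}
	}
}
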
Code Example #2
		void tokenize_file (SourceFile sourceFile, ModuleContainer module)
		{
			Stream input;

			try {
				input = File.OpenRead (sourceFile.Name);
			} catch {
				Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
				return;
			}

			using (input){
				SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
				var file = new CompilationSourceFile (module, sourceFile);

				Tokenizer lexer = new Tokenizer (reader, file);
				int token, tokens = 0, errors = 0;

				while ((token = lexer.token ()) != Token.EOF){
					tokens++;
					if (token == Token.ERROR)
						errors++;
				}
				Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
			}
			
			return;
		}
Code Example #3
File: driver.cs Project: robterrell/playscript-mono
		void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
		{
			Stream input;

			try {
				input = File.OpenRead (sourceFile.Name);
			} catch {
				Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
				return;
			}

			using (input) {
				SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
				var file = new CompilationSourceFile (module, sourceFile);

				if (sourceFile.FileType == SourceFileType.CSharp) {
					Tokenizer lexer = new Tokenizer (reader, file, session);
					int token, tokens = 0, errors = 0;
	
					while ((token = lexer.token ()) != Token.EOF){
						tokens++;
						if (token == Token.ERROR)
							errors++;
					}
				} else {
					Mono.PlayScript.Tokenizer lexer = new Mono.PlayScript.Tokenizer (reader, file, session);
					lexer.ParsingPlayScript = sourceFile.PsExtended;
					int token, tokens = 0, errors = 0;
	
					while ((token = lexer.token ()) != Mono.PlayScript.Token.EOF){
						tokens++;
						if (token == Mono.PlayScript.Token.ERROR)
							errors++;
					}
				}
			}
			
			return;
		}
Code Example #4
File: eval.cs Project: KAW0/Alter-Native
		//
		// Disambiguates the input string to determine whether we
		// want to process a statement or a compilation unit.
		//
		// This is done using a top-down predictive parser,
		// since the yacc/jay parser cannot disambiguate this
		// without more than one lookahead token. There are very
		// few ambiguities.
		//
		InputKind ToplevelOrStatement (SeekableStreamReader seekable)
		{
			Tokenizer tokenizer = new Tokenizer (seekable, source_file);
			
			int t = tokenizer.token ();
			switch (t){
			case Token.EOF:
				return InputKind.EOF;
				
			// These are toplevels
			case Token.EXTERN:
			case Token.OPEN_BRACKET:
			case Token.ABSTRACT:
			case Token.CLASS:
			case Token.ENUM:
			case Token.INTERFACE:
			case Token.INTERNAL:
			case Token.NAMESPACE:
			case Token.PRIVATE:
			case Token.PROTECTED:
			case Token.PUBLIC:
			case Token.SEALED:
			case Token.STATIC:
			case Token.STRUCT:
				return InputKind.CompilationUnit;
				
			// Definitely expression
			case Token.FIXED:
			case Token.BOOL:
			case Token.BYTE:
			case Token.CHAR:
			case Token.DECIMAL:
			case Token.DOUBLE:
			case Token.FLOAT:
			case Token.INT:
			case Token.LONG:
			case Token.NEW:
			case Token.OBJECT:
			case Token.SBYTE:
			case Token.SHORT:
			case Token.STRING:
			case Token.UINT:
			case Token.ULONG:
				return InputKind.StatementOrExpression;

			// These need disambiguation help
			case Token.USING:
				t = tokenizer.token ();
				if (t == Token.EOF)
					return InputKind.EOF;

				if (t == Token.IDENTIFIER)
					return InputKind.CompilationUnit;
				return InputKind.StatementOrExpression;


			// Distinguish between:
			//    delegate opt_anonymous_method_signature block
			//    delegate type 
			case Token.DELEGATE:
				t = tokenizer.token ();
				if (t == Token.EOF)
					return InputKind.EOF;
				if (t == Token.OPEN_PARENS || t == Token.OPEN_BRACE)
					return InputKind.StatementOrExpression;
				return InputKind.CompilationUnit;

			// Distinguish between:
			//    unsafe block
			//    unsafe as modifier of a type declaration
			case Token.UNSAFE:
				t = tokenizer.token ();
				if (t == Token.EOF)
					return InputKind.EOF;
				if (t == Token.OPEN_PARENS)
					return InputKind.StatementOrExpression;
				return InputKind.CompilationUnit;
				
			// These are errors: we list explicitly what we had
			// from the grammar (ERROR), and then everything else.

			case Token.READONLY:
			case Token.OVERRIDE:
			case Token.ERROR:
				return InputKind.Error;

			// This catches everything else allowed by
			// expressions.  We could add one-by-one use cases
			// if needed.
			default:
				return InputKind.StatementOrExpression;
			}
		}
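The comment block above is the heart of this example: a hand-written predictive check that classifies the input from at most two tokens of lookahead. The sketch below restates that control flow over plain string tokens; MiniClassifier, its token strings, and the simplified MiniInputKind are illustrative stand-ins (only the `using` two-token rule is shown), not the real Token constants.

using System.Collections.Generic;

enum MiniInputKind { EOF, CompilationUnit, StatementOrExpression }

static class MiniClassifier
{
	// Tokens that can only begin a top-level declaration.
	static readonly HashSet<string> toplevel = new HashSet<string> {
		"extern", "[", "abstract", "class", "enum", "interface", "internal",
		"namespace", "private", "protected", "public", "sealed", "static", "struct"
	};

	static bool IsIdentifier (string t)
	{
		return t.Length > 0 && (char.IsLetter (t[0]) || t[0] == '_');
	}

	// Classify with at most two tokens of lookahead, mirroring the
	// predictive structure of ToplevelOrStatement above.
	public static MiniInputKind Classify (Queue<string> tokens)
	{
		if (tokens.Count == 0)
			return MiniInputKind.EOF;

		string t = tokens.Dequeue ();
		if (toplevel.Contains (t))
			return MiniInputKind.CompilationUnit;

		// `using System;` opens a compilation unit, while `using (...)`
		// opens a statement: one extra token decides, as in the USING case.
		if (t == "using") {
			if (tokens.Count == 0)
				return MiniInputKind.EOF;
			return IsIdentifier (tokens.Dequeue ())
				? MiniInputKind.CompilationUnit
				: MiniInputKind.StatementOrExpression;
		}

		// Everything else falls through to statement-or-expression,
		// just like the default case above.
		return MiniInputKind.StatementOrExpression;
	}
}
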
Code Example #5
File: cs-parser.cs Project: segaman/NRefactory
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report, ParserSession session)
{
	this.file = file;
	current_container = current_namespace = file;
	
	this.module = file.Module;
	this.compiler = file.Compiler;
	this.settings = compiler.Settings;
	this.report = report;
	
	lang_version = settings.Version;
	yacc_verbose_flag = settings.VerboseParserFlag;
	doc_support = settings.DocumentationFile != null;
	lexer = new Tokenizer (reader, file, session);
	oob_stack = new Stack<object> ();
	lbag = session.LocationsBag;
	use_global_stacks = session.UseJayGlobalArrays;
	parameters_bucket = session.ParametersStack;
}
Code Example #6
File: cs-parser.cs Project: Ein/monodevelop
public CSharpParser (SeekableStreamReader reader, CompilationUnit file, CompilerContext ctx)
{
	if (RootContext.EvalMode)
		undo = new Undo ();

	this.file = file;
	this.compiler = ctx;
	current_namespace = new NamespaceEntry (ctx, null, file, null);
	current_class = current_namespace.SlaveDeclSpace;
	current_container = current_class.PartialContainer; // == RootContext.ToplevelTypes
	oob_stack.Clear ();
	lexer = new Tokenizer (reader, file, ctx);
	
	use_global_stacks = true;
}
Code Example #7
			public Position (Tokenizer t)
			{
				position = t.reader.Position;
				line = t.line;
				ref_line = t.ref_line;
				col = t.col;
				hidden = t.hidden;
				putback_char = t.putback_char;
				previous_col = t.previous_col;
				if (t.ifstack != null && t.ifstack.Count != 0)
					ifstack = (Stack)t.ifstack.Clone ();
				parsing_generic_less_than = t.parsing_generic_less_than;
				current_token = t.current_token;
			}
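Position here is a memento: it snapshots every piece of mutable tokenizer state so the parser can speculatively read ahead and later rewind. Note that the one reference-typed field, ifstack, is Clone()d rather than copied; a shared reference would let further tokenizing mutate the saved snapshot. Below is a minimal self-contained sketch of the same save/restore pattern, where MiniLexer and its fields are hypothetical.

using System.Collections;

class MiniLexer
{
	public int Pos, Line, Col;
	public Stack IfStack = new Stack ();

	public struct Snapshot
	{
		public int Pos, Line, Col;
		public Stack IfStack;
	}

	// Value fields copy by assignment; the mutable stack must be cloned,
	// exactly as Position clones ifstack, or the snapshot would alias it.
	public Snapshot Save ()
	{
		return new Snapshot {
			Pos = Pos, Line = Line, Col = Col,
			IfStack = (Stack) IfStack.Clone ()
		};
	}

	public void Restore (Snapshot s)
	{
		Pos = s.Pos; Line = s.Line; Col = s.Col;
		IfStack = (Stack) s.IfStack.Clone ();
	}
}
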
Code Example #8
File: cs-parser.cs Project: okrmartin/monodevelop
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file)
{
	this.file = file;
	current_namespace = file.NamespaceContainer;
	
	this.module = current_namespace.Module;
	this.compiler = module.Compiler;
	this.settings = compiler.Settings;
	lang_version = settings.Version;
	doc_support = settings.Documentation != null;
	current_class = current_namespace.SlaveDeclSpace;
	current_container = current_class.PartialContainer; // == RootContext.ToplevelTypes
	oob_stack.Clear ();
	lexer = new Tokenizer (reader, file, compiler);
	
	use_global_stacks = true;
}
Code Example #9
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report)
{
	this.file = file;
	current_container = current_namespace = file;
	
	this.module = file.Module;
	this.compiler = file.Compiler;
	this.settings = compiler.Settings;
	this.report = report;
	
	lang_version = settings.Version;
	yacc_verbose_flag = settings.VerboseParserFlag;
	doc_support = settings.DocumentationFile != null;
	oob_stack.Clear ();
	lexer = new Tokenizer (reader, file);

#if FULL_AST
	lbag = new LocationsBag ();
#else
	lbag = null;
#endif
	
	use_global_stacks = true;
}
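Example #9 also shows how the extra location bookkeeping is gated behind a FULL_AST compile-time symbol. Here is a minimal sketch of the same conditional-compilation technique, assuming FULL_AST is supplied by the build (for instance csc -define:FULL_AST, or <DefineConstants>FULL_AST</DefineConstants> in a project file); the lbag object is just a placeholder, not the real LocationsBag.

using System;

class ConditionalDemo
{
	static void Main ()
	{
#if FULL_AST
		// Allocated only when the build defines FULL_AST.
		object lbag = new object ();
#else
		object lbag = null;
#endif
		Console.WriteLine (lbag == null ? "lean build" : "full-AST build");
	}
}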