//
// Tokenizes a single source file and prints a token/error count summary.
// Diagnostic-only path: drains the lexer without building an AST.
// Reports CS2001 and returns when the file's data stream cannot be opened.
//
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
	Stream input;

	try {
		input = sourceFile.GetDataStream ();
	} catch {
		Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
		return;
	}

	// FIX: the SeekableStreamReader was previously never disposed; only the
	// raw stream was wrapped in `using`. Dispose both, reader first.
	using (input)
	using (SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding)) {
		var file = new CompilationSourceFile (module, sourceFile);
		Tokenizer lexer = new Tokenizer (reader, file, session, ctx.Report);

		int token, tokens = 0, errors = 0;

		// Drain the token stream to EOF, counting tokens and lexer errors.
		while ((token = lexer.token ()) != Token.EOF) {
			tokens++;
			if (token == Token.ERROR)
				errors++;
		}

		Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
	}
}
//
// Parses a single source file into the given module.
// Emits CS2001 when the file's data stream cannot be opened and CS2015
// when the file starts with a PE 'MZ' header (binary, not text).
// When debug info is requested and parsing succeeded, records the file's
// checksum for the symbol writer.
//
public void Parse (SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
	Stream input;

	try {
		input = file.GetDataStream ();
	} catch {
		report.Error (2001, "Source file `{0}' could not be found", file.Name);
		return;
	}

	// FIX: cleanup was `reader.Dispose (); input.Close ();` at the end of the
	// method with no try/finally — an exception from the parser or checksum
	// computation leaked both. `using` guarantees disposal on every path
	// (including the early CS2015 return) in the same order as before.
	using (input) {
		// Check 'MZ' header — reject PE binaries passed as source files.
		if (input.ReadByte () == 77 && input.ReadByte () == 90) {
			report.Error (2015, "Source file `{0}' is a binary file and not a text file", file.Name);
			return;
		}

		input.Position = 0;
		using (var reader = new SeekableStreamReader (input, ctx.Settings.Encoding, session.StreamReaderBuffer)) {
			Parse (reader, file, module, session, report);

			if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
				// Re-read the raw bytes to compute the debug checksum.
				input.Position = 0;
				var checksum = session.GetChecksumAlgorithm ();
				file.SetChecksum (checksum.ComputeHash (input));
			}
		}
	}
}
//
// Tokenizes a single source file and prints a token/error count summary.
// Diagnostic-only path: drains the lexer without building an AST.
// Reports CS2001 and returns when the file's data stream cannot be opened.
//
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
	Stream input;

	try {
		input = sourceFile.GetDataStream ();
	} catch {
		Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
		return;
	}

	// FIX: the SeekableStreamReader was previously never disposed; only the
	// raw stream was wrapped in `using`. Dispose both, reader first.
	using (input)
	using (SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding)) {
		var file = new CompilationSourceFile (module, sourceFile);
		Tokenizer lexer = new Tokenizer (reader, file, session, ctx.Report);

		int token, tokens = 0, errors = 0;

		// Drain the token stream to EOF, counting tokens and lexer errors.
		while ((token = lexer.token ()) != Token.EOF) {
			tokens++;
			if (token == Token.ERROR)
				errors++;
		}

		Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
	}
}
//
// Parses a single source file into the given module.
// Emits CS2001 when the file's data stream cannot be opened and CS2015
// when the file starts with a PE 'MZ' header (binary, not text).
// When debug info is requested and parsing succeeded, records the file's
// checksum for the symbol writer.
//
public void Parse (SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
	Stream input;

	try {
		input = file.GetDataStream ();
	} catch {
		report.Error (2001, "Source file `{0}' could not be found", file.Name);
		return;
	}

	// FIX: cleanup was `reader.Dispose (); input.Close ();` at the end of the
	// method with no try/finally — an exception from the parser or checksum
	// computation leaked both. `using` guarantees disposal on every path
	// (including the early CS2015 return) in the same order as before.
	using (input) {
		// Check 'MZ' header — reject PE binaries passed as source files.
		if (input.ReadByte () == 77 && input.ReadByte () == 90) {
			report.Error (2015, "Source file `{0}' is a binary file and not a text file", file.Name);
			return;
		}

		input.Position = 0;
		using (var reader = new SeekableStreamReader (input, ctx.Settings.Encoding, session.StreamReaderBuffer)) {
			Parse (reader, file, module, session, report);

			if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
				// Re-read the raw bytes to compute the debug checksum.
				input.Position = 0;
				var checksum = session.GetChecksumAlgorithm ();
				file.SetChecksum (checksum.ComputeHash (input));
			}
		}
	}
}