/// <summary>
/// Parses and resolves a single V# source file, timing the full pipeline and
/// printing the elapsed time to the console (benchmark/driver helper).
/// </summary>
/// <param name="file">Path of the source file to parse.</param>
static void Parse(string file)
{
    Stopwatch st = Stopwatch.StartNew();

    // Dispose the reader (and the file stream it wraps) even if parsing
    // throws — the original leaked both.
    using (SeekableStreamReader ssr = new SeekableStreamReader(File.OpenRead(file), Encoding.UTF8))
    {
        SourceFile sf = new SourceFile(Path.GetFileName(file), file, 1);
        ModuleContext mctx = new ModuleContext(new Context.CompilerContext(new CompilerSettings()));
        CompilationSourceFile csf = new CompilationSourceFile(mctx, sf);
        ParserSession ps = new ParserSession();

        // Parse the file into the compilation source.
        VSharpParser vp = new VSharpParser(ssr, csf, csf.Compiler.Report, ps, mctx);
        vp.parse();

        // ResolveScope: build project content, add the parsed file plus a
        // minimal corlib, create the compilation and resolve against it.
        IProjectContent pc = new VSharpProjectContent();
        pc = pc.AddOrUpdateFiles(csf);
        pc = pc.AddAssemblyReferences(MinimalCorlib.Instance);
        var c = pc.CreateCompilation();
        ResolveContext rc = new ResolveContext(c, csf.Compiler.Report);
        csf.DoResolve(rc);
    }

    st.Stop();
    Console.WriteLine(st.Elapsed);
}
/// <summary>
/// Parses and resolves the given source file and captures the reported
/// diagnostic messages into <c>Errors</c>.
/// </summary>
/// <param name="file">Path of the source file to parse.</param>
public SourceFileAttribute(string file)
{
    // Dispose the reader/stream even on a parser exception (the original leaked them).
    using (SeekableStreamReader ssr = new SeekableStreamReader(File.OpenRead(file), Encoding.UTF8))
    {
        SourceFile sf = new SourceFile(Path.GetFileName(file), file, 1);
        ModuleContext mctx = new ModuleContext(new VSC.Context.CompilerContext(new CompilerSettings(), false));
        CompilationSourceFile csf = new CompilationSourceFile(mctx, sf);
        ParserSession ps = new ParserSession();

        // Parse the file.
        VSharpParser vp = new VSharpParser(ssr, csf, csf.Compiler.Report, ps, mctx);
        vp.parse();

        // ResolveScope: build project content and resolve the parsed file.
        IProjectContent pc = new VSharpProjectContent();
        pc = pc.AddOrUpdateFiles(csf);
        pc = pc.AddAssemblyReferences(MinimalCorlib.Instance);
        var c = pc.CreateCompilation();
        ResolveContext rc = new ResolveContext(c, csf.Compiler.Report);
        csf.DoResolve(rc);

        // Pattern match instead of an unchecked 'as' cast so a different
        // printer type fails with a clear message instead of a NullReferenceException.
        if (csf.Compiler.Report.Printer is ListReportPrinter printer)
            Errors = printer.Messages;
        else
            throw new InvalidOperationException("Expected the report printer to be a ListReportPrinter");
    }
}
// Reads one line backwards from the reader's current position, returning the
// line text without CR/LF, or null once the start of the stream is reached.
// On success the reader's position is left at the found newline so the next
// call continues with the preceding line.
// NOTE(review): the locals are named "bytes..." but hold char counts from
// ReadBlockAsync, and they are mixed with reader positions — presumably the
// reader's Seek/Position are char-based (or the encoding is single-byte);
// confirm against SeekableStreamReader's semantics.
private static async Task <string> ReadLineFromEndAsync(SeekableStreamReader reader)
{
    const int bufferSize = 128;
    char[] buffer = /*stackalloc*/ new char[bufferSize];
    string line = "";
    while (reader.Position > 0)
    {
        // Read a chunk. Do not do this char-wise for performance reasons
        long bytesToRead = Math.Min(reader.Position, bufferSize);
        reader.Seek(-bytesToRead, SeekOrigin.Current);
        int bytesRead = await reader.ReadBlockAsync(buffer);
        // Rewind past what was just consumed so Position points at the chunk start.
        reader.Seek(-bytesRead, SeekOrigin.Current);
        // Scan the chunk backwards until a NL is found
        for (int i = (int)Math.Min(bytesRead - 1, reader.Position); i >= 0; i--)
        {
            char c = buffer[i];
            if (c == '\n')
            {
                // Stop just before the newline for the next backwards read.
                reader.Seek(i, SeekOrigin.Current);
                return(line);
            }
            if (c != '\r')
            {
                // Prepend; CRs are skipped so the result carries no line endings.
                line = c + line;
            }
        }
    }
    // Reached the start of the stream without finding another newline.
    return(null);
}
// Tokenizes a single source file end-to-end and prints the token and error
// counts to the console (lexer diagnostic helper).
void tokenize_file(SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream stream;
    try
    {
        stream = sourceFile.GetDataStream();
    }
    catch
    {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (stream)
    {
        var seekableReader = new SeekableStreamReader(stream, ctx.Settings.Encoding);
        var compilationFile = new CompilationSourceFile(module, sourceFile);
        var lexer = new Tokenizer(seekableReader, compilationFile, session, ctx.Report);

        int tokenCount = 0;
        int errorCount = 0;
        for (int current = lexer.token(); current != Token.EOF; current = lexer.token())
        {
            tokenCount++;
            if (current == Token.ERROR)
                errorCount++;
        }

        Console.WriteLine("Tokenized: " + tokenCount + " found " + errorCount + " errors");
    }
}
// Wraps an already-opened source file in a CompilationSourceFile, registers
// it with the module, and runs the C# parser over it.
public static void Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report)
{
    var compilationUnit = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(compilationUnit);

    new CSharpParser(reader, compilationUnit, report, session).parse();
}
/// <summary>
/// Attaches the given source to the module as a compilation unit and parses it.
/// </summary>
public static void Parse(SeekableStreamReader reader, SourceFile source, ModuleContainer module, ParserSession session, Report report)
{
    // Build the compilation unit and register it with the module.
    CompilationSourceFile unit = new CompilationSourceFile(module, source);
    module.AddTypeContainer(unit);

    // Run the parser.
    CSharpParser parser = new CSharpParser(reader, unit, report, session);
    parser.parse();
}
// Generates an "embed loader" class for every embed field found in the
// module, rewrites each field's initializer to reference its generated
// loader, writes the generated C# to embed.g.cs next to the output file,
// and parses the generated text back into the module so it gets compiled.
public static void GenerateEmbedClasses(ModuleContainer module, ParserSession session, Report report)
{
    List <EmbedData> embeds = new List <EmbedData>();
    FindEmbedClasses(module, module, embeds);
    if (embeds.Count == 0)
    {
        // Nothing to generate.
        return;
    }
    var os = new StringWriter();
    os.Write(@" // Generated embed loader classes ");
    foreach (var e in embeds)
    {
        var loc = e._field.Location;
        // Point the field at the generated loader type: typeof(_embed_loaders.<className>).
        e._field.Initializer = new TypeOf(new MemberAccess(new SimpleName("_embed_loaders", loc), e._className), loc);
        // NOTE(review): format slot {0} (PsRootNamespace) is never referenced
        // by this template — only {1}..{6} are. Confirm that is intentional.
        os.Write(@" namespace _embed_loaders {{ internal class {1} : PlayScript.EmbedLoader {{ public {1}() : base({2}, {3}, {4}, {5}, {6}) {{ }} }} }} ", PsConsts.PsRootNamespace, e._className, e.source, e.mimeType, e.embedAsCFF, e.fontFamily, e.symbol);
    }
    string fileStr = os.ToString();
    // Place the generated file next to the compiler's output file.
    var path = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "embed.g.cs");
    System.IO.File.WriteAllText(path, fileStr);
    // Re-parse the generated text from memory so it joins this compilation.
    byte[] byteArray = Encoding.ASCII.GetBytes(fileStr);
    var input = new MemoryStream(byteArray, false);
    var reader = new SeekableStreamReader(input, System.Text.Encoding.UTF8);
    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;
    Driver.Parse(reader, file, module, session, report);
}
// Parses the file and, when debug info generation is enabled and parsing
// produced no errors, stamps the source file with a checksum of its stream.
private void DoParse(SourceFile file, ModuleContainer module, ParserSession session, Report report, SeekableStreamReader reader)
{
    Parse(reader, file, module, session, report);

    bool wantChecksum = _ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum;
    if (wantChecksum)
    {
        // Rewind so the hash covers the whole stream.
        reader.Stream.Position = 0;
        var algorithm = session.GetChecksumAlgorithm();
        file.SetChecksum(algorithm.ComputeHash(reader.Stream));
    }
}
/// <summary>
/// Opens a source file, rejects binaries via the 'MZ' header check, parses it,
/// and stamps it with a checksum when debug info generation is enabled.
/// </summary>
public void Parse(SourceFile source, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;
    try
    {
        // Get the stream
        input = source.GetDataStream();
    }
    catch
    {
        // Use the report passed in — the original inconsistently called the
        // Report property here while the rest of the method used 'report'.
        report.Error(2001, "Failed to open file '{0}' for reading", source.Name);
        return;
    }

    // using-blocks release both streams even if the parser throws; the
    // original only disposed them on the success path.
    using (input)
    {
        // Check for the 'MZ' executable header — binary files are rejected early.
        if (input.ReadByte() == 77 && input.ReadByte() == 90)
        {
            report.Error(2015, "Failed to open file '{0}' for reading because it is a binary file. A text file was expected", source.Name);
            return;
        }

        // Back to start
        input.Position = 0;

        using (SeekableStreamReader reader = new SeekableStreamReader(input, context.Settings.Encoding, session.StreamReaderBuffer))
        {
            // Parse the source
            Parse(reader, source, module, session, report);

            if (context.Settings.GenerateDebugInfo && report.Errors == 0 && !source.HasChecksum)
            {
                // Rewind and checksum the raw input for the debug info.
                input.Position = 0;
                MD5 checksum = session.GetChecksumAlgorithm();
                source.SetChecksum(checksum.ComputeHash(input));
            }
        }
    }
}
// Parses a source file. A caller-supplied input-stream delegate takes
// precedence (and owns its stream lifetime); otherwise the file is opened
// from disk and screened for binary content via the 'MZ' header.
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input = null;
    SeekableStreamReader reader = null;
    try
    {
        if (file.GetInputStream == null)
        {
            input = File.OpenRead(file.Name);
        }
        else
        {
            reader = file.GetInputStream(file);
            if (reader == null)
            {
                throw new FileNotFoundException("Delegate returned null", file.Name);
            }
        }
    }
    catch
    {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    if (reader != null)
    {
        // Delegate-provided reader: parse directly.
        DoParse(file, module, session, report, reader);
        return;
    }

    using (input)
    {
        // Check 'MZ' header
        bool looksBinary = input.ReadByte() == 77 && input.ReadByte() == 90;
        if (looksBinary)
        {
            report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
            return;
        }

        input.Position = 0;
        reader = new SeekableStreamReader(input, _ctx.Settings.Encoding, session.StreamReaderBuffer);
        DoParse(file, module, session, report, reader);
    }
}
// Methods

// Lexes an entire source file, counting tokens and error tokens. The counts
// are computed but not reported anywhere (diagnostic/benchmark helper).
public void TokenizeFile(SourceFile source, ModuleContainer module, ParserSession session)
{
    Stream input;
    try
    {
        input = source.GetDataStream();
    }
    catch
    {
        Report.Error(2001, "Failed to open file '{0}' for reading", source.Name);
        return;
    }

    using (input)
    {
        var reader = new SeekableStreamReader(input, context.Settings.Encoding);
        var file = new CompilationSourceFile(module, source);
        var lexer = new Tokenizer(reader, file, session, context.Report);

        int tokenCount = 0, errorCount = 0;
        for (int tok = lexer.token(); tok != Token.EOF; tok = lexer.token())
        {
            tokenCount++;
            if (tok == Token.ERROR)
            {
                errorCount++;
            }
        }
    }
}
// Runs the tokenizer over one file and prints how many tokens and error
// tokens were produced.
private void DoTokenize(SourceFile sourceFile, ModuleContainer module, ParserSession session, SeekableStreamReader reader)
{
    var compilationFile = new CompilationSourceFile(module, sourceFile);
    var tokenizer = new Tokenizer(reader, compilationFile, session, _ctx.Report);

    int tokenCount = 0;
    int errorCount = 0;
    int current;
    while ((current = tokenizer.token()) != Token.EOF)
    {
        tokenCount += 1;
        if (current == Token.ERROR)
        {
            errorCount += 1;
        }
    }

    Console.WriteLine("Tokenized: " + tokenCount + " found " + errorCount + " errors");
}
/// <summary>
/// Opens a source file's data stream and parses it, disposing the reader and
/// stream afterwards.
/// </summary>
public void Parse(SourceFile file, ModuleContainer module, ParserSession session)
{
    Stream input;
    try
    {
        input = file.GetDataStream();
    }
    catch
    {
        // Fixed: the original appended the file name after an unfilled {0}
        // placeholder ("...werden<name>"); substitute it into the slot instead.
        Debug.Log(string.Format("Source file `{0}' konnte nicht gefunden werden", file.Name));
        return;
    }

    // using-blocks replace the manual Dispose()/Close() calls so both streams
    // are released even if the parser throws (the original leaked them then).
    using (input)
    using (SeekableStreamReader reader = new SeekableStreamReader(input, compilerContext.Settings.Encoding, session.StreamReaderBuffer))
    {
        Parse(reader, file, module, session);
    }
}
/// <summary>
/// Parse a G-code file
/// </summary>
/// <param name="fileName">File to analyze</param>
/// <returns>Information about the file</returns>
public static async Task <ParsedFileInfo> Parse(string fileName)
{
    FileStream fileStream = new FileStream(fileName, FileMode.Open);
    SeekableStreamReader reader = new SeekableStreamReader(fileStream);
    try
    {
        ParsedFileInfo result = new ParsedFileInfo
        {
            FileName = await FilePath.ToVirtualAsync(fileName),
            Size = fileStream.Length,
            LastModified = File.GetLastWriteTime(fileName)
        };

        if (fileStream.Length > 0)
        {
            // Scan the header and footer for print metadata and filament use.
            List <float> filamentConsumption = new List <float>();
            await ParseHeader(reader, filamentConsumption, result);
            await ParseFooter(reader, fileStream.Length, filamentConsumption, result);
            result.Filament = filamentConsumption;

            // Derive the layer count from first-layer height + uniform layer height.
            if (result.FirstLayerHeight + result.LayerHeight > 0F && result.Height > 0F)
            {
                result.NumLayers = (int?)(Math.Round((result.Height - result.FirstLayerHeight) / result.LayerHeight) + 1);
            }
        }

        return result;
    }
    finally
    {
        // The original duplicated these Close() calls in both the success and
        // catch-rethrow paths; a single finally covers both.
        reader.Close();
        fileStream.Close();
    }
}
// Tokenizes a source file, preferring a caller-supplied reader delegate when
// one is present; otherwise the file is opened from disk.
private void TokenizeFile(SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input = null;
    SeekableStreamReader reader = null;
    try
    {
        if (sourceFile.GetInputStream == null)
        {
            input = File.OpenRead(sourceFile.Name);
        }
        else
        {
            reader = sourceFile.GetInputStream(sourceFile);
            if (reader == null)
            {
                throw new FileNotFoundException("Delegate returned null", sourceFile.Name);
            }
        }
    }
    catch
    {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    if (reader != null)
    {
        // Delegate-provided reader: the delegate owns the stream lifetime.
        DoTokenize(sourceFile, module, session, reader);
        return;
    }

    using (input)
    {
        reader = new SeekableStreamReader(input, _ctx.Settings.Encoding);
        DoTokenize(sourceFile, module, session, reader);
    }
}
/// <summary>
/// Opens, screens ('MZ' binary check) and parses a source file, stamping it
/// with a checksum when debug info is enabled and parsing produced no errors.
/// </summary>
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;
    try
    {
        input = file.GetDataStream();
    }
    catch
    {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    // The original only disposed the streams on the success path; using-blocks
    // also cover exceptions thrown while parsing.
    using (input)
    {
        // Check 'MZ' header
        if (input.ReadByte() == 77 && input.ReadByte() == 90)
        {
            report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
            return;
        }

        input.Position = 0;
        using (SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding, session.StreamReaderBuffer))
        {
            Parse(reader, file, module, session, report);

            if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum)
            {
                // Rewind and checksum the raw input for the debug info.
                input.Position = 0;
                var checksum = session.GetChecksumAlgorithm();
                file.SetChecksum(checksum.ComputeHash(input));
            }
        }
    }
}
// Scans G-code forward from the current reader position, harvesting print
// metadata (first layer height, layer height, filament usage, slicer name,
// print/simulated times) until the info is complete or the configured read
// limit is reached.
private static async Task ParseHeader(SeekableStreamReader reader, ParsedFileInfo partialFileInfo)
{
    // Every time CTS.Token is accessed a copy is generated. Hence we cache one until this method completes
    CancellationToken token = Program.CancelSource.Token;

    List <float> filamentConsumption = new List <float>();
    bool inRelativeMode = false, lastLineHadInfo = false;
    do
    {
        token.ThrowIfCancellationRequested();

        string line = await reader.ReadLineAsync();
        if (line == null)
        {
            // End of file reached.
            break;
        }
        bool gotNewInfo = false;

        // See what code to deal with
        Code code = new Code(line);
        if (code.Type == CodeType.GCode && partialFileInfo.FirstLayerHeight == 0)
        {
            if (code.MajorNumber == 91)
            {
                // G91 code (relative positioning)
                inRelativeMode = true;
                gotNewInfo = true;
            }
            else if (inRelativeMode)
            {
                // G90 (absolute positioning)
                inRelativeMode = (code.MajorNumber != 90);
                gotNewInfo = true;
            }
            else if (code.MajorNumber == 0 || code.MajorNumber == 1)
            {
                // G0/G1 is a move, see if there is a Z parameter present
                CodeParameter zParam = code.Parameter('Z');
                if (zParam != null)
                {
                    float z = zParam;
                    if (z <= Settings.MaxLayerHeight)
                    {
                        // Plausible first-layer height (at or below the configured maximum).
                        partialFileInfo.FirstLayerHeight = z;
                        gotNewInfo = true;
                    }
                }
            }
        }
        else if (code.Type == CodeType.Comment)
        {
            // Comments may carry slicer metadata; each Find* helper reports
            // whether it extracted something from this line. Fields already
            // populated are skipped via the short-circuit guards.
            gotNewInfo |= partialFileInfo.LayerHeight == 0 && FindLayerHeight(line, ref partialFileInfo);
            gotNewInfo |= FindFilamentUsed(line, ref filamentConsumption);
            gotNewInfo |= partialFileInfo.GeneratedBy == "" && FindGeneratedBy(line, ref partialFileInfo);
            gotNewInfo |= partialFileInfo.PrintTime == 0 && FindPrintTime(line, ref partialFileInfo);
            gotNewInfo |= partialFileInfo.SimulatedTime == 0 && FindSimulatedTime(line, ref partialFileInfo);
        }

        // Stop early after two consecutive uninformative lines once all
        // required fields are populated.
        if (!gotNewInfo && !lastLineHadInfo && IsFileInfoComplete(partialFileInfo))
        {
            break;
        }
        lastLineHadInfo = gotNewInfo;
    } while (reader.Position < Settings.FileInfoReadLimit);

    // Copy the collected filament figures into the result.
    foreach (float filament in filamentConsumption)
    {
        partialFileInfo.Filament.Add(filament);
    }
}
// For every dynamic class found in the module, generates a partial class
// implementing PlayScript.IDynamicClass (backed by a lazily-created
// PlayScript.DynamicProperties bag), writes the generated C# to dynamic.g.cs
// next to the output file, and parses the generated source back into the
// module so it gets compiled.
public static void GenerateDynamicPartialClasses(ModuleContainer module, ParserSession session, Report report)
{
    List <Class> classes = new List <Class>();
    FindDynamicClasses(module, classes);
    if (classes.Count == 0)
    {
        // Nothing to generate.
        return;
    }
    var os = new StringWriter();
    os.Write(@" // Generated dynamic class partial classes ");
    foreach (var cl in classes)
    {
        // NOTE(review): format slot {0} (PsRootNamespace) is not referenced by
        // the template below — only {1} (namespace) and {2} (class basename)
        // are. Confirm that is intentional.
        os.Write(@" namespace {1} {{ partial class {2} : PlayScript.IDynamicClass {{ private PlayScript.IDynamicClass __dynamicProps; dynamic PlayScript.IDynamicClass.__GetDynamicValue(string name) {{ object value = null; if (__dynamicProps != null) {{ value = __dynamicProps.__GetDynamicValue(name); }} return value; }} bool PlayScript.IDynamicClass.__TryGetDynamicValue(string name, out object value) {{ if (__dynamicProps != null) {{ return __dynamicProps.__TryGetDynamicValue(name, out value); }} else {{ value = PlayScript.Undefined._undefined; return false; }} }} void PlayScript.IDynamicClass.__SetDynamicValue(string name, object value) {{ if (__dynamicProps == null) {{ __dynamicProps = new PlayScript.DynamicProperties(this); }} __dynamicProps.__SetDynamicValue(name, value); }} bool PlayScript.IDynamicClass.__DeleteDynamicValue(object name) {{ if (__dynamicProps != null) {{ return __dynamicProps.__DeleteDynamicValue(name); }} return false; }} bool PlayScript.IDynamicClass.__HasDynamicValue(string name) {{ if (__dynamicProps != null) {{ return __dynamicProps.__HasDynamicValue(name); }} return false; }} System.Collections.IEnumerable PlayScript.IDynamicClass.__GetDynamicNames() {{ if (__dynamicProps != null) {{ return __dynamicProps.__GetDynamicNames(); }} return null; }} }} }} ", PsConsts.PsRootNamespace, ((ITypeDefinition)cl).Namespace, cl.MemberName.Basename);
    }
    string fileStr = os.ToString();
    // Place the generated file next to the compiler's output file.
    var path = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "dynamic.g.cs");
    System.IO.File.WriteAllText(path, fileStr);
    // Re-parse the generated text from memory so it joins this compilation.
    byte[] byteArray = Encoding.ASCII.GetBytes(fileStr);
    var input = new MemoryStream(byteArray, false);
    var reader = new SeekableStreamReader(input, System.Text.Encoding.UTF8);
    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;
    Driver.Parse(reader, file, module, session, report);
}
// Scans G-code backwards from the end of the file to determine the object
// height (plus any metadata the header pass missed), stopping once the info
// is complete or the read limit from the end of the file is exceeded.
private static async Task ParseFooter(SeekableStreamReader reader, long length, ParsedFileInfo partialFileInfo)
{
    // Cache the token; every access to CTS.Token creates a copy.
    CancellationToken token = Program.CancelSource.Token;
    reader.Seek(0, SeekOrigin.End);

    bool inRelativeMode = false, lastLineHadInfo = false;
    float? lastZ = null;
    // Start from whatever the header pass already collected.
    List <float> filamentConsumption = new List <float>(partialFileInfo.Filament);
    do
    {
        token.ThrowIfCancellationRequested();

        // Read another line
        string line = await ReadLineFromEndAsync(reader);
        if (line == null)
        {
            // Reached the start of the file.
            break;
        }
        bool gotNewInfo = false;

        // See what code to deal with
        Code code = new Code(line);
        if (code.Type == CodeType.GCode && partialFileInfo.Height == 0)
        {
            if (code.MajorNumber == 90)
            {
                // G90 code (absolute positioning) implies we were in relative mode
                inRelativeMode = true;
                gotNewInfo = true;
            }
            else if (inRelativeMode)
            {
                // G91 code (relative positioning) implies we were in absolute mode
                inRelativeMode = (code.MajorNumber != 91);
                gotNewInfo = true;
            }
            else if (code.MajorNumber == 0 || code.MajorNumber == 1)
            {
                // G0/G1 is a move, see if there is a Z parameter present
                // Users tend to place their own lift Z code at the end, so attempt to read two G0/G1 Z
                // codes and check the height difference between them
                CodeParameter zParam = code.Parameter('Z');
                if (zParam != null && (code.Comment == null || !code.Comment.TrimStart().StartsWith("E")))
                {
                    gotNewInfo = true;
                    if (lastZ == null)
                    {
                        // First Z seen (reading backwards).
                        lastZ = zParam;
                    }
                    else
                    {
                        float z = zParam;
                        if (lastZ - z > Settings.MaxLayerHeight)
                        {
                            // Big drop: the later Z was a lift move, use the earlier one.
                            partialFileInfo.Height = z;
                        }
                        else
                        {
                            partialFileInfo.Height = lastZ.Value;
                        }
                        break;
                    }
                }
            }
        }
        else if (code.Type == CodeType.Comment)
        {
            // Comments may carry slicer metadata; each Find* helper reports
            // whether it extracted something from this line.
            gotNewInfo |= partialFileInfo.LayerHeight == 0 && FindLayerHeight(line, ref partialFileInfo);
            gotNewInfo |= FindFilamentUsed(line, ref filamentConsumption);
            // gotNewInfo |= partialFileInfo.GeneratedBy == "") && FindGeneratedBy(line, ref partialFileInfo);
            gotNewInfo |= partialFileInfo.PrintTime == 0 && FindPrintTime(line, ref partialFileInfo);
            gotNewInfo |= partialFileInfo.SimulatedTime == 0 && FindSimulatedTime(line, ref partialFileInfo);
        }

        // Stop after two consecutive uninformative lines once the info is complete.
        if (!gotNewInfo && !lastLineHadInfo && IsFileInfoComplete(partialFileInfo))
        {
            break;
        }
        lastLineHadInfo = gotNewInfo;
    } while (length - reader.Position < Settings.FileInfoReadLimit);

    // Replace the filament list with the merged results.
    partialFileInfo.Filament.Clear();
    foreach (float filament in filamentConsumption)
    {
        partialFileInfo.Filament.Add(filament);
    }

    // Fall back to the single Z seen if no second G0/G1 Z was found.
    if (lastZ != null && partialFileInfo.Height == 0)
    {
        partialFileInfo.Height = lastZ.Value;
    }
}