// Generated value constructor: copies each argument into the matching
// field. NOTE(review): `current` and `err` look like the most recently
// read token and first scan error — confirm against the declaring struct.
public tokens(scanner.Scanner scanner = default, int current = default, error err = default, token.Pos @base = default)
{
    this.scanner = scanner;
    this.current = current;
    this.err = err;
    this.@base = @base;
}
// Generated value constructor: straight field-by-field initialization.
// NOTE(review): sharedPkgs/localPkgs presumably cache imported packages
// by path — verify against the importer that owns this struct.
public parser(scanner.Scanner scanner = default, int tok = default, @string lit = default, @string id = default, map<@string, ptr<types.Package>> sharedPkgs = default, map<@string, ptr<types.Package>> localPkgs = default)
{
    this.scanner = scanner;
    this.tok = tok;
    this.lit = lit;
    this.id = id;
    this.sharedPkgs = sharedPkgs;
    this.localPkgs = localPkgs;
}
// NewTokenizer builds a Tokenizer over the input named `name`, read from r.
// The scanner is configured so that newline is significant (it acts like a
// semicolon in assembly source) and comments are returned as tokens.
// `file` is stored on the Tokenizer — NOTE(review): presumably so it can be
// closed later; confirm against the Tokenizer type.
public static ref Tokenizer NewTokenizer(@string name, io.Reader r, ref os.File file)
{
    scanner.Scanner s = default;
    s.Init(r);

    // Newline is like a semicolon; only tab, CR and space are whitespace,
    // so '\n' is delivered to the caller rather than skipped.
    s.Whitespace = 1L << (int)('\t') | 1L << (int)('\r') | 1L << (int)(' ');

    // Scan every token class, and do not skip comments: their embedded
    // newlines must be counted.
    var scanMode = scanner.ScanChars | scanner.ScanFloats | scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings | scanner.ScanComments;
    s.Mode = scanMode;

    s.Position.Filename = name;
    s.IsIdentRune = isIdentRune;

    // Positions are recorded against a fresh file base whose absolute path
    // honors the -trimpath flag.
    return (ref new Tokenizer(
        s: &s,
        base : src.NewFileBase(name, objabi.AbsFile(objabi.WorkingDir(), name, *flags.TrimPath)),
        line: 1,
        file: file));
}