public FileAST Parse(SourceFileModel fileModel)
{
    // Compiles one source file in three phases:
    // lexing -> raw section grouping -> typed AST construction.
    ContextFile fileContext = new ContextFile(this.projectContext, fileModel);

    // Phase 1: tokenize the source into per-line token collections.
    List<LineTokenCollection> lineTokens = Scan(fileContext, fileModel);

    // Phase 2: group the token lines into raw file sections.
    FileRaw rawFile = new FileRawParser().Parse(lineTokens, fileContext);

    // Phase 3: turn the raw sections into the typed AST.
    return new FileASTParser().Parse(rawFile, fileContext);
}
public Exp Parse(List<Token> tokens, ContextFile fileContext)
{
    // Wraps the token list in a tape cursor, then parses from the
    // lowest-precedence production (assignment) downwards.
    tape = new TokenTape(tokens, fileContext);
    return ParseAssign();
}
public List<SectionRaw> Parse(IEnumerable<LineTokenCollection> lineTokens, ContextFile fileContext)
{
    // Turns the token lines into a flat list of raw sections.
    // A non-empty line that is not a section head is reported as an
    // error and skipped; empty lines are skipped silently.
    this.fileContext = fileContext;
    Sections.Clear();

    List<LineTokenCollection> buffered = new List<LineTokenCollection>(lineTokens);
    tape = new ArrayTape<LineTokenCollection>(buffered.ToArray());

    while (tape.HasCurrent)
    {
        if (!IsSectionHead(tape.Current))
        {
            if (tape.Current.Count > 0)
            {
                error(tape.Current.FirstToken, "错误的段头");
            }
            tape.MoveNext();
            continue;
        }

        // ParseSection consumes the section's lines itself.
        SectionRaw section = ParseSection();
        if (section != null)
        {
            Sections.Add(section);
        }
    }
    return Sections;
}
public FileAST(ContextFile fileContext)
{
    // Start with empty AST collections; the parser fills them in later.
    EnumASTList = new List<EnumAST>();
    DimASTList = new List<DimAST>();
    ClassASTList = new List<ClassAST>();
    FileContext = fileContext;
}
public void Reset(SourceReader sr, ContextFile fileContext)
{
    // Rebind the scanner to a new reader and restart position tracking
    // at the top-left of the source (line 1, column 1).
    this.fileContext = fileContext;
    reader = sr;
    col = 1;
    line = 1;
}
public MethodParameter(MethodName methodNameAST, ProcNameRaw.ProcParameter raw)
{
    // Capture the parameter's raw form plus the surrounding method/file
    // contexts, both reachable through the owning method-name node.
    MethodNameAST = methodNameAST;
    ParameterRaw = raw;

    var methodAST = methodNameAST.MethodAST;
    MethodContext = methodAST.MethodContext;
    FileContext = methodAST.ASTClass.FileContext;
}
public void Reset(SourceReader sr, ContextFile fileContext)
{
    // Point the tokenizer at a fresh reader, restart line tracking and
    // drop any token lines accumulated from a previous run.
    this.fileContext = fileContext;
    reader = sr;
    line = 1;
    lineTokens.Clear();
}
public List<PropertyASTRaw> Parse(IEnumerable<LexToken> tokens, ContextFile fileContext)
{
    // Buffer the incoming token stream, wrap it in a tape cursor, and
    // parse the property ("dim") declarations it contains.
    this.fileContext = fileContext;
    tape = new TokenTape(new List<LexToken>(tokens), fileContext);
    return ParseDimList();
}
public List<SectionImportRaw.PackageRaw> Parse(IEnumerable<LexToken> tokens, ContextFile fileContext)
{
    // Buffer the incoming tokens, set up the tape cursor, and parse the
    // import package list.
    this.fileContext = fileContext;
    LexToken[] buffered = new List<LexToken>(tokens).ToArray();
    tape = new TokenTape(buffered, fileContext);
    return ParsePackageList();
}
private async Task<IEnumerable<Log_Master>> FileLog_ListAsync(int? level = null, DateTime? from = null, DateTime? to = null)
{
    // Reads log entries from the daily log-file store, optionally filtered
    // by level and date range. Returns null when file logging is disabled
    // (preserved from the original behavior — callers must handle it).
    IEnumerable<Log_Master> entities = null;
    if (_configuration.UseDailyLogFile)
    {
        using (var ctx = new ContextFile(_configuration.File_FolderPath))
        {
            // Fix: await the query instead of blocking on .Result, which
            // risks deadlocks on a synchronization context and ties up a
            // thread-pool thread for the duration of the read.
            entities = await ctx.GetList(level, from, to);
        }
    }
    return entities;
}
private List<Token> ScanFileCode(ContextFile fileContext, ZFileModel fileModel)
{
    // Virtual files have no on-disk source, so there is nothing to scan.
    if (fileModel.ZFileInfo.IsVirtual)
    {
        return new List<Token>();
    }

    // Tokenize the real source file from disk.
    FileSourceReader reader = new FileSourceReader(fileModel.ZFileInfo.RealFilePath);
    return ScanReaderTokens(reader, fileContext);
}
public List<Token> Scan(ContextFile fileContext, ZFileModel fileModel)
{
    // The full token stream is the injected "pre" code followed by the
    // tokens of the file itself, in that order.
    List<Token> allTokens = new List<Token>();
    allTokens.AddRange(ScanPreCode(fileModel.ZFileInfo.FilePreText, fileContext));
    allTokens.AddRange(ScanFileCode(fileContext, fileModel));
    return allTokens;
}
public FileType Parse(ZFileModel fileModel)
{
    // Lexes the file, splits it into sections, then collapses the
    // multi-type representation into a single FileType.
    ContextFile fileContext = new ContextFile(this.projectContext, fileModel);
    List<Token> tokens = Scan(fileContext, fileModel);

    FileSectionParser parser = new FileSectionParser();
    FileMutilType fileMutilType = parser.Parse(tokens, fileContext);

    FileType fileType = ParseSingleMutil(fileMutilType);
    // Fix: ParseSingleMutil can return null — the sibling
    // SourceFileModel overload carries the author's own (commented-out)
    // null guard. Avoid a NullReferenceException here and let the caller
    // handle the null result.
    if (fileType != null)
    {
        fileType.FileModel = fileModel;
        fileType.ProjectContext = this.projectContext;
    }
    return fileType;
}
public SectionProcRaw Parse(IEnumerable<LexToken> tokens, ContextFile fileContext, SectionProcRaw procAST)
{
    // Parses a procedure header of the shape "<name-part> : <return-part>"
    // into the supplied raw AST node and returns that same node.
    this.fileContext = fileContext;
    this.ast = procAST;

    tape = new TokenTape(new List<LexToken>(tokens).ToArray(), fileContext);

    ast.NamePart = ParseProcName();
    tape.Match(TokenKindSymbol.Colon);
    ast.RetToken = ParseRetProc();
    return ast;
}
private List<Token> ScanPreCode(string preCode, ContextFile fileContext)
{
    // No injected pre-code means no tokens.
    if (string.IsNullOrEmpty(preCode))
    {
        return new List<Token>();
    }

    StringSourceReader reader = new StringSourceReader(preCode);
    List<Token> scanned = ScanReaderTokens(reader, fileContext);

    // Shift pre-code positions into a reserved negative/offset range so
    // they can never collide with positions of tokens from the real file
    // (method bodies are distinguished by line/column).
    foreach (Token token in scanned)
    {
        token.Line = -token.Line - 1;
        token.Col = token.Col - 1000;
    }
    return scanned;
}
private List<LineTokenCollection> ScanTextCode(string code, ContextFile fileContext, int startLine)
{
    // Empty or missing code yields an empty token-line list.
    if (string.IsNullOrEmpty(code))
    {
        return new List<LineTokenCollection>();
    }

    // Tokenize the text, numbering lines from startLine so positions map
    // back to the original file.
    StringSourceReader reader = new StringSourceReader(code);
    return ScanReaderTokens(reader, fileContext, startLine);
}
public FileRaw Parse(IEnumerable<LineTokenCollection> lineTokens, ContextFile fileContext)
{
    // Drop empty token lines, then let the section parser group the rest
    // into raw sections wrapped in a FileRaw.
    List<LineTokenCollection> nonEmptyLines = new List<LineTokenCollection>();
    foreach (LineTokenCollection tokenLine in lineTokens)
    {
        if (tokenLine.Count > 0)
        {
            nonEmptyLines.Add(tokenLine);
        }
    }

    SectionParser sectionParser = new SectionParser();
    Sections = sectionParser.Parse(nonEmptyLines, fileContext);

    fileRaw = new FileRaw();
    fileRaw.Sections = Sections;
    return fileRaw;
}
public FileClass(ContextFile fileContext, FileMutilType fmt)
{
    // A class file carries exactly one class section plus the shared
    // file-level sections; dim and properties sections are optional.
    this.FileContext = fileContext;

    ClassSection = fmt.Classes[0];
    ImporteSection = fmt.ImporteSection;
    UseSection = fmt.UseSection;
    Proces = fmt.Proces;
    Constructors = fmt.Constructors;

    if (fmt.Dimes.Count > 0)
    {
        DimSection = fmt.Dimes[0];
    }
    if (fmt.Propertieses.Count > 0)
    {
        PropertiesesSection = fmt.Propertieses[0];
    }
}
public FileAST Parse(FileRaw fileRaw, ContextFile fileContext)
{
    // Walks the raw sections in order and assembles the file AST:
    // import/use sections are attached directly, while any type-related
    // section starts a type body that consumes its own sections.
    fileAST = new FileAST(fileContext);
    tape = new ArrayTape<SectionRaw>(fileRaw.Sections);

    while (tape.HasCurrent)
    {
        SectionRaw section = tape.Current;
        if (section is SectionImportRaw importRaw)
        {
            fileAST.ImportSection = new SectionImport(fileAST, importRaw);
            tape.MoveNext();
        }
        else if (section is SectionUseRaw useRaw)
        {
            fileAST.UseSection = new SectionUse(fileAST, useRaw);
            tape.MoveNext();
        }
        else if (section is SectionExtendsRaw
                 || section is SectionPropertiesRaw
                 || section is SectionProcRaw
                 || section is SectionNameRaw)
        {
            // The type-body parser advances the tape itself while it
            // gathers the sections that belong to one type.
            TypeAST typeAST = ParseTempTypeBody().Parse();
            fileAST.AddTypeAST(typeAST);
        }
        else
        {
            // Unknown section kind: the raw parser should never emit this.
            throw new CCException();
        }
    }
    return fileAST;
}
public FileMutilType Parse(List<Token> tokens, ContextFile fileContext)
{
    // Splits the token stream into sections. A line shaped "<ident> :"
    // begins a named section; newlines between sections are skipped;
    // anything else is parsed as a procedure, with constructors wrapped
    // in their own section kind.
    tape = new TokenTape(tokens, fileContext);
    fileMY = new FileMutilType();
    fileMY.FileContext = fileContext;

    while (tape.CurrentKind != TokenKind.EOF)
    {
        if (tape.CurrentKind == TokenKind.Ident && tape.Next.Kind == TokenKind.Colon)
        {
            SectionBase section = ParseSection();
            if (section != null)
            {
                fileMY.AddSection(section);
            }
        }
        else if (tape.CurrentKind == TokenKind.NewLine)
        {
            SkipNewLine();
        }
        else
        {
            SectionProc procSection = ParseProc();
            if (procSection == null)
            {
                continue;
            }
            if (procSection.NamePart.IsConstructor())
            {
                fileMY.AddSection(new SectionConstructor(procSection));
            }
            else
            {
                fileMY.AddSection(procSection);
            }
        }
    }
    return fileMY;
}
public async Task LogInformation(string message, string data = null, [CallerMemberName] string caller = "", [CallerLineNumber] int line = 0)
{
    // Writes an information-level log entry to every configured sink
    // (daily file, SQLite, SQL Server). Caller name/line are captured
    // automatically via the Caller* attributes.
    var entity = new Log_Master()
    {
        DateTime = DateTime.Now,
        LevelID = (int)LogLevel.Information,
        CallerMemberName = caller,
        CallerMemberLineNumber = line,
        Message = message,
        FullData = data
    };
    if (_configuration.UseDailyLogFile)
    {
        using (var ctx = new ContextFile(_configuration.File_FolderPath))
            ctx.Log(entity);
    }
    if (_configuration.UseSQLite)
    {
        using (var ctx = new ContextSQLite(_configuration.SQLite_FullPath))
        {
            ctx.Logger_Masters.Add(entity);
            // Fix: the save task was fired and forgotten, so the context
            // could be disposed before the write completed and any failure
            // was unobservable. Await it while the context is still alive.
            await ctx.SaveEntitiesAsync();
        }
    }
    if (_configuration.UseSQL)
    {
        using (var ctx = new ContextSQL(_configuration.SQL_ConnectionString))
        {
            // Reset the identity so the SQL store assigns its own key.
            entity.ID = 0;
            ctx.Logger_Masters.Add(entity);
            await ctx.SaveEntitiesAsync();
        }
    }
}
public List<LineTokenCollection> Scan(ContextFile fileContext, SourceFileModel fileModel)
{
    // Tokenizes the three segments of a source model (pre, real, back)
    // in order, skipping blank segments, and concatenates the results.
    List<LineTokenCollection> allTokens = new List<LineTokenCollection>();

    void ScanSegment(string code, int startLine)
    {
        if (!string.IsNullOrWhiteSpace(code))
        {
            allTokens.AddRange(ScanTextCode(code, fileContext, startLine));
        }
    }

    ScanSegment(fileModel.PreSourceCode, fileModel.PreSourceStartLine);
    ScanSegment(fileModel.RealSourceCode, fileModel.RealSourceStartLine);
    ScanSegment(fileModel.BackSourceCode, fileModel.BackSourceStartLine);
    return allTokens;
}
private List<Token> ScanReaderTokens(SourceReader reader, ContextFile fileContext)
{
    // Thin wrapper: delegates scanning to the shared tokenizer instance.
    return tokenizer.Scan(reader, fileContext);
}
// Creates a tape cursor over a buffered copy of the token stream; the
// file context is kept for error reporting by parser helpers.
public TokenTape(IEnumerable <LexToken> tokens, ContextFile fileContext) : base(tokens.ToArray()) { this.fileContext = fileContext; }
// Represents a source file consisting of enum sections only; stores the
// owning file context and the parsed enum sections as-is.
public FileEnum(ContextFile fileContext, List <SectionEnum> enumSections) { this.FileContext = fileContext; EnumSections = enumSections; }
// Static convenience wrapper: forwards a formatted error message at the
// given code position to the file context's own error reporter.
public static void Errorf(ContextFile fileContext, CodePosition postion, string msgFormat, params string[] msgParams) { fileContext.Errorf(postion, msgFormat, msgParams); }
// Creates a tape cursor over a copy of the expression list; the file
// context is kept for error reporting by parser helpers.
public ExpTape(List <Exp> tokens, ContextFile fileContext) : base(tokens.ToArray()) { this.fileContext = fileContext; }
// Tokenizes the whole input into a flat Token list (line breaks become
// NewLine tokens). Handles whitespace, // and /* */ comments, string and
// numeric literals, operators, identifiers/keywords (ASCII letters and
// Chinese characters), control characters, and a "说明:" line treated as a
// documentation comment. Pairs like `ch == ',' || ch == ','` compare the
// ASCII symbol with its fullwidth Chinese-punctuation variant; where both
// sides look identical (e.g. '+', '-', '='), the second operand was
// presumably a fullwidth character lost to re-encoding — TODO confirm
// against the original file.
// NOTE(review): the inequality ("!=") branch tests `nextChar` against
// both '!' and '=', so it can never match "!=" — `ch` is never checked.
// It likely should read (ch == '!') && (nextChar == '='); as written the
// branch appears unreachable for ASCII input. Confirm intent before fixing.
public List <Token> Scan(SourceReader sr, ContextFile fileContext) { Reset(sr, fileContext); //report("开始"); tokenList.Clear(); while (ch != END) { //report("ch="+ch+" "+(int)ch); char nextChar = GetNext(); if (ch == ' ' || ch == '\t') { //report("SkipSpace"); SkipWhiteSpace(); } else if (ch == '/' && nextChar == '/') { SkipSingleLineComment(); } else if (ch == '/' && nextChar == '*') { SkipMutilLineComment(); } else if (ch == '/') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.DIV }; tokenList.Add(tok); Next(); } else if (ch == '"' || ch == '“' || ch == '”') { string str = scanString(); Token tok = new Token() { Col = col - 1, Line = line, Text = str, Kind = TokenKind.LiteralString }; tokenList.Add(tok); } else if (ch == '\r' && nextChar == '\n') { Token tok = ScanNewLine(2);// new Token() { Col = col, Line = line, Text = "\r\n", Kind = TokenKind.NewLine }; //report("扫描换行符"); //Next(); Next(); //col = 1; //line++; tokenList.Add(tok); } else if (ch == '\n' || ch == '\r') { Token tok = ScanNewLine(1); tokenList.Add(tok); //SkipLine(); //Token tok = new Token() { Col = col, Line = line, Text = "\r\n", Kind = TokenKind.NewLine }; //Next(); //col = 1; //line++; //tokenList.Add(tok); } else if ("0123456789".IndexOf(ch) != -1) { string str = scanNumber(); var temp = col; if (StringHelper.IsInt(str)) { Token tok = new Token() { Col = temp, Line = line, Text = str, Kind = TokenKind.LiteralInt }; tokenList.Add(tok); } else if (StringHelper.IsFloat(str)) { Token tok = new Token() { Col = temp, Line = line, Text = str, Kind = TokenKind.LiteralFloat }; tokenList.Add(tok); } else { lexError(str + "不是正确的数字"); } } else if (ch == '+' || ch == '+') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.ADD }; tokenList.Add(tok); Next(); } else if (ch == '-' || ch == '-') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.SUB }; tokenList.Add(tok); Next(); } else if ((ch == '=' || ch == '=') && (nextChar == '=' || nextChar == 
'=')) { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.EQ }; tokenList.Add(tok); Next(); Next(); } else if ((ch == '=' || ch == '=') && (nextChar == '>')) { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.AssignTo }; tokenList.Add(tok); Next(); Next(); } else if ((ch == '=' || ch == '=')) { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.Assign }; tokenList.Add(tok); Next(); } else if ((ch == '*')) { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.MUL }; tokenList.Add(tok); Next(); } else if (ch == ',' || ch == ',') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.Comma }; tokenList.Add(tok); Next(); } else if (ch == ';' || ch == ';') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.Semi }; tokenList.Add(tok); Next(); } else if (ch == '(' || ch == '(') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.LBS }; tokenList.Add(tok); Next(); } else if (ch == ')' || ch == ')') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.RBS }; tokenList.Add(tok); Next(); } else if (ch == '>' && GetNext() == '=') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.GE }; tokenList.Add(tok); Next(); Next(); } else if (ch == '>') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.GT }; tokenList.Add(tok); Next(); } else if (ch == '<' && nextChar == '=') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.LE }; tokenList.Add(tok); Next(); Next(); } else if (ch == '<') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.LT }; tokenList.Add(tok); Next(); } else if ((nextChar == '!' 
|| nextChar == '!') && (nextChar == '=' || nextChar == '=')) { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.NE }; tokenList.Add(tok); Next(); Next(); } /*else if (ch == ':' && nextChar == ':') * { * Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.Colond }; * tokenList.Add(tok); * Next(); Next(); * }*/ else if (ch == ':' || ch == ':') { Token tok = new Token() { Col = col, Line = line, Kind = TokenKind.Colon }; tokenList.Add(tok); Next(); } else if ((ch >= 'A' && ch <= 'Z') /*|| (ch == '_') */ || (ch >= 'a' && ch <= 'z') || ChineseHelper.IsChineseLetter(ch)) { var tempCol = col; var tempLine = line; Token t1 = scanKeyIdent(); //if (t1.GetText().StartsWith("否则如果") || t1.GetText().StartsWith("否则") || t1.GetText().StartsWith("如果")) //{ // Console.WriteLine("否则如果"); //} //tokenList.Add(t1); if (t1.GetText() == "说明") { // char nchar = GetNext(); if (ch == ':' || ch == ':') { SkipSingleLineComment(); continue;; } } addIdentOrKey(t1); } else if (char.IsControl(ch)) { while (char.IsControl(ch) && ch != END) { Next(); if ((int)ch == 13) { line++; col = 1; } } } else { lexError("无法识别" + (int)ch + ": '" + ch + "' "); Next(); } } return(tokenList); }
// Tokenizes the input into per-line token collections (lineTokens):
// curToks accumulates one line's tokens and is flushed via AddLineToken on
// each line break, with a final flush after the loop for an unterminated
// last line. Otherwise mirrors the flat Token scanner: whitespace,
// comments, string/number literals, operators (some paired with fullwidth
// Chinese-punctuation variants), identifiers/keywords, and a "说明:" line
// treated as a documentation comment.
// NOTE(review): the inequality ("!=") branch tests `nextChar` against
// both '!' and '=', so it can never match "!=" — `ch` is never checked.
// It likely should read (ch == '!') && (nextChar == '='); as written the
// branch appears unreachable for ASCII input. Confirm intent before fixing.
public List <LineTokenCollection> Scan(SourceReader sr, ContextFile fileContext, int startLine) { Reset(sr, fileContext); line = startLine; curToks = new LineTokenCollection(); //report("开始"); //InitLineFirst(); while (ch != END) { //report("ch="+ch+" "+(int)ch); //if (line == 33 )// ch == '控' && line == 18) //{ // //report("col:" + col); // Widther.IsDebug = true; //} //else if ( line == 34)// ch == '控' && line == 18) //{ // Widther.IsDebug = true; //} //else //{ // Widther.IsDebug = false; //} char nextChar = GetNext(); if (ch == ' ' || ch == '\t') { //report("SkipSpace"); SkipWhiteSpace(); } else if (ch == '/' && nextChar == '/') { SkipSingleLineComment(); } else if (ch == '/' && nextChar == '*') { SkipMutilLineComment(); } else if (ch == '/') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.DIV);// { Col = col, Line = line, }; curToks.Add(tok); Next(); } else if (ch == '"' || ch == '“' || ch == '”') { string str = scanString(); LexTokenLiteral tok = new LexTokenLiteral(line, col - 1, TokenKindLiteral.LiteralString, str);// { Col = col - 1, Line = line, Text = str, Kind = TokenKindSymbol.LiteralString }; curToks.Add(tok); } else if (ch == '\r' && nextChar == '\n') { Next(); Next(); AddLineToken();//lineTokens.Add(curToks); curToks = new LineTokenCollection(); ScanNewLine(); } else if (ch == '\n' || ch == '\r') { Next(); AddLineToken(); //lineTokens.Add(curToks); curToks = new LineTokenCollection(); ScanNewLine(); } else if ("0123456789".IndexOf(ch) != -1) { string str = scanNumber(); var temp = col; if (StringHelper.IsInt(str)) { LexTokenLiteral tok = new LexTokenLiteral(line, temp, TokenKindLiteral.LiteralInt, str); curToks.Add(tok); } else if (StringHelper.IsFloat(str)) { LexTokenLiteral tok = new LexTokenLiteral(line, temp, TokenKindLiteral.LiteralFloat, str); curToks.Add(tok); } else { lexError(str + "不是正确的数字"); } } else if (ch == '+') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.ADD); curToks.Add(tok); Next(); } 
else if (ch == '-') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.SUB); curToks.Add(tok); Next(); } else if ((ch == '=') && (nextChar == '=')) { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.EQ); curToks.Add(tok); Next(); Next(); } else if ((ch == '=') && (nextChar == '>')) { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.AssignTo); curToks.Add(tok); Next(); Next(); } else if ((ch == '=')) { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.Assign); curToks.Add(tok); Next(); } else if ((ch == '*')) { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.MUL); curToks.Add(tok); Next(); } else if (ch == ',' || ch == ',') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.Comma); curToks.Add(tok); Next(); } else if (ch == ';' || ch == ';') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.Semi); curToks.Add(tok); Next(); } else if (ch == '(' || ch == '(') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.LBS); curToks.Add(tok); Next(); } else if (ch == ')' || ch == ')') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.RBS); curToks.Add(tok); Next(); } else if (ch == '>' && GetNext() == '=') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.GE); curToks.Add(tok); Next(); Next(); } else if (ch == '>') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.GT); curToks.Add(tok); Next(); } else if (ch == '<' && nextChar == '=') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.LE); curToks.Add(tok); Next(); Next(); } else if (ch == '<') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.LT); curToks.Add(tok); Next(); } else if ((nextChar == '!' 
|| nextChar == '!') && (nextChar == '=' || nextChar == '=')) { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.NE); curToks.Add(tok); Next(); Next(); } else if (ch == ':' || ch == ':') { LexTokenSymbol tok = new LexTokenSymbol(line, col, TokenKindSymbol.Colon);// { Col = col, Line = line, Kind = TokenKindSymbol.Colon }; curToks.Add(tok); Next(); } else if ((ch >= 'A' && ch <= 'Z') /*|| (ch == '_') */ || (ch >= 'a' && ch <= 'z') || ChineseHelper.IsChineseLetter(ch)) { var tempCol = col; var tempLine = line; LexToken t1 = scanIdentToken(); if (t1.Text == "说明") { if (ch == ':' || ch == ':') { SkipSingleLineComment(); continue;; } } addIdentOrKey(t1); } else if (char.IsControl(ch)) { while (char.IsControl(ch) && ch != END) { Next(); if ((int)ch == 13) { ScanNewLine(); } } } else { lexError("无法识别" + (int)ch + ": '" + ch + "' "); Next(); } } if (curToks != null && curToks.Count > 0) { AddLineToken(); } return(lineTokens); }
// Creates a tape cursor over a copy of the token list, with Token.EOF as
// the sentinel returned past the end; the file context is kept for error
// reporting by parser helpers.
public TokenTape(List <Token> tokens, ContextFile fileContext) : base(tokens.ToArray(), Token.EOF) { this.fileContext = fileContext; }