/// <summary>
/// Resolves the declared type name for this dim and stores it in <c>DimType</c>.
/// Reports an error when no type is declared, or when the declaration's first
/// token is not a type token.
/// </summary>
public void AnalyType()
{
    if (IsContains)
    {
        return;
    }
    var valueExp = Raw.ValueExp;
    // A missing expression and an empty token list are the same user mistake
    // ("no type declared"), so report them with one merged check instead of
    // two duplicated branches.
    if (valueExp == null || valueExp.RawTokens.Count == 0)
    {
        Errorf(Raw.NameToken.Position, "'{0}'没有声明类型", DimName);
        return;
    }
    LexTokenText typeToken = GetDimTypeToken(valueExp);
    if (typeToken == null)
    {
        Errorf(Raw.NameToken.Position, "'{0}'声明的不是类型", DimName);
    }
    else
    {
        DimType = typeToken.Text;
    }
}
/// <summary>
/// Collapses all tokens in <c>TypeTokens</c> into one text token: the texts are
/// concatenated, and the position (line/col) of the first token is kept.
/// </summary>
public LexToken ToSingleToken()
{
    LexToken first = TypeTokens[0];
    string mergedText = string.Concat(TypeTokens.Select(t => t.Text));
    return new LexTokenText(first.Line, first.Col, mergedText);
}
/// <summary>
/// Consumes the current token when it has kind <paramref name="tokKind"/>;
/// otherwise reports an error and leaves the tape position unchanged.
/// </summary>
/// <returns>True when the token matched and was consumed.</returns>
public bool Match(TokenKindKeyword tokKind)
{
    if (this.Current.IsKind(tokKind))
    {
        MoveNext();
        return true;
    }
    error(this.Current, this.Current.ToCode() + "不正确,应该是" + LexTokenText.GetTextByKind(tokKind));
    return false;
}
/// <summary>
/// Parses a "name : value" argument expression. When no colon follows the
/// left-hand expression, that expression is returned unchanged. On a malformed
/// name the error is reported and the right-hand expression is returned so
/// parsing can continue.
/// </summary>
private Exp ParseNameValueExp()
{
    Exp leftExp = ParseBinaryLogicExp();
    if (!tape.HasCurrent || !tape.Current.IsKind(TokenKindSymbol.Colon))
    {
        return leftExp;
    }
    tape.MoveNext();
    Exp rightExp = ParseBinaryLogicExp();
    if (leftExp is ExpChain chainExp)
    {
        // The name side must be a chain of exactly one raw text token.
        if (chainExp.SubCount != 1)
        {
            tape.error("参数名称的长度不是1");
            return rightExp;
        }
        if (chainExp.RawElements[0] is LexTokenText textToken)
        {
            return new ExpNameValue(this.expContext, textToken, rightExp);
        }
        tape.error("参数名称错误");
        return rightExp;
    }
    if (leftExp is ExpVarBase leftVarExp)
    {
        return new ExpNameValue(this.expContext, leftVarExp.VarToken, rightExp);
    }
    tape.error("调用过程时指定的参数名称只能是标识符");
    return rightExp;
}
/// <summary>
/// Consumes and returns the current token when it is an identifier;
/// returns null (without advancing) otherwise.
/// </summary>
private LexTokenText ParseRetProc()
{
    if (!tape.HasCurrent || !tape.Current.IsKind(TokenKindKeyword.Ident))
    {
        return null;
    }
    var headToken = (LexTokenText)tape.Current;
    tape.MoveNext();
    return headToken;
}
/// <summary>
/// Registers a text-struct name in the import/use context. Duplicate names are
/// reported as errors; new names are recorded and their token kept in
/// <c>_TextStructTokens</c>.
/// </summary>
private void AnalyNameItemText(LexTokenText nameToken)
{
    var importUse = this.ASTFile.FileContext.ImportUseContext;
    string typeName = nameToken.Text;
    if (importUse.ContainsUserZTypeName(typeName))
    {
        this.ASTFile.FileContext.Errorf(nameToken.Position, "'{0}'重复使用", typeName);
        return;
    }
    importUse.AddUseZTypeName(typeName);
    _TextStructTokens.Add(nameToken);
}
/// <summary>
/// True when <paramref name="Data"/> is a text token whose text equals
/// <paramref name="keytext"/>.
/// </summary>
private bool IsKeyword(object Data, string keytext)
{
    return Data is LexTokenText token && token.Text == keytext;
}
/// <summary>
/// Returns the first raw token of <paramref name="ValueExp"/> when it is a text
/// token; returns null for a null/empty expression or a non-text first token.
/// </summary>
private LexTokenText GetDimTypeToken(ExpRaw ValueExp)
{
    if (ValueExp == null || ValueExp.RawTokens.Count == 0)
    {
        return null;
    }
    // "as" yields null for non-text tokens, matching the original type check.
    return ValueExp.RawTokens[0] as LexTokenText;
}
/// <summary>
/// Parses a "de" (的) expression: takes the current token as the keyword, pops the
/// pending chain as the subject, and — when the next token is an Ident or Each
/// text token — consumes it as the right-hand token.
/// </summary>
private Exp ParseDe()
{
    var deexp = new ExpDe(this.ExpContext)
    {
        KeyToken = (LexTokenText)Tape.Current,
        SubjectExp = PopChainsExp()
    };
    MoveNext();
    if (Tape.Current is LexTokenText tok
        && (tok.Kind == TokenKindKeyword.Ident || tok.Kind == TokenKindKeyword.Each))
    {
        deexp.RightToken = tok;
        MoveNext();
    }
    return deexp;
}
/// <summary>
/// Resolves an imported type name and registers it with the import/use context:
/// static classes and enums become use-types, dims become dim-types. A name with
/// no search result is reported as an error; any other type kind is a compiler bug.
/// </summary>
public void AnalyNameItemType(LexTokenText nameToken)
{
    var importUseContext = this.ASTFile.FileContext.ImportUseContext;
    string typeName = nameToken.Text;
    var ztypes = importUseContext.SearchByTypeName(typeName);
    if (ztypes.Length == 0)
    {
        this.ASTFile.FileContext.Errorf(nameToken.Position, "没有搜索到'{0}'", typeName);
        return;
    }
    // NOTE(review): only the first search result is considered even when several
    // match — confirm this is the intended disambiguation rule.
    var descType = ztypes[0];
    if (descType is ZLClassInfo zclass)
    {
        if (zclass.IsStatic)
        {
            importUseContext.AddUseType(zclass);
        }
        else
        {
            this.ASTFile.FileContext.Errorf(nameToken.Position, "'{0}'不是唯一类型,不能被导入类", typeName);
        }
    }
    else if (descType is ZLEnumInfo zenum)
    {
        importUseContext.AddUseType(zenum);
    }
    else if (descType is ZLDimInfo zdim)
    {
        importUseContext.AddDimType(zdim);
    }
    else
    {
        throw new CCException();
    }
}
// Parses one chain item from the tape, pushes the resulting expression onto the
// chains stack, and returns true. Returns false only when the tape is exhausted.
// Dispatch is ordered: special forms (de/newfault/di/exp) first, then plain
// variable-like tokens, then type names (which may recursively parse a
// constructor argument), then bare identifiers. Order is significant.
private bool ParseItem()
{
    if (!Tape.HasCurrent)
    {
        return(false);
    }
    var data = Tape.Current;
    if (featurer.IsDe(data))
    {
        var obj = ParseDe();
        chains.Push(obj);
        return(true);
    }
    else if (featurer.IsNewfault(data))// (cf.IsNewfault)
    {
        var obj = ParseNewfault();
        chains.Push(obj);
        return(true);
        //MoveNext();
        //return false;
    }
    else if (featurer.IsDi(data))//(cf.IsDi)
    {
        var obj = ParseDi();
        chains.Push(obj);
        return(true);
    }
    else if (featurer.IsExp(data))//if (cf.IsExp)
    {
        var obj = ParseItemExp();
        chains.Push(obj);
        return(true);
    }
    else if (featurer.IsLocalVar(data) ||//cf.IsLocalVar
        featurer.IsLiteral(data) ||//cf.IsLiteral
        featurer.IsThisProperty(data) ||
        featurer.IsSuperProperty(data) ||//cf.IsSuperProperty
        featurer.IsUsedEnumItem(data) ||//cf.IsUsedEnumItem
        featurer.IsUsedProperty(data) ||// cf.IsUsedProperty
        featurer.IsParameter(data) ||//cf.IsParameter
        featurer.IsUsedField(data) ||
        featurer.IsThisField(data)
        )
    {
        // Any variable-like token: parse it and wrap in a bracket expression.
        Exp exp1 = ParseExpect_Var();
        ExpBracket bracketBracket = WarpExp(exp1);
        chains.Push(bracketBracket);
        return(true);
    }
    else if (featurer.IsThisClassName(data) || featurer.IsImportTypeName(data))
    {
        Exp exp = ParseTypes();
        chains.Push(exp);
        if (exp is ExpTypeBase)
        {
            // A type name directly followed by a value item (but not by di/de)
            // is treated as a constructor call: recursively parse the next item
            // and, when it is a bracket/literal/var, fold "type + arg" into an
            // ExpNew on the stack.
            if (Tape.HasCurrent)
            {
                var data2 = Tape.Current;
                if (!(featurer.IsDi(data2) || featurer.IsDe(data2)))
                {
                    var b2 = ParseItem();
                    if (b2)
                    {
                        var nextObj = PeekChains();
                        if ((nextObj is ExpBracket) ||
                            (nextObj is ExpLiteral) ||
                            (nextObj is ExpVarBase)
                            )
                        {
                            var argExp = (Exp)PopChains();
                            var typeExp = (ExpTypeBase)PopChains();
                            Exp newexp = ParseToExpNew(typeExp, argExp);
                            //obj = newexp;
                            chains.Push(newexp);
                            return(true);
                        }
                    }
                }
            }
            return(true);
        }
        // Non-ExpTypeBase results fall through to the final return(true).
    }
    else if (featurer.IsIdent(data))
    {
        LexTokenText lexToken = (LexTokenText)data;
        if (lexToken.Text == "是" || lexToken.Text == "否")
        {
            // The bare identifiers 是/否 ("yes"/"no") are rewritten into
            // true/false literal expressions.
            LexTokenLiteral literalToken = new LexTokenLiteral(lexToken.Line, lexToken.Col, lexToken.Text == "是" ?
                TokenKindLiteral.True : TokenKindLiteral.False, lexToken.Text);
            ExpLiteral literalExp = new ExpLiteral(this.ExpContext, literalToken);
            Exp exp2 = literalExp.Analy();
            chains.Push(exp2);
            MoveNext();
        }
        else if (featurer.IsProcNamePart(lexToken.Text))
        {
            Exp exp = ParseProcNamePart();
            chains.Push(exp);
        }
        else
        {
            // Unknown identifier: treat it as a local variable reference rather
            // than an error token, so assignment targets still resolve.
            //ExpErrorToken errorExp = new ExpErrorToken(this.ExpContext, lexToken);
            //Exp exp1 = errorExp.Analy();
            ExpLocalVar varExp = new ExpLocalVar(this.ExpContext, lexToken);
            varExp.IsAssignTo = this.IsAssignTo;
            Exp exp1 = varExp.Analy();
            ExpBracket bracketBracket = WarpExp(exp1);
            chains.Push(bracketBracket);
            MoveNext();
        }
        return(true);
    }
    else
    {
        throw new CCException();
    }
    return(true);
}
/// <summary>
/// Builds a tag-new expression node from the keyword token that introduced it.
/// </summary>
/// <param name="expContext">Expression context this node belongs to.</param>
/// <param name="keyToken">Keyword token stored in <c>KeyToken</c>.</param>
public ExpTagNew(ContextExp expContext, LexTokenText keyToken)
    : base(expContext)
{
    KeyToken = keyToken;
}
/// <summary>
/// Splits a text token into multiple tokens using the word segmenter.
/// Integer/float segments become literal tokens; a two-character segment whose
/// characters are each known words is split into two single-character text
/// tokens; everything else becomes one text token. The column counter advances
/// by each segment's length so positions stay relative to the original token.
/// </summary>
/// <param name="token">Token whose text is segmented.</param>
/// <returns>The original token when the segmenter yields one segment; otherwise the new tokens.</returns>
public LexToken[] Split(LexToken token)
{
    string src = token.Text;
    string[] strarr = segementer.Cut(src);
    if (strarr.Length == 1)
    {
        // Fixed: the original had a malformed "return new LexToken[] { token } } ;"
        // (missing statement terminator plus a stray empty statement).
        return new LexToken[] { token };
    }
    List<LexToken> list = new List<LexToken>();
    int col = token.Col;
    int line = token.Line;
    foreach (string text in strarr)
    {
        if (StringHelper.IsInt(text))
        {
            list.Add(new LexTokenLiteral(line, col, TokenKindLiteral.LiteralInt, text));
        }
        else if (StringHelper.IsFloat(text))
        {
            list.Add(new LexTokenLiteral(line, col, TokenKindLiteral.LiteralFloat, text));
        }
        else if (text.Length == 2
            && (segementer.ContainerWord(text[0].ToString()) || segementer.ContainerWord(text[1].ToString())))
        {
            // NOTE(review): both single-character tokens get the same start
            // column; the second arguably should be col + 1 — confirm intent
            // before changing (behavior preserved here).
            list.Add(new LexTokenText(line, col, text[0].ToString()));
            list.Add(new LexTokenText(line, col, text[1].ToString()));
        }
        else
        {
            list.Add(new LexTokenText(line, col, text));
        }
        col += text.Length;
    }
    return list.ToArray();
}
}