/// <summary>Get whether a lexical token is a string literal.</summary>
/// <param name="Data">The candidate lexical token (may be any object).</param>
/// <returns>Returns true when <paramref name="Data"/> is a <see cref="LexTokenLiteral"/> whose kind is <see cref="TokenKindLiteral.LiteralString"/>; otherwise false.</returns>
public bool IsString(object Data)
{
    // A type pattern replaces the original 'is' check followed by an 'as' cast;
    // dead commented-out code removed.
    return Data is LexTokenLiteral token
        && token.Kind == TokenKindLiteral.LiteralString;
}
/// <summary>Migrate a lexical token.</summary>
/// <param name="lexToken">The lexical token to migrate.</param>
/// <param name="error">An error message which indicates why migration failed (if any).</param>
/// <returns>Returns whether migration succeeded.</returns>
public override bool TryMigrate(ILexToken lexToken, out string error)
{
    // apply the base migrations first; stop if they fail
    if (!base.TryMigrate(lexToken, out error))
        return false;

    // only tokens with input arguments need the 1.15-specific migrations
    if (!(lexToken is LexTokenToken token) || !token.HasInputArgs())
        return true;

    ConditionType? condition = this.GetConditionType(token.Name);

    // 1.15 drops {{token:search}} form in favor of {{token |contains=search}}
    bool droppedSearchForm =
        this.LocalTokenNames.Value.Contains(token.Name)
        || (condition != null && this.TokensWhichDroppedSearchForm.Contains(condition.Value));
    if (droppedSearchForm)
    {
        var inputParts = new List<ILexToken>(token.InputArgs.Parts);
        if (inputParts[0] is LexTokenLiteral firstLiteral)
            firstLiteral.MigrateTo($"|contains={firstLiteral.Text}");
        else
            inputParts.Insert(0, new LexTokenLiteral("|contains="));
        token.InputArgs.MigrateTo(inputParts.ToArray());
    }

    // 1.15 changes {{Random: choices | pinned-key}} to {{Random: choices |key=pinned-key}}
    if (condition == ConditionType.Random)
    {
        LexTokenLiteral separator = token.InputArgs.Parts
            .OfType<LexTokenLiteral>()
            .FirstOrDefault(p => p.ToString().Contains("|"));

        // skip if there's no pipe, or the key= form is already present
        if (separator != null && !Regex.IsMatch(separator.Text, @"\|\s*key\s*="))
        {
            int pipeIndex = separator.Text.IndexOf("|", StringComparison.Ordinal);
            string beforePipe = separator.Text.Substring(0, pipeIndex + 1);
            string afterPipe = separator.Text.Substring(pipeIndex + 1).TrimStart();
            separator.MigrateTo(beforePipe + "key=" + afterPipe);
        }
    }

    return true;
}
/// <summary>
/// Parse the next item from the token tape and push the resulting expression onto the
/// chain stack. Returns false only when the tape has no current token; every other path
/// pushes an expression and returns true, or throws <see cref="CCException"/> for an
/// unrecognized token.
/// </summary>
private bool ParseItem()
{
    if (!Tape.HasCurrent)
    {
        return(false);
    }
    var data = Tape.Current;

    // dispatch on the token's feature classification, checked in priority order
    if (featurer.IsDe(data))
    {
        var obj = ParseDe();
        chains.Push(obj);
        return(true);
    }
    else if (featurer.IsNewfault(data))// (cf.IsNewfault)
    {
        var obj = ParseNewfault();
        chains.Push(obj);
        return(true);
        //MoveNext();
        //return false;
    }
    else if (featurer.IsDi(data))//(cf.IsDi)
    {
        var obj = ParseDi();
        chains.Push(obj);
        return(true);
    }
    else if (featurer.IsExp(data))//if (cf.IsExp)
    {
        var obj = ParseItemExp();
        chains.Push(obj);
        return(true);
    }
    // any variable-like token (local var, literal, property, enum item, parameter, field):
    // parse it and push the result wrapped in a bracket expression
    else if (featurer.IsLocalVar(data) ||//cf.IsLocalVar
        featurer.IsLiteral(data) ||//cf.IsLiteral
        featurer.IsThisProperty(data) || featurer.IsSuperProperty(data) ||//cf.IsSuperProperty
        featurer.IsUsedEnumItem(data) ||//cf.IsUsedEnumItem
        featurer.IsUsedProperty(data) ||// cf.IsUsedProperty
        featurer.IsParameter(data) ||//cf.IsParameter
        featurer.IsUsedField(data) ||
        featurer.IsThisField(data)
        )
    {
        Exp exp1 = ParseExpect_Var();
        ExpBracket bracketBracket = WarpExp(exp1);
        chains.Push(bracketBracket);
        return(true);
    }
    else if (featurer.IsThisClassName(data) || featurer.IsImportTypeName(data))
    {
        // a type name: may stand alone, or begin a constructor-style "new" expression
        // when followed by an argument item
        Exp exp = ParseTypes();
        chains.Push(exp);
        if (exp is ExpTypeBase)
        {
            if (Tape.HasCurrent)
            {
                var data2 = Tape.Current;
                // a following Di/De accessor binds to the type itself, so only
                // recurse for constructor arguments when the next token is neither
                if (!(featurer.IsDi(data2) || featurer.IsDe(data2)))
                {
                    var b2 = ParseItem();
                    if (b2)
                    {
                        var nextObj = PeekChains();
                        if ((nextObj is ExpBracket) ||
                            (nextObj is ExpLiteral) ||
                            (nextObj is ExpVarBase)
                            )
                        {
                            // pop the argument then the type (reverse push order) and
                            // replace them with a single "new" expression
                            var argExp = (Exp)PopChains();
                            var typeExp = (ExpTypeBase)PopChains();
                            Exp newexp = ParseToExpNew(typeExp, argExp);
                            //obj = newexp;
                            chains.Push(newexp);
                            return(true);
                        }
                    }
                }
            }
            return(true);
        }
    }
    else if (featurer.IsIdent(data))
    {
        LexTokenText lexToken = (LexTokenText)data;
        // the identifiers "是"/"否" (yes/no) are converted to boolean literal expressions
        if (lexToken.Text == "是" || lexToken.Text == "否")
        {
            LexTokenLiteral literalToken = new LexTokenLiteral(lexToken.Line, lexToken.Col, lexToken.Text == "是" ?
                TokenKindLiteral.True : TokenKindLiteral.False, lexToken.Text);
            ExpLiteral literalExp = new ExpLiteral(this.ExpContext, literalToken);
            Exp exp2 = literalExp.Analy();
            chains.Push(exp2);
            MoveNext();
        }
        else if (featurer.IsProcNamePart(lexToken.Text))
        {
            // identifier is part of a procedure name
            Exp exp = ParseProcNamePart();
            chains.Push(exp);
        }
        else
        {
            //ExpErrorToken errorExp = new ExpErrorToken(this.ExpContext, lexToken);
            //Exp exp1 = errorExp.Analy();
            // otherwise-unrecognized identifier: treat it as a local variable reference,
            // propagating the current assignment-target flag
            ExpLocalVar varExp = new ExpLocalVar(this.ExpContext, lexToken);
            varExp.IsAssignTo = this.IsAssignTo;
            Exp exp1 = varExp.Analy();
            ExpBracket bracketBracket = WarpExp(exp1);
            chains.Push(bracketBracket);
            MoveNext();
        }
        return(true);
    }
    else
    {
        throw new CCException();
    }
    // reached only when the type-name branch pushed a non-ExpTypeBase expression
    return(true);
}
/// <summary>Construct a literal expression.</summary>
/// <param name="expContext">The expression context passed through to the base expression.</param>
/// <param name="literalToken">The literal lexical token this expression wraps.</param>
public ExpLiteral(ContextExp expContext, LexTokenLiteral literalToken)
    : base(expContext)
{
    this.LiteralToken = literalToken;
}
/// <summary>Split a lexical token into finer-grained tokens using the word segmenter.</summary>
/// <param name="token">The token whose text should be segmented.</param>
/// <returns>The original token alone when the segmenter produces a single segment; otherwise one or more tokens per segment, with columns advanced by segment length.</returns>
public LexToken[] Split(LexToken token)
{
    string src = token.Text;
    string[] strarr = segementer.Cut(src);

    // single segment: nothing to split, keep the original token untouched
    // (fixes the mangled 'return new LexToken[] { token } } ;' statement)
    if (strarr.Length == 1)
    {
        return new LexToken[] { token };
    }

    List<LexToken> list = new List<LexToken>();
    int col = token.Col;
    int line = token.Line;
    foreach (string text in strarr)
    {
        if (StringHelper.IsInt(text))
        {
            list.Add(new LexTokenLiteral(line, col, TokenKindLiteral.LiteralInt, text));
        }
        else if (StringHelper.IsFloat(text))
        {
            list.Add(new LexTokenLiteral(line, col, TokenKindLiteral.LiteralFloat, text));
        }
        else if (text.Length == 2 && (segementer.ContainerWord(text[0].ToString()) || segementer.ContainerWord(text[1].ToString())))
        {
            // a two-character segment where either character is itself a known word:
            // emit each character as its own token. The second character starts one
            // column later (the original gave both tokens the same column).
            list.Add(new LexTokenText(line, col, text[0].ToString()));
            list.Add(new LexTokenText(line, col + 1, text[1].ToString()));
        }
        else
        {
            // any other segment (including an unsplittable two-character word)
            // becomes a single text token
            list.Add(new LexTokenText(line, col, text));
        }
        col += text.Length;
    }
    return list.ToArray();
}
}