// NOTE(review): sets up a one-token lookahead with two independent
// enumerators over the same list: Iter sits on the first token
// (CurrentToken), NextIter is advanced twice so it sits on the second
// (NextToken). Assumes the list holds at least two tokens -- with fewer,
// Iter.Current / NextIter.Current yield the enumerator's default value;
// TODO confirm callers guarantee a minimum length.
public ParserState(TokenList list) {
    List = list;
    // Current-token enumerator: positioned on the first element.
    Iter = list.GetEnumerator();
    Iter.MoveNext();
    CurrentToken = Iter.Current;
    // Lookahead enumerator: advanced twice, one element ahead of Iter.
    NextIter = list.GetEnumerator();
    NextIter.MoveNext();
    NextIter.MoveNext();
    NextToken = NextIter.Current;
    //
    //Subject = BuiltinDefs.Self.CreateName();
    Subject = Expression.Nil;
}
/// <summary>
/// Parses the given text with the generated parser.
/// </summary>
/// <param name="text">Text to be processed.</param>
public void Parse(string text) {
    // Silently no-op until the parser has been fully generated.
    // NOTE(review): "expession" is a typo for "expression" in the field
    // name declared elsewhere; left as-is to avoid breaking other uses.
    if (tokenizer == null || fields == null || expession == null) {
        return;
    }
    Debug.WriteLine(expession.ToString());
    // NOTE(review): handler is subscribed on every call and never
    // unsubscribed -- repeated Parse calls will fire MatchToken multiple
    // times per token; verify this is intended.
    tokenizer.BeforeAddToken += MatchToken;
    // Split the text into tokens
    TokenList <TokenType> tokens = tokenizer.Tokenize(text);
    // Parse the token stream, collecting matches into the shared fields
    GeneratorContext context = new GeneratorContext(tokens.GetEnumerator());
    context.Fields = fields;
    context.MatchField += MatchField;
    expession.Interpret(context);
    /*
     * foreach (string key in context.Fields.Keys) {
     *  Debug.WriteLine(context.Fields[key].ToString());
     * }
     * //*/
}
/// <summary>
/// Rates a sentence against a channel's stem memory: each distinct stem
/// known to the channel contributes its stored value once, and the sum is
/// normalized by the square root of the sentence's token count.
/// </summary>
/// <param name="channel">Channel whose stem memory is consulted.</param>
/// <param name="sentence">Sentence to rate.</param>
/// <returns>The normalized rating; 0 when the sentence has no tokens.</returns>
public decimal GetRatingForSentence(string channel, string sentence) {
    var rating = 0m;
    var tokenSentence = new TokenList(sentence);
    var nextToken = tokenSentence.GetEnumerator();
    var hasNextToken = nextToken.MoveNext();
    // Tracks stems already counted so each contributes at most once.
    var usedStems = new List <Stem>();
    while (hasNextToken) {
        var stem = StemManager.GetStemForToken(nextToken.Current);
        if (stem != null && ChannelMemoryManager.Instance.HasStemInChannel(channel, stem) && !usedStems.Contains(stem)) {
            rating += ChannelMemoryManager.Instance.GetValueForStem(channel, stem);
            usedStems.Add(stem);
        }
        hasNextToken = nextToken.MoveNext();
    }
    // Guard: avoid dividing by zero for an empty token list.
    if (tokenSentence.Get().Count == 0) {
        return(rating);
    }
    // Normalize by sqrt(token count) so longer sentences are not
    // over-rated. Math.Sqrt replaces the former Math.Pow(x, 0.5) idiom.
    var multiplier = (decimal)(1 / Math.Sqrt(tokenSentence.Get().Count));
    rating = rating * multiplier;
    return(rating);
}
// Tokenizes the stream, runs the preprocessor over the token lists, and
// returns the resulting output list.
public TokenList Read(Stream str) {
    Tokenizer tokenizer = new Tokenizer();
    TokenList inList = tokenizer.Tokenize(str);
    // Set up the current/lookahead enumerator pair over the input tokens
    // (NextTokenIter is advanced once so it leads CurrentTokenIter).
    TokenList outList = new TokenList();
    CurrentTokenIter = inList.GetEnumerator();
    NextTokenIter = inList.GetEnumerator();
    NextTokenIter.MoveNext();
    NextToken = NextTokenIter.Current;
    // Preprocess into the output list.
    ReadTokenLists(outList);
#if DEBUG_PREPROCESSOR
    // Debug-only dump of the raw input tokens. Previously the dump was
    // built unconditionally (and the StringWriter never disposed) even
    // when DEBUG_PREPROCESSOR was not defined.
    //DebugTextWriter writer = new DebugTextWriter();
    using (StringWriter writer = new StringWriter()) {
        inList.RawPrint(writer);
        Debug.Print(writer.ToString());
    }
#endif
    return(outList);
}
/// <summary>
/// Parses an EBNF grammar definition and builds a parser from it.
/// </summary>
/// <param name="text">EBNF grammar text. (The original doc said "JSON
/// file" -- apparently a copy/paste error; the code drives
/// EBNFExpression, not a JSON parser.)</param>
/// <returns>The parser generated from the grammar.</returns>
public static GeneratedParser Parse(string text) {
    // Split the text into tokens
    TokenList <TokenType> tokens = Tokenize(text);
    // Interpret the token stream as an EBNF definition
    EBNFContext context = new EBNFContext(tokens.GetEnumerator());
    EBNFExpression exp = new EBNFExpression();
    exp.Interpret(context);
    // Return a parser assembled from what the interpretation produced
    GeneratedParser parser = new GeneratedParser(
        context.Tokenizer,
        context.Fields,
        context.Root
        );
    return(parser);
}
// Trains the token chain with every word in tokenList, starting from
// currentToken and descending one child per word. existingTokens (when
// provided) is a previously-matched suffix that can be spliced in instead
// of creating new tokens; linkedTokens are cross-linked as training
// proceeds.
public void TrainTokenList(TokenList tokenList, Token currentToken, List <Token> existingTokens, List <Token> linkedTokens = null) {
    var tokenListTotal = tokenList.Get().Count;
    var currentTokenCounter = 0;
    var nextToken = tokenList.GetEnumerator();
    var hasNextToken = nextToken.MoveNext();
    while (hasNextToken) {
        currentTokenCounter++;
        if (TokenManager.DoesWordTextExist(nextToken.Current, currentToken, out var outIndex)) {
            // Word already known under currentToken: reinforce it.
            currentToken = TokenManager.TrainExistingToken(currentToken, outIndex);
            // If we walked back onto the existing suffix, drop it so it
            // is not spliced in a second time below.
            if (existingTokens != null && existingTokens.Contains(currentToken)) {
                existingTokens = null;
            }
        } else {
            // Splice in the existing suffix only when its length exactly
            // covers the remaining words AND its head matches this word.
            // NOTE(review): outIndex here comes from the *failed*
            // DoesWordTextExist call above -- presumably an insertion
            // position; TODO confirm TrainReferenceExistingToken /
            // TrainNewToken expect that value.
            if (existingTokens != null && existingTokens.Count > 0 && (existingTokens.Count + currentTokenCounter - 1) == tokenListTotal && existingTokens[0].WordText.Equals(nextToken.Current)) {
                currentToken = TokenManager.TrainReferenceExistingToken(currentToken, existingTokens[0], outIndex);
                existingTokens.RemoveAt(0);
            } else {
                currentToken = TokenManager.TrainNewToken(currentToken, nextToken.Current, outIndex);
            }
        }
        // Link one queued token per trained word, consuming the queue.
        if (linkedTokens?.Count > 0) {
            TokenManager.LinkTokensAndRemoveFirstItem(currentToken, linkedTokens);
        }
        hasNextToken = nextToken.MoveNext();
    }
}
// Walks the token chain word by word, collecting the longest prefix of
// tokenList that already exists under the given token. Results are
// prepended so the list comes out inverted, ready for reverse training.
// (Method name's "Exisiting" typo is preserved for existing callers.)
public List <Token> GetExisitingTokens(TokenList tokenList, Token token) {
    var matches = new List <Token>();
    var words = tokenList.GetEnumerator();
    for (var more = words.MoveNext(); more; more = words.MoveNext()) {
        // Stop at the first word that is not already a known child.
        if (!TokenManager.DoesWordTextExist(words.Current, token, out var childIndex)) {
            break;
        }
        token = TokenManager.GetTokenForID(token.ChildrenTokens[childIndex]);
        // Insert at the front: reverse training consumes these inverted.
        matches.Insert(0, token);
    }
    return(matches);
}
// Non-generic enumeration delegates to the underlying token list.
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => TokenList.GetEnumerator();
// Generic enumeration delegates to the underlying token list.
IEnumerator <CodeToken> IEnumerable <CodeToken> .GetEnumerator() => TokenList.GetEnumerator();
// Constructor - validates the token list and stores its enumerator.
// (Class name's "Precidence" typo is preserved for existing callers.)
public EvaluatorWithoutPrecidence(TokenList <ArithmeticExpressionToken> tokens) {
    // Fail fast with a descriptive exception instead of the bare
    // NullReferenceException that tokens.GetEnumerator() would raise.
    if (tokens == null) {
        throw new System.ArgumentNullException(nameof(tokens));
    }
    _tokens = tokens.GetEnumerator();
}