/// <summary>
/// Builds the generic-lexer configuration from the optional <c>[Lexer]</c> attribute
/// on the token enum, and gathers the distinct <see cref="GenericToken"/> kinds used
/// by every declared lexeme. Identifier lexemes additionally select the identifier
/// matching mode (and custom start/rest patterns when <see cref="IdentifierType.Custom"/>).
/// </summary>
/// <param name="attributes">All lexeme attributes, grouped by token id.</param>
/// <returns>The populated config and the distinct generic tokens, in first-seen order.</returns>
private static (GenericLexer <IN> .Config, GenericToken[]) GetConfigAndGenericTokens <IN>(IDictionary <IN, List <LexemeAttribute> > attributes) where IN : struct
{
    var config = new GenericLexer <IN> .Config();

    // Global lexer options come from the enum-level [Lexer] attribute, when present.
    var lexerAttr = typeof(IN).GetCustomAttribute <LexerAttribute>();
    if (lexerAttr != null)
    {
        config.IgnoreWS = lexerAttr.IgnoreWS;
        config.IgnoreEOL = lexerAttr.IgnoreEOL;
        config.WhiteSpace = lexerAttr.WhiteSpace;
        config.KeyWordIgnoreCase = lexerAttr.KeyWordIgnoreCase;
    }

    // Flatten once; it is reused for identifier configuration and token collection.
    var allLexemes = attributes.Values.SelectMany(lexemes => lexemes).ToList();

    // Identifier lexemes configure the identifier recognition mode; when Custom,
    // the start/rest character patterns are parsed from the attribute.
    foreach (var identifierLexeme in allLexemes.Where(l => l.IsIdentifier))
    {
        config.IdType = identifierLexeme.IdentifierType;
        if (identifierLexeme.IdentifierType == IdentifierType.Custom)
        {
            config.IdentifierStartPattern = ParseIdentifierPattern(identifierLexeme.IdentifierStartPattern);
            config.IdentifierRestPattern = ParseIdentifierPattern(identifierLexeme.IdentifierRestPattern);
        }
    }

    // Distinct() preserves first-occurrence order, matching the original Add loop.
    var tokens = allLexemes.Select(l => l.GenericToken).Distinct().ToArray();
    return (config, tokens);
}
/// <summary>
/// Looks up the <c>[CallBacks]</c> attribute on the token enum and wires every
/// <c>[TokenCallback]</c>-decorated static method of the declared callback class
/// onto the lexer (via <see cref="ExtractCallBacks{IN}"/>).
/// </summary>
/// <param name="lexer">The lexer to register callbacks on.</param>
public static void BuildCallbacks <IN>(GenericLexer <IN> lexer) where IN : struct
{
    var attributes = (CallBacksAttribute[])typeof(IN).GetCustomAttributes(typeof(CallBacksAttribute), true);
    // Guard: the enum may not declare [CallBacks] at all. The original code
    // indexed attributes[0] unconditionally and threw IndexOutOfRangeException
    // in that case; no attribute simply means no callbacks to wire.
    if (attributes.Length == 0)
    {
        return;
    }
    Type callbackClass = attributes[0].CallBacksClass;
    ExtractCallBacks(callbackClass, lexer);
}
/// <summary>
/// Binds a static token-transformer method (<c>Token&lt;IN&gt; -&gt; Token&lt;IN&gt;</c>)
/// as a delegate and registers it on the lexer as the callback for <paramref name="token"/>.
/// </summary>
/// <param name="lexer">The lexer to register the callback on.</param>
/// <param name="method">A static method with signature <c>Token&lt;IN&gt; Method(Token&lt;IN&gt;)</c>.</param>
/// <param name="token">The token the callback fires for.</param>
public static void AddCallback <IN>(GenericLexer <IN> lexer, MethodInfo method, IN token) where IN : struct
{
    // The original body computed typeof(IN), method.ReturnType and
    // method.GetParameters() into locals that were never read; removed.
    var callbackDelegate = (Func <Token <IN>, Token <IN> >)Delegate.CreateDelegate(typeof(Func <Token <IN>, Token <IN> >), method);
    lexer.AddCallBack(token, callbackDelegate);
}
/// <summary>
/// Invokes the user-supplied extension builder once for every extension lexeme,
/// letting user code wire custom FSM transitions into the lexer.
/// A null builder means no extension tokens were declared; nothing to do.
/// </summary>
/// <param name="extensions">Extension lexemes keyed by their token id.</param>
/// <param name="extensionBuilder">User callback that builds each extension, or null.</param>
/// <param name="lexer">The lexer being configured.</param>
private static void AddExtensions <IN>(Dictionary <IN, LexemeAttribute> extensions, BuildExtension <IN> extensionBuilder, GenericLexer <IN> lexer) where IN : struct
{
    if (extensionBuilder == null)
    {
        return;
    }
    foreach (var pair in extensions)
    {
        extensionBuilder(pair.Key, pair.Value, lexer);
    }
}
/// <summary>
/// Scans <paramref name="callbackClass"/> for static methods decorated with
/// <c>[TokenCallback]</c> and registers each one on the lexer for the token
/// identified by the attribute's <c>EnumValue</c>.
/// </summary>
/// <param name="callbackClass">The class declared by the <c>[CallBacks]</c> attribute.</param>
/// <param name="lexer">The lexer to register callbacks on.</param>
public static void ExtractCallBacks <IN>(Type callbackClass, GenericLexer <IN> lexer) where IN : struct
{
    // Only static methods carrying the [TokenCallback] attribute qualify.
    var callbackMethods = callbackClass
        .GetMethods()
        .Where(m => m.IsStatic && m.GetCustomAttributes().Any(a => a.GetType() == typeof(TokenCallbackAttribute)))
        .ToList();

    foreach (var method in callbackMethods)
    {
        // The filter above guarantees at least one attribute is present.
        var tokenCallback = method
            .GetCustomAttributes(typeof(TokenCallbackAttribute), false)
            .Cast <TokenCallbackAttribute>()
            .First();
        AddCallback(lexer, method, EnumConverter.ConvertIntToEnum <IN>(tokenCallback.EnumValue));
    }
}
/// <summary>
/// Registers <paramref name="method"/> (a static <c>Token&lt;IN&gt; -&gt; Token&lt;IN&gt;</c>
/// transformer) as the post-match callback for <paramref name="token"/> on the lexer.
/// </summary>
/// <param name="lexer">The lexer to register the callback on.</param>
/// <param name="method">A static method with signature <c>Token&lt;IN&gt; Method(Token&lt;IN&gt;)</c>.</param>
/// <param name="token">The token the callback fires for.</param>
public static void AddCallback <IN>(GenericLexer <IN> lexer, MethodInfo method, IN token) where IN : struct
{
    var delegateType = typeof(Func <Token <IN>, Token <IN> >);
    var callback = (Func <Token <IN>, Token <IN> >)Delegate.CreateDelegate(delegateType, method);
    lexer.AddCallBack(token, callback);
}
/// <summary>
/// Builds a <see cref="GenericLexer{IN}"/> from the lexeme attributes declared on the
/// token enum: registers static/keyword/sugar/string/char lexemes, runs the extension
/// builder for extension tokens, then wires single- and multi-line comment handling
/// directly into the lexer's FSM. Errors are accumulated into <paramref name="result"/>
/// rather than thrown.
/// </summary>
/// <param name="attributes">All lexeme attributes, grouped by token id.</param>
/// <param name="extensionBuilder">User callback for extension lexemes (may be null).</param>
/// <param name="result">Accumulates build errors; its Result is set to the lexer.</param>
/// <returns>The same <paramref name="result"/>, carrying the built lexer and any errors.</returns>
private static BuildResult <ILexer <IN> > BuildGenericLexer <IN>(Dictionary <IN, List <LexemeAttribute> > attributes, BuildExtension <IN> extensionBuilder, BuildResult <ILexer <IN> > result) where IN : struct
{
    // Validate string/char tokens first; errors land in result.
    result = CheckStringAndCharTokens(attributes, result);
    var(config, tokens) = GetConfigAndGenericTokens(attributes);
    config.ExtensionBuilder = extensionBuilder;
    var lexer = new GenericLexer <IN>(config, tokens);
    // Extension lexemes are collected here and wired after all plain lexemes.
    var Extensions = new Dictionary <IN, LexemeAttribute>();
    foreach (var pair in attributes)
    {
        var tokenID = pair.Key;
        var lexemes = pair.Value;
        foreach (var lexeme in lexemes)
        {
            try
            {
                // Note: a lexeme may match several categories; each check is independent.
                if (lexeme.IsStaticGeneric)
                {
                    lexer.AddLexeme(lexeme.GenericToken, tokenID);
                }
                if (lexeme.IsKeyWord)
                {
                    foreach (var param in lexeme.GenericTokenParameters)
                    {
                        lexer.AddKeyWord(tokenID, param);
                    }
                }
                if (lexeme.IsSugar)
                {
                    foreach (var param in lexeme.GenericTokenParameters)
                    {
                        lexer.AddSugarLexem(tokenID, param, lexeme.IsLineEnding);
                    }
                }
                if (lexeme.IsString)
                {
                    // Defaults: double-quote delimiter, backslash escape.
                    var(delimiter, escape) = GetDelimiters(lexeme, "\"", "\\");
                    lexer.AddStringLexem(tokenID, delimiter, escape);
                }
                if (lexeme.IsChar)
                {
                    // Defaults: single-quote delimiter, backslash escape.
                    var(delimiter, escape) = GetDelimiters(lexeme, "'", "\\");
                    lexer.AddCharLexem(tokenID, delimiter, escape);
                }
                if (lexeme.IsExtension)
                {
                    Extensions[tokenID] = lexeme;
                }
            }
            catch (Exception e)
            {
                // Registration failures are fatal build errors, not exceptions to callers.
                result.AddError(new InitializationError(ErrorLevel.FATAL, e.Message));
            }
        }
    }
    AddExtensions(Extensions, extensionBuilder, lexer);
    var comments = GetCommentsAttribute(result);
    // Comment FSM wiring is skipped when earlier steps already produced errors.
    if (!result.IsError)
    {
        foreach (var comment in comments)
        {
            // Callbacks tag the matched node with the derived comment token and its type.
            NodeCallback <GenericToken> callbackSingle = match =>
            {
                match.Properties[GenericLexer <IN> .DerivedToken] = comment.Key;
                match.Result.IsComment = true;
                match.Result.CommentType = CommentType.Single;
                return(match);
            };
            NodeCallback <GenericToken> callbackMulti = match =>
            {
                match.Properties[GenericLexer <IN> .DerivedToken] = comment.Key;
                match.Result.IsComment = true;
                match.Result.CommentType = CommentType.Multi;
                return(match);
            };
            foreach (var commentAttr in comment.Value)
            {
                var
                fsmBuilder = lexer.FSMBuilder;
                var hasSingleLine = !string.IsNullOrWhiteSpace(commentAttr.SingleLineCommentStart);
                if (hasSingleLine)
                {
                    lexer.SingleLineComment = commentAttr.SingleLineCommentStart;
                    // From the FSM start state, a constant transition on the comment
                    // opener leads to the single-line-comment state.
                    fsmBuilder.GoTo(GenericLexer <IN> .start);
                    fsmBuilder.ConstantTransition(commentAttr.SingleLineCommentStart);
                    fsmBuilder.Mark(GenericLexer <IN> .single_line_comment_start);
                    fsmBuilder.End(GenericToken.Comment);
                    fsmBuilder.CallBack(callbackSingle);
                }
                var hasMultiLine = !string.IsNullOrWhiteSpace(commentAttr.MultiLineCommentStart);
                if (hasMultiLine)
                {
                    lexer.MultiLineCommentStart = commentAttr.MultiLineCommentStart;
                    lexer.MultiLineCommentEnd = commentAttr.MultiLineCommentEnd;
                    // Same pattern as single-line, but marks the multi-line state.
                    fsmBuilder.GoTo(GenericLexer <IN> .start);
                    fsmBuilder.ConstantTransition(commentAttr.MultiLineCommentStart);
                    fsmBuilder.Mark(GenericLexer <IN> .multi_line_comment_start);
                    fsmBuilder.End(GenericToken.Comment);
                    fsmBuilder.CallBack(callbackMulti);
                }
            }
        }
    }
    result.Result = lexer;
    return(result);
}