public static Sentence Tokenize(string ini, DelimiterDictionary delimiters)
{
    var lines = ini.Split(new[] { Environment.NewLine }, StringSplitOptions.None);

    // every document starts with an implicit global section; tokenized lines are chained after it
    var globalSection = SectionFactory.CreateSection(Grammar.GlobalSectionName, delimiters);
    var appendSentence = new Action<Sentence>(next => { globalSection.After.Last().Next = next; });

    for (var i = 0; i < lines.Length; i++)
    {
        var line = lines[i].TrimStart();
        var tokens = TokenizeLine(line, delimiters);
        appendSentence(new Sentence { Line = i, Tokens = tokens });
    }

    return globalSection;
}
public static Sentence CreateComment(string text, DelimiterDictionary delimiters)
{
    var comment = new Sentence
    {
        Type = SentenceType.Comment,
        Tokens = new List<Token>
        {
            new Token(TokenType.CommentIndicator, delimiters.TokenTypes[TokenType.CommentIndicator]),
            new Token(TokenType.Comment, text),
        }
    };
    return comment;
}
public static Sentence CreateSection(string name, DelimiterDictionary delimiters)
{
    var section = new Sentence
    {
        Type = SentenceType.Section,
        Tokens = new List<Token>
        {
            new Token(TokenType.LeftSectionDelimiter, delimiters.TokenTypes[TokenType.LeftSectionDelimiter]),
            new Token(TokenType.Section, name),
            new Token(TokenType.RightSectionDelimiter, delimiters.TokenTypes[TokenType.RightSectionDelimiter]),
        }
    };
    return section;
}
public static Sentence CreateProperty(string name, string value, DelimiterDictionary delimiters)
{
    var property = new Sentence
    {
        Type = SentenceType.Property,
        Tokens = new List<Token>
        {
            new Token(TokenType.Property, name),
            new Token(TokenType.ProperetyValueDelimiter, delimiters.TokenTypes[TokenType.ProperetyValueDelimiter]),
            new Token(TokenType.Value, value),
        }
    };
    return property;
}
internal static List<Token> TokenizeLine(string line, DelimiterDictionary delimiters)
{
    var isEmptyLine = string.IsNullOrWhiteSpace(line);
    if (isEmptyLine)
    {
        return new List<Token>();
    }

    // initialize tokens with start-of-line
    var delimiterTokens = new List<Token>
    {
        new Token(Grammar.Space) { Type = TokenType.StartOfLine }
    };

    // determine delimiter tokens
    for (var i = 0; i < line.Length; i++)
    {
        var c = line[i];

        TokenType tokenType;
        var isDelimiterToken = delimiters.Delimiters.TryGetValue(c, out tokenType);
        if (!isDelimiterToken)
        {
            continue;
        }

        // ignore delimiters escaped with a preceding backslash
        var isEscapedToken = Grammar.EscapableTokens.Contains(tokenType)
            && i > 0
            && line[i - 1] == Grammar.Backslash;
        if (isEscapedToken)
        {
            continue;
        }

        // ignore comment indicators that do not start the line
        var isInlineCommentIndicator = tokenType == TokenType.CommentIndicator && i > 0;
        if (isInlineCommentIndicator)
        {
            continue;
        }

        // ignore inline section delimiters
        var isInlineLeftSectionDelimiter = tokenType == TokenType.LeftSectionDelimiter && i > 0;
        var isInlineRightSectionDelimiter = tokenType == TokenType.RightSectionDelimiter
            && i <= line.Length - 1
            && delimiterTokens.Skip(1).FirstOrDefault()?.Type != TokenType.LeftSectionDelimiter;
        var ignoreInlineSectionDelimiter = isInlineLeftSectionDelimiter || isInlineRightSectionDelimiter;
        if (ignoreInlineSectionDelimiter)
        {
            continue;
        }

        // ignore property/value delimiter if first or already found
        var startsWithPropertyValueDelimiter = tokenType == TokenType.ProperetyValueDelimiter && i == 0;
        var containsPropertyValueDelimiter = delimiterTokens.Any(t => t.Type == TokenType.ProperetyValueDelimiter);
        var ignorePropertyValueDelimiter = startsWithPropertyValueDelimiter || containsPropertyValueDelimiter;
        if (ignorePropertyValueDelimiter)
        {
            continue;
        }

        // collect token
        var token = new Token(c) { FromColumn = i, Type = tokenType };
        delimiterTokens.Add(token);
    }

    // add end-of-line token
    delimiterTokens.Add(new Token(Grammar.Space) { Type = TokenType.EndOfLine, FromColumn = line.Length });

    // collect the text between the delimiter tokens into the final token list
    var lineTokens = CollectText(line, delimiterTokens);
    return lineTokens;
}
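A minimal usage sketch, not part of the original source: it assumes this method sits in the same class as Tokenize above, that a populated DelimiterDictionary is supplied by the caller (how it is built is not shown here), and that Sentence exposes the settable Next reference that Tokenize itself uses. The "database"/"host" values and the BuildAndParse name are invented for illustration.

// hypothetical sketch combining the factory methods and Tokenize shown above
public static Sentence BuildAndParse(DelimiterDictionary delimiters)
{
    // compose a few sentences with the factory methods
    var section = SectionFactory.CreateSection("database", delimiters);
    var property = SectionFactory.CreateProperty("host", "localhost", delimiters);
    var comment = SectionFactory.CreateComment("connection settings", delimiters);

    // chain them the same way Tokenize links sentences (assumes Sentence.Next, as used in Tokenize)
    section.Next = property;
    property.Next = comment;

    // tokenizing raw text yields the implicit global section with the parsed lines chained after it
    var ini = "[database]" + Environment.NewLine + "host=localhost";
    var document = Tokenize(ini, delimiters);

    return document;
}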