/// <summary>
///	Translates a zero-based character offset within <paramref name="content"/> into a
///	human-readable (1-based) line/character location.
/// </summary>
/// <param name="content">The template text; only scanned when <paramref name="lines"/> is null</param>
/// <param name="characterIndex">Zero-based character offset to translate</param>
/// <param name="lines">Cached indexes of the newline matches in <paramref name="content"/>; built on demand when null</param>
/// <returns>The 1-based line/character location of <paramref name="characterIndex"/></returns>
private static CharacterLocation HumanizeCharacterLocation(string content, int characterIndex, List<int> lines)
{
	// Lazily build the newline-index cache when the caller did not supply one.
	// NOTE(review): 'lines' is a by-value parameter, so a list built here is not
	// visible to the caller and will be rebuilt on every call that passes null.
	if (lines == null)
	{
		lines = new List<int>();
		lines.AddRange(NewlineFinder.Matches(content).OfType<Match>().Select(k => k.Index));
	}

	// FIX: use List<int>.BinarySearch directly; the original copied the list to a
	// brand-new array (lines.ToArray()) on every single call just to search it.
	// Semantics are identical: non-negative hit index, or the bitwise complement
	// of the insertion point on a miss.
	var line = lines.BinarySearch(characterIndex);
	line = line < 0 ? ~line : line;

	var charIdx = characterIndex;
	//in both of these cases, we want to increment the char index by one to account for the '\n' that is skipped in the indexes.
	if (line < lines.Count && line > 0)
	{
		charIdx = characterIndex - (lines[line - 1] + 1);
	}
	else if (line > 0)
	{
		charIdx = characterIndex - (lines.LastOrDefault() + 1);
	}

	var retval = new CharacterLocation
	{
		//Humans count from 1, so let's do that, too (hence the "+1" on these).
		Line = line + 1,
		Character = charIdx + 1
	};
	return retval;
}
/// <summary>
///	Translates a zero-based character offset into a human-readable (1-based)
///	line/character location, using the precomputed newline indexes of the template.
/// </summary>
/// <param name="characterIndex">Zero-based character offset to translate</param>
/// <param name="lines">Sorted indexes of the newline characters in the template</param>
/// <returns>The 1-based line/character location of <paramref name="characterIndex"/></returns>
internal static CharacterLocation HumanizeCharacterLocation(int characterIndex, int[] lines)
{
	// A non-negative result is an exact newline hit; a negative result encodes
	// the insertion point (= number of newlines before the offset) as a complement.
	var lineNo = Array.BinarySearch(lines, characterIndex);
	if (lineNo < 0)
	{
		lineNo = ~lineNo;
	}

	var column = characterIndex;
	//in both of these cases, we want to increment the char index by one to account for the '\n' that is skipped in the indexes.
	if (lineNo > 0)
	{
		column = lineNo < lines.Length
			? characterIndex - (lines[lineNo - 1] + 1)
			: characterIndex - (lines.LastOrDefault() + 1);
	}

	//Humans count from 1, so let's do that, too (hence the "+1" on these).
	return new CharacterLocation
	{
		Line = lineNo + 1,
		Character = column + 1
	};
}
/// <summary>
/// Creates a new Token Pair
/// </summary>
/// <param name="type">The classification of this token (what kind of template element it represents)</param>
/// <param name="value">The raw token text taken from the template</param>
/// <param name="tokenLocation">The human-readable (1-based) position of the token within the template source</param>
public TokenPair(TokenType type, string value, CharacterLocation tokenLocation)
{
	Type = type;
	Value = value;
	TokenLocation = tokenLocation;
}
/// <summary>
///	Validates the right-hand side of a "#var name = ..." declaration.
///	When the argument is a quoted string constant, the surrounding delimiters are
///	stripped and escape sequences are resolved in place (the ref parameter is
///	rewritten to the unescaped content). Otherwise the argument is validated as a
///	path expression. Returns null after recording a syntax error in
///	<paramref name="parseErrors"/>.
/// </summary>
internal static Tokenizer.HeaderArgumentType? ValidateArgument(ref string expression, ICollection<IMorestachioError> parseErrors, CharacterLocation tokenLocation, List<int> lines, int tokenIndex)
{
	// Nothing at all after the '=' — report and bail.
	if (expression.Length == 0)
	{
		parseErrors.Add(new MorestachioSyntaxError(
			tokenLocation.AddWindow(new CharacterSnippedLocation(0, 0, expression)),
			"#var", "", "#var name = ", "expected ether an path expression or an string value"));
		return null;
	}

	if (Tokenizer.IsStringDelimiter(expression[0]))
	{
		//its a string constant
		// A string argument must also END with a string delimiter.
		// NOTE(review): this only checks that the last char is *some* delimiter,
		// not the same one that opened the string — confirm intended.
		if (!Tokenizer.IsStringDelimiter(expression[expression.Length - 1]))
		{
			parseErrors.Add(new MorestachioSyntaxError(
				tokenLocation.AddWindow(new CharacterSnippedLocation(0, expression.Length, expression)),
				"#var", "", "#var name = " + expression[0], "expected " + expression[0]));
			return null;
		}

		// Unescape the string body: walk the chars between the delimiters and
		// resolve backslash escapes, rejecting any bare (unescaped) delimiter.
		var expectStringDelimiter = false;
		var delimiter = expression[0];
		var resultString = "";
		// Strip the opening and closing delimiter before scanning.
		expression = expression.Substring(1, expression.Length - 2);
		for (int i = 0; i < expression.Length; i++)
		{
			var c = expression[i];
			if (expectStringDelimiter)
			{
				// We are directly after a '\': the char is taken literally.
				// NOTE(review): the escape flag is only cleared when the escaped
				// char is the delimiter itself; after e.g. "\x" the flag stays
				// set, so ALL following chars (including delimiters) are taken
				// literally until a delimiter appears. Also the '\' itself is
				// dropped from the result. Looks unintended — confirm.
				resultString += c;
				if (c == delimiter)
				{
					expectStringDelimiter = false;
				}
			}
			else
			{
				if (c == '\\')
				{
					// Start of an escape sequence; the backslash is not emitted.
					expectStringDelimiter = true;
				}
				else
				{
					resultString += c;
				}

				// A bare delimiter inside the string body is a syntax error.
				if (c == delimiter)
				{
					parseErrors.Add(new MorestachioSyntaxError(
						tokenLocation.AddWindow(new CharacterSnippedLocation(0, i, expression)),
						"#var", "", expression, "Unexpected " + c + ". Expected ether an escaped \\" + c + " or end of string"));
					return null;
				}
			}
		}

		// Hand the unescaped content back to the caller through the ref parameter.
		expression = resultString;
		return Tokenizer.HeaderArgumentType.String;
	}
	else
	{
		// Not quoted: validate as a path expression.
		Tokenizer.Validated(expression, tokenIndex, lines, parseErrors);
		return Tokenizer.HeaderArgumentType.Expression;
	}
}
/// <summary>
///	Tokenizes a "#var name = expression" declaration into its token pairs.
///	On any syntax error, the error is recorded in <paramref name="parseErrors"/>
///	and an empty array is returned.
/// </summary>
/// <param name="tokenValue">The raw token text; must start with "#var "</param>
/// <param name="tokenLocation">The location of the token within the template</param>
/// <param name="lines">Newline indexes of the template, used to humanize locations</param>
/// <param name="tokenIndex">The character offset of the token within the template</param>
/// <param name="parseErrors">Collector for syntax errors found while tokenizing</param>
/// <param name="options">Parser options, forwarded to the formattable tokenizer</param>
/// <returns>The token pairs for the declaration, or an empty array on error</returns>
public static TokenPair[] Tokenize(string tokenValue, CharacterLocation tokenLocation, List<int> lines, int tokenIndex, ICollection<IMorestachioError> parseErrors, ParserOptions options)
{
	// FIX (CA1310): the string overload of IndexOf is culture-sensitive by default;
	// keyword matching must be ordinal.
	var variableNameIndex = tokenValue.IndexOf("#var ", StringComparison.Ordinal);
	if (variableNameIndex != 0)
	{
		parseErrors.Add(new MorestachioSyntaxError(
			tokenLocation.AddWindow(new CharacterSnippedLocation(0, 0, tokenValue)),
			"#var", "", "#var name", "Expected #var"));
		return new TokenPair[0];
	}

	tokenValue = tokenValue.Substring("#var ".Length);
	string variableName = null;
	int i = 0;
	// Scan for the '=' that separates the variable name from its value expression.
	for (; i < tokenValue.Length; i++)
	{
		var c = tokenValue[i];
		if (c == '=')
		{
			variableName = tokenValue.Substring(0, i).Trim(' ');
			break;
		}

		// NOTE(review): digits are rejected here, so variable names may contain
		// only letters and spaces — confirm that restriction is intended.
		if (!char.IsLetter(c) && c != ' ')
		{
			parseErrors.Add(new MorestachioSyntaxError(
				tokenLocation.AddWindow(new CharacterSnippedLocation(0, i, tokenValue)),
				"#var", "", "#var name", "Invalid character detected. Expected only spaces or letters."));
			return new TokenPair[0];
		}
	}

	if (variableName == null)
	{
		parseErrors.Add(new MorestachioSyntaxError(
			tokenLocation.AddWindow(new CharacterSnippedLocation(0, "#var ".Length, tokenValue)),
			"#var", "", "#var name", "expected variable name"));
		// BUGFIX: the original fell through here. With no '=' in the token,
		// IndexOf('=') below returns -1 and Substring(-1) throws
		// ArgumentOutOfRangeException instead of reporting the parse error.
		return new TokenPair[0];
	}

	var expression = tokenValue.Substring(tokenValue.IndexOf('=')).Trim(' ', '=');
	if (string.IsNullOrEmpty(expression))
	{
		parseErrors.Add(new MorestachioSyntaxError(
			tokenLocation.AddWindow(new CharacterSnippedLocation(0, "#var ".Length, tokenValue)),
			"#var", "", "#var name = ", "expected ether an path expression or an string value"));
		return new TokenPair[0];
	}

	var tokens = new List<TokenPair>();
	tokens.Add(new TokenPair(TokenType.VariableDeclaration, variableName, tokenLocation));

	var formats = Tokenizer.EnumerateFormats(expression, lines, tokenIndex, parseErrors);
	if (!formats.Any())
	{
		// Plain value: either a quoted string constant or a path expression.
		var headerArgumentType = ValidateArgument(ref expression, parseErrors, tokenLocation.Offset("#var ".Length + i), lines, tokenIndex);
		if (headerArgumentType == null)
		{
			return new TokenPair[0];
		}

		switch (headerArgumentType)
		{
			case Tokenizer.HeaderArgumentType.String:
				tokens.Add(new TokenPair(TokenType.Content, expression, Tokenizer.HumanizeCharacterLocation(tokenIndex, lines)));
				break;
			case Tokenizer.HeaderArgumentType.Expression:
				tokens.Add(new TokenPair(TokenType.EscapedSingleValue, expression, Tokenizer.HumanizeCharacterLocation(tokenIndex, lines)));
				break;
		}
	}
	else
	{
		// The expression contains formatter calls; delegate to the formattable tokenizer.
		tokens.AddRange(Tokenizer.TokenizeFormattables(formats, expression, null, lines, tokenIndex, parseErrors, options));
	}

	tokens.Add(new TokenPair(TokenType.VariableSet, variableName, tokenLocation));
	return tokens.ToArray();
}