/// <summary>
/// Decodes the specified number of bytes of the phrase starting
/// at the specified starting index.
/// </summary>
/// <returns>The decoded phrase.</returns>
/// <param name="options">The parser options to use.</param>
/// <param name="phrase">The phrase to decode.</param>
/// <param name="startIndex">The starting index.</param>
/// <param name="count">The number of bytes to decode.</param>
/// <exception cref="System.ArgumentNullException">
/// <para><paramref name="options"/> is <c>null</c>.</para>
/// <para>-or-</para>
/// <para><paramref name="phrase"/> is <c>null</c>.</para>
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// <paramref name="startIndex"/> and <paramref name="count"/> do not specify
/// a valid range in the byte array.
/// </exception>
public static string DecodePhrase(ParserOptions options, byte[] phrase, int startIndex, int count)
{
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (phrase == null)
		throw new ArgumentNullException (nameof (phrase));

	if (startIndex < 0 || startIndex > phrase.Length)
		throw new ArgumentOutOfRangeException (nameof (startIndex));

	// Compare against the remaining length rather than (startIndex + count)
	// so the check cannot overflow for very large argument values.
	if (count < 0 || count > (phrase.Length - startIndex))
		throw new ArgumentOutOfRangeException (nameof (count));

	if (count == 0)
		return string.Empty;

	unsafe {
		fixed (byte* inbuf = phrase) {
			var tokens = TokenizePhrase (options, inbuf, startIndex, count);

			return DecodeTokens (options, tokens, phrase, inbuf, count);
		}
	}
}
/// <summary>
/// Renders an HTML code block by extracting a named section from a sample
/// source file for the given language and documentation version.
/// </summary>
/// <param name="languageAsString">The language name, parsed case-insensitively into <c>Language</c>.</param>
/// <param name="value">A marker of the form <c>section@file</c>.</param>
/// <param name="documentationVersion">The documentation version used to locate the samples directory.</param>
/// <param name="options">Parser options providing the code directory lookup.</param>
private static string GenerateCodeBlockFromFile(string languageAsString, string value, string documentationVersion, ParserOptions options)
{
    var language = (Language)Enum.Parse(typeof(Language), languageAsString, true);
    var samplesDirectory = options.GetPathToCodeDirectory(language, documentationVersion);

    // The marker value is "<section>@<file>".
    var parts = value.Split('@');
    var sectionName = parts[0];
    var fileName = parts[1];

    string extracted;
    switch (language)
    {
        case Language.Csharp:
            extracted = ExtractSectionFromCsharpFile(sectionName, Path.Combine(samplesDirectory, fileName));
            break;
        case Language.Java:
            extracted = ExtractSectionFromJavaFile(sectionName, Path.Combine(samplesDirectory, fileName));
            break;
        default:
            throw new NotSupportedException(language.ToString());
    }

    var html = new StringBuilder();
    html.AppendLine(string.Format("<pre class='line-numbers'><code class='{0}'>{1}</code></pre>", ConvertLanguageToCssClass(language), extracted));
    return html.ToString();
}
/// <summary>
/// Parses the given source text and returns the resulting program node,
/// resetting all scanner/parser state beforehand.
/// </summary>
/// <param name="code">The source text to parse.</param>
/// <param name="options">Parser options (not read by this overload).</param>
/// <returns>The program node produced by <c>ParseProgram</c>.</returns>
public dynamic Parse(string code, ParserOptions options)
{
    // Line numbering starts at 1 for non-empty input, 0 for empty input.
    _lineNumber = (code.Length > 0) ? 1 : 0;
    _lineStart = 0;
    _length = code.Length;
    _buffer = null;

    // Fresh parse state; previous labels/flags must not leak between runs.
    _state = new State
    {
        AllowIn = true,
        LabelSet = new Dictionary<string, object>(),
        LastParenthesized = null,
        InFunctionBody = false,
        InIteration = false,
        InSwitch = false
    };

    if (_length > 0)
    {
        _source = StringToArray(code).ToList();
    }

    return ParseProgram();
}
/// <summary>
/// Validates that the common parser arguments are not null.
/// </summary>
/// <param name="options">The parser options to validate.</param>
/// <param name="buffer">The buffer to validate.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="options"/> or <paramref name="buffer"/> is <c>null</c>.
/// </exception>
public static void ValidateArguments (ParserOptions options, byte[] buffer)
{
	// Use nameof so renames cannot desynchronize the exception parameter name
	// (consistent with the 4-argument overload).
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (buffer == null)
		throw new ArgumentNullException (nameof (buffer));
}
/// <summary>
/// Validates that the common parser arguments are not null.
/// </summary>
/// <param name="options">The parser options to validate.</param>
/// <param name="text">The text to validate.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="options"/> or <paramref name="text"/> is <c>null</c>.
/// </exception>
public static void ValidateArguments (ParserOptions options, string text)
{
	// Use nameof so renames cannot desynchronize the exception parameter name
	// (consistent with the byte[] overloads).
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (text == null)
		throw new ArgumentNullException (nameof (text));
}
/// <summary>
/// Downloads a publicly shared Google Doc as plain text and parses it.
/// </summary>
/// <param name="docIdOrUrl">Either a bare document id or any URL that contains one.</param>
/// <param name="options">Parser options. NOTE(review): not forwarded to <c>LoadUrl</c> — confirm this is intended.</param>
public static JObject LoadPublicGoogleDoc(string docIdOrUrl, ParserOptions options = ParserOptions.None)
{
    const string GDRIVE_DOWNLOAD = @"https://docs.google.com/feeds/download/documents/export/Export?id={0}&exportFormat=txt";

    // A document id is a run of 25+ word/dash characters; extract it from a
    // full URL when present, otherwise treat the input as the id itself.
    var idMatch = Regex.Match(docIdOrUrl, @"[-\w]{25,}");
    var documentId = idMatch.Success ? idMatch.Value : docIdOrUrl;

    return LoadUrl(string.Format(GDRIVE_DOWNLOAD, documentId));
}
/// <summary>
/// Matches a raw command-line argument against the configured flag headers
/// and value separators.
/// </summary>
/// <returns>A <c>FlagMatch</c> when the argument looks like a flag; otherwise <c>null</c>.</returns>
public static FlagMatch FromArgumentString(string arg, ParserOptions options)
{
    var headerAlternatives = Util.RegexForAnyLiteral(options.LongFlagHeaders.Union(options.ShortFlagHeaders));
    var separatorAlternatives = Util.RegexForAnyLiteral(options.FlagValueSeparators);

    // header, then flag name, then an optional "<separator><value>" tail.
    var pattern = string.Format("{0}(.+?)({1}(.*))?$", headerAlternatives, separatorAlternatives);
    var match = Regex.Match(arg, pattern);

    if (!match.Success)
        return null;

    return new FlagMatch(match, options);
}
/// <summary>
/// Expands every supported code marker in the content into rendered HTML:
/// tabbed blocks first, then file-backed blocks (with and without an explicit
/// language), then inline code blocks.
/// </summary>
public static string GenerateCodeBlocks(string content, string documentationVersion, ParserOptions options)
{
    content = CodeTabsFinder.Replace(
        content,
        m => GenerateCodeTabsBlock(m.Groups[1].Value.Trim(), documentationVersion, options));

    content = CodeWithLanguageFinder.Replace(
        content,
        m => GenerateCodeBlockFromFile(m.Groups[1].Value.Trim(), m.Groups[2].Value.Trim(), documentationVersion, options));

    // Blocks without a language default to C#.
    content = CodeWithoutLanguageFinder.Replace(
        content,
        m => GenerateCodeBlockFromFile("csharp", m.Groups[1].Value.Trim(), documentationVersion, options));

    content = CodeBlockFinder.Replace(
        content,
        m => GenerateCodeBlock(m.Groups[1].Value.Trim(), m.Groups[2].Value.Trim()));

    return content;
}
/// <summary>
/// Creates the parser responsible for a single type and wires up the
/// sub-parsers for its members. All member parsers share one TypeRefParser
/// instance so type references are resolved consistently.
/// </summary>
internal TypeParser(ParserOptions parserOptions) : base(parserOptions)
{
    _typeRefParser = new TypeRefParser(new StrangerTypeParser(parserOptions), parserOptions);
    _typeParameterParser = new TypeParameterParser(_typeRefParser, parserOptions);
    _eventParser = new EventParser(_typeRefParser, parserOptions);
    _fieldParser = new FieldParser(_typeRefParser, parserOptions);
    // MethodParser additionally needs the type-parameter parser for generic methods.
    _methodParser = new MethodParser(_typeParameterParser, _typeRefParser, parserOptions);
    _propertyParser = new PropertyParser(_typeRefParser, parserOptions);
}
/// <summary>
/// Builds a fresh <c>ParserOptions</c> populated with every variable
/// registered on this instance.
/// </summary>
public ParserOptions ToParserOptions()
{
    var result = new ParserOptions();

    foreach (var variable in Variables)
        result.AddVariable(variable);

    return result;
}
/// <summary>
/// Initializes a FlagMatch from a successful regex match produced by
/// <c>FromArgumentString</c>.
/// </summary>
// Group layout assumed from the pattern "{header}(.+?)({separator}(.*))?$":
// 1 = header, 2 = flag name, 4 = value separator, 5 = assigned value.
// NOTE(review): this relies on Util.RegexForAnyLiteral emitting exactly one
// capture group per call — confirm.
private FlagMatch(Match match, ParserOptions options) : this(options)
{
    Header = match.Groups[1].Value;
    FlagName = match.Groups[2].Value;
    // The separator/value groups are optional; guard on the group count.
    AssignmentOperator = match.Groups.Count >= 5 ? match.Groups[4].Value : null;
    AssignmentValue = match.Groups.Count >= 6 ? match.Groups[5].Value : null;
    // A header string may be registered as both a short and a long header.
    Type |= options.ShortFlagHeaders.Contains(Header) ? FlagType.Short : 0;
    Type |= options.LongFlagHeaders.Contains(Header) ? FlagType.Long : 0;
}
/// <summary>
/// Validates the common parser arguments and the starting index.
/// </summary>
/// <param name="options">The parser options to validate.</param>
/// <param name="buffer">The buffer to validate.</param>
/// <param name="startIndex">The starting index into <paramref name="buffer"/>.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="options"/> or <paramref name="buffer"/> is <c>null</c>.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startIndex"/> is outside the buffer.
/// </exception>
public static void ValidateArguments (ParserOptions options, byte[] buffer, int startIndex)
{
	// Use nameof so renames cannot desynchronize the exception parameter name
	// (consistent with the 4-argument overload).
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (buffer == null)
		throw new ArgumentNullException (nameof (buffer));

	if (startIndex < 0 || startIndex > buffer.Length)
		throw new ArgumentOutOfRangeException (nameof (startIndex));
}
//
// Constructor takes the string to parse and the parser options.
//
internal Scanner(string expressionToParse, ParserOptions options)
{
    // We currently have no support (and no scenarios) for disallowing property references
    // in Conditions.
    ErrorUtilities.VerifyThrow(0 != (options & ParserOptions.AllowProperties), "Properties should always be allowed.");

    _expression = expressionToParse;
    _parsePoint = 0;
    _errorState = false;
    _errorPosition = -1; // invalid
    _options = options;
}
/// <summary>
/// Validates the common parser arguments along with the requested range.
/// </summary>
/// <param name="options">The parser options to validate.</param>
/// <param name="buffer">The buffer to validate.</param>
/// <param name="startIndex">The starting index into <paramref name="buffer"/>.</param>
/// <param name="length">The number of bytes available starting at <paramref name="startIndex"/>.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="options"/> or <paramref name="buffer"/> is <c>null</c>.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startIndex"/> and <paramref name="length"/> do not specify
/// a valid range in <paramref name="buffer"/>.
/// </exception>
public static void ValidateArguments (ParserOptions options, byte[] buffer, int startIndex, int length)
{
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (buffer == null)
		throw new ArgumentNullException (nameof (buffer));

	// Range checks are written against the remaining capacity so they hold
	// for all int values without overflowing.
	if (startIndex < 0 || buffer.Length < startIndex)
		throw new ArgumentOutOfRangeException (nameof (startIndex));

	if (length < 0 || (buffer.Length - startIndex) < length)
		throw new ArgumentOutOfRangeException (nameof (length));
}
/// <summary>
/// Parses the tab markers inside a code-tabs region and renders a Bootstrap
/// style tab strip: a nav list of tab headers followed by one pane per tab.
/// The first tab is marked "active" in both lists.
/// </summary>
private static string GenerateCodeTabsBlock(string content, string documentationVersion, ParserOptions options)
{
    var tabs = new List<CodeTab>();

    // File-backed tabs: group 1 is "language[:title]", group 2 is "section@file".
    var matches = CodeTabFinder.Matches(content);
    foreach (Match match in matches)
    {
        var languageAndTitle = match.Groups[1].Value.Trim();
        var parts = languageAndTitle.Split(':');
        var languageAsString = parts[0];
        var title = parts.Length > 1 ? parts[1] : null;
        var value = match.Groups[2].Value.Trim();
        tabs.Add(GenerateCodeTabFromFile(languageAsString, title, value, documentationVersion, options));
    }

    // Inline tabs whose code is embedded directly in the markup.
    matches = CodeTabBlockFinder.Matches(content);
    foreach (Match match in matches)
        tabs.Add(GenerateCodeTab(match.Groups[1].Value.Trim(), match.Groups[2].Value.Trim()));

    var builder = new StringBuilder();
    builder.AppendLine("<div class='code-tabs'>");
    builder.AppendLine("<ul class='nav nav-tabs'>");
    // Tab headers; a missing title falls back to the language display name.
    for (int index = 0; index < tabs.Count; index++)
    {
        var tab = tabs[index];
        builder.AppendLine(string.Format("<li class='code-tab {2}'><a href='#{0}' data-toggle='tab'>{1}</a></li>", tab.Id, tab.Title ?? ConvertLanguageToDisplayName(tab.Language), index == 0 ? "active" : string.Empty));
    }
    builder.AppendLine("</ul>");
    builder.AppendLine("<div class='tab-content'>");
    // Tab panes, matched to the headers above by tab.Id.
    for (int index = 0; index < tabs.Count; index++)
    {
        var tab = tabs[index];
        builder.AppendLine(string.Format("<div class='tab-pane code-tab {1}' id='{0}'>", tab.Id, index == 0 ? "active" : string.Empty));
        builder.AppendLine(string.Format("<pre class='line-numbers'><code class='{0}'>{1}</code></pre>", ConvertLanguageToCssClass(tab.Language), tab.Content));
        builder.AppendLine("</div>");
    }
    builder.AppendLine("</div>");
    builder.AppendLine("</div>");
    return builder.ToString();
}
/// <summary>
/// Evaluates a string representing a condition from a "condition" attribute.
/// If the condition is a malformed string, it throws an InvalidProjectFileException.
/// This method uses cached expression trees to avoid generating them from scratch every time it's called.
/// This method is thread safe and is called from engine and task execution module threads
/// Logging service may be null.
/// </summary>
internal static bool EvaluateConditionCollectingConditionedProperties<P, I>(
    string condition,
    ParserOptions options,
    Expander<P, I> expander,
    ExpanderOptions expanderOptions,
    Dictionary<string, List<string>> conditionedPropertiesTable,
    string evaluationDirectory,
    ElementLocation elementLocation,
    ILoggingService loggingServices,
    BuildEventContext buildEventContext,
    IFileSystem fileSystem,
    ProjectRootElementCacheBase projectRootElementCache = null)
    where P : class, IProperty
    where I : class, IItem
{
    ErrorUtilities.VerifyThrowArgumentNull(condition, nameof(condition));
    ErrorUtilities.VerifyThrowArgumentNull(expander, nameof(expander));
    ErrorUtilities.VerifyThrowArgumentLength(evaluationDirectory, nameof(evaluationDirectory));
    ErrorUtilities.VerifyThrowArgumentNull(buildEventContext, nameof(buildEventContext));

    // An empty condition is equivalent to a "true" condition.
    if (condition.Length == 0)
    {
        return (true);
    }

    // If the condition wasn't empty, there must be a location for it
    ErrorUtilities.VerifyThrowArgumentNull(elementLocation, nameof(elementLocation));

    // Get the expression tree cache for the current parsing options.
    // Caches are keyed by the ParserOptions value so the same condition string
    // can have different trees under different option sets.
    var cachedExpressionTreesForCurrentOptions = s_cachedExpressionTrees.GetOrAdd(
        (int)options,
        _ => new ExpressionTreeForCurrentOptionsWithSize(new ConcurrentDictionary<string, ConcurrentStack<GenericExpressionNode>>(StringComparer.Ordinal)));

    // Evict the whole cache for these options if it has grown too large.
    cachedExpressionTreesForCurrentOptions = FlushCacheIfLargerThanThreshold(options, cachedExpressionTreesForCurrentOptions);

    // Get the pool of expressions for this condition.
    var expressionPool = cachedExpressionTreesForCurrentOptions.GetOrAdd(condition, _ => new ConcurrentStack<GenericExpressionNode>());

    // Try and see if there's an available expression tree in the pool.
    // If not, parse a new expression tree and add it back to the pool.
    if (!expressionPool.TryPop(out var parsedExpression))
    {
        var conditionParser = new Parser();

        #region REMOVE_COMPAT_WARNING
        conditionParser.LoggingServices = loggingServices;
        conditionParser.LogBuildEventContext = buildEventContext;
        #endregion

        parsedExpression = conditionParser.Parse(condition, options, elementLocation);
    }

    bool result;

    var state = new ConditionEvaluationState<P, I>(
        condition,
        expander,
        expanderOptions,
        conditionedPropertiesTable,
        evaluationDirectory,
        elementLocation,
        fileSystem,
        projectRootElementCache);

    // We are evaluating this expression now and it can cache some state for the duration,
    // so we don't want multiple threads working on the same expression
    lock (parsedExpression)
    {
        try
        {
            result = parsedExpression.Evaluate(state);
        }
        finally
        {
            parsedExpression.ResetState();
            if (!s_disableExpressionCaching)
            {
                // Finished using the expression tree. Add it back to the pool so other threads can use it.
                expressionPool.Push(parsedExpression);
            }
        }
    }

    return (result);
}
/// <summary>
/// Creates the namespace parser and collects every "*.sdnd" namespace
/// description file found beside (and below) the configured input file.
/// </summary>
internal NamespaceParser(ParserOptions parserOptions) : base(parserOptions)
{
    _typeParser = new TypeParser(parserOptions);
    // NOTE(review): Path.GetDirectoryName can return null for root paths —
    // confirm SharpDoxConfig.InputFile is always a full file path.
    _descriptionFiles = Directory.EnumerateFiles(Path.GetDirectoryName(parserOptions.SharpDoxConfig.InputFile), "*.sdnd", SearchOption.AllDirectories).ToList();
}
/// <summary>
/// Should return a document item that will be invoked when parsing the Template.
/// </summary>
/// <param name="token">The token pair to turn into a document item.</param>
/// <param name="options">The parser options in effect for this template.</param>
/// <param name="buildStack">The stack of document scopes being built.</param>
/// <returns>The document item representing this token.</returns>
public abstract IDocumentItem Parse(TokenPair token, ParserOptions options, Stack<DocumentScope> buildStack);
/// <summary>
/// Should return True if the Token is produced by this provider and should be parsed with this provider.
/// </summary>
/// <param name="token">The token pair to inspect.</param>
/// <param name="options">The parser options in effect for this template.</param>
/// <returns><c>true</c> when this provider owns the token; otherwise <c>false</c>.</returns>
public abstract bool ShouldParse(TokenPair token, ParserOptions options);
/// <inheritdoc />
public override IDocumentItem CreateDocumentItem(string tag, string value, TokenPair token, ParserOptions options)
{
    // Only the token's expression is consumed; tag, value and options are unused here.
    return (new MorestachioLocalizationDocumentItem(token.MorestachioExpression));
}
/// <summary>
/// Runs one parser fixture: parses the given JS file and compares the result
/// against its companion .tree.json / .module.json / .failure.json file.
/// </summary>
/// <param name="jsFilePath">Path to the JavaScript fixture file.</param>
public void ExecuteTestCase(string jsFilePath)
{
    var options = new ParserOptions { Range = true, Loc = true, Tokens = true, SourceType = SourceType.Script };

    // For "*.source.js" fixtures the companion files are named after the base
    // name with the ".source" part stripped as well. Computing the base name
    // once removes the previous triple duplication of this path logic.
    var isSourceFixture = jsFilePath.EndsWith(".source.js");
    var directory = Path.GetDirectoryName(jsFilePath);
    var baseName = Path.GetFileNameWithoutExtension(jsFilePath);
    if (isSourceFixture)
        baseName = Path.GetFileNameWithoutExtension(baseName);

    var treeFilePath = Path.Combine(directory, baseName) + ".tree.json";
    var failureFilePath = Path.Combine(directory, baseName) + ".failure.json";
    var moduleFilePath = Path.Combine(directory, baseName) + ".module.json";

    // Convert to LF to match the number of chars the parser finds
    var script = File.ReadAllText(jsFilePath).Replace(Environment.NewLine, "\n");

    if (isSourceFixture)
    {
        // The fixture wraps the real test source in a variable initializer;
        // parse the wrapper and pull the string literal out.
        var parser = new JavaScriptParser(script);
        var program = parser.ParseProgram();
        var source = program.Body.First().As<VariableDeclaration>().Declarations.First().As<VariableDeclarator>().Init.As<Literal>().StringValue;
        script = source;
    }

    string expected = "";
    bool invalid = false;

    // Heuristic: fixtures mentioning modules/exports/imports parse as modules.
    var filename = Path.GetFileNameWithoutExtension(jsFilePath);
    var isModule = filename.Contains("module") || filename.Contains("export") || filename.Contains("import");
    options.SourceType = isModule ? SourceType.Module : SourceType.Script;

    if (File.Exists(moduleFilePath))
    {
        options.SourceType = SourceType.Module;
        expected = File.ReadAllText(moduleFilePath);
    }
    else if (File.Exists(treeFilePath))
    {
        expected = File.ReadAllText(treeFilePath);
    }
    else if (File.Exists(failureFilePath))
    {
        invalid = true;
        expected = File.ReadAllText(failureFilePath);
    }

    invalid |= filename.Contains("error") || filename.Contains("invalid");

    if (!invalid)
    {
        options.Tolerant = true;
        var actual = ParseAndFormat(script, options);
        Assert.True(CompareTrees(actual, expected), jsFilePath);
    }
    else
    {
        options.Tolerant = false;
        // TODO: check the accuracy of the message and of the location
        Assert.Throws<ParserException>(() => ParseAndFormat(script, options));
    }
}
/// <summary>
/// Pass-through constructor: forwards the parser options, input stream and
/// MIME format to the base parser unchanged.
/// </summary>
public CustomMimeParser(ParserOptions options, Stream stream, MimeFormat format) : base(options, stream, format)
{
}
/// <summary>
/// Initializes a new template evaluation context, seeding defaults for all
/// language/limit options and pre-allocating the internal stacks and caches.
/// </summary>
/// <param name="builtin">The builtin script object to register as the first global store; when null, the default builtin object is used.</param>
public TemplateContext(ScriptObject builtin)
{
    BuiltinObject = builtin ?? GetDefaultBuiltinObject();

    // Default language/behavior switches.
    EnableOutput = true;
    EnableBreakAndContinueAsReturnOutsideLoop = false;
    EnableRelaxedTargetAccess = false;
    EnableRelaxedMemberAccess = true;
    EnableRelaxedFunctionAccess = false;
    EnableRelaxedIndexerAccess = true;

    // Safety limits (0 means the limit is not applied for the *ToString/recursion values below).
    LoopLimit = 1000;
    RecursiveLimit = 100;
    LimitToString = 0;
    ObjectRecursionLimit = 0;

    MemberRenamer = StandardMemberRenamer.Default;
    RegexTimeOut = TimeSpan.FromSeconds(10);
    TemplateLoaderParserOptions = new ParserOptions();
    TemplateLoaderLexerOptions = LexerOptions.Default;
    NewLine = Environment.NewLine;
    Language = ScriptLang.Default;

    // Output stack starts with a single string-builder output.
    _outputs = new FastStack<IScriptOutput>(4);
    _output = new StringBuilderOutput();
    _outputs.Push(_output);

    // Pre-sized stacks for evaluation state; 4 is the initial capacity.
    _globalStores = new FastStack<IScriptObject>(4);
    _availableLocalContexts = new FastStack<LocalContext>(4);
    _localContexts = new FastStack<LocalContext>(4);
    _availableStores = new FastStack<ScriptObject>(4);
    _cultures = new FastStack<CultureInfo>(4);
    _caseValues = new FastStack<object>(4);
    _availableTags = new FastStack<Dictionary<object, object>>(4);
    _sourceFiles = new FastStack<string>(4);
    _memberAccessors = new Dictionary<Type, IObjectAccessor>();
    _listAccessors = new Dictionary<Type, IListAccessor>();
    _loops = new FastStack<ScriptLoopStatementBase>(4);
    BlockDelegates = new FastStack<ScriptBlockStatement>(4);
    _availablePipeArguments = new FastStack<ScriptPipeArguments>(4);
    _pipeArguments = new FastStack<ScriptPipeArguments>(4);
    _availableScriptExpressionLists = new FastStack<List<ScriptExpression>>(4);

    // One reusable argument array per arity, up to the maximum parameter count.
    _availableReflectionArguments = new object[ScriptFunctionCall.MaximumParameterCount + 1][];
    for (int i = 0; i < _availableReflectionArguments.Length; i++)
    {
        _availableReflectionArguments[i] = new object[i];
    }

    _isFunctionCallDisabled = false;
    CachedTemplates = new Dictionary<string, Template>();
    Tags = new Dictionary<object, object>();

    // Ensure that builtin is registered first
    PushGlobal(BuiltinObject);
}
/// <summary>
/// Registers all globally configured formatter models on the given parser options.
/// </summary>
/// <param name="options">The parser options to receive the formatters.</param>
public void AddFormatterToMorestachio(ParserOptions options)
{
    // Delegates to the overload taking an explicit formatter collection.
    AddFormatterToMorestachio(GlobalFormatterModels, options);
}
/// <summary>
/// Executes the given source on the engine, logging any failure and
/// re-throwing it with a cleaned-up message.
/// </summary>
/// <param name="engine">The engine to execute on.</param>
/// <param name="source">The source code to execute.</param>
/// <param name="parserOptions">Optional parser options; when null the default overload is used.</param>
public static Engine ExecuteWithErrorHandle(this Engine engine, string source, ParserOptions parserOptions = null)
{
    try
    {
        if (parserOptions != null)
        {
            return engine.Execute(source, parserOptions);
        }
        return engine.Execute(source);
    }
    catch (Exception ex)
    {
        Kooboo.Data.Log.Instance.Exception.WriteException(ex);
        // Preserve the original exception as InnerException so its stack
        // trace is not lost when the wrapped failure reaches the caller.
        throw new Exception(GetInnerError(ex), ex);
    }
}
/// <summary>
/// Tries each formatter registered under the given name against the source
/// object, handling generic formatters by closing them over the source's
/// generic arguments; returns the first successful formatter result, or
/// FormatterFlow.Skip when no formatter applies.
/// </summary>
/// <param name="sourceObject">The value being formatted (becomes the first formatter argument).</param>
/// <param name="name">The formatter name to look up; null skips formatting.</param>
/// <param name="arguments">Additional formatter arguments.</param>
/// <param name="formatterGroup">The candidate formatter models to search.</param>
/// <param name="options">Parser options; used here for diagnostic logging.</param>
private object FormatConditonal(object sourceObject, string name, object[] arguments, IEnumerable<MorestachioFormatterModel> formatterGroup, ParserOptions options)
{
    if (name == null)
    {
        options.Formatters.Write(() => nameof(MorestachioFormatterService) + " | Name is null. Skip formatter");
        return (FormatterMatcher.FormatterFlow.Skip);
    }

    if (sourceObject == null)
    {
        options.Formatters.Write(() => nameof(MorestachioFormatterService) + " | Source Object is null. Skip formatter");
        return (FormatterMatcher.FormatterFlow.Skip);
    }

    // Candidates are matched by exact name.
    var directMatch = formatterGroup.Where(e => name.ToString().Equals(e.Name)).ToArray();

    // The source object is always passed as the formatter's first argument.
    arguments = new object[] { sourceObject }.Concat(arguments).ToArray();

    var type = sourceObject.GetType();
    var originalObject = sourceObject;

    if (!directMatch.Any())
    {
        options.Formatters.Write(() =>
        {
            var aggregate = formatterGroup.Any() ? formatterGroup.Select(e => e.Name).Aggregate((e, f) => e + "," + f) : "";
            return ($"{nameof(MorestachioFormatterService)} | No match Found for name: '{name}' Possible values for '{type}' are [{aggregate}]");
        });
        return (FormatterMatcher.FormatterFlow.Skip);
    }

    foreach (var morestachioFormatterModel in directMatch)
    {
        originalObject = sourceObject;
        options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)} | Test {morestachioFormatterModel.Name}");

        var target = morestachioFormatterModel.Function;
        var localGen = morestachioFormatterModel.InputType.GetGenericArguments();
        var templateGen = type.GetGenericArguments();

        if (morestachioFormatterModel.InputType.ContainsGenericParameters)
        {
            if (localGen.Any() != templateGen.Any())
            {
                // Arrays expose their element type rather than generic arguments.
                if (type.IsArray)
                {
                    templateGen = new[] { type.GetElementType() };
                }
                else
                {
                    options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| Generic type mismatch");
                    continue;
                }
            }

            // NOTE(review): this branch is unreachable — ContainsGenericParameters
            // was already checked true above; perhaps the method's (not the input
            // type's) generic state was meant. Confirm before changing.
            if (!morestachioFormatterModel.InputType.ContainsGenericParameters)
            {
                options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| Type has Generic but Method not");
                continue;
            }

            if (localGen.Length != templateGen.LongLength)
            {
                options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| Generic type count mismatch");
                continue;
            }

            // Close the open generic formatter over the source's type arguments.
            target = target.MakeGenericMethod(templateGen);
        }
        else
        {
            if (!morestachioFormatterModel.InputType.IsInstanceOfType(originalObject))
            {
                options.Formatters
                    .Write(() => $"{nameof(MorestachioFormatterService)}| Generic Type mismatch. Expected '{morestachioFormatterModel?.InputType}' but got {originalObject?.GetType()}");
                continue;
            }
        }

        try
        {
            // Null means the argument list cannot satisfy the method signature.
            var canInvokeFormatter = CanMethodCalledWith(target, arguments);
            if (canInvokeFormatter == null)
            {
                options.Formatters.Write(
                    () => $"{nameof(MorestachioFormatterService)} | Invalid usage of parameter");
                continue;
            }

            options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| Execute");
            originalObject = target.Invoke(null, canInvokeFormatter);
            options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| Formatter created '{originalObject}'");
            return (originalObject);
        }
        catch (Exception ex)
        {
            // A throwing formatter is skipped, not fatal; the next candidate is tried.
            options.Formatters.Write(() => $"{nameof(MorestachioFormatterService)}| calling of formatter has thrown a exception: '{ex}'");
            continue;
        }
    }

    return (FormatterMatcher.FormatterFlow.Skip);
}
/// <summary>
/// Parses the given source with the supplied parser options, then executes
/// the resulting program on this engine.
/// </summary>
public Engine Execute(string source, ParserOptions parserOptions)
{
    var javaScriptParser = new JavaScriptParser();
    var program = javaScriptParser.Parse(source, parserOptions);
    return Execute(program);
}
/// <summary> /// Goes through the template and evaluates all tokens that are enclosed by {{ }}. /// </summary> /// <param name="parserOptions"></param> /// <param name="context"></param> /// <returns></returns> public static async TokenizerResultPromise Tokenize(ParserOptions parserOptions, TokenzierContext context) { var templateString = parserOptions.Template; var scopestack = new Stack <ScopeStackItem>(); var partialsNames = new List <string>(parserOptions.PartialsStore?.GetNames() ?? new string[0]); context.SetLocation(0); var tokens = new List <TokenPair>(); void BeginElse(TokenMatch match) { var firstNonContentToken = tokens .AsReadOnly() .Reverse() .FirstOrDefault(e => !e.Type.Equals(TokenType.Content)); if (!firstNonContentToken.Type.Equals(TokenType.IfClose)) { context.Errors .Add(new MorestachioSyntaxError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, match.Value)), "find if block for else", firstNonContentToken.Value, "{{/if}}", "Could not find an /if block for this else")); } else { scopestack.Push(new ScopeStackItem(TokenType.Else, firstNonContentToken.Value, match.Index)); tokens.Add(new TokenPair(TokenType.Else, firstNonContentToken.Value, context.CurrentLocation)); } } void EndIf(TokenMatch match, string expected) { if (!scopestack.Any()) { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, match.Value)), "if", "{{#if name}}")); } else { var item1 = scopestack.Peek(); if (item1.TokenType == TokenType.If || item1.TokenType == TokenType.IfNot) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.IfClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, match.Value)), "if", "{{#if name}}")); } } } string TrimToken(string token, string keyword, char key = '#') { token = token.TrimStart(key); if (keyword != null) { token = 
token.Trim().Substring(keyword.Length); } return(token.Trim()); } foreach (var match in MatchTokens(templateString, context)) { var tokenValue = match.Value; var trimmedToken = tokenValue .Remove(0, context.PrefixToken.Length); trimmedToken = trimmedToken.Remove(trimmedToken.Length - context.SuffixToken.Length); if (context.CommentIntend > 0) { if (trimmedToken == "/!") { context.CommentIntend--; if (context.CommentIntend == 0) { //move forward in the string. if (context.Character > match.Index + match.Length) { throw new InvalidOperationException("Internal index location error"); } context.SetLocation(match.Index + match.Length); } } else if (trimmedToken.Equals("!")) { context.CommentIntend++; } } else { //yield front content. if (match.Index > context.Character) { tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character, match.Index - context.Character), context.CurrentLocation)); } context.SetLocation(match.Index + context.PrefixToken.Length); if (trimmedToken.StartsWith("#declare ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "declare "); scopestack.Push(new ScopeStackItem(TokenType.PartialDeclarationOpen, token, match.Index)); if (string.IsNullOrWhiteSpace(token)) { context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "open", "declare", "{{#declare name}}", " Missing the Name.")); } else { partialsNames.Add(token); tokens.Add(new TokenPair(TokenType.PartialDeclarationOpen, token, context.CurrentLocation)); } } else if (trimmedToken.Equals("/declare", StringComparison.CurrentCultureIgnoreCase)) { if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.PartialDeclarationOpen) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.PartialDeclarationClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new 
CharacterSnippedLocation(1, 1, tokenValue)), "declare", "{{#declare name}}")); } } else if (trimmedToken.StartsWith("#include ", true, CultureInfo.InvariantCulture)) { var token = trimmedToken.TrimStart('#').Trim(); var partialRegex = PartialIncludeRegEx.Match(token); var partialName = partialRegex.Groups[1].Value; var partialContext = partialRegex.Groups[2].Value; if (!string.IsNullOrWhiteSpace(partialContext)) { partialContext = token.Substring(partialRegex.Groups[2].Index + "WITH ".Length); } if (string.IsNullOrWhiteSpace(partialName) || !partialsNames.Contains(partialName)) { context.Errors.Add(new MorestachioSyntaxError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "use", "include", "{{#include name}}", $" There is no Partial declared '{partialName}'. Partial names are case sensitive and must be declared before an include.")); } else { IMorestachioExpression exp = null; if (!string.IsNullOrWhiteSpace(partialContext)) { exp = ExpressionParser.ParseExpression(partialContext, context); } var tokenPair = new TokenPair(TokenType.RenderPartial, partialName, context.CurrentLocation, exp); tokens.Add(tokenPair); } } else if (trimmedToken.StartsWith("#each ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "each"); var eval = EvaluateNameFromToken(token); token = eval.Value; var alias = eval.Name; scopestack.Push(new ScopeStackItem(TokenType.CollectionOpen, alias ?? 
token, match.Index)); if (token.Trim() != "") { token = token.Trim(); ScopingBehavior?scopeBehavior = null; if (!string.IsNullOrWhiteSpace(alias)) { if (token.EndsWith("NoScope", StringComparison.InvariantCultureIgnoreCase)) { scopeBehavior = ScopingBehavior.DoNotScope; } if (token.EndsWith("WithScope", StringComparison.InvariantCultureIgnoreCase)) { scopeBehavior = ScopingBehavior.ScopeAnyway; } } tokens.Add(new TokenPair(TokenType.CollectionOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context), scopeBehavior)); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } if (!string.IsNullOrWhiteSpace(alias)) { context.AdvanceLocation("each ".Length + alias.Length); tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation)); } } else if (trimmedToken.Equals("/each", StringComparison.InvariantCultureIgnoreCase)) { if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.CollectionOpen) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.CollectionClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "each", "{{#each name}}")); } } else if (trimmedToken.StartsWith("#while ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "while"); scopestack.Push(new ScopeStackItem(TokenType.WhileLoopOpen, token, match.Index)); if (token.Trim() != "") { token = token.Trim(); tokens.Add(new TokenPair(TokenType.WhileLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } } else if (trimmedToken.Equals("/while", StringComparison.InvariantCultureIgnoreCase)) { if (scopestack.Any() && 
scopestack.Peek().TokenType == TokenType.WhileLoopOpen) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.WhileLoopClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "while", "{{#while Expression}}")); } } else if (trimmedToken.StartsWith("#do ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "do"); scopestack.Push(new ScopeStackItem(TokenType.DoLoopOpen, token, match.Index)); if (token.Trim() != "") { token = token.Trim(); tokens.Add(new TokenPair(TokenType.DoLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } } else if (trimmedToken.Equals("/do", StringComparison.InvariantCultureIgnoreCase)) { if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.DoLoopOpen) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.DoLoopClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "do", "{{#do Expression}}")); } } else if (trimmedToken.StartsWith("#repeat ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "repeat"); scopestack.Push(new ScopeStackItem(TokenType.RepeatLoopOpen, token, match.Index)); if (token.Trim() != "") { token = token.Trim(); tokens.Add(new TokenPair(TokenType.RepeatLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } } else if (trimmedToken.Equals("/repeat", StringComparison.InvariantCultureIgnoreCase)) { if 
(scopestack.Any() && scopestack.Peek().TokenType == TokenType.RepeatLoopOpen) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.RepeatLoopClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "repeat", "{{#repeat Expression}}")); } } else if (trimmedToken.StartsWith("#if ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "if"); var eval = EvaluateNameFromToken(token); token = eval.Value; if (eval.Name != null) { context.Errors.Add(new MorestachioSyntaxError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "^if", "AS", "No Alias")); } scopestack.Push(new ScopeStackItem(TokenType.If, token, match.Index)); if (token.Trim() != "") { token = token.Trim(); tokens.Add(new TokenPair(TokenType.If, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } } else if (trimmedToken.StartsWith("^if ", true, CultureInfo.InvariantCulture)) { var token = TrimToken(trimmedToken, "if", '^'); var eval = EvaluateNameFromToken(token); token = eval.Value; if (eval.Name != null) { context.Errors.Add(new MorestachioSyntaxError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "^if", "AS", "No Alias")); } scopestack.Push(new ScopeStackItem(TokenType.IfNot, token, match.Index)); if (token.Trim() != "") { token = token.Trim(); tokens.Add(new TokenPair(TokenType.IfNot, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else { context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "")); } } else if (trimmedToken.Equals("/if", 
StringComparison.InvariantCultureIgnoreCase)) { EndIf(match, "/If"); } else if (trimmedToken.Equals("#ifelse", StringComparison.InvariantCultureIgnoreCase)) { EndIf(match, "#ifelse"); BeginElse(match); } else if (trimmedToken.Equals("#else", StringComparison.InvariantCultureIgnoreCase)) { BeginElse(match); } else if (trimmedToken.Equals("/else", StringComparison.InvariantCultureIgnoreCase)) { if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.Else) { var token = scopestack.Pop().Value; tokens.Add(new TokenPair(TokenType.ElseClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError( context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "else", "{{#else name}}")); } } else if (trimmedToken.StartsWith("#var ", true, CultureInfo.InvariantCulture)) { tokens.Add(ExpressionParser.TokenizeVariableAssignment(trimmedToken, context, TokenType.VariableVar)); } else if (trimmedToken.StartsWith("#let ", true, CultureInfo.InvariantCulture)) { tokens.Add(ExpressionParser.TokenizeVariableAssignment(trimmedToken, context, TokenType.VariableLet)); } else if (trimmedToken.StartsWith("^")) { //open inverted group var token = trimmedToken.TrimStart('^').Trim(); var eval = EvaluateNameFromToken(token); token = eval.Value; var alias = eval.Name; scopestack.Push(new ScopeStackItem(TokenType.InvertedElementOpen, alias ?? 
token, match.Index)); tokens.Add(new TokenPair(TokenType.InvertedElementOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); if (!string.IsNullOrWhiteSpace(alias)) { context.AdvanceLocation(1 + alias.Length); tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation)); } } else if (trimmedToken.StartsWith("&")) { //escaped single element var token = trimmedToken.TrimStart('&').Trim(); tokens.Add(new TokenPair(TokenType.UnescapedSingleValue, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } else if (trimmedToken.StartsWith("!")) { //it's a comment drop this on the floor, no need to even yield it. if (trimmedToken.Equals("!")) { //except for when its a block comment then set the isCommentBlock flag context.CommentIntend++; } } else if (trimmedToken.Equals("#NL", StringComparison.InvariantCultureIgnoreCase)) { tokens.Add(new TokenPair(TokenType.WriteLineBreak, trimmedToken, context.CurrentLocation)); } else if (trimmedToken.Equals("#TNL", StringComparison.InvariantCultureIgnoreCase)) { tokens.Add(new TokenPair(TokenType.TrimLineBreak, trimmedToken, context.CurrentLocation)); } else if (trimmedToken.Equals("#TNLS", StringComparison.InvariantCultureIgnoreCase)) { tokens.Add(new TokenPair(TokenType.TrimLineBreaks, trimmedToken, context.CurrentLocation)); } else if (trimmedToken.Equals("#TRIMALL", StringComparison.InvariantCultureIgnoreCase)) { tokens.Add(new TokenPair(TokenType.TrimEverything, trimmedToken, context.CurrentLocation)); } else if (trimmedToken.StartsWith("#SET OPTION ", StringComparison.InvariantCultureIgnoreCase)) { var token = TrimToken(trimmedToken, "SET OPTION "); var expectEquals = false; string name = null; IMorestachioExpression value = null; for (int i = 0; i < token.Length; i++) { var c = token[i]; if (IsWhiteSpaceDelimiter(c)) { expectEquals = true; continue; } if (expectEquals || c == '=') { if (c != '=') { context.Errors.Add(new 
MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "/", "{{#SET OPTION Name = Value}}", $" Expected to find '=' or whitespace after name but found '{c}'")); } else { name = token.Substring(0, i - 1).Trim(); value = ExpressionParser.ParseExpression(token.Substring(i + 1).Trim(), context); break; } } } if (string.IsNullOrWhiteSpace(name)) { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "/", "{{#SET OPTION Name = Value}}", $" Expected to find '=' after name")); break; } if (value == null) { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "/", "{{#SET OPTION Name = Value}}", $" Expected to find an expression after '='")); break; } await context.SetOption(name, value, parserOptions); } //else if (tokenValue.Equals("{{/TRIMALL}}", StringComparison.InvariantCultureIgnoreCase)) //{ // tokens.Add(new TokenPair(TokenType.StopTrimEverything, tokenValue, context.CurrentLocation)); //} else { //check for custom DocumentItem provider var customDocumentProvider = parserOptions.CustomDocumentItemProviders.FirstOrDefault(e => e.ShouldTokenize(trimmedToken)); if (customDocumentProvider != null) { var tokenPairs = customDocumentProvider .Tokenize(new CustomDocumentItemProvider.TokenInfo(trimmedToken, context, scopestack), parserOptions); tokens.AddRange(tokenPairs); } else if (trimmedToken.StartsWith("#")) { //open group var token = trimmedToken.TrimStart('#').Trim(); var eval = EvaluateNameFromToken(token); token = eval.Value; var alias = eval.Name; scopestack.Push(new ScopeStackItem(TokenType.ElementOpen, alias ?? 
token, match.Index)); tokens.Add(new TokenPair(TokenType.ElementOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); if (!string.IsNullOrWhiteSpace(alias)) { context.AdvanceLocation(3 + alias.Length); tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation)); } } else if (trimmedToken.StartsWith("/")) { var token = trimmedToken.TrimStart('/').Trim(); //close group if (!scopestack.Any()) { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "/", "{{#path}}", " There are more closing elements then open.")); } else { var item = scopestack.Peek(); if ((item.TokenType == TokenType.ElementOpen || item.TokenType == TokenType.InvertedElementOpen) && item.Value == token) { scopestack.Pop(); tokens.Add(new TokenPair(TokenType.ElementClose, token, context.CurrentLocation)); } else { context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation .AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)), "/", "{{#path}}", " There are more closing elements then open.")); } } } else { //unsingle value. var token = trimmedToken.Trim(); tokens.Add(new TokenPair(TokenType.EscapedSingleValue, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context))); } } //move forward in the string. 
if (context.Character > match.Index + match.Length) { throw new InvalidOperationException("Internal index location error"); } context.SetLocation(match.Index + match.Length); } } if (context.Character < templateString.Length) { tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character), context.CurrentLocation)); } if (scopestack.Any() || parserOptions.CustomDocumentItemProviders.Any(f => f.ScopeStack.Any())) { foreach (var unclosedScope in scopestack .Concat(parserOptions.CustomDocumentItemProviders.SelectMany(f => f.ScopeStack)) .Select(k => { return(new { scope = k.TokenType.ToString(), location = HumanizeCharacterLocation(k.Index, context.Lines) }); }).Reverse()) { context.Errors.Add(new MorestachioUnclosedScopeError(unclosedScope.location .AddWindow(new CharacterSnippedLocation(1, -1, "")), unclosedScope.scope, "")); } } return(new TokenizerResult(tokens)); }
/// <summary>
/// Test-teardown hook: disposes the <c>ParserOptions</c> instance used by the fixture.
/// </summary>
public void CleanUp() => ParserOptions.Dispose();
/// <summary>
/// Factory helper: wraps the given native value in a new managed <see cref="ParserOptions"/>.
/// </summary>
/// <param name="native">the native value to copy and wrap</param>
/// <param name="skipVTables">forwarded to the wrapping constructor</param>
public static ParserOptions __CreateInstance(ParserOptions.Internal native, bool skipVTables = false)
    => new ParserOptions(native, skipVTables);
/// <summary>
/// Parses <paramref name="input"/> as a template, renders it, and asserts the output equals
/// <paramref name="expected"/>. Unless errors occur, a second "roundtrip" pass re-parses the
/// template from its <c>ToText()</c> form and checks it renders to the same expected output.
/// </summary>
/// <param name="expected">The expected render (or roundtrip) output.</param>
/// <param name="input">The template source text.</param>
/// <param name="isLiquid">Lex/parse in Liquid mode instead of the default scriban mode.</param>
/// <param name="isRoundtripTest">When true, the "result" compared is the template's <c>ToText()</c> form rather than a render.</param>
/// <param name="supportExactRoundtrip">When true (and not liquid), the roundtrip text must match the input byte-for-byte.</param>
/// <param name="model">Optional model object; a default script object is built when null.</param>
/// <param name="specialLiquid">Raises the expression depth limit to 500 and turns parse/render failures into thrown exceptions.</param>
/// <param name="expectParsingErrorForRountrip">Expect the roundtrip pass to fail parsing. NOTE(review): parameter name carries a typo ("Rountrip") but renaming would break named-argument callers.</param>
public static void AssertTemplate(string expected, string input, bool isLiquid = false, bool isRoundtripTest = false, bool supportExactRoundtrip = true, object model = null, bool specialLiquid = false, bool expectParsingErrorForRountrip = false)
{
    var parserOptions = new ParserOptions() { LiquidFunctionsToScriban = isLiquid };
    var lexerOptions = new LexerOptions() { Mode = isLiquid ? ScriptMode.Liquid : ScriptMode.Default };
    if (isRoundtripTest)
    {
        // Trivia (whitespace/comments) must be kept for ToText() to reproduce the source.
        lexerOptions.KeepTrivia = true;
    }
    if (specialLiquid)
    {
        parserOptions.ExpressionDepthLimit = 500;
    }
#if EnableTokensOutput
    {
        // Optional diagnostic dump of the raw lexer token stream.
        Console.WriteLine("Tokens");
        Console.WriteLine("======================================");
        var lexer = new Lexer(input, options: lexerOptions);
        foreach (var token in lexer)
        {
            Console.WriteLine($"{token.Type}: {token.GetText(input)}");
        }
        Console.WriteLine();
    }
#endif
    string roundtripText = null;

    // We loop first on input text, then on roundtrip
    while (true)
    {
        bool isRoundtrip = roundtripText != null;
        bool hasErrors = false;
#if SCRIBAN_ASYNC
        bool hasException = false;
#endif
        if (isRoundtrip)
        {
            Console.WriteLine("Roundtrip");
            Console.WriteLine("======================================");
            Console.WriteLine(roundtripText);
            // The roundtrip pass always re-parses in default (scriban) mode.
            lexerOptions.Mode = ScriptMode.Default;

            if (lexerOptions.Mode == ScriptMode.Default && !isLiquid && supportExactRoundtrip)
            {
                Console.WriteLine("Checking Exact Roundtrip - Input");
                Console.WriteLine("======================================");
                TextAssert.AreEqual(input, roundtripText);
            }
            input = roundtripText;
        }
        else
        {
            Console.WriteLine("Input");
            Console.WriteLine("======================================");
            Console.WriteLine(input);
        }

        var template = Template.Parse(input, "text", parserOptions, lexerOptions);

        var result = string.Empty;
#if SCRIBAN_ASYNC
        var resultAsync = string.Empty;
#endif
        if (template.HasErrors)
        {
            hasErrors = true;
            // Concatenate all parser messages so they can be compared against `expected`.
            for (int i = 0; i < template.Messages.Count; i++)
            {
                var message = template.Messages[i];
                if (i > 0)
                {
                    result += "\n";
                }
                result += message;
            }
            if (specialLiquid && !isRoundtrip)
            {
                throw new InvalidOperationException("Parser errors: " + result);
            }
        }
        else
        {
            if (isRoundtripTest)
            {
                result = template.ToText();
            }
            else
            {
                Assert.NotNull(template.Page);

                if (!isRoundtrip)
                {
                    // Dumps the roundtrip version
                    var lexerOptionsForTrivia = lexerOptions;
                    lexerOptionsForTrivia.KeepTrivia = true;
                    var templateWithTrivia = Template.Parse(input, "input", parserOptions, lexerOptionsForTrivia);
                    roundtripText = templateWithTrivia.ToText();
                }

                try
                {
                    // Setup a default model context for the tests
                    if (model == null)
                    {
                        var scriptObj = new ScriptObject
                        {
                            ["page"] = new ScriptObject { ["title"] = "This is a title" },
                            ["user"] = new ScriptObject { ["name"] = "John" },
                            ["product"] = new ScriptObject { ["title"] = "Orange", ["type"] = "fruit" },
                            ["products"] = new ScriptArray()
                            {
                                new ScriptObject { ["title"] = "Orange", ["type"] = "fruit" },
                                new ScriptObject { ["title"] = "Banana", ["type"] = "fruit" },
                                new ScriptObject { ["title"] = "Apple", ["type"] = "fruit" },
                                new ScriptObject { ["title"] = "Computer", ["type"] = "electronics" },
                                new ScriptObject { ["title"] = "Mobile Phone", ["type"] = "electronics" },
                                new ScriptObject { ["title"] = "Table", ["type"] = "furniture" },
                                new ScriptObject { ["title"] = "Sofa", ["type"] = "furniture" },
                            }
                        };
                        scriptObj.Import(typeof(SpecialFunctionProvider));
                        model = scriptObj;
                    }

                    // Render sync
                    {
                        var context = NewTemplateContext(isLiquid);
                        context.PushOutput(new TextWriterOutput(new StringWriter() { NewLine = "\n" }));
                        var contextObj = new ScriptObject();
                        contextObj.Import(model);
                        context.PushGlobal(contextObj);
                        result = template.Render(context);
                    }
#if SCRIBAN_ASYNC
                    // Render async
                    {
                        var asyncContext = NewTemplateContext(isLiquid);
                        asyncContext.PushOutput(new TextWriterOutput(new StringWriter() { NewLine = "\n" }));
                        var contextObj = new ScriptObject();
                        contextObj.Import(model);
                        asyncContext.PushGlobal(contextObj);
                        resultAsync = template.RenderAsync(asyncContext).Result;
                    }
#endif
                }
                catch (Exception exception)
                {
#if SCRIBAN_ASYNC
                    hasException = true;
#endif
                    if (specialLiquid)
                    {
                        throw;
                    }
                    else
                    {
                        // Render failures are compared as text against `expected`.
                        result = GetReason(exception);
                    }
                }
            }
        }

        var testContext = isRoundtrip ? "Roundtrip - " : String.Empty;
        Console.WriteLine($"{testContext}Result");
        Console.WriteLine("======================================");
        Console.WriteLine(result);
        Console.WriteLine($"{testContext}Expected");
        Console.WriteLine("======================================");
        Console.WriteLine(expected);

        if (isRoundtrip && expectParsingErrorForRountrip)
        {
            Assert.True(hasErrors, "The roundtrip test is expecting an error");
            Assert.AreNotEqual(expected, result);
        }
        else
        {
            TextAssert.AreEqual(expected, result);
        }

#if SCRIBAN_ASYNC
        // The async render must agree with the sync render (only meaningful on a clean first pass).
        if (!isRoundtrip && !isRoundtripTest && !hasErrors && !hasException)
        {
            Console.WriteLine("Checking async");
            Console.WriteLine("======================================");
            TextAssert.AreEqual(expected, resultAsync);
        }
#endif
        // Stop after the roundtrip pass, or immediately when no roundtrip is possible.
        if (isRoundtripTest || isRoundtrip || hasErrors)
        {
            break;
        }
    }
}
/// <summary>
/// Creates a managed wrapper around a freshly allocated copy of the given native value.
/// </summary>
private ParserOptions(ParserOptions.Internal native, bool skipVTables = false)
    : this(__CopyValue(native), skipVTables)
{
    // This wrapper allocated the native copy, so it is marked as owning it.
    __ownsNativeInstance = true;
    // Register the instance so the native pointer can be mapped back to this wrapper.
    NativeToManagedMap[__Instance] = this;
}
/// <inheritdoc />
public override IEnumerable<TokenPair> Tokenize(TokenInfo token, ParserOptions options)
{
    // Capture the location before expression parsing, matching the original evaluation order.
    var location = token.TokenizerContext.CurrentLocation;
    // Strip the opening tag from the raw token text and parse the remainder as an expression.
    var expressionText = token.Token.Remove(0, OpenTag.Length).Trim();
    var expression = ExpressionParser.ParseExpression(expressionText, token.TokenizerContext);
    yield return new TokenPair(OpenTag.Trim(), token.Token, location, expression);
}
/// <summary>
/// Copies the given native value into freshly allocated unmanaged memory.
/// </summary>
private static ParserOptions.Internal* __CopyValue(ParserOptions.Internal native)
{
    // 192 is presumably the size in bytes of the native struct layout — confirm against the generated binding.
    var ret = Marshal.AllocHGlobal(192);
    // Invoke the native copy-constructor to clone the value into the new buffer.
    CppSharp.Parser.ParserOptions.Internal.cctor_1(ret, new global::System.IntPtr(&native));
    return (ParserOptions.Internal*) ret;
}
/// <summary>
/// Evaluates the condition of <paramref name="element"/> using this evaluator's expander and state.
/// </summary>
public bool EvaluateConditionWithCurrentState(ProjectElement element, ExpanderOptions expanderOptions, ParserOptions parserOptions)
    => EvaluateCondition(element, expanderOptions, parserOptions, _expander, this);
/// <summary>
/// Parses the current buffer snapshot into an AST and, on success, publishes the AST and
/// its parse diagnostics, then queues the document for analysis.
/// </summary>
/// <exception cref="OperationCanceledException">
/// Thrown when the token is cancelled or the buffer changed while parsing was in progress.
/// </exception>
private void Parse(CancellationToken cancellationToken)
{
    CollectingErrorSink sink = null;
    int version;
    Parser parser;
    //Log?.Log(TraceEventType.Verbose, $"Parse begins: {Name}");
    lock (_syncObj)
    {
        // Snapshot the buffer version so stale results can be detected after parsing.
        version = _buffer.Version;
        var options = new ParserOptions
        {
            StubFile = FilePath != null && Path.GetExtension(FilePath).Equals(".pyi", FileSystem.StringComparison)
        };
        if (ModuleType == ModuleType.User)
        {
            // Only user modules collect parse errors.
            sink = new CollectingErrorSink();
            options.ErrorSink = sink;
        }
        parser = Parser.CreateParser(new StringReader(_buffer.Text), Interpreter.LanguageVersion, options);
    }

    // Parsing itself runs outside the lock.
    var ast = parser.ParseFile(Uri);

    //Log?.Log(TraceEventType.Verbose, $"Parse complete: {Name}");
    lock (_syncObj)
    {
        cancellationToken.ThrowIfCancellationRequested();
        if (version != _buffer.Version)
        {
            // The buffer changed while we were parsing; discard this result.
            throw new OperationCanceledException();
        }

        // Stored nodes are no longer valid.
        _astMap.Clear();
        _astMap[this] = ast;

        _parseErrors = sink?.Diagnostics ?? Array.Empty <DiagnosticsEntry>();

        // Do not report issues with libraries or stubs
        if (sink != null)
        {
            _diagnosticsService?.Replace(Uri, _parseErrors, DiagnosticSource.Parser);
        }

        ContentState = State.Parsed;
        Analysis = new EmptyAnalysis(Services, this);
    }

    // Notify listeners outside the lock.
    NewAst?.Invoke(this, EventArgs.Empty);

    if (ContentState < State.Analyzing)
    {
        ContentState = State.Analyzing;
        var analyzer = Services.GetService <IPythonAnalyzer>();
        analyzer.EnqueueDocumentForAnalysis(this, ast, version);
    }

    lock (_syncObj)
    {
        _parsingTask = null;
    }
}
/// <inheritdoc />
public override IDocumentItem CreateDocumentItem(string tagKeyword, string value, TokenPair token, ParserOptions options, IEnumerable<ITokenOption> tagCreationOptions)
    => new TagDocumentItem(token.TokenLocation, _action, tagKeyword, value, tagCreationOptions);
/// <summary>
/// Converts a tokenized header buffer into a decoded unicode string.
/// </summary>
/// <remarks>
/// Runs of identically-encoded encoded-word tokens have their payloads decoded as one
/// continuous block before charset conversion, so a quoted-printable triplet (or base64
/// quartet) split across token boundaries still decodes correctly.
/// </remarks>
/// <param name="options">The parser options used for charset conversion.</param>
/// <param name="tokens">The tokens produced by the tokenizer.</param>
/// <param name="input">The raw input buffer the tokens index into.</param>
/// <param name="inbuf">A fixed pointer to <paramref name="input"/>.</param>
/// <param name="length">The number of input bytes; also used to size the scratch buffer.</param>
/// <returns>The decoded string.</returns>
static unsafe string DecodeTokens (ParserOptions options, IList<Token> tokens, byte[] input, byte* inbuf, int length)
{
    var decoded = new StringBuilder (length);
    var qp = new QuotedPrintableDecoder (true);
    var base64 = new Base64Decoder ();
    var output = new byte[length];
    Token token;
    int len;

    fixed (byte* outbuf = output) {
        for (int i = 0; i < tokens.Count; i++) {
            token = tokens[i];

            if (token.Encoding != ContentEncoding.Default) {
                // In order to work around broken mailers, we need to combine the raw
                // decoded content of runs of identically encoded word tokens before
                // converting to unicode strings.
                ContentEncoding encoding = token.Encoding;
                int codepage = token.CodePage;
                IMimeDecoder decoder;
                int outlen, n;
                byte* outptr;

                // find the end of this run (and measure the buffer length we'll need)
                for (n = i + 1; n < tokens.Count; n++) {
                    if (tokens[n].Encoding != encoding || tokens[n].CodePage != codepage)
                        break;
                }

                // base64 / quoted-printable decode each of the tokens...
                if (encoding == ContentEncoding.Base64)
                    decoder = base64;
                else
                    decoder = qp;

                outptr = outbuf;
                outlen = 0;

                do {
                    // Note: by not resetting the decoder state each loop, we effectively
                    // treat the payloads as one continuous block, thus allowing us to
                    // handle cases where a hex-encoded triplet of a quoted-printable
                    // encoded payload is split between 2 or more encoded-word tokens.
                    len = DecodeToken (tokens[i], decoder, inbuf, outptr);
                    outptr += len;
                    outlen += len;
                    i++;
                } while (i < n);

                decoder.Reset ();
                // step back so the outer loop's i++ lands on the first token of the next run
                i--;

                var unicode = CharsetUtils.ConvertToUnicode (options, codepage, output, 0, outlen, out len);
                decoded.Append (unicode, 0, len);
            } else if (token.Is8bit) {
                // *sigh* I hate broken mailers...
                var unicode = CharsetUtils.ConvertToUnicode (options, input, token.StartIndex, token.Length, out len);
                decoded.Append (unicode, 0, len);
            } else {
                // pure 7bit ascii, a breath of fresh air...
                byte* inptr = inbuf + token.StartIndex;
                byte* inend = inptr + token.Length;

                while (inptr < inend)
                    decoded.Append ((char) *inptr++);
            }
        }
    }

    return decoded.ToString ();
}
/// <summary>
/// Should return any kind of token Pair that encapsulates the value for the DocumentItem
/// </summary>
/// <param name="token">the matched tag text together with its tokenizer context and scope stack</param>
/// <param name="options">the options of the current parsing run</param>
/// <returns>the token pairs produced for this tag</returns>
public abstract IEnumerable <TokenPair> Tokenize(TokenInfo token, ParserOptions options);
/// <summary>
/// Decodes the phrase.
/// </summary>
/// <remarks>
/// Decodes the phrase(s) within the specified buffer using the supplied parser options.
/// </remarks>
/// <returns>The decoded phrase.</returns>
/// <param name="options">The parser options to use.</param>
/// <param name="phrase">The phrase to decode.</param>
/// <exception cref="System.ArgumentNullException">
/// <para><paramref name="options"/> is <c>null</c>.</para>
/// <para>-or-</para>
/// <para><paramref name="phrase"/> is <c>null</c>.</para>
/// </exception>
public static string DecodePhrase (ParserOptions options, byte[] phrase)
{
    // Check for null before dereferencing phrase.Length; previously a null phrase
    // threw NullReferenceException instead of the documented ArgumentNullException.
    if (phrase == null)
        throw new ArgumentNullException ("phrase");

    return DecodePhrase (options, phrase, 0, phrase.Length);
}
/// <summary>
/// ctor
/// </summary>
/// <param name="index">the current index of the item inside the collection</param>
/// <param name="last">true if its the last item</param>
/// <param name="options">the parser options forwarded to the base context</param>
/// <param name="key">the key forwarded to the base context</param>
public ContextCollection(long index, bool last, [NotNull] ParserOptions options, string key)
    : base(options, key)
{
    Index = index;
    Last = last;
}
/// <summary>
/// Decodes unstructured text.
/// </summary>
/// <remarks>
/// Decodes the unstructured text buffer using the specified parser options.
/// </remarks>
/// <returns>The decoded text.</returns>
/// <param name="options">The parser options to use.</param>
/// <param name="text">The text to decode.</param>
/// <exception cref="System.ArgumentNullException">
/// <para><paramref name="options"/> is <c>null</c>.</para>
/// <para>-or-</para>
/// <para><paramref name="text"/> is <c>null</c>.</para>
/// </exception>
public static string DecodeText (ParserOptions options, byte[] text)
{
    // Check for null before dereferencing text.Length; previously a null text
    // threw NullReferenceException instead of the documented ArgumentNullException.
    if (text == null)
        throw new ArgumentNullException ("text");

    return DecodeText (options, text, 0, text.Length);
}
/// <summary>
/// Writes a debug message to the configured logger, if one is set on the options.
/// </summary>
public static void LogDebug(string eventId, string message, [ExternalData] ParserOptions options)
    => options.Logger?.LogDebug(eventId, message, null);
/// <summary>
/// Checks whether or not the enum has the given flag set.
/// </summary>
/// <param name="en">the value to inspect</param>
/// <param name="cmp">the flag(s) to test for</param>
/// <returns>
/// true when any bit of <paramref name="cmp"/> is set in <paramref name="en"/>
/// (note: unlike <see cref="System.Enum.HasFlag"/>, this is an any-bit test).
/// </returns>
public static bool HasFlag(this ParserOptions en, ParserOptions cmp) => (en & cmp) != 0;
/// <summary>
/// Wraps an existing native pointer without copying it; no ownership flag is set here,
/// so this ctor does not make the wrapper responsible for freeing the native memory.
/// </summary>
protected ParserOptions(ParserOptions.Internal* native, bool isInternalImpl = false)
{
    __Instance = new global::System.IntPtr(native);
}
/// <summary>
/// Runs a single parser fixture: parses the JS file and compares the formatted AST
/// (or the expected failure) against the matching .tree/.module/.failure json file.
/// </summary>
/// <param name="fixture">the fixture path relative to the Fixtures directory</param>
public void ExecuteTestCase(string fixture)
{
    var options = new ParserOptions { Tokens = true };

    string treeFilePath, failureFilePath, moduleFilePath;
    var jsFilePath = Path.Combine(GetFixturesPath(), "Fixtures", fixture);
    if (jsFilePath.EndsWith(".source.js"))
    {
        // For ".source.js" fixtures the expectation files drop both extensions.
        treeFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(Path.GetFileNameWithoutExtension(jsFilePath))) + ".tree.json";
        failureFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(Path.GetFileNameWithoutExtension(jsFilePath))) + ".failure.json";
        moduleFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(Path.GetFileNameWithoutExtension(jsFilePath))) + ".module.json";
    }
    else
    {
        treeFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(jsFilePath)) + ".tree.json";
        failureFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(jsFilePath)) + ".failure.json";
        moduleFilePath = Path.Combine(Path.GetDirectoryName(jsFilePath), Path.GetFileNameWithoutExtension(jsFilePath)) + ".module.json";
    }

    var script = File.ReadAllText(jsFilePath);
    if (jsFilePath.EndsWith(".source.js"))
    {
        // ".source.js" fixtures hold the actual test source inside a string literal
        // assigned in the first variable declaration; unwrap it.
        var parser = new JavaScriptParser(script);
        var program = parser.ParseScript();
        var source = program.Body.First().As <VariableDeclaration>().Declarations.First().As <VariableDeclarator>().Init.As <Literal>().StringValue;
        script = source;
    }

    var expected = "";
    var invalid = false;

    var filename = Path.GetFileNameWithoutExtension(jsFilePath);

    // Heuristic: fixtures named with module/export/import are parsed as modules...
    var isModule = filename.Contains("module") || filename.Contains("export") || filename.Contains("import");

    if (!filename.Contains(".module"))
    {
        // ...unless the path indicates a dynamic-import or plain-script fixture.
        isModule &= !jsFilePath.Contains("dynamic-import") && !jsFilePath.Contains("script");
    }

    var sourceType = isModule ? SourceType.Module : SourceType.Script;

#pragma warning disable 162
    if (File.Exists(moduleFilePath))
    {
        // A .module.json expectation forces module parsing.
        sourceType = SourceType.Module;
        expected = File.ReadAllText(moduleFilePath);
        if (WriteBackExpectedTree)
        {
            // Regenerate the expectation file when it no longer matches the actual output.
            var actual = ParseAndFormat(sourceType, script, options);
            if (!CompareTreesInternal(actual, expected))
            {
                File.WriteAllText(moduleFilePath, actual);
            }
        }
    }
    else if (File.Exists(treeFilePath))
    {
        expected = File.ReadAllText(treeFilePath);
        if (WriteBackExpectedTree)
        {
            var actual = ParseAndFormat(sourceType, script, options);
            if (!CompareTreesInternal(actual, expected))
            {
                File.WriteAllText(treeFilePath, actual);
            }
        }
    }
    else if (File.Exists(failureFilePath))
    {
        // A .failure.json expectation means the fixture is expected to fail parsing.
        invalid = true;
        expected = File.ReadAllText(failureFilePath);
        if (WriteBackExpectedTree)
        {
            var actual = ParseAndFormat(sourceType, script, options);
            if (!CompareTreesInternal(actual, expected))
            {
                File.WriteAllText(failureFilePath, actual);
            }
        }
#pragma warning restore 162
    }
    else
    {
        // cannot compare
        return;
    }

    invalid |= filename.Contains("error") || filename.Contains("invalid") && !filename.Contains("invalid-yield-object-");

    if (!invalid)
    {
        options.Tolerant = true;

        var actual = ParseAndFormat(sourceType, script, options);
        CompareTrees(actual, expected, jsFilePath);
    }
    else
    {
        options.Tolerant = false;

        // TODO: check the accuracy of the message and of the location
        Assert.Throws <ParserException>(() => ParseAndFormat(sourceType, script, options));
    }
}
/// <summary>
/// Copies the given native value into freshly allocated unmanaged memory.
/// </summary>
private static void* __CopyValue(ParserOptions.Internal native)
{
    // 144 is presumably the size in bytes of the native struct layout — confirm against the generated binding.
    var ret = Marshal.AllocHGlobal(144);
    // Invoke the native copy-constructor to clone the value into the new buffer.
    CppSharp.Parser.ParserOptions.Internal.cctor_2(ret, new global::System.IntPtr(&native));
    return ret.ToPointer();
}
/// <summary>
/// Keeps the supplied type-reference parser for later use and forwards the options to the base parser.
/// </summary>
internal PropertyParser(TypeRefParser typeRefParser, ParserOptions parserOptions)
    : base(parserOptions)
{
    _typeRefParser = typeRefParser;
}
/// <summary>
/// Factory helper: wraps the given native value in a new managed <see cref="ParserOptions"/>.
/// </summary>
public static ParserOptions __CreateInstance(ParserOptions.Internal native)
    => new ParserOptions(native);
/// <summary>
/// Returns the week number for the given point in time by delegating to the
/// <see cref="DateTime"/>-based overload.
/// </summary>
public static int WeekOfDate(DateTimeOffset time, [ExternalData] ParserOptions parserOptions)
    => WeekOfDate(time.DateTime, parserOptions);
/// <summary>
/// Creates a managed wrapper around a freshly allocated copy of the given native value.
/// </summary>
private ParserOptions(ParserOptions.Internal native)
    : this(__CopyValue(native))
{
    // This wrapper allocated the native copy, so it is marked as owning it.
    __ownsNativeInstance = true;
    // Register the instance so the native pointer can be mapped back to this wrapper.
    NativeToManagedMap[__Instance] = this;
}
/// <summary>
/// Parses <paramref name="text"/> as a <see cref="DateTimeOffset"/> using the culture
/// configured on the parser options.
/// </summary>
public static DateTimeOffset ParseDateTimeOffset(string text, [ExternalData] ParserOptions parserOptions)
    => DateTimeOffset.Parse(text, parserOptions.CultureInfo);
/// <summary>
/// Splits an unstructured text buffer into lwsp, raw-word and rfc2047 encoded-word tokens.
/// </summary>
/// <remarks>
/// In loose RFC 2047 compliance mode an extra effort is made to recognize encoded-word
/// tokens that have been merged with surrounding words by broken mailers.
/// </remarks>
/// <param name="options">The parser options (compliance mode).</param>
/// <param name="inbuf">A fixed pointer to the input buffer.</param>
/// <param name="startIndex">The index of the first byte to tokenize.</param>
/// <param name="length">The number of bytes to tokenize.</param>
/// <returns>The list of tokens in input order.</returns>
static unsafe IList<Token> TokenizeText (ParserOptions options, byte* inbuf, int startIndex, int length)
{
    byte* text, word, inptr = inbuf + startIndex;
    byte* inend = inptr + length;
    var tokens = new List<Token> ();
    bool encoded = false;
    Token token = null;
    Token lwsp = null;
    bool ascii;
    int n;

    while (inptr < inend) {
        text = inptr;

        // consume a run of linear whitespace (if any) into an lwsp token
        while (inptr < inend && IsLwsp (*inptr))
            inptr++;

        if (inptr > text)
            lwsp = new Token ((int) (text - inbuf), (int) (inptr - text));
        else
            lwsp = null;

        if (inptr < inend) {
            word = inptr;
            ascii = true;

            if (options.Rfc2047ComplianceMode == RfcComplianceMode.Loose) {
                // Make an extra effort to detect and separate encoded-word
                // tokens that have been merged with other words.
                bool is_rfc2047 = false;

                if (inptr + 2 < inend && *inptr == '=' && *(inptr + 1) == '?') {
                    inptr += 2;

                    // skip past the charset (if one is even declared, sigh)
                    while (inptr < inend && *inptr != '?') {
                        ascii = ascii && IsAscii (*inptr);
                        inptr++;
                    }

                    // sanity check encoding type
                    if (inptr + 3 >= inend || *inptr != '?' || !IsBbQq (*(inptr + 1)) || *(inptr + 2) != '?') {
                        ascii = true;
                        goto non_rfc2047;
                    }

                    inptr += 3;

                    // find the end of the rfc2047 encoded word token
                    while (inptr + 2 < inend && !(*inptr == '?' && *(inptr + 1) == '=')) {
                        ascii = ascii && IsAscii (*inptr);
                        inptr++;
                    }

                    if (inptr + 2 > inend || *inptr != '?' || *(inptr + 1) != '=') {
                        // didn't find an end marker...
                        inptr = word + 2;
                        ascii = true;

                        goto non_rfc2047;
                    }

                    is_rfc2047 = true;
                    inptr += 2;
                }

            non_rfc2047:
                if (!is_rfc2047) {
                    // stop if we encounter a possible rfc2047 encoded
                    // token even if it's inside another word, sigh.
                    while (inptr < inend && !IsLwsp (*inptr)) {
                        if (inptr + 2 < inend && *inptr == '=' && *(inptr + 1) == '?')
                            break;

                        ascii = ascii && IsAscii (*inptr);
                        inptr++;
                    }
                }
            } else {
                // find the end of a run of text...
                while (inptr < inend && !IsLwsp (*inptr)) {
                    ascii = ascii && IsAscii (*inptr);
                    inptr++;
                }
            }

            n = (int) (inptr - word);
            if (TryGetEncodedWordToken (inbuf, word, n, out token)) {
                // rfc2047 states that you must ignore all whitespace between
                // encoded-word tokens
                if (!encoded && lwsp != null) {
                    // previous token was not encoded, so preserve whitespace
                    tokens.Add (lwsp);
                }

                tokens.Add (token);
                encoded = true;
            } else {
                // append the lwsp and atom tokens
                if (lwsp != null)
                    tokens.Add (lwsp);

                token = new Token ((int) (word - inbuf), n);
                token.Is8bit = !ascii;
                tokens.Add (token);

                encoded = false;
            }
        } else {
            // append the trailing lwsp token
            if (lwsp != null)
                tokens.Add (lwsp);

            break;
        }
    }

    return tokens;
}
/// <summary>
/// Evaluates the Condition attribute of <paramref name="element"/> with the given expander;
/// an empty condition is trivially true.
/// </summary>
static bool EvaluateCondition(ProjectElement element, ExpanderOptions expanderOptions, ParserOptions parserOptions, Expander <P, I> expander, LazyItemEvaluator <P, I, M, D> lazyEvaluator)
{
    // Missing/empty condition always passes.
    if (element.Condition.Length == 0)
    {
        return true;
    }

    return ConditionEvaluator.EvaluateCondition
    (
        element.Condition,
        parserOptions,
        expander,
        expanderOptions,
        GetCurrentDirectoryForConditionEvaluation(element, lazyEvaluator),
        element.ConditionLocation,
        lazyEvaluator._loggingService,
        lazyEvaluator._buildEventContext
    );
}
/// <summary>
/// Decodes the phrase and reports the charset codepage used by the most encoded-word tokens.
/// </summary>
/// <remarks>
/// <paramref name="codepage"/> defaults to UTF-8 when no token declares a codepage.
/// </remarks>
internal static string DecodePhrase (ParserOptions options, byte[] phrase, int startIndex, int count, out int codepage)
{
    codepage = Encoding.UTF8.CodePage;

    if (count == 0)
        return string.Empty;

    unsafe {
        fixed (byte* inbuf = phrase) {
            var tokens = TokenizePhrase (options, inbuf, startIndex, count);

            // collect the charsets used to encode each encoded-word token
            // (and the number of tokens each charset was used in)
            var codepages = new Dictionary<int, int> ();

            foreach (var token in tokens) {
                if (token.CodePage == 0)
                    continue;

                if (!codepages.ContainsKey (token.CodePage))
                    codepages.Add (token.CodePage, 1);
                else
                    codepages[token.CodePage]++;
            }

            int max = 0;

            // pick the codepage with the highest token count
            // (ties resolved by dictionary enumeration order)
            foreach (var kvp in codepages) {
                if (kvp.Value <= max)
                    continue;

                max = Math.Max (kvp.Value, max);
                codepage = kvp.Key;
            }

            return DecodeTokens (options, tokens, phrase, inbuf, count);
        }
    }
}
/// <summary>
/// Will be called to produce an Document item that must be executed
/// </summary>
/// <param name="tag">the keyword of the tag that was matched</param>
/// <param name="value">the value portion of the tag</param>
/// <param name="token">the token pair this document item is created from</param>
/// <param name="options">the options of the current parsing run</param>
/// <param name="tagCreationOptions">additional options attached to the token</param>
/// <returns>the document item to be executed</returns>
public abstract IDocumentItem CreateDocumentItem(string tag, string value, TokenPair token, ParserOptions options, IEnumerable <ITokenOption> tagCreationOptions);
/// <summary>
/// Decodes unstructured text.
/// </summary>
/// <remarks>
/// Decodes the unstructured text buffer starting at the given index and spanning
/// across the specified number of bytes using the supplied parser options.
/// </remarks>
/// <returns>The decoded text.</returns>
/// <param name="options">The parser options to use.</param>
/// <param name="text">The text to decode.</param>
/// <param name="startIndex">The starting index.</param>
/// <param name="count">The number of bytes to decode.</param>
/// <exception cref="System.ArgumentNullException">
/// <para><paramref name="options"/> is <c>null</c>.</para>
/// <para>-or-</para>
/// <para><paramref name="text"/> is <c>null</c>.</para>
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// <paramref name="startIndex"/> and <paramref name="count"/> do not specify
/// a valid range in the byte array.
/// </exception>
public static string DecodeText (ParserOptions options, byte[] text, int startIndex, int count)
{
	// Validate arguments in the same order as the other Decode* overloads.
	if (options == null)
		throw new ArgumentNullException (nameof (options));

	if (text == null)
		throw new ArgumentNullException (nameof (text));

	if (startIndex < 0 || startIndex > text.Length)
		throw new ArgumentOutOfRangeException (nameof (startIndex));

	if (count < 0 || startIndex + count > text.Length)
		throw new ArgumentOutOfRangeException (nameof (count));

	// Nothing to decode.
	if (count == 0)
		return string.Empty;

	unsafe {
		fixed (byte* inbuf = text) {
			var tokens = TokenizeText (options, inbuf, startIndex, count);

			return DecodeTokens (options, tokens, text, inbuf, count);
		}
	}
}
/// <inheritdoc />
public override bool ShouldParse(TokenPair token, ParserOptions options, IEnumerable<ITokenOption> tagCreationOptions)
{
    var tokenType = token.Type;
    // Note: the open tag is trimmed before comparison while the close tag is compared as-is.
    return tokenType.Equals(TagOpen.Trim()) || tokenType.Equals(TagClose);
}
/// <summary>
/// Keeps the supplied type-reference parser for later use and forwards the options to the base parser.
/// </summary>
internal FieldParser(TypeRefParser typeRefParser, ParserOptions parserOptions)
    : base(parserOptions)
{
    _typeRefParser = typeRefParser;
}
/// <summary>
/// Verifies that FrontMatterOnly mode stops the parser right after the front-matter exit
/// marker and that the remaining body can then be lexed and rendered separately.
/// </summary>
public void TestFrontMatterOnly()
{
    var options = new ParserOptions();

    // NOTE(review): the position assertions below depend on the exact line layout of this
    // verbatim literal; confirm it matches the original multi-line form in the repository.
    var input = @"+++ variable = 1 name = 'yes' +++ This is after the frontmatter: {{ name }} {{ variable + 1 }}";
    input = input.Replace("\r\n", "\n");

    var lexer = new Lexer(input, null, new LexerOptions() { Mode = ScriptMode.FrontMatterOnly });
    var parser = new Parser(lexer, options);

    var page = parser.Run();
    foreach (var message in parser.Messages)
    {
        Console.WriteLine(message);
    }
    Assert.False(parser.HasErrors);

    // Check that the parser finished parsing on the first code exit }}
    // and hasn't tried to run the lexer on the remaining text
    Assert.AreEqual(new TextPosition(30, 3, 0), parser.CurrentSpan.Start);
    Assert.AreEqual(new TextPosition(33, 3, 3), parser.CurrentSpan.End);

    var startPositionAfterFrontMatter = parser.CurrentSpan.End.NextLine();

    // Make sure that we have a front matter
    Assert.NotNull(page.FrontMatter);
    Assert.Null(page.Body);

    var context = new TemplateContext();

    // Evaluate front-matter
    var frontResult = context.Evaluate(page.FrontMatter);
    Assert.Null(frontResult);

    // Lex and parse the remainder of the input starting right after the front matter.
    lexer = new Lexer(input, null, new LexerOptions() { StartPosition = startPositionAfterFrontMatter });
    parser = new Parser(lexer);

    page = parser.Run();
    foreach (var message in parser.Messages)
    {
        Console.WriteLine(message);
    }
    Assert.False(parser.HasErrors);

    // Rendering the body sees the variables defined by the evaluated front matter.
    context.Evaluate(page);
    var pageResult = context.Output.ToString();
    TextAssert.AreEqual("This is after the frontmatter: yes\n2", pageResult);
}