// Verifies that constructing TokenMatcher with the "no" language code makes the
// parser accept Norwegian Gherkin keywords ("Egenskap" = Feature) and that the
// resulting AST serializes to the expected JSON.
// NOTE(review): expected AST is rooted at the feature itself with a
// "ScenarioDefinitions" array -- this matches the Gherkin v3-era object model;
// confirm the referenced Gherkin package version.
public void TestChangeDefaultLanguage() { var tokenMatcher = new TokenMatcher("no"); var parser = new Parser(new AstBuilder<Feature>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; var parsingResult = parser.Parse(new TokenScanner(new StringReader("Egenskap: i18n support")), tokenMatcher); var astText = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult, jsonSerializerSettings)); string expected = LineEndingHelper.NormalizeLineEndings(@"{ ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""no"", ""Keyword"": ""Egenskap"", ""Name"": ""i18n support"", ""ScenarioDefinitions"": [], ""Comments"": [] }"); Assert.AreEqual(expected, astText); }
/// <summary>
/// Creates a handler that pairs a token matcher with the consumer invoked for
/// its matches. The handler id defaults to 0 when not supplied.
/// </summary>
/// <param name="matcher">Matcher deciding whether a token applies.</param>
/// <param name="consumer">Callback invoked for matched tokens.</param>
/// <param name="priority">Relative ordering among registered handlers.</param>
public QueryTokenHandler(TokenMatcher matcher, TokenConsumer consumer, int priority)
{
    this.id = 0;
    this.priority = priority;
    this.consumer = consumer;
    this.matcher = matcher;
}
/// <summary>
/// Parses a Gherkin feature file into a <see cref="Feature"/>, converting the
/// raw document via the configured dialect and post-processing all
/// descriptions relative to the feature file's directory.
/// </summary>
/// <param name="featureFile">Path of the feature file to parse.</param>
/// <returns>The converted feature with its source filename recorded.</returns>
/// <exception cref="InvalidOperationException">Thrown when the file parses to a document without a feature.</exception>
private Feature ParseFeatureFile(string featureFile)
{
    using (var reader = new StreamReader(featureFile))
    {
        var parser = new Parser();
        var dialectProvider = new AugurkDialectProvider(Options.Language);
        var tokenScanner = new TokenScanner(reader);
        var tokenMatcher = new TokenMatcher(dialectProvider);
        var document = parser.Parse(tokenScanner, tokenMatcher);
        if (document.Feature == null)
        {
            throw new InvalidOperationException("Feature file failed to parse.");
        }

        var feature = document.Feature.ConvertToFeature(dialectProvider.GetDialect(document.Feature.Language, document.Feature.Location));
        feature.SourceFilename = featureFile;

        // Descriptions may reference files relative to the feature file, so
        // temporarily switch the working directory while processing them.
        string savedDirectory = Directory.GetCurrentDirectory();
        Directory.SetCurrentDirectory(Path.GetDirectoryName(featureFile));
        try
        {
            feature.Description = ProcessDescription(feature.Description);
            foreach (var scenario in feature.Scenarios)
            {
                scenario.Description = ProcessDescription(scenario.Description);
            }
        }
        finally
        {
            // BUGFIX: restore the original working directory even when
            // ProcessDescription throws; previously an exception left the
            // whole process in the feature file's directory.
            Directory.SetCurrentDirectory(savedDirectory);
        }

        return feature;
    }
}
// Same scenario as the Feature-rooted variant but against the newer Gherkin
// object model: the AST is rooted at a GherkinDocument with a nested
// "Feature" node and a "Children" array instead of "ScenarioDefinitions".
// Verifies the "no" dialect recognizes "Egenskap" as the feature keyword.
public void TestChangeDefaultLanguage() { var tokenMatcher = new TokenMatcher("no"); var parser = new Parser(new AstBuilder <GherkinDocument>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; var parsingResult = parser.Parse(new TokenScanner(new StringReader("Egenskap: i18n support")), tokenMatcher); var astText = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult, jsonSerializerSettings)); string expected = LineEndingHelper.NormalizeLineEndings(@"{ ""Feature"": { ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""no"", ""Keyword"": ""Egenskap"", ""Name"": ""i18n support"", ""Children"": [] }, ""Comments"": [] }"); Assert.AreEqual(expected, astText); }
/// <summary>
/// Determines whether the line is a Gherkin structural keyword line
/// (Feature / Scenario / Scenario Outline / Background / Examples) and, if so,
/// emits a canonically formatted version of it plus the matched token type.
/// </summary>
private bool IsKeyWord(string line, out string formatted_line, out TokenType keywordType)
{
    formatted_line = "";
    keywordType = TokenType.Other;
    Token token = ToToken(line);

    bool topLevelKeyword = TokenMatcher.Match_FeatureLine(token)
        || TokenMatcher.Match_ScenarioLine(token)
        || TokenMatcher.Match_ScenarioOutlineLine(token)
        || TokenMatcher.Match_BackgroundLine(token);

    if (topLevelKeyword)
    {
        keywordType = token.MatchedType;
        formatted_line = token.MatchedKeyword + ": " + token.MatchedText;
        return true;
    }

    if (TokenMatcher.Match_ExamplesLine(token))
    {
        keywordType = token.MatchedType;
        // Examples blocks are nested under a scenario outline, hence indented.
        formatted_line = IDENT2 + token.MatchedKeyword + ": " + token.MatchedText;
        return true;
    }

    return false;
}
/// <summary>
/// Checks whether the line is a Gherkin comment. The trimmed line is always
/// produced via <paramref name="formatted_line"/>, regardless of the result.
/// </summary>
private bool IsComment(string line, out string formatted_line)
{
    Token token = ToToken(line);
    formatted_line = line.TrimEnd();
    bool isComment = TokenMatcher.Match_Comment(token);
    return isComment;
}
/// <summary>
/// Handles a standard (non-custom-action) plugin invocation: resolves the
/// configured template, expands its tokens against the target record's data
/// source and writes the expanded text into the configured target field.
/// Aborts silently when there is no target, the execution criteria fail, or
/// the resolved template is empty.
/// </summary>
private void HandleNonCustomAction(IPluginExecutionContext context, ITracingService tracing, IOrganizationService service)
{
    var target = context.InputParameters.ContainsKey("Target")
        ? context.InputParameters["Target"] as Entity
        : null;

    if (target is null)
    {
        return;
    }

    var targetField = _config.TargetField;
    var template = _config.Template;
    var templateField = _config.TemplateField;

    var dataSource = GenerateDataSource(context, target);

    if (!CheckExecutionCriteria(_config, dataSource, service, tracing))
    {
        tracing.Trace("Execution criteria not met, aborting");
        return;
    }

    ValidateConfig(targetField, template, templateField);

    var templateText = RetrieveTemplate(template, templateField, dataSource, service, tracing);

    if (string.IsNullOrEmpty(templateText))
    {
        tracing.Trace("Template is empty, aborting");
        return;
    }

    var expandedText = TokenMatcher.ProcessTokens(templateText, dataSource, _organizationConfig, service, tracing);
    target[targetField] = expandedText;

    TriggerUpdateConditionally(expandedText, dataSource, _config, service);
}
/// <summary>
/// Creates a handler that pairs a token matcher with the consumer invoked for
/// its matches, with an explicit id, and initializes an empty user-data map.
/// </summary>
/// <param name="matcher">Matcher deciding whether a token applies.</param>
/// <param name="consumer">Callback invoked for matched tokens.</param>
/// <param name="id">Identifier assigned to this handler.</param>
/// <param name="priority">Relative ordering among registered handlers.</param>
public QueryTokenHandler(TokenMatcher matcher, TokenConsumer consumer, int id, int priority)
{
    userData = new Dictionary<string, object>();
    this.id = id;
    this.priority = priority;
    this.consumer = consumer;
    this.matcher = matcher;
}
/// <summary>
/// Initializes the visitor with a token matcher pre-loaded with every
/// embedded domain pattern, registered under its pattern name.
/// </summary>
public PatternExtractorVisitor()
{
    matcher = new TokenMatcher();

    Embedded.Patterns.Domains.ForEach((name, pattern) => matcher.AddPattern(pattern, name));
}
/// <summary>
/// Parses a SpecFlow feature from the given reader, checks it for semantic
/// errors, and returns the resulting feature.
/// </summary>
/// <param name="featureFileReader">Reader over the feature file text.</param>
/// <param name="sourceFilePath">Path recorded in the AST for diagnostics.</param>
public SpecFlowFeature Parse(TextReader featureFileReader, string sourceFilePath)
{
    var astBuilder = new SpecFlowAstBuilder(sourceFilePath);
    var scanner = new TokenScanner(featureFileReader);
    var matcher = new TokenMatcher(dialectProvider);

    var feature = new Parser<SpecFlowFeature>(astBuilder).Parse(scanner, matcher);
    CheckSemanticErrors(feature);
    return feature;
}
/// <summary>
/// Attempts to derive a <see cref="ModuleStatementSyntax"/> from the matcher.
/// Currently always returns null; the intended construction is preserved
/// below as a TODO because the surrounding code does not yet enable it.
/// </summary>
internal static ModuleStatementSyntax FromMatcher(TokenMatcher matcher)
{
    // TryMatch is still invoked because it may advance the matcher's state --
    // NOTE(review): confirm it is side-effect free before removing the call.
    var list = matcher.TryMatch(SyntaxKind.ModuleKeyword, SyntaxKind.IdentifierToken);

    // TODO: enable once module statements are supported downstream:
    // if (list != null)
    // {
    //     return Syntax.ModuleStatement(null, null, list[0].AsToken(), list[1].AsToken());
    // }
    return null;
}
/// <summary>
/// Initializes the visitor with a token matcher that understands UTC
/// date-times and is pre-loaded with every embedded domain pattern.
/// </summary>
public PatternExtractorVisitor()
{
    matcher = new TokenMatcher();

    // Support UTC DateTimes
    matcher.RegisterTransformer<ToDateTimeUtcTransformer>();

    Embedded.Patterns.Domains.ForEach((name, pattern) => matcher.AddPattern(pattern, name));
}
/// <summary>
/// Parses a Gherkin document from the given reader using the hotfix token
/// scanner, recording parser state per line and checking semantic errors
/// before returning the document.
/// </summary>
/// <param name="featureFileReader">Reader over the feature file text.</param>
/// <param name="sourceFilePath">Path recorded in the AST for diagnostics.</param>
public DeveroomGherkinDocument Parse(TextReader featureFileReader, string sourceFilePath)
{
    ITokenScanner scanner = new HotfixTokenScanner(featureFileReader);
    var matcher = new TokenMatcher(DialectProvider);

    // The AST builder needs the dialect lazily, since it is only known once
    // the matcher has seen the (optional) language directive.
    _astBuilder = new DeveroomGherkinAstBuilder(sourceFilePath, () => matcher.CurrentDialect);

    var parser = new InternalParser(_astBuilder, AstBuilder.RecordStateForLine, _monitoringService);
    var document = parser.Parse(scanner, matcher);

    CheckSemanticErrors(document);
    return document;
}
/// <summary>
/// Determines whether the line is a Gherkin step line and, if so, emits a
/// canonically formatted, indented version of it.
/// </summary>
private bool IsStep(string line, out string formatted_line)
{
    formatted_line = "";
    Token token = ToToken(line);

    if (!TokenMatcher.Match_StepLine(token))
    {
        return false;
    }

    // Steps live under a scenario, hence the extra indent.
    formatted_line = IDENT2 + token.MatchedKeyword.Trim() + " " + token.MatchedText;
    return true;
}
/// <summary>
/// Best-effort probe for a Gherkin language directive. Any exception from the
/// matcher is deliberately treated as "not a language line".
/// </summary>
private bool SimpleMatchLanguage(Token token)
{
    try
    {
        return TokenMatcher.Match_Language(token);
    }
    catch (Exception)
    {
        return false;
    }
}
/// <summary>
/// Creates a new instance of the <see cref="WhoisParser"/> class with the
/// default transformers and response fix-ups registered.
/// </summary>
public WhoisParser()
{
    reader = new ResourceReader();
    statusParser = new WhoisStatusParser();
    matcher = new TokenMatcher();

    // Register default transformers
    matcher.RegisterTransformer<CleanDomainStatusTransformer>();
    matcher.RegisterTransformer<ToHostNameTransformer>();

    // Register default FixUps
    FixUps = new List<IFixup>
    {
        new MultipleContactFixup(),
        new WhoisIsocOrgIlFixup()
    };
}
/// <summary>
/// Builds a token matcher loaded with every embedded "whois.iana.org" TLD
/// template resource.
/// </summary>
private TokenMatcher CreateIanaTemplate()
{
    var ianaMatcher = new TokenMatcher();

    foreach (var name in resourceReader.GetNames("whois.iana.org", "tld"))
    {
        ianaMatcher.RegisterTemplate(resourceReader.GetContent(name));
    }

    return ianaMatcher;
}
/// <summary>
/// If the line is a Gherkin language directive, notifies listeners of the new
/// language; any matcher failure is published as a status-change message
/// instead of propagating.
/// </summary>
private void TryUpdateLanguage(string line)
{
    Token token = ToToken(line);
    try
    {
        if (!TokenMatcher.Match_Language(token))
        {
            return;
        }
        NotifyCurrentGherkinLanguage(token.MatchedText);
    }
    catch (Exception ex)
    {
        EventAggregator<StatusChangedArg>.Instance.Publish(this, new StatusChangedArg(ex.Message));
    }
}
// Verifies that a single Parser/TokenMatcher pair can be reused to parse two
// independent documents in sequence (GherkinDocument-rooted AST): both parses
// must produce the correct, fully independent JSON serializations.
public void TestMultipleFeatures() { var tokenMatcher = new TokenMatcher(); var parser = new Parser(new AstBuilder <GherkinDocument>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; var parsingResult1 = parser.Parse(new TokenScanner(new StringReader("Feature: Test")), tokenMatcher); var astText1 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult1, jsonSerializerSettings)); var parsingResult2 = parser.Parse(new TokenScanner(new StringReader("Feature: Test2")), tokenMatcher); var astText2 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult2, jsonSerializerSettings)); string expected1 = LineEndingHelper.NormalizeLineEndings(@"{ ""Feature"": { ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Test"", ""Children"": [] }, ""Comments"": [] }"); string expected2 = LineEndingHelper.NormalizeLineEndings(@"{ ""Feature"": { ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Test2"", ""Children"": [] }, ""Comments"": [] }"); Assert.AreEqual(expected1, astText1); Assert.AreEqual(expected2, astText2); }
/// <summary>
/// Builds a token matcher with the default transformers registered and every
/// embedded "whois.iana.org" template resource loaded.
/// </summary>
private TokenMatcher CreateIanaTemplate()
{
    var ianaMatcher = new TokenMatcher();

    ianaMatcher.RegisterTransformer<CleanDomainStatusTransformer>();
    ianaMatcher.RegisterTransformer<ToHostNameTransformer>();

    foreach (var name in resourceReader.GetNames("whois.iana.org"))
    {
        ianaMatcher.RegisterTemplate(resourceReader.GetContent(name));
    }

    return ianaMatcher;
}
// Verifies that a single Parser/TokenMatcher pair can be reused to parse two
// independent documents in sequence (Feature-rooted, Gherkin v3-era AST with
// "ScenarioDefinitions"): both parses must serialize correctly.
public void TestMultipleFeatures() { var tokenMatcher = new TokenMatcher(); var parser = new Parser(new AstBuilder<Feature>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; var parsingResult1 = parser.Parse(new TokenScanner(new StringReader("Feature: Test")), tokenMatcher); var astText1 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult1, jsonSerializerSettings)); var parsingResult2 = parser.Parse(new TokenScanner(new StringReader("Feature: Test2")), tokenMatcher); var astText2 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult2, jsonSerializerSettings)); string expected1 = LineEndingHelper.NormalizeLineEndings(@"{ ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Test"", ""ScenarioDefinitions"": [], ""Comments"": [] }"); string expected2 = LineEndingHelper.NormalizeLineEndings(@"{ ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Test2"", ""ScenarioDefinitions"": [], ""Comments"": [] }"); Assert.AreEqual(expected1, astText1); Assert.AreEqual(expected2, astText2); }
/// <summary>
/// Determines whether the line is any Gherkin structural keyword line
/// (Feature / Scenario / Scenario Outline / Background / Examples) and emits
/// the matched token type. A matched feature line also propagates the
/// document's dialect via NotifyCurrentGherkinLanguage.
/// </summary>
private bool IsKeyWord(string line, out TokenType keywordType)
{
    keywordType = TokenType.Other;
    Token token = ToToken(line);

    // Short-circuits in the same order as the matcher variants are declared.
    bool matched = TokenMatcher.Match_FeatureLine(token)
        || TokenMatcher.Match_ScenarioLine(token)
        || TokenMatcher.Match_ScenarioOutlineLine(token)
        || TokenMatcher.Match_BackgroundLine(token)
        || TokenMatcher.Match_ExamplesLine(token);

    if (!matched)
    {
        return false;
    }

    // CLEANUP: removed the unused local that copied token.MatchedKeyword.
    keywordType = token.MatchedType;
    if (keywordType == TokenType.FeatureLine)
    {
        // The feature line fixes the dialect for the rest of the document.
        NotifyCurrentGherkinLanguage(token.MatchedGherkinDialect.Language);
    }
    return true;
}
/// <summary>
/// Determines whether a WHOIS response is a redirect response to another WHOIS server.
/// </summary>
/// <param name="response">Response to inspect.</param>
/// <param name="redirect">The parsed redirect when one is found, otherwise null.</param>
/// <returns>True only when a redirect with a non-empty URL was matched.</returns>
public bool IsARedirectRecord(WhoisResponse response, out WhoisRedirect redirect)
{
    redirect = null;

    var tokenizer = new TokenMatcher();
    tokenizer.AddPattern(Embedded.Patterns.Redirects.VerisignGrs, "verisign-grs.com");

    if (!tokenizer.TryMatch<WhoisRedirect>(response.Content, out var match))
    {
        return false;
    }

    Log.Debug("Found redirect for {0} to {1}", response.Domain, match.Result.Url);
    redirect = match.Result;

    // Only treat the match as a redirect when a target URL was captured.
    return !string.IsNullOrEmpty(redirect.Url);
}
// Plugin entry point: on a qualifying create/update, finds all active
// subscriptions pointing at the event's (parent) record, builds per-locale
// notification texts via TokenMatcher.ProcessTokens, creates an
// oss_notification row per subscription, and optionally sends an email
// notification for subscriptions that have email notifications enabled.
// NOTE(review): when `target` is null but `targetRef` is set AND
// config.Value.CapturedFields is non-null, `attributes` is null and
// `attributes.Where(...)` will throw a NullReferenceException -- confirm
// whether that combination can occur for the registered messages.
// NOTE(review): message lookup falls back to the "default" locale key both
// for the token-processed texts and for GlobalNotificationConfig.
public void Execute(IServiceProvider serviceProvider) { var context = serviceProvider.GetService(typeof(IPluginExecutionContext)) as IPluginExecutionContext; var crmTracing = serviceProvider.GetService(typeof(ITracingService)) as ITracingService; var serviceFactory = serviceProvider.GetService(typeof(IOrganizationServiceFactory)) as IOrganizationServiceFactory; var service = serviceFactory.CreateOrganizationService(context.UserId); var target = GetTarget(context); var targetRef = GetTargetRef(context); if (target == null && targetRef == null) { return; } var shouldExecute = CheckExecutionCondition(target, service, crmTracing); if (!shouldExecute) { crmTracing.Trace("Execution condition not met, aborting"); return; } var attributes = target != null ? target.Attributes.Keys.ToList() : null; var filteredAttributes = config.Value.CapturedFields != null ? attributes.Where(a => config.Value.CapturedFields.Any(f => string.Equals(a, f, StringComparison.InvariantCultureIgnoreCase))).ToList() : attributes; var eventData = new EventData { UpdatedFields = filteredAttributes, EventRecordReference = target?.ToEntityReference() ?? targetRef }; var eventTarget = string.IsNullOrEmpty(config.Value.ParentLookupName) && eventData.EventRecordReference.Id != Guid.Empty ? 
eventData.EventRecordReference : GetValue <EntityReference>(config.Value.ParentLookupName, target, context.PreEntityImages); if (eventTarget == null) { crmTracing.Trace("Failed to find parent, exiting"); return; } var subscriptionsQuery = service.Query("oss_subscription") .Where(e => e .Attribute(a => a .Named(config.Value.SubscriptionLookupName) .Is(ConditionOperator.Equal) .To(eventTarget.Id) ) .Attribute(a => a .Named("statecode") .Is(ConditionOperator.Equal) .To(0) ) ) .IncludeColumns("ownerid", "oss_emailnotificationsenabled", "oss_emailnotificationssender") .Link(l => l .FromEntity("oss_subscription") .ToEntity("systemuser") .FromAttribute("ownerid") .ToAttribute("systemuserid") .With.LinkType(JoinOperator.LeftOuter) .Link(l2 => l2 .FromEntity("systemuser") .ToEntity("usersettings") .FromAttribute("systemuserid") .ToAttribute("systemuserid") .With.LinkType(JoinOperator.LeftOuter) .With.Alias("usersettings") .IncludeColumns("localeid") ) ); if (!config.Value.NotifyCurrentUser) { subscriptionsQuery.AddCondition( (a => a .Named("ownerid") .Is(ConditionOperator.NotEqual) .To(context.UserId) ) ); } var subscriptions = subscriptionsQuery.RetrieveAll(); var serializedNotification = JsonSerializer.Serialize(eventData); var eventType = GetEventType(context); var messageConfig = config.Value.MessageConfig ?? new Dictionary <string, string>(); var messages = subscriptions.Select(s => s.GetAttributeValue <AliasedValue>("usersettings.localeid")?.Value as int?) .Select(locale => locale != null ? locale.Value.ToString() : "default") .Distinct() .ToDictionary( (k) => k, (k) => messageConfig.ContainsKey(k) ? TokenMatcher.ProcessTokens(messageConfig[k], target, new OrganizationConfig(), service, crmTracing) : (messageConfig.ContainsKey("default") ? 
TokenMatcher.ProcessTokens(messageConfig["default"], target, new OrganizationConfig(), service, crmTracing) : null) ); subscriptions.ForEach(subscription => { var localeCode = subscription.GetAttributeValue <AliasedValue>("usersettings.localeid")?.Value as int?; var locale = localeCode != null ? localeCode.Value.ToString() : "default"; var message = messages.ContainsKey(locale) ? messages[locale] : null; var user = subscription.GetAttributeValue <EntityReference>("ownerid"); var notification = new Entity { LogicalName = "oss_notification", Attributes = { ["oss_subscriptionid"] = subscription.ToEntityReference(), ["ownerid"] = user, ["oss_event"] = new OptionSetValue((int)eventType), [config.Value.NotificationLookupName] = eventTarget, ["oss_data"] = serializedNotification, ["oss_text"] = message } }; if (config.Value.GlobalNotificationConfig != null) { var messageKey = config.Value.GlobalNotificationConfig.Keys.FirstOrDefault(k => k == locale) ?? config.Value.GlobalNotificationConfig.Keys.FirstOrDefault(k => k == "default"); if (messageKey != null) { var notificationMessage = config.Value.GlobalNotificationConfig[messageKey]; var payload = new GlobalNotificationActionPayload { EntityName = eventTarget?.LogicalName, Message = notificationMessage }; notification["oss_globalnotificationactionpayload"] = JsonSerializer.Serialize(payload); } } notification.Id = service.Create(notification); var emailNotificationsEnabled = subscription.GetAttributeValue <bool>("oss_emailnotificationsenabled"); if (!emailNotificationsEnabled) { return; } SendEmailNotification(message, locale, user, subscription, notification, service); }); }
// Verifies parser recovery: a document with an unclosed docstring must raise
// ParserException, after which the SAME parser/matcher instances must still
// parse a valid document correctly (GherkinDocument-rooted AST).
public void TestFeatureAfterParseError() { var tokenMatcher = new TokenMatcher(); var parser = new Parser(new AstBuilder<GherkinDocument>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; try { parser.Parse(new TokenScanner(new StringReader(@"# a comment
Feature: Foo
  Scenario: Bar
    Given x
      ```
      unclosed docstring")), tokenMatcher); Assert.Fail("ParserException expected"); } catch (ParserException) { } var parsingResult2 = parser.Parse(new TokenScanner(new StringReader(@"Feature: Foo
  Scenario: Bar
    Given x
      """"""
      closed docstring
      """"""")), tokenMatcher); var astText2 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult2, jsonSerializerSettings)); string expected2 = LineEndingHelper.NormalizeLineEndings(@"{ ""Feature"": { ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Foo"", ""Children"": [ { ""Tags"": [], ""Location"": { ""Line"": 2, ""Column"": 3 }, ""Keyword"": ""Scenario"", ""Name"": ""Bar"", ""Steps"": [ { ""Location"": { ""Line"": 3, ""Column"": 5 }, ""Keyword"": ""Given "", ""Text"": ""x"", ""Argument"": { ""Location"": { ""Line"": 4, ""Column"": 7 }, ""Content"": ""closed docstring"" } } ] } ] }, ""Comments"": [] }"); Assert.AreEqual(expected2, astText2); }
/// <summary>
/// Inputs that do not match any registered pattern must yield no tokens.
/// </summary>
public void NegativeMatchReturnsNull(TokenMatcher matcher, string input)
{
    var tokens = matcher.GetTokens(input);

    tokens.ShouldBeEmpty();
}
/// <summary>
/// Inputs that match a registered pattern must yield exactly the expected tokens.
/// </summary>
public void PositiveMatchReturnsTokens(TokenMatcher matcher, string input, Token[] expected)
{
    var tokens = matcher.GetTokens(input);

    tokens.ShouldBe(expected);
}
// Convenience overload: delegates to the 4-argument constructor with the
// default handler id of 0.
public QueryTokenHandler(TokenMatcher matcher, TokenConsumer consumer, int priority) : this(matcher, consumer, 0, priority) { }
// Overload with an explicit id: delegates to the 3-argument constructor
// (which defaults the id) and then overwrites the id with the supplied value.
public QueryTokenHandler(TokenMatcher matcher, TokenConsumer consumer, int id, int priority) : this(matcher, consumer, priority) { this.id = id; }
/// <summary>
/// Returns true when the line parses as a Gherkin step line (Given/When/Then/...).
/// </summary>
private bool IsStep(string line) => TokenMatcher.Match_StepLine(ToToken(line));
// Verifies parser recovery: a document with an unclosed docstring must raise
// ParserException, after which the SAME parser/matcher instances must still
// parse a valid document correctly (Feature-rooted, Gherkin v3-era AST --
// note the docstring carries an empty "ContentType" in this object model).
public void TestFeatureAfterParseError() { var tokenMatcher = new TokenMatcher(); var parser = new Parser(new AstBuilder <Feature>()); var jsonSerializerSettings = new JsonSerializerSettings(); jsonSerializerSettings.Formatting = Formatting.Indented; jsonSerializerSettings.NullValueHandling = NullValueHandling.Ignore; try { parser.Parse(new TokenScanner(new StringReader(@"# a comment
Feature: Foo
  Scenario: Bar
    Given x
      ```
      unclosed docstring")), tokenMatcher); Assert.Fail("ParserException expected"); } catch (ParserException) { } var parsingResult2 = parser.Parse(new TokenScanner(new StringReader(@"Feature: Foo
  Scenario: Bar
    Given x
      """"""
      closed docstring
      """"""")), tokenMatcher); var astText2 = LineEndingHelper.NormalizeLineEndings(JsonConvert.SerializeObject(parsingResult2, jsonSerializerSettings)); string expected2 = LineEndingHelper.NormalizeLineEndings(@"{ ""Tags"": [], ""Location"": { ""Line"": 1, ""Column"": 1 }, ""Language"": ""en"", ""Keyword"": ""Feature"", ""Name"": ""Foo"", ""ScenarioDefinitions"": [ { ""Tags"": [], ""Location"": { ""Line"": 2, ""Column"": 3 }, ""Keyword"": ""Scenario"", ""Name"": ""Bar"", ""Steps"": [ { ""Location"": { ""Line"": 3, ""Column"": 5 }, ""Keyword"": ""Given "", ""Text"": ""x"", ""Argument"": { ""Location"": { ""Line"": 4, ""Column"": 7 }, ""ContentType"": """", ""Content"": ""closed docstring"" } } ] } ], ""Comments"": [] }"); Assert.AreEqual(expected2, astText2); }
/// <summary>
/// Creates a new instance of the <see cref="WhoisParser"/> class.
/// </summary>
public WhoisParser()
{
    statusParser = new WhoisResponseStatusParser();
    reader = new ResourceReader();
    matcher = new TokenMatcher();
}
/// <summary>
/// The index finder must locate the operation at the expected position within
/// the statement for each parameterized case.
/// </summary>
public void WholeOperationIndexFinderCorrectlyFindsIndex(string operation, string statement, int expectedIndex)
{
    var actualIndex = TokenMatcher.WholeOperationIndexFinder(operation, statement);

    Assert.Equal(expectedIndex, actualIndex);
}
// Overload accepting the TokenHandlerPriority enum: delegates to the int
// based constructor using the enum's underlying numeric value.
public QueryTokenHandler(TokenMatcher matcher, TokenConsumer consumer, int id, TokenHandlerPriority priority) : this(matcher, consumer, id, (int)priority) { }
/// <summary>
/// Creates a lexer over the given reader and registers the token matchers in
/// order of precedence (whitespace and comments first, then literals,
/// keywords and finally identifiers), then resets the lexer state.
/// </summary>
/// <param name="input">Reader supplying the source text.</param>
public Lexer(TextReader input)
{
    this.input = input;

    // Method-group conversion to the TokenMatcher delegate type; equivalent
    // to wrapping each method in an explicit `new TokenMatcher(...)`.
    this.matchers += MatchWhiteSpace;
    this.matchers += MatchSingleLineComment;
    this.matchers += MatchMultiLineComment;
    this.matchers += MatchStringLiteral;
    this.matchers += MatchKeyword;
    this.matchers += MatchIdentifier;

    this.Reset();
}