/// <summary>
/// Consumes exactly one character from the extractor and yields a token when it
/// matches the expected character; on a mismatch the cursor is rewound and
/// <c>null</c> is returned. Any exception also rewinds the cursor before rethrowing.
/// </summary>
public override Token Eval(TokenExtractor extractor)
{
    EnsureExtractor(extractor, nameof(extractor));

    // TODO: remove cursor handling from the extractor — will the caller guarantee this?
    var startCursor = extractor.SaveCursor();
    try
    {
        var consumed = extractor.NextChar();
        if (consumed == _char)
        {
            return new Token(consumed)
            {
                ContentPosBegin = startCursor.Position,
                ContentLength = extractor.SaveCursor().Position - startCursor.Position
            };
        }
    }
    catch (Exception)
    {
        extractor.RollbackCursor(startCursor);
        throw;
    }

    // Mismatch: undo the single-character consumption.
    extractor.RollbackCursor(startCursor);
    return null;
}
/// <summary>
/// Evaluates the alternatives in order and returns the token produced by the
/// first one that matches. When none match, the cursor is rolled back and
/// <c>null</c> is returned; exceptions roll the cursor back before rethrowing.
/// </summary>
public override Token Eval(TokenExtractor extractor)
{
    EnsureExtractor(extractor, nameof(extractor));

    var startCursor = extractor.SaveCursor();
    try
    {
        // First alternative that yields a token wins.
        foreach (var alternative in _list)
        {
            var token = alternative.Eval(extractor);
            if (token != null)
            {
                return token;
            }
        }
    }
    catch (Exception)
    {
        extractor.RollbackCursor(startCursor);
        throw;
    }

    // No alternative matched: restore the cursor and report failure.
    extractor.RollbackCursor(startCursor);
    return null;
}
/// <summary>
/// Delegates to the base evaluation and, on success, flattens the resulting
/// child tokens (each carrying a single char) into one string-valued token.
/// The cursor is rolled back on failure or exception.
/// </summary>
public override Token Eval(TokenExtractor extractor)
{
    EnsureExtractor(extractor, nameof(extractor));

    var startCursor = extractor.SaveCursor();
    var text = new StringBuilder();
    try
    {
        var innerToken = base.Eval(extractor);
        if (innerToken == null)
        {
            extractor.RollbackCursor(startCursor);
            return null;
        }

        // NOTE(review): assumes the base token's Content is Token[] of chars — confirm.
        foreach (var child in innerToken.Content as Token[])
        {
            text.Append((char)child.Content);
        }
    }
    catch (Exception)
    {
        extractor.RollbackCursor(startCursor);
        throw;
    }

    return new Token(text.ToString())
    {
        ContentPosBegin = startCursor.Position,
        ContentLength = extractor.SaveCursor().Position - startCursor.Position
    };
}
/// <summary>
/// Evaluates every element of the sequence, in order; all must match.
/// Returns a token whose content is the array of child tokens, or <c>null</c>
/// (with the cursor rolled back) as soon as any element fails.
/// </summary>
public override Token Eval(TokenExtractor extractor)
{
    EnsureExtractor(extractor, nameof(extractor));

    var startCursor = extractor.SaveCursor();
    var children = new List<Token>();
    try
    {
        foreach (var element in _list)
        {
            var childToken = element.Eval(extractor);
            if (childToken == null)
            {
                // One miss invalidates the whole sequence.
                extractor.RollbackCursor(startCursor);
                return null;
            }
            children.Add(childToken);
        }
    }
    catch (Exception)
    {
        extractor.RollbackCursor(startCursor);
        throw;
    }

    return new Token(children.ToArray())
    {
        ContentPosBegin = startCursor.Position,
        ContentLength = extractor.SaveCursor().Position - startCursor.Position
    };
}
public async Task TokenExtractor_Async_Test()
{
    var extractor = new TokenExtractor(new CurlyBracketDefinition());

    // The stray opening brace must not produce a second match.
    var results = await extractor.ExtractAsync("Hoang {{Duy} Bao");

    results.ToList().Should().HaveCount(1);
}
/// <summary>
/// Verifies that when auto-launch is enabled in the login options, the
/// Initialize API reports <c>IsAutoLaunch</c> and returns a redirect URI that
/// round-trips the supplied return URL (URL-encoded) and login options.
/// </summary>
public async Task AutoLaunch_Sets_Correct_RedirectUri()
{
    // Arrange mocks
    var autoLaunchOptions = new BankIdLoginOptions(new List<string>(), null, false, true, false);
    var mockProtector = new Mock<IBankIdLoginOptionsProtector>();
    mockProtector
        .Setup(protector => protector.Unprotect(It.IsAny<string>()))
        .Returns(autoLaunchOptions);
    var client = CreateServer(
            o =>
            {
                // Replace the launcher so no real BankID app is invoked.
                o.AuthenticationBuilder.Services.TryAddTransient<IBankIdLauncher, TestBankIdLauncher>();
                o.UseSimulatedEnvironment().AddSameDevice();
            },
            DefaultAppConfiguration(async context =>
            {
                await context.ChallengeAsync(BankIdAuthenticationDefaults.SameDeviceAuthenticationScheme);
            }),
            services =>
            {
                services.AddTransient(s => mockProtector.Object);
            })
        .CreateClient();

    // Arrange csrf info: fetch the login page to obtain cookies + antiforgery token.
    var loginResponse = await client.GetAsync("/BankIdAuthentication/Login?returnUrl=%2F&loginOptions=X&orderRef=Y");
    var loginCookies = loginResponse.Headers.GetValues("set-cookie");
    var loginContent = await loginResponse.Content.ReadAsStringAsync();
    var csrfToken = TokenExtractor.ExtractRequestVerificationTokenFromForm(loginContent);

    // Arrange acting request
    var testReturnUrl = "/TestReturnUrl";
    var testOptions = "TestOptions";
    var initializeRequest = new JsonContent(new { returnUrl = testReturnUrl, loginOptions = testOptions });
    initializeRequest.Headers.Add("Cookie", loginCookies);
    initializeRequest.Headers.Add("RequestVerificationToken", csrfToken);

    // Act
    var transaction = await client.PostAsync("/BankIdAuthentication/Api/Initialize", initializeRequest);

    // Assert
    Assert.Equal(HttpStatusCode.OK, transaction.StatusCode);
    var responseContent = await transaction.Content.ReadAsStringAsync();
    var responseObject = JsonConvert.DeserializeAnonymousType(responseContent, new { RedirectUri = "", OrderRef = "", IsAutoLaunch = false });
    Assert.True(responseObject.IsAutoLaunch);

    // The redirect must point back at the login endpoint with the same
    // (encoded) return URL and the original options value.
    var encodedReturnParam = UrlEncoder.Default.Encode(testReturnUrl);
    var expectedUrl =
        $"http://localhost/BankIdAuthentication/Login?returnUrl={encodedReturnParam}&loginOptions={testOptions}";
    Assert.Equal(expectedUrl, responseObject.RedirectUri);
}
public void Reconhece_Caractere_Invalido()
{
    // 'A' (upper case) must not match the expected lower-case 'a'.
    var charElement = new CharGrammarElement('a');
    var extractor = TokenExtractor.FromString("A");

    var result = charElement.Eval(extractor);

    Assert.Null(result);
}
public void NextChar_RetornaOToken_NaPosicao()
{
    var extractor = TokenExtractor.FromString("123");

    // Each call must advance exactly one position through the code.
    Assert.Equal('1', extractor.NextChar());
    Assert.Equal('2', extractor.NextChar());
    Assert.Equal('3', extractor.NextChar());
}
public void Requer_String_AoInstanciar()
{
    // A null source string must be rejected at construction time.
    var exception = Assert.Throws<ArgumentNullException>(
        () => TokenExtractor.FromString(null)
    );

    Assert.Equal("code", exception.ParamName);
}
public void TokenExtractor_Support_DuplicateOfToken_Test()
{
    var extractor = new TokenExtractor(new CurlyBracketDefinition());

    // Square brackets around the token must not interfere with extraction.
    var results = extractor.Extract("Hoang [{Duy}] Bao").ToList();

    results.Should().HaveCount(1)
        .And.Subject.First().Token.Should().Be("{Duy}");
}
public void TokenExtractor_InCorrect_Token_Test()
{
    var extractor = new TokenExtractor(new CurlyBracketDefinition());

    // Reversed brackets yield no tokens.
    extractor.Extract("Hoang ]Duy[ Bao").ToList()
        .Should().HaveCount(0);

    // Unclosed brackets yield no tokens either.
    extractor.Extract("Hoang Duy Bao[[").ToList()
        .Should().HaveCount(0);
}
public void AvaliaCorretamente_UmaGramaticaSimples()
{
    var grammar = new SampleGrammar1();
    var parser = new Parser(grammar);
    var code = TokenExtractor.FromString(SampleGrammar1.SampleCode1);

    var syntaxTree = parser.Parse(code);

    // The sample grammar validates its own expected parse tree.
    SampleGrammar1.AssertSampleCode1(syntaxTree);
}
public void Reconhece_Caractere_Valido(char validChar)
{
    var charElement = new CharGrammarElement(validChar);
    var extractor = TokenExtractor.FromString(validChar.ToString());

    var token = charElement.Eval(extractor);

    Assert.NotNull(token);
    Assert.Equal(validChar, token.Content);
}
public void SaveCursor_Retorna_APosicaoAtualNoCodigo()
{
    var extractor = TokenExtractor.FromString("123");
    extractor.NextChar();
    extractor.NextChar();

    // Two characters ahead of the start.
    var cursor = extractor.SaveCursor();

    Assert.Equal(2, cursor.Position);
}
public void RollbackCursor_RequerCursorNoRange()
{
    var extractor = TokenExtractor.FromString("");

    // Position 1 is outside the range of an empty source.
    var outOfRangeCursor = new TokenExtractorCursor { Position = 1 };

    Assert.Throws<IndexOutOfRangeException>(
        () => extractor.RollbackCursor(outOfRangeCursor)
    );
}
public void ParaCodigoVazio_ArvoreEhInvalida_MesmoSeGramaticaNaoDisser()
{
    var rootElementMock = new Mock<GrammarElement>();
    var grammar = new Grammar(rootElementMock.Object);
    var extractor = TokenExtractor.FromString("");
    var parser = new Parser(grammar);

    var syntaxTree = parser.Parse(extractor);

    // Empty code can never yield a valid tree, whatever the grammar says.
    Assert.False(syntaxTree.IsValid);
    Assert.True(extractor.EndOfCode);
}
public void Parse_Retorna_ArvoreSintaticaInvalida_ParaCodigoVazio()
{
    var rootElement = new Mock<GrammarElement>().Object;
    var grammar = new Grammar(rootElement);
    var extractor = TokenExtractor.FromString("");
    var parser = new Parser(grammar);

    var syntaxTree = parser.Parse(extractor);

    // Parsing empty code still returns a tree object, just an invalid one.
    Assert.NotNull(syntaxTree);
    Assert.False(syntaxTree.IsValid);
}
public void DesconsideraCaracteres_Conforme_IgnoreDelegate()
{
    /*
     * THE BASIC IDEA BEHIND IGNORING:
     * -------------------------------
     * > The token itself decides whether or not to skip
     *   anything before it starts consuming its characters.
     *
     * > For that, besides receiving the TokenExtractor, it
     *   also needs to receive the [ignorer].
     *
     * > Normally the skipping happens at the start of the process,
     *   right before consuming the first [char].
     *
     * # Picture an "ignore blanks" rule [space, tab, new line, etc].
     * # Now picture the tokens for:
     *   - Single-line comment  -> // comment here
     *   - Multi-line comment   -> (* several lines *)
     *   - Strings              -> "any string"
     * # Those must skip the leading characters, but once inside
     *   their own scope they must keep the "blanks".
     *
     * > We suspect the [CharGrammarElement] element no longer makes
     *   sense here. Maybe a [WordGrammarElement] would fit better.
     *   Whether it is a one-character word is for the element to decide.
     */
    bool ignoreNoise(char c) => c == ' ' || c == '*' || c == '-';

    var rootElement = new AndListGrammarElement(new[]
    {
        new CharGrammarElement('A'),
        new CharGrammarElement('B')
    });
    var grammar = new Grammar(rootElement, ignoreNoise);
    var parser = new Parser(grammar);
    var code = TokenExtractor.FromString(
        "- - * A - - --- ****** B ** -----"
    );

    var tree = parser.Parse(code);

    Assert.NotNull(tree);
    Assert.NotNull(tree.RootNode);
    Assert.NotNull(tree.RootNode.Token);
    Assert.NotNull(tree.RootNode.Token.Content);
    Assert.True(tree.IsValid);

    // Only the two meaningful characters survive; the noise is skipped.
    var childTokens = Assert.IsType<Token[]>(tree.RootNode.Token.Content);
    Assert.Equal(2, childTokens.Length);
    Assert.Equal("A", childTokens[0].Content.ToString());
    Assert.Equal("B", childTokens[1].Content.ToString());
}
public void EndOfCode_Indica_QuandoNoFim()
{
    var extractor = TokenExtractor.FromString("az");

    var beforeConsuming = extractor.EndOfCode;
    extractor.NextChar();
    extractor.NextChar();
    var afterConsumingAll = extractor.EndOfCode;

    Assert.False(beforeConsuming);
    Assert.True(afterConsumingAll);
}
public void Reconhece_ListaInvalida_AoAvaliar(string text)
{
    var alternatives = new[]
    {
        new CharGrammarElement('1'),
        new CharGrammarElement('2'),
        new CharGrammarElement('3')
    };
    var orList = new OrListGrammarElement(alternatives);
    var extractor = TokenExtractor.FromString(text);

    var token = orList.Eval(extractor);

    // None of the alternatives match the input, so no token is produced.
    Assert.Null(token);
}
public async Task Extract_TemplatesWithSpecialCharacters_ExtractionSuccessfull()
{
    // Arrange
    var mapperLogger = new Mock<ILogger<TokenMapper>>();
    var extractorLogger = new Mock<ILogger<TokenExtractor>>();
    ITokenMapper mapper = new TokenMapper(mapperLogger.Object);
    ITokenExtractor tokenExtractor = new TokenExtractor(mapper, extractorLogger.Object);

    // Act
    List<Extraction> extractions = await tokenExtractor.Extract("Hello\n${uuid}");

    // Assert — the newline survives and the token becomes a format slot.
    Assert.Single(extractions);
    Assert.Equal("Hello\n{0}", extractions[0].Template);
    Assert.Equal("uuid", extractions[0].Tokens[0].Key);
}
public async Task Extract_TemplatesWithJson_ExtractionSuccessfull()
{
    // Arrange
    var mapperLogger = new Mock<ILogger<TokenMapper>>();
    var extractorLogger = new Mock<ILogger<TokenExtractor>>();
    ITokenMapper mapper = new TokenMapper(mapperLogger.Object);
    ITokenExtractor tokenExtractor = new TokenExtractor(mapper, extractorLogger.Object);

    // Act
    List<Extraction> extractions = await tokenExtractor.Extract("{\"Hello\": \"World\", \"id\": \"${uuid}\"}");

    // Assert — JSON braces are escaped ({{ }}) so the template stays format-safe.
    Assert.Single(extractions);
    Assert.Equal("{{\"Hello\": \"World\", \"id\": \"{0}\"}}", extractions[0].Template);
    Assert.Equal("uuid", extractions[0].Tokens[0].Key);
}
public void Reconhece_ListaValida_AoAvaliar()
{
    var sequence = new[]
    {
        new CharGrammarElement('h'),
        new CharGrammarElement('h'),
        new CharGrammarElement('h')
    };
    var charList = new CharListGrammarElement(sequence);
    var extractor = TokenExtractor.FromString("hhh");

    var token = charList.Eval(extractor);

    Assert.NotNull(token);
    Assert.Equal("hhh", (string)token.Content);
}
public void Reconhece_ListaValida_AoAvaliar(string text)
{
    var alternatives = new[]
    {
        new CharGrammarElement('a'),
        new CharGrammarElement('b'),
        new CharGrammarElement('c')
    };
    var orList = new OrListGrammarElement(alternatives);
    var extractor = TokenExtractor.FromString(text);

    var token = orList.Eval(extractor);

    Assert.NotNull(token);
    // The winning alternative yields a single char from the allowed set.
    Assert.IsType<char>(token.Content);
    Assert.Contains((char)token.Content, new char[] { 'a', 'b', 'c' });
}
public async Task Extract_TemplateWithOneToken_TokenExtracted()
{
    // Arrange
    var mapperLogger = new Mock<ILogger<TokenMapper>>();
    var extractorLogger = new Mock<ILogger<TokenExtractor>>();
    ITokenMapper mapper = new TokenMapper(mapperLogger.Object);
    ITokenExtractor tokenExtractor = new TokenExtractor(mapper, extractorLogger.Object);

    // Act
    List<Extraction> extractions = await tokenExtractor.Extract("I am a log for ${uuid}");

    // Assert — the single ${uuid} token is replaced by the {0} slot.
    Extraction extraction = extractions.First();
    Assert.Equal("I am a log for {0}", extraction.Template);
    Assert.Equal("uuid", extraction.Tokens.First().Key);
}
public void ArvoreSintaticaEhInvalida_SeTodoCodigo_NaoForConsumido()
{
    var rootElementMock = new Mock<GrammarElement>();
    var grammar = new Grammar(rootElementMock.Object);
    var extractor = TokenExtractor.FromString("a");
    var parser = new Parser(grammar);

    // A grammar evaluation succeeds by returning a non-null [Token]...
    rootElementMock
        .Setup(el => el.Eval(It.IsAny<TokenExtractor>()))
        .Returns(new Token(null));

    var tree = parser.Parse(extractor);

    // ...but the tree is still invalid because "a" was never consumed.
    Assert.False(tree.IsValid);
    Assert.False(extractor.EndOfCode);
}
public async Task Extract_TemplateWithTwoTokenInJson_TokensExtracted()
{
    // Arrange
    var mapperLogger = new Mock<ILogger<TokenMapper>>();
    var extractorLogger = new Mock<ILogger<TokenExtractor>>();
    ITokenMapper mapper = new TokenMapper(mapperLogger.Object);
    ITokenExtractor tokenExtractor = new TokenExtractor(mapper, extractorLogger.Object);

    // Act
    List<Extraction> extractions = await tokenExtractor.Extract("{\"Hello\":\"${randomitem(world,universe)}\", \"id\": \"${uuid}\"}");

    // Assert — two tokens become slots {0} and {1}, in source order.
    Extraction extraction = extractions.First();
    Assert.Equal("{{\"Hello\":\"{0}\", \"id\": \"{1}\"}}", extraction.Template);
    Assert.Equal("randomitem", extraction.Tokens[0].Key);
    Assert.Equal("uuid", extraction.Tokens[1].Key);
}
public void Consome_SoOsCaracteres_Necessarios()
{
    var sequence = new[]
    {
        new CharGrammarElement('h'),
        new CharGrammarElement('h'),
        new CharGrammarElement('h')
    };
    var charList = new CharListGrammarElement(sequence);
    var extractor = TokenExtractor.FromString("hhhhhh");
    var startCursor = extractor.SaveCursor();

    var token = charList.Eval(extractor);

    Assert.NotNull(token);
    Assert.Equal("hhh", (string)token.Content);
    // Only the three matched characters were consumed, not the whole input.
    Assert.Equal(0, startCursor.Position);
    Assert.Equal(3, extractor.SaveCursor().Position);
}
public void Reconhece_TodosTokens_Validos()
{
    var blank = new OrListGrammarElement(new[]
    {
        new CharGrammarElement(' '),
        new CharGrammarElement('\t'),
    });
    var whileElement = new WhileGrammarElement(blank);
    var extractor = TokenExtractor.FromString(" \t \t \t final!");

    var token = whileElement.Eval(extractor);

    Assert.NotNull(token);
    // The loop must stop at the first non-blank ('f'), having eaten 11 chars.
    var childTokens = Assert.IsType<Token[]>(token.Content);
    Assert.Equal(11, childTokens.Length);
    var consumedText = string.Concat(childTokens.Select(tk => tk.Content.ToString()));
    Assert.Equal(" \t \t \t ", consumedText);
}
public void Reconhece_ListaValida_AoAvaliar()
{
    var sequence = new[]
    {
        new CharGrammarElement('a'),
        new CharGrammarElement('b'),
        new CharGrammarElement('c')
    };
    var andList = new AndListGrammarElement(sequence);
    var extractor = TokenExtractor.FromString("abc");

    var token = andList.Eval(extractor);

    Assert.NotNull(token);
    // The sequence token carries one child per matched character, in order.
    var childTokens = Assert.IsType<Token[]>(token.Content);
    Assert.Equal(3, childTokens.Length);
    Assert.Equal("a", childTokens[0].Content.ToString());
    Assert.Equal("b", childTokens[1].Content.ToString());
    Assert.Equal("c", childTokens[2].Content.ToString());
}