// Verifies bucket isolation: an attribute stored as PROTECTED, UNPROTECTED or
// DO_NOT_SEND must be found only in the bucket it was added to.
public void TestAddAttribute_4()
{
    Attributes attrs = new Attributes();
    attrs.AddAttribute(HeaderKeys.Algorithm, AlgorithmValues.AES_GCM_128, Attributes.PROTECTED);
    attrs.AddAttribute(HeaderKeys.ContentType, AlgorithmValues.AES_GCM_128, Attributes.UNPROTECTED);
    attrs.AddAttribute(HeaderKeys.EncryptionAlgorithm, AlgorithmValues.AES_GCM_128, Attributes.DO_NOT_SEND);

    CBORObject found = attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.PROTECTED);
    Assert.AreEqual(found, AlgorithmValues.AES_GCM_128);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.UNPROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.DO_NOT_SEND));

    found = attrs.FindAttribute(HeaderKeys.ContentType, Attributes.UNPROTECTED);
    Assert.AreEqual(found, AlgorithmValues.AES_GCM_128);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.ContentType, Attributes.PROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.ContentType, Attributes.DO_NOT_SEND));

    found = attrs.FindAttribute(HeaderKeys.EncryptionAlgorithm, Attributes.DO_NOT_SEND);
    Assert.AreEqual(found, AlgorithmValues.AES_GCM_128);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.EncryptionAlgorithm, Attributes.UNPROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.EncryptionAlgorithm, Attributes.PROTECTED));
}
// Verifies bucket isolation for three distinct AES-CBC-MAC algorithm values:
// each value is retrievable only from the bucket it was stored in.
public void testAddAttribute_4()
{
    Attributes attrs = new Attributes();
    attrs.AddAttribute(HeaderKeys.Algorithm, AlgorithmValues.AES_CBC_MAC_128_128, Attributes.PROTECTED);
    attrs.AddAttribute(HeaderKeys.ContentType, AlgorithmValues.AES_CBC_MAC_128_64, Attributes.UNPROTECTED);
    attrs.AddAttribute(HeaderKeys.CounterSignature, AlgorithmValues.AES_CBC_MAC_256_64, Attributes.DO_NOT_SEND);

    CBORObject found = attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.PROTECTED);
    Assert.AreEqual(found, AlgorithmValues.AES_CBC_MAC_128_128);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.UNPROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.Algorithm, Attributes.DO_NOT_SEND));

    found = attrs.FindAttribute(HeaderKeys.ContentType, Attributes.UNPROTECTED);
    Assert.AreEqual(found, AlgorithmValues.AES_CBC_MAC_128_64);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.ContentType, Attributes.PROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.ContentType, Attributes.DO_NOT_SEND));

    found = attrs.FindAttribute(HeaderKeys.CounterSignature, Attributes.DO_NOT_SEND);
    Assert.AreEqual(found, AlgorithmValues.AES_CBC_MAC_256_64);
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.CounterSignature, Attributes.UNPROTECTED));
    Assert.AreEqual(null, attrs.FindAttribute(HeaderKeys.CounterSignature, Attributes.PROTECTED));
}
// The copy constructor must preserve the number of stored attributes.
public void Length_should_equal_original_length()
{
    var original = new Attributes();
    original.AddAttribute(string.Empty, string.Empty, string.Empty, string.Empty, string.Empty);
    original.AddAttribute(string.Empty, string.Empty, string.Empty, string.Empty, string.Empty);

    var duplicate = new Attributes(original);

    Assert.Equal(duplicate.Length, original.Length);
}
/// <summary>
/// Parses a procedure call statement: checks that the callee exists in scope,
/// parses the actual parameter list, validates it against the procedure's
/// formal parameters, and publishes the resulting ProcedureCall tree under the
/// "AOC" attribute key.
/// </summary>
/// <param name="identifier">Name of the procedure being called.</param>
/// <param name="lexer">Token stream positioned at the call.</param>
/// <param name="attributes">Inherited/synthesized attribute bag; must contain "ENVS".</param>
/// <returns>true when the call parsed without semantic errors.</returns>
private bool ParseProcedureCall(string identifier, Lexer lexer, Attributes attributes)
{
    var successfullyParsed = true;
    var procedureCallToken = lexer.GetCurrentToken();
    var environments = attributes["ENVS"] as Environments;
    // SEM: validate that the subroutine exists in scope and is a procedure.
    if (!environments.ExistsProcedure(identifier))
    {
        LogProcedureNotFound(lexer.GetCurrentToken(), identifier);
        successfullyParsed = false;
    }
    var procedureCallTree = new ProcedureCall();
    procedureCallTree.Identifier = identifier;
    procedureCallTree.Reference = environments.FindProcedure(identifier);
    // Expose the call's parameter list so the actual-parameters parser fills it in.
    attributes.AddAttribute(procedureCallTree.ActualParameters, "PARMS");
    successfullyParsed &= ParserFactory.GetActualParametersParser().Parse(lexer, lexer.GetNextToken(), attributes);
    var procedure = environments.GetSubroutine(procedureCallTree.Reference);
    if (procedure != null)
    {
        // SEM: check that the number and types of the call's arguments match
        // the procedure's formal parameter definition.
        if (!FormalParametersParser.ValidateFormalParametersVsActualParameters(
            procedure.Environment.FormalParameters.ToList(),
            procedureCallTree.ActualParameters.Parameters))
        {
            LogInvalidParametersCountOrType(identifier, procedureCallToken);
            successfullyParsed = false;
        }
        // SEM: check that whole arrays are not passed as procedure arguments.
        if (ActualParametersParser.VerifyIfArraysArePassedAsParameters(environments,
            procedureCallTree.ActualParameters.Parameters))
        {
            LogArrayCantBeSubroutineParameter(procedureCallToken, procedure.Identifier);
            successfullyParsed = false;
        }
    }
    attributes.RemoveAttribute("PARMS");
    ValidateToken(lexer.GetCurrentToken(), TokenType.RightParenthesis);
    ValidateToken(lexer.GetNextToken(), TokenType.EndOfInstruction);
    attributes.AddAttribute(procedureCallTree, "AOC");
    return(successfullyParsed);
}
/// <summary>
/// Root parse entry point: creates the global environment stack, stores it
/// under "ENVS", delegates to the environment parser, then requires EOF.
/// Any exception is logged and reported as a failed parse.
/// </summary>
public bool Parse(Lexer lexer, Token token, Attributes attributes)
{
    var parsedOk = true;
    try
    {
        var environments = new Environments();
        environments.Push(new Environment() { IsGlobal = true });
        attributes.AddAttribute(environments, "ENVS");
        parsedOk &= ParserFactory.GetEnvironmentParser().Parse(lexer, token, attributes);
        ValidateToken(lexer.GetCurrentToken(), TokenType.EndOfFile);
    }
    catch (Exception ex)
    {
        Logger(ex.Message);
        parsedOk = false;
    }
    return parsedOk;
}
// Collector for the top-scoring terms seen during rewrite; capacity bounded
// by maxSize, results accumulated into the shared priority queue.
public TermCollectorAnonymousInnerClassHelper(int maxSize, JCG.PriorityQueue<ScoreTerm> stQueue)
{
    this.stQueue = stQueue;
    this.maxSize = maxSize;
    visitedTerms = new Dictionary<BytesRef, ScoreTerm>();
    maxBoostAtt = Attributes.AddAttribute<IMaxNonCompetitiveBoostAttribute>();
}
/// <summary>
/// Recursively walks an HtmlAgilityPack node, emitting SAX start/characters/end
/// events to the ContentHandler. Null and comment nodes are skipped entirely;
/// text nodes are emitted as an element wrapping their character content.
/// </summary>
private void TraverseNode(HtmlNode htmlNode)
{
    if (htmlNode == null)
    {
        return;
    }
    if (htmlNode.NodeType == HtmlNodeType.Comment)
    {
        return;
    }

    var saxAttributes = new Attributes();
    if (htmlNode.HasAttributes)
    {
        foreach (HtmlAttribute attr in htmlNode.Attributes)
        {
            saxAttributes.AddAttribute(null, htmlNode.Name, attr.Name, null, attr.Value);
        }
    }

    ContentHandler.StartElement(null, htmlNode.Name, htmlNode.Name, saxAttributes);

    if (htmlNode.NodeType == HtmlNodeType.Text)
    {
        ContentHandler.Characters(htmlNode.InnerText.ToCharArray(), 0, htmlNode.InnerText.Length);
    }
    else if (htmlNode.HasChildNodes)
    {
        foreach (HtmlNode child in htmlNode.ChildNodes)
        {
            TraverseNode(child);
        }
    }

    ContentHandler.EndElement(null, htmlNode.Name, htmlNode.Name);
}
/// <summary>
/// Parses an assignment to a simple identifier: verifies the target is an
/// assignable variable or parameter in scope, resolves its reference and type,
/// parses the right-hand expression and publishes the Assignment tree under
/// the "AOC" attribute key.
/// </summary>
/// <param name="identifier">Name of the assignment target.</param>
/// <param name="lexer">Token stream positioned at the assignment.</param>
/// <param name="attributes">Attribute bag; must contain "ENVS".</param>
/// <returns>true when the assignment parsed without semantic errors.</returns>
private bool ParseAssignment(string identifier, Lexer lexer, Attributes attributes)
{
    var successfullyParsed = true;
    var environments = attributes["ENVS"] as Environments;
    // SEM: validate that the target is a variable (constants are not assignable).
    if (environments.ExistsConstant(identifier))
    {
        LogConstantIsNotAssignable(lexer.GetCurrentToken());
        successfullyParsed = false;
    }
    else
    {
        // SEM: validate that the target exists in the current scope
        // (either as a variable or as a formal parameter).
        if (!environments.ExistsVariable(identifier) && !environments[0].ExistsParameter(identifier))
        {
            LogIdentifierNotFound(identifier, lexer.GetCurrentToken());
            successfullyParsed = false;
        }
    }
    var tree = new Assignment();
    var identifierExpression = new IdentifierExpression();
    identifierExpression.Name = identifier;
    if (environments.ExistsVariable(identifier))
    {
        // Resolve as a local/global variable.
        var variableRefence = environments.FindVariable(identifier);
        var variable = environments.GetLocal(variableRefence);
        identifierExpression.Reference = variableRefence;
        if (variable != null)
        {
            identifierExpression.Type = variable.Type;
        }
    }
    else
    {
        // Fall back to resolving as a formal parameter of the current subroutine.
        var parameterReference = environments[0].FindParameter(identifier);
        var parameter = environments[0].GetParameter(parameterReference);
        identifierExpression.Reference = parameterReference;
        if (parameter != null)
        {
            identifierExpression.Type = parameter.DataType;
        }
    }
    tree.Destination = identifierExpression;
    successfullyParsed &= ParseAssignedExpression(lexer, lexer.GetCurrentToken(), attributes, tree);
    attributes.AddAttribute(tree, "AOC");
    return(successfullyParsed);
}
// Collector for the top-scoring terms seen during a TopTermsRewrite pass;
// capacity bounded by maxSize, results accumulated into the shared queue.
public TermCollectorAnonymousInnerClassHelper(TopTermsRewrite<Q> outerInstance, int maxSize, PriorityQueue<ScoreTerm> stQueue)
{
    this.StQueue = stQueue;
    this.MaxSize = maxSize;
    this.OuterInstance = outerInstance;
    visitedTerms = new Dictionary<BytesRef, ScoreTerm>();
    maxBoostAtt = Attributes.AddAttribute<IMaxNonCompetitiveBoostAttribute>();
}
/// <summary>
/// Registers an attribute with the underlying attribute store. When this
/// instance has already been disposed the call is a no-op and the default
/// value for <typeparamref name="T"/> is returned instead.
/// </summary>
public T AddAttribute<T>(T attribute) where T : IAttribute
{
    return m_Dispose ? default(T) : m_Attributes.AddAttribute<T>(attribute);
}
/// <summary>
/// Constructor for enumeration of all terms from specified <c>reader</c> which share a prefix of
/// length <paramref name="prefixLength"/> with <paramref name="term"/> and which have a fuzzy similarity &gt;
/// <paramref name="minSimilarity"/>.
/// <para/>
/// After calling the constructor the enumeration is already pointing to the first
/// valid term if such a term exists.
/// </summary>
/// <param name="terms"> Delivers terms. </param>
/// <param name="atts"> <see cref="AttributeSource"/> created by the rewrite method of <see cref="MultiTermQuery"/>
/// thats contains information about competitive boosts during rewrite. It is also used
/// to cache DFAs between segment transitions. </param>
/// <param name="term"> Pattern term. </param>
/// <param name="minSimilarity"> Minimum required similarity for terms from the reader. Pass an integer value
/// representing edit distance. Passing a fraction is deprecated. </param>
/// <param name="prefixLength"> Length of required common prefix. Default value is 0. </param>
/// <param name="transpositions"> Transpositions </param>
/// <exception cref="IOException"> if there is a low-level IO error </exception>
public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, float minSimilarity, int prefixLength, bool transpositions)
{
    boostAtt = Attributes.AddAttribute<IBoostAttribute>();
    // Fractional similarities >= 1 are ambiguous (integer => edit count), so reject them.
    if (minSimilarity >= 1.0f && minSimilarity != (int)minSimilarity)
    {
        throw new ArgumentException("fractional edit distances are not allowed");
    }
    if (minSimilarity < 0.0f)
    {
        throw new ArgumentOutOfRangeException(nameof(minSimilarity), "minimumSimilarity cannot be less than 0"); // LUCENENET specific - changed from IllegalArgumentException to ArgumentOutOfRangeException (.NET convention)
    }
    if (prefixLength < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(prefixLength), "prefixLength cannot be less than 0"); // LUCENENET specific - changed from IllegalArgumentException to ArgumentOutOfRangeException (.NET convention)
    }
    this.m_terms = terms;
    this.term = term;

    // convert the string into a utf32 int[] representation for fast comparisons
    string utf16 = term.Text;
    this.m_termText = new int[utf16.CodePointCount(0, utf16.Length)];
    for (int cp, i = 0, j = 0; i < utf16.Length; i += Character.CharCount(cp))
    {
        m_termText[j++] = cp = utf16.CodePointAt(i);
    }
    this.m_termLength = m_termText.Length;
    this.dfaAtt = atts.AddAttribute<ILevenshteinAutomataAttribute>();

    //The prefix could be longer than the word.
    //It's kind of silly though. It means we must match the entire word.
    this.m_realPrefixLength = prefixLength > m_termLength ? m_termLength : prefixLength;
    // if minSimilarity >= 1, we treat it as number of edits
    if (minSimilarity >= 1f)
    {
        this.m_minSimilarity = 0; // just driven by number of edits
        m_maxEdits = (int)minSimilarity;
        m_raw = true;
    }
    else
    {
        this.m_minSimilarity = minSimilarity;
        // calculate the maximum k edits for this similarity
        m_maxEdits = InitialMaxDistance(this.m_minSimilarity, m_termLength);
        m_raw = false;
    }
    if (transpositions && m_maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE)
    {
        throw UnsupportedOperationException.Create("with transpositions enabled, distances > " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + " are not supported ");
    }
    this.transpositions = transpositions;
    this.m_scaleFactor = 1.0f / (1.0f - this.m_minSimilarity);
    // Seed the competitive-boost bottom from the shared attribute source, then
    // trigger the initial enum setup.
    this.maxBoostAtt = atts.AddAttribute<IMaxNonCompetitiveBoostAttribute>();
    bottom = maxBoostAtt.MaxNonCompetitiveBoost;
    bottomTerm = maxBoostAtt.CompetitiveTerm;
    BottomChanged(null, true);
}
// Stores the expression tree under the "EXP" key, overwriting any value that
// is already present.
private static void UpdateExpressionAttribute(Attributes attributes, Expression tree)
{
    if (!attributes.ContainsAttribute("EXP"))
    {
        attributes.AddAttribute(tree, "EXP");
    }
    else
    {
        attributes["EXP"] = tree;
    }
}
/// <summary>
/// Parses a function call expression: checks that the function exists in
/// scope, parses and validates the actual parameters against the formal
/// parameter list, sets the expression's type to the function's return type,
/// and yields the FunctionCallExpression through <paramref name="tree"/>.
/// </summary>
/// <param name="lexer">Token stream positioned at the call.</param>
/// <param name="attributes">Attribute bag; must contain "ENVS".</param>
/// <param name="identifierName">Name of the function being called.</param>
/// <param name="tree">Receives the parsed FunctionCallExpression.</param>
/// <returns>true when the call parsed without semantic errors.</returns>
private bool ParseFunctionCall(Lexer lexer, Attributes attributes, string identifierName, out Expression tree)
{
    var successfullyParsed = true;
    var functionCall = new FunctionCallExpression();
    var environments = attributes["ENVS"] as Environments;
    var functionCallToken = lexer.GetCurrentToken();
    // SEM: check that a function with the given identifier exists.
    if (!environments.ExistsFunction(identifierName))
    {
        LogFunctionNotFound(lexer.GetCurrentToken(), identifierName);
        successfullyParsed = false;
    }
    functionCall.Identifier = identifierName;
    functionCall.Reference = environments.FindFunction(identifierName);
    // Expose the call's parameter list so the actual-parameters parser fills it in.
    attributes.AddAttribute(functionCall.ActualParameters, "PARMS");
    successfullyParsed &= ParserFactory.GetActualParametersParser().Parse(lexer, lexer.GetNextToken(), attributes);
    var function = environments.GetSubroutine(functionCall.Reference) as Function;
    if (function != null)
    {
        // SEM: check that the number and types of the call's arguments match
        // the function's formal parameter definition.
        if (!FormalParametersParser.ValidateFormalParametersVsActualParameters(
            function.Environment.FormalParameters.ToList(),
            functionCall.ActualParameters.Parameters))
        {
            LogInvalidParametersCountOrType(identifierName, functionCallToken);
            successfullyParsed = false;
        }
        // SEM: check that whole arrays are not passed as function arguments.
        if (ActualParametersParser.VerifyIfArraysArePassedAsParameters(environments,
            functionCall.ActualParameters.Parameters))
        {
            LogArrayCantBeSubroutineParameter(functionCallToken, functionCall.Identifier);
            successfullyParsed = false;
        }
        functionCall.Type = function.ReturnType;
    }
    attributes.RemoveAttribute("PARMS");
    ValidateToken(lexer.GetCurrentToken(), TokenType.RightParenthesis);
    tree = functionCall;
    return(successfullyParsed);
}
// Filtered enum that accepts only terms matched by one of the compiled
// Levenshtein automata supplied by the outer FuzzyTermsEnum.
public AutomatonFuzzyTermsEnum(FuzzyTermsEnum outerInstance, TermsEnum tenum, CompiledAutomaton[] compiled)
    : base(tenum, false)
{
    this.outerInstance = outerInstance;
    boostAtt = Attributes.AddAttribute<IBoostAttribute>();
    termRef = new BytesRef(outerInstance.term.Text);
    this.matchers = new ByteRunAutomaton[compiled.Length];
    for (int idx = 0; idx < compiled.Length; idx++)
    {
        matchers[idx] = compiled[idx].RunAutomaton;
    }
}
/// <summary>
/// Parses a READ statement: validates the READ keyword, parses the target
/// expression, checks that it is an assignable (identifier or array position),
/// and publishes the Read tree under the "READ" attribute key.
/// </summary>
/// <returns>true when the statement parsed without semantic errors.</returns>
public bool Parse(Lexer lexer, Token token, Attributes attributes)
{
    var successfullyParsed = true;
    var tree = new Read();
    try
    {
        ValidateToken(token, TokenType.Read);
        token = lexer.GetNextToken();
        successfullyParsed &= ParserFactory.GetExpressionParser().Parse(lexer, token, attributes);
        // SEM: check that an expression was produced.
        if (attributes.ContainsAttribute("EXP"))
        {
            var exp = attributes["EXP"] as Expression;
            // SEM: check that the expression is not null.
            if (exp != null)
            {
                // SEM: check that the expression is an assignable target.
                if (!(exp is IdentifierExpression) && !(exp is PositionInArrayExpression))
                {
                    LogInvalidAssignableType(token);
                    successfullyParsed = false;
                }
                else
                {
                    tree.Destination = exp;
                    attributes.AddAttribute(tree, "READ");
                }
            }
        }
        else
        {
            LogNullExpression(token);
            successfullyParsed = false;
        }
        ValidateToken(lexer.GetCurrentToken(), TokenType.EndOfInstruction);
    }
    catch (Exception ex)
    {
        successfullyParsed = false;
        Logger(ex.Message);
        ErrorRecovery(lexer);
    }
    return(successfullyParsed);
}
/// <summary>
/// Parses a WRITE / WRITELN statement: collects the printable items and
/// publishes either a Show (WRITE) or ShowLn (WRITELN) tree under the
/// "SHOW" attribute key.
/// </summary>
public bool Parse(Lexer lexer, Token token, Attributes attributes)
{
    var parsedOk = true;
    var printables = new List<IPrintable>();
    try
    {
        ValidateToken(token, TokenType.Write, TokenType.WriteLine);
        parsedOk &= ParsePrintables(lexer, attributes, printables);
        if (token.Type != TokenType.WriteLine)
        {
            var show = new Show();
            show.Values = printables;
            attributes.AddAttribute(show, "SHOW");
        }
        else
        {
            var showLn = new ShowLn();
            showLn.Values = printables;
            attributes.AddAttribute(showLn, "SHOW");
        }
    }
    catch (Exception ex)
    {
        parsedOk = false;
        Logger(ex.Message);
        ErrorRecovery(lexer);
    }
    return parsedOk;
}
/// <summary>
/// A label that is neither an integer nor a string must be rejected with a
/// CoseException. The original test passed silently when no exception was
/// thrown; Assert.Fail now makes the missing exception a test failure.
/// </summary>
public void testAddAttribute_1()
{
    CBORObject label = CBORObject.FromObject(new byte[1]);
    CBORObject value = null;
    int where = Attributes.PROTECTED;
    Attributes instance = new Attributes();
    try
    {
        instance.AddAttribute(label, value, where);
        Assert.Fail("Expected CoseException was not thrown");
    }
    catch (CoseException e)
    {
        Assert.AreEqual(e.Message, "Labels must be integers or strings");
    }
}
/// <summary>
/// An invalid attribute location (0) must be rejected with a CoseException.
/// The original test passed silently when no exception was thrown;
/// Assert.Fail now makes the missing exception a test failure.
/// </summary>
public void testAddAttribute_2()
{
    CBORObject label = CBORObject.FromObject(1);
    CBORObject value = CBORObject.FromObject(2);
    int where = 0;
    Attributes instance = new Attributes();
    try
    {
        instance.AddAttribute(label, value, where);
        Assert.Fail("Expected CoseException was not thrown");
    }
    catch (CoseException e)
    {
        Assert.AreEqual(e.Message, "Invalid attribute location given");
    }
}
// An attribute must be findable after AddAttribute and gone after
// RemoveAttribute.
public void removeAttribute()
{
    Attributes attrs = new Attributes();
    attrs.AddAttribute(HeaderKeys.Algorithm, AlgorithmValues.AES_CBC_MAC_128_128, Attributes.PROTECTED);

    CBORObject found = attrs.FindAttribute(HeaderKeys.Algorithm);
    Assert.AreEqual(found, AlgorithmValues.AES_CBC_MAC_128_128);

    attrs.RemoveAttribute(HeaderKeys.Algorithm);

    found = attrs.FindAttribute(HeaderKeys.Algorithm);
    Assert.AreEqual(found, null);
}
/// <summary>
/// Constructor for enumeration of all terms from specified <c>reader</c> which share a prefix of
/// length <c>prefixLength</c> with <c>term</c> and which have a fuzzy similarity &gt;
/// <c>minSimilarity</c>.
/// <para/>
/// After calling the constructor the enumeration is already pointing to the first
/// valid term if such a term exists.
/// </summary>
/// <exception cref="IOException">If there is a low-level I/O error.</exception>
public LinearFuzzyTermsEnum(SlowFuzzyTermsEnum outerInstance)
    : base(outerInstance.m_terms.GetEnumerator())
{
    this.outerInstance = outerInstance;
    this.boostAtt = Attributes.AddAttribute<IBoostAttribute>();
    // Copy the pattern term's suffix (after the shared prefix) as UTF-32 code points.
    this.text = new int[outerInstance.m_termLength - outerInstance.m_realPrefixLength];
    System.Array.Copy(outerInstance.m_termText, outerInstance.m_realPrefixLength, text, 0, text.Length);
    // Seek to the common prefix so only candidate terms are enumerated.
    string prefix = UnicodeUtil.NewString(outerInstance.m_termText, 0, outerInstance.m_realPrefixLength);
    prefixBytesRef = new BytesRef(prefix);
    // Scratch rows (current/previous) for the edit-distance computation.
    this.d = new int[this.text.Length + 1];
    this.p = new int[this.text.Length + 1];
    SetInitialSeekTerm(prefixBytesRef);
}
/// <summary>
/// Parses an assignment whose target is an array position (identifier[index]):
/// parses the index expression, the closing bracket and the assigned value,
/// then publishes the Assignment tree under the "AOC" attribute key.
/// </summary>
private bool ParseAssignmentToArray(string identifier, Lexer lexer, Attributes attributes)
{
    var assignment = new Assignment();
    var expressionParser = ParserFactory.GetExpressionParser() as ExpressionParser;
    Expression positionExpression;
    var parsedOk = expressionParser.ParsePositionInArray(lexer, attributes, identifier, out positionExpression);
    ValidateToken(lexer.GetCurrentToken(), TokenType.RightSquareBracket);
    assignment.Destination = positionExpression;
    parsedOk &= ParseAssignedExpression(lexer, lexer.GetNextToken(), attributes, assignment);
    attributes.AddAttribute(assignment, "AOC");
    return parsedOk;
}
/// <summary>
/// Walks an HtmlAgilityPack node depth-first and forwards it to the
/// ContentHandler as SAX events: StartElement, Characters for text nodes,
/// recursion for element children, then EndElement. Null and comment nodes
/// produce no events.
/// </summary>
private void TraverseNode(HtmlNode htmlNode)
{
    bool skip = htmlNode == null || htmlNode.NodeType == HtmlNodeType.Comment;
    if (skip)
    {
        return;
    }

    var saxAttrs = new Attributes();
    if (htmlNode.HasAttributes)
    {
        foreach (HtmlAttribute a in htmlNode.Attributes)
        {
            saxAttrs.AddAttribute(null, htmlNode.Name, a.Name, null, a.Value);
        }
    }

    ContentHandler.StartElement(null, htmlNode.Name, htmlNode.Name, saxAttrs);

    if (htmlNode.NodeType == HtmlNodeType.Text)
    {
        ContentHandler.Characters(htmlNode.InnerText.ToCharArray(), 0, htmlNode.InnerText.Length);
    }
    else if (htmlNode.HasChildNodes)
    {
        foreach (HtmlNode child in htmlNode.ChildNodes)
        {
            TraverseNode(child);
        }
    }

    ContentHandler.EndElement(null, htmlNode.Name, htmlNode.Name);
}
/// <summary>
/// Sets an attribute and its value into an <see cref="Sax.IAttributes"/> object.
/// Attempts to set a namespace declaration (xmlns / xmlns:*) are ignored.
/// A new attribute is appended; an attribute already present (matched by Qname)
/// is updated in place. Non-CDATA values are whitespace-normalized.
/// </summary>
/// <param name="atts">The <see cref="Sax.Helpers.Attributes"/> object</param>
/// <param name="name">The name (Qname) of the attribute</param>
/// <param name="type">The type of the attribute; null defaults to the existing
/// type for updates, or "CDATA" for new attributes</param>
/// <param name="value">The value of the attribute</param>
public virtual void SetAttribute(Attributes atts, string name, string type, string value)
{
    // Namespace declarations are not stored as regular attributes.
    if (name.Equals("xmlns") || name.StartsWith("xmlns:"))
    {
        return;
    }
    string ns = GetNamespace(name, true);
    string localName = GetLocalName(name);
    int i = atts.GetIndex(name);
    if (i == -1)
    {
        // New attribute: intern the Qname and default the type to CDATA.
        name = name.Intern();
        if (type == null)
        {
            type = "CDATA";
        }
        // Per XML attribute-value normalization, non-CDATA values are normalized.
        if (!type.Equals("CDATA"))
        {
            value = Normalize(value);
        }
        atts.AddAttribute(ns, localName, name, type, value);
    }
    else
    {
        // Existing attribute: keep its declared type unless one was supplied.
        if (type == null)
        {
            type = atts.GetType(i);
        }
        if (!type.Equals("CDATA"))
        {
            value = Normalize(value);
        }
        atts.SetAttribute(i, ns, localName, name, type, value);
    }
}
// Range-filtered term enum (bounds inclusive on both ends) that also exposes
// a boost attribute for the rewrite machinery.
public TermRangeTermsEnumAnonymousInnerClassHelper(MultiTermQueryAnonymousInnerClassHelper outerInstance, TermsEnum iterator, BytesRef bref1, BytesRef bref2)
    : base(iterator, bref1, bref2, true, true)
{
    boostAtt = Attributes.AddAttribute<IBoostAttribute>();
    this.OuterInstance = outerInstance;
}
/// <summary>
/// Parses an IF statement: validates the boolean condition, parses the THEN
/// body (and optional ELSE body) in a child attribute scope, and publishes
/// either an IfThen or IfThenElse tree under the "IF" attribute key.
/// </summary>
/// <returns>true when the statement parsed without semantic errors.</returns>
public bool Parse(Lexer lexer, Token token, Attributes attributes)
{
    var successfullyParsed = true;
    Expression condition = null;
    try
    {
        ValidateToken(token, TokenType.ConditionalIf);
        // Parse the condition in a copy of the attribute bag so "EXP" does not
        // leak into the caller's attributes.
        var expressionAttributes = Attributes.Create(attributes.ToArray());
        successfullyParsed &= ParserFactory.GetExpressionParser().Parse(lexer, lexer.GetNextToken(), expressionAttributes);
        // SEM: the condition of an IF cannot be a null expression.
        if (expressionAttributes.ContainsAttribute("EXP"))
        {
            condition = expressionAttributes["EXP"] as Expression;
            // SEM: the condition of an IF cannot be a null expression.
            if (condition != null)
            {
                // SEM: the condition of an IF may only be a boolean expression.
                if (condition.Type != DataType.Boolean)
                {
                    LogTypeExpressionInvalid(token, DataType.Boolean, DataType.Integer);
                    successfullyParsed = false;
                }
            }
            else
            {
                LogNullExpression(token, DataType.Boolean);
                successfullyParsed = false;
            }
        }
        else
        {
            LogNullExpression(token, DataType.Boolean);
            successfullyParsed = false;
        }
        ValidateToken(lexer.GetCurrentToken(), TokenType.ConditionalThen);
        // Parse the THEN body with its own statement list.
        var thenBodyAttributes = Attributes.Create(attributes["ENVS"], "ENVS");
        var thenStatements = new Statements();
        thenBodyAttributes.AddAttribute(thenStatements, "STMS");
        successfullyParsed &= ParserFactory.GetBodyParser().Parse(lexer, lexer.GetNextToken(), thenBodyAttributes);
        token = lexer.GetCurrentToken();
        if (token.Is(TokenType.ConditionalEnd))
        {
            // IF ... THEN ... END: build an IfThen node.
            ValidateToken(lexer.GetNextToken(), TokenType.EndOfInstruction);
            var tree = new IfThen();
            tree.Condition = condition;
            tree.Statements = thenStatements;
            attributes.AddAttribute(tree, "IF");
        }
        else if (token.Is(TokenType.ConditionalElse))
        {
            // IF ... THEN ... ELSE ... END: parse the ELSE body and build IfThenElse.
            var elseBodyAttributes = Attributes.Create(attributes["ENVS"], "ENVS");
            var elseStatements = new Statements();
            elseBodyAttributes.AddAttribute(elseStatements, "STMS");
            successfullyParsed &= ParserFactory.GetBodyParser().Parse(lexer, lexer.GetNextToken(), elseBodyAttributes);
            ValidateToken(lexer.GetCurrentToken(), TokenType.ConditionalEnd);
            ValidateToken(lexer.GetNextToken(), TokenType.EndOfInstruction);
            var tree = new IfThenElse();
            tree.Condition = condition;
            tree.Then = thenStatements;
            tree.Else = elseStatements;
            attributes.AddAttribute(tree, "IF");
        }
    }
    catch (Exception ex)
    {
        successfullyParsed = false;
        Logger(ex.Message);
        ErrorRecovery(lexer);
    }
    return(successfullyParsed);
}
/// <summary>
/// Parses a WHILE statement: validates the boolean condition, parses the loop
/// body in a child attribute scope, and publishes the While tree under the
/// "WHILE" attribute key.
/// </summary>
/// <returns>true when the statement parsed without semantic errors.</returns>
public bool Parse(Lexer lexer, Token token, Attributes attributes)
{
    var successfullyParsed = true;
    try
    {
        var tree = new While();
        ValidateToken(token, TokenType.IterationWhile);
        // Parse the condition in a copy of the attribute bag so "EXP" does not
        // leak into the caller's attributes.
        var expressionAttributes = Attributes.Create(attributes.ToArray());
        successfullyParsed &= ParserFactory.GetExpressionParser().Parse(lexer, lexer.GetNextToken(), expressionAttributes);
        // SEM: the condition of a WHILE cannot be a null expression.
        if (expressionAttributes.ContainsAttribute("EXP"))
        {
            tree.Condition = expressionAttributes["EXP"] as Expression;
            // SEM: the condition of a WHILE cannot be a null expression.
            if (tree.Condition != null)
            {
                // SEM: the condition of a WHILE may only be a boolean expression.
                if (tree.Condition.Type != DataType.Boolean)
                {
                    LogTypeExpressionInvalid(token, DataType.Boolean, DataType.Integer);
                    successfullyParsed = false;
                }
            }
            else
            {
                LogNullExpression(token, DataType.Boolean);
                successfullyParsed = false;
            }
        }
        else
        {
            LogNullExpression(token, DataType.Boolean);
            successfullyParsed = false;
        }
        ValidateToken(lexer.GetCurrentToken(), TokenType.IterationDo);
        // Parse the loop body with its own statement list.
        var bodyAttributes = Attributes.Create(attributes["ENVS"], "ENVS");
        bodyAttributes.AddAttribute(tree.Statements, "STMS");
        successfullyParsed &= ParserFactory.GetBodyParser().Parse(lexer, lexer.GetNextToken(), bodyAttributes);
        ValidateToken(lexer.GetCurrentToken(), TokenType.IterationEnd);
        ValidateToken(lexer.GetNextToken(), TokenType.EndOfInstruction);
        attributes.AddAttribute(tree, "WHILE");
    }
    catch (Exception ex)
    {
        successfullyParsed = false;
        Logger(ex.Message);
        ErrorRecovery(lexer);
    }
    return(successfullyParsed);
}
/// <summary>
/// Copies attribute entries from a CBOR test-vector item map into
/// <paramref name="msg"/>, or directly into <paramref name="map"/> when
/// <paramref name="destination"/> is 4. Keys ending in "_hex" are renamed
/// without the suffix and their values decoded from hex; "comment" entries
/// are skipped; "op time" values are converted to Unix epoch seconds.
/// </summary>
/// <param name="msg">Target attribute set for destinations 0-2.</param>
/// <param name="map">Raw target map used when destination == 4.</param>
/// <param name="items">Source CBOR map of key/value items.</param>
/// <param name="destination">0 = PROTECTED, 1 = UNPROTECTED, 2 = DO_NOT_SEND,
/// 4 = raw map; other values silently drop the entry.</param>
static void _AddAttributes(Attributes msg, CBORObject map, CBORObject items, int destination)
{
    foreach (CBORObject cborKey2 in items.Keys)
    {
        CBORObject cborValue = items[cborKey2];
        CBORObject cborKey = cborKey2;
        string strKey = cborKey.AsString();
        // "<name>_hex" entries: strip the suffix and decode the hex-string value.
        if ((strKey.Length > 4) && (strKey.Substring(strKey.Length - 4, 4) == "_hex"))
        {
            cborKey = CBORObject.FromObject(strKey.Substring(0, strKey.Length - 4));
            cborValue = CBORObject.FromObject(FromHex(cborValue.AsString()));
        }
        if (cborKey.AsString() == "comment")
        {
            continue;
        }
        // NOTE(review): the binFromText label below marks only a `break`, so every
        // `goto binFromText` case is currently a no-op. Presumably a text-to-binary
        // conversion of cborValue was intended at that label — verify against the
        // reference COSE examples implementation.
        switch (cborKey.AsString())
        {
            case "alg":
                break;
            case "kid":
            binFromText:
                break;
            case "epk":
                break;
            case "spk":
                break;
            case "salt":
                goto binFromText;
            case "apu_id":
                goto binFromText;
            case "apv_id":
                goto binFromText;
            case "apu_nonce":
                goto binFromText;
            case "apv_nonce":
                goto binFromText;
            case "apu_other":
                goto binFromText;
            case "apv_other":
                goto binFromText;
            case "pub_other":
                goto binFromText;
            case "priv_other":
                goto binFromText;
            case "spk_kid":
                goto binFromText;
            case "IV":
                goto binFromText;
            case "partialIV":
                goto binFromText;
            case "crit":
                break;
            case "op time":
            {
                // Convert the date string to seconds since the Unix epoch (UTC).
                DateTime when = DateTime.Parse(cborValue.AsString());
                cborValue = CBORObject.FromObject((long)(when - new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).TotalSeconds);
            }
            break;
            case "ctyp":
                break;
            case "x5u":
                break;
            case "x5u-sender":
                break;
            default:
                break;
        }
        // Route the (possibly transformed) pair to the requested destination.
        switch (destination)
        {
            case 0:
                msg.AddAttribute(cborKey, cborValue, Attributes.PROTECTED);
                break;
            case 1:
                msg.AddAttribute(cborKey, cborValue, Attributes.UNPROTECTED);
                break;
            case 2:
                msg.AddAttribute(cborKey, cborValue, Attributes.DO_NOT_SEND);
                break;
            case 4:
                map[cborKey] = cborValue;
                break;
        }
    }
}
// Deferred field initialization: registers the boost attribute on the shared
// attribute source (cannot run at field-initializer time).
private void InitializeInstanceFields()
{
    boostAtt = Attributes.AddAttribute <IBoostAttribute>();
}
// Deferred field initialization: registers the boost attribute on the shared
// attribute source (cannot run at field-initializer time); virtual so derived
// enums can extend the setup.
internal virtual void InitializeInstanceFields()
{
    boostAtt = Attributes.AddAttribute <IBoostAttribute>();
}
/// <summary>
/// Recursively converts an HtmlAgilityPack subtree into SAX events. Only
/// element nodes produce StartElement/EndElement; their text children are
/// forwarded as Characters events before being traversed. Non-element nodes
/// emit nothing themselves and are transparent: their children are traversed
/// directly. (Removed a large block of commented-out dead code and disabled
/// Debug.WriteLine lines; runtime behavior is unchanged.)
/// </summary>
private void TraverseNode(HtmlNode htmlNode)
{
    if (htmlNode.NodeType == HtmlNodeType.Element)
    {
        var attributes = new Attributes();
        if (htmlNode.HasAttributes)
        {
            foreach (HtmlAttribute attribute in htmlNode.Attributes)
            {
                attributes.AddAttribute(null, htmlNode.Name, attribute.Name, null, attribute.Value);
            }
        }
        ContentHandler.StartElement(null, htmlNode.Name, htmlNode.Name, attributes);
        if (htmlNode.HasChildNodes)
        {
            foreach (HtmlNode childNode in htmlNode.ChildNodes)
            {
                // Text directly under an element becomes character content.
                if (childNode.NodeType == HtmlNodeType.Text)
                {
                    ContentHandler.Characters(childNode.InnerText.ToCharArray(), 0, childNode.InnerText.Length);
                }
                TraverseNode(childNode);
            }
        }
        ContentHandler.EndElement(null, htmlNode.Name, htmlNode.Name);
    }
    else
    {
        if (htmlNode.HasChildNodes)
        {
            foreach (HtmlNode childNode in htmlNode.ChildNodes)
            {
                TraverseNode(childNode);
            }
        }
    }
}