// Verifies that ReadDottedIdentifier returns the complete dotted identifier text.
public void ShouldReturnStringIdentifierWhenGivenIdentifierTokenContainingDot()
{
    var sut = new ExpressionLexer("m.i.something.k", true, false);

    string dottedIdentifier = sut.ReadDottedIdentifier(false);

    Assert.Equal("m.i.something.k", dottedIdentifier);
}
// PeekNextToken must reject input whose first character is not a valid token start.
public void PeekingShouldThrowWhenIncorrectCharacterAtStart()
{
    var sut = new ExpressionLexer("#$*@#", false, false);

    Action act = () => sut.PeekNextToken();

    act.Throws<ODataException>(ODataErrorStrings.ExpressionLexer_InvalidCharacter("#", "0", "#$*@#"));
}
// Lexes, compiles, and evaluates the given expression string, returning its numeric result.
private double CalculateString(string expression)
{
    var compiler = new ExpressionCompiler(new ExpressionLexer(new StringReader(expression)));
    return compiler.GetExpression().Compile()();
}
/// <summary>
/// Create a new FunctionCallParser.
/// </summary>
/// <param name="lexer">Lexer positioned at a function identifier.</param>
/// <param name="parser">The UriQueryExpressionParser.</param>
public FunctionCallParser(ExpressionLexer lexer, UriQueryExpressionParser parser)
{
    ExceptionUtils.CheckArgumentNotNull(lexer, "lexer");
    ExceptionUtils.CheckArgumentNotNull(parser, "parser");

    this.parser = parser;
    this.lexer = lexer;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="maxDepth">The maximum depth of each part of the query - a recursion limit.</param>
/// <param name="lexer">The ExpressionLexer containing text to be parsed.</param>
internal UriQueryExpressionParser(int maxDepth, ExpressionLexer lexer)
{
    Debug.Assert(maxDepth >= 0, "maxDepth >= 0");
    Debug.Assert(lexer != null, "lexer != null");

    this.lexer = lexer;
    this.maxDepth = maxDepth;
}
// ValidateToken must succeed silently when the current token matches the expected kind.
public void ShouldNotThrowWhenCurrentTokenIsExpressionKind()
{
    var sut = new ExpressionLexer("null", true, false);

    Action act = () => sut.ValidateToken(ExpressionTokenKind.NullLiteral);

    act.ShouldNotThrow();
}
// ValidateToken must raise a syntax error when the current token kind does not match.
public void ShouldThrowWhenCurrentTokenIsNotExpressionKind()
{
    var sut = new ExpressionLexer("null", true, false);

    Action act = () => sut.ValidateToken(ExpressionTokenKind.Question);

    act.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_SyntaxError(4, "null"));
}
// ReadDottedIdentifier(acceptStar: false) must reject '*' segments.
public void ShouldThrowWhenGivenStarInDontAcceptStarMode()
{
    ExpressionLexer lexer = new ExpressionLexer("m.*", true, false);
    Action read = () => lexer.ReadDottedIdentifier(false);

    // Fix: the expected message was being passed as FluentAssertions' "because" reason
    // argument, so it was never actually compared against the thrown exception.
    // Chain .WithMessage(...) (as the sibling tests do) so the message is asserted.
    read.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_SyntaxError("2", "m.*"));
}
// A '*' is only legal as the final segment of a dotted identifier.
public void StarMustBeLastTokenInDottedIdentifier()
{
    ExpressionLexer lexer = new ExpressionLexer("m.*.blah", true, false);
    Action read = () => lexer.ReadDottedIdentifier(true);

    // Fix: the expected message was being passed as FluentAssertions' "because" reason
    // argument, so it was never actually compared against the thrown exception.
    // Chain .WithMessage(...) (as the sibling tests do) so the message is asserted.
    read.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_SyntaxError("2", "m.*.blah"));
}
// With acceptStar enabled, a trailing '*' is part of the dotted identifier.
public void ShouldNotThrowWhenGivenStarInAcceptStarMode()
{
    var sut = new ExpressionLexer("m.*", true, false);

    string dottedIdentifier = sut.ReadDottedIdentifier(true);

    dottedIdentifier.Should().Be("m.*");
}
// ReadDottedIdentifier must fail when the current token is not an identifier.
public void ShouldThrowWhenNotGivenIdentifierToken()
{
    var sut = new ExpressionLexer("2.43", false, false);

    Action act = () => sut.ReadDottedIdentifier(false);

    act.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_SyntaxError("0", "2.43"));
}
// Leading whitespace before the identifier must not appear in the returned text.
public void ShouldReturnStringIdentifierWhenGivenIdentifierTokenContainingWhitespace()
{
    var sut = new ExpressionLexer(" m.i.something.k", true, false);

    string dottedIdentifier = sut.ReadDottedIdentifier(false);

    dottedIdentifier.Should().Be("m.i.something.k");
}
/// <summary>
/// Runs SelectExpandTermParser.ParseTerm() on a $expand term and hands the lexer back
/// to the caller for positional verification.
/// </summary>
private PathSegmentToken ParseExpandTerm(string term, out ExpressionLexer lexer)
{
    lexer = new ExpressionLexer(term, true /*moveToFirstToken*/, true /*useSemicolonDelimiter*/);
    SelectExpandTermParser termParser = new SelectExpandTermParser(lexer, 100 /*maxPathLength*/, false /*isSelect*/);
    return termParser.ParseTerm();
}
// A bare small integer literal is read as System.Int32.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenInt()
{
    var sut = new ExpressionLexer("5", false, false);

    object literal = sut.ReadLiteralToken();

    literal.Should().BeOfType<int>().And.Be(5);
}
// An ISO-style date literal without a type suffix is read as a Date value.
public void ShouldReturnDateLiteralWhenNoSuffixDateLiteralToken()
{
    var sut = new ExpressionLexer("2014-09-19", false, false);

    object literal = sut.ReadLiteralToken();

    literal.Should().BeOfType<Date>().And.Be(new Date(2014, 9, 19));
}
// A dot-free identifier is returned unchanged by ReadDottedIdentifier.
public void ShouldReturnStringIdentifierWhenGivenIdentifierToken()
{
    var sut = new ExpressionLexer("misomethingk", true, false);

    string identifier = sut.ReadDottedIdentifier(false);

    identifier.Should().Be("misomethingk");
}
// A fractional literal with high precision and no suffix is read as decimal.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenDecimal()
{
    var sut = new ExpressionLexer("3258.678765765489753678965390", false, false);

    object literal = sut.ReadLiteralToken();

    literal.Should().BeOfType<decimal>().And.Be(3258.678765765489753678965390m);
}
// ReadLiteralToken must reject input that is not a literal.
public void ShouldThrowWhenNotLiteralToken()
{
    var sut = new ExpressionLexer("potato", false, false);

    Action act = () => sut.ReadLiteralToken();

    act.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_ExpectedLiteralToken("potato"));
}
// "hh:mm:ss" with no fractional seconds is read as a TimeOfDay value.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenShortTimeOfDay()
{
    var sut = new ExpressionLexer("12:30:03", false, false);

    object literal = sut.ReadLiteralToken();

    literal.Should().BeOfType<TimeOfDay>().And.Be((new TimeOfDay(12, 30, 3, 0)));
}
// int.MaxValue followed by "000" no longer fits in an int, so the lexer must widen to long.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenLong()
{
    var sut = new ExpressionLexer(int.MaxValue + "000", false, false);

    object literal = sut.ReadLiteralToken();

    literal.Should().BeOfType<long>().And.Be(((long)int.MaxValue) * 1000);
}
// NextToken must reject input whose first character is not a valid token start.
public void ShouldThrowWhenIncorrectCharacterAtStart()
{
    var sut = new ExpressionLexer("#$*@#", false, false);

    Action act = () => sut.NextToken();

    act.ShouldThrow<ODataException>().WithMessage(ODataErrorStrings.ExpressionLexer_InvalidCharacter("#", "0", "#$*@#"));
}
// Test helper: builds a FunctionCallParser over the given expression, sharing one lexer
// between the call parser and its backing UriQueryExpressionParser.
private static FunctionCallParser GetFunctionCallParser(string expression)
{
    var sharedLexer = new ExpressionLexer(expression, true, false);
    return new FunctionCallParser(sharedLexer, new UriQueryExpressionParser(345, sharedLexer));
}
/// <summary>
/// Rewrites any parameter token whose literal value text is a bracketed expression
/// ({...} or [...]) into a token holding the parsed complex/collection value;
/// all other tokens are passed through unchanged.
/// </summary>
/// <param name="model">The model used when converting bracketed literals.</param>
/// <param name="operation">The operation whose parameters are being processed.</param>
/// <param name="parameterTokens">The raw parameter tokens to inspect.</param>
/// <param name="enableCaseInsensitive">Whether parameter names may be resolved case-insensitively.</param>
/// <param name="enableUriTemplateParsing">Whether Uri template literals should be tried first.</param>
/// <returns>The (partially) parsed parameter tokens, one per input token.</returns>
private static ICollection<FunctionParameterToken> HandleComplexOrCollectionParameterValueIfExists(IEdmModel model, IEdmOperation operation, ICollection<FunctionParameterToken> parameterTokens, bool enableCaseInsensitive, bool enableUriTemplateParsing = false)
{
    ICollection<FunctionParameterToken> partiallyParsedParametersWithComplexOrCollection = new Collection<FunctionParameterToken>();
    foreach (FunctionParameterToken paraToken in parameterTokens)
    {
        FunctionParameterToken funcParaToken;
        IEdmOperationParameter functionParameter = operation.FindParameter(paraToken.ParameterName);
        if (enableCaseInsensitive && functionParameter == null)
        {
            // Fall back to case-insensitive resolution and normalize the token to the declared name.
            functionParameter = ODataUriResolver.ResolveOpearationParameterNameCaseInsensitive(operation, paraToken.ParameterName);

            // The functionParameter can not be null here, else this method won't be called.
            funcParaToken = new FunctionParameterToken(functionParameter.Name, paraToken.ValueToken);
        }
        else
        {
            funcParaToken = paraToken;
        }

        FunctionParameterAliasToken aliasToken = funcParaToken.ValueToken as FunctionParameterAliasToken;
        if (aliasToken != null)
        {
            // Record the declared parameter type so the alias value can be parsed later.
            aliasToken.ExpectedParameterType = functionParameter.Type;
        }

        LiteralToken valueToken = funcParaToken.ValueToken as LiteralToken;
        string valueStr = null;
        // Only string-valued literals with original text are candidates for bracketed parsing.
        if (valueToken != null && (valueStr = valueToken.Value as string) != null && !string.IsNullOrEmpty(valueToken.OriginalText))
        {
            var lexer = new ExpressionLexer(valueToken.OriginalText, true /*moveToFirstToken*/, false /*useSemicolonDelimiter*/, true /*parsingFunctionParameters*/);
            if (lexer.CurrentToken.Kind == ExpressionTokenKind.BracketedExpression)
            {
                object result;
                UriTemplateExpression expression;
                if (enableUriTemplateParsing && UriTemplateParser.TryParseLiteral(lexer.CurrentToken.Text, functionParameter.Type, out expression))
                {
                    result = expression;
                }
                else
                {
                    // ExpressionTokenKind.BracketedExpression means text like
                    // [{"Street":"NE 24th St.","City":"Redmond"},{"Street":"Pine St.","City":"Seattle"}]
                    // so now try to convert it into a complex or collection type value:
                    result = ODataUriUtils.ConvertFromUriLiteral(valueStr, ODataVersion.V4, model, functionParameter.Type);
                }

                // Replace the raw-text token with one carrying the parsed value, keeping the original text.
                LiteralToken newValueToken = new LiteralToken(result, valueToken.OriginalText);
                FunctionParameterToken newFuncParaToken = new FunctionParameterToken(funcParaToken.ParameterName, newValueToken);
                partiallyParsedParametersWithComplexOrCollection.Add(newFuncParaToken);
                continue;
            }
        }

        // Not a bracketed literal: pass the token through unchanged.
        partiallyParsedParametersWithComplexOrCollection.Add(funcParaToken);
    }

    return(partiallyParsedParametersWithComplexOrCollection);
}
// int.MaxValue followed by "000" no longer fits in an int, so the lexer must widen to long.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenLong()
{
    var sut = new ExpressionLexer(int.MaxValue + "000", false, false);

    object literal = sut.ReadLiteralToken();

    long longValue = Assert.IsType<long>(literal);
    Assert.Equal(((long)int.MaxValue) * 1000, longValue);
}
// A fractional literal with high precision and no suffix is read as decimal.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenDecimal()
{
    var sut = new ExpressionLexer("3258.678765765489753678965390", false, false);

    object literal = sut.ReadLiteralToken();

    decimal decimalValue = Assert.IsType<decimal>(literal);
    Assert.Equal(3258.678765765489753678965390m, decimalValue);
}
// Peeking returns the upcoming token without advancing the lexer's current token.
public void ShouldOutputTokenWhenNoError()
{
    var sut = new ExpressionLexer("null", false, false);

    ExpressionToken peeked = sut.PeekNextToken();

    Assert.NotEqual(peeked, sut.CurrentToken);
    Assert.Equal(ExpressionTokenKind.NullLiteral, peeked.Kind);
}
// A dotted name followed by '(' expands into a single function-identifier token at position 0.
public void ExpandIdAsFunctionWithDot()
{
    var sut = new ExpressionLexer("id1.id2.id3(", moveToFirstToken: true, useSemicolonDelimiter: false);

    Assert.True(sut.ExpandIdentifierAsFunction());

    Assert.Equal("id1.id2.id3", sut.CurrentToken.Text);
    Assert.Equal(0, sut.CurrentToken.Position);
}
// Expansion fails when the dotted sequence does not end with an identifier,
// and the lexer stays on the first identifier.
public void ExpandIdAsFunctionFail_DoesNotEndWithId()
{
    var sut = new ExpressionLexer("id1.(", moveToFirstToken: true, useSemicolonDelimiter: false);

    Assert.False(sut.ExpandIdentifierAsFunction());

    Assert.Equal("id1", sut.CurrentToken.Text);
    Assert.Equal(0, sut.CurrentToken.Position);
}
// "hh:mm:ss" with no fractional seconds is read as a TimeOfDay value.
public void ShouldReturnLiteralWhenNoSuffixLiteralTokenShortTimeOfDay()
{
    var sut = new ExpressionLexer("12:30:03", false, false);

    object literal = sut.ReadLiteralToken();

    TimeOfDay timeOfDay = Assert.IsType<TimeOfDay>(literal);
    Assert.Equal(new TimeOfDay(12, 30, 3, 0), timeOfDay);
}
// Whitespace inside the dotted sequence prevents expansion; the lexer stays on "id1".
public void ExpandIdAsFunctionFail_WhitespaceInBetween()
{
    var sut = new ExpressionLexer("id1.id2 .id3(", moveToFirstToken: true, useSemicolonDelimiter: false);

    Assert.False(sut.ExpandIdentifierAsFunction());

    Assert.Equal("id1", sut.CurrentToken.Text);
    Assert.Equal(0, sut.CurrentToken.Position);
}
/// <summary>
/// Parse an Identifier into the right QueryToken.
/// </summary>
/// <param name="parameters">Parameters passed in to the UriQueryExpressionParser.</param>
/// <param name="functionCallParser">Object to use to handle parsing function calls.</param>
public IdentifierTokenizer(HashSet<string> parameters, IFunctionCallParser functionCallParser)
{
    ExceptionUtils.CheckArgumentNotNull(parameters, "parameters");
    ExceptionUtils.CheckArgumentNotNull(functionCallParser, "functionCallParser");

    this.functionCallParser = functionCallParser;
    this.parameters = parameters;
    // Share the function-call parser's lexer rather than taking one separately.
    this.lexer = functionCallParser.Lexer;
}
// Parses the entire source string into an Expression, requiring all input to be consumed.
public static Expression Parse(string source)
{
    var tokenizer = new ExpressionLexer(source);
    Expression parsed = ParseTernary(tokenizer);
    // Anything left over after the top-level expression is an error.
    tokenizer.GetNextToken(ExprTokenType.EOF);
    parsed.Source = source;
    return parsed;
}
// A bracketed complex value passed as a function parameter must bind to an ODataComplexValue.
public void FunctionParameterParserShouldSupportBracketedExpressionsInFilterOrderby()
{
    var sut = new ExpressionLexer("address={\'City\' : \'Seattle\'})", true, false, false);

    ICollection<NamedFunctionParameterNode> boundParameters;
    TryParseFunctionParameters(sut, null, out boundParameters).Should().BeTrue();

    boundParameters.Should().HaveCount(1);
    NamedFunctionParameterNode addressParameter = boundParameters.Single();
    addressParameter.Name.Should().Be("address");
    addressParameter.Value.As<ConstantNode>().Value.Should().BeOfType<ODataComplexValue>();
}
/// <summary>
/// Parse expression text into Token.
/// </summary>
/// <param name="expressionText">The expression string to Parse.</param>
/// <returns>The lexical token representing the expression text.</returns>
internal QueryToken ParseSearch(string expressionText)
{
    Debug.Assert(expressionText != null, "expressionText != null");

    this.recursionDepth = 0;
    this.lexer = new SearchLexer(expressionText);
    QueryToken parsed = this.ParseExpression();
    // The whole input must have been consumed by the expression.
    this.lexer.ValidateToken(ExpressionTokenKind.End);
    return parsed;
}
// Parses a single factor: a number or string literal, a symbol (optionally a function
// call and/or a dotted context expression), a parenthesized expression, or a unary
// Minus/NOT applied to another factor. Throws ParseException on anything else.
private static Expression ParseFactor(ExpressionLexer lexer)
{
    ExprTokenType tokenType = lexer.PeekNextToken();

    if (tokenType == ExprTokenType.Number)
        return new PrimitiveExpression((int)lexer.GetNextToken(ExprTokenType.Number));

    if (tokenType == ExprTokenType.String)
        return new PrimitiveExpression((string)lexer.GetNextToken(ExprTokenType.String));

    if (tokenType == ExprTokenType.Symbol)
    {
        string symbol = (string) lexer.GetNextToken(ExprTokenType.Symbol);

        // "symbol.<factor>" — evaluate the trailing factor in the symbol's context.
        if (lexer.PeekNextToken() == ExprTokenType.Dot)
        {
            lexer.GetNextToken(ExprTokenType.Dot);
            Expression exprInContext = ParseFactor(lexer);
            return new ContextExpression(symbol, exprInContext, new Expression[0]);
        }

        // "symbol(args...)" — function call, optionally followed by ".<factor>".
        if (lexer.PeekNextToken() == ExprTokenType.Open)
        {
            lexer.GetNextToken(ExprTokenType.Open);
            var parameters = new List<Expression>();
            while (lexer.PeekNextToken() != ExprTokenType.Close)
            {
                // A comma is required between arguments but not before the first one.
                if (parameters.Count > 0)
                    lexer.GetNextToken(ExprTokenType.Comma);
                parameters.Add(ParseCondCombo(lexer));
            }

            lexer.GetNextToken(ExprTokenType.Close);

            if (lexer.PeekNextToken() == ExprTokenType.Dot)
            {
                lexer.GetNextToken(ExprTokenType.Dot);
                Expression exprInContext = ParseFactor(lexer);
                return new ContextExpression(symbol, exprInContext, parameters.ToArray());
            }

            return new FunctionExpression(symbol, parameters.ToArray());
        }

        // Bare symbol reference.
        return new SymbolExpression(symbol);
    }

    // "(expr)" — parenthesized sub-expression.
    if (tokenType == ExprTokenType.Open)
    {
        lexer.GetNextToken(ExprTokenType.Open);
        Expression result = ParseCondCombo(lexer);
        lexer.GetNextToken(ExprTokenType.Close);
        return result;
    }

    // Unary operators bind to the following factor.
    if (tokenType == ExprTokenType.Minus || tokenType == ExprTokenType.NOT)
    {
        lexer.GetNextToken(tokenType);
        return new UnaryExpression(ParseFactor(lexer), tokenType);
    }

    throw new ParseException("Unexpected token " + tokenType, lexer.CurrentPosition);
}
/// <summary>
/// Tries to parse a collection of function parameters for path.
/// </summary>
/// <param name="parenthesisExpression">The contents of the parentheses portion of the current path segment.</param>
/// <param name="configuration">The ODataUriParserConfiguration to create a UriQueryExpressionParser.</param>
/// <param name="splitParameters">The parameters if they were successfully split.</param>
/// <returns>Whether the parameters could be split.</returns>
internal static bool TrySplitOperationParameters(string parenthesisExpression, ODataUriParserConfiguration configuration, out ICollection<FunctionParameterToken> splitParameters)
{
    var lexer = new ExpressionLexer(parenthesisExpression, true /*moveToFirstToken*/, false /*useSemicolonDelimiter*/, true /*parsingFunctionParameters*/);
    var parser = new UriQueryExpressionParser(configuration.Settings.FilterLimit, lexer);
    bool succeeded = parser.TrySplitOperationParameters(ExpressionTokenKind.End, out splitParameters);

    // Duplicate parameter (or entity key) names are rejected even if splitting succeeded.
    if (splitParameters.Select(t => t.ParameterName).Distinct().Count() != splitParameters.Count)
    {
        throw new ODataException(ODataErrorStrings.FunctionCallParser_DuplicateParameterOrEntityKeyName);
    }

    return succeeded;
}
/// <summary>
/// Build the SelectOption strategy.
/// TODO: Really should not take the clauseToParse here. Instead it should be provided with a call to ParseSelect() or ParseExpand().
/// </summary>
/// <param name="clauseToParse">The clause to parse.</param>
/// <param name="maxRecursiveDepth">Max recursive depth.</param>
/// <param name="enableCaseInsensitiveBuiltinIdentifier">Whether to allow case insensitive for builtin identifier.</param>
public SelectExpandParser(string clauseToParse, int maxRecursiveDepth, bool enableCaseInsensitiveBuiltinIdentifier = false)
{
    this.maxRecursiveDepth = maxRecursiveDepth;

    // Default the per-part limits (path, $filter, $orderby, $search) to the overall
    // recursion limit in case they are never specified individually.
    this.MaxPathDepth = maxRecursiveDepth;
    this.MaxFilterDepth = maxRecursiveDepth;
    this.MaxOrderByDepth = maxRecursiveDepth;
    this.MaxSearchDepth = maxRecursiveDepth;

    // Set up our lexer. useSemicolonDelimiter stays off: the expand-options parsing code,
    // which is the only thing that needs it, lives in a different class that uses its own lexer.
    this.lexer = clauseToParse != null
        ? new ExpressionLexer(clauseToParse, false /*moveToFirstToken*/, false /*useSemicolonDelimiter*/)
        : null;

    this.enableCaseInsensitiveBuiltinIdentifier = enableCaseInsensitiveBuiltinIdentifier;
}
/// <summary>
/// Converts the given <paramref name="value"/> to a corresponding CLR type. Expects the
/// <paramref name="value"/> to have already been properly unescaped from an actual Uri.
/// </summary>
/// <param name="value">Value from a Uri to be converted.</param>
/// <param name="version">Version to be compliant with.</param>
/// <param name="model">Optional model to perform verification against.</param>
/// <param name="typeReference">Optional IEdmTypeReference to perform verification against.
/// Callers must provide a <paramref name="model"/> containing this type if it is specified.</param>
/// <returns>A CLR object that the <paramref name="value"/> represents or an EnumNode.</returns>
public static object ConvertFromUriLiteral(string value, ODataVersion version, IEdmModel model, IEdmTypeReference typeReference)
{
    ExceptionUtils.CheckArgumentNotNull(value, "value");
    // A type reference is only meaningful relative to a model.
    if (typeReference != null && model == null)
    {
        throw new ODataException(ODataErrorStrings.ODataUriUtils_ConvertFromUriLiteralTypeRefWithoutModel);
    }

    if (model == null)
    {
        model = Microsoft.OData.Edm.Library.EdmCoreModel.Instance;
    }

    // Let ExpressionLexer try to get a primitive
    ExpressionLexer lexer = new ExpressionLexer(value, false /*moveToFirstToken*/, false /*useSemicolonDelimiter*/);
    Exception error;
    ExpressionToken token;

    lexer.TryPeekNextToken(out token, out error);

    // Bracketed text ({...}/[...]) is handled as a complex or collection value.
    if (token.Kind == ExpressionTokenKind.BracketedExpression)
    {
        return ODataUriConversionUtils.ConvertFromComplexOrCollectionValue(value, model, typeReference);
    }

    QueryNode enumConstNode;
    if ((token.Kind == ExpressionTokenKind.Identifier) // then try parsing the entire text as enum value
        && EnumBinder.TryBindIdentifier(lexer.ExpressionText, null, model, out enumConstNode))
    {
        return ((ConstantNode)enumConstNode).Value;
    }

    // Otherwise read the text as a primitive literal.
    object result = lexer.ReadLiteralToken();

    // If we have a typeReference then perform verification and convert if necessary
    if (typeReference != null)
    {
        result = ODataUriConversionUtils.VerifyAndCoerceUriPrimitiveLiteral(result, model, typeReference);
    }

    return result;
}
/// <summary>
/// Parses null literals.
/// </summary>
/// <param name="lexer">The lexer to use; advanced past the null literal on return.</param>
/// <returns>The literal token produced by building the given literal.</returns>
private static LiteralToken ParseNullLiteral(ExpressionLexer lexer)
{
    Debug.Assert(lexer != null, "lexer != null");
    Debug.Assert(lexer.CurrentToken.Kind == ExpressionTokenKind.NullLiteral, "this.lexer.CurrentToken.InternalKind == ExpressionTokenKind.NullLiteral");

    LiteralToken nullToken = new LiteralToken(null, lexer.CurrentToken.Text);

    lexer.NextToken();
    return nullToken;
}
/// <summary>
/// Parse the complex/collection value in parameter alias.
/// </summary>
/// <param name="queryToken">The parsed token.</param>
/// <param name="parameterType">The expected parameter type.</param>
/// <param name="model">The model.</param>
/// <returns>Token with complex/collection value parsed, or the original token unchanged.</returns>
private static QueryToken ParseComplexOrCollectionAlias(QueryToken queryToken, IEdmTypeReference parameterType, IEdmModel model)
{
    LiteralToken valueToken = queryToken as LiteralToken;
    string valueStr;
    // Only string-valued literals with original text are candidates for bracketed parsing.
    if (valueToken != null && (valueStr = valueToken.Value as string) != null && !string.IsNullOrEmpty(valueToken.OriginalText))
    {
        var lexer = new ExpressionLexer(valueToken.OriginalText, true /*moveToFirstToken*/, false /*useSemicolonDelimiter*/, true /*parsingFunctionParameters*/);
        if (lexer.CurrentToken.Kind == ExpressionTokenKind.BracketedExpression)
        {
            object result = valueStr;
            // Entity / entity-collection values keep the raw string; others are converted.
            if (!parameterType.IsEntity() && !parameterType.IsEntityCollectionType())
            {
                result = ODataUriUtils.ConvertFromUriLiteral(valueStr, ODataVersion.V4, model, parameterType);
            }

            // For non-primitive type, we have to pass parameterType to LiteralToken, then to ConstantNode so the service can know what the type it is.
            return new LiteralToken(result, valueToken.OriginalText, parameterType);
        }
    }

    return queryToken;
}
// A parameter alias with a known value must bind to an alias node, and the alias
// value itself must be cached as a constant node.
public void FunctionParameterParserShouldResolveAliasesInFilterOrderby()
{
    var aliasValues = new Dictionary<string, string>() { { "@a", "null" } };
    var aliasAccessor = new ParameterAliasValueAccessor(aliasValues);
    var sut = new ExpressionLexer("address=@a)", true, false, true);

    ICollection<NamedFunctionParameterNode> boundParameters;
    TryParseFunctionParameters(sut, aliasAccessor, out boundParameters).Should().BeTrue();

    boundParameters.Should().HaveCount(1);
    boundParameters.Single().ShouldHaveParameterAliasNode("address", "@a");

    // verify alias value node:
    aliasAccessor.ParameterAliasValueNodesCached["@a"].ShouldBeConstantQueryNode((object)null);
}
/// <summary>
/// Parses a literal.
/// </summary>
/// <param name="lexer">The lexer to use; positioned on the candidate literal.</param>
/// <returns>The literal query token or null if something else was found.</returns>
internal static LiteralToken TryParseLiteral(ExpressionLexer lexer)
{
    Debug.Assert(lexer != null, "lexer != null");

    // Dispatch on the token kind to the matching EDM primitive type.
    switch (lexer.CurrentToken.Kind)
    {
        case ExpressionTokenKind.BooleanLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetBoolean(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmBooleanTypeName);
        case ExpressionTokenKind.DateLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetDate(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmDateTypeName);
        case ExpressionTokenKind.DateTimeOffsetLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetTemporal(EdmPrimitiveTypeKind.DateTimeOffset, false), Microsoft.OData.Core.Metadata.EdmConstants.EdmDateTimeOffsetTypeName);
        case ExpressionTokenKind.DurationLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetTemporal(EdmPrimitiveTypeKind.Duration, false), Microsoft.OData.Core.Metadata.EdmConstants.EdmDurationTypeName);
        case ExpressionTokenKind.DecimalLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetDecimal(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmDecimalTypeName);
        case ExpressionTokenKind.NullLiteral:
            return ParseNullLiteral(lexer);
        case ExpressionTokenKind.StringLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetString(true), Microsoft.OData.Core.Metadata.EdmConstants.EdmStringTypeName);
        case ExpressionTokenKind.Int64Literal:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetInt64(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmInt64TypeName);
        case ExpressionTokenKind.IntegerLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetInt32(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmInt32TypeName);
        case ExpressionTokenKind.DoubleLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetDouble(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmDoubleTypeName);
        case ExpressionTokenKind.SingleLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetSingle(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmSingleTypeName);
        case ExpressionTokenKind.GuidLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetGuid(false), Microsoft.OData.Core.Metadata.EdmConstants.EdmGuidTypeName);
        case ExpressionTokenKind.BinaryLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetBinary(true), Microsoft.OData.Core.Metadata.EdmConstants.EdmBinaryTypeName);
        case ExpressionTokenKind.GeographyLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetSpatial(EdmPrimitiveTypeKind.Geography, false), Microsoft.OData.Core.Metadata.EdmConstants.EdmGeographyTypeName);
        case ExpressionTokenKind.GeometryLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetSpatial(EdmPrimitiveTypeKind.Geometry, false), Microsoft.OData.Core.Metadata.EdmConstants.EdmGeometryTypeName);
        case ExpressionTokenKind.QuotedLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetString(true), Microsoft.OData.Core.Metadata.EdmConstants.EdmStringTypeName);
        case ExpressionTokenKind.BracketedExpression:
            {
                // TODO: need a BracketLiteralToken for real complex type value like ["Barky","Junior"] or {...}
                // For now the raw bracketed text is kept as both value and original text.
                LiteralToken result = new LiteralToken(lexer.CurrentToken.Text, lexer.CurrentToken.Text);
                lexer.NextToken();
                return result;
            }

        case ExpressionTokenKind.TimeOfDayLiteral:
            return ParseTypedLiteral(lexer, EdmCoreModel.Instance.GetTemporal(EdmPrimitiveTypeKind.TimeOfDay, false), Microsoft.OData.Core.Metadata.EdmConstants.EdmTimeOfDayTypeName);
        default:
            return null;
    }
}
// Parses an optional right-associative "cond ? whenTrue : whenFalse" on top of a condition combo.
private static Expression ParseTernary(ExpressionLexer lexer)
{
    Expression condition = ParseCondCombo(lexer);
    if (lexer.PeekNextToken() != ExprTokenType.QuestionMark)
    {
        return condition;
    }

    lexer.GetNextToken(ExprTokenType.QuestionMark);
    Expression whenTrue = ParseTernary(lexer);
    lexer.GetNextToken(ExprTokenType.Colon);
    Expression whenFalse = ParseTernary(lexer);
    return new TernaryExpression(condition, whenTrue, whenFalse);
}
// Test helper: builds a real FunctionCallParser over the given expression, sharing one
// lexer between the call parser and its backing UriQueryExpressionParser.
private static IFunctionCallParser GetRealFunctionCallParser(string expression)
{
    var sharedLexer = new ExpressionLexer(expression, true, false);
    var queryParser = new UriQueryExpressionParser(345, sharedLexer);
    return new FunctionCallParser(sharedLexer, queryParser);
}
/// <summary>
/// Parse expression text into Token.
/// </summary>
/// <param name="expressionText">The expression string to Parse.</param>
/// <returns>The lexical token representing the expression text.</returns>
internal QueryToken ParseExpressionText(string expressionText)
{
    Debug.Assert(expressionText != null, "expressionText != null");

    this.recursionDepth = 0;
    this.lexer = CreateLexerForFilterOrOrderByOrApplyExpression(expressionText);
    QueryToken parsed = this.ParseExpression();
    // The whole input must have been consumed by the expression.
    this.lexer.ValidateToken(ExpressionTokenKind.End);
    return parsed;
}
// Folds a left-associative chain of condition-combination operators into LogicalExpressions.
private static Expression ParseCondCombo(ExpressionLexer lexer)
{
    return RecursiveDescentStep(lexer, _condComboTokens, ParseCond,
        (op, left, right) => new LogicalExpression(op, left, right));
}
// Folds a left-associative chain of arithmetic-level operators into BinaryExpressions.
private static Expression ParseExpr(ExpressionLexer lexer)
{
    return RecursiveDescentStep(lexer, _exprTokens, ParseTerm,
        (op, left, right) => new BinaryExpression(op, left, right));
}
// Folds a left-associative chain of comparison operators into CompareExpressions.
private static Expression ParseCond(ExpressionLexer lexer)
{
    return RecursiveDescentStep(lexer, _condTokens, ParseExpr,
        (op, left, right) => new CompareExpression(op, left, right));
}
/// <summary>
/// Runs a unit test on SelectExpandTermParser.ParseTerm() for a $expand term and gives
/// the lexer back so the caller can verify its final position.
/// </summary>
private PathSegmentToken ParseExpandTerm(string term, out ExpressionLexer lexer)
{
    lexer = new ExpressionLexer(term, true /*moveToFirstToken*/, true /*useSemicolonDelimiter*/);
    var expandTermParser = new SelectExpandTermParser(lexer, 100 /*maxPathLength*/, false /*isSelect*/);
    return expandTermParser.ParseTerm();
}
// Generic left-associative parsing step: parse one operand with stepDown, then keep
// folding further operands into the accumulated expression while the next token is
// one of the given operator tokens.
private static Expression RecursiveDescentStep(ExpressionLexer lexer, ExprTokenType[] tokens, StepDownDelegate stepDown, CreateBinaryDelegate createBinary)
{
    Expression accumulated = stepDown(lexer);
    for (ExprTokenType? op = lexer.CheckNextToken(tokens); op.HasValue; op = lexer.CheckNextToken(tokens))
    {
        accumulated = createBinary(op.Value, accumulated, stepDown(lexer));
    }

    return accumulated;
}
/// <summary>
/// Parses parameter alias into token.
/// </summary>
/// <param name="lexer">The lexer to use; advanced past the alias on return.</param>
/// <returns>The parameter alias token.</returns>
private static FunctionParameterAliasToken ParseParameterAlias(ExpressionLexer lexer)
{
    Debug.Assert(lexer != null, "lexer != null");

    var aliasToken = new FunctionParameterAliasToken(lexer.CurrentToken.Text);

    lexer.NextToken();
    return aliasToken;
}
// An alias with no supplied value must still bind to a parameter alias node.
public void FunctionParameterParserShoulHandleUnResolvedAliasesInFilterOrderby()
{
    var sut = new ExpressionLexer("address=@a)", true, false, true);

    ICollection<NamedFunctionParameterNode> boundParameters;
    TryParseFunctionParameters(sut, null, out boundParameters).Should().BeTrue();

    boundParameters.Should().HaveCount(1);
    boundParameters.Single().ShouldHaveParameterAliasNode("address", "@a");
}
/// <summary>
/// Parses the $orderby expression.
/// </summary>
/// <param name="orderBy">The $orderby expression string to parse.</param>
/// <returns>The enumeration of lexical tokens representing order by tokens.</returns>
internal IEnumerable<OrderByToken> ParseOrderBy(string orderBy)
{
    Debug.Assert(orderBy != null, "orderBy != null");
    this.recursionDepth = 0;
    this.lexer = CreateLexerForFilterOrOrderByOrApplyExpression(orderBy);
    List<OrderByToken> orderByTokens = new List<OrderByToken>();
    // Parse comma-separated "<expression> [asc|desc]" terms.
    while (true)
    {
        QueryToken expression = this.ParseExpression();
        // Direction defaults to ascending when no keyword follows the expression.
        bool ascending = true;
        if (this.TokenIdentifierIs(ExpressionConstants.KeywordAscending))
        {
            this.lexer.NextToken();
        }
        else if (this.TokenIdentifierIs(ExpressionConstants.KeywordDescending))
        {
            this.lexer.NextToken();
            ascending = false;
        }

        OrderByToken orderByToken = new OrderByToken(expression, ascending ? OrderByDirection.Ascending : OrderByDirection.Descending);
        orderByTokens.Add(orderByToken);
        // A comma means another term follows; anything else ends the list.
        if (this.lexer.CurrentToken.Kind != ExpressionTokenKind.Comma)
        {
            break;
        }

        this.lexer.NextToken();
    }

    // The whole input must have been consumed.
    this.lexer.ValidateToken(ExpressionTokenKind.End);
    return new ReadOnlyCollection<OrderByToken>(orderByTokens);
}
// When the input is not a named value, parsing fails and the lexer must stay at position 0.
public void FunctionParameterParserShouldNotAdvanceLexerIfNotANamedValue()
{
    var sut = new ExpressionLexer("a?foo,bar", true, false, true);

    ICollection<NamedFunctionParameterNode> boundParameters;
    TryParseFunctionParameters(sut, null, out boundParameters).Should().BeFalse();

    sut.Position.Should().Be(0);
}
/// <summary>
/// Parses the $apply expression into transformation tokens (aggregate / filter / groupby)
/// separated by '/'.
/// </summary>
/// <param name="apply">The $apply expression string to parse.</param>
/// <returns>The transformation tokens, in order of appearance.</returns>
internal IEnumerable<QueryToken> ParseApply(string apply)
{
    Debug.Assert(apply != null, "apply != null");

    List<QueryToken> transformationTokens = new List<QueryToken>();

    // An empty $apply produces no transformations.
    if (string.IsNullOrEmpty(apply))
    {
        return transformationTokens;
    }

    this.recursionDepth = 0;
    this.lexer = CreateLexerForFilterOrOrderByOrApplyExpression(apply);

    while (true)
    {
        // Each transformation starts with one of the supported keywords.
        switch (this.lexer.CurrentToken.GetIdentifier())
        {
            case ExpressionConstants.KeywordAggregate:
                transformationTokens.Add(ParseAggregate());
                break;
            case ExpressionConstants.KeywordFilter:
                transformationTokens.Add(ParseApplyFilter());
                break;
            case ExpressionConstants.KeywordGroupBy:
                transformationTokens.Add(ParseGroupBy());
                break;
            default:
                throw ParseError(ODataErrorStrings.UriQueryExpressionParser_KeywordOrIdentifierExpected(supportedKeywords, this.lexer.CurrentToken.Position, this.lexer.ExpressionText));
        }

        // '/' indicates there are more transformations
        if (this.lexer.CurrentToken.Kind != ExpressionTokenKind.Slash)
        {
            break;
        }

        this.lexer.NextToken();
    }

    // The whole input must have been consumed.
    this.lexer.ValidateToken(ExpressionTokenKind.End);
    return new ReadOnlyCollection<QueryToken>(transformationTokens);
}
// Test helper: splits the lexer's input into function parameter tokens and, on success,
// binds them against the IsAddressGood function import from the hard-coded test model.
private static bool TryParseFunctionParameters(ExpressionLexer lexer, ParameterAliasValueAccessor paramAliasAccessor, out ICollection<NamedFunctionParameterNode> parsedParameterNodes)
{
    parsedParameterNodes = null;

    var expressionParser = new UriQueryExpressionParser(345, lexer);
    ICollection<FunctionParameterToken> splitParameters;
    if (!expressionParser.TrySplitFunctionParameters(out splitParameters))
    {
        return false;
    }

    var configuration = new ODataUriParserConfiguration(HardCodedTestModel.TestModel) { ParameterAliasValueAccessor = paramAliasAccessor };
    var boundParameters = FunctionCallBinder.BindSegmentParameters(configuration, HardCodedTestModel.GetFunctionImportIsAddressGood().Function, splitParameters);
    parsedParameterNodes = boundParameters.Select(s => new NamedFunctionParameterNode(s.Name, s.Value as QueryNode)).ToList();
    return true;
}
/// <summary>
/// Parses typed literals.
/// </summary>
/// <param name="lexer">The lexer to use; advanced past the literal on success.</param>
/// <param name="targetTypeReference">Expected type to be parsed.</param>
/// <param name="targetTypeName">The EDM type name of the expected type to be parsed (used in error messages).</param>
/// <returns>The literal token produced by building the given literal.</returns>
private static LiteralToken ParseTypedLiteral(ExpressionLexer lexer, IEdmPrimitiveTypeReference targetTypeReference, string targetTypeName)
{
    Debug.Assert(lexer != null, "lexer != null");

    object targetValue;
    string reason;
    if (!UriPrimitiveTypeParser.TryUriStringToPrimitive(lexer.CurrentToken.Text, targetTypeReference, out targetValue, out reason))
    {
        string message;

        // Include the parser-supplied reason in the error when one is available.
        if (reason == null)
        {
            message = ODataErrorStrings.UriQueryExpressionParser_UnrecognizedLiteral(
                targetTypeName,
                lexer.CurrentToken.Text,
                lexer.CurrentToken.Position,
                lexer.ExpressionText);
        }
        else
        {
            message = ODataErrorStrings.UriQueryExpressionParser_UnrecognizedLiteralWithReason(
                targetTypeName,
                lexer.CurrentToken.Text,
                lexer.CurrentToken.Position,
                lexer.ExpressionText,
                reason);
        }

        throw ParseError(message);
    }

    LiteralToken result = new LiteralToken(targetValue, lexer.CurrentToken.Text);
    lexer.NextToken();
    return result;
}
/// <summary>
/// Constructs a term parser.
/// </summary>
/// <param name="lexer">Lexer to use for parsing the term. Should be positioned at the term to parse.</param>
/// <param name="maxPathLength">Max length of a select or expand path.</param>
/// <param name="isSelect">True if we are parsing select, false if we are parsing expand.</param>
internal SelectExpandTermParser(ExpressionLexer lexer, int maxPathLength, bool isSelect)
{
    this.isSelect = isSelect;
    this.maxPathLength = maxPathLength;
    this.lexer = lexer;
}