/// <summary>
/// Returns the entities as per the specifications in the get request.
/// </summary>
protected virtual async Task<GetAggregateResponse> GetAggregateImplAsync(GetAggregateArguments args)
{
    // Parse the filter and select arguments into expression trees
    var filterExp = FilterExpression.Parse(args.Filter);
    var selectExp = AggregateSelectExpression.Parse(args.Select);

    // Start building the aggregate query
    var aggQuery = GetRepository().AggregateQuery<TEntity>();

    // Retrieve the user permissions for the current view
    var readPermissions = await UserPermissions(Constants.Read);
    int totalPermissionCount = readPermissions.Count();

    // Filter out permissions with masks that would be violated by the filter argument.
    // orderby on the other hand is always mandated to be a subset of the selected parameters
    // and those in turn must be universally visible to the user, so no need to check orderby
    var defaultMask = GetDefaultMask() ?? new MaskTree();
    readPermissions = FilterViolatedPermissionsForAggregateQuery(readPermissions, defaultMask, filterExp, selectExp);
    int survivingPermissionCount = readPermissions.Count();

    // If any permission was dropped, the result is only a partial view of the data
    bool isPartial = totalPermissionCount != survivingPermissionCount;

    // Apply read permissions
    FilterExpression permissionsCriteria = GetReadPermissionsCriteria(readPermissions);
    aggQuery = aggQuery.Filter(permissionsCriteria);

    // Apply the filter argument
    aggQuery = aggQuery.Filter(filterExp);

    // Apply the top parameter (0 means get all); cap at one past the maximum so
    // an overflow can be detected after loading
    int top = args.Top == 0 ? int.MaxValue : args.Top;
    top = Math.Min(top, MAXIMUM_AGGREGATE_RESULT_SIZE + 1);
    aggQuery = aggQuery.Top(top);

    // Apply the select, which has the general format 'Select=A,B/C,D'
    aggQuery = aggQuery.Select(selectExp);

    // Load the data in memory
    var result = await aggQuery.ToListAsync();

    // Put a limit on the number of data points returned, to prevent DoS attacks
    if (result.Count > MAXIMUM_AGGREGATE_RESULT_SIZE)
    {
        var msg = _localizer["Error_NumberOfDataPointsExceedsMaximum0", MAXIMUM_AGGREGATE_RESULT_SIZE];
        throw new BadRequestException(msg);
    }

    // Finally return the result
    return new GetAggregateResponse
    {
        Top = args.Top,
        IsPartial = isPartial,
        Result = result,
        RelatedEntities = new Dictionary<string, IEnumerable<Entity>>() // TODO: Add ancestors of tree dimensions
    };
}
/// <summary>
/// Returns an aggregated list of dynamic rows and any tree dimension ancestors as per the specifications in <paramref name="args"/>.
/// </summary>
public virtual async Task<AggregateResult> GetAggregate(GetAggregateArguments args, CancellationToken cancellation)
{
    await Initialize(cancellation);

    // Parse the arguments into expression trees
    var filterExp = ExpressionFilter.Parse(args.Filter);
    var havingExp = ExpressionHaving.Parse(args.Having);
    var selectExp = ExpressionAggregateSelect.Parse(args.Select);
    var orderByExp = ExpressionAggregateOrderBy.Parse(args.OrderBy);

    // Start building the aggregate query
    var aggQuery = QueryFactory().AggregateQuery<TEntity>();

    // Retrieve and apply the read permissions (important)
    var permissionsFilter = await UserPermissionsFilter(PermissionActions.Read, cancellation);
    aggQuery = aggQuery.Filter(permissionsFilter);

    // Apply the filter and having arguments
    aggQuery = aggQuery.Filter(filterExp);
    aggQuery = aggQuery.Having(havingExp);

    // Apply the top parameter (0 means get all); cap at one past the maximum so
    // an overflow can be detected after loading
    int top = Math.Min(args.Top == 0 ? int.MaxValue : args.Top, MaximumAggregateResultSize + 1);
    aggQuery = aggQuery.Top(top);

    // Apply the select, which has the general format 'Select=A+B.C,Sum(D)'
    aggQuery = aggQuery.Select(selectExp);

    // Apply the orderby, which has the general format 'A+B.C desc,Sum(D) asc'
    aggQuery = aggQuery.OrderBy(orderByExp);

    // Load the data in memory
    var output = await aggQuery.ToListAsync(QueryContext, cancellation);
    var rows = output.Rows;
    var ancestorResults = output.Ancestors
        .Select(e => new DimensionAncestorsResult(e.Result, e.IdIndex, e.MinIndex));

    // Put a limit on the number of data points returned, to prevent DoS attacks
    if (rows.Count > MaximumAggregateResultSize)
    {
        var msg = _localizer["Error_NumberOfDataPointsExceedsMaximum0", MaximumAggregateResultSize];
        throw new ServiceException(msg);
    }

    // Return
    return new AggregateResult(rows, ancestorResults);
}
/// <summary>
/// Delegates to <see cref="GetAggregateImplAsync"/> and wraps the call in the
/// shared invocation helper, which translates known exceptions into HTTP responses.
/// </summary>
public virtual async Task<ActionResult<GetAggregateResponse>> GetAggregate([FromQuery] GetAggregateArguments args)
{
    return await ControllerUtilities.InvokeActionImpl(async () =>
    {
        var response = await GetAggregateImplAsync(args);
        return Ok(response);
    }, _logger);
}
/// <summary>
/// Returns a page of detail entries ordered by posting date, annotated with running
/// accumulation columns, together with the opening and closing balances of the
/// period specified in <paramref name="args"/>.
/// </summary>
public async Task<StatementResult> GetStatement(StatementArguments args, CancellationToken cancellation)
{
    await Initialize(cancellation);

    // Step 1: Prepare the filters: one for entries strictly before FromDate (opening),
    // one for entries within [FromDate, ToDate] (the page), and one for entries up to
    // and including ToDate (closing)
    string undatedFilter = UndatedFilter(args);
    var beforeOpeningFilterBldr = new StringBuilder(undatedFilter);
    var betweenFilterBldr = new StringBuilder(undatedFilter);
    var beforeClosingFilterBldr = new StringBuilder(undatedFilter);
    if (args.FromDate != null)
    {
        beforeOpeningFilterBldr.Append($" and {nameof(DetailsEntry.Line)}.{nameof(LineForQuery.PostingDate)} lt '{args.FromDate.Value:yyyy-MM-dd}'"); // <
        betweenFilterBldr.Append($" and {nameof(DetailsEntry.Line)}.{nameof(LineForQuery.PostingDate)} ge '{args.FromDate.Value:yyyy-MM-dd}'"); // >=
    }

    if (args.ToDate != null)
    {
        betweenFilterBldr.Append($" and {nameof(DetailsEntry.Line)}.{nameof(LineForQuery.PostingDate)} le '{args.ToDate.Value:yyyy-MM-dd}'"); // <=
        beforeClosingFilterBldr.Append($" and {nameof(DetailsEntry.Line)}.{nameof(LineForQuery.PostingDate)} le '{args.ToDate.Value:yyyy-MM-dd}'"); // <=
    }

    string beforeOpeningFilter = beforeOpeningFilterBldr.ToString();
    string betweenDatesFilter = betweenFilterBldr.ToString();
    string beforeClosingFilter = beforeClosingFilterBldr.ToString();

    // Step 2: Load the entries
    var factArgs = new GetArguments
    {
        Select = args.Select,
        Top = args.Skip + args.Top, // We need all entries from the start to compute the accumulations; we do the skipping later in memory
        Skip = 0, // args.Skip,
        OrderBy = $"{nameof(DetailsEntry.Line)}.{nameof(LineForQuery.PostingDate)},{nameof(DetailsEntry.Direction)} desc,{nameof(DetailsEntry.Id)}",
        CountEntities = true,
        Filter = betweenDatesFilter,
    };

    var result = await GetEntities(factArgs, cancellation);
    var data = result.Data;
    var count = result.Count;

    // Step 3: Load the opening balances
    string valueExp = $"sum({nameof(DetailsEntry.Value)} * {nameof(DetailsEntry.Direction)})";
    string quantityExp = $"sum({nameof(DetailsEntry.BaseQuantity)} * {nameof(DetailsEntry.Direction)})";
    string monetaryValueExp = $"sum({nameof(DetailsEntry.MonetaryValue)} * {nameof(DetailsEntry.Direction)})";

    var openingArgs = new GetAggregateArguments
    {
        Filter = beforeOpeningFilter,
        Select = $"{valueExp},{quantityExp},{monetaryValueExp}"
    };

    var openingResult = await GetAggregate(openingArgs, cancellation);
    var openingData = openingResult.Data;

    // SUM over an empty set yields null, hence the coalescing
    decimal opening = (decimal)(openingData[0][0] ?? 0m);
    decimal openingQuantity = (decimal)(openingData[0][1] ?? 0m);
    decimal openingMonetaryValue = (decimal)(openingData[0][2] ?? 0m);

    // Step 4: Add the accumulation columns
    decimal acc = opening;
    decimal accQuantity = openingQuantity;
    decimal accMonetaryValue = openingMonetaryValue;
    foreach (var entry in data)
    {
        acc += (entry.Value ?? 0m) * entry.Direction ?? throw new InvalidOperationException("Bug: Missing Direction");
        entry.Accumulation = acc;
        entry.EntityMetadata[nameof(entry.Accumulation)] = FieldMetadata.Loaded;

        accQuantity += (entry.BaseQuantity ?? 0m) * entry.Direction ?? throw new InvalidOperationException("Bug: Missing Direction");
        entry.QuantityAccumulation = accQuantity;
        entry.EntityMetadata[nameof(entry.QuantityAccumulation)] = FieldMetadata.Loaded;

        accMonetaryValue += (entry.MonetaryValue ?? 0m) * entry.Direction ?? throw new InvalidOperationException("Bug: Missing Direction");
        entry.MonetaryValueAccumulation = accMonetaryValue;
        entry.EntityMetadata[nameof(entry.MonetaryValueAccumulation)] = FieldMetadata.Loaded;
    }

    // Step 5: Compute the closing balances.
    // BUG FIX: the original branches were swapped. When Skip + Top >= count the
    // loaded page covers the entire between-dates result set, so the running
    // accumulators already equal the closing balances and no extra query is
    // needed. Otherwise the page is partial and the closing balances must be
    // aggregated from the database (everything up to and including ToDate).
    decimal closing;
    decimal closingQuantity;
    decimal closingMonetaryValue;
    if (args.Skip + args.Top >= count.Value)
    {
        // Complete result set: accumulators are the closing balances
        closing = acc;
        closingQuantity = accQuantity;
        closingMonetaryValue = accMonetaryValue;
    }
    else
    {
        // Partial page: aggregate everything up to (and including) ToDate
        var closingArgs = new GetAggregateArguments
        {
            Filter = beforeClosingFilter,
            Select = $"{valueExp},{quantityExp},{monetaryValueExp}"
        };

        var closingResult = await GetAggregate(closingArgs, cancellation);
        var closingData = closingResult.Data;

        closing = (decimal)(closingData[0][0] ?? 0m);
        closingQuantity = (decimal)(closingData[0][1] ?? 0m);
        closingMonetaryValue = (decimal)(closingData[0][2] ?? 0m);
    }

    // Skip in memory (entries were loaded from 0 to compute the accumulations correctly)
    data = data.Skip(args.Skip).ToList();

    return new StatementResult(data, opening, openingQuantity, openingMonetaryValue, closing, closingQuantity, closingMonetaryValue, count.Value);
}
/// <summary>
/// Loads the aggregate rows from the fact service and wraps them in a
/// <see cref="GetAggregateResponse"/>, via the shared invocation helper.
/// </summary>
public virtual async Task<ActionResult<GetAggregateResponse>> GetAggregate([FromQuery] GetAggregateArguments args, CancellationToken cancellation)
{
    return await ControllerUtilities.InvokeActionImpl(async () =>
    {
        using var _ = _instrumentation.Block(nameof(GetAggregate));

        // Capture the server time at the very beginning for consistency
        var serverTime = DateTimeOffset.UtcNow;

        // Sometimes select is so huge that it is passed as a header instead
        if (string.IsNullOrWhiteSpace(args.Select))
        {
            args.Select = Request.Headers["X-Select"].FirstOrDefault();
        }

        // Load the data
        var (data, ancestors, isPartial) = await GetFactService().GetAggregate(args, cancellation);

        // Finally return the result
        var response = new GetAggregateResponse
        {
            IsPartial = isPartial,
            ServerTime = serverTime,
            Result = data,
            DimensionAncestors = ancestors,
        };

        return Ok(response);
    }, _logger);
}
/// <summary>
/// Loads the aggregate rows and any tree dimension ancestors from the fact
/// service and maps them into a <see cref="GetAggregateResponse"/>.
/// </summary>
public virtual async Task<ActionResult<GetAggregateResponse>> GetAggregate([FromQuery] GetAggregateArguments args, CancellationToken cancellation)
{
    // Capture the server time at the very beginning for consistency
    var serverTime = DateTimeOffset.UtcNow;

    // Sometimes select is so huge that it is passed as a header instead
    if (string.IsNullOrWhiteSpace(args.Select))
    {
        args.Select = Request.Headers["X-Select"].FirstOrDefault();
    }

    // Load the data
    var serviceResult = await GetFactService().GetAggregate(args, cancellation);
    var rows = serviceResult.Data;

    // Map each ancestors result onto the API DTO
    var ancestorDtos = serviceResult.Ancestors.Select(e => new DimensionAncestors
    {
        IdIndex = e.IdIndex,
        MinIndex = e.MinIndex,
        Result = e.Data.ToList()
    });

    // Finally return the result
    var response = new GetAggregateResponse
    {
        ServerTime = serverTime,
        Result = rows,
        DimensionAncestors = ancestorDtos,
    };

    return Ok(response);
}