/// <summary>
/// Converts raw cursor query results into a CursorPageSlice decorator, computing the
/// HasPreviousPage/HasNextPage flags from the slice metadata produced when the query was built.
/// </summary>
/// <typeparam name="TEntity">The entity/model type of the page items.</typeparam>
/// <param name="results">Ordered results including cursor indexes (may contain one over-fetched sentinel row).</param>
/// <param name="sqlQuerySliceInfo">Slice metadata computed when the paginated SQL was generated.</param>
/// <param name="totalCount">Optional total row count from the secondary count query; null if not requested.</param>
/// <returns>A CursorPageSlice wrapping the (possibly trimmed) results with pagination flags.</returns>
private static CursorPageSlice<TEntity> PostProcessResultsIntoCursorPageSlice<TEntity>(
    List<CursorResult<TEntity>> results,
    SqlQuerySliceInfo sqlQuerySliceInfo,
    int? totalCount
) where TEntity : class
{
    var hasPreviousPage = false;
    var hasNextPage = false;

    //Cursor indexes are 1-based, so a first-row cursor greater than 1 proves rows exist before it.
    if (sqlQuerySliceInfo.IsPreviousPagePossible)
    {
        hasPreviousPage = results.FirstOrDefault()?.CursorIndex > 1;
    }

    //GENERALLY the over-fetch flag should always be set whenever a next page is possible, and the
    //  ExpectedCount is always a value that should satisfy the processing (e.g. ExpectedCount may be
    //  int.MaxValue which ensures the Take always succeeds in getting ALL results).
    if (sqlQuerySliceInfo.IsNextPagePossible
        && sqlQuerySliceInfo.IsEndIndexOverFetchedForNextPageCheck
        && sqlQuerySliceInfo.ExpectedCount < int.MaxValue)
    {
        //The query intentionally over-fetched one extra row; its presence proves a next page exists.
        hasNextPage = results.Count > sqlQuerySliceInfo.ExpectedCount;
        if (hasNextPage)
        {
            //Trim the sentinel row so only the requested page size is returned to the caller.
            results.RemoveAt(results.Count - 1);
        }
    }

    //Return a CursorPagedResult decorator for the results along with the Total Count!
    return new CursorPageSlice<TEntity>(results, totalCount, hasPreviousPage, hasNextPage);
}
/// <summary>
/// BBernard
/// Borrowed and Adapted from RepoDb source code: SqlServerStatementBuilder.CreateBatchQuery.
/// NOTE: Due to internally only accessible elements it was easier to just construct the query as needed
///         to allow us to return the RowNumber as the CursorIndex!
/// </summary>
/// <typeparam name="TEntity">The entity/model type the query selects.</typeparam>
/// <param name="tableName">Target table (or view) name.</param>
/// <param name="fields">Fields to select and return to the caller.</param>
/// <param name="orderBy">Sort fields; these drive the ROW_NUMBER() cursor ordering.</param>
/// <param name="where">May be either a QueryGroup, a RawSqlWhere object, or a raw SQL string.</param>
/// <param name="hints">Optional SQL Server table hints.</param>
/// <param name="afterCursorIndex">Exclusive starting cursor (Relay 'after' arg).</param>
/// <param name="firstTake">Take count from the start of the slice (Relay 'first' arg).</param>
/// <param name="beforeCursorIndex">Exclusive ending cursor (Relay 'before' arg).</param>
/// <param name="lastTake">Take count from the end of the slice (Relay 'last' arg).</param>
/// <param name="includeTotalCountQuery">When true, a second COUNT(*) query is appended so both can run as one efficient multi-result query.</param>
/// <returns>SqlQuerySliceInfo containing the complete SQL plus pagination metadata.</returns>
public static SqlQuerySliceInfo BuildSqlServerBatchSliceQuery<TEntity>(
    string tableName,
    IEnumerable<Field> fields,
    IEnumerable<OrderField> orderBy,
    object where = null,
    string hints = null,
    int? afterCursorIndex = null,
    int? firstTake = null,
    int? beforeCursorIndex = null,
    int? lastTake = null,
    bool includeTotalCountQuery = true
) where TEntity : class
{
    var dbSetting = RepoDbSettings.SqlServerSettings;
    string cursorIndexName = nameof(IHaveCursor.CursorIndex);

    var fieldsList = fields.ToList();
    var orderByList = orderBy.ToList();
    //FIX: Removed unused 'orderByLookup' local (a ToLookup() that was never referenced after initialization).

    //Ensure that we Remove any risk of Name conflicts with the CursorIndex field on the CTE
    //  because we are dynamically adding ROW_NUMBER() as [CursorIndex]! And, ensure
    //  that there are no conflicts with the OrderBy.
    var cteFields = new List<Field>(fieldsList);
    cteFields.RemoveAll(f => f.Name.Equals(cursorIndexName, StringComparison.OrdinalIgnoreCase));

    //We must ensure that all OrderBy fields are also part of the CTE Select Clause so that they are
    //  actually available to be sorted on (or else 'Invalid Column Errors' will occur if the field is not
    //  originally part of the Select Fields list).
    //NOTE(review): Except() here is case-sensitive while the RemoveAll above is OrdinalIgnoreCase —
    //  presumably intentional (RepoDb field names are assumed consistently cased); confirm before changing.
    var missingSortFieldNames = orderByList.Select(o => o.Name).Except(cteFields.Select(f => f.Name)).ToList();
    if (missingSortFieldNames.Count > 0)
    {
        cteFields.AddRange(missingSortFieldNames.Select(n => new Field(n)));
    }

    //The outer select returns the caller's fields plus the dynamically computed [CursorIndex].
    var selectFields = new List<Field>();
    selectFields.AddRange(fieldsList);
    selectFields.Add(new Field(cursorIndexName));

    // Initialize the builder
    var cteBuilder = new QueryBuilder();

    //Support either QueryGroup object model or Raw SQL; which enables support for complex Field processing & filtering not supported
    //  by QueryField/QueryGroup objects (e.g. LOWER(), TRIM()), use of Sql Server Full Text Searching on Fields (e.g. CONTAINS(), FREETEXT()), etc.
    var sqlWhereDataFilter = where switch
    {
        //FIX: The type pattern only matches a non-null QueryGroup, so the prior null-conditional ('?.') was redundant.
        QueryGroup whereQueryGroup => whereQueryGroup.GetString(0, dbSetting),
        RawSqlWhere whereRawSql => whereRawSql.RawSqlWhereClause,
        string whereRawSql => whereRawSql,
        _ => null
    };
    bool isWhereFilterSpecified = !string.IsNullOrWhiteSpace(sqlWhereDataFilter);

    //Dynamically build/optimize the core data SQL that will be used as a CTE wrapped by the Pagination logic!
    cteBuilder.Clear()
        .Select()
        .RowNumber().Over().OpenParen().OrderByFrom(orderByList, dbSetting).CloseParen().As($"[{cursorIndexName}],")
        .FieldsFrom(cteFields, dbSetting)
        .From().TableNameFrom(tableName, dbSetting)
        .HintsFrom(hints);

    if (isWhereFilterSpecified)
    {
        cteBuilder
            .Where()
            .WriteText(sqlWhereDataFilter);
    }

    var sqlCte = cteBuilder.GetString();

    //Compute the Relay-spec slice WHERE clause and its pagination metadata from the cursor args.
    var sqlWhereClauseSliceInfo = BuildRelaySpecQuerySliceInfo(cursorIndexName, afterCursorIndex, beforeCursorIndex, firstTake, lastTake);

    // Build the base Paginated Query
    var sqlBuilder = new QueryBuilder();
    sqlBuilder.Clear()
        .With()
        .WriteText("CTE").As().OpenParen()
        //Dynamically insert the CTE that is built separately...
        .WriteText(sqlCte)
        .CloseParen()
        .Select()
        .FieldsFrom(selectFields, dbSetting)
        .From().WriteText("CTE")
        //Implement Relay Spec Cursor Slicing Algorithm!
        .WriteText(sqlWhereClauseSliceInfo.SQL)
        .OrderByFrom(orderByList, dbSetting)
        .End();

    if (includeTotalCountQuery)
    {
        //Add SECOND Count Query into the Query so it can be executed as an efficient MultipleQuery!
        //NOTE: For optimization we do not need to Sort, or select more than one field, to get the Total Count;
        //      the only thing that changes this result is the Where filter fields!
        //NOTE: TO FIX RISK of Null values being skipped by the Count aggregation, this uses the standard COUNT(*)
        //      (RepoDb's default behavior when the field is null) to ensure that we get the true row count and nothing is
        //      eliminated due to Null values.
        //NOTE: We also rely on SqlServer to optimize this query instead of trying to do too much ourselves
        //      (with other unknown risks such as column permissions, etc.)
        sqlBuilder.Select()
            .Count(null, dbSetting)
            .From().TableNameFrom(tableName, dbSetting)
            .HintsFrom(hints);

        if (isWhereFilterSpecified)
        {
            sqlBuilder
                .Where()
                .WriteText(sqlWhereDataFilter);
        }

        sqlBuilder.End();
    }

    // Build the Query and other Slice Info metadata needed for optimal pagination...
    var sqlQuery = sqlBuilder.GetString();
    var sqlQuerySliceInfo = new SqlQuerySliceInfo()
    {
        SQL = sqlQuery,
        ExpectedCount = sqlWhereClauseSliceInfo.ExpectedCount,
        IsPreviousPagePossible = sqlWhereClauseSliceInfo.IsPreviousPagePossible,
        IsNextPagePossible = sqlWhereClauseSliceInfo.IsNextPagePossible,
        IsEndIndexOverFetchedForNextPageCheck = sqlWhereClauseSliceInfo.IsEndIndexOverFetchedForNextPageCheck
    };

    // Return the query
    return sqlQuerySliceInfo;
}
/// <summary>
/// Implements the Cursor pagination algorithm in alignment with the industry accepted Relay Spec for GraphQL,
/// mapping after/before/first/last args onto a WHERE clause over a 1-based ROW_NUMBER() cursor column.
/// For more info see: https://relay.dev/graphql/connections.htm#sec-Pagination-algorithm
/// </summary>
/// <param name="cursorFieldName">Name of the 1-based cursor index column in the CTE.</param>
/// <param name="afterCursorIndex">Exclusive starting cursor (Relay 'after' arg).</param>
/// <param name="beforeCursorIndex">Exclusive ending cursor (Relay 'before' arg).</param>
/// <param name="firstTake">Take count from the start of the slice (Relay 'first' arg).</param>
/// <param name="lastTake">Take count from the end of the slice (Relay 'last' arg).</param>
/// <returns>SqlQuerySliceInfo with the slice WHERE clause, expected count, and page-possibility flags.</returns>
private static SqlQuerySliceInfo BuildRelaySpecQuerySliceInfo(string cursorFieldName, int? afterCursorIndex, int? beforeCursorIndex, int? firstTake, int? lastTake)
{
    AssertCursorPagingArgsAreValid(
        after: afterCursorIndex,
        before: beforeCursorIndex,
        first: firstTake,
        last: lastTake
    );

    //FIX: Removed unused local 'string where = string.Empty;' (never referenced; SQL is assigned directly below).
    int? startIndex = null;
    int? endIndex = null;
    int count = 0;

    //********************************************************************************
    //FIRST we process after/before args...
    //********************************************************************************
    //NOTE: SQL Server BETWEEN operation is INCLUSIVE therefore to compute ordinal indexes from the After/Before Cursors
    //      we increment/decrement by One (1) to always get the item actually after or actually before the cursor specified!
    if (afterCursorIndex.HasValue && beforeCursorIndex.HasValue) // Both 'after' & 'before' args
    {
        //Both Before & After are specified; This scenario is discouraged by the Spec but not prohibited!
        startIndex = ((int)afterCursorIndex + 1);
        endIndex = ((int)beforeCursorIndex - 1);
        count = ((int)endIndex - (int)startIndex) + 1;
    }
    else if (afterCursorIndex.HasValue) //Only 'after' arg
    {
        startIndex = ((int)afterCursorIndex + 1);
        count = int.MaxValue;
    }
    else if (beforeCursorIndex.HasValue) //Only 'before' arg
    {
        // When taking from the End we know that the start is 1 (vice versa is not true)
        startIndex = 1;
        endIndex = ((int)beforeCursorIndex - 1);
        count = (int)endIndex;
    }
    else //NEITHER After nor Before were specified...
    {
        //Even now start from 1; with undefined End...
        startIndex = 1;
        count = int.MaxValue;
    }

    //********************************************************************************
    //SECOND we process first/last args which may be applied in combination...
    //********************************************************************************
    //NOTE: This may be the first time endIndex is initialized, based on FIRST filter step above...
    if (firstTake.HasValue && count > firstTake)
    {
        endIndex = (startIndex + (int)firstTake) - 1;
        count = (int)firstTake;
    }

    //Last can be combined in addition to First above (not exclusive).
    //NOTE: We only allow the Last filter here if there are enough items expected (expected count)
    //      to be available; meaning the expected count from the FIRST filter step above has to
    //      be greater than the take from the end (as outlined in Relay Spec).
    if (lastTake.HasValue && count > lastTake)
    {
        if (endIndex.HasValue)
        {
            startIndex = ((int)endIndex - (int)lastTake) + 1;
        }
        else
        {
            //This is the case where we are requested to select from the End of all results
            //  dynamically because no Before Cursor (ending cursor) was provided...
            //Therefore this requires a special Query condition to Dynamically compute
            //  the currently unknown StartIndex...
            startIndex = null;
            endIndex = null;
        }

        //In both of these cases the total count expected will be the Last Take size
        //  since it's smaller than the original count expected from the FIRST filter step above...
        count = (int)lastTake;
    }

    var sqlSliceInfo = new SqlQuerySliceInfo() { ExpectedCount = count };

    //Finally we can convert the Mapped Start & End indexes to a valid Where Clause...
    if (startIndex.HasValue && endIndex.HasValue)
    {
        //NOTE: We INCREMENT End Index by one here to intentionally Over Fetch so that we can optimize
        //      the computation of HasNextPage without knowing the Full table Count!
        sqlSliceInfo.SQL = $"WHERE ([{cursorFieldName}] BETWEEN {startIndex} AND {endIndex + 1})";
        sqlSliceInfo.IsPreviousPagePossible = true;
        sqlSliceInfo.IsNextPagePossible = true;
        sqlSliceInfo.IsEndIndexOverFetchedForNextPageCheck = true;
    }
    else if (startIndex.HasValue)
    {
        sqlSliceInfo.SQL = $"WHERE ([{cursorFieldName}] >= {startIndex})";
        sqlSliceInfo.IsPreviousPagePossible = true;
        sqlSliceInfo.IsNextPagePossible = false;
        sqlSliceInfo.IsEndIndexOverFetchedForNextPageCheck = false;
    }
    //NOTE: Is Always False as noted by ReSharper; in all cases where EndIndex has a value StartIndex will also
    //      have a value, hitting the first case above! But algorithmically it feels right to just leave this
    //      since we may optimize code above and risk changing this in the future.
    else if (endIndex.HasValue)
    {
        //NOTE: We INCREMENT End Index by one here to intentionally Over Fetch so that we can optimize
        //      the computation of HasNextPage without knowing the Full table Count!
        sqlSliceInfo.SQL = $"WHERE ([{cursorFieldName}] <= {endIndex + 1})";
        sqlSliceInfo.IsPreviousPagePossible = false;
        sqlSliceInfo.IsNextPagePossible = true;
        sqlSliceInfo.IsEndIndexOverFetchedForNextPageCheck = true;
    }
    //Handle Unique Case for when 'Last' was provided without any 'Before' cursor!
    //NOTE: Is Always True (if we get here) as noted by ReSharper; because we intentionally null out StartIndex
    //      for this case to hit when LastTake is specified without an AFTER or BEFORE!
    //      But algorithmically it feels right to just leave this since we may optimize code above and risk
    //      changing this in the future.
    else if (lastTake.HasValue)
    {
        //Computing from the End is the most complex so some performance hit is likely acceptable since it's a less used use-case.
        //NOTE: This must use the Dynamic Count() to determine the End of the existing result set...
        sqlSliceInfo.SQL = $"WHERE ([{cursorFieldName}] > (SELECT (COUNT([{cursorFieldName}]) - {(int)lastTake}) FROM CTE))";
        sqlSliceInfo.IsPreviousPagePossible = true;
        sqlSliceInfo.IsNextPagePossible = false;
        sqlSliceInfo.IsEndIndexOverFetchedForNextPageCheck = false;
    }

    return sqlSliceInfo;
}
/// <summary>
/// Executes the pre-built slice query (and optional second total-count query) against the connection,
/// materializes each row into TEntity via RepoDb's cached model mapping delegate, extracts the
/// CursorIndex for every row, and post-processes everything into a CursorPageSlice.
/// </summary>
/// <typeparam name="TEntity">The entity/model type to materialize.</typeparam>
/// <param name="dbConn">Open (or openable) DB connection; opened here if needed.</param>
/// <param name="sqlQuerySliceInfo">The SQL and pagination metadata produced by the slice query builder.</param>
/// <param name="queryParams">Optional parameters for the query.</param>
/// <param name="tableName">Optional table name used when resolving the RepoDb model mapping.</param>
/// <param name="commandTimeout">Optional command timeout (seconds).</param>
/// <param name="transaction">Optional transaction to enlist the command in.</param>
/// <param name="logTrace">Optional trace callback for query text and timing diagnostics.</param>
/// <param name="cancellationToken">Token to cancel the async DB operations.</param>
/// <returns>The fully processed CursorPageSlice with pagination flags and optional total count.</returns>
private static async Task<CursorPageSlice<TEntity>> ExecuteBatchSliceQueryAsync<TEntity>(
    this DbConnection dbConn,
    SqlQuerySliceInfo sqlQuerySliceInfo,
    object queryParams = null,
    string tableName = null,
    int? commandTimeout = null,
    IDbTransaction transaction = null,
    Action<string> logTrace = null,
    CancellationToken cancellationToken = default
) where TEntity : class
{
    var commandText = sqlQuerySliceInfo.SQL;
    logTrace?.Invoke($"Query: {commandText}");

    var timer = Stopwatch.StartNew();

    //Ensure that the DB Connection is open (RepoDb provided extension)...
    await dbConn.EnsureOpenAsync(cancellationToken).ConfigureAwait(false);
    logTrace?.Invoke($"DB Connection Established in: {timer.ToElapsedTimeDescriptiveFormat()}");

    //Re-use the RepoDb Execute Reader method to get benefits of Command & Param caches, etc.
    await using var reader = (DbDataReader)await dbConn.ExecuteReaderAsync(
        commandText: commandText,
        param: queryParams,
        commandType: CommandType.Text,
        transaction: transaction,
        commandTimeout: commandTimeout,
        cancellationToken: cancellationToken
    ).ConfigureAwait(false);

    //BBernard
    //We NEED to manually process the Reader externally here!
    //Therefore, this had to be code borrowed from RepoDb Source (DataReader.ToEnumerableAsync<TEntity>(...))
    //  core code so that we clone a minimal amount of logic outside of RepoDb due to 'internal' scope.
    var results = new List<CursorResult<TEntity>>();
    int? totalCount = null;

    if (reader != null && !reader.IsClosed)
    {
        //BBernard - 08/09/2021
        //BUGFIX: Fix issue where when there are no results we still need to check and see if TotalCount has any results...
        if (reader.HasRows)
        {
            const string cursorIndexPropName = nameof(IHaveCursor.CursorIndex);

            //It's exposed for easy use by extensions so we use Brute Force Reflection to get the Model Mapping Function
            //  because RepoDb does this very well already!
            //NOTE: The compiled Mapping Func (delegate) that is retrieved is lazy loaded into a static cache reference
            //      by Generic Type <TEntity> for extremely high performance once initialized!
            //FIX: Added .ConfigureAwait(false) for consistency with every other await in this method (library code).
            var repoDbModelMappingFunc = await GetRepoDbModelMappingFuncByBruteForce<TEntity>(
                dbConn, reader, tableName, transaction, cancellationToken
            ).ConfigureAwait(false);

            while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
            {
                //Dynamically read the Entity from the Results...
                TEntity entity = repoDbModelMappingFunc(reader);

                //Manually process the Cursor for each record...
                //NOTE(review): GetValue(string) is presumably an extension resolving the column by name
                //  (base DbDataReader.GetValue takes an ordinal) — confirm against the extension's source.
                var cursorIndex = Convert.ToInt32(reader.GetValue(cursorIndexPropName));

                //This allows us to extract the CursorIndex field and return it in a decorator class
                //  so there's NO REQUIREMENT that the Model (TEntity) have any special fields/interfaces added.
                var cursorResult = new CursorResult<TEntity>(entity, cursorIndex);
                results.Add(cursorResult);
            }
        }

        //Now attempt to step to the Total Count query result...
        //Note: We know to attempt getting the TotalCount if there is a second result set available.
        if (await reader.NextResultAsync(cancellationToken).ConfigureAwait(false)
            && await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            //This is a Scalar query so the first ordinal value is the Total Count!
            totalCount = Convert.ToInt32(reader.GetValue(0));
        }
    }

    timer.Stop();
    logTrace?.Invoke($"Query Execution Time: {timer.ToElapsedTimeDescriptiveFormat()}");

    //Process the Results and determine Pagination metadata, etc.
    var cursorPage = PostProcessResultsIntoCursorPageSlice(results, sqlQuerySliceInfo, totalCount);
    return cursorPage;
}