private void EndLoadingCollections(ICollectionPersister persister, IList<LoadingCollectionEntry> matchedCollectionEntries)
{
	if (matchedCollectionEntries == null || matchedCollectionEntries.Count == 0)
	{
		if (log.IsDebugEnabled())
		{
			log.Debug("no collections were found in result set for role: {0}", persister.Role);
		}
		return;
	}

	int count = matchedCollectionEntries.Count;
	if (log.IsDebugEnabled())
	{
		log.Debug("{0} collections were found in result set for role: {1}", count, persister.Role);
	}

	var cacheBatcher = new CacheBatcher(LoadContext.PersistenceContext.Session);
	for (int i = 0; i < count; i++)
	{
		EndLoadingCollection(matchedCollectionEntries[i], persister, data => cacheBatcher.AddToBatch(persister, data));
	}
	cacheBatcher.ExecuteBatch();

	if (log.IsDebugEnabled())
	{
		log.Debug("{0} collections initialized for role: {1}", count, persister.Role);
	}
}
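// All of the methods in this section follow the same pattern: per-row processing
// queues cache writes on a CacheBatcher, and a single ExecuteBatch()/ExecuteBatchAsync()
// call flushes them once loading completes. The nested class below is a minimal
// hedged sketch of that idea, not the actual NHibernate.Cache.CacheBatcher: its
// name, generic parameters and the put delegate are illustrative assumptions; only
// the AddToBatch/ExecuteBatch member names mirror the calls made in this section.
private sealed class CacheBatcherSketch<TPersister, TData>
{
	private readonly List<(TPersister Persister, TData Data)> _puts =
		new List<(TPersister Persister, TData Data)>();

	// Queue a cache put instead of executing it once per result-set row.
	public void AddToBatch(TPersister persister, TData data) => _puts.Add((persister, data));

	// Flush every queued put in a single pass, then reset the batcher for reuse.
	public void ExecuteBatch(Action<TPersister, TData> put)
	{
		foreach (var (persister, data) in _puts)
			put(persister, data);
		_puts.Clear();
	}
}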
private async Task EndLoadingCollectionsAsync(ICollectionPersister persister, IList<LoadingCollectionEntry> matchedCollectionEntries, CancellationToken cancellationToken)
{
	cancellationToken.ThrowIfCancellationRequested();
	if (matchedCollectionEntries == null || matchedCollectionEntries.Count == 0)
	{
		if (log.IsDebugEnabled())
		{
			log.Debug("no collections were found in result set for role: {0}", persister.Role);
		}
		return;
	}

	int count = matchedCollectionEntries.Count;
	if (log.IsDebugEnabled())
	{
		log.Debug("{0} collections were found in result set for role: {1}", count, persister.Role);
	}

	var cacheBatcher = new CacheBatcher(LoadContext.PersistenceContext.Session);
	for (int i = 0; i < count; i++)
	{
		await (EndLoadingCollectionAsync(matchedCollectionEntries[i], persister, data => cacheBatcher.AddToBatch(persister, data), cancellationToken)).ConfigureAwait(false);
	}
	await (cacheBatcher.ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false);

	if (log.IsDebugEnabled())
	{
		log.Debug("{0} collections initialized for role: {1}", count, persister.Role);
	}
}
/// <summary>
/// Finish the process of collection-loading for this bound result set. Mainly this
/// involves cleaning up resources and notifying the collections that loading is
/// complete.
/// </summary>
/// <param name="persister">The persister for which to complete loading.</param>
/// <param name="skipCache">Indicates whether the collections must not be put in the cache.</param>
/// <param name="cacheBatcher">The cache batcher used to batch-put the collections into the cache.</param>
public void EndLoadingCollections(ICollectionPersister persister, bool skipCache, CacheBatcher cacheBatcher)
{
	if (!loadContexts.HasLoadingCollectionEntries && (localLoadingCollectionKeys.Count == 0))
	{
		return;
	}

	// in an effort to avoid concurrent-modification-exceptions (from
	// potential recursive calls back through here as a result of the
	// eventual call to PersistentCollection#endRead), we scan the
	// internal loadingCollections map for matches and store those matches
	// in a temp collection. the temp collection is then used to "drive"
	// the #endRead processing.
	List<CollectionKey> toRemove = new List<CollectionKey>();
	List<LoadingCollectionEntry> matches = new List<LoadingCollectionEntry>();
	foreach (CollectionKey collectionKey in localLoadingCollectionKeys)
	{
		ISessionImplementor session = LoadContext.PersistenceContext.Session;

		LoadingCollectionEntry lce = loadContexts.LocateLoadingCollectionEntry(collectionKey);
		if (lce == null)
		{
			log.Warn("In CollectionLoadContext#endLoadingCollections, localLoadingCollectionKeys contained [{0}], but no LoadingCollectionEntry was found in loadContexts", collectionKey);
		}
		else if (lce.ResultSet == resultSet && lce.Persister == persister)
		{
			matches.Add(lce);
			if (lce.Collection.Owner == null)
			{
				session.PersistenceContext.AddUnownedCollection(new CollectionKey(persister, lce.Key), lce.Collection);
			}
			if (log.IsDebugEnabled())
			{
				log.Debug("removing collection load entry [{0}]", lce);
			}

			// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
			loadContexts.UnregisterLoadingCollectionXRef(collectionKey);
			toRemove.Add(collectionKey);
		}
	}
	localLoadingCollectionKeys.ExceptWith(toRemove);

	EndLoadingCollections(persister, matches, skipCache, cacheBatcher);
	if ((localLoadingCollectionKeys.Count == 0))
	{
		// todo : hack!!!
		// NOTE : here we cleanup the load context when we have no more local
		// LCE entries. This "works" for the time being because really
		// only the collection load contexts are implemented. Long term,
		// this cleanup should become part of the "close result set"
		// processing from the (sandbox/jdbc) jdbc-container code.
		loadContexts.Cleanup(resultSet);
	}
}
protected List<object> DoList()
{
	bool statsEnabled = session.Factory.Statistics.IsStatisticsEnabled;
	var stopWatch = new Stopwatch();
	if (statsEnabled)
	{
		stopWatch.Start();
	}

	int rowCount = 0;
	var results = new List<object>();

	var hydratedObjects = new List<object>[Translators.Count];
	List<EntityKey[]>[] subselectResultKeys = new List<EntityKey[]>[Translators.Count];
	bool[] createSubselects = new bool[Translators.Count];
	var cacheBatcher = new CacheBatcher(session);

	try
	{
		using (var reader = resultSetsCommand.GetReader(_timeout))
		{
			if (log.IsDebugEnabled())
			{
				log.Debug("Executing {0} queries", translators.Count);
			}
			for (int i = 0; i < translators.Count; i++)
			{
				ITranslator translator = Translators[i];
				QueryParameters parameter = Parameters[i];

				int entitySpan = translator.Loader.EntityPersisters.Length;
				hydratedObjects[i] = entitySpan > 0 ? new List<object>() : null;
				RowSelection selection = parameter.RowSelection;
				int maxRows = Loader.Loader.HasMaxRows(selection) ? selection.MaxRows : int.MaxValue;
				if (!dialect.SupportsLimitOffset || !translator.Loader.UseLimit(selection, dialect))
				{
					Loader.Loader.Advance(reader, selection);
				}

				if (parameter.HasAutoDiscoverScalarTypes)
				{
					translator.Loader.AutoDiscoverTypes(reader, parameter, null);
				}

				LockMode[] lockModeArray = translator.Loader.GetLockModes(parameter.LockModes);
				EntityKey optionalObjectKey = Loader.Loader.GetOptionalObjectKey(parameter, session);

				createSubselects[i] = translator.Loader.IsSubselectLoadingEnabled;
				subselectResultKeys[i] = createSubselects[i] ? new List<EntityKey[]>() : null;

				translator.Loader.HandleEmptyCollections(parameter.CollectionKeys, reader, session);

				EntityKey[] keys = new EntityKey[entitySpan]; // we can reuse it each time

				if (log.IsDebugEnabled())
				{
					log.Debug("processing result set");
				}

				IList tempResults = new List<object>();
				int count;
				for (count = 0; count < maxRows && reader.Read(); count++)
				{
					if (log.IsDebugEnabled())
					{
						log.Debug("result set row: {0}", count);
					}

					rowCount++;
					object result = translator.Loader.GetRowFromResultSet(
						reader, session, parameter, lockModeArray, optionalObjectKey, hydratedObjects[i], keys, true, null, null,
						(persister, data) => cacheBatcher.AddToBatch(persister, data));
					tempResults.Add(result);

					if (createSubselects[i])
					{
						subselectResultKeys[i].Add(keys);
						keys = new EntityKey[entitySpan]; //can't reuse in this case
					}
				}

				if (log.IsDebugEnabled())
				{
					log.Debug("done processing result set ({0} rows)", count);
				}

				results.Add(tempResults);

				if (log.IsDebugEnabled())
				{
					log.Debug("Query {0} returned {1} results", i, tempResults.Count);
				}

				reader.NextResult();
			}

			for (int i = 0; i < translators.Count; i++)
			{
				ITranslator translator = translators[i];
				QueryParameters parameter = parameters[i];

				translator.Loader.InitializeEntitiesAndCollections(hydratedObjects[i], reader, session, false, cacheBatcher);

				if (createSubselects[i])
				{
					translator.Loader.CreateSubselects(subselectResultKeys[i], parameter, session);
				}
			}

			cacheBatcher.ExecuteBatch();
		}
	}
	catch (Exception sqle)
	{
		log.Error(sqle, "Failed to execute multi query: [{0}]", resultSetsCommand.Sql);
		throw ADOExceptionHelper.Convert(session.Factory.SQLExceptionConverter, sqle, "Failed to execute multi query", resultSetsCommand.Sql);
	}

	if (statsEnabled)
	{
		stopWatch.Stop();
		session.Factory.StatisticsImplementor.QueryExecuted(string.Format("{0} queries (MultiQuery)", translators.Count), rowCount, stopWatch.Elapsed);
	}
	return results;
}
private void GetResultsFromDatabase(IList results)
{
	Stopwatch stopWatch = null;
	if (session.Factory.Statistics.IsStatisticsEnabled)
	{
		stopWatch = Stopwatch.StartNew();
	}

	int rowCount = 0;
	var cacheBatcher = new CacheBatcher(session);

	try
	{
		using (var reader = resultSetsCommand.GetReader(_timeout))
		{
			var hydratedObjects = new List<object>[loaders.Count];
			List<EntityKey[]>[] subselectResultKeys = new List<EntityKey[]>[loaders.Count];
			bool[] createSubselects = new bool[loaders.Count];

			for (int i = 0; i < loaders.Count; i++)
			{
				CriteriaLoader loader = loaders[i];
				int entitySpan = loader.EntityPersisters.Length;
				hydratedObjects[i] = entitySpan == 0 ? null : new List<object>(entitySpan);
				EntityKey[] keys = new EntityKey[entitySpan];
				QueryParameters queryParameters = parameters[i];
				IList tmpResults = new List<object>();

				RowSelection selection = parameters[i].RowSelection;
				createSubselects[i] = loader.IsSubselectLoadingEnabled;
				subselectResultKeys[i] = createSubselects[i] ? new List<EntityKey[]>() : null;
				int maxRows = Loader.Loader.HasMaxRows(selection) ? selection.MaxRows : int.MaxValue;
				if (!dialect.SupportsLimitOffset || !loader.UseLimit(selection, dialect))
				{
					Loader.Loader.Advance(reader, selection);
				}
				int count;
				for (count = 0; count < maxRows && reader.Read(); count++)
				{
					rowCount++;
					object o = loader.GetRowFromResultSet(
						reader, session, queryParameters, loader.GetLockModes(queryParameters.LockModes),
						null, hydratedObjects[i], keys, true, null, null,
						(persister, data) => cacheBatcher.AddToBatch(persister, data));
					if (createSubselects[i])
					{
						subselectResultKeys[i].Add(keys);
						keys = new EntityKey[entitySpan]; //can't reuse in this case
					}
					tmpResults.Add(o);
				}
				results.Add(tmpResults);
				reader.NextResult();
			}

			for (int i = 0; i < loaders.Count; i++)
			{
				CriteriaLoader loader = loaders[i];
				loader.InitializeEntitiesAndCollections(hydratedObjects[i], reader, session, session.DefaultReadOnly, cacheBatcher);

				if (createSubselects[i])
				{
					loader.CreateSubselects(subselectResultKeys[i], parameters[i], session);
				}
			}

			cacheBatcher.ExecuteBatch();
		}
	}
	catch (Exception sqle)
	{
		log.Error(sqle, "Failed to execute multi criteria: [{0}]", resultSetsCommand.Sql);
		throw ADOExceptionHelper.Convert(session.Factory.SQLExceptionConverter, sqle, "Failed to execute multi criteria", resultSetsCommand.Sql);
	}

	if (stopWatch != null)
	{
		stopWatch.Stop();
		session.Factory.StatisticsImplementor.QueryExecuted(string.Format("{0} queries (MultiCriteria)", loaders.Count), rowCount, stopWatch.Elapsed);
	}
}
protected void ExecuteBatched()
{
	var querySpaces = new HashSet<string>(_queries.SelectMany(t => t.GetQuerySpaces()));
	if (querySpaces.Count > 0)
	{
		// The auto-flush must be handled before querying the cache, because an auto-flush may
		// have to invalidate cached data, data which otherwise would cause a command to be skipped.
		Session.AutoFlushIfRequired(querySpaces);
	}

	GetCachedResults();

	var resultSetsCommand = Session.Factory.ConnectionProvider.Driver.GetResultSetsCommand(Session);
	CombineQueries(resultSetsCommand);

	Stopwatch stopWatch = null;
	if (Session.Factory.Statistics.IsStatisticsEnabled)
	{
		stopWatch = Stopwatch.StartNew();
	}

	if (Log.IsDebugEnabled())
	{
		Log.Debug("Multi query with {0} queries: {1}", _queries.Count, resultSetsCommand.Sql);
	}

	var rowCount = 0;
	CacheBatcher cacheBatcher = null;
	try
	{
		if (resultSetsCommand.HasQueries)
		{
			cacheBatcher = new CacheBatcher(Session);
			using (var reader = resultSetsCommand.GetReader(Timeout))
			{
				foreach (var query in _queries)
				{
					if (query.CachingInformation != null)
					{
						foreach (var cachingInfo in query.CachingInformation)
						{
							cachingInfo.SetCacheBatcher(cacheBatcher);
						}
					}

					rowCount += query.ProcessResultsSet(reader);
				}
			}
		}

		foreach (var query in _queries)
		{
			//TODO 6.0: Replace with query.ProcessResults();
			if (query is IQueryBatchItemWithAsyncProcessResults q)
				q.ProcessResults();
			else
				query.ProcessResults();
		}

		cacheBatcher?.ExecuteBatch();

		PutCacheableResults();
	}
	catch (Exception sqle)
	{
		Log.Error(sqle, "Failed to execute query batch: [{0}]", resultSetsCommand.Sql);
		throw ADOExceptionHelper.Convert(
			Session.Factory.SQLExceptionConverter,
			sqle,
			"Failed to execute query batch",
			resultSetsCommand.Sql);
	}

	if (stopWatch != null && resultSetsCommand.HasQueries)
	{
		stopWatch.Stop();
		Session.Factory.StatisticsImplementor.QueryExecuted(
			resultSetsCommand.Sql.ToString(),
			rowCount,
			stopWatch.Elapsed);
	}
}
/// <inheritdoc />
public async Task<int> ProcessResultsSetAsync(DbDataReader reader, CancellationToken cancellationToken)
{
	cancellationToken.ThrowIfCancellationRequested();
	ThrowIfNotInitialized();

	var dialect = Session.Factory.Dialect;
	var hydratedObjects = new List<object>[_queryInfos.Count];

	using (Session.SwitchCacheMode(_cacheMode))
	{
		var rowCount = 0;
		for (var i = 0; i < _queryInfos.Count; i++)
		{
			var queryInfo = _queryInfos[i];
			var loader = queryInfo.Loader;
			var queryParameters = queryInfo.Parameters;

			//Skip processing for items already loaded from cache
			if (queryInfo.IsResultFromCache)
			{
				continue;
			}

			var entitySpan = loader.EntityPersisters.Length;
			hydratedObjects[i] = entitySpan == 0 ? null : new List<object>(entitySpan);
			var keys = new EntityKey[entitySpan];

			var selection = queryParameters.RowSelection;
			var createSubselects = loader.IsSubselectLoadingEnabled;
			_subselectResultKeys[i] = createSubselects ? new List<EntityKey[]>() : null;
			var maxRows = Loader.Loader.HasMaxRows(selection) ? selection.MaxRows : int.MaxValue;
			var advanceSelection = !dialect.SupportsLimitOffset || !loader.UseLimit(selection, dialect);
			if (advanceSelection)
			{
				await (Loader.Loader.AdvanceAsync(reader, selection, cancellationToken)).ConfigureAwait(false);
			}

			var forcedResultTransformer = queryInfo.CacheKey?.ResultTransformer;
			if (queryParameters.HasAutoDiscoverScalarTypes)
			{
				loader.AutoDiscoverTypes(reader, queryParameters, forcedResultTransformer);
			}

			var lockModeArray = loader.GetLockModes(queryParameters.LockModes);
			var optionalObjectKey = Loader.Loader.GetOptionalObjectKey(queryParameters, Session);
			var tmpResults = new List<object>();

			var cacheBatcher = queryInfo.CacheBatcher;
			var ownCacheBatcher = cacheBatcher == null;
			if (ownCacheBatcher)
			{
				cacheBatcher = new CacheBatcher(Session);
			}

			for (var count = 0; count < maxRows && await (reader.ReadAsync(cancellationToken)).ConfigureAwait(false); count++)
			{
				rowCount++;
				var o = await (loader.GetRowFromResultSetAsync(
					reader, Session, queryParameters, lockModeArray, optionalObjectKey, hydratedObjects[i], keys, true,
					forcedResultTransformer,
					(persister, data) => cacheBatcher.AddToBatch(persister, data),
					cancellationToken)).ConfigureAwait(false);
				if (loader.IsSubselectLoadingEnabled)
				{
					_subselectResultKeys[i].Add(keys);
					keys = new EntityKey[entitySpan]; //can't reuse in this case
				}
				tmpResults.Add(o);
			}

			queryInfo.Result = tmpResults;
			if (queryInfo.CanPutToCache)
			{
				queryInfo.ResultToCache = tmpResults;
			}

			if (ownCacheBatcher)
			{
				await (cacheBatcher.ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false);
			}

			await (reader.NextResultAsync(cancellationToken)).ConfigureAwait(false);
		}

		await (InitializeEntitiesAndCollectionsAsync(reader, hydratedObjects, cancellationToken)).ConfigureAwait(false);

		return rowCount;
	}
}
protected async Task ExecuteBatchedAsync(CancellationToken cancellationToken)
{
	cancellationToken.ThrowIfCancellationRequested();
	var querySpaces = new HashSet<string>(_queries.SelectMany(t => t.GetQuerySpaces()));
	if (querySpaces.Count > 0)
	{
		// The auto-flush must be handled before querying the cache, because an auto-flush may
		// have to invalidate cached data, data which otherwise would cause a command to be skipped.
		await (Session.AutoFlushIfRequiredAsync(querySpaces, cancellationToken)).ConfigureAwait(false);
	}

	await (GetCachedResultsAsync(cancellationToken)).ConfigureAwait(false);

	var resultSetsCommand = Session.Factory.ConnectionProvider.Driver.GetResultSetsCommand(Session);
	CombineQueries(resultSetsCommand);

	Stopwatch stopWatch = null;
	if (Session.Factory.Statistics.IsStatisticsEnabled)
	{
		stopWatch = Stopwatch.StartNew();
	}

	if (Log.IsDebugEnabled())
	{
		Log.Debug("Multi query with {0} queries: {1}", _queries.Count, resultSetsCommand.Sql);
	}

	var rowCount = 0;
	try
	{
		if (resultSetsCommand.HasQueries)
		{
			using (var reader = await (resultSetsCommand.GetReaderAsync(Timeout, cancellationToken)).ConfigureAwait(false))
			{
				var cacheBatcher = new CacheBatcher(Session);
				foreach (var query in _queries)
				{
					if (query.CachingInformation != null)
					{
						foreach (var cachingInfo in query.CachingInformation.Where(ci => ci.IsCacheable))
						{
							cachingInfo.SetCacheBatcher(cacheBatcher);
						}
					}

					rowCount += await (query.ProcessResultsSetAsync(reader, cancellationToken)).ConfigureAwait(false);
				}
				await (cacheBatcher.ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false);
			}
		}

		// Query cacheable results must be cached untransformed: the put does not need to wait for
		// the ProcessResults.
		await (PutCacheableResultsAsync(cancellationToken)).ConfigureAwait(false);

		foreach (var query in _queries)
		{
			query.ProcessResults();
		}
	}
	catch (OperationCanceledException)
	{
		throw;
	}
	catch (Exception sqle)
	{
		Log.Error(sqle, "Failed to execute query batch: [{0}]", resultSetsCommand.Sql);
		throw ADOExceptionHelper.Convert(
			Session.Factory.SQLExceptionConverter,
			sqle,
			"Failed to execute query batch",
			resultSetsCommand.Sql);
	}

	if (stopWatch != null && resultSetsCommand.HasQueries)
	{
		stopWatch.Stop();
		Session.Factory.StatisticsImplementor.QueryExecuted(
			resultSetsCommand.Sql.ToString(),
			rowCount,
			stopWatch.Elapsed);
	}
}
/// <inheritdoc />
public void SetCacheBatcher(CacheBatcher cacheBatcher)
{
	CacheBatcher = cacheBatcher;
}
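// Hedged usage sketch: a query batch injects one shared batcher into each cacheable
// query before processing the result sets, so every query's cache puts are flushed
// together by the batch owner (see ExecuteBatched/ExecuteBatchedAsync above). The
// local names below are illustrative assumptions:
//
//	var sharedBatcher = new CacheBatcher(Session);
//	foreach (var cachingInfo in query.CachingInformation)
//		cachingInfo.SetCacheBatcher(sharedBatcher);
//	rowCount += query.ProcessResultsSet(reader); // rows call sharedBatcher.AddToBatch(...)
//	sharedBatcher.ExecuteBatch();                // one flush for the whole batch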
/// <inheritdoc />
public int ProcessResultsSet(DbDataReader reader)
{
	ThrowIfNotInitialized();

	var dialect = Session.Factory.Dialect;
	var hydratedObjects = new List<object>[_queryInfos.Count];
	var isDebugLog = Log.IsDebugEnabled();

	using (Session.SwitchCacheMode(_cacheMode))
	{
		var rowCount = 0;
		for (var i = 0; i < _queryInfos.Count; i++)
		{
			var queryInfo = _queryInfos[i];
			var loader = queryInfo.Loader;
			var queryParameters = queryInfo.Parameters;

			//Skip processing for items already loaded from cache
			if (queryInfo.IsResultFromCache)
			{
				continue;
			}

			var entitySpan = loader.EntityPersisters.Length;
			hydratedObjects[i] = entitySpan == 0 ? null : new List<object>(entitySpan);
			var keys = new EntityKey[entitySpan];

			var selection = queryParameters.RowSelection;
			var createSubselects = loader.IsSubselectLoadingEnabled;
			_subselectResultKeys[i] = createSubselects ? new List<EntityKey[]>() : null;
			var maxRows = Loader.Loader.HasMaxRows(selection) ? selection.MaxRows : int.MaxValue;
			var advanceSelection = !dialect.SupportsLimitOffset || !loader.UseLimit(selection, dialect);
			if (advanceSelection)
			{
				Loader.Loader.Advance(reader, selection);
			}

			var forcedResultTransformer = queryInfo.CacheKey?.ResultTransformer;
			if (queryParameters.HasAutoDiscoverScalarTypes)
			{
				loader.AutoDiscoverTypes(reader, queryParameters, forcedResultTransformer);
			}

			var lockModeArray = loader.GetLockModes(queryParameters.LockModes);
			var optionalObjectKey = Loader.Loader.GetOptionalObjectKey(queryParameters, Session);
			var tmpResults = new List<object>();
			var queryCacheBuilder = queryInfo.IsCacheable ? new QueryCacheResultBuilder(loader) : null;

			var cacheBatcher = queryInfo.CacheBatcher;
			var ownCacheBatcher = cacheBatcher == null;
			if (ownCacheBatcher)
			{
				cacheBatcher = new CacheBatcher(Session);
			}

			if (isDebugLog)
			{
				Log.Debug("processing result set");
			}

			int count;
			for (count = 0; count < maxRows && reader.Read(); count++)
			{
				if (isDebugLog)
				{
					Log.Debug("result set row: {0}", count);
				}

				rowCount++;
				var o = loader.GetRowFromResultSet(
					reader, Session, queryParameters, lockModeArray, optionalObjectKey, hydratedObjects[i], keys, true,
					forcedResultTransformer, queryCacheBuilder,
					(persister, data) => cacheBatcher.AddToBatch(persister, data));
				if (loader.IsSubselectLoadingEnabled)
				{
					_subselectResultKeys[i].Add(keys);
					keys = new EntityKey[entitySpan]; //can't reuse in this case
				}
				tmpResults.Add(o);
			}

			if (isDebugLog)
			{
				Log.Debug("done processing result set ({0} rows)", count);
			}

			queryInfo.Result = tmpResults;
			if (queryInfo.CanPutToCache)
			{
				queryInfo.ResultToCache = queryCacheBuilder.Result;
			}

			if (ownCacheBatcher)
			{
				cacheBatcher.ExecuteBatch();
			}

			reader.NextResult();
		}

		StopLoadingCollections(reader);
		_reader = reader;
		_hydratedObjects = hydratedObjects;
		return rowCount;
	}
}
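// Hedged sketch of the batcher-ownership rule in ProcessResultsSet and
// ProcessResultsSetAsync above: a batcher injected via SetCacheBatcher is flushed
// later by the owning batch, while a locally created one is flushed immediately.
// Only the CacheBatcher calls mirror the real code; the condensed shape is an
// illustrative assumption:
//
//	var cacheBatcher = queryInfo.CacheBatcher;    // null unless a batch injected one
//	var ownCacheBatcher = cacheBatcher == null;
//	if (ownCacheBatcher)
//		cacheBatcher = new CacheBatcher(Session); // standalone query owns the flush
//	// ... per row: cacheBatcher.AddToBatch(persister, data) ...
//	if (ownCacheBatcher)
//		cacheBatcher.ExecuteBatch();              // otherwise the batch flushes it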