The following examples show how to use the org.hibernate.stat.spi.StatisticsImplementor API; follow the GitHub links to view the full source code of each example.
/**
 * Executes the given SQL query reactively and, when statistics collection is
 * enabled, records the query execution together with its row count and
 * wall-clock duration.
 *
 * @param sql the SQL to execute
 * @param queryIdentifier identifier under which the execution is recorded in the statistics
 * @param session the session to execute against
 * @param queryParameters bound parameters for the query
 * @param forcedResultTransformer transformer applied to each result row, or null
 * @return a stage completing with the query results
 * @throws HibernateException on query failure
 */
default CompletionStage<List<Object>> doReactiveList(
		final String sql, final String queryIdentifier,
		final SessionImplementor session,
		final QueryParameters queryParameters,
		final ResultTransformer forcedResultTransformer)
		throws HibernateException {
	final StatisticsImplementor statistics = session.getSessionFactory().getStatistics();
	final boolean collectStats = statistics.isStatisticsEnabled();
	// Only pay for the clock read when we will actually report a duration.
	final long start = collectStats ? System.nanoTime() : 0;
	return doReactiveQueryAndInitializeNonLazyCollections( sql, session, queryParameters, true, forcedResultTransformer )
			.handle( (list, throwable) -> {
				CompletionStages.logSqlException( throwable, () -> "could not execute query", sql );
				if ( collectStats && throwable == null ) {
					statistics.queryExecuted(
							queryIdentifier,
							list.size(),
							TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - start )
					);
				}
				// Propagate the failure if there was one, otherwise the result list.
				return CompletionStages.returnOrRethrow( throwable, list );
			} );
}
/**
 * Performs the process of loading an entity from the configured
 * underlying datasource.
 *
 * @param event The load event
 * @param persister The persister for the entity being requested for load
 *
 * @return The object loaded from the datasource, or null if not found.
 */
protected CompletionStage<Object> loadFromDatasource(
		final LoadEvent event,
		final EntityPersister persister) {
	// Delegate to the reactive persister; the stage completes with the loaded
	// entity (or null) once the database round trip finishes.
	CompletionStage<Object> entity =
			( (ReactiveEntityPersister) persister).reactiveLoad(
					event.getEntityId(),
					event.getInstanceToLoad(),
					event.getLockOptions(),
					event.getSession()
			);
	// NOTE(review): the fetch statistic is recorded when the stage is CREATED,
	// not when the load completes (or succeeds) — confirm this timing is intended.
	final StatisticsImplementor statistics = event.getSession().getFactory().getStatistics();
	if ( event.isAssociationFetch() && statistics.isStatisticsEnabled() ) {
		statistics.fetchEntity( event.getEntityClassName() );
	}
	return entity;
}
/**
 * Obtains the database snapshot of the entity's state for dirty-checking.
 * When the persister requires a select-before-update, the snapshot is read
 * from the database (a missing row is reported as an optimistic failure);
 * otherwise the snapshot cached in the persistence context is returned.
 *
 * @param session the active session
 * @param persister persister of the entity being updated
 * @param id identifier of the entity
 * @return the snapshot state array, or null when no cached snapshot exists
 */
private Object[] getDatabaseSnapshot(SessionImplementor session, EntityPersister persister, Serializable id) {
	final PersistenceContext context = session.getPersistenceContextInternal();
	if ( !persister.isSelectBeforeUpdateRequired() ) {
		// TODO: optimize away this lookup for entities w/o unsaved-value="undefined"
		return context.getCachedDatabaseSnapshot( session.generateEntityKey( id, persister ) );
	}
	final Object[] snapshot = context.getDatabaseSnapshot( id, persister );
	if ( snapshot != null ) {
		return snapshot;
	}
	//do we even really need this? the update will fail anyway....
	final StatisticsImplementor statistics = session.getFactory().getStatistics();
	if ( statistics.isStatisticsEnabled() ) {
		statistics.optimisticFailure( persister.getEntityName() );
	}
	throw new StaleObjectStateException( persister.getEntityName(), id );
}
/**
 * Reactive flush: if the persistence context holds any managed entities or
 * collection entries, flush them to the database and fire the listener and
 * statistics callbacks; otherwise complete immediately.
 *
 * @param event the flush event, carrying the session and processed counts
 * @return a stage that completes when the flush cycle has finished
 */
@Override
public CompletionStage<Void> reactiveOnFlush(FlushEvent event) throws HibernateException {
	final EventSource source = event.getSession();
	final PersistenceContext persistenceContext = source.getPersistenceContextInternal();
	// Only do work when there is actually something to flush.
	if ( persistenceContext.getNumberOfManagedEntities() > 0 ||
			persistenceContext.getCollectionEntriesSize() > 0 ) {
		source.getEventListenerManager().flushStart();
		return flushEverythingToExecutions(event)
				.thenCompose( v -> performExecutions(source) )
				.thenRun( () -> postFlush( source ) )
				// flushEnd runs on both success and failure, balancing flushStart above.
				.whenComplete( (v, x) ->
						source.getEventListenerManager().flushEnd(
								event.getNumberOfEntitiesProcessed(),
								event.getNumberOfCollectionsProcessed()
						))
				// NOTE(review): postPostFlush and the statistics update are skipped when an
				// earlier step failed (thenRun is bypassed on exceptional completion) — confirm intended.
				.thenRun( () -> {
					postPostFlush( source );
					final StatisticsImplementor statistics = source.getFactory().getStatistics();
					if ( statistics.isStatisticsEnabled() ) {
						statistics.flush();
					}
				} );
	}
	// Nothing to flush: return an already-completed stage.
	return CompletionStages.nullFuture();
}
/**
 * Creates the statistics service for a session factory, honoring a custom
 * statistics factory configured under the {@code STATS_BUILDER} setting.
 *
 * @param context carries the session factory and its service registry
 * @return the initiated statistics service
 */
@Override
public StatisticsImplementor initiateService(SessionFactoryServiceInitiatorContext context) {
	// Look up the (optional) custom statistics builder from the configuration settings.
	final ConfigurationService configuration =
			context.getServiceRegistry().getService( ConfigurationService.class );
	final Object builderSetting = configuration.getSettings().get( STATS_BUILDER );
	return initiateServiceInternal( context.getSessionFactory(), builderSetting, context.getServiceRegistry() );
}
/**
 * Creates the statistics service for the given session factory, honoring a
 * custom statistics factory configured under the {@code STATS_BUILDER} setting.
 *
 * @param sessionFactory the factory being built
 * @param sessionFactoryOptions options of the factory (unused here)
 * @param registry the service registry to resolve configuration from
 * @return the initiated statistics service
 */
@Override
public StatisticsImplementor initiateService(
		SessionFactoryImplementor sessionFactory,
		SessionFactoryOptions sessionFactoryOptions,
		ServiceRegistryImplementor registry) {
	// The STATS_BUILDER setting may name or supply a custom StatisticsFactory.
	final ConfigurationService configuration = registry.getService( ConfigurationService.class );
	final Object builderSetting = configuration.getSettings().get( STATS_BUILDER );
	return initiateServiceInternal( sessionFactory, builderSetting, registry );
}
/**
 * Reactively delete the entity row: lock the second-level cache entry (if any),
 * issue the reactive DELETE unless vetoed or handled by a database-level
 * cascade, then evict the entity from the persistence context and cache.
 *
 * @return a stage completing once post-delete bookkeeping is done
 * @throws HibernateException on mapping or assertion failures
 */
@Override
public CompletionStage<Void> reactiveExecute() throws HibernateException {
	final Serializable id = getId();
	final EntityPersister persister = getPersister();
	final SharedSessionContractImplementor session = getSession();
	final Object instance = getInstance();

	// Pre-delete listeners may veto the deletion.
	final boolean veto = preDelete();

	Object version = getVersion();
	if ( persister.isVersionPropertyGenerated() ) {
		// we need to grab the version value from the entity, otherwise
		// we have issues with generated-version entities that may have
		// multiple actions queued during the same flush
		version = persister.getVersion( instance );
	}

	// Lock the cache entry up front so concurrent loads cannot repopulate stale data.
	final Object ck;
	if ( persister.canWriteToCache() ) {
		final EntityDataAccess cache = persister.getCacheAccessStrategy();
		ck = cache.generateCacheKey( id, persister, session.getFactory(), session.getTenantIdentifier() );
		setLock( cache.lockItem( session, ck, version ) );
	}
	else {
		ck = null;
	}

	// Skip the SQL DELETE when the row is removed by a FK cascade or a listener vetoed.
	CompletionStage<?> deleteStep = CompletionStages.nullFuture();
	if ( !isCascadeDeleteEnabled() && !veto ) {
		deleteStep = ((ReactiveEntityPersister) persister).deleteReactive( id, version, instance, session );
	}

	return deleteStep.thenAccept( deleteAR -> {
		//postDelete:
		// After actually deleting a row, record the fact that the instance no longer
		// exists on the database (needed for identity-column key generation), and
		// remove it from the session cache
		final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
		final EntityEntry entry = persistenceContext.removeEntry( instance );
		if ( entry == null ) {
			throw new AssertionFailure( "possible non-threadsafe access to session" );
		}
		entry.postDelete();
		persistenceContext.removeEntity( entry.getEntityKey() );
		persistenceContext.removeProxy( entry.getEntityKey() );
		if ( persister.canWriteToCache() ) {
			persister.getCacheAccessStrategy().remove( session, ck );
		}
		persistenceContext.getNaturalIdHelper().removeSharedNaturalIdCrossReference(
				persister,
				id,
				getNaturalIdValues()
		);
		postDelete();
		// Statistics count only real deletions, not vetoed ones.
		final StatisticsImplementor statistics = getSession().getFactory().getStatistics();
		if ( statistics.isStatisticsEnabled() && !veto ) {
			statistics.deleteEntity( getPersister().getEntityName() );
		}
	} );
}
/**
 * Reactively execute the entity INSERT: nullify transient references, run the
 * reactive insert (obtaining any generated identifier), then register the new
 * key with the persistence context and fire post-insert callbacks.
 *
 * @return a stage completing once the insert and its bookkeeping are done
 * @throws HibernateException on mapping failures
 */
@Override
public CompletionStage<Void> reactiveExecute() throws HibernateException {
	CompletionStage<Void> stage = reactiveNullifyTransientReferencesIfNotAlready();
	final EntityPersister persister = getPersister();
	final SharedSessionContractImplementor session = getSession();
	final Object instance = getInstance();

	// Pre-insert listeners may veto the insertion.
	setVeto( preInsert() );

	// Don't need to lock the cache here, since if someone
	// else inserted the same pk first, the insert would fail
	if ( !isVeto() ) {
		return stage.thenCompose( v -> ( (ReactiveEntityPersister) persister ).insertReactive( getState(), instance, session ) )
				.thenAccept( generatedId -> {
					setGeneratedId(generatedId);
					if (persister.hasInsertGeneratedProperties()) {
						// Re-read database-generated (non-id) properties into the entity state.
						persister.processInsertGeneratedProperties(generatedId, instance, getState(), session);
					}
					//need to do that here rather than in the save event listener to let
					//the post insert events to have a id-filled entity when IDENTITY is used (EJB3)
					persister.setIdentifier(instance, generatedId, session);
					final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
					persistenceContext.registerInsertedKey(getPersister(), generatedId);
					EntityKey entityKey = session.generateEntityKey(generatedId, persister);
					setEntityKey( entityKey );
					// Fails if a different instance with the same key is already attached.
					persistenceContext.checkUniqueness(entityKey, getInstance());
					postInsert();
					final StatisticsImplementor statistics = session.getFactory().getStatistics();
					if ( statistics.isStatisticsEnabled() && !isVeto() ) {
						statistics.insertEntity( getPersister().getEntityName() );
					}
					markExecuted();
				});
	}
	else {
		// Vetoed: fire post-insert callbacks and mark done without touching the database.
		postInsert();
		markExecuted();
		return stage;
	}
}
/**
 * Reactive auto-flush before query execution: flushes pending changes only
 * when the query spaces overlap the dirty state, firing the partial-flush
 * listener callbacks and statistics around the work.
 *
 * <p>Fixes relative to the previous version:
 * <ul>
 *   <li>{@code partialFlushStart()} is now balanced by {@code partialFlushEnd(...)}
 *       (it was previously paired with {@code flushEnd(...)});</li>
 *   <li>the stage composed with {@code whenComplete} is returned (the composed
 *       stage was previously discarded, so callers could proceed before the
 *       listener-end callback had run).</li>
 * </ul>
 *
 * @param event the auto-flush event carrying the session and query spaces
 * @return a stage completing once the (possible) flush and listener callbacks are done
 */
@Override
public CompletionStage<Void> reactiveOnAutoFlush(AutoFlushEvent event) throws HibernateException {
	final EventSource source = event.getSession();
	final SessionEventListenerManager eventListenerManager = source.getEventListenerManager();
	eventListenerManager.partialFlushStart();

	CompletionStage<Void> autoFlushStage = CompletionStages.nullFuture();
	if ( flushMightBeNeeded( source ) ) {
		// Need to get the number of collection removals before flushing to executions
		// (because flushing to executions can add collection removal actions to the action queue).
		final ActionQueue actionQueue = source.getActionQueue();
		final int oldSize = actionQueue.numberOfCollectionRemovals();
		autoFlushStage = flushEverythingToExecutions( event )
				.thenCompose( v -> {
					if ( flushIsReallyNeeded( event, source ) ) {
						LOG.trace( "Need to execute flush" );
						event.setFlushRequired( true );
						return performExecutions( source )
								.thenRun( () -> postFlush( source ) )
								.thenRun( () -> postPostFlush( source ) )
								.thenRun( () -> {
									final StatisticsImplementor statistics = source.getFactory().getStatistics();
									if ( statistics.isStatisticsEnabled() ) {
										statistics.flush();
									}
								} );
					}
					else {
						LOG.trace( "Don't need to execute flush" );
						event.setFlushRequired( false );
						// Undo any collection-removal actions queued by the needed-check.
						actionQueue.clearFromFlushNeededCheck( oldSize );
						return CompletionStages.nullFuture();
					}
				} );
	}
	// Return the COMPOSED stage so downstream continuations observe the
	// listener-end callback; whenComplete preserves the original outcome
	// (value or exception) of autoFlushStage.
	return autoFlushStage.whenComplete( (v, x) ->
			eventListenerManager.partialFlushEnd(
					event.getNumberOfEntitiesProcessed(),
					event.getNumberOfCollectionsProcessed()
			) );
}
/**
 * called by a collection that wants to initialize itself
 */
public CompletionStage<Void> onReactiveInitializeCollection(InitializeCollectionEvent event) throws HibernateException {
	PersistentCollection collection = event.getCollection();
	SessionImplementor source = event.getSession();

	CollectionEntry ce = source.getPersistenceContextInternal().getCollectionEntry( collection );
	if ( ce == null ) {
		throw new HibernateException( "collection was evicted" );
	}
	if ( !collection.wasInitialized() ) {
		final CollectionPersister ceLoadedPersister = ce.getLoadedPersister();
		if ( LOG.isTraceEnabled() ) {
			LOG.tracev( "Initializing collection {0}",
					MessageHelper.collectionInfoString( ceLoadedPersister, collection, ce.getLoadedKey(), source ) );
			LOG.trace( "Checking second-level cache" );
		}

		// Try the second-level cache first; a hit populates the collection synchronously.
		final boolean foundInCache = initializeCollectionFromCache( ce.getLoadedKey(), ceLoadedPersister, collection, source );
		if ( foundInCache ) {
			if ( LOG.isTraceEnabled() ) {
				LOG.trace( "Collection initialized from cache" );
			}
			return CompletionStages.nullFuture();
		}
		else {
			if ( LOG.isTraceEnabled() ) {
				LOG.trace( "Collection not cached" );
			}
			// NOTE(review): assumes the loaded persister is a ReactiveOneToManyPersister —
			// confirm other collection persister types cannot reach this path.
			return ( (ReactiveOneToManyPersister) ceLoadedPersister ).reactiveInitialize( ce.getLoadedKey(), source )
					.thenAccept( list -> {
						if ( LOG.isTraceEnabled() ) {
							LOG.trace( "Collection initialized" );
						}
						final StatisticsImplementor statistics = source.getFactory().getStatistics();
						if ( statistics.isStatisticsEnabled() ) {
							statistics.fetchCollection( ceLoadedPersister.getRole() );
						}
					} );
		}
	}
	// Collection was already initialized.
	return CompletionStages.nullFuture();
}
/**
 * Try to initialize a collection from the cache
 *
 * @param id The id of the collection to initialize
 * @param persister The collection persister
 * @param collection The collection to initialize
 * @param source The originating session
 *
 * @return true if we were able to initialize the collection from the cache;
 * false otherwise.
 */
private boolean initializeCollectionFromCache(
		Serializable id,
		CollectionPersister persister,
		PersistentCollection collection,
		SessionImplementor source) {
	// Enabled filters can change the collection contents, so a cached copy is unusable.
	if ( source.getLoadQueryInfluencers().hasEnabledFilters() && persister.isAffectedByEnabledFilters( source ) ) {
		LOG.trace( "Disregarding cached version (if any) of collection due to enabled filters" );
		return false;
	}
	if ( !persister.hasCache() || !source.getCacheMode().isGetEnabled() ) {
		return false;
	}

	final SessionFactoryImplementor factory = source.getFactory();
	final CollectionDataAccess cacheAccess = persister.getCacheAccessStrategy();
	final Object cacheKey = cacheAccess.generateCacheKey( id, persister, factory, source.getTenantIdentifier() );
	final Object cachedItem = CacheHelper.fromSharedCache( source, cacheKey, cacheAccess );

	// Record the hit/miss before deciding what to do with the result.
	final StatisticsImplementor statistics = factory.getStatistics();
	if ( statistics.isStatisticsEnabled() ) {
		final String regionName = cacheAccess.getRegion().getName();
		if ( cachedItem == null ) {
			statistics.collectionCacheMiss( persister.getNavigableRole(), regionName );
		}
		else {
			statistics.collectionCacheHit( persister.getNavigableRole(), regionName );
		}
	}
	if ( cachedItem == null ) {
		return false;
	}

	// Rebuild the collection state from the cached structure and mark it initialized.
	final CollectionCacheEntry cacheEntry =
			(CollectionCacheEntry) persister.getCacheEntryStructure().destructure( cachedItem, factory );
	final PersistenceContext persistenceContext = source.getPersistenceContextInternal();
	cacheEntry.assemble( collection, persister, persistenceContext.getCollectionOwner( id, persister ) );
	persistenceContext.getCollectionEntry( collection ).postInitialize( collection );
	return true;
}
/**
 * Handle merging of a detached entity instance: load the current persistent
 * copy by id and copy state onto it, or fall back to treating the instance
 * as transient when no persistent copy exists.
 *
 * @param event     the merge event (entity, session, optional requested id)
 * @param copyCache tracks original-to-copy mappings during the merge cascade
 * @return a stage completing when the merge (including cascades) is done
 */
protected CompletionStage<Void> entityIsDetached(MergeEvent event, MergeContext copyCache) {
	LOG.trace( "Merging detached instance" );
	final Object entity = event.getEntity();
	final EventSource source = event.getSession();
	final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity );
	final String entityName = persister.getEntityName();

	Serializable requestedId = event.getRequestedId();
	Serializable id;
	if ( requestedId == null ) {
		id = persister.getIdentifier( entity, source );
	}
	else {
		id = requestedId;
		// check that entity id = requestedId
		Serializable entityId = persister.getIdentifier( entity, source );
		if ( !persister.getIdentifierType().isEqual( id, entityId, source.getFactory() ) ) {
			throw new HibernateException( "merge requested with id not matching id of passed entity" );
		}
	}

	// Switch to the "merge" fetch profile for the duration of the lookup; restored in whenComplete.
	String previousFetchProfile = source.getLoadQueryInfluencers().getInternalFetchProfile();
	source.getLoadQueryInfluencers().setInternalFetchProfile( "merge" );

	//we must clone embedded composite identifiers, or
	//we will get back the same instance that we pass in
	final Serializable clonedIdentifier = (Serializable)
			persister.getIdentifierType().deepCopy( id, source.getFactory() );

	return source.unwrap(ReactiveSession.class)
			.reactiveGet( (Class<?>) persister.getMappedClass(), clonedIdentifier )
			.thenCompose(result -> {
				if ( result!=null ) {
					// before cascade!
					copyCache.put(entity, result, true);

					Object target = unproxyManagedForDetachedMerging(entity, result, persister, source);
					if (target == entity) {
						throw new AssertionFailure("entity was not detached");
					}
					else if ( !source.getEntityName(target).equals(entityName) ) {
						throw new WrongClassException(
								"class of the given object did not match class of persistent copy",
								event.getRequestedId(),
								entityName
						);
					}
					else if ( isVersionChanged(entity, source, persister, target) ) {
						// The detached copy is stale: record the optimistic failure and abort.
						final StatisticsImplementor statistics = source.getFactory().getStatistics();
						if (statistics.isStatisticsEnabled()) {
							statistics.optimisticFailure(entityName);
						}
						throw new StaleObjectStateException(entityName, id);
					}

					// cascade first, so that all unsaved objects get their
					// copy created before we actually copy
					return cascadeOnMerge( source, persister, entity, copyCache )
							.thenCompose( v -> fetchAndCopyValues( persister, entity, target, source, copyCache ) )
							.thenAccept(v -> {
								// copyValues() (called by fetchAndCopyValues) works by reflection,
								// so explicitly mark the entity instance dirty
								markInterceptorDirty(entity, target, persister);
								event.setResult(result);
							});
				}
				else {
					//TODO: we should throw an exception if we really *know* for sure
					//      that this is a detached instance, rather than just assuming
					//throw new StaleObjectStateException(entityName, id);

					// we got here because we assumed that an instance
					// with an assigned id was detached, when it was
					// really persistent
					return entityIsTransient(event, copyCache);
				}
			})
			// Always restore the caller's fetch profile, on success or failure.
			.whenComplete( (v,e) -> source.getLoadQueryInfluencers().setInternalFetchProfile(previousFetchProfile) );
}
/**
 * The statistics collector for this factory, narrowed (covariant return)
 * to the SPI type {@link StatisticsImplementor}.
 */
@Override
StatisticsImplementor getStatistics();

/**
 * @deprecated (since 5.2) Just use {@link #getStatistics} (with covariant return here as {@link StatisticsImplementor}).
 */
@Deprecated
default StatisticsImplementor getStatisticsImplementor() {
	// Retained only for backwards compatibility; simply forwards to getStatistics().
	return getStatistics();
}
/**
 * Forwards to the wrapped factory's statistics collector.
 */
@Override
public StatisticsImplementor getStatistics() {
	return delegate.getStatistics();
}
/**
 * Deprecated-style accessor retained for API compatibility; resolves to the
 * same object as the delegate's {@code getStatistics()}.
 */
@Override
public StatisticsImplementor getStatisticsImplementor() {
	return delegate.getStatistics();
}
/**
 * @return the service role this initiator is responsible for:
 *         {@link StatisticsImplementor}
 */
@Override
public Class<StatisticsImplementor> getServiceInitiated() {
	return StatisticsImplementor.class;
}
/**
 * Lazily resolves the {@link StatisticsImplementor} service from the registry,
 * caching it in the {@code statistics} field after the first lookup.
 *
 * <p>Fix: the previous version re-read the (non-volatile) field after the null
 * check, so under concurrent first calls a caller could observe a stale value
 * on return. Reading the field exactly once into a local and returning that
 * local makes the benign race safe regardless of field visibility.
 *
 * @return the statistics service, never null once the registry is available
 */
public StatisticsImplementor getStatistics() {
	StatisticsImplementor result = statistics;
	if ( result == null ) {
		// NOTE(review): unsynchronized lazy init — safe only because the service
		// registry returns the same cached instance on every call; confirm.
		result = serviceRegistry.getService( StatisticsImplementor.class );
		statistics = result;
	}
	return result;
}
/**
 * Builds the custom statistics implementation for the given factory.
 *
 * @param sessionFactory the factory the statistics will observe
 * @return a new TransactionStatistics bound to the factory
 */
@Override
public StatisticsImplementor buildStatistics(
		SessionFactoryImplementor sessionFactory) {
	return new TransactionStatistics(sessionFactory);
}
/**
 * @return the statistics collector of the underlying session factory
 */
public StatisticsImplementor getStatisticsImplementor() {
	// Fix: call the non-deprecated accessor. getStatisticsImplementor() is
	// deprecated since 5.2 and merely forwards to getStatistics() anyway.
	return sessionFactoryImplementor.getStatistics();
}