Listed below are code examples for org.hibernate.jdbc.JDBCContext#org.hibernate.EmptyInterceptor, taken from open source projects; the full source for each snippet can be viewed on GitHub.
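Most of the snippets either fall back to the shared EmptyInterceptor.INSTANCE singleton or subclass EmptyInterceptor and override only the callbacks they need, since EmptyInterceptor provides no-op defaults for the whole Interceptor contract. The sketch below is illustrative only; the AuditInterceptor name and its logging body are not taken from any of the projects listed here.

import java.io.Serializable;

import org.hibernate.EmptyInterceptor;
import org.hibernate.type.Type;

// Illustrative sketch: only onSave() is overridden; every other callback keeps
// EmptyInterceptor's no-op default behaviour.
public class AuditInterceptor extends EmptyInterceptor {

    @Override
    public boolean onSave(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) {
        System.out.println( "Saving " + entity.getClass().getSimpleName() + " with id " + id );
        // Returning false tells Hibernate that the state array was not modified.
        return false;
    }
}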
protected void reset() {
    classes = new HashMap();
    imports = new HashMap();
    collections = new HashMap();
    tables = new TreeMap();
    namedQueries = new HashMap();
    namedSqlQueries = new HashMap();
    sqlResultSetMappings = new HashMap();
    xmlHelper = new XMLHelper();
    typeDefs = new HashMap();
    propertyReferences = new ArrayList();
    secondPasses = new ArrayList();
    interceptor = EmptyInterceptor.INSTANCE;
    properties = Environment.getProperties();
    entityResolver = XMLHelper.DEFAULT_DTD_RESOLVER;
    eventListeners = new EventListeners();
    filterDefinitions = new HashMap();
    // extendsQueue = new ArrayList();
    extendsQueue = new HashMap();
    auxiliaryDatabaseObjects = new ArrayList();
    tableNameBinding = new HashMap();
    columnNameBindingPerTable = new HashMap();
    namingStrategy = DefaultNamingStrategy.INSTANCE;
    sqlFunctions = new HashMap();
}
@Override
protected Interceptor interceptor() {
    return new EmptyInterceptor() {
        private Long startNanos;

        @Override
        public void preFlush(Iterator entities) {
            startNanos = System.nanoTime();
        }

        @Override
        public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) {
            if (enableMetrics) {
                timer.update(System.nanoTime() - startNanos, TimeUnit.NANOSECONDS);
            }
            return false;
        }
    };
}
protected void reset() {
    implicitNamingStrategy = ImplicitNamingStrategyJpaCompliantImpl.INSTANCE;
    physicalNamingStrategy = PhysicalNamingStrategyStandardImpl.INSTANCE;
    namedQueries = new HashMap<String, NamedQueryDefinition>();
    namedSqlQueries = new HashMap<String, NamedSQLQueryDefinition>();
    sqlResultSetMappings = new HashMap<String, ResultSetMappingDefinition>();
    namedEntityGraphMap = new HashMap<String, NamedEntityGraphDefinition>();
    namedProcedureCallMap = new HashMap<String, NamedProcedureCallDefinition>();
    standardServiceRegistryBuilder = new StandardServiceRegistryBuilder( bootstrapServiceRegistry );
    entityTuplizerFactory = new EntityTuplizerFactory();
    interceptor = EmptyInterceptor.INSTANCE;
    properties = new Properties();
    properties.putAll( standardServiceRegistryBuilder.getSettings() );
}
public static Interceptor configuredInterceptor(Interceptor interceptor, SessionFactoryOptions options) {
    // NOTE: DO NOT return EmptyInterceptor.INSTANCE from here as a "default for the Session";
    // we "filter" that one out here. The return from here should represent the
    // explicitly configured Interceptor (if there is one). Return null from here instead;
    // the Session will handle it.
    if ( interceptor != null && interceptor != EmptyInterceptor.INSTANCE ) {
        return interceptor;
    }

    // prefer the SessionFactory-scoped interceptor to any Session-scoped interceptor prototype
    if ( options.getInterceptor() != null && options.getInterceptor() != EmptyInterceptor.INSTANCE ) {
        return options.getInterceptor();
    }

    // then check the Session-scoped interceptor prototype
    if ( options.getStatelessInterceptorImplementor() != null && options.getStatelessInterceptorImplementorSupplier() != null ) {
        throw new HibernateException(
                "A session scoped interceptor class or supplier are allowed, but not both!" );
    }
    else if ( options.getStatelessInterceptorImplementor() != null ) {
        try {
            /*
             * We could remove the getStatelessInterceptorImplementor method and use just
             * getStatelessInterceptorImplementorSupplier, since it covers both cases: the user has given either a
             * Supplier<? extends Interceptor> or just the Class<? extends Interceptor>, in which case we simply
             * instantiate the Interceptor when calling the Supplier.
             */
            return options.getStatelessInterceptorImplementor().newInstance();
        }
        catch (InstantiationException | IllegalAccessException e) {
            throw new HibernateException( "Could not supply session-scoped SessionFactory Interceptor", e );
        }
    }
    else if ( options.getStatelessInterceptorImplementorSupplier() != null ) {
        return options.getStatelessInterceptorImplementorSupplier().get();
    }

    return null;
}
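For context on how configuredInterceptor() is reached in practice, here is a hedged sketch of the two configuration styles it distinguishes: a SessionFactory-scoped interceptor shared by all sessions, and a per-session interceptor. Configuration.setInterceptor() and SessionBuilder.interceptor() are standard Hibernate APIs; the LoggingInterceptor class is an assumed placeholder.

import org.hibernate.EmptyInterceptor;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class InterceptorWiringSketch {

    // Assumed placeholder; a real application would override the callbacks it needs.
    static class LoggingInterceptor extends EmptyInterceptor {
    }

    public static void main(String[] args) {
        // SessionFactory-scoped interceptor: a single shared instance for all sessions.
        Configuration cfg = new Configuration().configure();
        cfg.setInterceptor( new LoggingInterceptor() );
        SessionFactory sessionFactory = cfg.buildSessionFactory();

        // Session-scoped interceptor: a dedicated instance for this session only.
        Session session = sessionFactory
                .withOptions()
                .interceptor( new LoggingInterceptor() )
                .openSession();

        session.close();
        sessionFactory.close();
    }
}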
/**
 * Test case from HHH-1921. Here the interceptor resets the
 * current state to the same thing as the current db state; this
 * causes EntityPersister.findDirty() to return no dirty properties.
 */
public void testPropertyIntercept2() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    User u = new User( "Josh", "test" );
    s.persist( u );
    t.commit();
    s.close();

    s = openSession(
            new EmptyInterceptor() {
                public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) {
                    currentState[0] = "test";
                    return true;
                }
            }
    );
    t = s.beginTransaction();
    u = (User) s.get( User.class, u.getName() );
    u.setPassword( "nottest" );
    t.commit();
    s.close();

    s = openSession();
    t = s.beginTransaction();
    u = (User) s.get( User.class, "Josh" );
    assertEquals( "test", u.getPassword() );
    s.delete( u );
    t.commit();
    s.close();
}
public void testComponentInterceptor() {
    final int checkPerm = 500;
    final String checkComment = "generated from interceptor";

    Session s = openSession(
            new EmptyInterceptor() {
                public boolean onSave(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) {
                    if ( state[0] == null ) {
                        Image.Details detail = new Image.Details();
                        detail.setPerm1( checkPerm );
                        detail.setComment( checkComment );
                        state[0] = detail;
                    }
                    return true;
                }
            }
    );
    s.beginTransaction();
    Image i = new Image();
    i.setName( "compincomp" );
    i = (Image) s.merge( i );
    assertNotNull( i.getDetails() );
    assertEquals( checkPerm, i.getDetails().getPerm1() );
    assertEquals( checkComment, i.getDetails().getComment() );
    s.getTransaction().commit();
    s.close();

    s = openSession();
    s.beginTransaction();
    i = (Image) s.get( Image.class, i.getId() );
    assertNotNull( i.getDetails() );
    assertEquals( checkPerm, i.getDetails().getPerm1() );
    assertEquals( checkComment, i.getDetails().getComment() );
    s.delete( i );
    s.getTransaction().commit();
    s.close();
}
@Override
protected Interceptor interceptor() {
    return new EmptyInterceptor() {
        @Override
        public void beforeTransactionCompletion(Transaction tx) {
            if ( applyInterceptor.get() ) {
                tx.rollback();
            }
        }
    };
}
/**
 * Create a {@link SessionFactory} using the properties and mappings in this configuration. The
 * SessionFactory will be immutable, so changes made to this Configuration after building the
 * SessionFactory will not affect it.
 *
 * @param serviceRegistry The registry of services to be used in creating this session factory.
 *
 * @return The built {@link SessionFactory}
 *
 * @throws HibernateException usually indicates an invalid configuration or invalid mapping information
 */
public SessionFactory buildSessionFactory(ServiceRegistry serviceRegistry) throws HibernateException {
    log.debug( "Building session factory using provided StandardServiceRegistry" );
    final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( (StandardServiceRegistry) serviceRegistry );
    if ( implicitNamingStrategy != null ) {
        metadataBuilder.applyImplicitNamingStrategy( implicitNamingStrategy );
    }
    if ( physicalNamingStrategy != null ) {
        metadataBuilder.applyPhysicalNamingStrategy( physicalNamingStrategy );
    }
    if ( sharedCacheMode != null ) {
        metadataBuilder.applySharedCacheMode( sharedCacheMode );
    }
    if ( !typeContributorRegistrations.isEmpty() ) {
        for ( TypeContributor typeContributor : typeContributorRegistrations ) {
            metadataBuilder.applyTypes( typeContributor );
        }
    }
    if ( !basicTypes.isEmpty() ) {
        for ( BasicType basicType : basicTypes ) {
            metadataBuilder.applyBasicType( basicType );
        }
    }
    if ( sqlFunctions != null ) {
        for ( Map.Entry<String, SQLFunction> entry : sqlFunctions.entrySet() ) {
            metadataBuilder.applySqlFunction( entry.getKey(), entry.getValue() );
        }
    }
    if ( auxiliaryDatabaseObjectList != null ) {
        for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : auxiliaryDatabaseObjectList ) {
            metadataBuilder.applyAuxiliaryDatabaseObject( auxiliaryDatabaseObject );
        }
    }
    if ( attributeConverterDefinitionsByClass != null ) {
        for ( AttributeConverterDefinition attributeConverterDefinition : attributeConverterDefinitionsByClass.values() ) {
            metadataBuilder.applyAttributeConverter( attributeConverterDefinition );
        }
    }

    final Metadata metadata = metadataBuilder.build();
    final SessionFactoryBuilder sessionFactoryBuilder = metadata.getSessionFactoryBuilder();
    if ( interceptor != null && interceptor != EmptyInterceptor.INSTANCE ) {
        sessionFactoryBuilder.applyInterceptor( interceptor );
    }
    if ( getSessionFactoryObserver() != null ) {
        sessionFactoryBuilder.addSessionFactoryObservers( getSessionFactoryObserver() );
    }
    if ( getEntityNotFoundDelegate() != null ) {
        sessionFactoryBuilder.applyEntityNotFoundDelegate( getEntityNotFoundDelegate() );
    }
    if ( getEntityTuplizerFactory() != null ) {
        sessionFactoryBuilder.applyEntityTuplizerFactory( getEntityTuplizerFactory() );
    }
    if ( getCurrentTenantIdentifierResolver() != null ) {
        sessionFactoryBuilder.applyCurrentTenantIdentifierResolver( getCurrentTenantIdentifierResolver() );
    }

    return sessionFactoryBuilder.build();
}
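A minimal usage sketch for buildSessionFactory(ServiceRegistry), assuming a hibernate.cfg.xml on the classpath; StandardServiceRegistryBuilder and Configuration are the standard Hibernate bootstrap classes, while the class and method names in the sketch itself are illustrative.

import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;

public class SessionFactoryBootstrapSketch {

    public static SessionFactory build() {
        // Build the service registry from hibernate.cfg.xml / hibernate.properties.
        final StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .configure()
                .build();
        try {
            // Hand the registry to the Configuration method shown above.
            return new Configuration().configure().buildSessionFactory( registry );
        }
        catch (RuntimeException e) {
            // Release the registry if the factory could not be built.
            StandardServiceRegistryBuilder.destroy( registry );
            throw e;
        }
    }
}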
@Override
@SuppressWarnings("unchecked")
public T noInterceptor() {
    this.interceptor = EmptyInterceptor.INSTANCE;
    return (T) this;
}
@Override
public Interceptor getInterceptor() {
    return configuredInterceptor( EmptyInterceptor.INSTANCE, sessionFactory.getSessionFactoryOptions() );
}

private Interceptor interpret(Interceptor interceptor) {
    return interceptor == null ? EmptyInterceptor.INSTANCE : interceptor;
}
@Override
public Interceptor getInterceptor() {
    return interceptor == null ? EmptyInterceptor.INSTANCE : interceptor;
}
StatelessSessionImpl(Connection connection, SessionFactoryImpl factory) {
    super( factory );
    this.jdbcContext = new JDBCContext( this, connection, EmptyInterceptor.INSTANCE );
}
public Interceptor getInterceptor() {
    return EmptyInterceptor.INSTANCE;
}
/**
 * Verify the imported dump.
 * @return Number of checked objects. This number is negative if any error occurs (at least one object wasn't imported successfully).
 */
public int verifyDump(final XStreamSavingConverter xstreamSavingConverter)
{
    final SessionFactory sessionFactory = hibernate.getSessionFactory();
    Session session = null;
    boolean hasError = false;
    try {
        session = sessionFactory.openSession(EmptyInterceptor.INSTANCE);
        session.setDefaultReadOnly(true);
        int counter = 0;
        for (final Map.Entry<Class<?>, List<Object>> entry : xstreamSavingConverter.getAllObjects().entrySet()) {
            final List<Object> objects = entry.getValue();
            final Class<?> entityClass = entry.getKey();
            if (objects == null) {
                continue;
            }
            for (final Object obj : objects) {
                if (HibernateUtils.isEntity(obj.getClass()) == false) {
                    continue;
                }
                final Serializable id = HibernateUtils.getIdentifier(obj);
                if (id == null) {
                    // Can't compare this object without an identifier.
                    continue;
                }
                // log.info("Testing object: " + obj);
                final Object databaseObject = session.get(entityClass, id, LockOptions.READ);
                Hibernate.initialize(databaseObject);
                final boolean equals = equals(obj, databaseObject, true);
                if (equals == false) {
                    log.error("Object not successfully imported! xml object=[" + obj + "], data base=[" + databaseObject + "]");
                    hasError = true;
                }
                ++counter;
            }
        }
        for (final HistoryEntry historyEntry : xstreamSavingConverter.getHistoryEntries()) {
            final Class<?> type = xstreamSavingConverter.getClassFromHistoryName(historyEntry.getClassName());
            final Object o = type != null ? session.get(type, historyEntry.getEntityId()) : null;
            if (o == null) {
                log.warn("A corrupted history entry found (entity of class '"
                        + historyEntry.getClassName()
                        + "' with id "
                        + historyEntry.getEntityId()
                        + " not found: "
                        + historyEntry
                        + ". This doesn't affect the functioning of ProjectForge, but it may result in orphaned history entries.");
                hasError = true;
            }
            ++counter;
        }
        if (hasError == true) {
            log.fatal("*********** An inconsistency in the import was found! This may result in data loss or corrupted data! Please retry the import. "
                    + counter
                    + " entries checked.");
            return -counter;
        }
        log.info("Data-base import successfully verified: " + counter + " entries checked.");
        return counter;
    } finally {
        if (session != null) {
            session.close();
        }
    }
}