The following examples show how to use the org.hibernate.tool.schema.spi.ExecutionOptions API class; follow the links to view the full source code on GitHub.
@Override
public void doCreation(
    Metadata metadata,
    ExecutionOptions options,
    SourceDescriptor sourceDescriptor,
    TargetDescriptor targetDescriptor) {
  // Register auxiliary database objects so the generated CREATE statements are
  // wrapped in a Spanner START BATCH DDL / RUN BATCH pair.
  metadata.getDatabase().addAuxiliaryDatabaseObject(new StartBatchDdl(Action.CREATE));
  metadata.getDatabase().addAuxiliaryDatabaseObject(new RunBatchDdl(Action.CREATE));
  try (Connection connection = tool.getDatabaseMetadataConnection(options)) {
    // Seed the exporters with the current database contents before delegating
    // to the standard Hibernate schema creator.
    SpannerDatabaseInfo spannerDatabaseInfo = new SpannerDatabaseInfo(connection.getMetaData());
    tool.getSpannerTableExporter(options).init(metadata, spannerDatabaseInfo, Action.CREATE);
    tool.getForeignKeyExporter(options).init(spannerDatabaseInfo);
    schemaCreator.doCreation(metadata, options, sourceDescriptor, targetDescriptor);
  } catch (SQLException e) {
    // Fix: the message previously said "update" even though this is the create path.
    throw new RuntimeException("Failed to create Spanner table schema.", e);
  }
}
@Override
public void doDrop(
    Metadata metadata,
    ExecutionOptions options,
    SourceDescriptor sourceDescriptor,
    TargetDescriptor targetDescriptor) {
  // Register auxiliary database objects so the generated DROP statements are
  // wrapped in a Spanner START BATCH DDL / RUN BATCH pair.
  metadata.getDatabase().addAuxiliaryDatabaseObject(new StartBatchDdl(Action.DROP));
  metadata.getDatabase().addAuxiliaryDatabaseObject(new RunBatchDdl(Action.DROP));
  try (Connection connection = tool.getDatabaseMetadataConnection(options)) {
    // Initialize exporters with drop-table dependencies so tables are dropped
    // in the right order (interleaved/FK-dependent tables last).
    SpannerDatabaseInfo spannerDatabaseInfo = new SpannerDatabaseInfo(connection.getMetaData());
    tool.getSpannerTableExporter(options).init(metadata, spannerDatabaseInfo, Action.DROP);
    tool.getForeignKeyExporter(options).init(spannerDatabaseInfo);
    schemaDropper.doDrop(metadata, options, sourceDescriptor, targetDescriptor);
  } catch (SQLException e) {
    // Fix: the message previously said "update" even though this is the drop path.
    throw new RuntimeException("Failed to drop Spanner table schema.", e);
  }
}
@Override
public void doMigration(
    Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) {
  // Wrap the migration DDL in a Spanner START BATCH DDL / RUN BATCH pair.
  final Database database = metadata.getDatabase();
  database.addAuxiliaryDatabaseObject(new StartBatchDdl(Action.UPDATE));
  database.addAuxiliaryDatabaseObject(new RunBatchDdl(Action.UPDATE));
  try (Connection conn = tool.getDatabaseMetadataConnection(options)) {
    // Prime the exporters with the live database contents, then delegate to
    // the standard Hibernate migrator.
    final SpannerDatabaseInfo databaseInfo = new SpannerDatabaseInfo(conn.getMetaData());
    tool.getSpannerTableExporter(options).init(metadata, databaseInfo, Action.UPDATE);
    tool.getForeignKeyExporter(options).init(databaseInfo);
    schemaMigrator.doMigration(metadata, options, targetDescriptor);
  } catch (SQLException e) {
    throw new RuntimeException("Failed to update Spanner table schema.", e);
  }
}
@SuppressWarnings("unchecked")
public void validate(Metadata metadata, ServiceRegistry serviceRegistry) {
  LOG.runningSchemaValidator();
  // Use a parameterized map instead of a raw type; copying the (possibly raw)
  // settings map is the remaining unchecked operation covered by the annotation.
  Map<String, Object> config = new HashMap<>();
  config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
  final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
  // Halt on the first validation problem rather than collecting them.
  final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
      config,
      ExceptionHandlerHaltImpl.INSTANCE
  );
  tool.getSchemaValidator( config ).doValidation( metadata, executionOptions );
}
@Override
public void doCreation(
    Metadata metadata,
    ExecutionOptions options,
    SourceDescriptor sourceDescriptor,
    TargetDescriptor targetDescriptor) {
  // Nothing to do when the caller requested no output targets.
  if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
    final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
    // Build the generation targets (scripts and/or database) up front, then
    // hand off to the dialect-aware overload.
    final GenerationTarget[] generationTargets = tool.buildGenerationTargets(
        targetDescriptor,
        jdbcContext,
        options.getConfigurationValues(),
        true
    );
    doCreation( metadata, jdbcContext.getDialect(), options, sourceDescriptor, generationTargets );
  }
}
public void createFromScript(
    ScriptSourceInput scriptSourceInput,
    ImportSqlCommandExtractor commandExtractor,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Open the script source, replay every extracted command against the
  // targets, and always release the source even if a command fails.
  scriptSourceInput.prepare();
  try {
    for ( final String sqlCommand : scriptSourceInput.read( commandExtractor ) ) {
      applySqlString( sqlCommand, formatter, options, targets );
    }
  }
  finally {
    scriptSourceInput.release();
  }
}
private static void applySqlString(
    String sqlString,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Blank commands are silently skipped.
  if ( !StringHelper.isEmpty( sqlString ) ) {
    try {
      final String formatted = formatter.format( sqlString );
      for ( final GenerationTarget target : targets ) {
        target.accept( formatted );
      }
    }
    catch (CommandAcceptanceException e) {
      // Delegate the failure policy (halt vs. collect) to the configured handler.
      options.getExceptionHandler().handleException( e );
    }
  }
}
@Override
public void doValidation(Metadata metadata, ExecutionOptions options) {
// Resolve the JDBC environment and obtain an isolated DDL transaction so
// validation queries do not interfere with any ongoing transaction.
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext );
// Snapshot the existing database structure for the default namespace.
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
isolator,
metadata.getDatabase().getDefaultNamespace().getName()
);
try {
performValidation( metadata, databaseInformation, options, jdbcContext.getDialect() );
}
finally {
// Best-effort cleanup: a failure to release the snapshot must not mask a
// validation exception, so it is only logged at debug level.
try {
databaseInformation.cleanup();
}
catch (Exception e) {
log.debug( "Problem releasing DatabaseInformation : " + e.getMessage() );
}
// Release the isolated transaction last, after the snapshot is cleaned up.
isolator.release();
}
}
@Override
protected void validateTables(
    Metadata metadata,
    DatabaseInformation databaseInformation,
    ExecutionOptions options,
    Dialect dialect, Namespace namespace) {
  // Fetch table information for the whole namespace in one call, then check
  // each included physical table against that snapshot.
  final NameSpaceTablesInformation tableInfos = databaseInformation.getTablesInformation( namespace );
  for ( final Table table : namespace.getTables() ) {
    if ( !schemaFilter.includeTable( table ) || !table.isPhysicalTable() ) {
      continue; // filtered out or not backed by a real table
    }
    validateTable( table, tableInfos.getTableInformation( table ), metadata, options, dialect );
  }
}
@Override
protected void validateTables(
    Metadata metadata,
    DatabaseInformation databaseInformation,
    ExecutionOptions options,
    Dialect dialect,
    Namespace namespace) {
  // Unlike the grouped variant, this implementation looks up each table's
  // information individually by qualified name.
  for ( final Table table : namespace.getTables() ) {
    if ( !schemaFilter.includeTable( table ) || !table.isPhysicalTable() ) {
      continue; // filtered out or not backed by a real table
    }
    final TableInformation tableInfo =
        databaseInformation.getTableInformation( table.getQualifiedTableName() );
    validateTable( table, tableInfo, metadata, options, dialect );
  }
}
// Emits the ALTER TABLE statements needed to bring an existing table in line
// with the mapped model. Only additive changes are produced by
// Table#sqlAlterStrings; columns are never dropped.
protected void migrateTable(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
// quiet=false: acceptance failures are routed to the configured exception handler.
//noinspection unchecked
applySqlStrings(
false,
table.sqlAlterStrings(
dialect,
metadata,
tableInformation,
getDefaultCatalogName( database, dialect ),
getDefaultSchemaName( database, dialect )
),
formatter,
options,
targets
);
}
private static void applySqlString(
    boolean quiet,
    String sqlString,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Nothing to emit for blank commands.
  if ( StringHelper.isEmpty( sqlString ) ) {
    return;
  }
  final String formatted = formatter.format( sqlString );
  for ( final GenerationTarget target : targets ) {
    try {
      target.accept( formatted );
    }
    catch (CommandAcceptanceException e) {
      // In quiet mode acceptance failures are ignored entirely; otherwise the
      // configured handler decides whether to halt or collect.
      if ( !quiet ) {
        options.getExceptionHandler().handleException( e );
      }
    }
  }
}
@Override
public void doDrop(
    Metadata metadata,
    ExecutionOptions options,
    SourceDescriptor sourceDescriptor,
    TargetDescriptor targetDescriptor) {
  // Nothing to do when the caller requested no output targets.
  if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
    final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
    // Build the generation targets (scripts and/or database), then delegate to
    // the dialect-aware overload.
    final GenerationTarget[] generationTargets = tool.buildGenerationTargets(
        targetDescriptor,
        jdbcContext,
        options.getConfigurationValues(),
        true
    );
    doDrop( metadata, options, jdbcContext.getDialect(), sourceDescriptor, generationTargets );
  }
}
private void performDrop(
    Metadata metadata,
    ExecutionOptions options,
    Dialect dialect,
    SourceDescriptor sourceDescriptor,
    GenerationTarget... targets) {
  final ImportSqlCommandExtractor commandExtractor = tool.getServiceRegistry().getService( ImportSqlCommandExtractor.class );
  final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
  final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
  // Switch over the source type for consistency with performCreation; for the
  // combined types the execution order of script vs. metadata matters.
  switch ( sourceDescriptor.getSourceType() ) {
    case SCRIPT: {
      dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      break;
    }
    case METADATA: {
      dropFromMetadata( metadata, options, dialect, formatter, targets );
      break;
    }
    case METADATA_THEN_SCRIPT: {
      dropFromMetadata( metadata, options, dialect, formatter, targets );
      dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      break;
    }
    case SCRIPT_THEN_METADATA: {
      dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      dropFromMetadata( metadata, options, dialect, formatter, targets );
      break;
    }
  }
}
private void dropFromScript(
    ScriptSourceInput scriptSourceInput,
    ImportSqlCommandExtractor commandExtractor,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Open the drop script, replay each extracted command against the targets,
  // and always release the source even on failure.
  scriptSourceInput.prepare();
  try {
    for ( final String sqlCommand : scriptSourceInput.read( commandExtractor ) ) {
      applySqlString( sqlCommand, formatter, options, targets );
    }
  }
  finally {
    scriptSourceInput.release();
  }
}
private static void applySqlString(
    String sqlString,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Blank commands are silently skipped.
  if ( !StringHelper.isEmpty( sqlString ) ) {
    final String formatted = formatter.format( sqlString );
    for ( final GenerationTarget target : targets ) {
      try {
        target.accept( formatted );
      }
      catch (CommandAcceptanceException e) {
        // Per-target handling: one target failing does not stop the others.
        options.getExceptionHandler().handleException( e );
      }
    }
  }
}
@Override
public DelayedDropAction buildDelayedAction(
    Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor) {
  try (Connection conn = tool.getDatabaseMetadataConnection(options)) {
    // Initialize exporters with drop-table dependencies so that, when the
    // delayed action eventually runs, tables are dropped in the right order.
    final SpannerDatabaseInfo databaseInfo = new SpannerDatabaseInfo(conn.getMetaData());
    tool.getSpannerTableExporter(options).init(metadata, databaseInfo, Action.DROP);
    tool.getForeignKeyExporter(options).init(databaseInfo);
    return schemaDropper.buildDelayedAction(metadata, options, sourceDescriptor);
  } catch (SQLException e) {
    throw new RuntimeException("Failed to update Spanner table schema.", e);
  }
}
// Generates the DDL script for the given metadata: drop statements first when
// requested, then create statements, journaling every emitted command instead
// of executing it. Intended for script-generation use, not live databases.
List<String> createScript(Metadata metadata, Dialect d, boolean includeDrops) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
// Minimal options: no namespace management, empty configuration, and halt on
// the first command failure.
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return false;
}
@Override
public Map getConfigurationValues() {
return Collections.emptyMap();
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
};
HibernateSchemaManagementTool tool = new HibernateSchemaManagementTool();
tool.injectServices((ServiceRegistryImplementor) this.registry);
// Source everything from the mapping metadata; no external script input.
SourceDescriptor sd = new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
};
if (includeDrops) {
new SchemaDropperImpl(tool).doDrop(metadata, options, d, sd, target);
}
new SchemaCreatorImpl(tool).doCreation(metadata, d, options, sd, target);
// The journaling target accumulated every DDL command in order.
return target.commands;
}
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Metadata metadata, ServiceRegistry serviceRegistry) {
  if ( targetTypes.isEmpty() ) {
    LOG.debug( "Skipping SchemaExport as no targets were specified" );
    return;
  }
  // Reset any exceptions collected by a previous run.
  exceptions.clear();
  LOG.runningHbm2ddlSchemaUpdate();
  // Use a parameterized map instead of a raw type; copying the (possibly raw)
  // settings map is the remaining unchecked operation covered by the annotation.
  Map<String, Object> config = new HashMap<>();
  config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
  config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
  config.put( AvailableSettings.FORMAT_SQL, format );
  final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
  // haltOnError stops at the first problem; otherwise failures are collected
  // and surfaced via the exceptions list after the run.
  final ExceptionHandler exceptionHandler = haltOnError
      ? ExceptionHandlerHaltImpl.INSTANCE
      : new ExceptionHandlerCollectingImpl();
  final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
      config,
      exceptionHandler
  );
  final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
  try {
    tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, targetDescriptor );
  }
  finally {
    // Expose collected failures to the caller even if migration threw.
    if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) {
      exceptions.addAll( ( (ExceptionHandlerCollectingImpl) exceptionHandler ).getExceptions() );
    }
  }
}
private void performCreation(
    Metadata metadata,
    Dialect dialect,
    ExecutionOptions options,
    SourceDescriptor sourceDescriptor,
    GenerationTarget... targets) {
  final ImportSqlCommandExtractor commandExtractor = tool.getServiceRegistry().getService( ImportSqlCommandExtractor.class );
  final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
  final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
  // For the combined source types the order of script vs. metadata matters.
  switch ( sourceDescriptor.getSourceType() ) {
    case SCRIPT: {
      createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      break;
    }
    case METADATA: {
      createFromMetadata( metadata, options, dialect, formatter, targets );
      break;
    }
    case METADATA_THEN_SCRIPT: {
      createFromMetadata( metadata, options, dialect, formatter, targets );
      createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      break;
    }
    case SCRIPT_THEN_METADATA: {
      createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
      createFromMetadata( metadata, options, dialect, formatter, targets );
      // break added defensively: the original relied on this being the last
      // case, which is fall-through-prone if another case is appended later.
      break;
    }
  }
  // Import scripts (e.g. import.sql) run after schema creation regardless of source.
  applyImportSources( options, commandExtractor, format, targets );
}
private static void applySqlStrings(
    String[] sqlStrings,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // A null array means the exporter produced nothing for this object.
  if ( sqlStrings != null ) {
    for ( final String sql : sqlStrings ) {
      applySqlString( sql, formatter, options, targets );
    }
  }
}
/**
 * For testing...
 *
 * <p>Generates the schema-creation commands for the given metadata without
 * touching any database; commands are journaled and returned in order.
 *
 * @param metadata The metadata for which to generate the creation commands.
 * @param manageNamespaces Whether catalog/schema creation commands are included.
 *
 * @return The generation commands
 */
public List<String> generateCreationCommands(Metadata metadata, final boolean manageNamespaces) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
// Resolve the dialect from the metadata's own service registry.
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
// Minimal options: empty configuration and halt on the first command failure.
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return Collections.emptyMap();
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
};
// No formatting: commands are returned exactly as the exporters produce them.
createFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands;
}
// Convenience overload: builds ExecutionOptions and a metadata-backed
// SourceDescriptor from the given settings, then delegates to the dialect-aware
// doCreation. Command failures are logged rather than halting execution.
public void doCreation(
Metadata metadata,
final ServiceRegistry serviceRegistry,
final Map settings,
final boolean manageNamespaces,
GenerationTarget... targets) {
doCreation(
metadata,
serviceRegistry.getService( JdbcEnvironment.class ).getDialect(),
new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return settings;
}
@Override
public ExceptionHandler getExceptionHandler() {
// Log-and-continue handler, unlike the halt-on-error variants used elsewhere.
return ExceptionHandlerLoggedImpl.INSTANCE;
}
},
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
// Everything is sourced from the mapping metadata; no external script.
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
},
targets
);
}
/**
 * Validates that the given mapped table exists in the database and that every
 * mapped column is present, then delegates per-column type checks.
 *
 * @throws SchemaManagementException if the table or any mapped column is missing
 */
protected void validateTable(
    Table table,
    TableInformation tableInformation,
    Metadata metadata,
    ExecutionOptions options,
    Dialect dialect) {
  if ( tableInformation == null ) {
    throw new SchemaManagementException(
        String.format(
            "Schema-validation: missing table [%s]",
            table.getQualifiedTableName().toString()
        )
    );
  }
  final Iterator selectableItr = table.getColumnIterator();
  while ( selectableItr.hasNext() ) {
    final Selectable selectable = (Selectable) selectableItr.next();
    // Idiom: plain instanceof instead of Column.class.isInstance + cast.
    // Formulas (non-Column selectables) have no database column to validate.
    if ( selectable instanceof Column ) {
      final Column column = (Column) selectable;
      // Look up by the quoted name so case/quoting rules match the mapping.
      final ColumnInformation existingColumn = tableInformation.getColumn( Identifier.toIdentifier( column.getQuotedName() ) );
      if ( existingColumn == null ) {
        throw new SchemaManagementException(
            String.format(
                "Schema-validation: missing column [%s] in table [%s]",
                column.getName(),
                table.getQualifiedTableName()
            )
        );
      }
      validateColumnType( table, column, existingColumn, metadata, options, dialect );
    }
  }
}
// Checks that a mapped column's type is compatible with the database column.
// Types "match" when either the JDBC type codes are equal, or the dialect's
// rendered SQL type starts with the database-reported type name
// (case-insensitive, Locale.ROOT to avoid locale-dependent casing).
protected void validateColumnType(
Table table,
Column column,
ColumnInformation columnInformation,
Metadata metadata,
ExecutionOptions options,
Dialect dialect) {
boolean typesMatch = column.getSqlTypeCode( metadata ) == columnInformation.getTypeCode()
|| column.getSqlType( dialect, metadata ).toLowerCase(Locale.ROOT).startsWith( columnInformation.getTypeName().toLowerCase(Locale.ROOT) );
if ( !typesMatch ) {
throw new SchemaManagementException(
String.format(
"Schema-validation: wrong column type encountered in column [%s] in " +
"table [%s]; found [%s (Types#%s)], but expecting [%s (Types#%s)]",
column.getName(),
table.getQualifiedTableName(),
columnInformation.getTypeName().toLowerCase(Locale.ROOT),
JdbcTypeNameMapper.getTypeName( columnInformation.getTypeCode() ),
column.getSqlType().toLowerCase(Locale.ROOT),
JdbcTypeNameMapper.getTypeName( column.getSqlTypeCode( metadata ) )
)
);
}
// this is the old Hibernate check...
//
// but I think a better check involves checks against type code and then the type code family, not
// just the type name.
//
// See org.hibernate.type.descriptor.sql.JdbcTypeFamilyInformation
// todo : this ^^
}
/**
 * Migrates all tables within a single namespace, returning the resulting
 * table information for that namespace. Implementations decide whether to
 * look up existing tables individually or in a grouped fashion.
 *
 * @param exportIdentifiers used to detect duplicate export identifiers across objects
 * @param tryToCreateCatalogs whether missing catalogs should be created first
 * @param tryToCreateSchemas whether missing schemas should be created first
 * @param exportedCatalogs catalogs already created during this migration run
 */
protected abstract NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace, GenerationTarget[] targets);
protected void createTable(
    Table table,
    Dialect dialect,
    Metadata metadata,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Ask the dialect's table exporter for the CREATE statements, then route
  // them through the shared applier (quiet=false: failures go to the handler).
  final String[] createStrings = dialect.getTableExporter().getSqlCreateStrings( table, metadata );
  applySqlStrings( false, createStrings, formatter, options, targets );
}
protected void applyIndexes(
    Table table,
    TableInformation tableInformation,
    Dialect dialect,
    Metadata metadata,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Create only named indexes that are not already present in the database.
  final Exporter<Index> indexExporter = dialect.getIndexExporter();
  final Iterator<Index> indexes = table.getIndexIterator();
  while ( indexes.hasNext() ) {
    final Index index = indexes.next();
    if ( StringHelper.isEmpty( index.getName() ) ) {
      continue; // unnamed indexes cannot be matched against existing ones
    }
    final IndexInformation existing =
        tableInformation == null ? null : findMatchingIndex( index, tableInformation );
    if ( existing == null ) {
      applySqlStrings(
          false,
          indexExporter.getSqlCreateStrings( index, metadata ),
          formatter,
          options,
          targets
      );
    }
  }
}
protected void applyForeignKeys(
    Table table,
    TableInformation tableInformation,
    Dialect dialect,
    Metadata metadata,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // Foreign keys are added via ALTER TABLE, so the dialect must support it.
  if ( !dialect.hasAlterTable() ) {
    return;
  }
  final Exporter<ForeignKey> fkExporter = dialect.getForeignKeyExporter();
  @SuppressWarnings("unchecked")
  final Iterator<ForeignKey> foreignKeys = table.getForeignKeyIterator();
  while ( foreignKeys.hasNext() ) {
    final ForeignKey foreignKey = foreignKeys.next();
    if ( !foreignKey.isPhysicalConstraint() || !foreignKey.isCreationEnabled() ) {
      continue; // logical-only or explicitly disabled constraints are skipped
    }
    final boolean alreadyPresent =
        tableInformation != null && checkForExistingForeignKey( foreignKey, tableInformation );
    if ( !alreadyPresent ) {
      // todo : shouldn't we just drop+recreate if FK exists?
      // this follows the existing code from legacy SchemaUpdate which just skipped
      // in old SchemaUpdate code, this was the trigger to "create"
      applySqlStrings(
          false,
          fkExporter.getSqlCreateStrings( foreignKey, metadata ),
          formatter,
          options,
          targets
      );
    }
  }
}
protected static void applySqlStrings(
    boolean quiet,
    String[] sqlStrings,
    Formatter formatter,
    ExecutionOptions options,
    GenerationTarget... targets) {
  // A null array means the exporter produced nothing for this object.
  if ( sqlStrings == null ) {
    return;
  }
  for ( final String sql : sqlStrings ) {
    applySqlString( quiet, sql, formatter, options, targets );
  }
}