下面列出了 org.hibernate.tool.schema.TargetType API 类的实例代码及用法示例;也可以点击链接到 GitHub 查看源代码。
@Test
public void generateDropStringsTest() throws IOException, SQLException {
    // Simulate a database that already contains both tables.
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
        .setTables("test_table", "TestEntity_stringList")
        .build());

    // Export the drop script to a uniquely named file.
    String scriptFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(scriptFileName)
        .drop(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), this.metadata);
    File generatedScript = new File(scriptFileName);
    generatedScript.deleteOnExit();

    // The drop statements must be wrapped in a single DDL batch.
    List<String> statements = Files.readAllLines(generatedScript.toPath());
    assertThat(statements)
        .containsExactly(
            "START BATCH DDL",
            "drop table `TestEntity_stringList`",
            "drop table `test_table`",
            "RUN BATCH");
}
@Test
public void generateDeleteStringsWithIndices() throws IOException, SQLException {
    // Simulate existing tables plus a secondary index.
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
        .setTables("Employee", "hibernate_sequence")
        .setIndices("name_index")
        .build());

    Metadata employeeMetadata =
        new MetadataSources(this.registry).addAnnotatedClass(Employee.class).buildMetadata();

    // Export the drop script for the Employee model to a uniquely named file.
    String scriptFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(scriptFileName)
        .drop(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), employeeMetadata);
    File generatedScript = new File(scriptFileName);
    generatedScript.deleteOnExit();

    // Indexes must be dropped before the tables, all inside one DDL batch.
    List<String> statements = Files.readAllLines(generatedScript.toPath());
    assertThat(statements).containsExactly(
        "START BATCH DDL",
        "drop index name_index",
        "drop table Employee",
        "drop table hibernate_sequence",
        "RUN BATCH");
}
@Test
public void omitCreatingPreexistingTables() throws IOException, SQLException {
    // Declare the Employee table as already existing in the mocked database.
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
        .setTables("Employee")
        .build());

    Metadata employeeMetadata =
        new MetadataSources(this.registry).addAnnotatedClass(Employee.class).buildMetadata();

    String scriptFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(scriptFileName)
        .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), employeeMetadata);
    File generatedScript = new File(scriptFileName);
    generatedScript.deleteOnExit();

    List<String> statements = Files.readAllLines(generatedScript.toPath());
    assertThat(statements).containsExactly(
        // This omits creating the Employee table since it is declared to exist in metadata.
        "START BATCH DDL",
        "create table hibernate_sequence (next_val INT64) PRIMARY KEY ()",
        "create index name_index on Employee (name)",
        "alter table Employee add constraint FKiralam2duuhr33k8a10aoc2t6 "
        + "foreign key (manager_id) references Employee (id)",
        "RUN BATCH",
        "INSERT INTO hibernate_sequence (next_val) VALUES(1)"
    );
}
@Test
public void generateCreateStringsNoPkEntityTest() {
    // An entity without an @Id must be rejected while building metadata.
    assertThatThrownBy(() -> {
        Metadata noPkMetadata = new MetadataSources(this.registry)
            .addAnnotatedClass(NoPkEntity.class)
            .buildMetadata();
        new SchemaExport()
            .setOutputFile("unused")
            .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), noPkMetadata);
    })
        .isInstanceOf(AnnotationException.class)
        .hasMessage(
            "No identifier specified for entity: "
            + "com.google.cloud.spanner.hibernate.SpannerTableExporterTests$NoPkEntity");
}
/**
 * Exports the create script for the given metadata and executes only the
 * statements that apply constraints.
 */
protected void applyConstraints(Metadata metadata) {
    File scriptFile = null;
    try {
        scriptFile = File.createTempFile("schema", ".sql");
        // Write the unformatted create script so each statement is one line.
        new SchemaExport().setOutputFile(scriptFile.getAbsolutePath())
            .setFormat(false).createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> constraintSqls = new ArrayList<>();
        for (String line : FileUtils.readLines(scriptFile, Charset.defaultCharset())) {
            if (isApplyingConstraints(line)) {
                constraintSqls.add(line);
            }
        }
        execute(constraintSqls, true);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (scriptFile != null) {
            scriptFile.delete();
        }
    }
}
/**
 * Exports the create script for the given metadata and executes the included
 * statements, skipping those that apply constraints.
 */
protected void createTables(Metadata metadata) {
    File scriptFile = null;
    try {
        scriptFile = File.createTempFile("schema", ".sql");
        // Write the unformatted create script so each statement is one line.
        new SchemaExport().setOutputFile(scriptFile.getAbsolutePath())
            .setFormat(false).createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> tableSqls = new ArrayList<>();
        for (String line : FileUtils.readLines(scriptFile, Charset.defaultCharset())) {
            if (shouldInclude(line) && !isApplyingConstraints(line)) {
                tableSqls.add(line);
            }
        }
        execute(tableSqls, true);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (scriptFile != null) {
            FileUtils.deleteFile(scriptFile);
        }
    }
}
/**
 * Exports the drop script for the given metadata and executes only the
 * statements that drop constraints.
 */
protected void dropConstraints(Metadata metadata) {
    File scriptFile = null;
    try {
        scriptFile = File.createTempFile("schema", ".sql");
        // Write the unformatted drop script so each statement is one line.
        new SchemaExport().setOutputFile(scriptFile.getAbsolutePath())
            .setFormat(false).drop(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> constraintSqls = new ArrayList<>();
        for (String line : FileUtils.readLines(scriptFile, Charset.defaultCharset())) {
            if (isDroppingConstraints(line)) {
                constraintSqls.add(line);
            }
        }
        execute(constraintSqls, false);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (scriptFile != null) {
            scriptFile.delete();
        }
    }
}
/**
 * Exports the full drop script for the given metadata and executes every
 * statement in it, removing all mapped schema objects.
 */
protected void cleanDatabase(Metadata metadata) {
    File tempFile = null;
    try {
        tempFile = File.createTempFile("schema", ".sql");
        // Write the unformatted drop script so each statement is one line.
        new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
            .setFormat(false).drop(EnumSet.of(TargetType.SCRIPT), metadata);
        // Fix: copy the lines directly instead of an element-by-element loop.
        List<String> sqls = new ArrayList<>(FileUtils.readLines(tempFile, Charset.defaultCharset()));
        execute(sqls, false);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (tempFile != null) {
            tempFile.delete();
        }
    }
}
/**
 * Runs the schema export for the requested action against the given targets.
 * No-op when the action is NONE or no targets were requested.
 */
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) {
    if ( action == Action.NONE ) {
        LOG.debug( "Skipping SchemaExport as Action.NONE was passed" );
        return;
    }
    if ( targetTypes.isEmpty() ) {
        LOG.debug( "Skipping SchemaExport as no targets were specified" );
        return;
    }

    exceptions.clear();
    LOG.runningHbm2ddlSchemaExport();

    final TargetDescriptor descriptor = buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
    final boolean needsJdbc = needsJdbcConnection( targetTypes );
    doExecution( action, needsJdbc, metadata, serviceRegistry, descriptor );
}
/**
 * Builds a TargetDescriptor for the requested targets. When SCRIPT output is
 * requested, an output file name must have been provided.
 *
 * @throws SchemaManagementException if SCRIPT was requested without a file
 */
public static TargetDescriptor buildTargetDescriptor(
        EnumSet<TargetType> targetTypes,
        String outputFile,
        ServiceRegistry serviceRegistry) {
    // No script output requested: descriptor carries no script target.
    if ( !targetTypes.contains( TargetType.SCRIPT ) ) {
        return new TargetDescriptorImpl( targetTypes, null );
    }
    if ( outputFile == null ) {
        throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
    }
    final String charsetName = (String) serviceRegistry.getService( ConfigurationService.class )
            .getSettings().get( AvailableSettings.HBM2DDL_CHARSET_NAME );
    final ScriptTargetOutput scriptTarget = Helper.interpretScriptTargetSetting(
            outputFile,
            serviceRegistry.getService( ClassLoaderService.class ),
            charsetName
    );
    return new TargetDescriptorImpl( targetTypes, scriptTarget );
}
/**
 * Parses a comma-separated "--target" option string ("database", "stdout",
 * "script", case-insensitive) into a set of TargetTypes. "none" yields an
 * empty set.
 *
 * @throws IllegalArgumentException on an unrecognized option token
 */
public static EnumSet<TargetType> parseCommandLineOptions(String targetTypeText) {
    final EnumSet<TargetType> result = EnumSet.noneOf( TargetType.class );
    if ( targetTypeText.equalsIgnoreCase( "none" ) ) {
        return result;
    }
    for ( String token : targetTypeText.split( "," ) ) {
        if ( "database".equalsIgnoreCase( token ) ) {
            result.add( TargetType.DATABASE );
        }
        else if ( "stdout".equalsIgnoreCase( token ) ) {
            result.add( TargetType.STDOUT );
        }
        else if ( "script".equalsIgnoreCase( token ) ) {
            result.add( TargetType.SCRIPT );
        }
        else {
            throw new IllegalArgumentException( "Unrecognized --target option : " + token );
        }
    }
    return result;
}
/**
 * Generates the DDL script for the entities under the given package using the
 * given dialect, writing it to OutputRoot + fileName (replacing any existing file).
 */
private void outputDdl(String packageName, String dialect, String fileName) {
    LocalSessionFactoryBean sfBean = sfBean(packageName, dialect);
    StandardServiceRegistry serviceRegistry = sfBean.getConfiguration().getStandardServiceRegistryBuilder().build();
    try {
        String ddlFile = OutputRoot + fileName;
        // Start from a clean slate so stale output is never kept.
        Files.deleteIfExists(Paths.get(ddlFile));

        Metadata metadata = metadata(serviceRegistry, sfBean.getMetadataSources());
        SchemaExport export = new SchemaExport();
        export.setDelimiter(";");
        export.setFormat(FormatSql);
        export.setOutputFile(ddlFile);
        export.create(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), metadata);
    } catch (Exception e) {
        throw new InvocationException(e);
    } finally {
        StandardServiceRegistryBuilder.destroy(serviceRegistry);
    }
}
/**
 * Generates the DDL script for the entities under the given package using the
 * given dialect, writing it to OutputRoot + fileName (replacing any existing file).
 */
private void outputDdl(String packageName, String dialect, String fileName) {
    LocalSessionFactoryBean sessionFactoryBean = sfBean(packageName, dialect);
    StandardServiceRegistry registry =
            sessionFactoryBean.getConfiguration().getStandardServiceRegistryBuilder().build();
    try {
        String targetFile = OutputRoot + fileName;
        // Remove any previous output before exporting.
        Files.deleteIfExists(Paths.get(targetFile));

        Metadata metadata = metadata(registry, sessionFactoryBean.getMetadataSources());
        SchemaExport schemaExport = new SchemaExport();
        schemaExport.setDelimiter(";");
        schemaExport.setFormat(FormatSql);
        schemaExport.setOutputFile(targetFile);
        schemaExport.create(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), metadata);
    } catch (Exception e) {
        throw new InvocationException(e);
    } finally {
        StandardServiceRegistryBuilder.destroy(registry);
    }
}
/**
 * Method that actually creates the DDL file for the given dialect.
 *
 * <p>The output file is named after the dialect in lower case (e.g. {@code mysql.ddl})
 * and contains both drop and create statements ({@code Action.BOTH}).
 *
 * @param dialect the database dialect to generate DDL for
 */
private void generate(Dialect dialect) {
    StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder();
    ssrb.applySetting("hibernate.dialect", dialect.getDialectClass());
    StandardServiceRegistry standardServiceRegistry = ssrb.build();

    MetadataSources metadataSources = new MetadataSources(standardServiceRegistry);
    // Fix: parameterized Class<?> instead of the raw Class type.
    for (Class<?> clzz : jpaClasses) {
        metadataSources.addAnnotatedClass(clzz);
    }
    Metadata metadata = metadataSources.buildMetadata();

    SchemaExport export = new SchemaExport();
    export.setDelimiter(";");
    export.setOutputFile(dialect.name().toLowerCase() + ".ddl");
    export.execute(EnumSet.of(TargetType.SCRIPT), Action.BOTH, metadata);
}
/**
 * Generates database create commands for the specified entities using Hibernate native API, SchemaExport.
 * Creation commands are exported into the create.sql file.
 */
public static void generateSchema() {
    // In-memory H2 database used only to build the metadata.
    Map<String, String> settings = new HashMap<>();
    settings.put(Environment.URL, "jdbc:h2:mem:schema");
    StandardServiceRegistry serviceRegistry =
            new StandardServiceRegistryBuilder().applySettings(settings).build();

    MetadataSources sources = new MetadataSources(serviceRegistry);
    sources.addAnnotatedClass(Account.class);
    sources.addAnnotatedClass(AccountSetting.class);

    SchemaExport exporter = new SchemaExport();
    exporter.setFormat(true);
    exporter.setOutputFile("create.sql");
    exporter.createOnly(EnumSet.of(TargetType.SCRIPT), sources.buildMetadata());
}
@Test
public void generateCreateStringsTest() throws IOException {
    // Export the create script to a uniquely named file.
    String scriptFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(scriptFileName)
        .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), this.metadata);
    File generatedScript = new File(scriptFileName);
    generatedScript.deleteOnExit();
    List<String> statements = Files.readAllLines(generatedScript.toPath());

    // The types in the following string need to be updated when SpannerDialect
    // implementation maps types.
    String expectedCreateString = "create table `test_table` (`ID1` INT64 not null,id2"
        + " STRING(255) not null,`boolColumn` BOOL,longVal INT64 not null,stringVal"
        + " STRING(255)) PRIMARY KEY (`ID1`,id2)";
    String expectedCollectionCreateString = "create table `TestEntity_stringList` "
        + "(`TestEntity_ID1` INT64 not null,`TestEntity_id2` STRING(255) not null,"
        + "stringList STRING(255)) PRIMARY KEY (`TestEntity_ID1`,`TestEntity_id2`,stringList)";
    String foreignKeyString =
        "alter table `TestEntity_stringList` add constraint FK2is6fwy3079dmfhjot09x5och "
        + "foreign key (`TestEntity_ID1`, `TestEntity_id2`) "
        + "references `test_table` (`ID1`, id2)";

    // Batch markers are fixed, but the three DDL statements may appear in any order.
    assertThat(statements.get(0)).isEqualTo("START BATCH DDL");
    assertThat(statements.subList(1, 4))
        .containsExactlyInAnyOrder(
            expectedCreateString, expectedCollectionCreateString, foreignKeyString);
    assertThat(statements.get(4)).isEqualTo("RUN BATCH");
}
@Test
public void generateCreateStringsEmptyEntityTest() {
    // An entity with no mapped identifier must be rejected while building metadata.
    assertThatThrownBy(() -> {
        Metadata emptyEntityMetadata = new MetadataSources(this.registry)
            .addAnnotatedClass(EmptyEntity.class)
            .buildMetadata();
        new SchemaExport()
            .setOutputFile("unused")
            .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), emptyEntityMetadata);
    })
        .isInstanceOf(AnnotationException.class)
        .hasMessage(
            "No identifier specified for entity: "
            + "com.google.cloud.spanner.hibernate.SpannerTableExporterTests$EmptyEntity");
}
/**
 * For testing use
 */
public void perform(Action action, Metadata metadata, ScriptTargetOutput target) {
    // Resolve the service registry directly from the metadata under test.
    final ServiceRegistry serviceRegistry =
            ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
    final TargetDescriptorImpl descriptor =
            new TargetDescriptorImpl( EnumSet.of( TargetType.SCRIPT ), target );
    doExecution( action, false, metadata, serviceRegistry, descriptor );
}
/**
 * Creates a descriptor for the given targets.
 *
 * @param targetTypes the output targets to write to
 * @param scriptTarget the script output, or null when SCRIPT is not among the targets
 */
public TargetDescriptorImpl(
EnumSet<TargetType> targetTypes,
ScriptTargetOutput scriptTarget) {
this.targetTypes = targetTypes;
this.scriptTarget = scriptTarget;
}
/**
 * Runs the schema migration against the requested targets. No-op when no
 * targets were requested. Any exceptions collected by the non-halting handler
 * are appended to {@code exceptions}.
 */
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Metadata metadata, ServiceRegistry serviceRegistry) {
    if ( targetTypes.isEmpty() ) {
        LOG.debug( "Skipping SchemaExport as no targets were specified" );
        return;
    }

    exceptions.clear();
    LOG.runningHbm2ddlSchemaUpdate();

    // Fix: parameterized Map instead of the raw Map/HashMap types.
    Map<String, Object> config =
            new HashMap<>( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
    config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
    config.put( AvailableSettings.FORMAT_SQL, format );

    final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );

    // Either halt on the first error or collect all errors for later reporting.
    final ExceptionHandler exceptionHandler = haltOnError
            ? ExceptionHandlerHaltImpl.INSTANCE
            : new ExceptionHandlerCollectingImpl();
    final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
            config,
            exceptionHandler
    );
    final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );

    try {
        tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, targetDescriptor );
    }
    finally {
        if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) {
            exceptions.addAll( ( (ExceptionHandlerCollectingImpl) exceptionHandler ).getExceptions() );
        }
    }
}
/**
 * Translates the legacy (script, export, outputFile) command-line flags into
 * the modern TargetType set via the deprecated Target interpretation.
 */
public static EnumSet<TargetType> parseLegacyCommandLineOptions(boolean script, boolean export, String outputFile) {
    final Target legacyTarget = Target.interpret( script, export );
    final EnumSet<TargetType> result = EnumSet.noneOf( TargetType.class );
    if ( outputFile != null ) {
        result.add( TargetType.SCRIPT );
    }
    if ( legacyTarget.doScript() ) {
        result.add( TargetType.STDOUT );
    }
    if ( legacyTarget.doExport() ) {
        result.add( TargetType.DATABASE );
    }
    return result;
}
/**
 * Builds one GenerationTarget per requested target type: stdout, script file,
 * and/or database (via a JDBC connection obtained from the context).
 *
 * @throws SchemaManagementException if SCRIPT was requested without a script output
 */
GenerationTarget[] buildGenerationTargets(
        TargetDescriptor targetDescriptor,
        JdbcContext jdbcContext,
        Map options,
        boolean needsAutoCommit) {
    final String scriptDelimiter = ConfigurationHelper.getString( HBM2DDL_DELIMITER, options );
    final List<GenerationTarget> targets = new ArrayList<>();

    if ( targetDescriptor.getTargetTypes().contains( TargetType.STDOUT ) ) {
        targets.add( new GenerationTargetToStdout( scriptDelimiter ) );
    }
    if ( targetDescriptor.getTargetTypes().contains( TargetType.SCRIPT ) ) {
        if ( targetDescriptor.getScriptTargetOutput() == null ) {
            throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
        }
        targets.add( new GenerationTargetToScript( targetDescriptor.getScriptTargetOutput(), scriptDelimiter ) );
    }
    if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
        targets.add( new GenerationTargetToDatabase( getDdlTransactionIsolator( jdbcContext ), true ) );
    }
    return targets.toArray( new GenerationTarget[0] );
}
/**
 * Builds one GenerationTarget per requested target type: stdout, script file,
 * and/or database (using the supplied DDL transaction isolator).
 *
 * @throws SchemaManagementException if SCRIPT was requested without a script output
 */
GenerationTarget[] buildGenerationTargets(
        TargetDescriptor targetDescriptor,
        DdlTransactionIsolator ddlTransactionIsolator,
        Map options) {
    final String scriptDelimiter = ConfigurationHelper.getString( HBM2DDL_DELIMITER, options );
    final List<GenerationTarget> targets = new ArrayList<>();

    if ( targetDescriptor.getTargetTypes().contains( TargetType.STDOUT ) ) {
        targets.add( new GenerationTargetToStdout( scriptDelimiter ) );
    }
    if ( targetDescriptor.getTargetTypes().contains( TargetType.SCRIPT ) ) {
        if ( targetDescriptor.getScriptTargetOutput() == null ) {
            throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
        }
        targets.add( new GenerationTargetToScript( targetDescriptor.getScriptTargetOutput(), scriptDelimiter ) );
    }
    if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
        targets.add( new GenerationTargetToDatabase( ddlTransactionIsolator, false ) );
    }
    return targets.toArray( new GenerationTarget[0] );
}
/**
 * Exports the create script for the given metadata to
 * {@code <user.dir>/src/main/resources/liquibase/hibernate-base-db-schema.sql}.
 */
private static void exportSchema(Metadata buildMetadata) {
    String rootPath = System.getProperty(ModelDBConstants.userDir);
    // Fix: build the path with File.separator instead of hard-coded Windows
    // backslashes so the export also works on Unix-like systems.
    String schemaPath = String.join(File.separator,
            rootPath, "src", "main", "resources", "liquibase", "hibernate-base-db-schema.sql");
    new SchemaExport()
            .setDelimiter(";")
            .setOutputFile(schemaPath)
            .create(EnumSet.of(TargetType.SCRIPT), buildMetadata);
}
public static void main(String[] args) throws Exception {
    final String locationPattern = "classpath:/org/wallride/domain/*";

    final BootstrapServiceRegistry bootstrapRegistry = new BootstrapServiceRegistryBuilder().build();
    final MetadataSources metadataSources = new MetadataSources(bootstrapRegistry);

    final StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder(bootstrapRegistry);
    registryBuilder.applySetting(AvailableSettings.DIALECT, ExtendedMySQL5InnoDBDialect.class.getCanonicalName());
    registryBuilder.applySetting(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, true);
    registryBuilder.applySetting(AvailableSettings.PHYSICAL_NAMING_STRATEGY, PhysicalNamingStrategySnakeCaseImpl.class);

    // Scan the domain package and register every class annotated with @Entity.
    final PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
    final SimpleMetadataReaderFactory readerFactory = new SimpleMetadataReaderFactory();
    for (Resource resource : resolver.getResources(locationPattern)) {
        AnnotationMetadata classMetadata = readerFactory.getMetadataReader(resource).getAnnotationMetadata();
        if (classMetadata.hasAnnotation(Entity.class.getName())) {
            metadataSources.addAnnotatedClass(Class.forName(classMetadata.getClassName()));
        }
    }

    final StandardServiceRegistryImpl serviceRegistry = (StandardServiceRegistryImpl) registryBuilder.build();
    final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(serviceRegistry);
    // Print the create schema to stdout only.
    new SchemaExport()
            .setHaltOnError(true)
            .setDelimiter(";")
            .create(EnumSet.of(TargetType.STDOUT), metadataBuilder.build());
}
/**
 * Initializes the persistence layer: on a fresh database (no recorded data
 * version) exports and executes the create schema, builds the session factory,
 * and records the current model version; otherwise just builds the session
 * factory with the configured interceptor.
 */
@Override
public void start() {
String dialect = getDialect().toLowerCase();
// HSQLDB only: switch to MVCC transaction control before any schema work.
if (dialect.contains("hsql"))
execute(Lists.newArrayList("SET DATABASE TRANSACTION CONTROL MVCC"), true);
String dbDataVersion = checkDataVersion(true);
Metadata metadata = buildMetadata();
// A null data version means the schema has never been created in this database.
if (dbDataVersion == null) {
File tempFile = null;
try {
tempFile = File.createTempFile("schema", ".sql");
// Export the unformatted create script so each statement is one line.
new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
.setFormat(false).createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
List<String> sqls = new ArrayList<String>();
// Keep only the statements the subclass deems applicable.
for (String sql: FileUtils.readLines(tempFile, Charset.defaultCharset())) {
if (shouldInclude(sql))
sqls.add(sql);
}
execute(sqls, true);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
if (tempFile != null)
tempFile.delete();
}
sessionFactory = buildSessionFactory(metadata);
idManager.init();
// Persist the current model version so future startups skip schema creation.
transactionManager.run(new Runnable() {
@Override
public void run() {
ModelVersion dataVersion = new ModelVersion();
dataVersion.versionColumn = MigrationHelper.getVersion(DataMigrator.class);
transactionManager.getSession().save(dataVersion);
}
});
} else {
// Schema already exists: build the session factory with the interceptor attached.
sessionFactory = metadata.getSessionFactoryBuilder().applyInterceptor(interceptor).build();
idManager.init();
}
}
/**
 * Exports the drop script (Action.DROP) for the given metadata to the requested targets.
 */
public void drop(EnumSet<TargetType> targetTypes, Metadata metadata) {
execute( targetTypes, Action.DROP, metadata );
}
/**
 * Exports both drop and create scripts (Action.BOTH) for the given metadata
 * to the requested targets.
 */
public void create(EnumSet<TargetType> targetTypes, Metadata metadata) {
execute( targetTypes, Action.BOTH, metadata );
}
/**
 * Exports only the create script (Action.CREATE) for the given metadata
 * to the requested targets.
 */
public void createOnly(EnumSet<TargetType> targetTypes, Metadata metadata) {
execute( targetTypes, Action.CREATE, metadata );
}
/**
 * Convenience overload that resolves the service registry from the metadata itself.
 */
public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata) {
execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
}