org.hibernate.tool.hbm2ddl.SchemaExport Java Examples
The following examples show how to use
org.hibernate.tool.hbm2ddl.SchemaExport.
Each example lists its source file, the project it comes from, and that project's license.
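Most of the newer examples (Hibernate 5.2 and later) follow the same pattern: build a Metadata from a MetadataSources backed by a StandardServiceRegistry, then hand it to SchemaExport together with a set of TargetTypes. The following is a minimal sketch of that pattern, not taken from any of the projects below; the MyEntity class and the in-memory H2 connection URL are placeholders, and it assumes the H2 driver is on the classpath.

import java.util.EnumSet;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportSketch {

    @Entity
    static class MyEntity { // placeholder entity, stands in for your own mapped classes
        @Id
        @GeneratedValue
        Long id;
    }

    public static void main(String[] args) {
        // Placeholder connection settings; any JDBC URL Hibernate can open at build time will do.
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .applySetting("hibernate.connection.url", "jdbc:h2:mem:schema")
                .build();
        try {
            // Register the annotated entities whose DDL should be generated.
            Metadata metadata = new MetadataSources(registry)
                    .addAnnotatedClass(MyEntity.class)
                    .buildMetadata();

            // Write the CREATE statements to a script file without touching the database.
            new SchemaExport()
                    .setDelimiter(";")
                    .setFormat(true)
                    .setOutputFile("create.sql")
                    .createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        } finally {
            StandardServiceRegistryBuilder.destroy(registry);
        }
    }
}

The older examples (Hibernate 3.x and 4.x) instead construct SchemaExport from a Configuration and drive it with boolean flags, such as create(script, export) or execute(script, export, justDrop, justCreate); both styles appear below.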
Example #1
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void generateDeleteStringsWithIndices() throws IOException, SQLException {
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
            .setTables("Employee", "hibernate_sequence")
            .setIndices("name_index")
            .build());
    Metadata employeeMetadata =
            new MetadataSources(this.registry).addAnnotatedClass(Employee.class).buildMetadata();
    String testFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(testFileName)
            .drop(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), employeeMetadata);
    File scriptFile = new File(testFileName);
    scriptFile.deleteOnExit();
    List<String> statements = Files.readAllLines(scriptFile.toPath());
    assertThat(statements).containsExactly(
            "START BATCH DDL",
            "drop index name_index",
            "drop table Employee",
            "drop table hibernate_sequence",
            "RUN BATCH");
}
Example #2
Source File: SuppliedConnectionTest.java From cacheonix-core with GNU Lesser General Public License v2.1

protected void prepareTest() throws Exception {
    super.prepareTest();
    Connection conn = cp.getConnection();
    try {
        new SchemaExport(getCfg(), conn).create(false, true);
    } finally {
        if (conn != null) {
            try {
                cp.closeConnection(conn);
            } catch (Throwable ignore) {
            }
        }
    }
}
Example #3
Source File: MigrationTest.java From cacheonix-core with GNU Lesser General Public License v2.1

public void testSimpleColumnAddition() {
    String resource1 = "org/hibernate/test/schemaupdate/1_Version.hbm.xml";
    String resource2 = "org/hibernate/test/schemaupdate/2_Version.hbm.xml";

    Configuration v1cfg = new Configuration();
    v1cfg.addResource(resource1);
    new SchemaExport(v1cfg).execute(false, true, true, false);

    SchemaUpdate v1schemaUpdate = new SchemaUpdate(v1cfg);
    v1schemaUpdate.execute(true, true);
    assertEquals(0, v1schemaUpdate.getExceptions().size());

    Configuration v2cfg = new Configuration();
    v2cfg.addResource(resource2);

    SchemaUpdate v2schemaUpdate = new SchemaUpdate(v2cfg);
    v2schemaUpdate.execute(true, true);
    assertEquals(0, v2schemaUpdate.getExceptions().size());
}
Example #4
Source File: DdlExporter.java From sample-boot-hibernate with MIT License

private void outputDdl(String packageName, String dialect, String fileName) {
    LocalSessionFactoryBean sfBean = sfBean(packageName, dialect);
    StandardServiceRegistry serviceRegistry = sfBean.getConfiguration().getStandardServiceRegistryBuilder().build();
    try {
        String outputFile = OutputRoot + fileName;
        Files.deleteIfExists(Paths.get(outputFile));
        Metadata metadata = metadata(serviceRegistry, sfBean.getMetadataSources());
        SchemaExport export = new SchemaExport();
        export.setDelimiter(";");
        export.setFormat(FormatSql);
        export.setOutputFile(outputFile);
        export.create(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), metadata);
    } catch (Exception e) {
        throw new InvocationException(e);
    } finally {
        StandardServiceRegistryBuilder.destroy(serviceRegistry);
    }
}
Example #5
Source File: App.java From juddi with Apache License 2.0

/**
 * Method that actually creates the file.
 *
 * @param dbDialect to use
 */
private void generate(Dialect dialect) {
    StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder();
    ssrb.applySetting("hibernate.dialect", dialect.getDialectClass());
    StandardServiceRegistry standardServiceRegistry = ssrb.build();
    MetadataSources metadataSources = new MetadataSources(standardServiceRegistry);
    for (Class clzz : jpaClasses) {
        metadataSources.addAnnotatedClass(clzz);
    }
    Metadata metadata = metadataSources.buildMetadata();
    SchemaExport export = new SchemaExport();
    export.setDelimiter(";");
    export.setOutputFile(dialect.name().toLowerCase() + ".ddl");
    //export.execute(true, false, false, true);
    export.execute(EnumSet.of(TargetType.SCRIPT), Action.BOTH, metadata);
}
Example #6
Source File: SchemaGenerator.java From core with GNU General Public License v3.0

/**
 * Method that actually creates the file.
 *
 * @param dbDialect to use
 */
private void generate(Dialect dialect) {
    cfg.setProperty("hibernate.dialect", dialect.getDialectClass());
    SchemaExport export = new SchemaExport(cfg);
    export.setDelimiter(";");

    // Determine file name. Use "ddl_" plus dialect name such as mysql or
    // oracle plus the package name with "_" replacing "." such as
    // org_transitime_db_structs .
    String packeNameSuffix = packageName.replace(".", "_");
    String outputFilename = (outputDirectory != null ? outputDirectory + "/" : "")
            + "ddl_" + dialect.name().toLowerCase() + "_" + packeNameSuffix + ".sql";
    export.setOutputFile(outputFilename);

    // Export, but only to an SQL file. Don't actually modify the database
    System.out.println("Writing file " + outputFilename);
    export.execute(true, false, false, false);

    // Get rid of unneeded SQL for dropping tables and keys and such
    trimCruftFromFile(outputFilename);
}
Example #7
Source File: Test.java From document-management-system with GNU General Public License v2.0

/**
 * Only for testing purposes
 */
public static void main(String[] args) throws Exception {
    log.info("Generate database schema & initial data");
    HibernateUtil.generateDatabase("org.hibernate.dialect.Oracle10gDialect");

    Configuration cfg = new Configuration();

    // Add annotated beans
    cfg.addAnnotatedClass(NodeFolder.class);

    // Configure Hibernate
    cfg.setProperty("hibernate.dialect", Config.HIBERNATE_DIALECT);
    cfg.setProperty("hibernate.hbm2ddl.auto", "create");

    SchemaExport se = new SchemaExport(cfg);
    se.setOutputFile("/home/pavila/export.sql");
    se.setDelimiter(";");
    se.setFormat(false);
    se.create(false, false);
}
Example #8
Source File: HibernateUtil.java From document-management-system with GNU General Public License v2.0

/**
 * Generate database schema and initial data for a defined dialect
 */
public static void generateDatabase(String dialect) throws IOException {
    // Configure Hibernate
    log.info("Exporting Database Schema...");
    String dbSchema = EnvironmentDetector.getUserHome() + "/schema.sql";
    Configuration cfg = getConfiguration().configure();
    cfg.setProperty("hibernate.dialect", dialect);
    SchemaExport se = new SchemaExport(cfg);
    se.setOutputFile(dbSchema);
    se.setDelimiter(";");
    se.setFormat(false);
    se.create(false, false);
    log.info("Database Schema exported to {}", dbSchema);

    String initialData = new File("").getAbsolutePath() + "/src/main/resources/default.sql";
    log.info("Exporting Initial Data from '{}'...", initialData);
    String initData = EnvironmentDetector.getUserHome() + "/data.sql";
    FileInputStream fis = new FileInputStream(initialData);
    String ret = DatabaseDialectAdapter.dialectAdapter(fis, dialect);
    FileWriter fw = new FileWriter(initData);
    IOUtils.write(ret, fw);
    fw.flush();
    fw.close();
    log.info("Initial Data exported to {}", initData);
}
Example #9
Source File: DdlExporter.java From sample-boot-micro with MIT License

private void outputDdl(String packageName, String dialect, String fileName) {
    LocalSessionFactoryBean sfBean = sfBean(packageName, dialect);
    StandardServiceRegistry serviceRegistry = sfBean.getConfiguration().getStandardServiceRegistryBuilder().build();
    try {
        String outputFile = OutputRoot + fileName;
        Files.deleteIfExists(Paths.get(outputFile));
        Metadata metadata = metadata(serviceRegistry, sfBean.getMetadataSources());
        SchemaExport export = new SchemaExport();
        export.setDelimiter(";");
        export.setFormat(FormatSql);
        export.setOutputFile(outputFile);
        export.create(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), metadata);
    } catch (Exception e) {
        throw new InvocationException(e);
    } finally {
        StandardServiceRegistryBuilder.destroy(serviceRegistry);
    }
}
Example #10
Source File: DefaultPersistManager.java From onedev with MIT License

protected void cleanDatabase(Metadata metadata) {
    File tempFile = null;
    try {
        tempFile = File.createTempFile("schema", ".sql");
        new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
                .setFormat(false).drop(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> sqls = new ArrayList<>();
        for (String sql : FileUtils.readLines(tempFile, Charset.defaultCharset())) {
            sqls.add(sql);
        }
        execute(sqls, false);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (tempFile != null)
            tempFile.delete();
    }
}
Example #11
Source File: DefaultPersistManager.java From onedev with MIT License

protected void dropConstraints(Metadata metadata) {
    File tempFile = null;
    try {
        tempFile = File.createTempFile("schema", ".sql");
        new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
                .setFormat(false).drop(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> sqls = new ArrayList<>();
        for (String sql : FileUtils.readLines(tempFile, Charset.defaultCharset())) {
            if (isDroppingConstraints(sql))
                sqls.add(sql);
        }
        execute(sqls, false);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (tempFile != null)
            tempFile.delete();
    }
}
Example #12
Source File: DefaultPersistManager.java From onedev with MIT License

protected void createTables(Metadata metadata) {
    File tempFile = null;
    try {
        tempFile = File.createTempFile("schema", ".sql");
        new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
                .setFormat(false).createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> sqls = new ArrayList<>();
        for (String sql : FileUtils.readLines(tempFile, Charset.defaultCharset())) {
            if (shouldInclude(sql) && !isApplyingConstraints(sql))
                sqls.add(sql);
        }
        execute(sqls, true);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (tempFile != null)
            FileUtils.deleteFile(tempFile);
    }
}
Example #13
Source File: DefaultPersistManager.java From onedev with MIT License

protected void applyConstraints(Metadata metadata) {
    File tempFile = null;
    try {
        tempFile = File.createTempFile("schema", ".sql");
        new SchemaExport().setOutputFile(tempFile.getAbsolutePath())
                .setFormat(false).createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        List<String> sqls = new ArrayList<>();
        for (String sql : FileUtils.readLines(tempFile, Charset.defaultCharset())) {
            if (isApplyingConstraints(sql)) {
                sqls.add(sql);
            }
        }
        execute(sqls, true);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (tempFile != null)
            tempFile.delete();
    }
}
Example #14
Source File: HibernateUtil.java From tutorials with MIT License

/**
 * Generates database create commands for the specified entities using Hibernate native API, SchemaExport.
 * Creation commands are exported into the create.sql file.
 */
public static void generateSchema() {
    Map<String, String> settings = new HashMap<>();
    settings.put(Environment.URL, "jdbc:h2:mem:schema");

    StandardServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(settings).build();

    MetadataSources metadataSources = new MetadataSources(serviceRegistry);
    metadataSources.addAnnotatedClass(Account.class);
    metadataSources.addAnnotatedClass(AccountSetting.class);
    Metadata metadata = metadataSources.buildMetadata();

    SchemaExport schemaExport = new SchemaExport();
    schemaExport.setFormat(true);
    schemaExport.setOutputFile("create.sql");
    schemaExport.createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
}
Example #15
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void generateCreateStringsNoPkEntityTest() {
    assertThatThrownBy(() -> {
        Metadata metadata = new MetadataSources(this.registry)
                .addAnnotatedClass(NoPkEntity.class)
                .buildMetadata();

        new SchemaExport()
                .setOutputFile("unused")
                .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), metadata);
    })
            .isInstanceOf(AnnotationException.class)
            .hasMessage(
                    "No identifier specified for entity: "
                            + "com.google.cloud.spanner.hibernate.SpannerTableExporterTests$NoPkEntity");
}
Example #16
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void generateDropStringsTest() throws IOException, SQLException {
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
            .setTables("test_table", "TestEntity_stringList")
            .build());

    String testFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(testFileName)
            .drop(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), this.metadata);
    File scriptFile = new File(testFileName);
    scriptFile.deleteOnExit();
    List<String> statements = Files.readAllLines(scriptFile.toPath());

    assertThat(statements)
            .containsExactly(
                    "START BATCH DDL",
                    "drop table `TestEntity_stringList`",
                    "drop table `test_table`",
                    "RUN BATCH");
}
Example #17
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void omitCreatingPreexistingTables() throws IOException, SQLException {
    this.connection.setMetaData(MockJdbcUtils.metaDataBuilder()
            .setTables("Employee")
            .build());

    Metadata employeeMetadata =
            new MetadataSources(this.registry).addAnnotatedClass(Employee.class).buildMetadata();
    String testFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(testFileName)
            .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), employeeMetadata);
    File scriptFile = new File(testFileName);
    scriptFile.deleteOnExit();
    List<String> statements = Files.readAllLines(scriptFile.toPath());

    assertThat(statements).containsExactly(
            // This omits creating the Employee table since it is declared to exist in metadata.
            "START BATCH DDL",
            "create table hibernate_sequence (next_val INT64) PRIMARY KEY ()",
            "create index name_index on Employee (name)",
            "alter table Employee add constraint FKiralam2duuhr33k8a10aoc2t6 "
                    + "foreign key (manager_id) references Employee (id)",
            "RUN BATCH",
            "INSERT INTO hibernate_sequence (next_val) VALUES(1)"
    );
}
Example #18
Source File: HibernateUtil.java From maven-framework-project with MIT License

/**
 * @param args
 */
public static void main(String[] args) {
    Configuration configuration = new Configuration().configure();
    SchemaExport export = new SchemaExport(configuration);
    export.create(true, true);
}
Example #19
Source File: CreateDataBaseTableByHibernateConfigFile.java From base-framework with Apache License 2.0

public static void main(String[] args) {
    Configuration configuration = new Configuration().configure().setNamingStrategy(new ImprovedNamingStrategy());
    EnversSchemaGenerator generator = new EnversSchemaGenerator(configuration);
    SchemaExport export = generator.export();
    export.setFormat(false);
    export.setOutputFile("src/test/resources/data/h2/create-table-new.sql");
    export.create(true, false);
}
Example #20
Source File: Hbm2ddl.java From wallride with Apache License 2.0

public static void main(String[] args) throws Exception {
    String locationPattern = "classpath:/org/wallride/domain/*";

    final BootstrapServiceRegistry registry = new BootstrapServiceRegistryBuilder().build();
    final MetadataSources metadataSources = new MetadataSources(registry);

    final StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder(registry);
    registryBuilder.applySetting(AvailableSettings.DIALECT, ExtendedMySQL5InnoDBDialect.class.getCanonicalName());
    registryBuilder.applySetting(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, true);
    registryBuilder.applySetting(AvailableSettings.PHYSICAL_NAMING_STRATEGY, PhysicalNamingStrategySnakeCaseImpl.class);

    final PathMatchingResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver();
    final Resource[] resources = resourcePatternResolver.getResources(locationPattern);
    final SimpleMetadataReaderFactory metadataReaderFactory = new SimpleMetadataReaderFactory();

    for (Resource resource : resources) {
        MetadataReader metadataReader = metadataReaderFactory.getMetadataReader(resource);
        AnnotationMetadata metadata = metadataReader.getAnnotationMetadata();
        if (metadata.hasAnnotation(Entity.class.getName())) {
            metadataSources.addAnnotatedClass(Class.forName(metadata.getClassName()));
        }
    }

    final StandardServiceRegistryImpl registryImpl = (StandardServiceRegistryImpl) registryBuilder.build();
    final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(registryImpl);

    new SchemaExport()
            .setHaltOnError(true)
            .setDelimiter(";")
            .create(EnumSet.of(TargetType.STDOUT), metadataBuilder.build());
}
Example #21
Source File: DatabaseSetup.java From olat with Apache License 2.0

/**
 * Write database configuration to file. Includes differences to existing database. Filename: "database/setupDatabase.sql"
 */
private static void exportDDLtoFile() {
    String outputFile = "database/setupDatabase.sql";
    boolean script = true; // write DDL
    boolean export = false; // don't update database
    try {
        SchemaExport se = new SchemaExport(cf);
        se.setOutputFile(outputFile);
        se.setDelimiter(";");
        se.create(script, export);
    } catch (Exception e) {
        log.error("DDL export to file failed: Reason: ", e);
    }
}
Example #22
Source File: CreateTestInitializeDataBaseSqlFile.java From base-framework with Apache License 2.0

public static void main(String[] args) {
    Configuration configuration = new Configuration().configure().setNamingStrategy(new ImprovedNamingStrategy());
    EnversSchemaGenerator generator = new EnversSchemaGenerator(configuration);
    SchemaExport export = generator.export();
    export.setFormat(false);
    export.setOutputFile("src/test/resources/h2schma.sql");
    export.create(true, false);
}
Example #23
Source File: TestSchemaTools.java From cacheonix-core with GNU Lesser General Public License v2.1

public void testFailingNonQuoteValidation() throws Exception {
    // The database schema has been created by the setUp method.
    // We have two schemas, SA and SB; SB must be set as the default schema
    // used by hibernate: hibernate.default_schema SB
    SchemaExport se = new SchemaExport(getCfg());
    se.create(true, true);
    // here we modify the generated table in order to test SchemaUpdate
    Session session = openSession();
    Connection conn = session.connection();
    Statement stat = conn.createStatement();
    stat.execute("ALTER TABLE \"SB\".\"TEAM\" DROP COLUMN xname ");
    // update schema
    //SchemaUpdate su = new SchemaUpdate(getCfg());
    //su.execute(true,true);
    try {
        SchemaValidator sv = new SchemaValidator(getCfg());
        sv.validate();
        fail("should fail since we mutated the current schema.");
    } catch (HibernateException he) {
    }
    // it's time to clean our database
    se.drop(true, true);
    // then the schemas and false table.
    stat.execute("DROP TABLE \"SA\".\"Team\" ");
    stat.execute(" DROP SCHEMA sa ");
    stat.execute("DROP SCHEMA sb ");
    stat.close();
    session.close();
}
Example #24
Source File: TestSchemaTools.java From cacheonix-core with GNU Lesser General Public License v2.1

public void testSchemaToolsNonQuote() throws Exception {
    // The database schema has been created by the setUp method.
    // We have two schemas, SA and SB; SB must be set as the default schema
    // used by hibernate: hibernate.default_schema SB
    SchemaExport se = new SchemaExport(getCfg());
    se.create(true, true);
    // here we modify the generated table in order to test SchemaUpdate
    Session session = openSession();
    Connection conn = session.connection();
    Statement stat = conn.createStatement();
    stat.execute("ALTER TABLE \"SB\".\"TEAM\" DROP COLUMN xname ");
    // update schema
    SchemaUpdate su = new SchemaUpdate(getCfg());
    su.execute(true, true);
    // we can run schema validation. Note that in the setUp method a *wrong* table
    // has been created with different column names;
    // if the schema validator chooses the bad db schema, then the testcase will fail (exception)
    SchemaValidator sv = new SchemaValidator(getCfg());
    sv.validate();
    // it's time to clean our database
    se.drop(true, true);
    // then the schemas and false table.
    stat.execute("DROP TABLE \"SA\".\"Team\" ");
    stat.execute(" DROP SCHEMA sa ");
    stat.execute("DROP SCHEMA sb ");
    stat.close();
    session.close();
}
Example #25
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void generateCreateStringsTest() throws IOException {
    String testFileName = UUID.randomUUID().toString();
    new SchemaExport().setOutputFile(testFileName)
            .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), this.metadata);
    File scriptFile = new File(testFileName);
    scriptFile.deleteOnExit();
    List<String> statements = Files.readAllLines(scriptFile.toPath());

    // The types in the following string need to be updated when SpannerDialect
    // implementation maps types.
    String expectedCreateString = "create table `test_table` (`ID1` INT64 not null,id2"
            + " STRING(255) not null,`boolColumn` BOOL,longVal INT64 not null,stringVal"
            + " STRING(255)) PRIMARY KEY (`ID1`,id2)";

    String expectedCollectionCreateString = "create table `TestEntity_stringList` "
            + "(`TestEntity_ID1` INT64 not null,`TestEntity_id2` STRING(255) not null,"
            + "stringList STRING(255)) PRIMARY KEY (`TestEntity_ID1`,`TestEntity_id2`,stringList)";

    String foreignKeyString =
            "alter table `TestEntity_stringList` add constraint FK2is6fwy3079dmfhjot09x5och "
                    + "foreign key (`TestEntity_ID1`, `TestEntity_id2`) "
                    + "references `test_table` (`ID1`, id2)";

    assertThat(statements.get(0)).isEqualTo("START BATCH DDL");
    assertThat(statements.subList(1, 4))
            .containsExactlyInAnyOrder(
                    expectedCreateString, expectedCollectionCreateString, foreignKeyString);
    assertThat(statements.get(4)).isEqualTo("RUN BATCH");
}
Example #26
Source File: SpannerTableExporterTests.java From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1

@Test
public void generateCreateStringsEmptyEntityTest() {
    assertThatThrownBy(() -> {
        Metadata metadata = new MetadataSources(this.registry)
                .addAnnotatedClass(EmptyEntity.class)
                .buildMetadata();

        new SchemaExport()
                .setOutputFile("unused")
                .createOnly(EnumSet.of(TargetType.STDOUT, TargetType.SCRIPT), metadata);
    })
            .isInstanceOf(AnnotationException.class)
            .hasMessage(
                    "No identifier specified for entity: "
                            + "com.google.cloud.spanner.hibernate.SpannerTableExporterTests$EmptyEntity");
}
Example #27
Source File: ModelDBHibernateUtil.java From modeldb with Apache License 2.0

private static void exportSchema(Metadata buildMetadata) {
    String rootPath = System.getProperty(ModelDBConstants.userDir);
    rootPath = rootPath + "\\src\\main\\resources\\liquibase\\hibernate-base-db-schema.sql";
    new SchemaExport()
            .setDelimiter(";")
            .setOutputFile(rootPath)
            .create(EnumSet.of(TargetType.SCRIPT), buildMetadata);
}
Example #28
Source File: TestSchemaTools.java From cacheonix-core with GNU Lesser General Public License v2.1

public void testSchemaTools() throws Exception {
    // The database schema has been created by the setUp method.
    // We have two schemas, SA and SB; SB must be set as the default schema
    // used by hibernate: hibernate.default_schema SB
    SchemaExport se = new SchemaExport(getCfg());
    se.create(true, true);
    // here we modify the generated table in order to test SchemaUpdate
    Session session = openSession();
    Connection conn = session.connection();
    Statement stat = conn.createStatement();
    stat.execute("ALTER TABLE \"SB\".\"Team\" DROP COLUMN name ");
    // update schema
    SchemaUpdate su = new SchemaUpdate(getCfg());
    su.execute(true, true);
    // we can run schema validation. Note that in the setUp method a *wrong* table
    // has been created with different column names;
    // if the schema validator chooses the bad db schema, then the testcase will fail (exception)
    SchemaValidator sv = new SchemaValidator(getCfg());
    sv.validate();
    // it's time to clean our database
    se.drop(true, true);
    // then the schemas and false table.
    stat.execute("DROP TABLE \"SA\".\"Team\" ");
    stat.execute(" DROP SCHEMA sa ");
    stat.execute("DROP SCHEMA sb ");
    stat.close();
    session.close();
}
Example #29
Source File: TestSchemaTools.java From cacheonix-core with GNU Lesser General Public License v2.1

public void testFailingQuoteValidation() throws Exception {
    // The database schema has been created by the setUp method.
    // We have two schemas, SA and SB; SB must be set as the default schema
    // used by hibernate: hibernate.default_schema SB
    SchemaExport se = new SchemaExport(getCfg());
    se.create(true, true);
    // here we modify the generated table in order to test SchemaUpdate
    Session session = openSession();
    Connection conn = session.connection();
    Statement stat = conn.createStatement();
    stat.execute("ALTER TABLE \"SB\".\"Team\" DROP COLUMN name ");
    // update schema
    //SchemaUpdate su = new SchemaUpdate(getCfg());
    //su.execute(true,true);
    try {
        SchemaValidator sv = new SchemaValidator(getCfg());
        sv.validate();
        fail("should fail since we mutated the current schema.");
    } catch (HibernateException he) {
    }
    // it's time to clean our database
    se.drop(true, true);
    // then the schemas and false table.
    stat.execute("DROP TABLE \"SA\".\"Team\" ");
    stat.execute(" DROP SCHEMA sa ");
    stat.execute("DROP SCHEMA sb ");
    stat.close();
    session.close();
}