最近项目需要对接达梦数据库。原有项目的数据库迁移采用 Flyway 进行管理,不过目前 Flyway 不支持达梦数据库。查看原有的 Flyway 代码结构如下:
在这里新增一个支持达梦数据库的包,dm:
DMConnection:
package org.flywaydb.core.internal.database.dm;

import org.flywaydb.core.internal.database.base.Connection;
import org.flywaydb.core.internal.database.base.Schema;

import java.sql.SQLException;

/**
 * Connection to a DM (Dameng) database.
 *
 * <p>Resolves the session's current schema via {@code SYS_CONTEXT} and switches schemas with
 * {@code ALTER SESSION SET CURRENT_SCHEMA}, mirroring Oracle semantics.</p>
 */
public class DMConnection extends Connection<DMDatabase> {
    DMConnection(DMDatabase database, java.sql.Connection connection) {
        super(database, connection);
    }

    @Override
    protected String getCurrentSchemaNameOrSearchPath() throws SQLException {
        // The current schema of this session (by default the connected user's own schema).
        return jdbcTemplate.queryForString("SELECt SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') FROM DUAL");
    }

    @Override
    public void doChangeCurrentSchemaOrSearchPathTo(String schema) throws SQLException {
        // Quote the schema name to preserve case and guard against injection via identifiers.
        jdbcTemplate.execute("ALTER SESSION SET CURRENT_SCHEMA=" + database.quote(schema));
    }

    @Override
    public Schema getSchema(String name) {
        return new DMSchema(jdbcTemplate, database, name);
    }
}
DMDatabase:
package org.flywaydb.core.internal.database.dm;

import org.flywaydb.core.api.configuration.Configuration;
import org.flywaydb.core.internal.database.base.Database;
import org.flywaydb.core.internal.database.base.Table;
import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
import org.flywaydb.core.internal.util.StringUtils;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * DM (Dameng) database dialect for Flyway.
 *
 * <p>NOTE(review): the pasted source had lost its string escapes ({@code \"}, {@code \n},
 * {@code \\d}); they are restored here so the class compiles. The SQL keyword casing of the
 * original (e.g. {@code SELECt}, {@code WHERe}) is preserved byte-for-byte where it is
 * case-insensitive keyword text.</p>
 */
public class DMDatabase extends Database<DMConnection> {
    private static final String DM_NET_TNS_ADMIN = "DM.net.tns_admin";

    /**
     * Propagates the TNS_ADMIN environment variable into the corresponding system property
     * so the JDBC driver can locate tnsnames.ora, unless the property is already set.
     */
    public static void enableTnsnamesOraSupport() {
        String tnsAdminEnvVar = System.getenv("TNS_ADMIN");
        String tnsAdminSysProp = System.getProperty(DM_NET_TNS_ADMIN);
        if (StringUtils.hasLength(tnsAdminEnvVar) && tnsAdminSysProp == null) {
            System.setProperty(DM_NET_TNS_ADMIN, tnsAdminEnvVar);
        }
    }

    public DMDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory) {
        super(configuration, jdbcConnectionFactory);
    }

    @Override
    protected DMConnection doGetConnection(Connection connection) {
        return new DMConnection(this, connection);
    }

    @Override
    public final void ensureSupported() {
        // Minimum supported version: 7.0.
        ensureDatabaseIsRecentEnough("7.0");
        // Latest certified version: 8.1; recommend an upgrade beyond that.
        ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("8.1",
                org.flywaydb.core.internal.license.Edition.ENTERPRISE);
        recommendFlywayUpgradeIfNecessary("8.1");
    }

    @Override
    public String getRawCreateScript(Table table, boolean baseline) {
        String tablespace = configuration.getTablespace() == null
                ? ""
                : " TABLESPACE \"" + configuration.getTablespace() + "\"";
        return "CREATE TABLE " + table + " (\n" +
                "    \"installed_rank\" INT NOT NULL,\n" +
                "    \"version\" VARCHAr2(50),\n" +
                "    \"description\" VARCHAr2(200) NOT NULL,\n" +
                "    \"type\" VARCHAr2(20) NOT NULL,\n" +
                "    \"script\" VARCHAr2(1000) NOT NULL,\n" +
                "    \"checksum\" INT,\n" +
                "    \"installed_by\" VARCHAr2(100) NOT NULL,\n" +
                "    \"installed_on\" TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,\n" +
                "    \"execution_time\" INT NOT NULL,\n" +
                "    \"success\" NUMBER(1) NOT NULL,\n" +
                "    CONSTRAINT \"" + table.getName() + "_pk\" PRIMARY KEY (\"installed_rank\")\n" +
                ")" + tablespace + ";\n" +
                (baseline ? getBaselineStatement(table) + ";\n" : "") +
                "CREATE INDEX \"" + table.getSchema().getName() + "\".\"" + table.getName()
                + "_s_idx\" ON " + table + " (\"success\");\n";
    }

    @Override
    public boolean supportsEmptyMigrationDescription() {
        // DM will convert the empty string to NULL implicitly, and throw an exception as the column is NOT NULL.
        return false;
    }

    @Override
    protected String doGetCurrentUser() throws SQLException {
        return getMainConnection().getJdbcTemplate().queryForString("SELECt USER FROM DUAL");
    }

    @Override
    public boolean supportsDdlTransactions() {
        return false;
    }

    @Override
    public boolean supportsChangingCurrentSchema() {
        return true;
    }

    @Override
    public String getBooleanTrue() {
        return "1";
    }

    @Override
    public String getBooleanFalse() {
        return "0";
    }

    @Override
    public String doQuote(String identifier) {
        // DM uses double quotes for delimited identifiers.
        return "\"" + identifier + "\"";
    }

    @Override
    public boolean catalogIsSchema() {
        return false;
    }

    /** Returns whether the given query yields at least one row. */
    boolean queryReturnsRows(String query, String... params) throws SQLException {
        return getMainConnection().getJdbcTemplate().queryForBoolean(
                "SELECt CASE WHEN EXISTS(" + query + ") THEN 1 ELSE 0 END FROM DUAL", params);
    }

    /** Returns whether the current session has the given privilege or role. */
    boolean isPrivOrRoleGranted(String name) throws SQLException {
        return queryReturnsRows("SELECt 1 FROM SESSION_PRIVS WHERe PRIVILEGE = ? UNIOn ALL "
                + "SELECt 1 FROM SESSION_ROLES WHERe ROLE = ?", name, name);
    }

    private boolean isDataDictViewAccessible(String owner, String name) throws SQLException {
        // BUG FIX: the privilege name is a case-sensitive data value; the pasted 'SELECt'
        // would never match the stored value 'SELECT'.
        return queryReturnsRows("SELECt * FROM ALL_TAB_PRIVS WHERe TABLE_SCHEMA = ? AND TABLE_NAME = ?"
                + " AND PRIVILEGE = 'SELECT'", owner, name);
    }

    boolean isDataDictViewAccessible(String name) throws SQLException {
        return isDataDictViewAccessible("SYS", name);
    }

    /** Picks the DBA_ flavour of a dictionary view when accessible, falling back to ALL_. */
    String dbaOrAll(String baseName) throws SQLException {
        return isPrivOrRoleGranted("SELECT ANY DICTIONARY") || isDataDictViewAccessible("DBA_" + baseName)
                ? "DBA_" + baseName
                : "ALL_" + baseName;
    }

    private Set<String> getAvailableOptions() throws SQLException {
        return new HashSet<>(getMainConnection().getJdbcTemplate()
                .queryForStringList("SELECT PARAMETER FROM V$OPTION WHERe VALUE = 'TRUE'"));
    }

    boolean isFlashbackDataArchiveAvailable() throws SQLException {
        return getAvailableOptions().contains("Flashback Data Archive");
    }

    boolean isDataMiningAvailable() throws SQLException {
        return getAvailableOptions().contains("Data Mining");
    }

    boolean isLocatorAvailable() throws SQLException {
        return isDataDictViewAccessible("MDSYS", "ALL_SDO_GEOM_METADATA");
    }

    /**
     * Returns the set of schemas owned by the system: the known defaults plus any
     * APEX/FLOWS schemas and DM-maintained users reported by the data dictionary.
     */
    Set<String> getSystemSchemas() throws SQLException {
        // The list of known default system schemas.
        Set<String> result = new HashSet<>(Arrays.asList(
                "SYS", "SYSTEM", // Standard system accounts
                "SYSBACKUP", "SYSDG", "SYSKM", "SYSRAC", "SYS$UMF", // Auxiliary system accounts
                "DBSNMP", "MGMT_VIEW", "SYSMAN", // Enterprise Manager accounts
                "OUTLN", // Stored outlines
                "AUDSYS", // Unified auditing
                "DM_OCM", // DM Configuration Manager
                "APPQOSSYS", // DM Database QoS Management
                "OJVMSYS", // DM JavaVM
                "DVF", "DVSYS", // DM Database Vault
                "DBSFWUSER", // Database Service Firewall
                "REMOTE_SCHEDULER_AGENT", // Remote scheduler agent
                "DIP", // DM Directory Integration Platform
                "APEX_PUBLIC_USER", "FLOWS_FILES", // DM Application Express
                "ANONYMOUS", "XDB", "XS$NULL", // DM XML Database
                "CTXSYS", // DM Text
                "LBACSYS", // DM Label Security
                "EXFSYS", // DM Rules Manager and Expression Filter
                "MDDATA", "MDSYS", "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", // DM Locator and Spatial
                "ORDDATA", "ORDPLUGINS", "ORDSYS", "SI_INFORMTN_SCHEMA", // DM Multimedia
                "WMSYS", // DM Workspace Manager
                "OLAPSYS", // DM OLAP catalogs
                "OWBSYS", "OWBSYS_AUDIT", // DM Warehouse Builder
                "GSMADMIN_INTERNAL", "GSMCATUSER", "GSMUSER", // Global Data Services
                "GGSYS", // DM GoldenGate
                "WK_TEST", "WKSYS", "WKPROXY", // DM Ultra Search
                "ODM", "ODM_MTR", "DMSYS", // DM Data Mining
                "TSMSYS" // Transparent Session Migration
        ));
        result.addAll(getMainConnection().getJdbcTemplate().queryForStringList(
                "SELECt USERNAME FROM ALL_USERS "
                        + "WHERe REGEXP_LIKE(USERNAME, '^(APEX|FLOWS)_\\d+$')"
                        + " OR DM_MAINTAINED = 'Y'"
        ));
        return result;
    }
}
DMParser:
package org.flywaydb.core.internal.database.dm;
import org.flywaydb.core.api.configuration.Configuration;
import org.flywaydb.core.internal.parser.*;
import org.flywaydb.core.internal.sqlscript.Delimiter;
import org.flywaydb.core.internal.sqlscript.ParsedSqlStatement;
import org.flywaydb.core.internal.util.StringUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
public class DMParser extends Parser {
private static final Delimiter PLSQL_DELIMITER = new Delimiter("/", true);
private static final Pattern PLSQL_TYPE_BODY_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACe)?(\s(NON)?EDITIONABLE)?\sTYPE\sBODY\s([^\s]*\s)?(IS|AS)");
private static final Pattern PLSQL_PACKAGE_BODY_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACE)?(\s(NON)?EDITIONABLE)?\sPACKAGE\sBODY\s([^\s]*\s)?(IS|AS)");
private static final StatementType PLSQL_PACKAGE_BODY_STATEMENT = new StatementType();
private static final Pattern PLSQL_PACKAGE_DEFINITION_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACE)?(\s(NON)?EDITIONABLE)?\sPACKAGE\s([^\s]*\s)?(AUTHID\s[^\s]*\s)?(IS|AS)");
private static final Pattern PLSQL_VIEW_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACE)?(\s(NON)?EDITIONABLE)?\sVIEW\s([^\s]*\s)?AS\sWITH\s(PROCEDURE|FUNCTION)");
private static final StatementType PLSQL_VIEW_STATEMENT = new StatementType();
private static final Pattern PLSQL_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACE)?(\s(NON)?EDITIONABLE)?\s(FUNCTION|PROCEDURE|TYPE|TRIGGER)");
private static final Pattern DECLARE_BEGIN_REGEX = Pattern.compile("^DECLARE|BEGIN|WITH");
private static final StatementType PLSQL_STATEMENT = new StatementType();
private static final Pattern JAVA_REGEX = Pattern.compile("^CREATE(\sOR\sREPLACE)?(\sAND\s(RESOLVE|COMPILE))?(\sNOFORCE)?\sJAVA\s(SOURCE|RESOURCE|CLASS)");
private static final StatementType PLSQL_JAVA_STATEMENT = new StatementType();
private static Pattern toRegex(String... commands) {
return Pattern.compile(toRegexPattern(commands));
}
private static String toRegexPattern(String... commands) {
return "^(" + StringUtils.arrayToDelimitedString("|", commands) + ")";
}
public DMParser(Configuration configuration, ParsingContext parsingContext) {
super(configuration, parsingContext, 3);
}
@Override
protected ParsedSqlStatement createStatement(PeekingReader reader, Recorder recorder,
int statementPos, int statementLine, int statementCol,
int nonCommentPartPos, int nonCommentPartLine, int nonCommentPartCol,
StatementType statementType, boolean canExecuteInTransaction,
Delimiter delimiter, String sql) throws IOException {
if (PLSQL_VIEW_STATEMENT == statementType) {
sql = sql.trim();
// Strip extra semicolon to avoid issues with WITH statements containing PL/SQL
if (sql.endsWith(";")) {
sql = sql.substring(0, sql.length() - 1);
}
}
return super.createStatement(reader, recorder, statementPos, statementLine, statementCol,
nonCommentPartPos, nonCommentPartLine, nonCommentPartCol,
statementType, canExecuteInTransaction, delimiter, sql);
}
@Override
protected StatementType detectStatementType(String simplifiedStatement) {
if (PLSQL_PACKAGE_BODY_REGEX.matcher(simplifiedStatement).matches()) {
return PLSQL_PACKAGE_BODY_STATEMENT;
}
if (PLSQL_REGEX.matcher(simplifiedStatement).matches()
|| PLSQL_PACKAGE_DEFINITION_REGEX.matcher(simplifiedStatement).matches()
|| DECLARE_BEGIN_REGEX.matcher(simplifiedStatement).matches()) {
return PLSQL_STATEMENT;
}
if (JAVA_REGEX.matcher(simplifiedStatement).matches()) {
return PLSQL_JAVA_STATEMENT;
}
if (PLSQL_VIEW_REGEX.matcher(simplifiedStatement).matches()) {
return PLSQL_VIEW_STATEMENT;
}
return super.detectStatementType(simplifiedStatement);
}
@Override
protected boolean shouldDiscard(Token token, boolean nonCommentPartSeen) {
// Discard dangling PL/SQL / delimiters
return ("/".equals(token.getText()) && !nonCommentPartSeen) || super.shouldDiscard(token, nonCommentPartSeen);
}
@Override
protected void adjustDelimiter(ParserContext context, StatementType statementType) {
if (statementType == PLSQL_STATEMENT || statementType == PLSQL_VIEW_STATEMENT || statementType == PLSQL_JAVA_STATEMENT
|| statementType == PLSQL_PACKAGE_BODY_STATEMENT) {
context.setDelimiter(PLSQL_DELIMITER);
} else {
context.setDelimiter(Delimiter.SEMICOLON);
}
}
@Override
protected boolean shouldAdjustBlockDepth(ParserContext context, Token token) {
// Package bodies can have an unbalanced BEGIN without END in the initialisation section.
TokenType tokenType = token.getType();
if (context.getStatementType() == PLSQL_PACKAGE_BODY_STATEMENT && (TokenType.EOF == tokenType || TokenType.DELIMITER == tokenType)) {
return true;
}
// In DM, symbols { } affect the block depth in embedded Java code
if (token.getType() == TokenType.SYMBOL && context.getStatementType() == PLSQL_JAVA_STATEMENT) {
return true;
}
return super.shouldAdjustBlockDepth(context, token);
}
// These words increase the block depth - unless preceded by END (in which case the END will decrease the block depth)
private static final List CONTROL_FLOW_KEYWORDS = Arrays.asList("IF", "LOOP", "CASE");
@Override
protected void adjustBlockDepth(ParserContext context, List tokens, Token keyword, PeekingReader reader) throws IOException {
String keywordText = keyword.getText();
// In embedded Java code we judge the end of a class definition by the depth of braces.
// We ignore normal SQL keywords as Java code can contain arbitrary identifiers.
if (context.getStatementType() == PLSQL_JAVA_STATEMENT) {
if ("{".equals(keywordText)) {
context.increaseBlockDepth();
} else if ("}".equals(keywordText)) {
context.decreaseBlockDepth();
}
return;
}
int parensDepth = keyword.getParensDepth();
if ("BEGIN".equals(keywordText)
|| (CONTROL_FLOW_KEYWORDS.contains(keywordText) && !lastTokenIs(tokens, parensDepth, "END"))
|| ("TRIGGER".equals(keywordText) && lastTokenIs(tokens, parensDepth, "COMPOUND"))
|| doTokensMatchPattern(tokens, keyword, PLSQL_PACKAGE_BODY_REGEX)
|| doTokensMatchPattern(tokens, keyword, PLSQL_PACKAGE_DEFINITION_REGEX)
|| doTokensMatchPattern(tokens, keyword, PLSQL_TYPE_BODY_REGEX)
) {
context.increaseBlockDepth();
} else if ("END".equals(keywordText)) {
context.decreaseBlockDepth();
}
// Package bodies can have an unbalanced BEGIN without END in the initialisation section. This allows us
// to exit the package even though we are still at block depth 1 due to the BEGIN.
TokenType tokenType = keyword.getType();
if (context.getStatementType() == PLSQL_PACKAGE_BODY_STATEMENT && (TokenType.EOF == tokenType || TokenType.DELIMITER == tokenType) && context.getBlockDepth() == 1) {
context.decreaseBlockDepth();
return;
}
}
@Override
protected boolean isDelimiter(String peek, ParserContext context, int col) {
Delimiter delimiter = context.getDelimiter();
// Only consider alone-on-line delimiters (such as "/" for PL/SQL) if
// it's the first character on the line
if (delimiter.isAloneOnLine() && col > 1) {
return false;
}
if (col == 1 && "/".equals(peek.trim())) {
return true;
}
return super.isDelimiter(peek, context, col);
}
@Override
protected boolean isAlternativeStringLiteral(String peek) {
if (peek.length() < 3) {
return false;
}
// DM's quoted-literal syntax is introduced by q (case-insensitive) followed by a literal surrounded by
// any of !!, [], {}, (), <> provided the selected pair do not appear in the literal string; the others may do.
char firstChar = peek.charAt(0);
return (firstChar == 'q' || firstChar == 'Q') && peek.charAt(1) == ''';
}
@Override
protected Token handleAlternativeStringLiteral(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
reader.swallow(2);
String closeQuote = computeAlternativeCloseQuote((char) reader.read());
reader.swallowUntilExcluding(closeQuote);
reader.swallow(closeQuote.length());
return new Token(TokenType.STRING, pos, line, col, null, null, context.getParensDepth());
}
private String computeAlternativeCloseQuote(char specialChar) {
switch (specialChar) {
case '!':
return "!'";
case '[':
return "]'";
case '(':
return ")'";
case '{':
return "}'";
case '<':
return ">'";
default:
return specialChar + "'";
}
}
}
DMSchema:
package org.flywaydb.core.internal.database.dm; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.logging.Log; import org.flywaydb.core.api.logging.LogFactory; import org.flywaydb.core.internal.database.base.Schema; import org.flywaydb.core.internal.database.base.Table; import org.flywaydb.core.internal.jdbc.JdbcTemplate; import org.flywaydb.core.internal.util.StringUtils; import java.sql.SQLException; import java.util.*; import static org.flywaydb.core.internal.database.dm.DMSchema.ObjectType.*; public class DMSchema extends Schema{ private static final Log LOG = LogFactory.getLog(DMSchema.class); DMSchema(JdbcTemplate jdbcTemplate, DMDatabase database, String name) { super(jdbcTemplate, database, name); } public boolean isSystem() throws SQLException { return database.getSystemSchemas().contains(name); } boolean isDefaultSchemaForUser() throws SQLException { return name.equals(database.doGetCurrentUser()); } @Override protected boolean doExists() throws SQLException { return database.queryReturnsRows("SELECT * FROM ALL_USERS WHERe USERNAME = ?", name); } @Override protected boolean doEmpty() throws SQLException { return !supportedTypesExist(jdbcTemplate, database, this); } @Override protected void doCreate() throws SQLException { jdbcTemplate.execute("CREATE USER " + database.quote(name) + " IDENTIFIED BY " + database.quote("FFllyywwaayy00!!")); jdbcTemplate.execute("GRANT RESOURCE TO " + database.quote(name)); jdbcTemplate.execute("GRANT UNLIMITED TABLESPACE TO " + database.quote(name)); } @Override protected void doDrop() throws SQLException { jdbcTemplate.execute("DROP USER " + database.quote(name) + " CASCADE"); } @Override protected void doClean() throws SQLException { if (isSystem()) { throw new FlywayException("Clean not supported on DM for system schema " + database.quote(name) + "! " + "It must not be changed in any way except by running an DM-supplied script!"); } // Disable FBA for schema tables. 
if (database.isFlashbackDataArchiveAvailable()) { disableFlashbackArchiveForFbaTrackedTables(); } // Clean DM Locator metadata. if (database.isLocatorAvailable()) { cleanLocatorMetadata(); } // Get existing object types in the schema. Set objectTypeNames = getObjectTypeNames(jdbcTemplate, database, this); // Define the list of types to process, order is important. List objectTypesToClean = Arrays.asList( TRIGGER, QUEUE_TABLE, FILE_WATCHER, SCHEDULER_CHAIN, SCHEDULER_JOB, SCHEDULER_PROGRAM, SCHEDULE, RULE_SET, RULE, EVALUATION_CONTEXT, FILE_GROUP, MINING_MODEL, REWRITE_EQUIVALENCE, SQL_TRANSLATION_PROFILE, MATERIALIZED_VIEW, MATERIALIZED_VIEW_LOG, DIMENSION, VIEW, DOMAIN_INDEX, DOMAIN_INDEX_TYPE, TABLE, INDEX, CLUSTER, SEQUENCE, OPERATOR, FUNCTION, PROCEDURE, PACKAGE, CONTEXT, LIBRARY, TYPE, SYNONYM, JAVA_SOURCE, JAVA_CLASS, JAVA_RESOURCE, // Object types with sensitive information (passwords), skip intentionally, print warning if found. DATABASE_LINK, CREDENTIAL, // Unsupported types, print warning if found DATABASE_DESTINATION, SCHEDULER_GROUP, CUBE, CUBE_DIMENSION, CUBE_BUILD_PROCESS, MEASURE_FOLDER, // Undocumented types, print warning if found ASSEMBLY, JAVA_DATA ); for (ObjectType objectType : objectTypesToClean) { if (objectTypeNames.contains(objectType.getName())) { LOG.debug("Cleaning objects of type " + objectType + " ..."); objectType.dropObjects(jdbcTemplate, database, this); } } if (isDefaultSchemaForUser()) { jdbcTemplate.execute("PURGE RECYCLEBIN"); } } private void disableFlashbackArchiveForFbaTrackedTables() throws SQLException { boolean dbaViewAccessible = database.isPrivOrRoleGranted("SELECT ANY DICTIONARY") || database.isDataDictViewAccessible("DBA_FLASHBACK_ARCHIVE_TABLES"); if (!dbaViewAccessible && !isDefaultSchemaForUser()) { LOG.warn("Unable to check and disable Flashback Archive for tables in schema " + database.quote(name) + " by user "" + database.doGetCurrentUser() + "": DBA_FLASHBACK_ARCHIVE_TABLES is not accessible"); return; } boolean 
DM18orNewer = database.getVersion().isAtLeast("18"); String queryForFbaTrackedTables = "SELECT TABLE_NAME FROM " + (dbaViewAccessible ? "DBA_" : "USER_") + "FLASHBACK_ARCHIVE_TABLES WHERe OWNER_NAME = ?" + (DM18orNewer ? " AND STATUS='ENABLED'" : ""); List tableNames = jdbcTemplate.queryForStringList(queryForFbaTrackedTables, name); for (String tableName : tableNames) { jdbcTemplate.execute("ALTER TABLE " + database.quote(name, tableName) + " NO FLASHBACK ARCHIVE"); //wait until the tables disappear while (database.queryReturnsRows(queryForFbaTrackedTables + " AND TABLE_NAME = ?", name, tableName)) { try { LOG.debug("Actively waiting for Flashback cleanup on table: " + database.quote(name, tableName)); Thread.sleep(1000); } catch (InterruptedException e) { throw new FlywayException("Waiting for Flashback cleanup interrupted", e); } } } if (DM18orNewer) { while (database.queryReturnsRows("SELECt TABLE_NAME FROM ALL_TABLES WHERe OWNER = ?n" + " AND TABLE_NAME LIKE 'SYS_FBA_DDL_COLMAP_%'", name)) { try { LOG.debug("Actively waiting for Flashback colmap cleanup"); Thread.sleep(1000); } catch (InterruptedException e) { throw new FlywayException("Waiting for Flashback colmap cleanup interrupted", e); } } } } private boolean locatorMetadataExists() throws SQLException { return database.queryReturnsRows("SELECt * FROM ALL_SDO_GEOM_METADATA WHERe OWNER = ?", name); } private void cleanLocatorMetadata() throws SQLException { if (!locatorMetadataExists()) { return; } if (!isDefaultSchemaForUser()) { LOG.warn("Unable to clean DM Locator metadata for schema " + database.quote(name) + " by user "" + database.doGetCurrentUser() + "": unsupported operation"); return; } jdbcTemplate.getConnection().commit(); jdbcTemplate.execute("DELETe FROM USER_SDO_GEOM_METADATA"); jdbcTemplate.getConnection().commit(); } @Override protected DMTable[] doAllTables() throws SQLException { List tableNames = TABLE.getObjectNames(jdbcTemplate, database, this); DMTable[] tables = new 
DMTable[tableNames.size()]; for (int i = 0; i < tableNames.size(); i++) { tables[i] = new DMTable(jdbcTemplate, database, this, tableNames.get(i)); } return tables; } @Override public Table getTable(String tableName) { return new DMTable(jdbcTemplate, database, this, tableName); } public enum ObjectType { // Tables, including XML tables, except for nested tables, IOT overflow tables and other secondary objects. TABLE("TABLE", "CASCADE CONSTRAINTS PURGE") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { boolean referencePartitionedTablesExist = database.queryReturnsRows( "SELECt * FROM ALL_PART_TABLES WHERe OWNER = ? AND PARTITIONING_TYPE = 'REFERENCE'", schema.getName()); StringBuilder tablesQuery = new StringBuilder(); tablesQuery.append("WITH TABLES AS (n" + " SELECt TABLE_NAME, OWNERn" + " FROM ALL_TABLESn" + " WHERe OWNER = ?n" + " AND (IOT_TYPE IS NULL OR IOT_TYPE NOT LIKE '%OVERFLOW%')n" + " AND NESTED != 'YES'n" + " AND SECONDARY != 'Y'n"); tablesQuery.append(")n" + "SELECt t.TABLE_NAMEn" + "FROM TABLES tn"); // Reference partitioned tables should be dropped in child-to-parent order. 
if (referencePartitionedTablesExist) { tablesQuery.append(" LEFT JOIN ALL_PART_TABLES ptn" + " ON t.OWNER = pt.OWNERn" + " AND t.TABLE_NAME = pt.TABLE_NAMEn" + " AND pt.PARTITIONING_TYPE = 'REFERENCE'n" + " LEFT JOIN ALL_CONSTRAINTS fkn" + " ON pt.OWNER = fk.OWNERn" + " AND pt.TABLE_NAME = fk.TABLE_NAMEn" + " AND pt.REF_PTN_CONSTRAINT_NAME = fk.CONSTRAINT_NAMEn" + " AND fk.CONSTRAINT_TYPE = 'R'n" + " LEFT JOIN ALL_CONSTRAINTS pukn" + " ON fk.R_OWNER = puk.OWNERn" + " AND fk.R_CONSTRAINT_NAME = puk.CONSTRAINT_NAMEn" + " AND puk.CONSTRAINT_TYPE IN ('P', 'U')n" + " LEFT JOIN TABLES pn" + " ON puk.OWNER = p.OWNERn" + " AND puk.TABLE_NAME = p.TABLE_NAMEn" + "START WITH p.TABLE_NAME IS NULLn" + "CONNECT BY PRIOR t.TABLE_NAME = p.TABLE_NAMEn" + "ORDER BY LEVEL DESC"); } int n = 1; String[] params = new String[n]; Arrays.fill(params, schema.getName()); return jdbcTemplate.queryForStringList(tablesQuery.toString(), params); } }, // Queue tables, have related objects and should be dropped separately prior to other types. QUEUE_TABLE("QUEUE TABLE") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT QUEUE_TABLE FROM ALL_QUEUE_TABLES WHERe OWNER = ?", schema.getName() ); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_AQADM.DROP_QUEUE_TABLE('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, // Materialized view logs. 
MATERIALIZED_VIEW_LOG("MATERIALIZED VIEW LOG") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT MASTER FROM ALL_MVIEW_LOGS WHERe LOG_OWNER = ?", schema.getName() ); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "DROP " + this.getName() + " ON " + database.quote(schema.getName(), objectName); } }, // All indexes, except for domain indexes, should be dropped after tables (if any left). INDEX("INDEX") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT INDEX_NAME FROM ALL_INDEXES WHERe OWNER = ?" + //" AND INDEX_NAME NOT LIKE 'SYS_C%'"+ " AND INDEX_TYPE NOT LIKE '%DOMAIN%'", schema.getName() ); } }, // Domain indexes, have related objects and should be dropped separately prior to tables. DOMAIN_INDEX("INDEX", "FORCE") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT INDEX_NAME FROM ALL_INDEXES WHERe OWNER = ? AND INDEX_TYPE LIKE '%DOMAIN%'", schema.getName() ); } }, // Domain index types. DOMAIN_INDEX_TYPE("INDEXTYPE", "FORCE"), // Operators. OPERATOR("OPERATOR", "FORCE"), // Clusters. CLUSTER("CLUSTER", "INCLUDING TABLES CASCADE CONSTRAINTS"), // Views, including XML views. VIEW("VIEW", "CASCADE CONSTRAINTS"), // Materialized views, keep tables as they may be referenced. MATERIALIZED_VIEW("MATERIALIZED VIEW", "PRESERVE TABLE"), // Dimensions. 
DIMENSION("DIMENSION") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECt DIMENSION_NAME FROM ALL_DIMENSIONS WHERe OWNER = ?", schema.getName() ); } }, // Local synonyms. SYNONYM("SYNONYM", "FORCE"), // Sequences, no filtering for identity sequences, since they get dropped along with master tables. SEQUENCE("SEQUENCE"), // Procedures, functions, packages. PROCEDURE("PROCEDURE"), FUNCTION("FUNCTION"), PACKAGE("PACKAGE"), // Contexts, seen in DBA_CONTEXT view, may remain if DBA_CONTEXT is not accessible. CONTEXT("CONTEXT") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT NAMESPACE FROM " + database.dbaOrAll("CONTEXT") + " WHERe SCHEMA = ?", schema.getName() ); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "DROP " + this.getName() + " " + database.quote(objectName); // no owner } }, // Triggers of all types, should be dropped at first, because invalid DDL triggers may break the whole clean. TRIGGER("TRIGGER"), // Types. TYPE("TYPE", "FORCE"), // Java sources, classes, resources. JAVA_SOURCE("JAVA SOURCE"), JAVA_CLASS("JAVA CLASS"), JAVA_RESOURCE("JAVA RESOURCE"), // Libraries. LIBRARY("LIBRARY"), // Rewrite equivalences. REWRITE_EQUIVALENCE("REWRITE EQUIVALENCE") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN SYS.DBMS_ADVANCED_REWRITE.DROP_REWRITE_EQUIVALENCE('" + database.quote(schema.getName(), objectName) + "'); END;"; } }, // SQL translation profiles. 
SQL_TRANSLATION_PROFILE("SQL TRANSLATION PROFILE") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SQL_TRANSLATOR.DROP_PROFILE('" + database.quote(schema.getName(), objectName) + "'); END;"; } }, // Data mining models, have related objects, should be dropped prior to tables. MINING_MODEL("MINING MODEL") { @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return super.getObjectNames(jdbcTemplate, database, schema); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_DATA_MINING.DROP_MODEL('" + database.quote(schema.getName(), objectName) + "'); END;"; } }, // Scheduler objects. SCHEDULER_JOB("JOB") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_JOB('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, SCHEDULER_PROGRAM("PROGRAM") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_PROGRAM('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, SCHEDULE("SCHEDULE") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_SCHEDULE('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, SCHEDULER_CHAIN("CHAIN") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_CHAIN('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, 
FILE_WATCHER("FILE WATCHER") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_FILE_WATCHER('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, // Streams/rule objects. RULE_SET("RULE SET") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_RULE_ADM.DROP_RULE_SET('" + database.quote(schema.getName(), objectName) + "', DELETE_RULES => FALSE); END;"; } }, RULE("RULE") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_RULE_ADM.DROP_RULE('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, EVALUATION_CONTEXT("EVALUATION CONTEXT") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_RULE_ADM.DROP_EVALUATION_CONTEXT('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, FILE_GROUP("FILE GROUP") { @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_FILE_GROUP.DROP_FILE_GROUP('" + database.quote(schema.getName(), objectName) + "'); END;"; } }, // Database links and credentials, contain sensitive information (password) and hence not always can be re-created. // Intentionally skip them and let the clean callbacks handle them if needed. 
DATABASE_LINK("DATABASE LINK") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } @Override public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT DB_LINK FROM " + database.dbaOrAll("DB_LINKS") + " WHERe OWNER = ?", schema.getName() ); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "DROP " + this.getName() + " " + objectName; // db link name is case-insensitive and needs no owner } }, CREDENTIAL("CREDENTIAL") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_CREDENTIAL('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, // Some scheduler types, not supported yet. 
DATABASE_DESTINATION("DESTINATION") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_DATABASE_DESTINATION('" + database.quote(schema.getName(), objectName) + "'); END;"; } }, SCHEDULER_GROUP("SCHEDULER GROUP") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } @Override public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "BEGIN DBMS_SCHEDULER.DROP_GROUP('" + database.quote(schema.getName(), objectName) + "', FORCE => TRUE); END;"; } }, // OLAP objects, not supported yet. CUBE("CUBE") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } }, CUBE_DIMENSION("CUBE DIMENSION") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } }, CUBE_BUILD_PROCESS("CUBE BUILD PROCESS") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName()), "cube build processes"); } }, MEASURE_FOLDER("MEASURE FOLDER") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } }, // Undocumented objects. 
ASSEMBLY("ASSEMBLY") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName()), "assemblies"); } }, JAVA_DATA("JAVA DATA") { @Override public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) { super.warnUnsupported(database.quote(schema.getName())); } }, // SYS-owned objects, cannot be dropped when a schema gets cleaned, simply ignore them. CAPTURE("CAPTURE"), APPLY("APPLY"), DIRECTORY("DIRECTORY"), RESOURCE_PLAN("RESOURCE PLAN"), CONSUMER_GROUP("CONSUMER GROUP"), JOB_CLASS("JOB CLASS"), WINDOWS("WINDOW"), EDITION("EDITION"), AGENT_DESTINATION("DESTINATION"), UNIFIED_AUDIT_POLICY("UNIFIED AUDIT POLICY"); private final String name; private final String dropOptions; ObjectType(String name, String dropOptions) { this.name = name; this.dropOptions = dropOptions; } ObjectType(String name) { this(name, ""); } public String getName() { return name; } @Override public String toString() { return super.toString().replace('_', ' '); } public List getObjectNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { return jdbcTemplate.queryForStringList( "SELECT DISTINCT OBJECT_NAME FROM ALL_OBJECTS WHERe OWNER = ? AND OBJECT_TYPE = ?", schema.getName(), this.getName() ); } public String generateDropStatement(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String objectName) { return "DROP " + this.getName() + " " + database.quote(schema.getName(), objectName) + (StringUtils.hasText(dropOptions) ? 
" " + dropOptions : ""); } public void dropObjects(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { for (String objectName : getObjectNames(jdbcTemplate, database, schema)) { jdbcTemplate.execute(generateDropStatement(jdbcTemplate, database, schema, objectName)); } } private void warnUnsupported(String schemaName, String typeDesc) { LOG.warn("Unable to clean " + typeDesc + " for schema " + schemaName + ": unsupported operation"); } private void warnUnsupported(String schemaName) { warnUnsupported(schemaName, this.toString().toLowerCase() + "s"); } public static Set getObjectTypeNames(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { // 某模式下所有表名 需要DBA权限 AND SEGMENT_NAME LIKE 'CD_%'n" String query ="select SEGMENT_NAME as tbName from dba_segments where segment_type='TABLE' and OWNER =? "; int n = 1; String[] params = new String[n]; Arrays.fill(params, schema.getName()); return new HashSet<>(jdbcTemplate.queryForStringList(query, params)); } public static boolean supportedTypesExist(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema) throws SQLException { Set existingTypeNames = new HashSet<>(getObjectTypeNames(jdbcTemplate, database, schema)); return !existingTypeNames.isEmpty(); } } }
DMSqlScriptExecutor:
/**
 * DM (Dameng)-specific SQL script executor.
 * <p>
 * Currently inherits all execution behavior unchanged from
 * {@link DefaultSqlScriptExecutor}; it exists as a dedicated type so that
 * DM-specific statement handling can be added later without touching the
 * factory wiring in {@code DatabaseFactory}.
 */
public class DMSqlScriptExecutor extends DefaultSqlScriptExecutor {
    /**
     * Creates a new DM SQL script executor.
     *
     * @param jdbcTemplate the JDBC template used to execute the script's statements
     */
    public DMSqlScriptExecutor(JdbcTemplate jdbcTemplate) {
        super(jdbcTemplate);
    }
}
DMTable:
package org.flywaydb.core.internal.database.dm;

import org.flywaydb.core.internal.database.base.Table;
import org.flywaydb.core.internal.jdbc.JdbcTemplate;

import java.sql.SQLException;

/**
 * DM (Dameng)-specific table implementation.
 * <p>
 * Uses Oracle-style DDL/locking syntax throughout — this relies on DM's
 * Oracle compatibility mode; TODO confirm against the target DM version.
 */
public class DMTable extends Table {
    /**
     * Creates a new DM table.
     *
     * @param jdbcTemplate the JDBC template for executing statements against this table
     * @param database     the database this table lives in
     * @param schema       the schema this table lives in
     * @param name         the table name
     */
    public DMTable(JdbcTemplate jdbcTemplate, DMDatabase database, DMSchema schema, String name) {
        super(jdbcTemplate, database, schema, name);
    }

    @Override
    protected void doDrop() throws SQLException {
        // CASCADE CONSTRAINTS also drops foreign keys referencing this table;
        // PURGE bypasses the recycle bin (Oracle-compatible syntax — assumed to work on DM).
        jdbcTemplate.execute("DROP TABLE " + database.quote(schema.getName(), name) + " CASCADE CONSTRAINTS PURGE");
    }

    @Override
    protected boolean doExists() throws SQLException {
        // Catalog is null: DM identifies tables by schema + name only here.
        return exists(null, schema, name);
    }

    @Override
    protected void doLock() throws SQLException {
        // Exclusive lock so concurrent Flyway instances serialize on the schema history table.
        jdbcTemplate.execute("LOCK TABLE " + this + " IN EXCLUSIVE MODE");
    }
}
DMResults:
package org.flywaydb.core.internal.database.dm;
import org.flywaydb.core.internal.jdbc.Results;
/**
 * DM (Dameng)-specific marker subclass of {@link Results}.
 * Adds no behavior of its own; exists so DM-specific result handling
 * can be introduced later without changing call sites.
 */
public class DMResults extends Results {
}
在数据库工厂类 DatabaseFactory 中增加对达梦(DM)数据库的支持:
DatabaseFactory:
package org.flywaydb.core.internal.database;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.configuration.Configuration;
import org.flywaydb.core.api.logging.Log;
import org.flywaydb.core.api.logging.LogFactory;
import org.flywaydb.core.internal.database.base.Database;
import org.flywaydb.core.internal.database.cockroachdb.CockroachDBDatabase;
import org.flywaydb.core.internal.database.cockroachdb.CockroachDBParser;
import org.flywaydb.core.internal.database.cockroachdb.CockroachDBRetryingStrategy;
import org.flywaydb.core.internal.database.db2.DB2Database;
import org.flywaydb.core.internal.database.db2.DB2Parser;
import org.flywaydb.core.internal.database.derby.DerbyDatabase;
import org.flywaydb.core.internal.database.derby.DerbyParser;
import org.flywaydb.core.internal.database.dm.DMDatabase;
import org.flywaydb.core.internal.database.dm.DMParser;
import org.flywaydb.core.internal.database.dm.DMSqlScriptExecutor;
import org.flywaydb.core.internal.database.firebird.FirebirdDatabase;
import org.flywaydb.core.internal.database.firebird.FirebirdParser;
import org.flywaydb.core.internal.database.h2.H2Database;
import org.flywaydb.core.internal.database.h2.H2Parser;
import org.flywaydb.core.internal.database.hsqldb.HSQLDBDatabase;
import org.flywaydb.core.internal.database.hsqldb.HSQLDBParser;
import org.flywaydb.core.internal.database.informix.InformixDatabase;
import org.flywaydb.core.internal.database.informix.InformixParser;
import org.flywaydb.core.internal.database.mysql.MySQLDatabase;
import org.flywaydb.core.internal.database.mysql.MySQLParser;
import org.flywaydb.core.internal.database.oracle.OracleDatabase;
import org.flywaydb.core.internal.database.oracle.OracleParser;
import org.flywaydb.core.internal.database.oracle.OracleSqlScriptExecutor;
import org.flywaydb.core.internal.database.postgresql.PostgreSQLDatabase;
import org.flywaydb.core.internal.database.postgresql.PostgreSQLParser;
import org.flywaydb.core.internal.database.redshift.RedshiftDatabase;
import org.flywaydb.core.internal.database.redshift.RedshiftParser;
import org.flywaydb.core.internal.database.saphana.SAPHANADatabase;
import org.flywaydb.core.internal.database.saphana.SAPHANAParser;
import org.flywaydb.core.internal.database.snowflake.SnowflakeDatabase;
import org.flywaydb.core.internal.database.snowflake.SnowflakeParser;
import org.flywaydb.core.internal.database.sqlite.SQLiteDatabase;
import org.flywaydb.core.internal.database.sqlite.SQLiteParser;
import org.flywaydb.core.internal.database.sqlserver.SQLServerDatabase;
import org.flywaydb.core.internal.database.sqlserver.SQLServerParser;
import org.flywaydb.core.internal.database.sybasease.SybaseASEDatabase;
import org.flywaydb.core.internal.database.sybasease.SybaseASEParser;
import org.flywaydb.core.internal.jdbc.DatabaseType;
import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
import org.flywaydb.core.internal.jdbc.JdbcTemplate;
import org.flywaydb.core.internal.parser.Parser;
import org.flywaydb.core.internal.parser.ParsingContext;
import org.flywaydb.core.internal.resource.LoadableResource;
import org.flywaydb.core.internal.resource.ResourceProvider;
import org.flywaydb.core.internal.sqlscript.*;
import java.sql.Connection;
import static org.flywaydb.core.internal.sqlscript.SqlScriptMetadata.getMetadataResource;
/**
 * Factory for obtaining the correct {@link Database}, {@link Parser} and
 * {@link SqlScriptExecutor} implementation for the database product behind the
 * current connection. DM (Dameng) support is wired in alongside the databases
 * Flyway supports out of the box.
 */
public class DatabaseFactory {
    private static final Log LOG = LogFactory.getLog(DatabaseFactory.class);

    // Utility class: prevent instantiation.
    private DatabaseFactory() {
        //Do nothing
    }

    /**
     * Initializes the appropriate Database implementation for the product used by the data source.
     *
     * @param configuration         the Flyway configuration
     * @param printInfo             whether to log the connection URL and product name
     * @param jdbcConnectionFactory source of JDBC connections and product metadata
     * @return the Database instance matching the detected product
     */
    public static Database createDatabase(Configuration configuration, boolean printInfo,
                                          JdbcConnectionFactory jdbcConnectionFactory
    ) {
        OracleDatabase.enableTnsnamesOraSupport();
        // Consistency fix: DMDatabase declares the same TNS hook as OracleDatabase
        // but it was never invoked. It only maps the TNS_ADMIN environment variable
        // to a system property, so calling it unconditionally is safe.
        DMDatabase.enableTnsnamesOraSupport();

        String databaseProductName = jdbcConnectionFactory.getProductName();
        if (printInfo) {
            LOG.info("Database: " + jdbcConnectionFactory.getJdbcUrl() + " (" + databaseProductName + ")");
            LOG.debug("Driver : " + jdbcConnectionFactory.getDriverInfo());
        }

        DatabaseType databaseType = jdbcConnectionFactory.getDatabaseType();
        Database database = createDatabase(databaseType, configuration, jdbcConnectionFactory
        );

        // Warn rather than fail when the configured default schema cannot take effect.
        String intendedCurrentSchema = configuration.getDefaultSchema();
        if (!database.supportsChangingCurrentSchema() && intendedCurrentSchema != null) {
            LOG.warn(databaseProductName + " does not support setting the schema for the current session. " +
                    "Default schema will NOT be changed to " + intendedCurrentSchema + " !");
        }

        return database;
    }

    /**
     * Maps a {@link DatabaseType} to its concrete {@link Database} implementation.
     *
     * @throws FlywayException if the database type is not supported
     */
    private static Database createDatabase(DatabaseType databaseType, Configuration configuration,
                                           JdbcConnectionFactory jdbcConnectionFactory
    ) {
        switch (databaseType) {
            case COCKROACHDB:
                return new CockroachDBDatabase(configuration, jdbcConnectionFactory
                );
            case DB2:
                return new DB2Database(configuration, jdbcConnectionFactory
                );
            case DERBY:
                return new DerbyDatabase(configuration, jdbcConnectionFactory
                );
            case FIREBIRD:
                return new FirebirdDatabase(configuration, jdbcConnectionFactory
                );
            case H2:
                return new H2Database(configuration, jdbcConnectionFactory
                );
            case DM:
                // DM (Dameng) support added for this project.
                return new DMDatabase(configuration, jdbcConnectionFactory);
            case HSQLDB:
                return new HSQLDBDatabase(configuration, jdbcConnectionFactory
                );
            case INFORMIX:
                return new InformixDatabase(configuration, jdbcConnectionFactory
                );
            case MARIADB:
            case MYSQL:
                return new MySQLDatabase(configuration, jdbcConnectionFactory
                );
            case ORACLE:
                return new OracleDatabase(configuration, jdbcConnectionFactory
                );
            case POSTGRESQL:
                return new PostgreSQLDatabase(configuration, jdbcConnectionFactory
                );
            case REDSHIFT:
                return new RedshiftDatabase(configuration, jdbcConnectionFactory
                );
            case SNOWFLAKE:
                return new SnowflakeDatabase(configuration, jdbcConnectionFactory
                );
            case SQLITE:
                return new SQLiteDatabase(configuration, jdbcConnectionFactory
                );
            case SAPHANA:
                return new SAPHANADatabase(configuration, jdbcConnectionFactory
                );
            case SQLSERVER:
                return new SQLServerDatabase(configuration, jdbcConnectionFactory
                );
            case SYBASEASE_JCONNECT:
            case SYBASEASE_JTDS:
                return new SybaseASEDatabase(configuration, jdbcConnectionFactory
                );
            default:
                throw new FlywayException("Unsupported Database: " + databaseType.name());
        }
    }

    /**
     * Creates a factory that produces {@link SqlScript}s parsed with the
     * product-specific {@link Parser}.
     */
    public static SqlScriptFactory createSqlScriptFactory(final JdbcConnectionFactory jdbcConnectionFactory,
                                                          final Configuration configuration,
                                                          final ParsingContext parsingContext) {
        // NOTE: the database type is resolved lazily inside createParser(), so no
        // local copy is needed here (removed an unused local variable).
        return new SqlScriptFactory() {
            @Override
            public SqlScript createSqlScript(LoadableResource resource, boolean mixed, ResourceProvider resourceProvider) {
                return new ParserSqlScript(createParser(jdbcConnectionFactory, configuration, parsingContext), resource, getMetadataResource(resourceProvider, resource), mixed);
            }
        };
    }

    /**
     * Maps a {@link DatabaseType} to its concrete {@link Parser} implementation.
     *
     * @throws FlywayException if the database type is not supported
     */
    private static Parser createParser(JdbcConnectionFactory jdbcConnectionFactory, Configuration configuration, ParsingContext parsingContext) {
        final DatabaseType databaseType = jdbcConnectionFactory.getDatabaseType();
        switch (databaseType) {
            case COCKROACHDB:
                return new CockroachDBParser(configuration, parsingContext);
            case DB2:
                return new DB2Parser(configuration, parsingContext);
            case DERBY:
                return new DerbyParser(configuration, parsingContext);
            case FIREBIRD:
                return new FirebirdParser(configuration, parsingContext);
            case H2:
                return new H2Parser(configuration, parsingContext);
            case HSQLDB:
                return new HSQLDBParser(configuration, parsingContext);
            case INFORMIX:
                return new InformixParser(configuration, parsingContext);
            case MARIADB:
            case MYSQL:
                return new MySQLParser(configuration, parsingContext);
            case ORACLE:
                return new OracleParser(configuration, parsingContext);
            case POSTGRESQL:
                return new PostgreSQLParser(configuration, parsingContext);
            case REDSHIFT:
                return new RedshiftParser(configuration, parsingContext);
            case SQLITE:
                return new SQLiteParser(configuration, parsingContext);
            case DM:
                // DM (Dameng) support added for this project.
                return new DMParser(configuration, parsingContext);
            case SAPHANA:
                return new SAPHANAParser(configuration, parsingContext);
            case SNOWFLAKE:
                return new SnowflakeParser(configuration, parsingContext);
            case SQLSERVER:
                return new SQLServerParser(configuration, parsingContext);
            case SYBASEASE_JCONNECT:
            case SYBASEASE_JTDS:
                return new SybaseASEParser(configuration, parsingContext);
            default:
                throw new FlywayException("Unsupported Database: " + databaseType.name());
        }
    }

    /**
     * Creates a factory for product-specific SQL script executors.
     * Oracle and DM get dedicated executors; every other product falls back to
     * {@link DefaultSqlScriptExecutor}.
     */
    public static SqlScriptExecutorFactory createSqlScriptExecutorFactory(
            final JdbcConnectionFactory jdbcConnectionFactory
    ) {
        final DatabaseType databaseType = jdbcConnectionFactory.getDatabaseType();

        if (DatabaseType.ORACLE == databaseType) {
            return new SqlScriptExecutorFactory() {
                @Override
                public SqlScriptExecutor createSqlScriptExecutor(Connection connection
                ) {
                    return new OracleSqlScriptExecutor(new JdbcTemplate(connection, databaseType)
                    );
                }
            };
        }

        if (DatabaseType.DM == databaseType) {
            return new SqlScriptExecutorFactory() {
                @Override
                public SqlScriptExecutor createSqlScriptExecutor(Connection connection) {
                    return new DMSqlScriptExecutor(new JdbcTemplate(connection, databaseType)
                    );
                }
            };
        }

        return new SqlScriptExecutorFactory() {
            @Override
            public SqlScriptExecutor createSqlScriptExecutor(Connection connection
            ) {
                return new DefaultSqlScriptExecutor(new JdbcTemplate(connection, databaseType)
                );
            }
        };
    }

    /**
     * Chooses the execution strategy for the given connection: CockroachDB needs
     * a retrying strategy; everything else (including DM) uses the default.
     *
     * @param connection may be null, in which case the default strategy is returned
     */
    public static DatabaseExecutionStrategy createExecutionStrategy(Connection connection) {
        if (connection == null) {
            return new DefaultExecutionStrategy();
        }

        DatabaseType databaseType = DatabaseType.fromJdbcConnection(connection);
        switch (databaseType) {
            case COCKROACHDB:
                return new CockroachDBRetryingStrategy();
            default:
                return new DefaultExecutionStrategy();
        }
    }
}
flyway配置:
spring:
  datasource:
    driver-class-name: dm.jdbc.driver.DmDriver
    url: jdbc:dm://${DB_URL:127.0.0.1:5236}/asset
    username: ASSET
    password: 123456789
  flyway:
    locations: classpath:init_dameng
    clean-disabled: true
    baseline-on-migrate: true
    table: flyway_asset
数据库脚本路径:
需要注意的是,该修改只实现了简单的迁移功能:
PS: 在达梦中,需要在具有 DBA 角色的模式(用户)下才可以使用。建议先执行如下脚本,请依据实际情况自行修改:
CREATE TABLESPACE ASSET DATAFILE 'ASSET.DBF' SIZE 128; CREATE USER ASSET IDENTIFIED BY "123456789" DEFAULT TABLESPACE ASSET; grant DBA to ASSET;
最终效果:
也可以直接下载我提供的、已支持达梦数据库的 flyway jar 包:flyway支持达梦数据库版本jar包-Java文档类资源-CSDN下载



