- 背景
- 依赖
- 代码
- 效果
- 总结
公司是微服务架构,项目就比较多。为了数据库的版本化管理引入了Liquibase(ps:这个东西我也没有研究,只是每次对自己项目数据库更新的时候提供一个Changelog.xml)。每次上线都要把自己的sql文件转成Changelog.xml,而且如果你的CreateTable字段特别多,那酸爽一次足以。sql文件是规范的,Changelog也是规范的。那应该可以让代码去完成这个事。
依赖两个包
jsqlparser:把sql文件格式化成结构化对象
liquibase-maven-plugin:结构化Change对象生成xml
代码(Maven 坐标):com.github.jsqlparser:jsqlparser:4.2;org.liquibase:liquibase-maven-plugin:4.6.2
import java.io.*;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import liquibase.change.AddColumnConfig;
import liquibase.change.ColumnConfig;
import liquibase.change.ConstraintsConfig;
import liquibase.change.core.AddColumnChange;
import liquibase.change.core.CreateIndexChange;
import liquibase.change.core.CreateTableChange;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.serializer.core.xml.XMLChangeLogSerializer;
import liquibase.util.StringUtil;

import net.sf.jsqlparser.parser.CCJSqlParser;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.parser.ParseException;
import net.sf.jsqlparser.statement.Statement;
import net.sf.jsqlparser.statement.alter.Alter;
import net.sf.jsqlparser.statement.alter.AlterExpression;
import net.sf.jsqlparser.statement.alter.AlterOperation;
import net.sf.jsqlparser.statement.create.index.CreateIndex;
import net.sf.jsqlparser.statement.create.table.CreateTable;
/**
 * Command-line utility that converts plain {@code .sql} files (CREATE TABLE,
 * CREATE INDEX and ALTER TABLE ... ADD statements) into a Liquibase changelog
 * XML file. jsqlparser parses the raw SQL into structured {@link Statement}
 * objects, which are then mapped onto Liquibase {@code Change} objects and
 * serialized with {@link XMLChangeLogSerializer}.
 *
 * <p>Supported start parameters:
 * <ul>
 *   <li>{@code -p <path>} — a .sql file or a directory to scan (defaults to
 *       the directory the jar runs from, see {@link #getPath()})</li>
 *   <li>{@code -d <name>} — database (catalog) name applied to addColumn changes</li>
 * </ul>
 */
public class GenerateLiquibaseXmlUtil {

    /** Raw command-line arguments; looked up on demand by {@link #getStartParam(String)}. */
    private final String[] args;

    /**
     * Stores the arguments and immediately runs the conversion.
     *
     * @param args command-line arguments, e.g. {@code -p /tmp/db -d mydb}
     */
    public GenerateLiquibaseXmlUtil(String[] args) {
        this.args = args;
        execute();
    }

    /**
     * Resolves the working path ({@code -p} parameter, falling back to the jar
     * location) and converts either the single .sql file it points to or every
     * .sql file directly inside the directory.
     */
    private void execute() {
        String pathParam = getStartParam("-p");
        String path = pathParam != null ? pathParam : getPath();
        System.out.println("当前路径:" + path);
        File file = new File(path);
        if (file.isFile()) {
            if (!file.getName().endsWith(".sql")) {
                System.out.println("未发现sql文件");
                return;
            }
            System.out.println("正在处理" + file.getName() + "...");
            generateLiquibaseXml(file);
            System.out.println("结束");
            return;
        }
        System.out.println("开始扫描当前目录下sql文件");
        File[] files = file.listFiles();
        if (files == null || files.length == 0) {
            System.out.println("当前目录下未发现sql文件");
            return;
        }
        Arrays.stream(files)
                .filter(item -> item.isFile() && item.getName().endsWith(".sql"))
                .forEach(item -> {
                    System.out.println("正在处理" + item.getName() + "...");
                    generateLiquibaseXml(item);
                });
        System.out.println("结束");
    }

    /**
     * Parses one .sql file and writes the generated changelog XML (same base
     * name, {@code .xml} extension) into the jar's directory.
     *
     * @param sqlFile the SQL file to convert
     */
    private void generateLiquibaseXml(File sqlFile) {
        String databaseName = getStartParam("-d");
        // try-with-resources: the original leaked both streams on every path
        try (FileInputStream in = new FileInputStream(sqlFile)) {
            List<Statement> statements = CCJSqlParserUtil.newParser(in).Statements().getStatements();
            DatabaseChangeLog databaseChangeLog = new DatabaseChangeLog();
            ChangeSet changeSet = new ChangeSet("v", "system", false, false, "", null, null, true, null, databaseChangeLog);
            for (Statement statement : statements) {
                System.out.println("正在转换:" + statement.toString());
                if (statement instanceof CreateTable) {
                    changeSet.addChange(toCreateTableChange((CreateTable) statement));
                } else if (statement instanceof CreateIndex) {
                    changeSet.addChange(toCreateIndexChange((CreateIndex) statement));
                } else if (statement instanceof Alter) {
                    addAlterChanges(changeSet, (Alter) statement, databaseName);
                } else {
                    System.out.println("暂时不支持的语句");
                }
            }
            databaseChangeLog.addChangeSet(changeSet);
            String newPath = getPath() + "/" + sqlFile.getName().replace(".sql", "") + ".xml";
            try (FileOutputStream out = new FileOutputStream(newPath)) {
                new XMLChangeLogSerializer().write(databaseChangeLog.getChangeSets(), out);
            }
        } catch (IOException | ParseException e) {
            // best-effort tool: report the failure and let the caller move on to the next file
            e.printStackTrace();
        }
    }

    /** Maps a parsed CREATE TABLE statement onto a Liquibase {@link CreateTableChange}. */
    private CreateTableChange toCreateTableChange(CreateTable createTable) {
        CreateTableChange change = new CreateTableChange();
        change.setTableName(createTable.getTable().getName());
        change.setSchemaName(createTable.getTable().getSchemaName());
        change.setColumns(createTable.getColumnDefinitions().stream().map(definition -> {
            ColumnConfig columnConfig = new ColumnConfig();
            columnConfig.setName(definition.getColumnName());
            columnConfig.setType(definition.getColDataType().toString());
            List<String> specs = definition.getColumnSpecs();
            if (hasAttr(specs, "auto_increment")) {
                columnConfig.setAutoIncrement(true);
            }
            ConstraintsConfig constraints = null;
            // MySQL-style inline "primary key" arrives as two separate spec tokens
            if (hasAttr(specs, "primary") && hasAttr(specs, "key")) {
                constraints = new ConstraintsConfig();
                constraints.setPrimaryKey(true);
            }
            // likewise "not null" is delivered as the tokens "not" and "null"
            if (hasAttr(specs, "not") && hasAttr(specs, "null")) {
                if (constraints == null) {
                    constraints = new ConstraintsConfig();
                }
                constraints.setNullable(false);
            }
            if (constraints != null) {
                columnConfig.setConstraints(constraints);
            }
            return columnConfig;
        }).collect(Collectors.toList()));
        return change;
    }

    /** Maps a parsed CREATE INDEX statement onto a Liquibase {@link CreateIndexChange}. */
    private CreateIndexChange toCreateIndexChange(CreateIndex createIndex) {
        CreateIndexChange change = new CreateIndexChange();
        change.setTableName(createIndex.getTable().getName());
        change.setIndexName(createIndex.getIndex().getName());
        List<AddColumnConfig> columns = createIndex.getIndex().getColumnsNames().stream()
                .map(columnName -> {
                    AddColumnConfig config = new AddColumnConfig();
                    config.setName(columnName);
                    return config;
                })
                .collect(Collectors.toList());
        change.setColumns(columns);
        return change;
    }

    /**
     * Translates every ADD expression of an ALTER TABLE statement into an
     * {@link AddColumnChange}. A schema qualifier on the table (e.g.
     * {@code test.TABLE}) overrides the {@code -d} database name for this
     * statement only.
     *
     * @param changeSet    target change set the changes are appended to
     * @param alter        parsed ALTER TABLE statement
     * @param databaseName default catalog name from the {@code -d} parameter (may be null)
     */
    private void addAlterChanges(ChangeSet changeSet, Alter alter, String databaseName) {
        String schemaName = alter.getTable().getSchemaName();
        // BUGFIX: the original assigned when the schema was EMPTY, wiping the -d value
        if (!StringUtil.isEmpty(schemaName)) {
            databaseName = schemaName;
        }
        for (AlterExpression expression : alter.getAlterExpressions()) {
            if (expression.getOperation() != AlterOperation.ADD) {
                continue;
            }
            AddColumnChange change = new AddColumnChange();
            change.setSchemaName("public"); // NOTE(review): hard-coded schema — confirm target DB
            change.setCatalogName(databaseName);
            change.setTableName(alter.getTable().getName());
            List<AddColumnConfig> columns = expression.getColDataTypeList().stream()
                    .map(dataType -> {
                        AddColumnConfig config = new AddColumnConfig();
                        config.setName(dataType.getColumnName());
                        config.setType(dataType.getColDataType().toString());
                        return config;
                    })
                    .collect(Collectors.toList());
            change.setColumns(columns);
            changeSet.addChange(change);
        }
    }

    /**
     * True when the (possibly null) column spec token list contains the given
     * attribute, ignoring case. Null-safe: jsqlparser returns null specs for
     * columns without any extra attributes.
     */
    private boolean hasAttr(List<String> columnSpecs, String attr) {
        return columnSpecs != null && columnSpecs.stream().anyMatch(attr::equalsIgnoreCase);
    }

    /**
     * Returns the value following the given flag in the argument list.
     *
     * @param paramKey the flag to look for, e.g. {@code "-p"}
     * @return the value after the flag, or {@code null} when the flag is
     *     absent or is the last argument
     */
    private String getStartParam(String paramKey) {
        for (int i = 0; i + 1 < args.length; i++) {
            if (args[i].equals(paramKey)) {
                return args[i + 1];
            }
        }
        return null;
    }

    /**
     * Resolves the directory this class runs from. When running from a jar,
     * the jar file name is stripped so the containing directory is returned.
     *
     * @return filesystem path of the code source location
     */
    public String getPath() {
        String path = GenerateLiquibaseXmlUtil.class.getProtectionDomain().getCodeSource().getLocation().getPath();
        if (path.startsWith("file:")) {
            path = path.substring("file:".length());
        }
        if (path.contains("jar")) {
            // drop the ".jar" suffix, then the jar file name itself
            path = path.substring(0, path.lastIndexOf("."));
            return path.substring(0, path.lastIndexOf("/"));
        }
        return path;
    }
}
效果
测试sql文件
alter table test.TEST_TABLE_NAME
add TEST_COLUMN varchar(200) null;
create index IDX_TEST_INDEX
on TEST_TABLE_NAME (TEST_COLUMN);
create table TEST_TABLE_NAME
(
ID bigint auto_increment
primary key,
TEST_COLUMN1 datetime(6) null,
TEST_COLUMN2 datetime(6) null,
TEST_COLUMN3 bigint not null,
TEST_COLUMN4 int not null,
TEST_COLUMN5 bigint not null,
TEST_COLUMN6 bigint not null,
TEST_COLUMN7 varchar(1000) null,
TEST_COLUMN8 varchar(200) null,
TEST_COLUMN9 varchar(20) null,
TEST_COLUMN10 int null,
TEST_COLUMN11 varchar(50) null,
TEST_COLUMN12 bigint null,
TEST_COLUMN13 varchar(20) null,
TEST_COLUMN14 varchar(20) null,
TEST_COLUMN15 int null,
TEST_COLUMN16 int null,
TEST_COLUMN17 int null,
TEST_COLUMN18 decimal(18,6) null,
TEST_COLUMN19 decimal(18,6) null,
TEST_COLUMN20 bit null,
TEST_COLUMN21 varchar(20) null,
TEST_COLUMN22 bit null,
TEST_COLUMN23 bit null,
TEST_COLUMN24 bigint null,
TEST_COLUMN25 bigint null,
TEST_COLUMN26 int null,
TEST_COLUMN27 int null,
TEST_COLUMN28 int null,
TEST_COLUMN29 varchar(50) null,
TEST_COLUMN30 bit null,
TEST_COLUMN31 bit null,
TEST_COLUMN32 varchar(2000) null,
TEST_COLUMN33 varchar(200) null
);
我这边给做成jar包了,所以直接java -jar
-p 指定路径。-d 数据库名。
总结
还是说仅提供一种思路吧,代码写得也比较糙。引入上述依赖之后,关键的处理只有 jsqlparser 中 Statement 到 Liquibase 中 AbstractChange 的转化。代码里也并没有覆盖所有情况,因为写得比较急,对 ColumnSpecs 的处理也非常勉强,哈哈,所以本文仅给遇到相关问题的同学提供一个思路。



