Commit 45703a64 authored by godkaikai's avatar godkaikai

优化SQL生成

parent 64050869
......@@ -124,8 +124,7 @@ public class DataBaseServiceImpl extends SuperServiceImpl<DataBaseMapper, DataBa
DataBase dataBase = getById(id);
Asserts.checkNotNull(dataBase,"该数据源不存在!");
Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
List<Column> columns = driver.listColumns(schemaName, tableName);
Table table = Table.build(tableName, schemaName, columns);
Table table = driver.getTable(schemaName, tableName);
SqlGeneration sqlGeneration = new SqlGeneration();
sqlGeneration.setFlinkSqlCreate(table.getFlinkTableSql(dataBase.getName(),driver.getFlinkColumnTypeConversion(),dataBase.getFlinkConfig()));
sqlGeneration.setSqlSelect(table.getSqlSelect(dataBase.getName()));
......
......@@ -82,7 +82,11 @@ public class Table implements Serializable, Comparable<Table> {
if (i > 0) {
sb.append(",");
}
sb.append(columns.get(i).getName() + " " + type + "\n");
sb.append(columns.get(i).getName() + " " + type);
if (Asserts.isNotNullString(columns.get(i).getComment())) {
sb.append(" COMMENT '"+columns.get(i).getComment() + "'");
}
sb.append("\n");
if (columns.get(i).isKeyFlag()) {
pks.add(columns.get(i).getName());
}
......@@ -99,7 +103,11 @@ public class Table implements Serializable, Comparable<Table> {
sb.append(" ,");
sb.append(pksb);
}
sb.append(") WITH (\n");
sb.append(")");
if(Asserts.isNotNullString(comment)){
sb.append(" COMMENT '"+comment+"'\n");
}
sb.append(" WITH (\n");
sb.append(getFlinkTableWith(flinkConfig));
sb.append("\n);\n");
return sb.toString();
......
......@@ -42,6 +42,21 @@ public abstract class AbstractDriver implements Driver {
return listTables(schema).stream().peek(table -> table.setColumns(listColumns(schema,table.getName()))).sorted().collect(Collectors.toList());
}
@Override
public Table getTable(String schemaName, String tableName) {
    // Look up a single table by name within the given schema.
    // Improvement: stop scanning once a match is found (the old loop walked
    // the whole list and kept the LAST match; table names are expected to be
    // unique per schema, so first == last).
    Table table = null;
    for (Table item : listTables(schemaName)) {
        if (Asserts.isEquals(item.getName(), tableName)) {
            table = item;
            break;
        }
    }
    // Columns are loaded lazily, only for the matched table.
    if (Asserts.isNotNull(table)) {
        table.setColumns(listColumns(schemaName, table.getName()));
    }
    // May return null when the table does not exist — callers must check.
    return table;
}
@Override
public boolean existTable(Table table){
return listTables(table.getSchema()).stream().anyMatch(tableItem -> Asserts.isEquals(tableItem.getName(),table.getName()));
......
......@@ -214,6 +214,26 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
}
}
@Override
public String getCreateTableSql(Table table) {
    // Retrieves the table's DDL via the dialect's "show create table" query
    // (see IDBQuery.createTableSql / createTableName).
    String createTable = null;
    String createTableSql = getDBQuery().createTableSql(table.getSchema(), table.getName());
    // try-with-resources guarantees statement/result-set cleanup even when
    // executeQuery throws, replacing the manual close(...) in finally.
    try (PreparedStatement preparedStatement = conn.prepareStatement(createTableSql);
         ResultSet results = preparedStatement.executeQuery()) {
        if (results.next()) {
            createTable = results.getString(getDBQuery().createTableName());
        }
    } catch (Exception e) {
        // NOTE(review): errors are swallowed and null is returned, mirroring
        // the surrounding code style — consider logging via a proper logger
        // or propagating instead of printStackTrace.
        e.printStackTrace();
    }
    // Null when the query failed or produced no rows.
    return createTable;
}
@Override
public String getDropTableSql(Table table) {
StringBuilder sb = new StringBuilder();
......
......@@ -67,6 +67,8 @@ public interface Driver {
/**
 * Lists all tables in the given schema with their column metadata populated.
 */
List<Table> getTablesAndColumns(String schemaName);
/**
 * Returns the named table with columns populated, or null if it does not exist.
 */
Table getTable(String schemaName, String tableName);
/**
 * Returns true when a table with the same name exists in the table's schema.
 */
boolean existTable(Table table);
/**
 * Creates the given table in the target database; implementation-specific DDL.
 */
boolean createTable(Table table) throws Exception;
......
......@@ -11,6 +11,17 @@ import java.sql.SQLException;
**/
public abstract class AbstractDBQuery implements IDBQuery {
@Override
public String createTableSql(String schemaName, String tableName) {
    // Default DDL-introspection query (MySQL-style); dialects may override.
    return String.format("show create table %s.%s", schemaName, tableName);
}
@Override
public String createTableName() {
    // Result-set column label that holds the DDL text in the
    // "show create table" output (MySQL-style header "Create Table").
    return "Create Table";
}
@Override
public String[] columnCustom() {
return null;
......
......@@ -22,6 +22,14 @@ public interface IDBQuery {
* SQL that queries a table's column metadata.
*/
String columnsSql(String schemaName,String tableName);
/**
* SQL that retrieves the CREATE TABLE statement for a table.
*/
String createTableSql(String schemaName,String tableName);
/**
* Result-set column label holding the CREATE TABLE text.
*/
String createTableName();
/**
* Database / schema / organization name.
*/
......
......@@ -82,11 +82,6 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
return tableList;
}
@Override
public String getCreateTableSql(Table table) {
    // DDL generation is not implemented for ClickHouse — callers receive null.
    return null;
}
@Override
public List<SqlExplainResult> explain(String sql){
String initialSql = sql;
......
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
import com.dlink.metadata.convert.DorisTypeConvert;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.query.DorisQuery;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.model.Column;
import com.dlink.model.Table;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class DorisDriver extends AbstractJdbcDriver{
......@@ -39,50 +34,6 @@ public class DorisDriver extends AbstractJdbcDriver{
return "Doris";
}
@Override
public String getCreateTableSql(Table table) {
    // Builds a Doris (OLAP engine) CREATE TABLE statement from table metadata.
    StringBuilder sb = new StringBuilder();
    sb.append("CREATE TABLE " + table.getSchema() + "." + table.getName() + " (");
    List<Column> columns = table.getColumns();
    // Collect key columns in the same pass instead of re-scanning later.
    List<String> pks = new ArrayList<>();
    for (int i = 0; i < columns.size(); i++) {
        Column column = columns.get(i);
        if (i > 0) {
            sb.append(",");
        }
        sb.append(column.getName() + " " + getTypeConvert().convertToDB(column));
        // BUG FIX: the old code evaluated "YES".equals(column.isNullable()),
        // comparing a String against a boolean — always false, so every
        // column was emitted as NULL. Use the boolean flag directly.
        // NOTE(review): assumes isNullable() == true means the column accepts
        // NULLs — confirm against the Column model's semantics.
        if (column.isNullable()) {
            sb.append(" NULL ");
        } else {
            sb.append(" NOT NULL ");
        }
        if (Asserts.isNotNullString(column.getComment())) {
            sb.append(" COMMENT '" + column.getComment() + "' ");
        }
        if (column.isKeyFlag()) {
            pks.add(column.getName());
        }
    }
    sb.append(" ) ENGINE = olap ");
    if (pks.size() > 0) {
        sb.append("UNIQUE KEY( ");
        for (int i = 0; i < pks.size(); i++) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append(pks.get(i));
        }
        sb.append(") ");
    }
    if (Asserts.isNotNullString(table.getComment())) {
        sb.append("COMMENT '" + table.getComment() + "' ");
    }
    // BUG FIX: pks.get(0) threw IndexOutOfBoundsException for tables without
    // a key column; fall back to the first column for the distribution hash.
    String distributionColumn = pks.isEmpty() ? columns.get(0).getName() : pks.get(0);
    sb.append("DISTRIBUTED BY HASH(" + distributionColumn + ") BUCKETS 32 ");
    sb.append("PROPERTIES('replication_num' = '1')");
    return sb.toString();
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
return new HashMap<>();
......
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.convert.MySqlTypeConvert;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.MySqlQuery;
import com.dlink.model.Column;
import com.dlink.model.Table;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
......@@ -46,45 +41,6 @@ public class MySqlDriver extends AbstractJdbcDriver {
return "com.mysql.cj.jdbc.Driver";
}
@Override
public String getCreateTableSql(Table table) {
    // Builds a MySQL CREATE TABLE statement (InnoDB, utf8) from table metadata.
    StringBuilder sb = new StringBuilder();
    sb.append("CREATE TABLE `" + table.getName() + "` (");
    List<Column> columns = table.getColumns();
    // Collect key columns in the same pass instead of re-scanning later.
    List<String> pks = new ArrayList<>();
    for (int i = 0; i < columns.size(); i++) {
        Column column = columns.get(i);
        if (i > 0) {
            sb.append(",");
        }
        sb.append("`" + column.getName() + "` " + getTypeConvert().convertToDB(column));
        // BUG FIX: the old branches were inverted — nullable columns were
        // emitted as NOT NULL and vice versa.
        // NOTE(review): assumes isNullable() == true means the column accepts
        // NULLs — confirm against the Column model's semantics.
        if (column.isNullable()) {
            sb.append(" NULL");
        } else {
            sb.append(" NOT NULL");
        }
        // Consistency fix: use isNotNullString (as the Doris driver does) so
        // an empty comment string does not emit COMMENT ''.
        if (Asserts.isNotNullString(column.getComment())) {
            sb.append(" COMMENT '" + column.getComment() + "'");
        }
        if (column.isKeyFlag()) {
            pks.add(column.getName());
        }
    }
    if (pks.size() > 0) {
        sb.append(", PRIMARY KEY ( ");
        for (int i = 0; i < pks.size(); i++) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + pks.get(i) + "`");
        }
        sb.append(" ) USING BTREE");
    }
    // Null table comment degrades to an empty COMMENT clause, as before.
    String tableComment = table.getComment() != null ? table.getComment() : "";
    sb.append(") ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '"
            + tableComment + "' ROW_FORMAT = Dynamic;");
    return sb.toString();
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
HashMap<String,String> map = new HashMap<>();
......
package com.dlink.metadata.query;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* MySqlQuery
*
......
......@@ -41,11 +41,6 @@ public class PostgreSqlDriver extends AbstractJdbcDriver {
return "PostgreSql 数据库";
}
@Override
public String getCreateTableSql(Table table) {
    // DDL generation is not implemented for PostgreSQL — callers receive null.
    return null;
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
return new HashMap<>();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment