Commit cf972f62 authored by wenmo

metadata format

parent 2767b4b5
......@@ -23,8 +23,8 @@ public abstract class AbstractDriver implements Driver {
public abstract ITypeConvert getTypeConvert();
public boolean canHandle(String type){
return Asserts.isEqualsIgnoreCase(getType(),type);
public boolean canHandle(String type) {
return Asserts.isEqualsIgnoreCase(getType(), type);
}
public Driver setDriverConfig(DriverConfig config) {
......@@ -32,36 +32,36 @@ public abstract class AbstractDriver implements Driver {
return this;
}
public boolean isHealth(){
public boolean isHealth() {
return false;
}
public List<Schema> getSchemasAndTables(){
public List<Schema> getSchemasAndTables() {
return listSchemas().stream().peek(schema -> schema.setTables(listTables(schema.getName()))).sorted().collect(Collectors.toList());
}
public List<Table> getTablesAndColumns(String schema){
return listTables(schema).stream().peek(table -> table.setColumns(listColumns(schema,table.getName()))).sorted().collect(Collectors.toList());
public List<Table> getTablesAndColumns(String schema) {
return listTables(schema).stream().peek(table -> table.setColumns(listColumns(schema, table.getName()))).sorted().collect(Collectors.toList());
}
@Override
public Table getTable(String schemaName, String tableName) {
List<Table> tables = listTables(schemaName);
Table table = null;
for(Table item : tables){
if(Asserts.isEquals(item.getName(),tableName)){
for (Table item : tables) {
if (Asserts.isEquals(item.getName(), tableName)) {
table = item;
}
}
if(Asserts.isNotNull(table)) {
if (Asserts.isNotNull(table)) {
table.setColumns(listColumns(schemaName, table.getName()));
}
return table;
}
@Override
public boolean existTable(Table table){
return listTables(table.getSchema()).stream().anyMatch(tableItem -> Asserts.isEquals(tableItem.getName(),table.getName()));
public boolean existTable(Table table) {
return listTables(table.getSchema()).stream().anyMatch(tableItem -> Asserts.isEquals(tableItem.getName(), table.getName()));
}
}
\ No newline at end of file
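For orientation: the methods in this hunk are the dialect-independent half of the metadata SPI; a caller resolves a concrete driver, hands it a DriverConfig, and walks schemas and tables through it. A minimal usage sketch; the MysqlDriver subclass, the no-arg DriverConfig constructor, and the import paths are assumptions for illustration, not part of this diff:
import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
public class DriverUsageSketch {
    public static void main(String[] args) {
        DriverConfig config = new DriverConfig(); // hypothetical: real construction is not shown in this diff
        Driver driver = new MysqlDriver().setDriverConfig(config); // hypothetical subclass
        if (driver.canHandle("mysql")) { // canHandle matches case-insensitively, per the hunk above
            for (Schema schema : driver.getSchemasAndTables()) {
                for (Table table : schema.getTables()) {
                    System.out.println(schema.getName() + "." + table.getName());
                }
            }
        }
    }
}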
package com.dlink.metadata.query;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* AbstractDBQuery
*
......
package com.dlink.metadata.query;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* IDBQuery
*
......@@ -14,110 +11,137 @@ public interface IDBQuery {
* SQL that lists all databases
*/
String schemaAllSql();
/**
* SQL that lists table information
*/
String tablesSql(String schemaName);
/**
* SQL that lists table column information
*/
String columnsSql(String schemaName,String tableName);
String columnsSql(String schemaName, String tableName);
/**
* SQL that returns the CREATE TABLE statement
*/
String createTableSql(String schemaName,String tableName);
String createTableSql(String schemaName, String tableName);
/**
* Result column holding the CREATE TABLE statement
*/
String createTableName();
/**
* Database, schema, or organization name column
*/
String schemaName();
/**
* Catalog name column
*/
String catalogName();
/**
* Table name column
*/
String tableName();
/**
* Table comment column
*/
String tableComment();
/**
* Table type column
*/
String tableType();
/**
* Table engine column
*/
String engine();
/**
* Table options column
*/
String options();
/**
* Table row count column
*/
String rows();
/**
* Creation time column
*/
String createTime();
/**
* Update time column
*/
String updateTime();
/**
* Column name
*/
String columnName();
/**
* Column ordinal position
*/
String columnPosition();
/**
* Column type
*/
String columnType();
/**
* Column comment
*/
String columnComment();
/**
* Primary key flag column
*/
String columnKey();
/**
* Whether the primary key auto-increments
*/
String autoIncrement();
/**
* Default value column
*/
String defaultValue();
/**
* @return whether NULL is allowed
*/
String isNullable();
/**
* @return precision
*/
String precision();
/**
* @return scale
*/
String scale();
/**
* @return character set name
*/
String characterSet();
/**
* @return collation
*/
String collation();
/**
* Custom column name
*/
......
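In short, IDBQuery pairs each dialect-specific SQL string with the result-set column labels needed to read the answer back. A sketch of that consumption pattern, assuming a live java.sql.Connection; only the interface methods come from the code above, the rest is illustrative:
import com.dlink.metadata.query.IDBQuery;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
public class TableListerSketch {
    // Runs the dialect's table query and reads names via the label that IDBQuery reports.
    public List<String> listTableNames(Connection conn, IDBQuery dbQuery, String schemaName) throws SQLException {
        List<String> names = new ArrayList<>();
        try (PreparedStatement ps = conn.prepareStatement(dbQuery.tablesSql(schemaName));
             ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                names.add(rs.getString(dbQuery.tableName()));
            }
        }
        return names;
    }
}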
......@@ -2,12 +2,9 @@ package com.dlink.metadata.result;
import com.dlink.result.AbstractResult;
import com.dlink.result.IResult;
import lombok.Getter;
import lombok.Setter;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
......@@ -19,29 +16,31 @@ import java.util.List;
*/
public class JdbcSelectResult extends AbstractResult implements IResult {
private List<String> columns;
private List<LinkedHashMap<String,Object>> rowData;
private List<LinkedHashMap<String, Object>> rowData;
private Integer total;
private Integer page;
private Integer limit;
private static final String STATUS = "status";
private static final List<String> STATUS_COLUMN = new ArrayList<String>(){{ add("status"); }};
private static final List<String> STATUS_COLUMN = new ArrayList<String>() {{
add("status");
}};
public JdbcSelectResult() {
}
public static JdbcSelectResult buildResult(){
public static JdbcSelectResult buildResult() {
JdbcSelectResult result = new JdbcSelectResult();
result.setStartTime(LocalDateTime.now());
return result;
}
public void setStatusList(List<Object> statusList){
public void setStatusList(List<Object> statusList) {
this.setColumns(STATUS_COLUMN);
List<LinkedHashMap<String,Object>> dataList = new ArrayList<>();
for(Object item: statusList){
LinkedHashMap map = new LinkedHashMap<String,Object>();
map.put(STATUS,item);
List<LinkedHashMap<String, Object>> dataList = new ArrayList<>();
for (Object item : statusList) {
LinkedHashMap map = new LinkedHashMap<String, Object>();
map.put(STATUS, item);
dataList.add(map);
}
this.setRowData(dataList);
......
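setStatusList above projects a list of per-statement status values into a single-column result. A usage sketch; buildResult and setStatusList are real methods from the hunk, the wrapper class is illustrative:
import com.dlink.metadata.result.JdbcSelectResult;
import java.util.Arrays;
import java.util.List;
public class StatusResultSketch {
    public static JdbcSelectResult statusOf(List<Object> statusList) {
        JdbcSelectResult result = JdbcSelectResult.buildResult(); // stamps the start time
        result.setStatusList(statusList); // one row per statement under the single "status" column
        return result;
    }
    public static void main(String[] args) {
        // e.g. the update counts of three executed statements
        JdbcSelectResult result = statusOf(Arrays.<Object>asList(1, 0, 3));
        System.out.println(result);
    }
}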
......@@ -11,6 +11,7 @@ public interface IColumnType {
* 获取字段类型
*/
String getType();
/**
* 获取字段类型完整名
*/
......
......@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List;
public class Clickhouse20CreateTableStatement extends SQLCreateTableStatement {
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
private SQLOrderBy orderBy;
private SQLExpr partitionBy;
private SQLExpr primaryKey;
......
......@@ -20,7 +20,10 @@ import com.dlink.utils.LogUtil;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
......@@ -83,7 +86,7 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
}
@Override
public List<SqlExplainResult> explain(String sql){
public List<SqlExplainResult> explain(String sql) {
String initialSql = sql;
List<SqlExplainResult> sqlExplainResults = new ArrayList<>();
StringBuilder explain = new StringBuilder();
......@@ -91,35 +94,35 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
ResultSet results = null;
String current = null;
try {
sql = sql.replaceAll("(?i)if exists","");
sql = sql.replaceAll("(?i)if exists", "");
Clickhouse20StatementParser parser = new Clickhouse20StatementParser(sql);
List<SQLStatement> stmtList = new ArrayList<>();
parser.parseStatementList(stmtList, -1, null);
if (parser.getLexer().token() != Token.EOF) {
throw new ParserException("syntax error : " + sql);
}
for(SQLStatement item : stmtList){
for (SQLStatement item : stmtList) {
current = item.toString();
String type = item.getClass().getSimpleName();
if(!(item instanceof SQLSelectStatement)){
if(item instanceof Clickhouse20CreateTableStatement){
if (!(item instanceof SQLSelectStatement)) {
if (item instanceof Clickhouse20CreateTableStatement) {
Matcher m = Pattern.compile(",\\s*\\)").matcher(sql);
if (m.find()) {
sqlExplainResults.add(SqlExplainResult.fail(sql, "The last column of a table must not be followed by a comma!"));
break;
}
sqlExplainResults.add(checkCreateTable((Clickhouse20CreateTableStatement)item));
} else if(item instanceof SQLDropTableStatement){
sqlExplainResults.add(checkDropTable((SQLDropTableStatement)item,initialSql));
sqlExplainResults.add(checkCreateTable((Clickhouse20CreateTableStatement) item));
} else if (item instanceof SQLDropTableStatement) {
sqlExplainResults.add(checkDropTable((SQLDropTableStatement) item, initialSql));
} else {
sqlExplainResults.add(SqlExplainResult.success(type, current, explain.toString()));
}
continue;
}
preparedStatement = conn.prepareStatement("explain "+current);
preparedStatement = conn.prepareStatement("explain " + current);
results = preparedStatement.executeQuery();
while(results.next()){
explain.append(getTypeConvert().convertValue(results,"explain", "string")+"\r\n");
while (results.next()) {
explain.append(getTypeConvert().convertValue(results, "explain", "string") + "\r\n");
}
sqlExplainResults.add(SqlExplainResult.success(type, current, explain.toString()));
}
......@@ -131,34 +134,34 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
}
}
private SqlExplainResult checkCreateTable(Clickhouse20CreateTableStatement sqlStatement){
if(existTable(Table.build(sqlStatement.getTableName()))){
if(sqlStatement.isIfNotExists()){
private SqlExplainResult checkCreateTable(Clickhouse20CreateTableStatement sqlStatement) {
if (existTable(Table.build(sqlStatement.getTableName()))) {
if (sqlStatement.isIfNotExists()) {
return SqlExplainResult.success(sqlStatement.getClass().getSimpleName(), sqlStatement.toString(), null);
}else{
String schema = null == sqlStatement.getSchema() ? "" : sqlStatement.getSchema()+".";
return SqlExplainResult.fail(sqlStatement.toString(), "Table "+schema+sqlStatement.getTableName()+" already exists.");
} else {
String schema = null == sqlStatement.getSchema() ? "" : sqlStatement.getSchema() + ".";
return SqlExplainResult.fail(sqlStatement.toString(), "Table " + schema + sqlStatement.getTableName() + " already exists.");
}
}else{
} else {
return SqlExplainResult.success(sqlStatement.getClass().getSimpleName(), sqlStatement.toString(), null);
}
}
private SqlExplainResult checkDropTable(SQLDropTableStatement sqlStatement,String sql){
private SqlExplainResult checkDropTable(SQLDropTableStatement sqlStatement, String sql) {
SQLExprTableSource sqlExprTableSource = sqlStatement.getTableSources().get(0);
if(!existTable(Table.build(sqlExprTableSource.getTableName()))){
if(Pattern.compile("(?i)if exists").matcher(sql).find()){
if (!existTable(Table.build(sqlExprTableSource.getTableName()))) {
if (Pattern.compile("(?i)if exists").matcher(sql).find()) {
return SqlExplainResult.success(sqlStatement.getClass().getSimpleName(), sqlStatement.toString(), null);
}else{
return SqlExplainResult.fail(sqlStatement.toString(), "Table "+sqlExprTableSource.getSchema()+"."+sqlExprTableSource.getTableName()+" not exists.");
} else {
return SqlExplainResult.fail(sqlStatement.toString(), "Table " + sqlExprTableSource.getSchema() + "." + sqlExprTableSource.getTableName() + " not exists.");
}
}else{
} else {
return SqlExplainResult.success(sqlStatement.getClass().getSimpleName(), sqlStatement.toString(), null);
}
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
public Map<String, String> getFlinkColumnTypeConversion() {
return new HashMap<>();
}
}
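One detail in explain worth isolating: the parser evidently accepts a trailing comma in a CREATE TABLE column list (hence the guard), so the driver rejects it with a regex before sending anything to ClickHouse. The same check, self-contained:
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class TrailingCommaCheck {
    // Mirrors the guard in ClickHouseDriver.explain: a comma directly before the
    // closing parenthesis of the column list is reported as an error up front.
    static boolean hasTrailingComma(String sql) {
        Matcher m = Pattern.compile(",\\s*\\)").matcher(sql);
        return m.find();
    }
    public static void main(String[] args) {
        System.out.println(hasTrailingComma("CREATE TABLE t (a Int32, b String, )")); // true
        System.out.println(hasTrailingComma("CREATE TABLE t (a Int32, b String)"));   // false
    }
}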
......@@ -59,7 +59,7 @@ public class Clickhouse20CreateTableParser extends SQLCreateTableParser {
if (lexer.identifierEquals("SETTINGS")) {
lexer.nextToken();
for (;;) {
for (; ; ) {
SQLAssignItem item = this.exprParser.parseAssignItem();
item.setParent(ckStmt);
ckStmt.getSettings().add(item);
......
......@@ -28,11 +28,11 @@ import java.util.Arrays;
public class Clickhouse20ExprParser extends SQLExprParser {
private final static String[] AGGREGATE_FUNCTIONS;
private final static long[] AGGREGATE_FUNCTIONS_CODES;
private final static long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = { "AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER" };
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER"};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
......@@ -42,17 +42,17 @@ public class Clickhouse20ExprParser extends SQLExprParser {
}
}
public Clickhouse20ExprParser(String sql){
public Clickhouse20ExprParser(String sql) {
this(new Clickhouse20Lexer(sql));
this.lexer.nextToken();
}
public Clickhouse20ExprParser(String sql, SQLParserFeature... features){
public Clickhouse20ExprParser(String sql, SQLParserFeature... features) {
this(new Clickhouse20Lexer(sql, features));
this.lexer.nextToken();
}
public Clickhouse20ExprParser(Lexer lexer){
public Clickhouse20ExprParser(Lexer lexer) {
super(lexer);
this.aggregateFunctions = AGGREGATE_FUNCTIONS;
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
......
......@@ -38,7 +38,7 @@ public class Clickhouse20Lexer extends Lexer {
super.keywords = DEFAULT_KEYWORDS;
}
public Clickhouse20Lexer(String input, SQLParserFeature... features){
public Clickhouse20Lexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_KEYWORDS;
for (SQLParserFeature feature : features) {
......
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.sql.parser.*;
public class Clickhouse20StatementParser extends SQLStatementParser {
public Clickhouse20StatementParser(String sql) {
super (new Clickhouse20ExprParser(sql));
super(new Clickhouse20ExprParser(sql));
}
public Clickhouse20StatementParser(String sql, SQLParserFeature... features) {
super (new Clickhouse20ExprParser(sql, features));
super(new Clickhouse20ExprParser(sql, features));
}
public Clickhouse20StatementParser(Lexer lexer){
public Clickhouse20StatementParser(Lexer lexer) {
super(new Clickhouse20ExprParser(lexer));
}
......
package com.dlink.metadata.query;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* ClickHouseQuery
*
......@@ -22,8 +19,8 @@ public class ClickHouseQuery extends AbstractDBQuery {
@Override
public String columnsSql(String schemaName,String tableName) {
return "desc `"+tableName+"`";
public String columnsSql(String schemaName, String tableName) {
return "desc `" + tableName + "`";
}
@Override
......
......@@ -5,15 +5,15 @@ import com.alibaba.druid.sql.repository.SchemaRepository;
import com.alibaba.druid.sql.visitor.SchemaStatVisitor;
public class Click20SchemaStatVisitor extends SchemaStatVisitor implements Clickhouse20Visitor {
{
dbType = DbType.antspark;
}
{
dbType = DbType.antspark;
}
public Click20SchemaStatVisitor() {
super(DbType.antspark);
}
super(DbType.antspark);
}
public Click20SchemaStatVisitor(SchemaRepository repository) {
super(repository);
}
super(repository);
}
}
......@@ -35,7 +35,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
*/
private final boolean requireParameterizedOutput;
public Clickhouse20ExportParameterVisitor(final List<Object> parameters, final Appendable appender, final boolean wantParameterizedOutput){
public Clickhouse20ExportParameterVisitor(final List<Object> parameters, final Appendable appender, final boolean wantParameterizedOutput) {
super(appender, true);
this.parameters = parameters;
this.requireParameterizedOutput = wantParameterizedOutput;
......@@ -45,21 +45,21 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
this(new ArrayList<Object>());
}
public Clickhouse20ExportParameterVisitor(final List<Object> parameters){
this(parameters,new StringBuilder(),false);
public Clickhouse20ExportParameterVisitor(final List<Object> parameters) {
this(parameters, new StringBuilder(), false);
}
public Clickhouse20ExportParameterVisitor(final Appendable appender) {
this(new ArrayList<Object>(),appender,true);
this(new ArrayList<Object>(), appender, true);
}
public List<Object> getParameters() {
return parameters;
}
@Override
public boolean visit(SQLSelectItem x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
return false;
......@@ -67,7 +67,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
@Override
public boolean visit(SQLOrderBy x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
return false;
......@@ -75,7 +75,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
@Override
public boolean visit(SQLSelectGroupByClause x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
return false;
......@@ -83,7 +83,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
@Override
public boolean visit(SQLMethodInvokeExpr x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
ExportParameterVisitorUtils.exportParamterAndAccept(this.parameters, x.getArguments());
......@@ -93,7 +93,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
@Override
public boolean visit(SQLInListExpr x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
ExportParameterVisitorUtils.exportParamterAndAccept(this.parameters, x.getTargetList());
......@@ -103,7 +103,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
@Override
public boolean visit(SQLBetweenExpr x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
ExportParameterVisitorUtils.exportParameter(this.parameters, x);
......@@ -111,7 +111,7 @@ public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisito
}
public boolean visit(SQLBinaryOpExpr x) {
if(requireParameterizedOutput){
if (requireParameterizedOutput) {
return super.visit(x);
}
ExportParameterVisitorUtils.exportParameter(this.parameters, x);
......
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.SQLStructDataType;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
......
......@@ -5,7 +5,7 @@ import com.dlink.metadata.constant.DorisConstant;
import java.sql.ResultSet;
import java.sql.SQLException;
public class DorisQuery extends AbstractDBQuery{
public class DorisQuery extends AbstractDBQuery {
@Override
public String schemaAllSql() {
return DorisConstant.QUERY_ALL_DATABASE;
......@@ -81,6 +81,7 @@ public class DorisQuery extends AbstractDBQuery{
public String collation() {
return "Default";
}
@Override
public String columnPosition() {
return "Default";
......@@ -90,10 +91,12 @@ public class DorisQuery extends AbstractDBQuery{
public String precision() {
return "Default";
}
@Override
public String scale() {
return "Default";
}
@Override
public String autoIncrement() {
return "Default";
......
......@@ -69,13 +69,13 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
public Table getTable(String schemaName, String tableName) {
List<Table> tables = listTables(schemaName);
Table table = null;
for(Table item : tables){
if(Asserts.isEquals(item.getName(),tableName)){
for (Table item : tables) {
if (Asserts.isEquals(item.getName(), tableName)) {
table = item;
break;
}
}
if(Asserts.isNotNull(table)) {
if (Asserts.isNotNull(table)) {
table.setColumns(listColumns(schemaName, table.getName()));
}
return table;
......@@ -101,17 +101,17 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
if (Asserts.isNotNullString(tableName)) {
Table tableInfo = new Table();
tableInfo.setName(tableName);
if(columnList.contains(dbQuery.tableComment())) {
if (columnList.contains(dbQuery.tableComment())) {
tableInfo.setComment(results.getString(dbQuery.tableComment()));
}
tableInfo.setSchema(schemaName);
if(columnList.contains(dbQuery.tableType())) {
if (columnList.contains(dbQuery.tableType())) {
tableInfo.setType(results.getString(dbQuery.tableType()));
}
if(columnList.contains(dbQuery.catalogName())) {
if (columnList.contains(dbQuery.catalogName())) {
tableInfo.setCatalog(results.getString(dbQuery.catalogName()));
}
if(columnList.contains(dbQuery.engine())) {
if (columnList.contains(dbQuery.engine())) {
tableInfo.setEngine(results.getString(dbQuery.engine()));
}
tableList.add(tableInfo);
......@@ -131,7 +131,7 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
}
@Override
public List<Schema> listSchemas() {
public List<Schema> listSchemas() {
List<Schema> schemas = new ArrayList<>();
PreparedStatement preparedStatement = null;
......@@ -173,21 +173,20 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
for (int i = 1; i <= metaData.getColumnCount(); i++) {
columnList.add(metaData.getColumnLabel(i));
}
Integer positionId=1;
Integer positionId = 1;
while (results.next()) {
Column field = new Column();
if (StringUtils.isEmpty(results.getString(dbQuery.columnName()))) {
break;
}
else{
if(columnList.contains(dbQuery.columnName())){
} else {
if (columnList.contains(dbQuery.columnName())) {
String columnName = results.getString(dbQuery.columnName());
field.setName(columnName);
}
if(columnList.contains(dbQuery.columnType())) {
if (columnList.contains(dbQuery.columnType())) {
field.setType(results.getString(dbQuery.columnType()));
}
if(columnList.contains(dbQuery.columnComment()) && Asserts.isNotNull(results.getString(dbQuery.columnComment()))) {
if (columnList.contains(dbQuery.columnComment()) && Asserts.isNotNull(results.getString(dbQuery.columnComment()))) {
String columnComment = results.getString(dbQuery.columnComment()).replaceAll("\"|'", "");
field.setComment(columnComment);
}
......@@ -209,7 +208,7 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
StringBuilder createTable = new StringBuilder();
PreparedStatement preparedStatement = null;
ResultSet results = null;
String createTableSql = getDBQuery().createTableSql(table.getSchema(),table.getName());
String createTableSql = getDBQuery().createTableSql(table.getSchema(), table.getName());
try {
preparedStatement = conn.prepareStatement(createTableSql);
results = preparedStatement.executeQuery();
......
......@@ -2,7 +2,7 @@ package com.dlink.metadata.query;
import com.dlink.metadata.constant.HiveConstant;
public class HiveQuery extends AbstractDBQuery{
public class HiveQuery extends AbstractDBQuery {
@Override
public String schemaAllSql() {
return HiveConstant.QUERY_ALL_DATABASE;
......@@ -28,6 +28,7 @@ public class HiveQuery extends AbstractDBQuery{
public String createTableName() {
return "createtab_stmt";
}
@Override
public String tableName() {
return "tab_name";
......
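createTableName() names the single column Hive returns for its DDL query, and HiveDriver.getCreateTableSql in the earlier hunk concatenates that column across rows. A condensed sketch of the same pattern, assuming the underlying statement is Hive's SHOW CREATE TABLE (the actual constant lives in HiveConstant and is not shown here):
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
public class HiveCreateTableSketch {
    static String showCreateTable(Connection conn, String table) throws SQLException {
        StringBuilder createTable = new StringBuilder();
        try (PreparedStatement ps = conn.prepareStatement("SHOW CREATE TABLE " + table);
             ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                // Hive spreads the DDL over multiple rows of the createtab_stmt column
                createTable.append(rs.getString("createtab_stmt")).append("\n");
            }
        }
        return createTable.toString();
    }
}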
......@@ -53,21 +53,21 @@ public class OracleDriver extends AbstractJdbcDriver {
sb.append(table.getName() + " (");
List<Column> columns = table.getColumns();
for (int i = 0; i < columns.size(); i++) {
if(i>0){
if (i > 0) {
sb.append(",");
}
sb.append(columns.get(i).getName() + " " + getTypeConvert().convertToDB(columns.get(i)));
if(!columns.get(i).isNullable()){
if (!columns.get(i).isNullable()) {
sb.append(" NOT NULL");
}
}
sb.append(");");
sb.append("\r\n");
List<Column> pks = columns.stream().filter(column -> column.isKeyFlag()).collect(Collectors.toList());
if(Asserts.isNotNullCollection(pks)) {
if (Asserts.isNotNullCollection(pks)) {
sb.append("ALTER TABLE " + table.getName() + " ADD CONSTRAINT " + table.getName() + "_PK PRIMARY KEY (");
for (int i = 0; i < pks.size(); i++) {
if(i>0){
if (i > 0) {
sb.append(",");
}
sb.append(pks.get(i).getName());
......@@ -75,13 +75,13 @@ public class OracleDriver extends AbstractJdbcDriver {
sb.append(");\r\n");
}
for (int i = 0; i < columns.size(); i++) {
sb.append("COMMENT ON COLUMN "+table.getName()+"."+columns.get(i).getName()+" IS '"+columns.get(i).getComment()+"';");
sb.append("COMMENT ON COLUMN " + table.getName() + "." + columns.get(i).getName() + " IS '" + columns.get(i).getComment() + "';");
}
return sb.toString();
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
public Map<String, String> getFlinkColumnTypeConversion() {
return new HashMap<>();
}
}
......@@ -15,11 +15,11 @@ public class OracleQuery extends AbstractDBQuery {
@Override
public String tablesSql(String schemaName) {
return "SELECT * FROM ALL_TAB_COMMENTS WHERE OWNER='"+schemaName+"'";
return "SELECT * FROM ALL_TAB_COMMENTS WHERE OWNER='" + schemaName + "'";
}
@Override
public String columnsSql(String schemaName,String tableName) {
public String columnsSql(String schemaName, String tableName) {
return "SELECT A.COLUMN_NAME, CASE WHEN A.DATA_TYPE='NUMBER' THEN "
+ "(CASE WHEN A.DATA_PRECISION IS NULL THEN A.DATA_TYPE "
+ "WHEN NVL(A.DATA_SCALE, 0) > 0 THEN A.DATA_TYPE||'('||A.DATA_PRECISION||','||A.DATA_SCALE||')' "
......@@ -31,10 +31,10 @@ public class OracleQuery extends AbstractDBQuery {
+ " and pcc.owner = upper(A.OWNER)"
+ " and pcc.table_name = upper(A.TABLE_NAME)),0,'','PRI') KEY "
+ "FROM ALL_TAB_COLUMNS A "
+ " INNER JOIN ALL_COL_COMMENTS B ON A.TABLE_NAME = B.TABLE_NAME AND A.COLUMN_NAME = B.COLUMN_NAME AND B.OWNER = '"+schemaName+"'"
+ " LEFT JOIN ALL_CONSTRAINTS D ON D.TABLE_NAME = A.TABLE_NAME AND D.CONSTRAINT_TYPE = 'P' AND D.OWNER = '"+schemaName+"'"
+ " LEFT JOIN ALL_CONS_COLUMNS C ON C.CONSTRAINT_NAME = D.CONSTRAINT_NAME AND C.COLUMN_NAME=A.COLUMN_NAME AND C.OWNER = '"+schemaName+"'"
+ "WHERE A.OWNER = '"+schemaName+"' AND A.TABLE_NAME = '"+tableName+"' ORDER BY A.COLUMN_ID ";
+ " INNER JOIN ALL_COL_COMMENTS B ON A.TABLE_NAME = B.TABLE_NAME AND A.COLUMN_NAME = B.COLUMN_NAME AND B.OWNER = '" + schemaName + "'"
+ " LEFT JOIN ALL_CONSTRAINTS D ON D.TABLE_NAME = A.TABLE_NAME AND D.CONSTRAINT_TYPE = 'P' AND D.OWNER = '" + schemaName + "'"
+ " LEFT JOIN ALL_CONS_COLUMNS C ON C.CONSTRAINT_NAME = D.CONSTRAINT_NAME AND C.COLUMN_NAME=A.COLUMN_NAME AND C.OWNER = '" + schemaName + "'"
+ "WHERE A.OWNER = '" + schemaName + "' AND A.TABLE_NAME = '" + tableName + "' ORDER BY A.COLUMN_ID ";
}
@Override
......
package com.dlink.metadata.constant;
/**
*
* @author lcg
* @operate Phoenix constants
* @date 2022/2/16 14:19
......
package com.dlink.metadata.constant;
/**
*
* @author lcg
* @operate Common Phoenix data types and their corresponding type codes
* @date 2022/2/16 16:49
......@@ -25,24 +24,27 @@ public enum PhoenixEnum {
VARBINARY(-3);
int dataTypeCode;
PhoenixEnum(int dataTypeCode) {
this.dataTypeCode = dataTypeCode;
}
public int getDataTypeCode(){
public int getDataTypeCode() {
return dataTypeCode;
}
/**
* Returns the data type for the given numeric code; defaults to VARCHAR when nothing matches, and returns null when the argument is null
*
* @param dataTypeCode
* @return
*/
public static PhoenixEnum getDataTypeEnum(Integer dataTypeCode){
if(dataTypeCode == null){
public static PhoenixEnum getDataTypeEnum(Integer dataTypeCode) {
if (dataTypeCode == null) {
return null;
}else {
} else {
for (PhoenixEnum typeEnum : PhoenixEnum.values()) {
if(dataTypeCode.equals(typeEnum.dataTypeCode)){
if (dataTypeCode.equals(typeEnum.dataTypeCode)) {
return typeEnum;
}
}
......
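A small usage sketch for getDataTypeEnum; it assumes the elided enum constants include INTEGER(4) in line with java.sql.Types, which is what the dataTypeCode values are modeled on:
import com.dlink.metadata.constant.PhoenixEnum;
import java.sql.Types;
public class PhoenixEnumSketch {
    public static void main(String[] args) {
        PhoenixEnum matched = PhoenixEnum.getDataTypeEnum(Types.INTEGER); // assumed constant INTEGER(4)
        PhoenixEnum fallback = PhoenixEnum.getDataTypeEnum(999999); // no match, falls back to VARCHAR
        PhoenixEnum none = PhoenixEnum.getDataTypeEnum(null); // null in, null out
        System.out.println(matched + " / " + fallback + " / " + none);
    }
}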
......@@ -7,7 +7,7 @@ import com.dlink.model.ColumnType;
public class PhoenixTypeConvert implements ITypeConvert {
@Override
public ColumnType convert(Column column) {
if (Asserts.isNull(column)||Asserts.isNull(column.getType())) {
if (Asserts.isNull(column) || Asserts.isNull(column.getType())) {
return ColumnType.STRING;
}
String t = column.getType().toLowerCase();
......
......@@ -16,11 +16,10 @@ import java.util.List;
import java.util.Properties;
/**
*
* @author lcg
* @operate
* @operate
* @date 2022/2/16 16:50
* @return
* @return
*/
public class PhoenixDriver extends AbstractJdbcDriver {
@Override
......@@ -84,6 +83,7 @@ public class PhoenixDriver extends AbstractJdbcDriver {
/**
* Works around multi-statement execution in Phoenix SQL:
* Phoenix cannot execute a statement that carries a ';'
*
* @param sql
* @return
*/
......@@ -93,7 +93,7 @@ public class PhoenixDriver extends AbstractJdbcDriver {
@Override
public JdbcSelectResult query(String sql, Integer limit) {
return super.query(parsePhoenixSql(sql),limit);
return super.query(parsePhoenixSql(sql), limit);
}
@Override
......
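The body of parsePhoenixSql is elided by this diff; going only by the comment above it (Phoenix rejects statements carrying a ';'), a plausible shape is stripping the terminator before execution. Illustrative only, not the actual implementation:
public class PhoenixSqlSketch {
    // Hypothetical: drops a trailing ';' so Phoenix will accept the statement.
    static String parsePhoenixSql(String sql) {
        if (sql == null) {
            return null;
        }
        String trimmed = sql.trim();
        return trimmed.endsWith(";") ? trimmed.substring(0, trimmed.length() - 1) : trimmed;
    }
}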
......@@ -77,7 +77,6 @@ public class PhoenixQuery extends AbstractDBQuery {
}
public String isNullable() {
return "NULLABLE";
}
......
......@@ -4,7 +4,6 @@ import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.convert.PostgreSqlTypeConvert;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.PostgreSqlQuery;
import com.dlink.model.Table;
import java.util.HashMap;
import java.util.Map;
......@@ -42,7 +41,7 @@ public class PostgreSqlDriver extends AbstractJdbcDriver {
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
public Map<String, String> getFlinkColumnTypeConversion() {
return new HashMap<>();
}
}
......@@ -14,14 +14,14 @@ public class PostgreSqlQuery extends AbstractDBQuery {
@Override
public String tablesSql(String schemaName) {
return "SELECT A.tablename, obj_description(relfilenode, 'pg_class') AS comments FROM pg_tables A, pg_class B WHERE A.schemaname='"+schemaName+"' AND A.tablename = B.relname";
return "SELECT A.tablename, obj_description(relfilenode, 'pg_class') AS comments FROM pg_tables A, pg_class B WHERE A.schemaname='" + schemaName + "' AND A.tablename = B.relname";
}
@Override
public String columnsSql(String schemaName,String tableName) {
public String columnsSql(String schemaName, String tableName) {
return "SELECT A.attname AS name,format_type (A.atttypid,A.atttypmod) AS type,col_description (A.attrelid,A.attnum) AS comment,\n" +
"(CASE WHEN (SELECT COUNT (*) FROM pg_constraint AS PC WHERE A.attnum = PC.conkey[1] AND PC.contype = 'p') > 0 THEN 'PRI' ELSE '' END) AS key \n" +
"FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='"+schemaName+"."+tableName+"'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
"FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='" + schemaName + "." + tableName + "'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
}
@Override
......
package com.dlink.metadata.constant;
/**
*
* @operate SQL Server constants
* @date 2022/1/26 14:11
* @return
......
......@@ -7,19 +7,19 @@ import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.SqlServerQuery;
import com.dlink.model.Column;
import com.dlink.model.Table;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author lcg
* @operate
* @operate
* @date 2022/1/26 14:23
* @return
* @return
*/
public class SqlServerDriver extends AbstractJdbcDriver {
public class SqlServerDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
return new SqlServerQuery();
......@@ -48,40 +48,40 @@ public class SqlServerDriver extends AbstractJdbcDriver {
@Override
public String getCreateTableSql(Table table) {
StringBuilder sb = new StringBuilder();
sb.append("CREATE TABLE ["+table.getName() + "] (");
sb.append("CREATE TABLE [" + table.getName() + "] (");
List<Column> columns = table.getColumns();
for (int i = 0; i < columns.size(); i++) {
if(i>0){
if (i > 0) {
sb.append(",");
}
sb.append("["+columns.get(i).getName() + "]" + getTypeConvert().convertToDB(columns.get(i)));
if(!columns.get(i).isNullable()){
sb.append("[" + columns.get(i).getName() + "]" + getTypeConvert().convertToDB(columns.get(i)));
if (!columns.get(i).isNullable()) {
sb.append(" NOT NULL");
}else{
} else {
sb.append(" NULL");
}
}
List<String> pks = new ArrayList<>();
for (int i = 0; i < columns.size(); i++) {
if(columns.get(i).isKeyFlag()){
if (columns.get(i).isKeyFlag()) {
pks.add(columns.get(i).getName());
}
}
if(pks.size()>0){
if (pks.size() > 0) {
sb.append(", PRIMARY KEY ( ");
for (int i = 0; i < pks.size(); i++) {
if(i>0){
if (i > 0) {
sb.append(",");
}
sb.append("["+pks.get(i)+"]");
sb.append("[" + pks.get(i) + "]");
}
sb.append(" ) ");
}
sb.append(") GO ");
for (Column column : columns) {
String comment = column.getComment();
if(comment != null && !comment.isEmpty()){
sb.append(String.format(SqlServerConstant.COMMENT_SQL, comment, table.getSchema() == null || table.getSchema().isEmpty() ? "dbo":table.getSchema(),
if (comment != null && !comment.isEmpty()) {
sb.append(String.format(SqlServerConstant.COMMENT_SQL, comment, table.getSchema() == null || table.getSchema().isEmpty() ? "dbo" : table.getSchema(),
table.getName(), column.getName()) + " GO ");
}
}
......@@ -89,7 +89,7 @@ public class SqlServerDriver extends AbstractJdbcDriver {
}
@Override
public Map<String,String> getFlinkColumnTypeConversion(){
public Map<String, String> getFlinkColumnTypeConversion() {
return new HashMap<>();
}
}
package com.dlink.metadata.query;
import com.dlink.metadata.constant.SqlServerConstant;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
*
* @author lcg
* @operate
* @operate
* @date 2022/1/26 15:42
* @return
* @return
*/
public class SqlServerQuery extends AbstractDBQuery {
......@@ -25,7 +25,7 @@ public class SqlServerQuery extends AbstractDBQuery {
@Override
public String columnsSql(String schemaName, String tableName) {
return String.format(SqlServerConstant.QUERY_COLUMNS_SQL, tableName);
return String.format(SqlServerConstant.QUERY_COLUMNS_SQL, tableName);
}
@Override
......