Commit 279df255 authored by godkaikai

Optimize ClickHouse (CK) SQL validation

parent db423486
@@ -156,11 +156,9 @@ public class StudioServiceImpl implements StudioService {
             }};
         }
         Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
-        SqlExplainResult explainResult = driver.explain(studioExecuteDTO.getStatement());
+        List<SqlExplainResult> sqlExplainResults = driver.explain(studioExecuteDTO.getStatement());
         driver.close();
-        return new ArrayList<SqlExplainResult>(){{
-            add(explainResult);
-        }};
+        return sqlExplainResults;
     }
 }
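For illustration, a minimal caller-side sketch of the new contract (assuming the locals shown in this hunk, dataBase and studioExecuteDTO, are in scope): explain now yields one SqlExplainResult per parsed statement instead of a single wrapped result.

    Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
    List<SqlExplainResult> results = driver.explain(studioExecuteDTO.getStatement());
    driver.close();
    // results holds one entry per statement: its type, the statement text,
    // and either a validation result or an error message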
......
@@ -3,7 +3,7 @@
 # Path of the jar to run; quotes are optional. Note: there must be NO spaces around the "=", otherwise the shell reports "command not found".
 JAR_NAME="./dlink-admin-*.jar"
 #java -Djava.ext.dirs=$JAVA_HOME/jre/lib/ext:$JAVA_HOME/jre/lib:./lib -classpath ."/lib/*.jar" -jar dlink-admin-*.jar
-SETTING="-Dloader.path=./lib,./plugins"
+SETTING="-Dloader.path=./lib,./plugins -Ddruid.mysql.usePingMethod=false"
 # Print a usage tip if the input format is wrong!
 tips() {
     echo ""
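For illustration, the added -Ddruid.mysql.usePingMethod=false option is read by Druid's MySQL connection checker as a JVM system property; set to false, the pool validates connections with a plain validation query instead of the MySQL driver's ping. A minimal Java sketch of the equivalent programmatic setting (assuming it runs before the Druid pool is created):

    // Same effect as the -D option added to auto.sh above; must be set before the pool initializes.
    System.setProperty("druid.mysql.usePingMethod", "false");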
......
 package com.dlink.metadata.driver;
+import com.alibaba.druid.sql.SQLUtils;
 import com.alibaba.druid.sql.ast.SQLStatement;
-import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
-import com.alibaba.druid.sql.dialect.clickhouse.parser.ClickhouseStatementParser;
+import com.alibaba.druid.sql.parser.ParserException;
+import com.alibaba.druid.sql.parser.SQLStatementParser;
+import com.alibaba.druid.sql.parser.Token;
 import com.dlink.assertion.Asserts;
 import com.dlink.constant.CommonConstant;
 import com.dlink.metadata.result.JdbcSelectResult;
@@ -13,7 +15,13 @@ import com.dlink.result.SqlExplainResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -288,34 +296,20 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
     }
     @Override
-    public SqlExplainResult explain(String sql){
-        boolean correct = true;
-        String error = null;
-        String type = "Sql";
-        PreparedStatement preparedStatement = null;
-        ResultSet results = null;
+    public List<SqlExplainResult> explain(String sql){
+        List<SqlExplainResult> sqlExplainResults = new ArrayList<>();
+        String current = null;
         try {
-            ClickhouseStatementParser parser = new ClickhouseStatementParser(sql);
-            SQLStatement sqlStatement = parser.parseStatement();
-            type = sqlStatement.getClass().getSimpleName();
-            if(!(sqlStatement instanceof SQLSelectStatement)){
-                return SqlExplainResult.success(type, sql, "");
-            }
-            preparedStatement = conn.prepareStatement("explain "+sql);
-            results = preparedStatement.executeQuery();
-            if(!results.next()){
-                correct = false;
+            List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, config.getType());
+            for (SQLStatement item : stmtList) {
+                current = item.toString();
+                String type = item.getClass().getSimpleName();
+                sqlExplainResults.add(SqlExplainResult.success(type, current, null));
             }
         } catch (Exception e) {
-            correct = false;
-            error = e.getMessage();
+            sqlExplainResults.add(SqlExplainResult.fail(current, e.getMessage()));
         } finally {
-            close(preparedStatement, results);
-            if(correct) {
-                return SqlExplainResult.success(type, sql, null);
-            }else {
-                return SqlExplainResult.fail(sql, error);
-            }
+            return sqlExplainResults;
         }
     }
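For illustration, the generic explain above leans on Druid's SQLUtils.parseStatements, which splits a script into one SQLStatement per statement for the given dialect. A minimal, self-contained sketch (the "mysql" dialect string and the sample script are illustrative only):

    import com.alibaba.druid.sql.SQLUtils;
    import com.alibaba.druid.sql.ast.SQLStatement;
    import java.util.List;

    public class ParseStatementsSketch {
        public static void main(String[] args) {
            String script = "select 1; insert into t values (1)";
            // One SQLStatement per statement; a parse error raises ParserException,
            // which the new explain() turns into SqlExplainResult.fail(...).
            List<SQLStatement> statements = SQLUtils.parseStatements(script, "mysql");
            for (SQLStatement statement : statements) {
                System.out.println(statement.getClass().getSimpleName() + " -> " + statement);
            }
        }
    }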
......
@@ -91,6 +91,6 @@
     JdbcSelectResult query(String sql, Integer limit);
-    SqlExplainResult explain(String sql);
+    List<SqlExplainResult> explain(String sql);
 }
@@ -25,5 +25,10 @@
             <groupId>ru.yandex.clickhouse</groupId>
             <artifactId>clickhouse-jdbc</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid-spring-boot-starter</artifactId>
+            <scope>provided</scope>
+        </dependency>
     </dependencies>
 </project>
package com.dlink.metadata.ast;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.ClickhouseVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import java.util.ArrayList;
import java.util.List;
public class Clickhouse20CreateTableStatement extends SQLCreateTableStatement {
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
private SQLOrderBy orderBy;
private SQLExpr partitionBy;
private SQLExpr primaryKey;
private SQLExpr sampleBy;
public Clickhouse20CreateTableStatement() {
super(DbType.clickhouse);
}
public SQLOrderBy getOrderBy() {
return orderBy;
}
public void setOrderBy(SQLOrderBy x) {
if (x != null) {
x.setParent(this);
}
this.orderBy = x;
}
public SQLExpr getPartitionBy() {
return partitionBy;
}
public void setPartitionBy(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.partitionBy = x;
}
public SQLExpr getPrimaryKey() {
return primaryKey;
}
public void setPrimaryKey(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.primaryKey = x;
}
public SQLExpr getSampleBy() {
return sampleBy;
}
public void setSampleBy(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.sampleBy = x;
}
public List<SQLAssignItem> getSettings() {
return settings;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof ClickhouseVisitor) {
ClickhouseVisitor vv = (ClickhouseVisitor) v;
if (vv.visit(this)) {
acceptChild(vv);
}
vv.endVisit(this);
return;
}
if (v.visit(this)) {
acceptChild(v);
}
v.endVisit(this);
}
}
@@ -2,12 +2,11 @@ package com.dlink.metadata.driver;
 import com.alibaba.druid.sql.ast.SQLStatement;
 import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
-import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseCreateTableStatement;
-import com.alibaba.druid.sql.dialect.clickhouse.parser.ClickhouseCreateTableParser;
-import com.alibaba.druid.sql.dialect.clickhouse.parser.ClickhouseStatementParser;
-import com.alibaba.druid.sql.dialect.clickhouse.visitor.ClickhouseVisitor;
+import com.alibaba.druid.sql.parser.ParserException;
+import com.alibaba.druid.sql.parser.Token;
 import com.dlink.metadata.convert.ClickHouseTypeConvert;
 import com.dlink.metadata.convert.ITypeConvert;
+import com.dlink.metadata.parser.Clickhouse20StatementParser;
 import com.dlink.metadata.query.ClickHouseQuery;
 import com.dlink.metadata.query.IDBQuery;
 import com.dlink.model.Table;
@@ -15,7 +14,8 @@ import com.dlink.result.SqlExplainResult;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
-import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
 /**
  * ClickHouseDriver
@@ -55,36 +55,39 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
     }
     @Override
-    public SqlExplainResult explain(String sql){
-        boolean correct = true;
-        String error = null;
-        String type = "ClickHouseSql";
+    public List<SqlExplainResult> explain(String sql){
+        List<SqlExplainResult> sqlExplainResults = new ArrayList<>();
         StringBuilder explain = new StringBuilder();
         PreparedStatement preparedStatement = null;
         ResultSet results = null;
+        String current = null;
         try {
-            ClickhouseStatementParser parser = new ClickhouseStatementParser(sql);
-            SQLStatement sqlStatement = parser.parseStatement();
-            type = sqlStatement.getClass().getSimpleName();
-            if(!(sqlStatement instanceof SQLSelectStatement)){
-                return SqlExplainResult.success(type, sql, explain.toString());
+            Clickhouse20StatementParser parser = new Clickhouse20StatementParser(sql);
+            List<SQLStatement> stmtList = new ArrayList<>();
+            parser.parseStatementList(stmtList, -1, null);
+            if (parser.getLexer().token() != Token.EOF) {
+                throw new ParserException("syntax error : " + sql);
             }
-            preparedStatement = conn.prepareStatement("explain "+sql);
-            results = preparedStatement.executeQuery();
-            while(results.next()){
-                explain.append(getTypeConvert().convertValue(results,"explain", "string")+"\r\n");
+            for (SQLStatement item : stmtList) {
+                current = item.toString();
+                String type = item.getClass().getSimpleName();
+                if (!(item instanceof SQLSelectStatement)) {
+                    sqlExplainResults.add(SqlExplainResult.success(type, current, explain.toString()));
+                    continue;
+                }
+                preparedStatement = conn.prepareStatement("explain " + current);
+                results = preparedStatement.executeQuery();
+                while (results.next()) {
+                    explain.append(getTypeConvert().convertValue(results, "explain", "string") + "\r\n");
+                }
+                sqlExplainResults.add(SqlExplainResult.success(type, current, explain.toString()));
             }
         } catch (Exception e) {
-            correct = false;
-            error = e.getMessage();
+            e.printStackTrace();
+            sqlExplainResults.add(SqlExplainResult.fail(current, e.getMessage()));
        } finally {
             close(preparedStatement, results);
-            if(correct) {
-                return SqlExplainResult.success(type, sql, explain.toString());
-            }else {
-                return SqlExplainResult.fail(sql, error);
-            }
+            return sqlExplainResults;
         }
     }
 }
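For illustration, the rewritten ClickHouse explain validates the whole script with the bundled Clickhouse20StatementParser and only accepts it when the lexer ends on EOF. A short fragment sketch (imports as in the hunk above; the sample SQL is illustrative):

    Clickhouse20StatementParser parser = new Clickhouse20StatementParser("select now(); select 1");
    List<SQLStatement> stmtList = new ArrayList<>();
    parser.parseStatementList(stmtList, -1, null);
    // Well-formed input leaves the lexer on EOF and yields one SQLStatement per statement;
    // anything left over is reported as "syntax error : ..." via ParserException in explain().
    boolean wellFormed = parser.getLexer().token() == Token.EOF && stmtList.size() == 2;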
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
public class Clickhouse20CreateTableParser extends SQLCreateTableParser {
public Clickhouse20CreateTableParser(SQLExprParser exprParser) {
super(exprParser);
}
protected SQLCreateTableStatement newCreateStatement() {
return new Clickhouse20CreateTableStatement();
}
protected void parseCreateTableRest(SQLCreateTableStatement stmt) {
Clickhouse20CreateTableStatement ckStmt = (Clickhouse20CreateTableStatement) stmt;
if (lexer.identifierEquals(FnvHash.Constants.ENGINE)) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.setEngine(
this.exprParser.expr()
);
}
if (lexer.identifierEquals("PARTITION")) {
lexer.nextToken();
accept(Token.BY);
SQLExpr expr = this.exprParser.expr();
ckStmt.setPartitionBy(expr);
}
if (lexer.identifierEquals("PRIMARY")) {
lexer.nextToken();
accept(Token.KEY);
SQLExpr expr = this.exprParser.expr();
ckStmt.setPrimaryKey(expr);
}
if (lexer.token() == Token.PRIMARY) {
lexer.nextToken();
accept(Token.KEY);
SQLExpr expr = this.exprParser.expr();
ckStmt.setPrimaryKey(expr);
}
if (lexer.token() == Token.ORDER) {
SQLOrderBy orderBy = this.exprParser.parseOrderBy();
ckStmt.setOrderBy(orderBy);
}
if (lexer.identifierEquals("SAMPLE")) {
lexer.nextToken();
accept(Token.BY);
SQLExpr expr = this.exprParser.expr();
ckStmt.setSampleBy(expr);
}
if (lexer.identifierEquals("SETTINGS")) {
lexer.nextToken();
for (;;) {
SQLAssignItem item = this.exprParser.parseAssignItem();
item.setParent(ckStmt);
ckStmt.getSettings().add(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
}
}
}
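For illustration, parseCreateTableRest is what lets ClickHouse-specific table clauses pass validation; they land on the Clickhouse20CreateTableStatement defined earlier. A hypothetical sketch (table and column names, and the exact printed forms, are illustrative and depend on Druid's generic type handling):

    String ddl = "CREATE TABLE t (id UInt32, ts DateTime) ENGINE = MergeTree() "
            + "PARTITION BY toYYYYMM(ts) ORDER BY id SETTINGS index_granularity = 8192";
    List<SQLStatement> stmts = new Clickhouse20StatementParser(ddl).parseStatementList();
    Clickhouse20CreateTableStatement stmt = (Clickhouse20CreateTableStatement) stmts.get(0);
    // The ClickHouse-only clauses are now regular AST properties, e.g.:
    // stmt.getPartitionBy() -> toYYYYMM(ts)
    // stmt.getOrderBy()     -> ORDER BY id
    // stmt.getSettings()    -> [index_granularity = 8192]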
/*
* Copyright 1999-2017 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import java.util.Arrays;
public class Clickhouse20ExprParser extends SQLExprParser {
private final static String[] AGGREGATE_FUNCTIONS;
private final static long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = { "AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER" };
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
long hash = FnvHash.fnv1a_64_lower(str);
int index = Arrays.binarySearch(AGGREGATE_FUNCTIONS_CODES, hash);
AGGREGATE_FUNCTIONS[index] = str;
}
}
public Clickhouse20ExprParser(String sql){
this(new Clickhouse20Lexer(sql));
this.lexer.nextToken();
}
public Clickhouse20ExprParser(String sql, SQLParserFeature... features){
this(new Clickhouse20Lexer(sql, features));
this.lexer.nextToken();
}
public Clickhouse20ExprParser(Lexer lexer){
super(lexer);
this.aggregateFunctions = AGGREGATE_FUNCTIONS;
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
}
protected SQLExpr parseAliasExpr(String alias) {
String chars = alias.substring(1, alias.length() - 1);
return new SQLCharExpr(chars);
}
public SQLExpr primaryRest(SQLExpr expr) {
if (lexer.token() == Token.LBRACKET) {
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(expr);
lexer.nextToken();
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
return primaryRest(array);
}
return super.primaryRest(expr);
}
}
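For illustration, the primaryRest override above is what keeps ClickHouse's bracket syntax (array subscripts and literals) parseable. A small hypothetical fragment:

    // "tags[1]" is expected to parse into a SQLArrayExpr wrapping the identifier and the index.
    SQLExpr expr = new Clickhouse20ExprParser("tags[1]").expr();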
package com.dlink.metadata.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class Clickhouse20Lexer extends Lexer {
public final static Keywords DEFAULT_KEYWORDS;
static {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("OF", Token.OF);
map.put("CONCAT", Token.CONCAT);
map.put("CONTINUE", Token.CONTINUE);
map.put("MERGE", Token.MERGE);
map.put("USING", Token.USING);
map.put("ROW", Token.ROW);
map.put("LIMIT", Token.LIMIT);
map.put("SHOW", Token.SHOW);
map.put("ALL", Token.ALL);
map.put("GLOBAL", Token.GLOBAL);
DEFAULT_KEYWORDS = new Keywords(map);
}
public Clickhouse20Lexer(String input) {
super(input);
dbType = DbType.clickhouse;
super.keywords = DEFAULT_KEYWORDS;
}
public Clickhouse20Lexer(String input, SQLParserFeature... features){
super(input);
super.keywords = DEFAULT_KEYWORDS;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
}
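For illustration, the extended keyword table above is what the lexer consults while tokenizing; words such as SHOW, LIMIT and GLOBAL come back as real tokens rather than plain identifiers. A small hypothetical fragment:

    Clickhouse20Lexer lexer = new Clickhouse20Lexer("SHOW TABLES");
    lexer.nextToken();
    // Expected: Token.SHOW (mapped in DEFAULT_KEYWORDS above); the next token is the identifier TABLES.
    System.out.println(lexer.token());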
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
public class Clickhouse20StatementParser extends SQLStatementParser {
public Clickhouse20StatementParser(String sql) {
super (new Clickhouse20ExprParser(sql));
}
public Clickhouse20StatementParser(String sql, SQLParserFeature... features) {
super (new Clickhouse20ExprParser(sql, features));
}
public Clickhouse20StatementParser(Lexer lexer){
super(new Clickhouse20ExprParser(lexer));
}
@Override
public SQLWithSubqueryClause parseWithQuery() {
SQLWithSubqueryClause withQueryClause = new SQLWithSubqueryClause();
if (lexer.hasComment() && lexer.isKeepComments()) {
withQueryClause.addBeforeComment(lexer.readAndResetComments());
}
accept(Token.WITH);
for (; ; ) {
SQLWithSubqueryClause.Entry entry = new SQLWithSubqueryClause.Entry();
entry.setParent(withQueryClause);
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
switch (lexer.token()) {
case VALUES:
case WITH:
case SELECT:
entry.setSubQuery(
this.createSQLSelectParser()
.select());
break;
default:
break;
}
accept(Token.RPAREN);
} else {
entry.setExpr(exprParser.expr());
}
accept(Token.AS);
String alias = this.lexer.stringVal();
lexer.nextToken();
entry.setAlias(alias);
withQueryClause.addEntry(entry);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
return withQueryClause;
}
public SQLCreateTableParser getSQLCreateTableParser() {
return new Clickhouse20CreateTableParser(this.exprParser);
}
}
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.repository.SchemaRepository;
import com.alibaba.druid.sql.visitor.SchemaStatVisitor;
public class Click20SchemaStatVisitor extends SchemaStatVisitor implements Clickhouse20Visitor {
{
dbType = DbType.antspark;
}
public Click20SchemaStatVisitor() {
super(DbType.antspark);
}
public Click20SchemaStatVisitor(SchemaRepository repository) {
super(repository);
}
}
/*
* Copyright 1999-2017 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dlink.metadata.visitor;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.expr.SQLBetweenExpr;
import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
import com.alibaba.druid.sql.ast.expr.SQLInListExpr;
import com.alibaba.druid.sql.ast.expr.SQLMethodInvokeExpr;
import com.alibaba.druid.sql.ast.statement.SQLSelectGroupByClause;
import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.visitor.ExportParameterVisitor;
import com.alibaba.druid.sql.visitor.ExportParameterVisitorUtils;
import java.util.ArrayList;
import java.util.List;
public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisitor implements ExportParameterVisitor {
/**
     * true if parameterized SQL output is required
*/
private final boolean requireParameterizedOutput;
public Clickhouse20ExportParameterVisitor(final List<Object> parameters, final Appendable appender, final boolean wantParameterizedOutput){
super(appender, true);
this.parameters = parameters;
this.requireParameterizedOutput = wantParameterizedOutput;
}
public Clickhouse20ExportParameterVisitor() {
this(new ArrayList<Object>());
}
public Clickhouse20ExportParameterVisitor(final List<Object> parameters){
this(parameters,new StringBuilder(),false);
}
public Clickhouse20ExportParameterVisitor(final Appendable appender) {
this(new ArrayList<Object>(),appender,true);
}
public List<Object> getParameters() {
return parameters;
}
@Override
public boolean visit(SQLSelectItem x) {
if(requireParameterizedOutput){
return super.visit(x);
}
return false;
}
@Override
public boolean visit(SQLOrderBy x) {
if(requireParameterizedOutput){
return super.visit(x);
}
return false;
}
@Override
public boolean visit(SQLSelectGroupByClause x) {
if(requireParameterizedOutput){
return super.visit(x);
}
return false;
}
@Override
public boolean visit(SQLMethodInvokeExpr x) {
if(requireParameterizedOutput){
return super.visit(x);
}
ExportParameterVisitorUtils.exportParamterAndAccept(this.parameters, x.getArguments());
return true;
}
@Override
public boolean visit(SQLInListExpr x) {
if(requireParameterizedOutput){
return super.visit(x);
}
ExportParameterVisitorUtils.exportParamterAndAccept(this.parameters, x.getTargetList());
return true;
}
@Override
public boolean visit(SQLBetweenExpr x) {
if(requireParameterizedOutput){
return super.visit(x);
}
ExportParameterVisitorUtils.exportParameter(this.parameters, x);
return true;
}
public boolean visit(SQLBinaryOpExpr x) {
if(requireParameterizedOutput){
return super.visit(x);
}
ExportParameterVisitorUtils.exportParameter(this.parameters, x);
return true;
}
}
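For illustration, the export visitor's purpose is to pull literal values out of a parsed statement (and optionally emit a parameterized form). A hypothetical fragment combining it with the parser added in this commit; the exact extraction behavior follows Druid's ExportParameterVisitorUtils:

    Clickhouse20ExportParameterVisitor visitor = new Clickhouse20ExportParameterVisitor();
    for (SQLStatement stmt : new Clickhouse20StatementParser("select * from t where id = 42").parseStatementList()) {
        stmt.accept(visitor);
    }
    // visitor.getParameters() is expected to contain the extracted literal, e.g. [42]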
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.SQLStructDataType;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import java.util.List;
public class Clickhouse20OutputVisitor extends SQLASTOutputVisitor implements Clickhouse20Visitor {
public Clickhouse20OutputVisitor(Appendable appender) {
super(appender, DbType.clickhouse);
}
public Clickhouse20OutputVisitor(Appendable appender, DbType dbType) {
super(appender, dbType);
}
public Clickhouse20OutputVisitor(Appendable appender, boolean parameterized) {
super(appender, parameterized);
}
@Override
public boolean visit(SQLWithSubqueryClause.Entry x) {
if (x.getExpr() != null) {
x.getExpr().accept(this);
} else if (x.getSubQuery() != null) {
print('(');
println();
SQLSelect query = x.getSubQuery();
if (query != null) {
query.accept(this);
} else {
x.getReturningStatement().accept(this);
}
println();
print(')');
}
print(' ');
print0(ucase ? "AS " : "as ");
print0(x.getAlias());
return false;
}
public boolean visit(SQLStructDataType x) {
print0(ucase ? "NESTED (" : "nested (");
incrementIndent();
println();
printlnAndAccept(x.getFields(), ",");
decrementIndent();
println();
print(')');
return false;
}
@Override
public boolean visit(SQLStructDataType.Field x) {
SQLName name = x.getName();
if (name != null) {
name.accept(this);
}
SQLDataType dataType = x.getDataType();
if (dataType != null) {
print(' ');
dataType.accept(this);
}
return false;
}
@Override
public boolean visit(Clickhouse20CreateTableStatement x) {
super.visit((SQLCreateTableStatement) x);
SQLExpr partitionBy = x.getPartitionBy();
if (partitionBy != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
partitionBy.accept(this);
}
SQLOrderBy orderBy = x.getOrderBy();
if (orderBy != null) {
println();
orderBy.accept(this);
}
SQLExpr sampleBy = x.getSampleBy();
if (sampleBy != null) {
println();
print0(ucase ? "SAMPLE BY " : "sample by ");
sampleBy.accept(this);
}
List<SQLAssignItem> settings = x.getSettings();
if (!settings.isEmpty()) {
println();
print0(ucase ? "SETTINGS " : "settings ");
printAndAccept(settings, ", ");
}
return false;
}
public boolean visit(SQLAlterTableAddColumn x) {
print0(ucase ? "ADD COLUMN " : "add column ");
printAndAccept(x.getColumns(), ", ");
return false;
}
}
package com.dlink.metadata.visitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
public interface Clickhouse20Visitor extends SQLASTVisitor {
default boolean visit(Clickhouse20CreateTableStatement x) {
return true;
}
default void endVisit(Clickhouse20CreateTableStatement x) {
}
}