Unverified Commit 09a9d4aa authored by wmtbnbo, committed by GitHub

[Fix Feature][dlink alert metadata] Fix code format configuration conflicts and add new data source presto (#1168)

* [Feature] Update flink1.14 to flink1.14.5

* [Bug] [dlink-flink] Fix the local operation exception caused by the conflict between the CDC package and the Flink clients package

* [Feature] [dlink-flink] Update flink1.15 to flink1.15.1

* [Fix][web] Fix StarRocks databases display

* [Fix][web] Fix query exceptions caused by the system's failure to automatically clean up the selected table information when switching data sources
Signed-off-by: 王沐 <wangmu@haiermoney.com>

* [Fix][metadata] Fix exception when obtaining the SQL DDL of a view
Signed-off-by: 王沐 <wangmu@haiermoney.com>

* [Fix][style] Fix code format configuration conflicts

* [Feature][metadata] Add new data source presto

* [Feature][metadata] Add new data source presto

* [Feature][metadata] Add new data source presto
Signed-off-by: 王沐 <wangmu@haiermoney.com>
Co-authored-by: 王沐 <wangmu@haiermoney.com>
parent 67feb1f2
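
For context before the diff: the new Presto metadata source is exercised the same way as the other dlink metadata drivers. A minimal sketch, following the PrestoTest added in this commit (the host, port, user and the hive.lake.test table are placeholders):

import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;

import java.util.UUID;

public class PrestoUsageSketch {
    public static void main(String[] args) throws Exception {
        DriverConfig config = new DriverConfig();
        config.setName(UUID.randomUUID().toString());
        config.setType("Presto");                          // matches PrestoDriver.getType() and Dialect.PRESTO
        config.setIp("presto-host");                       // placeholder coordinator host
        config.setUsername("presto");                      // Presto accepts a bare user name by default
        config.setUrl("jdbc:presto://presto-host:8080");   // placeholder JDBC URL
        Driver driver = Driver.build(config);              // resolves the driver registered for type "Presto"
        System.out.println(driver.getSchemasAndTables());  // catalogs and schemas, system ones filtered out
        System.out.println(driver.listColumns("hive.lake", "test"));
        System.out.println(driver.query("select * from hive.lake.test", 10).getRowData());
    }
}
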
@@ -15,16 +15,14 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
<artifactId>dlink</artifactId>
<version>0.6.8-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-alert</artifactId>
<packaging>pom</packaging>
......
@@ -244,6 +244,14 @@
<include>dlink-metadata-starrocks-${project.version}.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.parent.basedir}/dlink-metadata/dlink-metadata-presto/target
</directory>
<outputDirectory>lib</outputDirectory>
<includes>
<include>dlink-metadata-presto-${project.version}.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.parent.basedir}/dlink-alert/dlink-alert-dingtalk/target
</directory>
......
@@ -169,6 +169,11 @@
<artifactId>dlink-metadata-starrocks</artifactId>
<scope>${scope.runtime}</scope>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata-presto</artifactId>
<scope>${scope.runtime}</scope>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-connector-phoenix-1.13</artifactId>
......
@@ -44,6 +44,7 @@ public enum Dialect {
PHOENIX("Phoenix"),
HIVE("Hive"),
STARROCKS("StarRocks"),
PRESTO("Presto"),
KUBERNETES_APPLICATION("KubernetesApplaction");
private String value;
@@ -90,6 +91,7 @@ public enum Dialect {
case PHOENIX:
case HIVE:
case STARROCKS:
case PRESTO:
return true;
default:
return false;
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata</artifactId>
<version>0.6.8-SNAPSHOT</version>
</parent>
<artifactId>dlink-metadata-presto</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata-base</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-nop</artifactId>
<version>1.6.1</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>0.235</version>
<scope>compile</scope>
</dependency>
</dependencies>
</project>
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink.metadata.constant;
public interface PrestoConstant {
/**
 * List all catalogs
 */
String QUERY_ALL_DATABASE = "show catalogs";
/**
 * List all tables under the given schema
 */
String QUERY_ALL_TABLES_BY_SCHEMA = "show tables from %s";
/**
 * Describe the given schema.table: column name, column type and column comment
 */
String QUERY_TABLE_SCHEMA = "describe %s.%s";
/**
 * List all schemas under the given catalog
 */
String QUERY_TABLE_COLUMNS_ONLY = "show schemas from %s";
/**
 * Result-set column label that holds the schema name
 */
String SCHEMA = "SCHEMA";
/**
 * Catalog to exclude (system catalog)
 */
String EXTRA_SCHEMA = "system";
/**
 * Schema to exclude (system schema)
 */
String EXTRA_DB = "information_schema";
}
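
These constants are plain String.format templates. A minimal sketch of how they are filled (the catalog, schema and table names are placeholders; PrestoDriver below does the same thing):

import com.dlink.metadata.constant.PrestoConstant;

public class PrestoConstantSketch {
    public static void main(String[] args) {
        // "hive" is a placeholder catalog, "hive.lake" a catalog.schema, "test" a table.
        System.out.println(String.format(PrestoConstant.QUERY_ALL_TABLES_BY_SCHEMA, "hive.lake")); // show tables from hive.lake
        System.out.println(String.format(PrestoConstant.QUERY_TABLE_SCHEMA, "hive.lake", "test")); // describe hive.lake.test
        System.out.println(String.format(PrestoConstant.QUERY_TABLE_COLUMNS_ONLY, "hive"));        // show schemas from hive
    }
}
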
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
import com.dlink.model.Column;
import com.dlink.model.ColumnType;
public class PrestoTypeConvert implements ITypeConvert {
@Override
public ColumnType convert(Column column) {
ColumnType columnType = ColumnType.STRING;
if (Asserts.isNull(column)) {
return columnType;
}
String t = column.getType().toLowerCase().trim();
boolean isNullable = !column.isKeyFlag() && column.isNullable();
if (t.contains("char")) {
columnType = ColumnType.STRING;
} else if (t.contains("boolean")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_BOOLEAN;
} else {
columnType = ColumnType.BOOLEAN;
}
} else if (t.contains("tinyint")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_BYTE;
} else {
columnType = ColumnType.BYTE;
}
} else if (t.contains("smallint")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_SHORT;
} else {
columnType = ColumnType.SHORT;
}
} else if (t.contains("bigint")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_LONG;
} else {
columnType = ColumnType.LONG;
}
} else if (t.contains("largeint")) {
columnType = ColumnType.STRING;
} else if (t.contains("int")) {
if (isNullable) {
columnType = ColumnType.INTEGER;
} else {
columnType = ColumnType.INT;
}
} else if (t.contains("float")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_FLOAT;
} else {
columnType = ColumnType.FLOAT;
}
} else if (t.contains("double")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_DOUBLE;
} else {
columnType = ColumnType.DOUBLE;
}
} else if (t.contains("timestamp")) {
columnType = ColumnType.TIMESTAMP;
} else if (t.contains("date")) {
columnType = ColumnType.STRING;
} else if (t.contains("datetime")) {
columnType = ColumnType.STRING;
} else if (t.contains("decimal")) {
columnType = ColumnType.DECIMAL;
} else if (t.contains("time")) {
if (isNullable) {
columnType = ColumnType.JAVA_LANG_DOUBLE;
} else {
columnType = ColumnType.DOUBLE;
}
}
return columnType;
}
@Override
public String convertToDB(ColumnType columnType) {
switch (columnType) {
case STRING:
return "varchar";
case BOOLEAN:
case JAVA_LANG_BOOLEAN:
return "boolean";
case BYTE:
case JAVA_LANG_BYTE:
return "tinyint";
case SHORT:
case JAVA_LANG_SHORT:
return "smallint";
case LONG:
case JAVA_LANG_LONG:
return "bigint";
case FLOAT:
case JAVA_LANG_FLOAT:
return "float";
case DOUBLE:
case JAVA_LANG_DOUBLE:
return "double";
case DECIMAL:
return "decimal";
case INT:
case INTEGER:
return "int";
case TIMESTAMP:
return "timestamp";
default:
return "varchar";
}
}
}
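
For illustration, a hedged sketch of the mapping above; it assumes Column's keyFlag defaults to false (PrestoDriver never sets it) and uses placeholder type strings:

import com.dlink.metadata.convert.PrestoTypeConvert;
import com.dlink.model.Column;

public class PrestoTypeConvertSketch {
    public static void main(String[] args) {
        PrestoTypeConvert convert = new PrestoTypeConvert();
        Column c = new Column();
        c.setType("decimal(10,2)");
        System.out.println(convert.convert(c)); // -> ColumnType.DECIMAL
        c.setType("varchar(255)");
        System.out.println(convert.convert(c)); // -> ColumnType.STRING
        c.setType("bigint");
        c.setNullable(true);                    // nullable, non-key columns map to boxed Java types
        System.out.println(convert.convert(c)); // -> ColumnType.JAVA_LANG_LONG
    }
}
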
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
import com.dlink.metadata.constant.PrestoConstant;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.convert.PrestoTypeConvert;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.PrestoQuery;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.QueryData;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class PrestoDriver extends AbstractJdbcDriver implements Driver {
@Override
public Table getTable(String schemaName, String tableName) {
List<Table> tables = listTables(schemaName);
Table table = null;
for (Table item : tables) {
if (Asserts.isEquals(item.getName(), tableName)) {
table = item;
break;
}
}
if (Asserts.isNotNull(table)) {
table.setColumns(listColumns(schemaName, table.getName()));
}
return table;
}
@Override
public List<Table> listTables(String schemaName) {
List<Table> tableList = new ArrayList<>();
PreparedStatement preparedStatement = null;
ResultSet results = null;
IDBQuery dbQuery = getDBQuery();
String sql = dbQuery.tablesSql(schemaName);
try {
preparedStatement = conn.get().prepareStatement(String.format(sql, schemaName));
results = preparedStatement.executeQuery();
ResultSetMetaData metaData = results.getMetaData();
List<String> columnList = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
columnList.add(metaData.getColumnLabel(i));
}
while (results.next()) {
String tableName = results.getString(dbQuery.tableName());
if (Asserts.isNotNullString(tableName)) {
Table tableInfo = new Table();
tableInfo.setName(tableName);
if (columnList.contains(dbQuery.tableComment())) {
tableInfo.setComment(results.getString(dbQuery.tableComment()));
}
tableInfo.setSchema(schemaName);
if (columnList.contains(dbQuery.tableType())) {
tableInfo.setType(results.getString(dbQuery.tableType()));
}
if (columnList.contains(dbQuery.catalogName())) {
tableInfo.setCatalog(results.getString(dbQuery.catalogName()));
}
if (columnList.contains(dbQuery.engine())) {
tableInfo.setEngine(results.getString(dbQuery.engine()));
}
tableList.add(tableInfo);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
close(preparedStatement, results);
}
return tableList;
}
@Override
public List<Schema> getSchemasAndTables() {
return listSchemas();
}
@Override
public List<Schema> listSchemas() {
List<Schema> schemas = new ArrayList<>();
PreparedStatement preparedStatement = null;
ResultSet results = null;
PreparedStatement ps = null;
ResultSet rs = null;
String schemasSql = getDBQuery().schemaAllSql();
try {
preparedStatement = conn.get().prepareStatement(schemasSql);
results = preparedStatement.executeQuery();
while (results.next()) {
String schemaName = results.getString(getDBQuery().schemaName());
// !PrestoConstant.EXTRA_SCHEMA.equals(schemaName) filter system catalog
if (Asserts.isNotNullString(schemaName) && !PrestoConstant.EXTRA_SCHEMA.equals(schemaName)) {
ps = conn.get()
.prepareStatement(String.format(PrestoConstant.QUERY_TABLE_COLUMNS_ONLY, schemaName));
rs = ps.executeQuery();
while (rs.next()) {
String db = rs.getString(PrestoConstant.SCHEMA);
// !PrestoConstant.EXTRA_DB.equals(db) filter system schema
if (Asserts.isNotNullString(db) && !PrestoConstant.EXTRA_DB.equals(db)) {
Schema schema = new Schema(schemaName + "." + db);
schema.setTables(listTables(schema.getName()));
schemas.add(schema);
}
}
close(ps, rs);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
close(ps, rs);
close(preparedStatement, results);
}
return schemas;
}
@Override
public List<Column> listColumns(String schemaName, String tableName) {
List<Column> columns = new ArrayList<>();
PreparedStatement preparedStatement = null;
ResultSet results = null;
IDBQuery dbQuery = getDBQuery();
String tableFieldsSql = dbQuery.columnsSql(schemaName, tableName);
try {
preparedStatement = conn.get().prepareStatement(tableFieldsSql);
results = preparedStatement.executeQuery();
ResultSetMetaData metaData = results.getMetaData();
List<String> columnList = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
columnList.add(metaData.getColumnLabel(i));
}
Integer positionId = 1;
while (results.next()) {
Column field = new Column();
if (StringUtils.isEmpty(results.getString(dbQuery.columnName()))) {
break;
} else {
if (columnList.contains(dbQuery.columnName())) {
String columnName = results.getString(dbQuery.columnName());
field.setName(columnName);
}
if (columnList.contains(dbQuery.columnType())) {
field.setType(results.getString(dbQuery.columnType()));
}
if (columnList.contains(dbQuery.columnComment())
&& Asserts.isNotNull(results.getString(dbQuery.columnComment()))) {
String columnComment = results.getString(dbQuery.columnComment()).replaceAll("\"|'", "");
field.setComment(columnComment);
}
field.setPosition(positionId++);
field.setJavaType(getTypeConvert().convert(field));
}
columns.add(field);
}
} catch (SQLException e) {
e.printStackTrace();
} finally {
close(preparedStatement, results);
}
return columns;
}
@Override
public String getCreateTableSql(Table table) {
StringBuilder createTable = new StringBuilder();
PreparedStatement preparedStatement = null;
ResultSet results = null;
String createTableSql = getDBQuery().createTableSql(table.getSchema(), table.getName());
try {
preparedStatement = conn.get().prepareStatement(createTableSql);
results = preparedStatement.executeQuery();
ResultSetMetaData metaData = results.getMetaData();
while (results.next()) {
createTable.append(results.getString(getDBQuery().createTableName())).append("\n");
}
} catch (Exception e) {
e.printStackTrace();
} finally {
close(preparedStatement, results);
}
return createTable.toString();
}
@Override
public int executeUpdate(String sql) throws Exception {
Asserts.checkNullString(sql, "The SQL statement is empty");
String querySQL = sql.trim().replaceAll(";$", "");
int res = 0;
try (Statement statement = conn.get().createStatement()) {
res = statement.executeUpdate(querySQL);
}
return res;
}
@Override
public JdbcSelectResult query(String sql, Integer limit) {
if (Asserts.isNull(limit)) {
limit = 100;
}
JdbcSelectResult result = new JdbcSelectResult();
List<LinkedHashMap<String, Object>> datas = new ArrayList<>();
List<Column> columns = new ArrayList<>();
List<String> columnNameList = new ArrayList<>();
PreparedStatement preparedStatement = null;
ResultSet results = null;
int count = 0;
try {
String querySQL = sql.trim().replaceAll(";$", "");
preparedStatement = conn.get().prepareStatement(querySQL);
results = preparedStatement.executeQuery();
if (Asserts.isNull(results)) {
result.setSuccess(true);
close(preparedStatement, results);
return result;
}
ResultSetMetaData metaData = results.getMetaData();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
columnNameList.add(metaData.getColumnLabel(i));
Column column = new Column();
column.setName(metaData.getColumnLabel(i));
column.setType(metaData.getColumnTypeName(i));
column.setAutoIncrement(metaData.isAutoIncrement(i));
column.setNullable(metaData.isNullable(i) != ResultSetMetaData.columnNoNulls);
column.setJavaType(getTypeConvert().convert(column));
columns.add(column);
}
result.setColumns(columnNameList);
while (results.next()) {
LinkedHashMap<String, Object> data = new LinkedHashMap<>();
for (int i = 0; i < columns.size(); i++) {
data.put(columns.get(i).getName(),
getTypeConvert().convertValue(results, columns.get(i).getName(), columns.get(i).getType()));
}
datas.add(data);
count++;
if (count >= limit) {
break;
}
}
result.setSuccess(true);
} catch (Exception e) {
result.setError(LogUtil.getError(e));
result.setSuccess(false);
} finally {
close(preparedStatement, results);
result.setRowData(datas);
return result;
}
}
/**
 * Builds the query SQL for the given QueryData; pagination is not implemented.
 */
@Override
public StringBuilder genQueryOption(QueryData queryData) {
String where = queryData.getOption().getWhere();
String order = queryData.getOption().getOrder();
StringBuilder optionBuilder = new StringBuilder()
.append("select * from ")
.append(queryData.getSchemaName())
.append(".")
.append(queryData.getTableName());
if (where != null && !where.equals("")) {
optionBuilder.append(" where ").append(where);
}
if (order != null && !order.equals("")) {
optionBuilder.append(" order by ").append(order);
}
return optionBuilder;
}
@Override
public IDBQuery getDBQuery() {
return new PrestoQuery();
}
@Override
public ITypeConvert getTypeConvert() {
return new PrestoTypeConvert();
}
@Override
String getDriverClass() {
return "com.facebook.presto.jdbc.PrestoDriver";
}
@Override
public String getType() {
return "Presto";
}
@Override
public String getName() {
return "Presto";
}
@Override
public Map<String, String> getFlinkColumnTypeConversion() {
HashMap<String, String> map = new HashMap<>();
map.put("BOOLEAN", "BOOLEAN");
map.put("TINYINT", "TINYINT");
map.put("SMALLINT", "SMALLINT");
map.put("INT", "INT");
map.put("VARCHAR", "STRING");
map.put("TEXY", "STRING");
map.put("INT", "INT");
map.put("DATETIME", "TIMESTAMP");
return map;
}
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink.metadata.query;
import com.dlink.metadata.constant.PrestoConstant;
public class PrestoQuery extends AbstractDBQuery {
@Override
public String schemaAllSql() {
return PrestoConstant.QUERY_ALL_DATABASE;
}
@Override
public String tablesSql(String schemaName) {
return PrestoConstant.QUERY_ALL_TABLES_BY_SCHEMA;
}
@Override
public String columnsSql(String schemaName, String tableName) {
return String.format(PrestoConstant.QUERY_TABLE_SCHEMA, schemaName, tableName);
}
@Override
public String schemaName() {
return "Catalog";
}
@Override
public String createTableName() {
return "Create Table";
}
@Override
public String tableName() {
return "Table";
}
@Override
public String tableComment() {
return "Comment";
}
@Override
public String columnName() {
return "Column";
}
@Override
public String columnType() {
return "Type";
}
@Override
public String columnComment() {
return "Comment";
}
}
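
For reference, a minimal sketch of how PrestoDriver consumes this mapping (the catalog.schema and table names are placeholders):

import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.PrestoQuery;

public class PrestoQuerySketch {
    public static void main(String[] args) {
        IDBQuery query = new PrestoQuery();
        System.out.println(query.schemaAllSql());                  // show catalogs
        System.out.println(query.columnsSql("hive.lake", "test")); // describe hive.lake.test
        // Result-set labels that PrestoDriver reads back from "describe":
        System.out.println(query.columnName() + " / " + query.columnType() + " / " + query.columnComment()); // Column / Type / Comment
    }
}
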
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink;
import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
public class PrestoTest {
private Driver driver;
@Before
public void init() {
DriverConfig config = new DriverConfig();
config.setName(UUID.randomUUID().toString());
config.setType("Presto");
config.setIp("10.168.100.115");
config.setUsername("presto");
config.setUrl("jdbc:presto://10.168.100.115:2080");
try {
driver = Driver.build(config);
} catch (Exception e) {
System.err.println("连接创建失败:" + e.getMessage());
}
}
@Test
public void test() throws SQLException {
// test
String test = driver.test();
System.out.println(test);
System.out.println("schema && table...");
testSchema();
System.out.println("columns...");
testColumns();
System.out.println("query...");
query();
}
@Test
public void testSchema() {
// schema && table
List<Schema> schemasAndTables = driver.getSchemasAndTables();
for (Schema schemasAndTable : schemasAndTables) {
List<Table> tables = schemasAndTable.getTables();
for (Table table : tables) {
System.out.println(table.getName() + " " + table.getSchema());
}
}
}
@Test
public void testColumns() {
// columns
List<Column> columns = driver.listColumns("hive.lake", "test");
for (Column column : columns) {
System.out.println(column.getName() + " " + column.getType() + " " + column.getComment());
}
}
@Test
public void query() {
JdbcSelectResult selectResult = driver.query("select * from hive.lake.test", 10);
List<LinkedHashMap<String, Object>> rowData = selectResult.getRowData();
for (LinkedHashMap<String, Object> rowDatum : rowData) {
System.out.println(rowDatum);
}
}
}
@@ -15,16 +15,14 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
<artifactId>dlink</artifactId>
<version>0.6.8-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-metadata</artifactId>
<packaging>pom</packaging>
@@ -39,7 +37,7 @@
<module>dlink-metadata-phoenix</module>
<module>dlink-metadata-hive</module>
<module>dlink-metadata-starrocks</module>
<module>dlink-metadata-presto</module>
</modules>
</project>
@@ -129,6 +129,7 @@ const SimpleTaskForm: React.FC<UpdateFormProps> = (props) => {
<Option value={DIALECT.HIVE}>{DIALECT.HIVE}</Option>
<Option value={DIALECT.PHOENIX}>{DIALECT.PHOENIX}</Option>
<Option value={DIALECT.STARROCKS}>{DIALECT.STARROCKS}</Option>
<Option value={DIALECT.PRESTO}>{DIALECT.PRESTO}</Option>
<Option key={DIALECT.JAVA} value={DIALECT.JAVA}>{DIALECT.JAVA}</Option>
<Option key={DIALECT.SCALA} value={DIALECT.SCALA}>{DIALECT.SCALA}</Option>
<Option key={DIALECT.PYTHON} value={DIALECT.PYTHON}>{DIALECT.PYTHON}</Option>
......
@@ -42,6 +42,7 @@ export const DIALECT = {
HIVE: 'Hive',
PHOENIX: 'Phoenix',
STARROCKS: 'StarRocks',
PRESTO: 'Presto',
KUBERNETES_APPLICATION: 'KubernetesApplaction',
JAVA: 'Java',
SCALA: 'Scala',
@@ -61,6 +62,7 @@ export const isSql = (dialect: string) => {
case DIALECT.DORIS:
case DIALECT.HIVE:
case DIALECT.STARROCKS:
case DIALECT.PRESTO:
return true;
default:
return false;
@@ -83,6 +85,7 @@ export const isExecuteSql = (dialect: string) => {
case DIALECT.FLINKSQL:
case DIALECT.HIVE:
case DIALECT.STARROCKS:
case DIALECT.PRESTO:
return true;
default:
return false;
@@ -106,6 +109,7 @@ export const isTask = (dialect: string) => {
case DIALECT.FLINKJAR:
case DIALECT.HIVE:
case DIALECT.STARROCKS:
case DIALECT.PRESTO:
case DIALECT.KUBERNETES_APPLICATION:
return true;
default:
......
@@ -47,6 +47,8 @@ export const getIcon = (type: string) => {
return (<Icon component={PhoenixSvg}/>);
case DIALECT.STARROCKS:
return (<Icon component={StarRocksSvg}/>);
case DIALECT.PRESTO:
return (<Icon component={PrestoSvg}/>);
case DIALECT.JAVA:
return (<Icon component={JavaSvg}/>);
case DIALECT.SCALA:
@@ -329,7 +331,35 @@ export const StarRocksSvg = () => (
d="M32,24.57,41.52,10a2.37,2.37,0,0,1,3.17-.75L51,12.9a5.94,5.94,0,0,1,3,5.15V42.37a5.94,5.94,0,0,1-2.39,4.76c1.7-1.31,1-2.78-.1-3.72L32.43,27.69A2.37,2.37,0,0,1,32,24.57Z"/>
</svg>
);
export const PrestoSvg = () => (
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 348.418 300.463" width={svgSize} height={svgSize}>
<g data-name="Group 406" transform="translate(-107.354)">
<circle data-name="Ellipse 154" cx="15.683" cy="15.683" r="15.683" transform="rotate(-45 164.143 -320.553)" fill="#5d88d6"/>
<circle data-name="Ellipse 155" cx="17.423" cy="17.423" r="17.423" transform="rotate(-45 236.512 -348.067)" fill="#5d88d6"/>
<circle data-name="Ellipse 156" cx="19.163" cy="19.163" r="19.163" transform="rotate(-45 308.884 -375.574)" fill="#5d88d6"/>
<circle data-name="Ellipse 157" cx="20.902" cy="20.902" r="20.902" transform="rotate(-45 381.172 -403.063)" fill="#5d88d6"/>
<circle data-name="Ellipse 158" cx="13.943" cy="13.943" r="13.943" transform="rotate(-45 122.284 -219.497)" fill="#32dbfc"/>
<circle data-name="Ellipse 159" cx="15.683" cy="15.683" r="15.683" transform="rotate(-45 194.653 -247.009)" fill="#32dbfc"/>
<circle data-name="Ellipse 160" cx="17.423" cy="17.423" r="17.423" transform="rotate(-45 266.949 -274.494)" fill="#32dbfc"/>
<circle data-name="Ellipse 161" cx="19.163" cy="19.163" r="19.163" transform="rotate(-45 339.278 -301.998)" fill="#32dbfc"/>
<circle data-name="Ellipse 162" cx="20.902" cy="20.902" r="20.902" transform="rotate(-45 411.681 -329.508)" fill="#32dbfc"/>
<circle data-name="Ellipse 163" cx="12.203" cy="12.203" r="12.203" transform="rotate(-45 80.509 -118.474)"/>
<circle data-name="Ellipse 164" cx="13.943" cy="13.943" r="13.943" transform="rotate(-45 152.754 -145.942)"/>
<circle data-name="Ellipse 165" cx="15.683" cy="15.683" r="15.683" transform="rotate(-45 225.165 -173.465)"/>
<circle data-name="Ellipse 166" cx="17.423" cy="17.423" r="17.423" transform="rotate(-45 297.49 -200.963)"/>
<circle data-name="Ellipse 167" cx="19.163" cy="19.163" r="19.163" transform="rotate(-45 369.83 -228.455)"/>
<circle data-name="Ellipse 168" cx="20.902" cy="20.902" r="20.902" transform="rotate(-45 442.075 -255.942)"/>
<g data-name="Group 120" transform="translate(110.646 174.812)" fill="#5d88d6">
<circle data-name="Ellipse 169" cx="13.943" cy="13.943" r="13.943" transform="rotate(-45 63.994 -87.174)"/>
<circle data-name="Ellipse 170" cx="13.943" cy="13.943" transform="translate(43.075 48.862)" r="13.943"/>
<circle data-name="Ellipse 171" cx="13.943" cy="13.943" transform="translate(0 91.982)" r="13.943"/>
<circle data-name="Ellipse 172" cx="13.943" cy="13.943" transform="translate(86.164 91.957)" r="13.943"/>
<circle data-name="Ellipse 173" cx="13.943" cy="13.943" r="13.943" transform="rotate(-45 211.16 -148.109)"/>
<circle data-name="Ellipse 174" cx="13.943" cy="13.943" r="13.943" transform="rotate(-45 137.546 -117.642)"/>
</g>
</g>
</svg>
);
export const HiveSvg = () => (
<svg t="1647425762735" className="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg"
p-id="3247" width={svgSize} height={svgSize}>
......
@@ -48,6 +48,9 @@ export function getDBImage(type: string) {
case 'starrocks':
imageUrl += 'starrocks.jpg';
break;
case 'presto':
imageUrl += 'presto.png';
break;
default:
imageUrl += 'db.jpg';
}
......
@@ -62,6 +62,9 @@ const data: any = [
},
{
type: 'StarRocks',
},
{
type: 'Presto',
}
];
......
@@ -137,7 +137,7 @@ const DataBaseForm: React.FC<ClickHouseFormProps> = (props) => {
>
<Input.Password/>
</Form.Item>
{type !== "Hive" &&
{type !== "Hive" && type !== "Presto" &&
<>
<Form.Item
name="flinkConfig"
......
@@ -23,6 +23,7 @@ ADD ./dlink-metadata/dlink-metadata-hive/pom.xml ./dlink-metadata/dlink-metadata
ADD ./dlink-metadata/dlink-metadata-base/pom.xml ./dlink-metadata/dlink-metadata-base/pom.xml
ADD ./dlink-metadata/dlink-metadata-doris/pom.xml ./dlink-metadata/dlink-metadata-doris/pom.xml
ADD ./dlink-metadata/dlink-metadata-oracle/pom.xml ./dlink-metadata/dlink-metadata-oracle/pom.xml
ADD ./dlink-metadata/dlink-metadata-presto/pom.xml ./dlink-metadata/dlink-metadata-presto/pom.xml
ADD ./dlink-admin/pom.xml ./dlink-admin/pom.xml
ADD ./dlink-extends/pom.xml ./dlink-extends/pom.xml
ADD ./dlink-app/pom.xml ./dlink-app/pom.xml
......
@@ -344,6 +344,11 @@
<artifactId>dlink-metadata-starrocks</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata-presto</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-gateway</artifactId>
@@ -541,9 +546,9 @@
<eclipse>
<file>style/spotless_dlink_formatter.xml</file>
</eclipse>
<removeUnusedImports/>
<removeUnusedImports />
<importOrder>
<order>com.dlink,org.apache,java,javax,org,com,,\#</order>
<order>\#,com.dlink,org.apache,java,javax,org,com</order>
</importOrder>
<licenseHeader>
<file>style/license_header</file>
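
For illustration, a hedged sketch of what the new import order produces in a Java source file, assuming Spotless treats "\#" as the static-import group (the class names are placeholders):

// ImportOrderSketch.java - only the import block matters here
import static org.junit.Assert.assertEquals;   // "\#": static imports now come first

import com.dlink.assertion.Asserts;            // com.dlink
import org.apache.commons.lang3.StringUtils;   // org.apache
import java.util.List;                         // java
import javax.sql.DataSource;                   // javax
import org.junit.Test;                         // org
import com.alibaba.druid.pool.DruidDataSource; // com

public class ImportOrderSketch { }
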
@@ -578,7 +583,7 @@
<includes>
<include>**/*.md</include>
</includes>
<flexmark/>
<flexmark />
</markdown>
</configuration>
<executions>
......