zhaowei / dlink · Commits

Commit 6860ab21, authored Mar 17, 2022 by zhu-mingye

    Add extended Hive metadata (新增扩展Hive元数据)

Parent: f9ae43cc

Showing 19 changed files with 714 additions and 9 deletions (+714 -9).
dlink-assembly/src/main/assembly/package.xml                                                              +9   -1
dlink-common/src/main/java/com/dlink/model/Table.java                                                     +2   -2
dlink-core/pom.xml                                                                                        +5   -0
dlink-core/src/main/java/com/dlink/config/Dialect.java                                                    +2   -2
dlink-metadata/dlink-metadata-hive/pom.xml                                                                +82  -0
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/constant/HiveConstant.java            +33  -0
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/convert/HiveTypeConvert.java          +97  -0
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/driver/HiveDriver.java                +250 -0
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/query/HiveQuery.java                  +59  -0
dlink-metadata/dlink-metadata-hive/src/main/resources/META-INF/services/com.dlink.metadata.driver.Driver  +1   -0
dlink-metadata/dlink-metadata-hive/src/test/java/com/dlink/metadata/HiveTest.java                         +108 -0
dlink-metadata/pom.xml                                                                                    +1   -0
dlink-web/public/database/hive.png                                                                        +0   -0
dlink-web/src/components/Studio/StudioTree/components/SimpleTaskForm.tsx                                  +3   -2
dlink-web/src/components/Studio/conf.ts                                                                   +4   -0
dlink-web/src/components/Studio/icon.tsx                                                                  +46  -1
dlink-web/src/pages/DataBase/DB.ts                                                                        +3   -0
dlink-web/src/pages/DataBase/components/DBForm.tsx                                                        +4   -1
pom.xml                                                                                                   +5   -0
dlink-assembly/src/main/assembly/package.xml

@@ -132,7 +132,15 @@
                 <include>dlink-metadata-postgresql-${project.version}.jar</include>
             </includes>
         </fileSet>
+        <!-- Copy the dlink-metadata-hive module jar into the packaged lib directory -->
+        <fileSet>
+            <directory>${project.parent.basedir}/dlink-metadata/dlink-metadata-hive/target</directory>
+            <outputDirectory>lib</outputDirectory>
+            <includes>
+                <include>dlink-metadata-hive-${project.version}.jar</include>
+            </includes>
+        </fileSet>
         <fileSet>
             <directory>${project.parent.basedir}/dlink-metadata/dlink-metadata-sqlserver/target</directory>
dlink-common/src/main/java/com/dlink/model/Table.java

@@ -140,9 +140,9 @@ public class Table implements Serializable, Comparable<Table> {
             }
         }
         if (Asserts.isNotNullString(comment)) {
-            sb.append(" FROM " + catalogName + "." + schema + "." + name + ";" + " -- " + comment + "\n");
+            sb.append(" FROM " + schema + "." + name + ";" + " -- " + comment + "\n");
         } else {
-            sb.append(" FROM " + catalogName + "." + schema + "." + name + ";\n");
+            sb.append(" FROM " + schema + "." + name + ";\n");
         }
         return sb.toString();
     }
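To make the change concrete: the generated FROM clause drops the catalog prefix, presumably so that two-level (schema.table) dialects such as Hive receive valid SQL. A runnable sketch with hypothetical values (the enclosing method name is not visible in this hunk):

public class FromClauseDemo {
    public static void main(String[] args) {
        String schema = "test", name = "employees", comment = "staff dim"; // hypothetical values
        StringBuilder sb = new StringBuilder("SELECT *");
        sb.append(" FROM " + schema + "." + name + ";" + " -- " + comment + "\n"); // new form, no catalogName prefix
        System.out.print(sb); // SELECT * FROM test.employees; -- staff dim
    }
}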
dlink-core/pom.xml

@@ -105,6 +105,11 @@
             <artifactId>dlink-metadata-doris</artifactId>
             <scope>${scope.runtime}</scope>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-metadata-hive</artifactId>
+            <scope>${scope.runtime}</scope>
+        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-metadata-postgresql</artifactId>
dlink-core/src/main/java/com/dlink/config/Dialect.java

@@ -12,7 +12,7 @@ public enum Dialect {
     FLINKSQL("FlinkSql"), FLINKJAR("FlinkJar"), FLINKSQLENV("FlinkSqlEnv"), SQL("Sql"), JAVA("Java"),
     MYSQL("Mysql"), ORACLE("Oracle"), SQLSERVER("SqlServer"), POSTGRESQL("PostGreSql"), CLICKHOUSE("ClickHouse"),
-    DORIS("Doris"), PHOENIX("Phoenix");
+    DORIS("Doris"), PHOENIX("Phoenix"), HIVE("Hive");

     private String value;

@@ -42,7 +42,7 @@ public enum Dialect {
     public static boolean isSql(String value) {
         Dialect dialect = Dialect.get(value);
         switch (dialect) {
-            case SQL: case MYSQL: case ORACLE: case SQLSERVER: case POSTGRESQL: case CLICKHOUSE: case DORIS: case PHOENIX:
+            case SQL: case MYSQL: case ORACLE: case SQLSERVER: case POSTGRESQL: case CLICKHOUSE: case DORIS: case PHOENIX: case HIVE:
                 return true;
             default:
                 return false;
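A quick check of the new branch, as a minimal sketch (Dialect.get is assumed to resolve the display values shown above, as the switch implies):

import com.dlink.config.Dialect;

public class DialectDemo {
    public static void main(String[] args) {
        System.out.println(Dialect.isSql("Hive"));     // true after this commit
        System.out.println(Dialect.isSql("FlinkSql")); // false: FLINKSQL has no case label in isSql
    }
}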
dlink-metadata/dlink-metadata-hive/pom.xml  (new file)

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink-metadata</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dlink-metadata-hive</artifactId>
    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-metadata-base</artifactId>
        </dependency>
<!--        <dependency>-->
<!--            <groupId>org.apache.hive</groupId>-->
<!--            <artifactId>hive-metastore</artifactId>-->
<!--            <version>2.1.1-cdh6.2.1</version>-->
<!--        </dependency>-->
<!--        <dependency>-->
<!--            <groupId>commons-logging</groupId>-->
<!--            <artifactId>commons-logging</artifactId>-->
<!--            <version>1.2</version>-->
<!--        </dependency>-->
<!--        <dependency>-->
<!--            <groupId>org.apache.hadoop</groupId>-->
<!--            <artifactId>hadoop-common</artifactId>-->
<!--            <version>3.0.0-cdh6.2.1</version>-->
<!--        </dependency>-->
<!--        <dependency>-->
<!--            <groupId>org.apache.hive</groupId>-->
<!--            <artifactId>hive-exec</artifactId>-->
<!--            <version>2.1.1-cdh6.2.1</version>-->
<!--        </dependency>-->
<!--        <dependency>-->
<!--            <groupId>org.apache.thrift</groupId>-->
<!--            <artifactId>libfb303</artifactId>-->
<!--            <version>0.9.3</version>-->
<!--        </dependency>-->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-nop</artifactId>
            <version>1.6.1</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.4</version>
        </dependency>
    </dependencies>
    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>
</project>
\ No newline at end of file
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/constant/HiveConstant.java  (new file)

package com.dlink.metadata.constant;

public interface HiveConstant {

    /**
     * Query all databases
     */
    String QUERY_ALL_DATABASE = " show databases";
    /**
     * Query all tables under the given schema
     */
    String QUERY_ALL_TABLES_BY_SCHEMA = "show tables";
    /**
     * Key of the extended table info section
     */
    String DETAILED_TABLE_INFO = "Detailed Table Information";
    /**
     * Query the extended info of the given schema.table
     */
    String QUERY_TABLE_SCHEMA_EXTENED_INFOS = " describe extended `%s`.`%s`";
    /**
     * Query the columns of the given schema.table: name, type, and comment
     */
    String QUERY_TABLE_SCHEMA = " describe `%s`.`%s`";
    /**
     * Switch to the given database
     */
    String USE_DB = "use `%s`";
    /**
     * Query only the column names of the given schema.table
     */
    String QUERY_TABLE_COLUMNS_ONLY = "show columns in `%s`.`%s`";
}
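For reference, a small sketch of how these format strings expand (database and table names hypothetical):

import com.dlink.metadata.constant.HiveConstant;

public class HiveConstantDemo {
    public static void main(String[] args) {
        // %s placeholders are filled via String.format; backticks guard reserved words
        System.out.println(String.format(HiveConstant.QUERY_TABLE_SCHEMA, "test", "employees"));
        // -> " describe `test`.`employees`"  (note the leading space carried by the constant)
        System.out.println(String.format(HiveConstant.USE_DB, "test"));
        // -> "use `test`"
    }
}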
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/convert/HiveTypeConvert.java  (new file)

package com.dlink.metadata.convert;

import com.dlink.assertion.Asserts;
import com.dlink.model.Column;
import com.dlink.model.ColumnType;

public class HiveTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        // matching is substring-based and order-sensitive, e.g. "bigint" must be tested before "int"
        String t = column.getType().toLowerCase().trim();
        if (t.contains("char")) {
            return ColumnType.STRING;
        } else if (t.contains("boolean")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_BOOLEAN;
            }
            return ColumnType.BOOLEAN;
        } else if (t.contains("tinyint")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_BYTE;
            }
            return ColumnType.BYTE;
        } else if (t.contains("smallint")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_SHORT;
            }
            return ColumnType.SHORT;
        } else if (t.contains("bigint")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_LONG;
            }
            return ColumnType.LONG;
        } else if (t.contains("largeint")) {
            return ColumnType.STRING;
        } else if (t.contains("int")) {
            if (column.isNullable()) {
                return ColumnType.INTEGER;
            }
            return ColumnType.INT;
        } else if (t.contains("float")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_FLOAT;
            }
            return ColumnType.FLOAT;
        } else if (t.contains("double")) {
            if (column.isNullable()) {
                return ColumnType.JAVA_LANG_DOUBLE;
            }
            return ColumnType.DOUBLE;
        } else if (t.contains("date")) {
            return ColumnType.STRING;
        } else if (t.contains("datetime")) {
            return ColumnType.STRING;
        } else if (t.contains("decimal")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("time")) {
            return ColumnType.DOUBLE;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING:
                return "varchar";
            case BOOLEAN:
            case JAVA_LANG_BOOLEAN:
                return "boolean";
            case BYTE:
            case JAVA_LANG_BYTE:
                return "tinyint";
            case SHORT:
            case JAVA_LANG_SHORT:
                return "smallint";
            case LONG:
            case JAVA_LANG_LONG:
                return "bigint";
            case FLOAT:
            case JAVA_LANG_FLOAT:
                return "float";
            case DOUBLE:
            case JAVA_LANG_DOUBLE:
                return "double";
            case DECIMAL:
                return "decimal";
            case INT:
            case INTEGER:
                return "int";
            default:
                return "varchar";
        }
    }
}
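A minimal usage sketch (Column's setters are assumed from how HiveDriver.listColumns uses them below; values hypothetical):

import com.dlink.metadata.convert.HiveTypeConvert;
import com.dlink.model.Column;
import com.dlink.model.ColumnType;

public class HiveTypeConvertDemo {
    public static void main(String[] args) {
        Column c = new Column();
        c.setType("bigint"); // the raw data_type string read from Hive's describe output
        ColumnType t = new HiveTypeConvert().convert(c);
        // "bigint" is matched before the bare "int" branch, so this prints
        // LONG, or JAVA_LANG_LONG when the column reports isNullable() == true
        System.out.println(t);
    }
}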
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/driver/HiveDriver.java  (new file)

package com.dlink.metadata.driver;

import com.dlink.assertion.Asserts;
import com.dlink.metadata.constant.HiveConstant;
import com.dlink.metadata.convert.HiveTypeConvert;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.query.HiveQuery;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.apache.commons.lang3.StringUtils;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HiveDriver extends AbstractJdbcDriver implements Driver {

//    @Override
//    public Table getTable(String schemaName, String tableName) {
//        List<Table> tables = listTables(schemaName);
//        Table table = null;
//        for (Table item : tables) {
//            if (Asserts.isEquals(item.getName(), tableName)) {
//                table = item;
//            }
//        }
//        if (Asserts.isNotNull(table)) {
//            List<Column> columnList = new ArrayList<>(); // Column objects with the "Detailed Table Information" row excluded
//            List<Column> columnListWithExt = listColumns(schemaName, table.getName()); // all Column objects
//
//            Column columnExtInfoToTable = columnListWithExt.get(columnListWithExt.size() - 1); // the "Detailed Table Information" row; parse it below and assign it to the Table's fields
//            String extenedInfo = columnExtInfoToTable.getType(); // the value of "Detailed Table Information"
//            // parsing of "Detailed Table Information" starts here
//
//            System.out.println(extenedInfo);
//
//            // parsing of "Detailed Table Information" ends here
//
//            for (int i = 0; i < columnListWithExt.size(); i++) {
//                Column columnExt = columnListWithExt.get(i);
//                if (!columnExt.getName().contains(HiveConstant.DETAILED_TABLE_INFO)) { // exclude "Detailed Table Information"
//                    Column columnBean = new Column();
//                    columnBean.setName(columnExt.getName());
//                    columnBean.setType(columnExt.getType());
//                    columnBean.setComment(columnExt.getComment());
//                    columnList.add(columnBean);
//                }
//            }
//            table.setColumns(columnList);
//        }
//        return table;
//    }

    @Override
    public List<Table> listTables(String schemaName) {
        List<Table> tableList = new ArrayList<>();
        PreparedStatement preparedStatement = null;
        ResultSet results = null;
        IDBQuery dbQuery = getDBQuery();
        // "show tables" lists tables of the current database only, so listSchemas() runs USE first
        String sql = dbQuery.tablesSql(schemaName);
        try {
            preparedStatement = conn.prepareStatement(sql);
            results = preparedStatement.executeQuery();
            ResultSetMetaData metaData = results.getMetaData();
            List<String> columnList = new ArrayList<>();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                columnList.add(metaData.getColumnLabel(i));
            }
            while (results.next()) {
                String tableName = results.getString(dbQuery.tableName());
                if (Asserts.isNotNullString(tableName)) {
                    Table tableInfo = new Table();
                    tableInfo.setName(tableName);
                    if (columnList.contains(dbQuery.tableComment())) {
                        tableInfo.setComment(results.getString(dbQuery.tableComment()));
                    }
                    tableInfo.setSchema(schemaName);
                    if (columnList.contains(dbQuery.tableType())) {
                        tableInfo.setType(results.getString(dbQuery.tableType()));
                    }
                    if (columnList.contains(dbQuery.catalogName())) {
                        tableInfo.setCatalog(results.getString(dbQuery.catalogName()));
                    }
                    if (columnList.contains(dbQuery.engine())) {
                        tableInfo.setEngine(results.getString(dbQuery.engine()));
                    }
                    tableList.add(tableInfo);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            close(preparedStatement, results);
        }
        return tableList;
    }

    @Override
    public List<Schema> getSchemasAndTables() {
        return listSchemas();
    }

    @Override
    public List<Schema> listSchemas() {
        List<Schema> schemas = new ArrayList<>();
        PreparedStatement preparedStatement = null;
        ResultSet results = null;
        String schemasSql = getDBQuery().schemaAllSql();
        try {
            preparedStatement = conn.prepareStatement(schemasSql);
            results = preparedStatement.executeQuery();
            while (results.next()) {
                String schemaName = results.getString(getDBQuery().schemaName());
                if (Asserts.isNotNullString(schemaName)) {
                    Schema schema = new Schema(schemaName);
                    if (execute(String.format(HiveConstant.USE_DB, schemaName))) {
                        schema.setTables(listTables(schema.getName()));
                    }
                    schemas.add(schema);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            close(preparedStatement, results);
        }
        return schemas;
    }

    @Override
    public List<Column> listColumns(String schemaName, String tableName) {
        List<Column> columns = new ArrayList<>();
        PreparedStatement preparedStatement = null;
        ResultSet results = null;
        IDBQuery dbQuery = getDBQuery();
        String tableFieldsSql = dbQuery.columnsSql(schemaName, tableName);
        try {
            preparedStatement = conn.prepareStatement(tableFieldsSql);
            results = preparedStatement.executeQuery();
            ResultSetMetaData metaData = results.getMetaData();
            List<String> columnList = new ArrayList<>();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                columnList.add(metaData.getColumnLabel(i));
            }
            Integer positionId = 1;
            while (results.next()) {
                Column field = new Column();
                if (StringUtils.isEmpty(results.getString(dbQuery.columnName()))) {
                    // Hive's describe output separates trailing sections with a blank row,
                    // so stop at the first empty col_name
                    break;
                } else {
                    if (columnList.contains(dbQuery.columnName())) {
                        String columnName = results.getString(dbQuery.columnName());
                        field.setName(columnName);
                    }
                    if (columnList.contains(dbQuery.columnType())) {
                        field.setType(results.getString(dbQuery.columnType()));
                    }
                    if (columnList.contains(dbQuery.columnComment()) && Asserts.isNotNull(results.getString(dbQuery.columnComment()))) {
                        String columnComment = results.getString(dbQuery.columnComment()).replaceAll("\"|'", "");
                        field.setComment(columnComment);
                    }
                    field.setPosition(positionId++);
                    field.setJavaType(getTypeConvert().convert(field));
                }
                columns.add(field);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            close(preparedStatement, results);
        }
        return columns;
    }

    @Override
    public String getCreateTableSql(Table table) {
        StringBuilder createTable = new StringBuilder();
        PreparedStatement preparedStatement = null;
        ResultSet results = null;
        String createTableSql = getDBQuery().createTableSql(table.getSchema(), table.getName());
        try {
            preparedStatement = conn.prepareStatement(createTableSql);
            results = preparedStatement.executeQuery();
            while (results.next()) {
                createTable.append(results.getString(getDBQuery().createTableName())).append("\n");
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            close(preparedStatement, results);
        }
        return createTable.toString();
    }

    @Override
    public IDBQuery getDBQuery() {
        return new HiveQuery();
    }

    @Override
    public ITypeConvert getTypeConvert() {
        return new HiveTypeConvert();
    }

    @Override
    String getDriverClass() {
        return "org.apache.hive.jdbc.HiveDriver";
    }

    @Override
    public String getType() {
        return "Hive";
    }

    @Override
    public String getName() {
        return "Hive";
    }

    @Override
    public Map<String, String> getFlinkColumnTypeConversion() {
        HashMap<String, String> map = new HashMap<>();
        map.put("BOOLEAN", "BOOLEAN");
        map.put("TINYINT", "TINYINT");
        map.put("SMALLINT", "SMALLINT");
        map.put("INT", "INT");
        map.put("VARCHAR", "STRING");
        map.put("TEXT", "STRING"); // was "TEXY" in the original commit, an apparent typo
        map.put("DATETIME", "TIMESTAMP");
        return map;
    }
}
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/query/HiveQuery.java  (new file)

package com.dlink.metadata.query;

import com.dlink.metadata.constant.HiveConstant;

public class HiveQuery extends AbstractDBQuery {

    @Override
    public String schemaAllSql() {
        return HiveConstant.QUERY_ALL_DATABASE;
    }

    @Override
    public String tablesSql(String schemaName) {
        return HiveConstant.QUERY_ALL_TABLES_BY_SCHEMA;
    }

    @Override
    public String columnsSql(String schemaName, String tableName) {
        return String.format(HiveConstant.QUERY_TABLE_SCHEMA, schemaName, tableName);
    }

    @Override
    public String schemaName() {
        return "database_name";
    }

    @Override
    public String createTableName() {
        return "createtab_stmt";
    }

    @Override
    public String tableName() {
        return "tab_name";
    }

    @Override
    public String tableComment() {
        return "comment";
    }

    @Override
    public String columnName() {
        return "col_name";
    }

    @Override
    public String columnType() {
        return "data_type";
    }

    @Override
    public String columnComment() {
        return "comment";
    }
}
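The string getters here are result-set column labels rather than SQL: HiveDriver reads them back from the statements above. In standard Hive output, show databases yields a database_name column, show tables yields tab_name, describe yields col_name / data_type / comment, and show create table yields createtab_stmt; worth verifying against your Hive version. A quick sketch:

import com.dlink.metadata.query.HiveQuery;

public class HiveQueryDemo {
    public static void main(String[] args) {
        HiveQuery q = new HiveQuery();
        System.out.println(q.columnsSql("test", "employees")); // " describe `test`.`employees`"
        // labels HiveDriver will read from that statement's result set:
        System.out.println(q.columnName() + ", " + q.columnType() + ", " + q.columnComment());
    }
}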
dlink-metadata/dlink-metadata-hive/src/main/resources/META-INF/services/com.dlink.metadata.driver.Driver  (new file)

com.dlink.metadata.driver.HiveDriver
\ No newline at end of file
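This one-line registration is what makes the new driver discoverable: Driver implementations are loaded through Java's SPI. A minimal sketch of the mechanism (the selection-by-type inside Driver.build is an assumption; ServiceLoader itself is standard JDK):

import com.dlink.metadata.driver.Driver;

import java.util.ServiceLoader;

public class SpiLookupDemo {
    public static void main(String[] args) {
        // ServiceLoader reads every META-INF/services/com.dlink.metadata.driver.Driver
        // file on the classpath and instantiates each implementation listed inside
        for (Driver d : ServiceLoader.load(Driver.class)) {
            if ("Hive".equals(d.getType())) { // Driver.build(config) presumably matches on type like this
                System.out.println("discovered: " + d.getName());
            }
        }
    }
}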
dlink-metadata/dlink-metadata-hive/src/test/java/com/dlink/metadata/HiveTest.java  (new file)

package com.dlink.metadata;

import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Test;

import java.util.List;

/**
 * HiveTest
 *
 * @author wenmo
 * @since 2021/7/20 15:32
 **/
public class HiveTest {

    private static final String IP = "cdh1";
    private static final Integer PORT = 10000;
    private static final String hiveDB = "test";
    private static final String username = "zhumingye";
    private static final String passwd = "123456";
    private static final String hive = "Hive";

    private static String url = "jdbc:hive2://" + IP + ":" + PORT + "/" + hiveDB;

    public Driver getDriver() {
        DriverConfig config = new DriverConfig();
        config.setType(hive);
        config.setName(hive);
        config.setIp(IP);
        config.setPort(PORT);
        config.setUsername(username);
        config.setPassword(passwd);
        config.setUrl(url);
        return Driver.build(config);
    }

    @Test
    public void connectTest() {
        DriverConfig config = new DriverConfig();
        config.setType(hive);
        config.setName(hive);
        config.setIp(IP);
        config.setPort(PORT);
        config.setUsername(username);
        config.setPassword(passwd);
        config.setUrl(url);
        String test = Driver.build(config).test();
        System.out.println(test);
        System.err.println("end...");
    }

    @Test
    public void getDBSTest() {
        Driver driver = getDriver();
        List<Schema> schemasAndTables = driver.listSchemas();
        schemasAndTables.forEach(schema -> {
            System.out.println(schema.getName() + "\t\t" + schema.getTables().toString());
        });
        System.err.println("end...");
    }

    @Test
    public void getTablesByDBTest() throws Exception {
        Driver driver = getDriver();
        driver.execute("use odsp ");
        List<Table> tableList = driver.listTables(hiveDB);
        tableList.forEach(schema -> {
            System.out.println(schema.getName());
        });
        System.err.println("end...");
    }

    @Test
    public void getColumnsByTableTest() {
        Driver driver = getDriver();
        List<Column> columns = driver.listColumns(hiveDB, "biz_college_planner_mysql_language_score_item");
        for (Column column : columns) {
            System.out.println(column.getName() + " \t " + column.getType() + " \t " + column.getComment());
        }
        System.err.println("end...");
    }

    @Test
    public void getCreateTableTest() throws Exception {
        Driver driver = getDriver();
        // JdbcSelectResult jdbcSelectResult = driver.executeSql("show create table odsp.biz_college_planner_mysql_language_score_item", 1);
        Table driverTable = driver.getTable(hiveDB, "biz_college_planner_mysql_language_score_item");
        String createTableSql = driver.getCreateTableSql(driverTable);
        System.out.println(createTableSql);
        System.err.println("end...");
    }

    @Test
    public void getTableExtenedInfoTest() throws Exception {
        Driver driver = getDriver();
        Table driverTable = driver.getTable(hiveDB, "employees");
        for (Column column : driverTable.getColumns()) {
            System.out.println(column.getName() + "\t\t" + column.getType() + "\t\t" + column.getComment());
        }
    }
}
dlink-metadata/pom.xml

@@ -20,6 +20,7 @@
         <module>dlink-metadata-sqlserver</module>
         <module>dlink-metadata-doris</module>
         <module>dlink-metadata-phoenix</module>
+        <module>dlink-metadata-hive</module>
     </modules>
dlink-web/public/database/hive.png  (new file, binary, 11.5 KB)
dlink-web/src/components/Studio/StudioTree/components/SimpleTaskForm.tsx

-import React, {useEffect, useState} from 'react';
-import {Form, Button, Input, Modal, Select} from 'antd';
+import React, {useState} from 'react';
+import {Button, Form, Input, Modal, Select} from 'antd';
 import type {TaskTableListItem} from '../data.d';
 import {DIALECT} from "@/components/Studio/conf";

@@ -60,6 +60,7 @@ const SimpleTaskForm: React.FC<UpdateFormProps> = (props) => {
         <Option value={DIALECT.POSTGRESQL}>{DIALECT.POSTGRESQL}</Option>
         <Option value={DIALECT.CLICKHOUSE}>{DIALECT.CLICKHOUSE}</Option>
         <Option value={DIALECT.DORIS}>{DIALECT.DORIS}</Option>
+        <Option value={DIALECT.HIVE}>{DIALECT.HIVE}</Option>
         <Option value={DIALECT.PHOENIX}>{DIALECT.PHOENIX}</Option>
         <Option value={DIALECT.JAVA}>{DIALECT.JAVA}</Option>
         <Option value={DIALECT.SQL}>{DIALECT.SQL}</Option>
dlink-web/src/components/Studio/conf.ts

@@ -19,6 +19,7 @@ export const DIALECT = {
   POSTGRESQL: 'PostGreSql',
   CLICKHOUSE: 'ClickHouse',
   DORIS: 'Doris',
+  HIVE: 'Hive',
   PHOENIX: 'Phoenix',
   JAVA: 'Java',
 };

@@ -39,6 +40,7 @@ export const isSql = (dialect: string) => {
     case DIALECT.CLICKHOUSE:
     case DIALECT.PHOENIX:
     case DIALECT.DORIS:
+    case DIALECT.HIVE:
       return true;
     default:
       return false;

@@ -59,6 +61,7 @@ export const isExecuteSql = (dialect: string) => {
     case DIALECT.DORIS:
     case DIALECT.PHOENIX:
     case DIALECT.FLINKSQL:
+    case DIALECT.HIVE:
       return true;
     default:
       return false;

@@ -80,6 +83,7 @@ export const isTask = (dialect: string) => {
     case DIALECT.PHOENIX:
     case DIALECT.FLINKSQL:
     case DIALECT.FLINKJAR:
+    case DIALECT.HIVE:
       return true;
     default:
       return false;
dlink-web/src/components/Studio/icon.tsx  (+46 -1; large diff collapsed in this view)
dlink-web/src/pages/DataBase/DB.ts

@@ -22,6 +22,9 @@ export function getDBImage(type: string) {
     case 'phoenix':
       imageUrl += 'phoenix.png';
       break;
+    case 'hive':
+      imageUrl += 'hive.png';
+      break;
     default:
       imageUrl += 'db.jpg';
   }
dlink-web/src/pages/DataBase/components/DBForm.tsx
View file @
6860ab21
import
React
,
{
useState
}
from
'react'
;
import
{
Modal
,
Image
,
List
,
Card
}
from
'antd'
;
import
{
Card
,
Image
,
List
,
Modal
}
from
'antd'
;
import
{
DataBaseItem
}
from
'../data.d'
;
import
{
connect
}
from
"umi"
;
...
...
@@ -37,6 +37,9 @@ const data:any = [
{
type
:
'Phoenix'
,
},
{
type
:
'Hive'
,
},
];
const
DBForm
:
React
.
FC
<
UpdateFormProps
>
=
(
props
)
=>
{
...
...
pom.xml

@@ -262,6 +262,11 @@
             <artifactId>dlink-metadata-doris</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-metadata-hive</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-metadata-phoenix</artifactId>