zhaowei / dlink / Commits / 9aac98c6

Commit 9aac98c6 authored Feb 17, 2022 by wenmo
Optimize metadata type conversion and connection management (优化元数据类型转换和连接管理)
parent 2408187c

Showing 33 changed files with 663 additions and 383 deletions (+663 −383)
README.md  +4 −0
dlink-admin/pom.xml  +9 −0
dlink-admin/src/main/java/com/dlink/model/DataBase.java  +2 −2
...dmin/src/main/java/com/dlink/service/DataBaseService.java  +4 −0
...main/java/com/dlink/service/impl/DataBaseServiceImpl.java  +43 −25
...c/main/java/com/dlink/service/impl/StudioServiceImpl.java  +2 −2
...src/main/java/com/dlink/service/impl/TaskServiceImpl.java  +1 −1
dlink-common/src/main/java/com/dlink/model/Column.java  +1 −1
dlink-common/src/main/java/com/dlink/model/ColumnType.java  +56 −0
dlink-common/src/main/java/com/dlink/model/Table.java  +2 −3
...rc/main/java/com/dlink/metadata/convert/ITypeConvert.java  +3 −7
...c/main/java/com/dlink/metadata/driver/AbstractDriver.java  +4 −2
...in/java/com/dlink/metadata/driver/AbstractJdbcDriver.java  +22 −7
...-base/src/main/java/com/dlink/metadata/driver/Driver.java  +14 −4
...src/main/java/com/dlink/metadata/driver/DriverConfig.java  +3 −1
...e/src/main/java/com/dlink/metadata/driver/DriverPool.java  +37 −0
...ava/com/dlink/metadata/convert/ClickHouseTypeConvert.java  +69 −5
...ain/java/com/dlink/metadata/convert/DorisTypeConvert.java  +79 −44
.../src/main/java/com/dlink/metadata/driver/DorisDriver.java  +12 −3
...ata-doris/src/test/java/com/dlink/metadata/DorisTest.java  +9 −7
...ain/java/com/dlink/metadata/convert/MySqlTypeConvert.java  +62 −47
.../src/main/java/com/dlink/metadata/driver/MySqlDriver.java  +6 −6
...ata-mysql/src/test/java/com/dlink/metadata/MysqlTest.java  +10 −9
...in/java/com/dlink/metadata/convert/OracleTypeConvert.java  +36 −30
...src/main/java/com/dlink/metadata/driver/OracleDriver.java  +1 −1
...a-oracle/src/test/java/com/dlink/metadata/OracleTest.java  +8 −8
...n/java/com/dlink/metadata/convert/PhoenixTypeConvert.java  +49 −76
...rc/main/java/com/dlink/metadata/driver/PhoenixDriver.java  +1 −1
...phoenix/src/test/java/com/dlink/metadata/PhoenixTest.java  +3 −1
...ava/com/dlink/metadata/convert/PostgreSqlTypeConvert.java  +60 −49
...java/com/dlink/metadata/convert/SqlServerTypeConvert.java  +44 −40
...erver/src/test/java/com/dlink/metadata/SqlServerTest.java  +1 −1
pom.xml  +6 −0
README.md
@@ -18,6 +18,10 @@ Dinky(原 Dlink):
 3. 从 Dlink 改名为 Dinky 过渡平滑,更加形象的阐明了开源项目的目标,始终指引参与者们 "不忘初心,方得始终"。
+
+## 发展
+
+[ (starchart image) ](https://starchart.cc/DataLinkDC/dlink)
 ## 原理
 (architecture image)
dlink-admin/pom.xml
@@ -101,6 +101,10 @@
             <groupId>cn.dev33</groupId>
             <artifactId>sa-token-spring-boot-starter</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-core</artifactId>
@@ -129,6 +133,11 @@
             <groupId>com.dlink</groupId>
             <artifactId>dlink-metadata-mysql</artifactId>
             <version>0.6.0-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-metadata-doris</artifactId>
+            <version>0.6.0-SNAPSHOT</version>
+        </dependency>-->
         <!--<dependency>
             <groupId>org.apache.flink</groupId>
dlink-admin/src/main/java/com/dlink/model/DataBase.java
@@ -48,7 +48,7 @@ public class DataBase extends SuperEntity {
     private LocalDateTime heartbeatTime;

-    public DriverConfig getDriverConfig(){
-        return new DriverConfig(type, url, username, password);
+    public DriverConfig getDriverConfig() {
+        return new DriverConfig(getName(), type, url, username, password);
     }
 }
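The data source name now rides along in DriverConfig, and Driver.build (further down in this commit) uses it as the key for the new DriverPool. A minimal sketch of the resulting call path, assuming the usual Lombok-generated setters on DataBase and placeholder connection values:

    // Placeholder values for illustration; only getDriverConfig() and Driver.build()
    // come from this commit.
    DataBase dataBase = new DataBase();
    dataBase.setName("doris_test");
    dataBase.setType("Doris");
    dataBase.setUrl("jdbc:mysql://127.0.0.1:9030/test");
    dataBase.setUsername("root");
    dataBase.setPassword("root");

    DriverConfig config = dataBase.getDriverConfig(); // now carries the name
    Driver driver = Driver.build(config);             // pooled under the key "doris_test"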
dlink-admin/src/main/java/com/dlink/service/DataBaseService.java
@@ -35,4 +35,8 @@ public interface DataBaseService extends ISuperService<DataBase> {
     String getSqlCreate(Integer id, String schemaName, String tableName);

     SqlGeneration getSqlGeneration(Integer id, String schemaName, String tableName);
+
+    List<String> listEnabledFlinkWith();
+
+    String getEnabledFlinkWithSql();
 }
dlink-admin/src/main/java/com/dlink/service/impl/DataBaseServiceImpl.java
@@ -6,13 +6,13 @@ import com.dlink.constant.CommonConstant;
 import com.dlink.db.service.impl.SuperServiceImpl;
 import com.dlink.mapper.DataBaseMapper;
 import com.dlink.metadata.driver.Driver;
 import com.dlink.metadata.driver.DriverConfig;
 import com.dlink.model.*;
 import com.dlink.service.DataBaseService;
+import org.apache.commons.lang3.StringUtils;
 import org.springframework.stereotype.Service;

 import java.time.LocalDateTime;
-import java.util.HashMap;
+import java.util.ArrayList;
 import java.util.List;

@@ -31,10 +31,10 @@ public class DataBaseServiceImpl extends SuperServiceImpl<DataBaseMapper, DataBa (formatting-only changes in this hunk)
     @Override
     public boolean checkHeartBeat(DataBase dataBase) {
         boolean isHealthy = Asserts.isEquals(CommonConstant.HEALTHY, Driver.build(dataBase.getDriverConfig()).test());
         dataBase.setStatus(isHealthy);
         dataBase.setHeartbeatTime(LocalDateTime.now());
         if (isHealthy) {
             dataBase.setHealthTime(LocalDateTime.now());
         }
         return isHealthy;

@@ -42,21 +42,21 @@ (formatting-only changes in this hunk)
     @Override
     public boolean saveOrUpdateDataBase(DataBase dataBase) {
         if (Asserts.isNull(dataBase)) {
             return false;
         }
         if (Asserts.isNull(dataBase.getId())) {
             checkHeartBeat(dataBase);
             return save(dataBase);
         } else {
             DataBase dataBaseInfo = getById(dataBase.getId());
             if (Asserts.isNull(dataBase.getUrl())) {
                 dataBase.setUrl(dataBaseInfo.getUrl());
             }
             if (Asserts.isNull(dataBase.getUsername())) {
                 dataBase.setUsername(dataBaseInfo.getUsername());
             }
             if (Asserts.isNull(dataBase.getPassword())) {
                 dataBase.setPassword(dataBaseInfo.getPassword());
             }
             checkHeartBeat(dataBase);

@@ -66,14 +66,14 @@
     @Override
     public List<DataBase> listEnabledAll() {
         return this.list(new QueryWrapper<DataBase>().eq("enabled", 1));
     }

     @Override
     public List<Schema> getSchemasAndTables(Integer id) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<Schema> schemasAndTables = driver.getSchemasAndTables();
         driver.close();
         return schemasAndTables;

@@ -82,8 +82,8 @@
     @Override
     public List<Column> listColumns(Integer id, String schemaName, String tableName) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<Column> columns = driver.listColumns(schemaName, tableName);
         driver.close();
         return columns;

@@ -92,18 +92,18 @@
     @Override
     public String getFlinkTableSql(Integer id, String schemaName, String tableName) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<Column> columns = driver.listColumns(schemaName, tableName);
         Table table = Table.build(tableName, schemaName, columns);
-        return table.getFlinkTableSql(dataBase.getName(), driver.getFlinkColumnTypeConversion(), dataBase.getFlinkConfig());
+        return table.getFlinkTableSql(dataBase.getName(), dataBase.getFlinkConfig());
     }

     @Override
     public String getSqlSelect(Integer id, String schemaName, String tableName) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<Column> columns = driver.listColumns(schemaName, tableName);
         Table table = Table.build(tableName, schemaName, columns);
         return table.getSqlSelect(dataBase.getName());

@@ -112,8 +112,8 @@
     @Override
     public String getSqlCreate(Integer id, String schemaName, String tableName) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<Column> columns = driver.listColumns(schemaName, tableName);
         Table table = Table.build(tableName, schemaName, columns);
         return driver.getCreateTableSql(table);

@@ -122,13 +122,31 @@
     @Override
     public SqlGeneration getSqlGeneration(Integer id, String schemaName, String tableName) {
         DataBase dataBase = getById(id);
         Asserts.checkNotNull(dataBase, "该数据源不存在!");
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         Table table = driver.getTable(schemaName, tableName);
         SqlGeneration sqlGeneration = new SqlGeneration();
-        sqlGeneration.setFlinkSqlCreate(table.getFlinkTableSql(dataBase.getName(), driver.getFlinkColumnTypeConversion(), dataBase.getFlinkConfig()));
+        sqlGeneration.setFlinkSqlCreate(table.getFlinkTableSql(dataBase.getName(), dataBase.getFlinkConfig()));
         sqlGeneration.setSqlSelect(table.getSqlSelect(dataBase.getName()));
         sqlGeneration.setSqlCreate(driver.getCreateTableSql(table));
         return sqlGeneration;
     }

+    @Override
+    public List<String> listEnabledFlinkWith() {
+        List<DataBase> dataBases = listEnabledAll();
+        List<String> list = new ArrayList<>();
+        for (DataBase dataBase : dataBases) {
+            if (Asserts.isNotNullString(dataBase.getFlinkConfig())) {
+                list.add(dataBase.getName() + ":=" + dataBase.getFlinkConfig() + ";\n");
+            }
+        }
+        return list;
+    }
+
+    @Override
+    public String getEnabledFlinkWithSql() {
+        List<String> list = listEnabledFlinkWith();
+        return StringUtils.join(list, "");
+    }
 }
dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java
@@ -116,7 +116,7 @@ public class StudioServiceImpl implements StudioService {
             result.setEndTime(LocalDateTime.now());
             return result;
         }
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
         driver.close();
         result.setResult(selectResult);
@@ -174,7 +174,7 @@ public class StudioServiceImpl implements StudioService {
                 add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
             }};
         }
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         List<SqlExplainResult> sqlExplainResults = driver.explain(studioExecuteDTO.getStatement());
         driver.close();
         return sqlExplainResults;
dlink-admin/src/main/java/com/dlink/service/impl/TaskServiceImpl.java
@@ -96,7 +96,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
             result.setEndTime(LocalDateTime.now());
             return result;
         }
-        Driver driver = Driver.build(dataBase.getDriverConfig()).connect();
+        Driver driver = Driver.build(dataBase.getDriverConfig());
         JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
         driver.close();
         result.setResult(selectResult);
dlink-common/src/main/java/com/dlink/model/Column.java
@@ -24,7 +24,7 @@ public class Column implements Serializable {
     private boolean autoIncrement;
     private String defaultValue;
     private boolean isNullable;
-    private String javaType;
+    private ColumnType javaType;
     private String columnFamily;
     private Integer position;
     private Integer precision;
dlink-common/src/main/java/com/dlink/model/ColumnType.java
new file (mode 100644)

package com.dlink.model;

/**
 * ColumnType
 *
 * @author wenmo
 * @since 2022/2/17 10:59
 **/
public enum ColumnType {

    STRING("java.lang.String", "STRING"),
    JAVA_LANG_BOOLEAN("java.lang.Boolean", "BOOLEAN"),
    BOOLEAN("Boolean", "BOOLEAN NOT NULL"),
    JAVA_LANG_BYTE("java.lang.Byte", "TINYINT"),
    BYTE("byte", "TINYINT NOT NULL"),
    JAVA_LANG_SHORT("java.lang.Short", "SMALLINT"),
    SHORT("short", "SMALLINT NOT NULL"),
    INTEGER("java.lang.Integer", "INT"),
    INT("int", "INT NOT NULL"),
    JAVA_LANG_LONG("java.lang.Long", "BIGINT"),
    LONG("long", "BIGINT NOT NULL"),
    JAVA_LANG_FLOAT("java.lang.Float", "FLOAT"),
    FLOAT("float", "FLOAT NOT NULL"),
    JAVA_LANG_DOUBLE("java.lang.Double", "DOUBLE"),
    DOUBLE("double", "DOUBLE NOT NULL"),
    DATE("java.sql.Date", "DATE"),
    LOCALDATE("java.time.LocalDate", "DATE"),
    TIME("java.sql.Time", "TIME(0)"),
    LOCALTIME("java.time.LocalTime", "TIME(9)"),
    TIMESTAMP("java.sql.Timestamp", "TIMESTAMP(9)"),
    LOCALDATETIME("java.time.LocalDateTime", "TIMESTAMP(9)"),
    OFFSETDATETIME("java.time.OffsetDateTime", "TIMESTAMP(9) WITH TIME ZONE"),
    INSTANT("java.time.Instant", "TIMESTAMP_LTZ(9)"),
    DURATION("java.time.Duration", "INVERVAL SECOND(9)"),
    PERIOD("java.time.Period", "INTERVAL YEAR(4) TO MONTH"),
    DECIMAL("java.math.BigDecimal", "DECIMAL"),
    BYTES("byte[]", "BYTES"),
    T("T[]", "ARRAY<T>"),
    MAP("java.util.Map<K, V>", "MAP<K, V>");

    private String javaType;
    private String flinkType;

    ColumnType(String javaType, String flinkType) {
        this.javaType = javaType;
        this.flinkType = flinkType;
    }

    public String getJavaType() {
        return javaType;
    }

    public String getFlinkType() {
        return flinkType;
    }
}
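Each constant pairs a Java type name with a Flink SQL type, so a caller can go from a detected Java type to DDL text without a per-driver lookup map. A small illustration using values taken from the enum above:

    ColumnType ct = ColumnType.LOCALDATETIME;
    ct.getJavaType();  // "java.time.LocalDateTime"
    ct.getFlinkType(); // "TIMESTAMP(9)"

    // Nullable and NOT NULL variants are separate constants:
    ColumnType.INTEGER.getFlinkType(); // "INT"
    ColumnType.INT.getFlinkType();     // "INT NOT NULL"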
dlink-common/src/main/java/com/dlink/model/Table.java
@@ -71,13 +71,12 @@ public class Table implements Serializable, Comparable<Table> {
         return tableWithSql;
     }

-    public String getFlinkTableSql(String catalogName, Map<String, String> typeConversion, String flinkConfig) {
+    public String getFlinkTableSql(String catalogName, String flinkConfig) {
         StringBuilder sb = new StringBuilder("CREATE TABLE ");
         sb.append(catalogName + "." + schema + "." + name + " (\n");
         List<String> pks = new ArrayList<>();
         for (int i = 0; i < columns.size(); i++) {
-            String type = typeConversion.containsKey(columns.get(i).getType()) ? typeConversion.get(columns.get(i).getType()) : "STRING";
+            String type = columns.get(i).getJavaType().getFlinkType();
             sb.append("    ");
             if (i > 0) {
                 sb.append(",");
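Because the Flink type now comes from each Column's ColumnType, the driver-supplied conversion map is no longer needed. A rough sketch of the new type resolution for one column (the column values are hypothetical, only the getters and setters shown appear in this commit):

    Column id = new Column();
    id.setName("id");
    id.setJavaType(ColumnType.INTEGER);                   // normally set by the driver's ITypeConvert
    String flinkType = id.getJavaType().getFlinkType();   // "INT", used in the CREATE TABLE body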
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/convert/ITypeConvert.java
 package com.dlink.metadata.convert;

-import com.dlink.metadata.rules.IColumnType;
 import com.dlink.model.Column;
+import com.dlink.model.ColumnType;

 import java.sql.ResultSet;
 import java.sql.SQLException;

@@ -14,17 +14,13 @@ import java.sql.SQLException;
 **/
 public interface ITypeConvert {

-    default IColumnType convert(Column column) {
-        return convert(column.getType());
-    }
-
     default String convertToDB(Column column) {
         return convertToDB(column.getJavaType());
     }

-    IColumnType convert(String columnType);
+    ColumnType convert(Column column);

-    String convertToDB(String columnType);
+    String convertToDB(ColumnType columnType);

     default Object convertValue(ResultSet results, String columnName, String javaType) throws SQLException {
         switch (javaType.toLowerCase()) {
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/AbstractDriver.java
@@ -17,8 +17,6 @@ import java.util.stream.Collectors;
  */
 public abstract class AbstractDriver implements Driver {

-    // public Logger logger = LoggerFactory.getLogger(this.getClass());
-
     protected DriverConfig config;

     public abstract IDBQuery getDBQuery();

@@ -34,6 +32,10 @@ public abstract class AbstractDriver implements Driver {
         return this;
     }

+    public boolean isHealth() {
+        return false;
+    }
+
     public List<Schema> getSchemasAndTables() {
         return listSchemas().stream().peek(schema -> schema.setTables(listTables(schema.getName()))).sorted().collect(Collectors.toList());
     }
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/AbstractJdbcDriver.java
@@ -64,6 +64,19 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
         return this;
     }

+    @Override
+    public boolean isHealth() {
+        try {
+            if (Asserts.isNotNull(conn)) {
+                return !conn.isClosed();
+            }
+            return false;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
+
     @Override
     public void close() {
         try {

@@ -165,7 +178,6 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
             field.setKeyFlag(Asserts.isNotNullString(key) && Asserts.isEqualsIgnoreCase("PRI", key));
             field.setName(columnName);
             field.setType(results.getString(dbQuery.columnType()));
-            field.setJavaType(getTypeConvert().convert(field.getType()).getType());
             String columnComment = results.getString(dbQuery.columnComment()).replaceAll("\"|'", "");
             field.setComment(columnComment);

@@ -177,6 +189,7 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
             field.setPrecision(results.getInt(dbQuery.precision()));
             field.setScale(results.getInt(dbQuery.scale()));
             field.setAutoIncrement(Asserts.isEqualsIgnoreCase(results.getString(dbQuery.autoIncrement()), "auto_increment"));
+            field.setJavaType(getTypeConvert().convert(field));
             columns.add(field);
         }
     } catch (SQLException e) {

@@ -262,11 +275,10 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
     @Override
     public boolean execute(String sql) throws Exception {
         Asserts.checkNullString(sql, "Sql 语句为空");
-        boolean res = false;
         try (Statement statement = conn.createStatement()) {
-            res = statement.execute(sql);
+            statement.execute(sql);
         }
-        return res;
+        return true;
     }

@@ -305,7 +317,9 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
             Column column = new Column();
             column.setName(metaData.getColumnLabel(i));
             column.setType(metaData.getColumnTypeName(i));
-            column.setJavaType(getTypeConvert().convert(metaData.getColumnTypeName(i)).getType());
             column.setAutoIncrement(metaData.isAutoIncrement(i));
+            column.setNullable(metaData.isNullable(i) == 0 ? false : true);
+            column.setJavaType(getTypeConvert().convert(column));
             columns.add(column);
         }
         result.setColumns(columnNameList);

@@ -351,9 +365,10 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
             } else {
                 try {
-                    resList.add(execute(item.toString()));
+                    execute(item.toString());
+                    resList.add(1);
                 } catch (Exception e) {
-                    resList.add(false);
+                    resList.add(0);
                     result.setStatusList(resList);
                     result.error(LogUtil.getError(e));
                     return result;
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/Driver.java
@@ -9,7 +9,6 @@ import com.dlink.model.Table;
 import com.dlink.result.SqlExplainResult;
 import sun.misc.Service;

 import java.sql.SQLException;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

@@ -36,11 +35,20 @@ public interface Driver {
     }

     static Driver build(DriverConfig config) {
+        String key = config.getName();
+        if (DriverPool.exist(key)) {
+            Driver driver = DriverPool.get(key);
+            if (driver.isHealth()) {
+                return driver;
+            }
+        }
         Optional<Driver> optionalDriver = Driver.get(config);
         if (!optionalDriver.isPresent()) {
-            throw new MetaDataException("不支持数据源类型【" + config.getType() + "】");
+            throw new MetaDataException("不支持数据源类型【" + config.getType() + "】,请在 lib 下添加扩展依赖");
         }
-        return optionalDriver.get();
+        Driver driver = optionalDriver.get().connect();
+        DriverPool.push(key, driver);
+        return driver;
     }

     Driver setDriverConfig(DriverConfig config);

@@ -53,6 +61,8 @@ public interface Driver {
     String test();

+    boolean isHealth();
+
     Driver connect();

     void close();

@@ -101,5 +111,5 @@ public interface Driver { (formatting-only change)
     List<SqlExplainResult> explain(String sql);

     Map<String, String> getFlinkColumnTypeConversion();
 }
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/DriverConfig.java
@@ -14,6 +14,7 @@ import lombok.Setter;
 @Setter
 public class DriverConfig {

+    private String name;
     private String type;
     private String driverClassName;
     private String ip;

@@ -25,7 +26,8 @@ public class DriverConfig {
     public DriverConfig() {
     }

-    public DriverConfig(String type, String url, String username, String password) {
+    public DriverConfig(String name, String type, String url, String username, String password) {
+        this.name = name;
         this.type = type;
         this.url = url;
         this.username = username;
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/DriverPool.java
new file (mode 100644)

package com.dlink.metadata.driver;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * DriverPool
 *
 * @author wenmo
 * @since 2022/2/17 15:29
 **/
public class DriverPool {

    private static volatile Map<String, Driver> driverMap = new ConcurrentHashMap<>();

    public static boolean exist(String key) {
        if (driverMap.containsKey(key)) {
            return true;
        }
        return false;
    }

    public static Integer push(String key, Driver gainer) {
        driverMap.put(key, gainer);
        return driverMap.size();
    }

    public static Integer remove(String key) {
        driverMap.remove(key);
        return driverMap.size();
    }

    public static Driver get(String key) {
        return driverMap.get(key);
    }
}
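Combined with the reworked Driver.build above, the pool lets repeated lookups for the same data source reuse one connected Driver instead of reconnecting every time. A minimal usage sketch with placeholder connection values:

    DriverConfig config = new DriverConfig("my_mysql", "Mysql",
            "jdbc:mysql://127.0.0.1:3306/test", "root", "root");

    Driver first = Driver.build(config);  // connects and is pushed under the key "my_mysql"
    Driver second = Driver.build(config); // returns the pooled instance while isHealth() is true

    DriverPool.remove("my_mysql");        // evict the entry by hand if ever needed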
dlink-metadata/dlink-metadata-clickhouse/src/main/java/com/dlink/metadata/convert/ClickHouseTypeConvert.java
The stubbed String-based methods (which returned null) are replaced with a full mapping onto the new ColumnType enum; the import of com.dlink.metadata.rules.IColumnType is replaced by com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType.

@@ -9,13 +11,75 @@
public class ClickHouseTypeConvert implements ITypeConvert {
    // Use mysql now,and welcome to fix it.
    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("tinyint")) {
            return ColumnType.BYTE;
        } else if (t.contains("smallint") || t.contains("tinyint unsigned")) {
            return ColumnType.SHORT;
        } else if (t.contains("bigint unsigned") || t.contains("numeric") || t.contains("decimal")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("bigint") || t.contains("int unsigned")) {
            return ColumnType.LONG;
        } else if (t.contains("float")) {
            return ColumnType.FLOAT;
        } else if (t.contains("double")) {
            return ColumnType.DOUBLE;
        } else if (t.contains("boolean") || t.contains("tinyint(1)")) {
            return ColumnType.BOOLEAN;
        } else if (t.contains("datetime")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("date")) {
            return ColumnType.DATE;
        } else if (t.contains("time")) {
            return ColumnType.TIME;
        } else if (t.contains("char") || t.contains("text")) {
            return ColumnType.STRING;
        } else if (t.contains("binary") || t.contains("blob")) {
            return ColumnType.BYTES;
        } else if (t.contains("int") || t.contains("mediumint") || t.contains("smallint unsigned")) {
            return ColumnType.INTEGER;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING: return "varchar";
            case BYTE: return "tinyint";
            case SHORT: return "smallint";
            case DECIMAL: return "decimal";
            case LONG: return "bigint";
            case FLOAT: return "float";
            case DOUBLE: return "double";
            case BOOLEAN: return "boolean";
            case TIMESTAMP: return "datetime";
            case DATE: return "date";
            case TIME: return "time";
            case BYTES: return "binary";
            case INTEGER: return "int";
            default: return "varchar";
        }
    }
}
dlink-metadata/dlink-metadata-doris/src/main/java/com/dlink/metadata/convert/DorisTypeConvert.java
The imports move from com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and the String-based convert/convertToDB pair is rewritten against the ColumnType enum, now taking nullability into account:

public class DorisTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("char")) {
            return ColumnType.STRING;
        } else if (t.contains("boolean")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_BOOLEAN; }
            return ColumnType.BOOLEAN;
        } else if (t.contains("tinyint")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_BYTE; }
            return ColumnType.BYTE;
        } else if (t.contains("smallint")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_SHORT; }
            return ColumnType.SHORT;
        } else if (t.contains("bigint")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_LONG; }
            return ColumnType.LONG;
        } else if (t.contains("largeint")) {
            return ColumnType.STRING;
        } else if (t.contains("int")) {
            if (column.isNullable()) { return ColumnType.INTEGER; }
            return ColumnType.INT;
        } else if (t.contains("float")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_FLOAT; }
            return ColumnType.FLOAT;
        } else if (t.contains("double")) {
            if (column.isNullable()) { return ColumnType.JAVA_LANG_DOUBLE; }
            return ColumnType.DOUBLE;
        } else if (t.contains("date")) {
            return ColumnType.STRING;
        } else if (t.contains("datetime")) {
            return ColumnType.STRING;
        } else if (t.contains("decimal")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("time")) {
            return ColumnType.DOUBLE;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING: return "varchar";
            case BOOLEAN:
            case JAVA_LANG_BOOLEAN: return "boolean";
            case BYTE:
            case JAVA_LANG_BYTE: return "tinyint";
            case SHORT:
            case JAVA_LANG_SHORT: return "smallint";
            case LONG:
            case JAVA_LANG_LONG: return "bigint";
            case FLOAT:
            case JAVA_LANG_FLOAT: return "float";
            case DOUBLE:
            case JAVA_LANG_DOUBLE: return "double";
            case DECIMAL: return "decimal";
            case INT:
            case INTEGER: return "int";
            default: return "varchar";
        }
    }
}

The previous convert(String) branches (char, bigint, tinyint(1), int, text, bit, decimal, clob, blob, binary, float, double, json/enum, date/time/year mapped to DbColumnType values) and the old convertToDB(String) string-case switch are removed.
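The Doris converter now looks at nullability: nullable columns map to the boxed, nullable variants and NOT NULL columns to the primitive ones. A small illustration with a made-up column (only setType/setNullable and convert come from this commit):

    DorisTypeConvert typeConvert = new DorisTypeConvert();

    Column score = new Column();
    score.setType("int");
    score.setNullable(true);
    typeConvert.convert(score);  // ColumnType.INTEGER  -> Flink "INT"

    score.setNullable(false);
    typeConvert.convert(score);  // ColumnType.INT      -> Flink "INT NOT NULL"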
dlink-metadata/dlink-metadata-doris/src/main/java/com/dlink/metadata/driver/DorisDriver.java
@@ -8,7 +8,7 @@ import com.dlink.metadata.query.IDBQuery;
 import java.util.HashMap;
 import java.util.Map;

-public class DorisDriver extends AbstractJdbcDriver{
+public class DorisDriver extends AbstractJdbcDriver {

     @Override
     public IDBQuery getDBQuery() {
         return new DorisQuery();

@@ -35,7 +35,16 @@ public class DorisDriver extends AbstractJdbcDriver{
     }

     @Override
-    public Map<String, String> getFlinkColumnTypeConversion(){
-        return new HashMap<>();
+    public Map<String, String> getFlinkColumnTypeConversion() {
+        HashMap<String, String> map = new HashMap<>();
+        map.put("BOOLEAN", "BOOLEAN");
+        map.put("TINYINT", "TINYINT");
+        map.put("SMALLINT", "SMALLINT");
+        map.put("INT", "INT");
+        map.put("VARCHAR", "STRING");
+        map.put("TEXY", "STRING");
+        map.put("INT", "INT");
+        map.put("DATETIME", "TIMESTAMP");
+        return map;
     }
 }
dlink-metadata/dlink-metadata-doris/src/test/java/com/dlink/metadata/DorisTest.java
@@ -15,8 +15,9 @@ import java.util.List;
 public class DorisTest {

     private Driver driver;

     @Before
-    public void init(){
+    public void init() {
         DriverConfig config = new DriverConfig();
         config.setType("Doris");
         config.setIp("192.168.68.133");

@@ -25,8 +26,8 @@ public class DorisTest {
         config.setPassword("123456");
         config.setUrl("jdbc:mysql://192.168.68.133:9030/test");
         try {
-            driver = Driver.build(config).connect();
+            driver = Driver.build(config);
         } catch (Exception e) {
             System.err.println("连接创建失败");
             e.printStackTrace();
         }

@@ -46,7 +47,7 @@ public class DorisTest {
     }

     @Test
-    public void testSchema(){
+    public void testSchema() {
         //schema && table
         List<Schema> schemasAndTables = driver.getSchemasAndTables();
         for (Schema schemasAndTable : schemasAndTables) {

@@ -58,15 +59,16 @@ public class DorisTest {
     }

     @Test
-    public void testColumns(){
+    public void testColumns() {
         // columns
         List<Column> columns = driver.listColumns("test", "scoreinfo");
         for (Column column : columns) {
-            System.out.println(column.getName() + " " + column.getType() + column.getComment() );
+            System.out.println(column.getName() + " " + column.getType() + column.getComment());
         }
     }

     @Test
-    public void query(){
+    public void query() {
         JdbcSelectResult selectResult = driver.query("select * from scoreinfo ", 10);
         List<LinkedHashMap<String, Object>> rowData = selectResult.getRowData();
         for (LinkedHashMap<String, Object> rowDatum : rowData) {
dlink-metadata/dlink-metadata-mysql/src/main/java/com/dlink/metadata/convert/MySqlTypeConvert.java
The imports move from com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and both methods are rewritten against the ColumnType enum:

@@ -9,60 +10,74 @@
public class MySqlTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("tinyint")) {
            return ColumnType.BYTE;
        } else if (t.contains("smallint") || t.contains("tinyint unsigned")) {
            return ColumnType.SHORT;
        } else if (t.contains("bigint unsigned") || t.contains("numeric") || t.contains("decimal")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("bigint") || t.contains("int unsigned")) {
            return ColumnType.LONG;
        } else if (t.contains("float")) {
            return ColumnType.FLOAT;
        } else if (t.contains("double")) {
            return ColumnType.DOUBLE;
        } else if (t.contains("boolean") || t.contains("tinyint(1)")) {
            return ColumnType.BOOLEAN;
        } else if (t.contains("datetime")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("date")) {
            return ColumnType.DATE;
        } else if (t.contains("time")) {
            return ColumnType.TIME;
        } else if (t.contains("char") || t.contains("text")) {
            return ColumnType.STRING;
        } else if (t.contains("binary") || t.contains("blob")) {
            return ColumnType.BYTES;
        } else if (t.contains("int") || t.contains("mediumint") || t.contains("smallint unsigned")) {
            return ColumnType.INTEGER;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING: return "varchar";
            case BYTE: return "tinyint";
            case SHORT: return "smallint";
            case DECIMAL: return "decimal";
            case LONG: return "bigint";
            case FLOAT: return "float";
            case DOUBLE: return "double";
            case BOOLEAN: return "boolean";
            case TIMESTAMP: return "datetime";
            case DATE: return "date";
            case TIME: return "time";
            case BYTES: return "binary";
            case INTEGER: return "int";
            default: return "varchar";
        }
    }
}

The previous convert(String) branches (char, bigint, tinyint(1), int, text, bit, decimal, clob, blob, binary, float, double, json/enum, date/time/year mapped to DbColumnType values) and the old convertToDB(String) string-case switch are removed.
dlink-metadata/dlink-metadata-mysql/src/main/java/com/dlink/metadata/driver/MySqlDriver.java
@@ -42,12 +42,12 @@ public class MySqlDriver extends AbstractJdbcDriver {
     }

     @Override
     public Map<String, String> getFlinkColumnTypeConversion() {
         HashMap<String, String> map = new HashMap<>();
-        map.put("varchar", "STRING");
-        map.put("text", "STRING");
-        map.put("int", "INT");
-        map.put("datetime", "TIMESTAMP");
+        map.put("VARCHAR", "STRING");
+        map.put("TEXY", "STRING");
+        map.put("INT", "INT");
+        map.put("DATETIME", "TIMESTAMP");
         return map;
     }
 }
dlink-metadata/dlink-metadata-mysql/src/test/java/com/dlink/metadata/MysqlTest.java
@@ -18,49 +18,50 @@ import java.util.List;
 public class MysqlTest {

     private static final String IP = "127.0.0.1";

-    public Driver getDriver(){
+    public Driver getDriver() {
         DriverConfig config = new DriverConfig();
         config.setType("Mysql");
         config.setIp(IP);
         config.setPort(3306);
         config.setUsername("dca");
         config.setPassword("dca");
         config.setUrl("jdbc:mysql://" + IP + ":3306/dca?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&autoReconnect=true");
-        return Driver.build(config).connect();
+        return Driver.build(config);
     }

     @Test
-    public void connectTest(){
+    public void connectTest() {
         DriverConfig config = new DriverConfig();
         config.setType("Mysql");
         config.setIp(IP);
         config.setPort(3306);
         config.setUsername("dca");
         config.setPassword("dca");
         config.setUrl("jdbc:mysql://" + IP + ":3306/dca?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&autoReconnect=true");
         String test = Driver.build(config).test();
         System.out.println(test);
         System.out.println("end...");
     }

     @Test
-    public void schemaTest(){
+    public void schemaTest() {
         Driver driver = getDriver();
         List<Schema> schemasAndTables = driver.getSchemasAndTables();
         System.out.println("end...");
     }

     @Test
-    public void columnTest(){
+    public void columnTest() {
         Driver driver = getDriver();
         List<Column> columns = driver.listColumns("dca", "MENU");
         System.out.println("end...");
     }

     @Test
-    public void queryTest(){
+    public void queryTest() {
         Driver driver = getDriver();
         JdbcSelectResult query = driver.query("select * from MENU", 10);
         System.out.println("end...");
     }
 }
dlink-metadata/dlink-metadata-oracle/src/main/java/com/dlink/metadata/convert/OracleTypeConvert.java
The imports move from com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and both methods are rewritten against the ColumnType enum:

@@ -11,48 +12,53 @@
public class OracleTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("char")) {
            return ColumnType.STRING;
        } else if (t.contains("date")) {
            return ColumnType.DATE;
        } else if (t.contains("timestamp")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("number")) {
            if (t.matches("number\\(+\\d\\)")) {
                return ColumnType.INTEGER;
            } else if (t.matches("number\\(+\\d{2}+\\)")) {
                return ColumnType.LONG;
            }
            return ColumnType.DECIMAL;
        } else if (t.contains("float")) {
            return ColumnType.FLOAT;
        } else if (t.contains("clob")) {
            return ColumnType.STRING;
        } else if (t.contains("blob")) {
            return ColumnType.BYTES;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING: return "varchar";
            case DATE: return "date";
            case TIMESTAMP: return "timestamp";
            case INTEGER:
            case LONG:
            case DECIMAL: return "number";
            case FLOAT: return "float";
            case BYTES: return "blob";
            default: return "varchar";
        }
    }
}

The previous convert(String) branches (char, date/timestamp, number, float, clob, blob, binary, raw mapped to DbColumnType values) and the old convertToDB(String) string-case switch are removed.
dlink-metadata/dlink-metadata-oracle/src/main/java/com/dlink/metadata/driver/OracleDriver.java
@@ -56,7 +56,7 @@ public class OracleDriver extends AbstractJdbcDriver {
             if (i > 0) {
                 sb.append(",");
             }
-            sb.append(columns.get(i).getName() + " " + getTypeConvert().convertToDB(columns.get(i).getType()));
+            sb.append(columns.get(i).getName() + " " + getTypeConvert().convertToDB(columns.get(i)));
             if (columns.get(i).isNullable()) {
                 sb.append(" NOT NULL");
             }
dlink-metadata/dlink-metadata-oracle/src/test/java/com/dlink/metadata/OracleTest.java
@@ -19,47 +19,47 @@ public class OracleTest {

     private static final String IP = "127.0.0.1";

-    public Driver getDriver(){
+    public Driver getDriver() {
         DriverConfig config = new DriverConfig();
         config.setType("Oracle");
         config.setIp(IP);
         config.setPort(1521);
         config.setUsername("cdr");
         config.setPassword("cdr");
         config.setUrl("jdbc:oracle:thin:@" + IP + ":1521:orcl");
-        return Driver.build(config).connect();
+        return Driver.build(config);
     }

     @Test
-    public void connectTest(){
+    public void connectTest() {
         DriverConfig config = new DriverConfig();
         config.setType("Oracle");
         config.setIp(IP);
         config.setPort(1521);
         config.setUsername("cdr");
         config.setPassword("cdr");
         config.setUrl("jdbc:oracle:thin:@" + IP + ":1521:orcl");
         String test = Driver.build(config).test();
         System.out.println(test);
         System.out.println("end...");
     }

     @Test
-    public void schemaTest(){
+    public void schemaTest() {
         Driver driver = getDriver();
         List<Schema> schemasAndTables = driver.getSchemasAndTables();
         System.out.println("end...");
     }

     @Test
-    public void columnTest(){
+    public void columnTest() {
         Driver driver = getDriver();
         List<Column> columns = driver.listColumns("CDR", "PAT_INFO");
         System.out.println("end...");
     }

     @Test
-    public void queryTest(){
+    public void queryTest() {
         Driver driver = getDriver();
         JdbcSelectResult selectResult = driver.query("select * from CDR.PAT_INFO where ROWNUM<10", 10);
         System.out.println("end...");
dlink-metadata/dlink-metadata-phoenix/src/main/java/com/dlink/metadata/convert/PhoenixTypeConvert.java
The imports move from com.dlink.metadata.constant.PhoenixEnum and com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and both methods are rewritten against the ColumnType enum:

public class PhoenixTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("char") || t.contains("varchar") || t.contains("text")
                || t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
                || t.contains("uniqueidentifier") || t.contains("sql_variant")) {
            return ColumnType.STRING;
        } else if (t.contains("bigint")) {
            return ColumnType.LONG;
        } else if (t.contains("int") || t.contains("tinyint") || t.contains("smallint") || t.contains("integer")) {
            return ColumnType.INTEGER;
        } else if (t.contains("float")) {
            return ColumnType.FLOAT;
        } else if (t.contains("decimal") || t.contains("money") || t.contains("smallmoney") || t.contains("numeric")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("double")) {
            return ColumnType.DOUBLE;
        } else if (t.contains("boolean")) {
            return ColumnType.BOOLEAN;
        } else if (t.contains("smalldatetime") || t.contains("datetime")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("timestamp") || t.contains("binary") || t.contains("varbinary") || t.contains("image")) {
            return ColumnType.BYTES;
        } else if (t.contains("time")) {
            return ColumnType.TIME;
        } else if (t.contains("date")) {
            return ColumnType.DATE;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case INTEGER: return "integer";
            case DOUBLE: return "double";
            case LONG: return "bigint";
            case FLOAT: return "float";
            case DECIMAL: return "decimal";
            case BOOLEAN: return "boolean";
            case TIME: return "time";
            case DATE: return "date";
            case TIMESTAMP: return "timestamp";
            case STRING: return "varchar";
            case BYTES: return "binary";
            default: return "varchar";
        }
    }
}

The old convertToDB(String), which resolved numeric type codes through PhoenixEnum.getDataTypeEnum and otherwise fell back to a large UNSIGNED_* / VARCHAR case table, is removed.
dlink-metadata/dlink-metadata-phoenix/src/main/java/com/dlink/metadata/driver/PhoenixDriver.java
@@ -51,7 +51,7 @@ public class PhoenixDriver extends AbstractJdbcDriver {
         PhoenixTypeConvert phoenixTypeConvert = new PhoenixTypeConvert();
         if (columns != null) {
             for (Column column : columns) {
-                sql.append(", \"" + column.getColumnFamily() + "\".\"" + column.getName() + "\" " + phoenixTypeConvert.convertToDB(column.getType()));
+                sql.append(", \"" + column.getColumnFamily() + "\".\"" + column.getName() + "\" " + phoenixTypeConvert.convertToDB(column));
             }
         }
         sql.append(" ) ");
dlink-metadata/dlink-metadata-phoenix/src/test/java/com/dlink/metadata/PhoenixTest.java
@@ -8,6 +8,7 @@ import com.dlink.model.Schema;
 import com.dlink.model.Table;
 import org.junit.Before;
 import org.junit.Test;
+
 import java.util.LinkedHashMap;
 import java.util.List;

@@ -21,7 +22,7 @@ public class PhoenixTest {
         config.setType("Phoenix");
         config.setUrl("jdbc:phoenix:xxx");
         try {
-            driver = Driver.build(config).connect();
+            driver = Driver.build(config);
         } catch (Exception e) {
             System.err.println("连接创建失败:" + e.getMessage());
         }

@@ -39,6 +40,7 @@ public class PhoenixTest {
             }
         }
     }
+
     @Test
     public void testListTables() {
         List<Table> tables = driver.listTables("");
dlink-metadata/dlink-metadata-postgresql/src/main/java/com/dlink/metadata/convert/PostgreSqlTypeConvert.java
The imports move from com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and both methods are rewritten against the ColumnType enum:

@@ -11,60 +12,70 @@
public class PostgreSqlTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("smallint") || t.contains("int2") || t.contains("smallserial") || t.contains("serial2")) {
            return ColumnType.SHORT;
        } else if (t.contains("integer") || t.contains("serial")) {
            return ColumnType.INTEGER;
        } else if (t.contains("bigint") || t.contains("bigserial")) {
            return ColumnType.LONG;
        } else if (t.contains("real") || t.contains("float4")) {
            return ColumnType.FLOAT;
        } else if (t.contains("float8") || t.contains("double precision")) {
            return ColumnType.DOUBLE;
        } else if (t.contains("numeric") || t.contains("decimal")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("boolean")) {
            return ColumnType.BOOLEAN;
        } else if (t.contains("timestamp")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("date")) {
            return ColumnType.DATE;
        } else if (t.contains("time")) {
            return ColumnType.TIME;
        } else if (t.contains("char") || t.contains("text")) {
            return ColumnType.STRING;
        } else if (t.contains("bytea")) {
            return ColumnType.BYTES;
        } else if (t.contains("array")) {
            return ColumnType.T;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case SHORT: return "int2";
            case INTEGER: return "integer";
            case LONG: return "bigint";
            case FLOAT: return "float4";
            case DOUBLE: return "float8";
            case DECIMAL: return "decimal";
            case BOOLEAN: return "boolean";
            case TIMESTAMP: return "timestamp";
            case DATE: return "date";
            case TIME: return "time";
            case BYTES: return "bytea";
            case T: return "array";
            default: return "varchar";
        }
    }
}

The previous convert(String) branches (char, bigint, int, date/time, text, bit, decimal, clob, blob, float, double, json/enum, boolean, numeric mapped to DbColumnType values) and the old convertToDB(String) string-case switch are removed.
dlink-metadata/dlink-metadata-sqlserver/src/main/java/com/dlink/metadata/convert/SqlServerTypeConvert.java
The imports move from com.dlink.metadata.rules.DbColumnType / IColumnType to com.dlink.assertion.Asserts, com.dlink.model.Column and com.dlink.model.ColumnType, and both methods are rewritten against the ColumnType enum:

/**
 * @operate
 * @date 2022/1/26 14:23
 * @return
 */
public class SqlServerTypeConvert implements ITypeConvert {

    @Override
    public ColumnType convert(Column column) {
        if (Asserts.isNull(column)) {
            return ColumnType.STRING;
        }
        String t = column.getType().toLowerCase();
        if (t.contains("char") || t.contains("varchar") || t.contains("text")
                || t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
                || t.contains("uniqueidentifier") || t.contains("sql_variant")) {
            return ColumnType.STRING;
        } else if (t.contains("bigint")) {
            return ColumnType.LONG;
        } else if (t.contains("bit")) {
            return ColumnType.BOOLEAN;
        } else if (t.contains("int") || t.contains("tinyint") || t.contains("smallint")) {
            return ColumnType.INTEGER;
        } else if (t.contains("float")) {
            return ColumnType.DOUBLE;
        } else if (t.contains("decimal") || t.contains("money") || t.contains("smallmoney") || t.contains("numeric")) {
            return ColumnType.DECIMAL;
        } else if (t.contains("real")) {
            return ColumnType.FLOAT;
        } else if (t.contains("smalldatetime") || t.contains("datetime")) {
            return ColumnType.TIMESTAMP;
        } else if (t.contains("timestamp") || t.contains("binary") || t.contains("varbinary") || t.contains("image")) {
            return ColumnType.BYTES;
        }
        return ColumnType.STRING;
    }

    @Override
    public String convertToDB(ColumnType columnType) {
        switch (columnType) {
            case STRING: return "varchar";
            case BOOLEAN: return "bit";
            case LONG: return "bigint";
            case INTEGER: return "int";
            case DOUBLE: return "double";
            case FLOAT: return "float";
            case TIMESTAMP: return "datetime(0)";
            default: return "varchar";
        }
    }
}

The previous convert(String) branches mapped to DbColumnType values and the old convertToDB(String) string-case switch are removed.
dlink-metadata/dlink-metadata-sqlserver/src/test/java/com/dlink/metadata/SqlServerTest.java
@@ -27,7 +27,7 @@ public class SqlServerTest {
         config.setPassword("OcP2020123");
         config.setUrl("jdbc:sqlserver://192.168.68.133:1433;DatabaseName=test");
         try {
-            driver = Driver.build(config).connect();
+            driver = Driver.build(config);
         } catch (Exception e) {
             System.err.println("连接创建失败:" + e.getMessage());
         }
pom.xml
@@ -47,6 +47,7 @@
         <hibernate-validator.version>6.2.0.Final</hibernate-validator.version>
         <!--<flyway.version>6.4.4</flyway.version>-->
         <sa-token.version>1.28.0</sa-token.version>
+        <commons.version>3.12.0</commons.version>
         <maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
         <maven-assembly-plugin.version>3.2.0</maven-assembly-plugin.version>
         <maven.resource.version>3.2.0</maven.resource.version>

@@ -102,6 +103,11 @@
             <artifactId>guava</artifactId>
             <version>${guava.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>${commons.version}</version>
+        </dependency>
         <!--<dependency>
             <groupId>org.apache.logging.log4j</groupId>
             <artifactId>log4j-api</artifactId>