zhaowei / dlink · Commits

Commit bfdac7ca
Authored Feb 17, 2022 by wenmo
Data source fragment configuration and fragment query syntax (数据源fragment配置和查询fragment语法)
Parent 9aac98c6
Showing 21 changed files with 370 additions and 158 deletions (+370 −158)
+3  −5   dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java
+2  −4   dlink-admin/src/main/java/com/dlink/dto/APIExplainSqlDTO.java
+10 −0   dlink-admin/src/main/java/com/dlink/dto/AbstractStatementDTO.java
+8  −9   dlink-admin/src/main/java/com/dlink/dto/StudioExecuteDTO.java
+50 −46  dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java
+6  −0   dlink-admin/src/main/java/com/dlink/service/impl/TaskServiceImpl.java
+53 −44  dlink-core/src/main/java/com/dlink/job/JobManager.java
+8  −0   dlink-core/src/test/java/com/dlink/core/SqlParserTest.java
+11 −6   dlink-executor/src/main/java/com/dlink/executor/Executor.java
+17 −10  dlink-executor/src/main/java/com/dlink/executor/SqlManager.java
+9  −6   dlink-executor/src/main/java/com/dlink/interceptor/FlinkInterceptor.java
+47 −0   dlink-executor/src/main/java/com/dlink/interceptor/FlinkInterceptorResult.java (new)
+20 −0   dlink-executor/src/main/java/com/dlink/parser/ShowFragmentParser.java (new)
+4  −2   dlink-executor/src/main/java/com/dlink/parser/SingleSqlParserFactory.java
+2  −1   dlink-executor/src/main/java/com/dlink/trans/Operation.java
+9  −9   dlink-executor/src/main/java/com/dlink/trans/Operations.java
+6  −4   dlink-executor/src/main/java/com/dlink/trans/ddl/CreateAggTableOperation.java
+8  −6   dlink-executor/src/main/java/com/dlink/trans/ddl/CreateCDCSourceOperation.java
+8  −6   dlink-executor/src/main/java/com/dlink/trans/ddl/SetOperation.java
+50 −0   dlink-executor/src/main/java/com/dlink/trans/ddl/ShowFragmentOperation.java (new)
+39 −0   dlink-executor/src/main/java/com/dlink/trans/ddl/ShowFragmentsOperation.java (new)
dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java

@@ -20,7 +20,7 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class APIExecuteSqlDTO extends AbstractStatementDTO{
+public class APIExecuteSqlDTO extends AbstractStatementDTO {
     // RUN_MODE
     private String type;
     private boolean useResult = false;
@@ -28,8 +28,6 @@ public class APIExecuteSqlDTO extends AbstractStatementDTO{
     private boolean useAutoCancel = false;
     private boolean useStatementSet = false;
     private String address;
-    private boolean fragment = false;
-    // private String statement;
     private String jobName;
     private Integer maxRowNum = 100;
     private Integer checkPoint = 0;
@@ -44,8 +42,8 @@ public class APIExecuteSqlDTO extends AbstractStatementDTO{
             savePointStrategy = 3;
         }
-        return new JobConfig(type, useResult, useChangeLog, useChangeLog, false, null, true, address, jobName, fragment, useStatementSet,
-                maxRowNum, checkPoint, parallelism, savePointStrategy,
+        return new JobConfig(type, useResult, useChangeLog, useChangeLog, false, null, true, address, jobName, isFragment(), useStatementSet,
+                maxRowNum, checkPoint, parallelism, savePointStrategy,
                 savePointPath, configuration, gatewayConfig);
     }
 }
dlink-admin/src/main/java/com/dlink/dto/APIExplainSqlDTO.java

@@ -17,14 +17,12 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class APIExplainSqlDTO extends AbstractStatementDTO{
+public class APIExplainSqlDTO extends AbstractStatementDTO {
     private boolean useStatementSet = false;
-    private boolean fragment = false;
-    // private String statement;
     private Integer parallelism;
     private Map<String, String> configuration;

     public JobConfig getJobConfig() {
-        return new JobConfig("local", false, false, fragment, useStatementSet, parallelism, configuration);
+        return new JobConfig("local", false, false, isFragment(), useStatementSet, parallelism, configuration);
     }
 }
dlink-admin/src/main/java/com/dlink/dto/AbstractStatementDTO.java

@@ -10,6 +10,8 @@ public class AbstractStatementDTO {
     private String statement;
     private Integer envId;
+    private boolean fragment = false;

     public String getStatement() {
         return statement;
@@ -26,4 +28,12 @@ public class AbstractStatementDTO {
     public void setEnvId(Integer envId) {
         this.envId = envId;
     }
+
+    public boolean isFragment() {
+        return fragment;
+    }
+
+    public void setFragment(boolean fragment) {
+        this.fragment = fragment;
+    }
 }
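With the flag hoisted into the base class, every statement DTO (API, Studio, Task) shares one fragment switch instead of redeclaring the field. A minimal, self-contained sketch of the resulting contract, with Lombok and the other fields omitted:

public class FragmentFlagDemo {
    // Trimmed-down copy of AbstractStatementDTO: only the members that
    // matter for the fragment feature are kept.
    static class AbstractStatementDTO {
        private String statement;
        private boolean fragment = false; // added by this commit

        public String getStatement() { return statement; }
        public void setStatement(String statement) { this.statement = statement; }
        public boolean isFragment() { return fragment; }
        public void setFragment(boolean fragment) { this.fragment = fragment; }
    }

    public static void main(String[] args) {
        AbstractStatementDTO dto = new AbstractStatementDTO();
        dto.setStatement("select * from source_table");
        dto.setFragment(true); // opt in to fragment substitution for this statement
        System.out.println(dto.isFragment()); // prints: true
    }
}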
dlink-admin/src/main/java/com/dlink/dto/StudioExecuteDTO.java

@@ -19,7 +19,7 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class StudioExecuteDTO extends AbstractStatementDTO{
+public class StudioExecuteDTO extends AbstractStatementDTO {
     // RUN_MODE
     private String type;
     private String dialect;
@@ -34,7 +34,6 @@ public class StudioExecuteDTO extends AbstractStatementDTO{
     private Integer clusterConfigurationId;
     private Integer databaseId;
     private Integer jarId;
-    private boolean fragment;
     private String jobName;
     private Integer taskId;
     private Integer maxRowNum;
@@ -46,22 +45,22 @@ public class StudioExecuteDTO extends AbstractStatementDTO{
     private static final ObjectMapper mapper = new ObjectMapper();

     public JobConfig getJobConfig() {
-        Map<String,String> config = new HashMap<>();
+        Map<String, String> config = new HashMap<>();
         JsonNode paras = null;
-        if(Asserts.isNotNullString(configJson)) {
+        if (Asserts.isNotNullString(configJson)) {
             try {
                 paras = mapper.readTree(configJson);
             } catch (JsonProcessingException e) {
                 e.printStackTrace();
             }
             paras.forEach((JsonNode node) -> {
-                config.put(node.get("key").asText(), node.get("value").asText());
-            }
+                        config.put(node.get("key").asText(), node.get("value").asText());
+                    }
             );
         }
         return new JobConfig(
-                type, useResult, useChangeLog, useAutoCancel, useSession, session, clusterId, clusterConfigurationId, jarId, taskId, jobName, fragment,
-                statementSet, batchModel, maxRowNum, checkPoint, parallelism, savePointStrategy, savePointPath, config);
+                type, useResult, useChangeLog, useAutoCancel, useSession, session, clusterId, clusterConfigurationId, jarId, taskId, jobName, isFragment(),
+                statementSet, batchModel, maxRowNum, checkPoint, parallelism, savePointStrategy, savePointPath, config);
     }
 }
dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java

@@ -61,28 +61,32 @@ public class StudioServiceImpl implements StudioService {
     @Autowired
     private TaskService taskService;

-    private void addFlinkSQLEnv(AbstractStatementDTO statementDTO){
-        if(Asserts.isNotNull(statementDTO.getEnvId())&&statementDTO.getEnvId()!=0){
+    private void addFlinkSQLEnv(AbstractStatementDTO statementDTO) {
+        String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
+        if (statementDTO.isFragment() && Asserts.isNotNullString(flinkWithSql)) {
+            statementDTO.setStatement(flinkWithSql + "\r\n" + statementDTO.getStatement());
+        }
+        if (Asserts.isNotNull(statementDTO.getEnvId()) && statementDTO.getEnvId() != 0) {
             Task task = taskService.getTaskInfoById(statementDTO.getEnvId());
-            if(Asserts.isNotNull(task)&&Asserts.isNotNullString(task.getStatement())){
+            if (Asserts.isNotNull(task) && Asserts.isNotNullString(task.getStatement())) {
                 statementDTO.setStatement(task.getStatement() + "\r\n" + statementDTO.getStatement());
             }
         }
     }

-    private void buildSession(JobConfig config){
+    private void buildSession(JobConfig config) {
         // If you are using a shared session, configure the current jobmanager address
-        if(!config.isUseSession()) {
+        if (!config.isUseSession()) {
             config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), config.getClusterId()));
         }
     }

     @Override
     public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) {
-        if(Dialect.isSql(studioExecuteDTO.getDialect())){
+        if (Dialect.isSql(studioExecuteDTO.getDialect())) {
             return executeCommonSql(SqlDTO.build(studioExecuteDTO.getStatement(),
-                    studioExecuteDTO.getDatabaseId(),studioExecuteDTO.getMaxRowNum()));
-        }else{
+                    studioExecuteDTO.getDatabaseId(), studioExecuteDTO.getMaxRowNum()));
+        } else {
             return executeFlinkSql(studioExecuteDTO);
         }
     }
@@ -103,26 +107,26 @@ public class StudioServiceImpl implements StudioService {
         JobResult result = new JobResult();
         result.setStatement(sqlDTO.getStatement());
         result.setStartTime(LocalDateTime.now());
-        if(Asserts.isNull(sqlDTO.getDatabaseId())){
+        if (Asserts.isNull(sqlDTO.getDatabaseId())) {
             result.setSuccess(false);
             result.setError("请指定数据源");
             result.setEndTime(LocalDateTime.now());
             return result;
-        }else{
+        } else {
             DataBase dataBase = dataBaseService.getById(sqlDTO.getDatabaseId());
-            if(Asserts.isNull(dataBase)){
+            if (Asserts.isNull(dataBase)) {
                 result.setSuccess(false);
                 result.setError("数据源不存在");
                 result.setEndTime(LocalDateTime.now());
                 return result;
             }
             Driver driver = Driver.build(dataBase.getDriverConfig());
-            JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(),sqlDTO.getMaxRowNum());
+            JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
             driver.close();
             result.setResult(selectResult);
-            if(selectResult.isSuccess()){
+            if (selectResult.isSuccess()) {
                 result.setSuccess(true);
-            }else{
+            } else {
                 result.setSuccess(false);
                 result.setError(selectResult.getError());
             }
@@ -134,7 +138,7 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public IResult executeDDL(StudioDDLDTO studioDDLDTO) {
         JobConfig config = studioDDLDTO.getJobConfig();
-        if(!config.isUseSession()) {
+        if (!config.isUseSession()) {
             config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioDDLDTO.getClusterId()));
         }
         JobManager jobManager = JobManager.build(config);
@@ -143,9 +147,9 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public List<SqlExplainResult> explainSql(StudioExecuteDTO studioExecuteDTO) {
-        if(Dialect.isSql(studioExecuteDTO.getDialect())){
+        if (Dialect.isSql(studioExecuteDTO.getDialect())) {
             return explainCommonSql(studioExecuteDTO);
-        }else{
+        } else {
             return explainFlinkSql(studioExecuteDTO);
         }
     }
@@ -163,15 +167,15 @@ public class StudioServiceImpl implements StudioService {
     }

     private List<SqlExplainResult> explainCommonSql(StudioExecuteDTO studioExecuteDTO) {
-        if(Asserts.isNull(studioExecuteDTO.getDatabaseId())){
-            return new ArrayList<SqlExplainResult>(){{
-                add(SqlExplainResult.fail(studioExecuteDTO.getStatement(),"请指定数据源"));
+        if (Asserts.isNull(studioExecuteDTO.getDatabaseId())) {
+            return new ArrayList<SqlExplainResult>() {{
+                add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
             }};
-        }else{
+        } else {
             DataBase dataBase = dataBaseService.getById(studioExecuteDTO.getDatabaseId());
-            if(Asserts.isNull(dataBase)){
-                return new ArrayList<SqlExplainResult>(){{
-                    add(SqlExplainResult.fail(studioExecuteDTO.getStatement(),"数据源不存在"));
+            if (Asserts.isNull(dataBase)) {
+                return new ArrayList<SqlExplainResult>() {{
+                    add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
                 }};
             }
             Driver driver = Driver.build(dataBase.getDriverConfig());
@@ -202,12 +206,12 @@ public class StudioServiceImpl implements StudioService {
         JobManager jobManager = JobManager.buildPlanMode(config);
         String planJson = jobManager.getJobPlanJson(studioExecuteDTO.getStatement());
         ObjectMapper mapper = new ObjectMapper();
-        ObjectNode objectNode =mapper.createObjectNode();
+        ObjectNode objectNode = mapper.createObjectNode();
         try {
             objectNode = (ObjectNode) mapper.readTree(planJson);
         } catch (JsonProcessingException e) {
             e.printStackTrace();
-        }finally {
+        } finally {
             return objectNode;
         }
     }
@@ -219,14 +223,14 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public SessionInfo createSession(SessionDTO sessionDTO, String createUser) {
-        if(sessionDTO.isUseRemote()) {
+        if (sessionDTO.isUseRemote()) {
             Cluster cluster = clusterService.getById(sessionDTO.getClusterId());
             SessionConfig sessionConfig = SessionConfig.build(
                     sessionDTO.getType(), true,
                     cluster.getId(), cluster.getAlias(),
                     clusterService.buildEnvironmentAddress(true, sessionDTO.getClusterId()));
             return JobManager.createSession(sessionDTO.getSession(), sessionConfig, createUser);
-        }else{
+        } else {
             SessionConfig sessionConfig = SessionConfig.build(
                     sessionDTO.getType(), false,
                     null, null,
@@ -237,9 +241,9 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public boolean clearSession(String session) {
-        if(SessionPool.remove(session)>0){
+        if (SessionPool.remove(session) > 0) {
             return true;
-        }else{
+        } else {
             return false;
         }
     }
@@ -267,8 +271,8 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public List<JsonNode> listJobs(Integer clusterId) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
-        try{
+        Asserts.checkNotNull(cluster, "该集群不存在");
+        try {
             return FlinkAPI.build(cluster.getJobManagerHost()).listJobs();
         } catch (Exception e) {
             logger.info("查询作业时集群不存在");
@@ -277,12 +281,12 @@ public class StudioServiceImpl implements StudioService {
     }

     @Override
-    public boolean cancel(Integer clusterId,String jobId) {
+    public boolean cancel(Integer clusterId, String jobId) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
+        Asserts.checkNotNull(cluster, "该集群不存在");
         JobConfig jobConfig = new JobConfig();
         jobConfig.setAddress(cluster.getJobManagerHost());
-        if(Asserts.isNotNull(cluster.getClusterConfigurationId())){
+        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
             Map<String, Object> gatewayConfig = clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
             jobConfig.buildGatewayConfig(gatewayConfig);
         }
@@ -291,13 +295,13 @@ public class StudioServiceImpl implements StudioService {
     }

     @Override
-    public boolean savepoint(Integer clusterId,String jobId,String savePointType,String name) {
+    public boolean savepoint(Integer clusterId, String jobId, String savePointType, String name) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
+        Asserts.checkNotNull(cluster, "该集群不存在");
         JobConfig jobConfig = new JobConfig();
         jobConfig.setAddress(cluster.getJobManagerHost());
         jobConfig.setType(cluster.getType());
-        if(Asserts.isNotNull(cluster.getClusterConfigurationId())){
+        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
             Map<String, Object> gatewayConfig = clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
             jobConfig.buildGatewayConfig(gatewayConfig);
             jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
@@ -305,10 +309,10 @@ public class StudioServiceImpl implements StudioService {
         }
         JobManager jobManager = JobManager.build(jobConfig);
         jobManager.setUseGateway(true);
-        SavePointResult savePointResult = jobManager.savepoint(jobId,savePointType,null);
-        if(Asserts.isNotNull(savePointResult)){
-            for(JobInfo item : savePointResult.getJobInfos()){
-                if(Asserts.isEqualsIgnoreCase(jobId,item.getJobId())){
+        SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null);
+        if (Asserts.isNotNull(savePointResult)) {
+            for (JobInfo item : savePointResult.getJobInfos()) {
+                if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId())) {
                     Savepoints savepoints = new Savepoints();
                     savepoints.setName(name);
                     savepoints.setType(savePointType);
@@ -322,14 +326,14 @@ public class StudioServiceImpl implements StudioService {
         return false;
     }

-    private void initUDF(JobConfig config,String statement) {
-        if(!GatewayType.LOCAL.equalsValue(config.getType())){
+    private void initUDF(JobConfig config, String statement) {
+        if (!GatewayType.LOCAL.equalsValue(config.getType())) {
             return;
         }
         List<String> udfClassNameList = JobManager.getUDFClassName(statement);
-        for(String item : udfClassNameList){
+        for (String item : udfClassNameList) {
             Task task = taskService.getUDFByClassName(item);
-            JobManager.initUDF(item,task.getStatement());
+            JobManager.initUDF(item, task.getStatement());
         }
     }
 }
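The net effect of the new addFlinkSQLEnv logic: when a statement opts into fragments and the enabled data sources expose a non-empty "Flink with SQL" snippet, that snippet is prepended before the user's statement (the env-task prepend that follows is unchanged in spirit). A standalone sketch of just the string assembly, with dataBaseService stubbed away as a plain parameter:

public class AddFlinkSQLEnvDemo {
    // Mirrors the new prepend rule; flinkWithSql stands in for
    // dataBaseService.getEnabledFlinkWithSql().
    static String addFlinkSQLEnv(String statement, boolean fragment, String flinkWithSql) {
        if (fragment && flinkWithSql != null && !flinkWithSql.isEmpty()) {
            statement = flinkWithSql + "\r\n" + statement;
        }
        return statement;
    }

    public static void main(String[] args) {
        String merged = addFlinkSQLEnv("select * from t", true,
                "-- fragment definitions collected from enabled data sources");
        System.out.println(merged); // snippet first, then the user statement
    }
}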
dlink-admin/src/main/java/com/dlink/service/impl/TaskServiceImpl.java

@@ -276,6 +276,12 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
     private JobConfig buildJobConfig(Task task) {
         boolean isJarTask = Dialect.FLINKJAR.equalsVal(task.getDialect());
+        if (!isJarTask && task.isFragment()) {
+            String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
+            if (Asserts.isNotNullString(flinkWithSql)) {
+                task.setStatement(flinkWithSql + "\r\n" + task.getStatement());
+            }
+        }
         if (!isJarTask && Asserts.isNotNull(task.getEnvId()) && task.getEnvId() != 0) {
             Task envTask = getTaskInfoById(task.getEnvId());
             if (Asserts.isNotNull(envTask) && Asserts.isNotNullString(envTask.getStatement())) {
dlink-core/src/main/java/com/dlink/job/JobManager.java

@@ -17,6 +17,7 @@ import com.dlink.gateway.result.GatewayResult;
 import com.dlink.gateway.result.SavePointResult;
 import com.dlink.gateway.result.TestResult;
 import com.dlink.interceptor.FlinkInterceptor;
+import com.dlink.interceptor.FlinkInterceptorResult;
 import com.dlink.model.SystemConfiguration;
 import com.dlink.parser.SqlType;
 import com.dlink.pool.ClassEntity;
@@ -162,7 +163,7 @@ public class JobManager {
     private Executor createExecutor() {
         initEnvironmentSetting();
-        if (!runMode.equals(GatewayType.LOCAL)&&!useGateway&&config.isUseRemote()) {
+        if (!runMode.equals(GatewayType.LOCAL) && !useGateway && config.isUseRemote()) {
             executor = Executor.buildRemoteExecutor(environmentSetting, config.getExecutorSetting());
             return executor;
         } else {
@@ -200,7 +201,7 @@ public class JobManager {
     }

     public boolean init() {
-        if(!isPlanMode) {
+        if (!isPlanMode) {
             runMode = GatewayType.get(config.getType());
             useGateway = useGateway(config.getType());
             handler = JobHandler.build();
@@ -239,7 +240,7 @@ public class JobManager {
         ready();
         String currentSql = "";
         // JobParam jobParam = pretreatStatements(SqlUtil.getStatements(statement));
-        JobParam jobParam = Explainer.build(executor,useStatementSet,sqlSeparator).pretreatStatements(SqlUtil.getStatements(statement,sqlSeparator));
+        JobParam jobParam = Explainer.build(executor, useStatementSet, sqlSeparator).pretreatStatements(SqlUtil.getStatements(statement, sqlSeparator));
         try {
             for (StatementParam item : jobParam.getDdl()) {
                 currentSql = item.getValue();
@@ -254,12 +255,12 @@ public class JobManager {
                     currentSql = String.join(sqlSeparator, inserts);
                     JobGraph jobGraph = executor.getJobGraphFromInserts(inserts);
                     GatewayResult gatewayResult = null;
-                    if(GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)){
+                    if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                         config.addGatewayConfig(executor.getSetConfig());
                         gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                     } else {
                         config.addGatewayConfig(executor.getSetConfig());
-                        if(Asserts.isNotNullString(config.getSavePointPath())) {
+                        if (Asserts.isNotNullString(config.getSavePointPath())) {
                             jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                         }
                         gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -281,7 +282,7 @@ public class JobManager {
                         job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
                     }
                     if (config.isUseResult()) {
-                        IResult result = ResultBuilder.build(SqlType.INSERT, config.getMaxRowNum(), config.isUseChangeLog(),config.isUseAutoCancel()).getResult(tableResult);
+                        IResult result = ResultBuilder.build(SqlType.INSERT, config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(tableResult);
                         job.setResult(result);
                     }
                 }
@@ -294,12 +295,12 @@ public class JobManager {
                     currentSql = String.join(sqlSeparator, inserts);
                     JobGraph jobGraph = executor.getJobGraphFromInserts(inserts);
                     GatewayResult gatewayResult = null;
-                    if(GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)){
+                    if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                         config.addGatewayConfig(executor.getSetConfig());
                         gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                     } else {
                         config.addGatewayConfig(executor.getSetConfig());
-                        if(Asserts.isNotNullString(config.getSavePointPath())) {
+                        if (Asserts.isNotNullString(config.getSavePointPath())) {
                             jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                         }
                         gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -310,15 +311,23 @@ public class JobManager {
             } else {
                 for (StatementParam item : jobParam.getTrans()) {
                     currentSql = item.getValue();
-                    if (!FlinkInterceptor.build(executor, item.getValue())) {
-                        TableResult tableResult = executor.executeSql(item.getValue());
-                        if (tableResult.getJobClient().isPresent()) {
-                            job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
-                        }
+                    FlinkInterceptorResult flinkInterceptorResult = FlinkInterceptor.build(executor, item.getValue());
+                    if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) {
                         if (config.isUseResult()) {
-                            IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(tableResult);
+                            IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(flinkInterceptorResult.getTableResult());
                             job.setResult(result);
                         }
+                    } else {
+                        if (!flinkInterceptorResult.isNoExecute()) {
+                            TableResult tableResult = executor.executeSql(item.getValue());
+                            if (tableResult.getJobClient().isPresent()) {
+                                job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
+                            }
+                            if (config.isUseResult()) {
+                                IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(tableResult);
+                                job.setResult(result);
+                            }
+                        }
                     }
                 }
             }
@@ -327,7 +336,7 @@ public class JobManager {
             if (useGateway) {
                 for (StatementParam item : jobParam.getExecute()) {
                     executor.executeSql(item.getValue());
-                    if(!useStatementSet) {
+                    if (!useStatementSet) {
                         break;
                     }
                 }
@@ -335,12 +344,12 @@ public class JobManager {
                 streamGraph.setJobName(config.getJobName());
                 JobGraph jobGraph = streamGraph.getJobGraph();
                 GatewayResult gatewayResult = null;
-                if(GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)){
+                if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                     config.addGatewayConfig(executor.getSetConfig());
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                 } else {
                     config.addGatewayConfig(executor.getSetConfig());
-                    if(Asserts.isNotNullString(config.getSavePointPath())) {
+                    if (Asserts.isNotNullString(config.getSavePointPath())) {
                         jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                     }
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -351,7 +360,7 @@ public class JobManager {
             } else {
                 for (StatementParam item : jobParam.getExecute()) {
                     executor.executeSql(item.getValue());
-                    if(!useStatementSet) {
+                    if (!useStatementSet) {
                         break;
                     }
                 }
@@ -360,7 +369,7 @@ public class JobManager {
                     job.setJobId(jobExecutionResult.getJobID().toHexString());
                 }
                 if (config.isUseResult()) {
-                    IResult result = ResultBuilder.build(SqlType.EXECUTE, config.getMaxRowNum(), config.isUseChangeLog(),config.isUseAutoCancel()).getResult(null);
+                    IResult result = ResultBuilder.build(SqlType.EXECUTE, config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(null);
                     job.setResult(result);
                 }
             }
@@ -369,7 +378,7 @@ public class JobManager {
             job.setStatus(Job.JobStatus.SUCCESS);
             success();
         } catch (Exception e) {
-            String error = LogUtil.getError("Exception in executing FlinkSQL:\n"+currentSql,e);
+            String error = LogUtil.getError("Exception in executing FlinkSQL:\n" + currentSql, e);
             job.setEndTime(LocalDateTime.now());
             job.setStatus(Job.JobStatus.FAILED);
             job.setError(error);
@@ -389,7 +398,7 @@ public class JobManager {
     }

     public IResult executeDDL(String statement) {
-        String[] statements = SqlUtil.getStatements(statement,sqlSeparator);
+        String[] statements = SqlUtil.getStatements(statement, sqlSeparator);
         try {
             for (String item : statements) {
                 String newStatement = executor.pretreatStatement(item);
@@ -402,7 +411,7 @@ public class JobManager {
                 }
                 LocalDateTime startTime = LocalDateTime.now();
                 TableResult tableResult = executor.executeSql(newStatement);
-                IResult result = ResultBuilder.build(operationType, config.getMaxRowNum(), false,false).getResult(tableResult);
+                IResult result = ResultBuilder.build(operationType, config.getMaxRowNum(), false, false).getResult(tableResult);
                 result.setStartTime(startTime);
                 return result;
             }
@@ -436,15 +445,15 @@ public class JobManager {
     }

     public ExplainResult explainSql(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).explainSql(statement);
+        return Explainer.build(executor, useStatementSet, sqlSeparator).explainSql(statement);
     }

     public ObjectNode getStreamGraph(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).getStreamGraph(statement);
+        return Explainer.build(executor, useStatementSet, sqlSeparator).getStreamGraph(statement);
     }

     public String getJobPlanJson(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).getJobPlanInfo(statement).getJsonPlan();
+        return Explainer.build(executor, useStatementSet, sqlSeparator).getJobPlanInfo(statement).getJsonPlan();
     }

     public boolean cancel(String jobId) {
@@ -454,22 +463,22 @@ public class JobManager {
             Gateway.build(config.getGatewayConfig()).savepointJob();
             return true;
         } else {
-            try{
+            try {
                 return FlinkAPI.build(config.getAddress()).stop(jobId);
-            }catch (Exception e){
+            } catch (Exception e) {
                 logger.info("停止作业时集群不存在");
             }
             return false;
         }
     }

-    public SavePointResult savepoint(String jobId,String savePointType,String savePoint) {
-        if(useGateway && !useRestAPI) {
+    public SavePointResult savepoint(String jobId, String savePointType, String savePoint) {
+        if (useGateway && !useRestAPI) {
             config.getGatewayConfig().setFlinkConfig(FlinkConfig.build(jobId, ActionType.SAVEPOINT.getValue(),
                     savePointType, null));
             return Gateway.build(config.getGatewayConfig()).savepointJob(savePoint);
         } else {
-            return FlinkAPI.build(config.getAddress()).savepoints(jobId,savePointType);
+            return FlinkAPI.build(config.getAddress()).savepoints(jobId, savePointType);
         }
     }
@@ -486,7 +495,7 @@ public class JobManager {
             job.setStatus(Job.JobStatus.SUCCESS);
             success();
         } catch (Exception e) {
-            String error = LogUtil.getError("Exception in executing Jar:\n"+config.getGatewayConfig().getAppConfig().getUserJarPath(),e);
+            String error = LogUtil.getError("Exception in executing Jar:\n" + config.getGatewayConfig().getAppConfig().getUserJarPath(), e);
             job.setEndTime(LocalDateTime.now());
             job.setStatus(Job.JobStatus.FAILED);
             job.setError(error);
@@ -497,39 +506,39 @@ public class JobManager {
         return job.getJobResult();
     }

-    public static TestResult testGateway(GatewayConfig gatewayConfig){
+    public static TestResult testGateway(GatewayConfig gatewayConfig) {
         return Gateway.build(gatewayConfig).test();
     }

-    public String exportSql(String sql){
+    public String exportSql(String sql) {
         String statement = executor.pretreatStatement(sql);
         StringBuilder sb = new StringBuilder();
-        if(Asserts.isNotNullString(config.getJobName())){
+        if (Asserts.isNotNullString(config.getJobName())) {
             sb.append("set " + PipelineOptions.NAME.key() + " = " + config.getJobName() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             sb.append("set " + CoreOptions.DEFAULT_PARALLELISM.key() + " = " + config.getParallelism() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             sb.append("set " + ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL.key() + " = " + config.getCheckpoint() + ";\r\n");
         }
-        if(Asserts.isNotNullString(config.getSavePointPath())){
+        if (Asserts.isNotNullString(config.getSavePointPath())) {
             sb.append("set " + SavepointConfigOptions.SAVEPOINT_PATH + " = " + config.getSavePointPath() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getGatewayConfig())&&Asserts.isNotNull(config.getGatewayConfig().getFlinkConfig().getConfiguration())){
+        if (Asserts.isNotNull(config.getGatewayConfig()) && Asserts.isNotNull(config.getGatewayConfig().getFlinkConfig().getConfiguration())) {
             for (Map.Entry<String, String> entry : config.getGatewayConfig().getFlinkConfig().getConfiguration().entrySet()) {
                 sb.append("set " + entry.getKey() + " = " + entry.getValue() + ";\r\n");
             }
         }
-        switch (GatewayType.get(config.getType())){
+        switch (GatewayType.get(config.getType())) {
             case YARN_PER_JOB:
             case YARN_APPLICATION:
                 sb.append("set " + DeploymentOptions.TARGET.key() + " = " + GatewayType.get(config.getType()).getLongValue() + ";\r\n");
-                if(Asserts.isNotNull(config.getGatewayConfig())){
+                if (Asserts.isNotNull(config.getGatewayConfig())) {
                     sb.append("set " + YarnConfigOptions.PROVIDED_LIB_DIRS.key() + " = " + Collections.singletonList(config.getGatewayConfig().getClusterConfig().getFlinkLibPath()) + ";\r\n");
                 }
-                if(Asserts.isNotNull(config.getGatewayConfig())&&Asserts.isNotNullString(config.getGatewayConfig().getFlinkConfig().getJobName())){
+                if (Asserts.isNotNull(config.getGatewayConfig()) && Asserts.isNotNullString(config.getGatewayConfig().getFlinkConfig().getJobName())) {
                     sb.append("set " + YarnConfigOptions.APPLICATION_NAME.key() + " = " + config.getGatewayConfig().getFlinkConfig().getJobName() + ";\r\n");
                 }
@@ -537,7 +546,7 @@ public class JobManager {
         return sb.toString();
     }

-    public static List<String> getUDFClassName(String statement){
+    public static List<String> getUDFClassName(String statement) {
         Pattern pattern = Pattern.compile("function (.*?)'(.*?)'", Pattern.CASE_INSENSITIVE);
         Matcher matcher = pattern.matcher(statement);
         List<String> classNameList = new ArrayList<>();
@@ -547,10 +556,10 @@ public class JobManager {
         return classNameList;
     }

-    public static void initUDF(String className,String code) {
-        if(ClassPool.exist(ClassEntity.build(className, code))){
+    public static void initUDF(String className, String code) {
+        if (ClassPool.exist(ClassEntity.build(className, code))) {
             UDFUtil.initClassLoader(className);
-        }else{
+        } else {
             UDFUtil.buildClass(code);
         }
     }
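The reshaped trans loop is the heart of this commit: FlinkInterceptor.build no longer answers a bare boolean but a FlinkInterceptorResult, so an Operation such as SHOW FRAGMENT can hand its own TableResult back to JobManager instead of forcing a pass through Flink. A condensed, runnable sketch of the new branch order, with stub types standing in for the Flink and dlink classes:

public class InterceptorFlowDemo {
    static class TableResult { }

    // Stub mirroring com.dlink.interceptor.FlinkInterceptorResult.
    static class FlinkInterceptorResult {
        final boolean noExecute;
        final TableResult tableResult;
        FlinkInterceptorResult(boolean noExecute, TableResult tableResult) {
            this.noExecute = noExecute;
            this.tableResult = tableResult;
        }
    }

    static void run(String sql, FlinkInterceptorResult r) {
        if (r.tableResult != null) {
            // an Operation already produced a result (e.g. SHOW FRAGMENTS)
            System.out.println("use interceptor result for: " + sql);
        } else if (!r.noExecute) {
            // no operation intercepted it: hand the statement to Flink as before
            System.out.println("executor.executeSql(" + sql + ")");
        }
    }

    public static void main(String[] args) {
        run("SHOW FRAGMENTS", new FlinkInterceptorResult(true, new TableResult()));
        run("INSERT INTO t SELECT 1", new FlinkInterceptorResult(false, null));
    }
}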
dlink-core/src/test/java/com/dlink/core/SqlParserTest.java

@@ -78,4 +78,12 @@ public class SqlParserTest {
         Map<String, List<String>> lists = SingleSqlParserFactory.generateParser(sql);
         System.out.println(lists.toString());
     }
+
+    @Test
+    public void showFragmentTest() {
+        String sql = "show fragment test";
+        Map<String, List<String>> lists = SingleSqlParserFactory.generateParser(sql);
+        System.out.println(lists.toString());
+    }
 }
dlink-executor/src/main/java/com/dlink/executor/Executor.java

@@ -2,6 +2,7 @@ package com.dlink.executor;
 import com.dlink.assertion.Asserts;
 import com.dlink.interceptor.FlinkInterceptor;
+import com.dlink.interceptor.FlinkInterceptorResult;
 import com.dlink.result.SqlExplainResult;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -197,8 +198,8 @@ public abstract class Executor {
         return FlinkInterceptor.pretreatStatement(this, statement);
     }

-    private boolean pretreatExecute(String statement) {
-        return !FlinkInterceptor.build(this, statement);
+    private FlinkInterceptorResult pretreatExecute(String statement) {
+        return FlinkInterceptor.build(this, statement);
     }

     public JobExecutionResult execute(String jobName) throws Exception {
@@ -207,7 +208,11 @@ public abstract class Executor {
     public TableResult executeSql(String statement) {
         statement = pretreatStatement(statement);
-        if(pretreatExecute(statement)) {
+        FlinkInterceptorResult flinkInterceptorResult = pretreatExecute(statement);
+        if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) {
+            return flinkInterceptorResult.getTableResult();
+        }
+        if (!flinkInterceptorResult.isNoExecute()) {
             return stEnvironment.executeSql(statement);
         } else {
             return CustomTableResultImpl.TABLE_RESULT_OK;
@@ -216,7 +221,7 @@ public abstract class Executor {
     public String explainSql(String statement, ExplainDetail... extraDetails) {
         statement = pretreatStatement(statement);
-        if(pretreatExecute(statement)) {
+        if (!pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.explainSql(statement, extraDetails);
         } else {
             return "";
@@ -225,7 +230,7 @@ public abstract class Executor {
     public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
         statement = pretreatStatement(statement);
-        if (Asserts.isNotNullString(statement) && pretreatExecute(statement)) {
+        if (Asserts.isNotNullString(statement) && !pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.explainSqlRecord(statement, extraDetails);
         } else {
             return null;
@@ -234,7 +239,7 @@ public abstract class Executor {
     public ObjectNode getStreamGraph(String statement) {
         statement = pretreatStatement(statement);
-        if (pretreatExecute(statement)) {
+        if (!pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.getStreamGraph(statement);
         } else {
             return null;
dlink-executor/src/main/java/com/dlink/executor/SqlManager.java

@@ -11,12 +11,7 @@ import org.apache.flink.table.catalog.exceptions.CatalogException;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.StringUtils;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -26,8 +21,9 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 /**
  * Flink Sql Fragment Manager
- * @author wenmo
- * @since 2021/6/7 22:06
+ *
+ * @author wenmo
+ * @since 2021/6/7 22:06
 **/
 public final class SqlManager {
@@ -111,6 +107,16 @@ public final class SqlManager {
         }
     }

+    public TableResult getSqlFragmentResult(String sqlFragmentName) {
+        if (Asserts.isNullString(sqlFragmentName)) {
+            return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragment", DataTypes.STRING()))), new ArrayList<>());
+        }
+        String sqlFragment = getSqlFragment(sqlFragmentName);
+        List<Row> rows = new ArrayList<>();
+        rows.add(Row.of(sqlFragment));
+        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragment", DataTypes.STRING()))), rows);
+    }
+
     /**
      * Get a fragment of sql under the given name. The sql fragment name must be existed.
      *
@@ -126,7 +132,7 @@ public final class SqlManager {
         for (String key : sqlFragments.keySet()) {
             rows.add(Row.of(key));
         }
-        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("sql fragment name", DataTypes.STRING()))), rows);
+        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragmentName", DataTypes.STRING()))), rows);
     }

     public Iterator getSqlFragmentsIterator() {
@@ -141,9 +147,10 @@ public final class SqlManager {
         return environment.fromValues(keys);
     }

-    public boolean checkShowFragments(String sql){
+    public boolean checkShowFragments(String sql) {
         return SHOW_FRAGMENTS.equals(sql.trim().toUpperCase());
     }

     /**
      * Parse some variables under the given sql.
      *
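The new getSqlFragmentResult wraps a single fragment body into a one-column TableResult named "fragment", while the existing name listing now reports its column as "fragmentName". A plain-collections approximation of the two result shapes (the real code wraps these in CustomTableResultImpl.buildTableResult with Flink's Row and DataTypes):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FragmentResultDemo {
    static final Map<String, String> sqlFragments = new LinkedHashMap<>();

    // Column "fragment": zero rows when the name is blank, else one row
    // holding the stored fragment body.
    static List<String> getSqlFragmentResult(String name) {
        List<String> rows = new ArrayList<>();
        if (name == null || name.trim().isEmpty()) {
            return rows;
        }
        rows.add(sqlFragments.get(name));
        return rows;
    }

    public static void main(String[] args) {
        sqlFragments.put("test", "select 1");
        System.out.println(getSqlFragmentResult("test")); // [select 1]
        System.out.println(sqlFragments.keySet());        // the "fragmentName" listing
    }
}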
dlink-executor/src/main/java/com/dlink/interceptor/FlinkInterceptor.java

@@ -3,11 +3,12 @@ package com.dlink.interceptor;
 import com.dlink.assertion.Asserts;
 import com.dlink.catalog.function.FunctionManager;
 import com.dlink.catalog.function.UDFunction;
-import com.dlink.executor.Executor;
 import com.dlink.executor.CustomTableEnvironmentImpl;
+import com.dlink.executor.Executor;
 import com.dlink.trans.Operation;
 import com.dlink.trans.Operations;
 import com.dlink.utils.SqlUtil;
+import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.TableAggregateFunction;
@@ -27,7 +28,7 @@ public class FlinkInterceptor {
     public static String pretreatStatement(Executor executor, String statement) {
         statement = SqlUtil.removeNote(statement);
-        if(executor.isUseSqlFragment()) {
+        if (executor.isUseSqlFragment()) {
             statement = executor.getSqlManager().parseVariable(statement);
         }
         // initFunctions(executor.getCustomTableEnvironmentImpl(), statement);
@@ -35,13 +36,15 @@ public class FlinkInterceptor {
     }

     // return false to continue with executeSql
-    public static boolean build(Executor executor, String statement) {
+    public static FlinkInterceptorResult build(Executor executor, String statement) {
+        boolean noExecute = false;
+        TableResult tableResult = null;
         Operation operation = Operations.buildOperation(statement);
         if (Asserts.isNotNull(operation)) {
-            operation.build(executor);
-            return operation.noExecute();
+            tableResult = operation.build(executor);
+            noExecute = operation.noExecute();
         }
-        return false;
+        return FlinkInterceptorResult.build(noExecute, tableResult);
     }

     @Deprecated
dlink-executor/src/main/java/com/dlink/interceptor/FlinkInterceptorResult.java (new file, 0 → 100644)

package com.dlink.interceptor;

import org.apache.flink.table.api.TableResult;

/**
 * FlinkInterceptorResult
 *
 * @author wenmo
 * @since 2022/2/17 16:36
 **/
public class FlinkInterceptorResult {

    private boolean noExecute;
    private TableResult tableResult;

    public FlinkInterceptorResult() {
    }

    public FlinkInterceptorResult(boolean noExecute, TableResult tableResult) {
        this.noExecute = noExecute;
        this.tableResult = tableResult;
    }

    public boolean isNoExecute() {
        return noExecute;
    }

    public void setNoExecute(boolean noExecute) {
        this.noExecute = noExecute;
    }

    public TableResult getTableResult() {
        return tableResult;
    }

    public void setTableResult(TableResult tableResult) {
        this.tableResult = tableResult;
    }

    public static FlinkInterceptorResult buildResult(TableResult tableResult) {
        return new FlinkInterceptorResult(false, tableResult);
    }

    public static FlinkInterceptorResult build(boolean noExecute, TableResult tableResult) {
        return new FlinkInterceptorResult(noExecute, tableResult);
    }
}
dlink-executor/src/main/java/com/dlink/parser/ShowFragmentParser.java (new file, 0 → 100644)

package com.dlink.parser;

/**
 * ShowFragmentsParser
 *
 * @author wenmo
 * @since 2022/2/17 16:19
 **/
public class ShowFragmentParser extends BaseSingleSqlParser {

    public ShowFragmentParser(String originalSql) {
        super(originalSql);
    }

    @Override
    protected void initializeSegments() {
        //SHOW FRAGMENT (.+)
        segments.add(new SqlSegment("FRAGMENT", "(show\\s+fragment)\\s+(.*)( ENDOFSQL)", ","));
    }
}
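ShowFragmentParser contributes a single segment whose regex ends in the " ENDOFSQL" sentinel that SingleSqlParserFactory appends before parsing. The capture can be checked in isolation; how SqlSegment folds group 2 into the result map is assumed to follow the other parsers:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ShowFragmentRegexDemo {
    public static void main(String[] args) {
        // The factory normalises newlines and appends the sentinel first:
        String sql = "show fragment test" + " ENDOFSQL";
        // Same pattern string as the SqlSegment registered above.
        Pattern p = Pattern.compile("(show\\s+fragment)\\s+(.*)( ENDOFSQL)");
        Matcher m = p.matcher(sql);
        if (m.find()) {
            System.out.println(m.group(2)); // prints: test  (the fragment name)
        }
    }
}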
dlink-executor/src/main/java/com/dlink/parser/SingleSqlParserFactory.java

@@ -13,10 +13,10 @@ import java.util.regex.Pattern;
  */
 public class SingleSqlParserFactory {

-    public static Map<String, List<String>> generateParser(String sql) {
+    public static Map<String, List<String>> generateParser(String sql) {
         BaseSingleSqlParser tmp = null;
         // sql = sql.replace("\n"," ").replaceAll("\\s{1,}", " ") +" ENDOFSQL";
-        sql = sql.replace("\r\n"," ").replace("\n"," ")+" ENDOFSQL";
+        sql = sql.replace("\r\n", " ").replace("\n", " ") + " ENDOFSQL";
         if (contains(sql, "(insert\\s+into)(.+)(select)(.+)(from)(.+)")) {
             tmp = new InsertSelectSqlParser(sql);
         } else if (contains(sql, "(create\\s+aggtable)(.+)(as\\s+select)(.+)")) {
@@ -37,6 +37,8 @@ public class SingleSqlParserFactory {
         } else if (contains(sql, "(use)(.+)")) {
         } else if (contains(sql, "(set)(.+)")) {
             tmp = new SetSqlParser(sql);
+        } else if (contains(sql, "(show\\s+fragment)\\s+(.+)")) {
+            tmp = new ShowFragmentParser(sql);
         } else {
         }
         return tmp.splitSql2Segment();
dlink-executor/src/main/java/com/dlink/trans/Operation.java

 package com.dlink.trans;

 import com.dlink.executor.Executor;
+import org.apache.flink.table.api.TableResult;

 /**
  * Operation
@@ -14,7 +15,7 @@ public interface Operation {

     Operation create(String statement);

-    void build(Executor executor);
+    TableResult build(Executor executor);

     boolean noExecute();
 }
dlink-executor/src/main/java/com/dlink/trans/Operations.java

 package com.dlink.trans;

 import com.dlink.parser.SqlType;
-import com.dlink.trans.ddl.CreateAggTableOperation;
-import com.dlink.trans.ddl.CreateCDCSourceOperation;
-import com.dlink.trans.ddl.SetOperation;
+import com.dlink.trans.ddl.*;

 /**
  * Operations
@@ -14,12 +12,14 @@ import com.dlink.trans.ddl.SetOperation;
 public class Operations {

     private static Operation[] operations = {
             new CreateAggTableOperation()
             , new SetOperation()
             , new CreateCDCSourceOperation()
+            , new ShowFragmentsOperation()
+            , new ShowFragmentOperation()
     };

-    public static SqlType getSqlTypeFromStatements(String statement){
+    public static SqlType getSqlTypeFromStatements(String statement) {
         String[] statements = statement.split(";");
         SqlType sqlType = SqlType.UNKNOWN;
         for (String item : statements) {
@@ -27,7 +27,7 @@ public class Operations {
                 continue;
             }
             sqlType = Operations.getOperationType(item);
-            if (sqlType == SqlType.INSERT ||sqlType == SqlType.SELECT) {
+            if (sqlType == SqlType.INSERT || sqlType == SqlType.SELECT) {
                 return sqlType;
             }
         }
@@ -46,10 +46,10 @@ public class Operations {
         return type;
     }

-    public static Operation buildOperation(String statement){
-        String sql = statement.replace("\n"," ").replaceAll("\\s{1,}", " ").trim().toUpperCase();
+    public static Operation buildOperation(String statement) {
+        String sql = statement.replace("\n", " ").replaceAll("\\s{1,}", " ").trim().toUpperCase();
         for (int i = 0; i < operations.length; i++) {
-            if(sql.startsWith(operations[i].getHandle())){
+            if (sql.startsWith(operations[i].getHandle())) {
                 return operations[i].create(statement);
             }
         }
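buildOperation dispatches purely on an upper-cased, whitespace-collapsed prefix, so the trailing space in ShowFragmentOperation's handle ("SHOW FRAGMENT ") is what keeps SHOW FRAGMENTS and SHOW FRAGMENT name apart. A runnable reduction of that rule; only the three handles visible in this diff are listed, the real array also holds SetOperation and CreateCDCSourceOperation:

public class OperationDispatchDemo {
    // Handles taken from this commit's diff.
    static final String[] HANDLES = {"CREATE AGGTABLE", "SHOW FRAGMENTS", "SHOW FRAGMENT "};

    static String dispatch(String statement) {
        // Same normalisation as Operations.buildOperation:
        String sql = statement.replace("\n", " ").replaceAll("\\s{1,}", " ").trim().toUpperCase();
        for (String handle : HANDLES) {
            if (sql.startsWith(handle)) {
                return handle;
            }
        }
        return "no operation: fall through to Flink";
    }

    public static void main(String[] args) {
        System.out.println(dispatch("show  fragments"));    // SHOW FRAGMENTS
        System.out.println(dispatch("SHOW FRAGMENT test")); // SHOW FRAGMENT (note trailing space)
        System.out.println(dispatch("select 1"));           // no operation: fall through to Flink
    }
}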
dlink-executor/src/main/java/com/dlink/trans/ddl/CreateAggTableOperation.java

@@ -4,6 +4,7 @@ import com.dlink.executor.Executor;
 import com.dlink.trans.AbstractOperation;
 import com.dlink.trans.Operation;
 import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.TableResult;

 import java.util.List;
@@ -13,7 +14,7 @@ import java.util.List;
  * @author wenmo
  * @since 2021/6/13 19:24
  */
-public class CreateAggTableOperation extends AbstractOperation implements Operation{
+public class CreateAggTableOperation extends AbstractOperation implements Operation {

     private String KEY_WORD = "CREATE AGGTABLE";
@@ -35,11 +36,11 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat
     }

     @Override
-    public void build(Executor executor) {
+    public TableResult build(Executor executor) {
         AggTable aggTable = AggTable.build(statement);
-        Table source = executor.getCustomTableEnvironment().sqlQuery("select * from "+ aggTable.getTable());
+        Table source = executor.getCustomTableEnvironment().sqlQuery("select * from " + aggTable.getTable());
         List<String> wheres = aggTable.getWheres();
-        if(wheres!=null&&wheres.size()>0) {
+        if (wheres != null && wheres.size() > 0) {
             for (String s : wheres) {
                 source = source.filter(s);
             }
@@ -48,5 +49,6 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat
                 .flatAggregate(aggTable.getAggBy())
                 .select(aggTable.getColumns());
         executor.getCustomTableEnvironment().registerTable(aggTable.getName(), sink);
+        return null;
     }
 }
dlink-executor/src/main/java/com/dlink/trans/ddl/CreateCDCSourceOperation.java

@@ -5,9 +5,10 @@ import com.dlink.executor.Executor;
 import com.dlink.model.FlinkCDCConfig;
 import com.dlink.trans.AbstractOperation;
 import com.dlink.trans.Operation;
+import org.apache.flink.table.api.TableResult;

 /**
- * TODO
+ * CreateCDCSourceOperation
  *
  * @author wenmo
  * @since 2022/1/29 23:25
@@ -34,15 +35,16 @@ public class CreateCDCSourceOperation extends AbstractOperation implements Opera
     }

     @Override
-    public void build(Executor executor) {
+    public TableResult build(Executor executor) {
         CDCSource cdcSource = CDCSource.build(statement);
-        FlinkCDCConfig config = new FlinkCDCConfig(cdcSource.getHostname(),cdcSource.getPort(),cdcSource.getUsername()
-                ,cdcSource.getPassword(),cdcSource.getCheckpoint(),cdcSource.getParallelism(),cdcSource.getDatabase(),cdcSource.getTable()
-                ,cdcSource.getStartupMode(),cdcSource.getTopic(),cdcSource.getBrokers());
+        FlinkCDCConfig config = new FlinkCDCConfig(cdcSource.getHostname(), cdcSource.getPort(), cdcSource.getUsername()
+                , cdcSource.getPassword(), cdcSource.getCheckpoint(), cdcSource.getParallelism(), cdcSource.getDatabase(), cdcSource.getTable()
+                , cdcSource.getStartupMode(), cdcSource.getTopic(), cdcSource.getBrokers());
         try {
-            FlinkCDCMergeBuilder.buildMySqlCDC(executor.getStreamExecutionEnvironment(),config);
+            FlinkCDCMergeBuilder.buildMySqlCDC(executor.getStreamExecutionEnvironment(), config);
         } catch (Exception e) {
             e.printStackTrace();
         }
+        return null;
     }
 }
dlink-executor/src/main/java/com/dlink/trans/ddl/SetOperation.java

@@ -7,6 +7,7 @@ import com.dlink.trans.AbstractOperation;
 import com.dlink.trans.Operation;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.table.api.TableResult;

 import java.util.HashMap;
 import java.util.List;
@@ -40,22 +41,23 @@ public class SetOperation extends AbstractOperation implements Operation {
     }

     @Override
-    public void build(Executor executor) {
+    public TableResult build(Executor executor) {
         try {
-            if(null != Class.forName("org.apache.log4j.Logger")){
+            if (null != Class.forName("org.apache.log4j.Logger")) {
                 executor.parseAndLoadConfiguration(statement);
-                return;
+                return null;
             }
         } catch (ClassNotFoundException e) {
         }
-        Map<String,List<String>> map = SingleSqlParserFactory.generateParser(statement);
-        if(Asserts.isNotNullMap(map)&&map.size()==2) {
+        Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
+        if (Asserts.isNotNullMap(map) && map.size() == 2) {
             Map<String, String> confMap = new HashMap<>();
             confMap.put(StringUtils.join(map.get("SET"), "."), StringUtils.join(map.get("="), ","));
-            executor.getCustomTableEnvironment().getConfig().addConfiguration(Configuration.fromMap(confMap));
             Configuration configuration = Configuration.fromMap(confMap);
-            executor.getExecutionConfig().configure(configuration,null);
+            executor.getExecutionConfig().configure(configuration, null);
+            executor.getCustomTableEnvironment().getConfig().addConfiguration(configuration);
         }
+        return null;
     }
 }
dlink-executor/src/main/java/com/dlink/trans/ddl/ShowFragmentOperation.java (new file, 0 → 100644)

package com.dlink.trans.ddl;

import com.dlink.assertion.Asserts;
import com.dlink.executor.Executor;
import com.dlink.parser.SingleSqlParserFactory;
import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.table.api.TableResult;

import java.util.List;
import java.util.Map;

/**
 * ShowFragmentOperation
 *
 * @author wenmo
 * @since 2022/2/17 17:08
 **/
public class ShowFragmentOperation extends AbstractOperation implements Operation {

    private String KEY_WORD = "SHOW FRAGMENT ";

    public ShowFragmentOperation() {
    }

    public ShowFragmentOperation(String statement) {
        super(statement);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public Operation create(String statement) {
        return new ShowFragmentOperation(statement);
    }

    @Override
    public TableResult build(Executor executor) {
        Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
        if (Asserts.isNotNullMap(map)) {
            if (map.containsKey("FRAGMENT")) {
                return executor.getSqlManager().getSqlFragmentResult(StringUtils.join(map.get("FRAGMENT"), ""));
            }
        }
        return executor.getSqlManager().getSqlFragmentResult(null);
    }
}
dlink-executor/src/main/java/com/dlink/trans/ddl/ShowFragmentsOperation.java (new file, 0 → 100644)

package com.dlink.trans.ddl;

import com.dlink.executor.Executor;
import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation;
import org.apache.flink.table.api.TableResult;

/**
 * ShowFragmentsOperation
 *
 * @author wenmo
 * @since 2022/2/17 16:31
 **/
public class ShowFragmentsOperation extends AbstractOperation implements Operation {

    private String KEY_WORD = "SHOW FRAGMENTS";

    public ShowFragmentsOperation() {
    }

    public ShowFragmentsOperation(String statement) {
        super(statement);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public Operation create(String statement) {
        return new ShowFragmentsOperation(statement);
    }

    @Override
    public TableResult build(Executor executor) {
        return executor.getSqlManager().getSqlFragments();
    }
}
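Taken together, the two operations give Dlink's SQL layer a read path for fragments: SHOW FRAGMENTS lists the registered names, and SHOW FRAGMENT name returns one body. A compact end-to-end imitation against an in-memory fragment store; the real path runs Executor → FlinkInterceptor → Operation → SqlManager, and the name is extracted by ShowFragmentParser rather than the substring shortcut used here:

import java.util.LinkedHashMap;
import java.util.Map;

public class ShowFragmentEndToEndDemo {
    static final Map<String, String> FRAGMENTS = new LinkedHashMap<>();

    static String run(String sql) {
        // Same normalisation as Operations.buildOperation:
        String norm = sql.replace("\n", " ").replaceAll("\\s{1,}", " ").trim().toUpperCase();
        if (norm.startsWith("SHOW FRAGMENTS")) {
            return "fragmentName column: " + FRAGMENTS.keySet();
        }
        if (norm.startsWith("SHOW FRAGMENT ")) {
            // crude stand-in for the parser's FRAGMENT capture
            String name = sql.trim().substring("show fragment ".length());
            return "fragment column: " + FRAGMENTS.get(name);
        }
        return "handed to Flink";
    }

    public static void main(String[] args) {
        FRAGMENTS.put("test", "select 1");
        System.out.println(run("SHOW FRAGMENTS"));     // lists the names
        System.out.println(run("show fragment test")); // prints the stored body
    }
}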