zhaowei / dlink · Commit 429a25da (unverified)
Authored by aiwenmo on Apr 18, 2022; committed by GitHub on Apr 18, 2022.
[Feature-422][*] CDCSource sync kafka topics

Parents: bf048afa, 7c753b85

Showing 81 changed files with 2880 additions and 3254 deletions (+2880 / -3254).
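The commit message is terse, so a sketch helps to see what the feature does: a single CDCSOURCE statement captures a whole source database through Flink CDC and fans the change stream out to Kafka, one topic per table. The snippet below is illustrative only; the option keys follow later dlink/Dinky documentation, and the exact keys this commit's CDCSource parser accepts are not visible in this diff.

```java
// Illustrative only: the CDCSOURCE option keys follow later dlink/Dinky docs
// and may differ from what this commit actually parses.
public class CdcSourceStatementDemo {
    public static final String STATEMENT =
            "EXECUTE CDCSOURCE demo WITH (\n"
            + "  'connector' = 'mysql-cdc',\n"
            + "  'hostname' = '127.0.0.1',\n"
            + "  'port' = '3306',\n"
            + "  'username' = 'root',\n"
            + "  'password' = '123456',\n"
            + "  'checkpoint' = '3000',\n"
            + "  'scan.startup.mode' = 'initial',\n"
            + "  'parallelism' = '1',\n"
            + "  'database-name' = 'test_db',\n"
            + "  'sink.connector' = 'datastream-kafka',\n"
            + "  'sink.brokers' = '127.0.0.1:9092'\n"
            + ")";
}
```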
Changed files:

  +4     -0    dlink-app/dlink-app-1.11/pom.xml
  +0   -216    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +0   -262    ...c/main/java/com/dlink/executor/CustomTableResultImpl.java
  +0    -33    ...11/src/main/java/com/dlink/executor/TableSchemaField.java
  +4     -0    dlink-app/dlink-app-1.12/pom.xml
  +0   -218    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +0   -369    ...c/main/java/com/dlink/executor/CustomTableResultImpl.java
  +0    -33    ...12/src/main/java/com/dlink/executor/TableSchemaField.java
  +4     -0    dlink-app/dlink-app-1.13/pom.xml
  +0   -259    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +0   -395    ...c/main/java/com/dlink/executor/CustomTableResultImpl.java
  +0    -33    ...13/src/main/java/com/dlink/executor/TableSchemaField.java
  +4     -0    dlink-app/dlink-app-1.14/pom.xml
  +0   -329    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +0   -395    ...c/main/java/com/dlink/executor/CustomTableResultImpl.java
  +0    -33    ...14/src/main/java/com/dlink/executor/TableSchemaField.java
  +6     -1    dlink-app/dlink-app-base/pom.xml
  +3    -73    dlink-client/dlink-client-1.11/pom.xml
 +37     -0    ...-1.11/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 +29     -0    ...1.11/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
 +13     -1    ...k-client-1.11/src/main/java/com/dlink/cdc/CDCBuilder.java
  +0    -35    ....11/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
 +22     -0    ...-client-1.11/src/main/java/com/dlink/cdc/SinkBuilder.java
 +31     -0    ...-1.11/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
 +96     -0    ...1/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
 +63     -0    ...11/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
+113    -18    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
 +29    -24    ...c/main/java/com/dlink/executor/CustomTableResultImpl.java
  +3    -73    dlink-client/dlink-client-1.12/pom.xml
 +37     -0    ...-1.12/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 +29     -0    ...1.12/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
 +13     -1    ...k-client-1.12/src/main/java/com/dlink/cdc/CDCBuilder.java
  +0    -39    ....12/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
 +22     -0    ...-client-1.12/src/main/java/com/dlink/cdc/SinkBuilder.java
 +31     -0    ...-1.12/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
 +96     -0    ...2/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
 +63     -0    ...12/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
+112    -18    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +3    -94    dlink-client/dlink-client-1.13/pom.xml
 +37     -0    ...-1.13/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 +29     -0    ...1.13/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
 +13     -1    ...k-client-1.13/src/main/java/com/dlink/cdc/CDCBuilder.java
  +0    -35    ....13/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
 +22     -0    ...-client-1.13/src/main/java/com/dlink/cdc/SinkBuilder.java
 +33     -0    ...-1.13/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
+131     -0    ....13/src/main/java/com/dlink/cdc/jdbc/JdbcSinkBuilder.java
 +96     -0    ...3/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
 +64     -1    ...13/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
 +51     -1    .../src/main/java/com/dlink/cdc/oracle/OracleCDCBuilder.java
+172    -19    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
  +3    -84    dlink-client/dlink-client-1.14/pom.xml
 +37     -0    ...-1.14/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 +29     -0    ...1.14/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
 +13     -1    ...k-client-1.14/src/main/java/com/dlink/cdc/CDCBuilder.java
  +0    -41    ....14/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
 +22     -0    ...-client-1.14/src/main/java/com/dlink/cdc/SinkBuilder.java
 +33     -0    ...-1.14/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
+124     -0    ....14/src/main/java/com/dlink/cdc/jdbc/JdbcSinkBuilder.java
+107     -0    ...4/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
 +63     -0    ...14/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
 +51     -0    .../src/main/java/com/dlink/cdc/oracle/OracleCDCBuilder.java
+153    -61    ...n/java/com/dlink/executor/CustomTableEnvironmentImpl.java
 +17     -0    ...base/src/main/java/com/dlink/constant/ClientConstant.java
  +1     -1    .../src/main/java/com/dlink/constant/FlinkParamConstant.java
 +22     -3    .../main/java/com/dlink/executor/CustomTableEnvironment.java
 +19    -17    ...nt-base/src/main/java/com/dlink/model/FlinkCDCConfig.java
  +7     -3    dlink-common/src/main/java/com/dlink/model/Schema.java
 +80     -6    dlink-common/src/main/java/com/dlink/model/Table.java
  +7     -2    dlink-core/pom.xml
 +11     -1    dlink-executor/pom.xml
 +18    -19    ...executor/src/main/java/com/dlink/trans/ddl/CDCSource.java
 +48     -3    ...in/java/com/dlink/trans/ddl/CreateCDCSourceOperation.java
 +95     -0    dlink-flink/dlink-flink-1.11/pom.xml
 +95     -0    dlink-flink/dlink-flink-1.12/pom.xml
+119     -0    dlink-flink/dlink-flink-1.13/pom.xml
+105     -0    dlink-flink/dlink-flink-1.14/pom.xml
 +26     -0    dlink-flink/pom.xml
  +5     -0    dlink-function/pom.xml
 +12     -2    dlink-gateway/pom.xml
  +9     -0    ...src/main/java/com/dlink/metadata/driver/DriverConfig.java
 +34     -1    pom.xml
dlink-app/dlink-app-1.11/pom.xml:

```diff
@@ -33,6 +33,10 @@
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-client-1.11</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.11</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
```
dlink-app/dlink-app-1.11/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.delegation.PlannerFactory;
import org.apache.flink.table.factories.ComponentFactoryService;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.TableAggregateFunction;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.planner.delegation.ExecutorBase;
import org.apache.flink.table.planner.utils.ExecutorUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Customized TableEnvironmentImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager,
                                         TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog,
                                         Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
        return create(executionEnvironment, EnvironmentSettings.newInstance().build());
    }

    public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
        Configuration configuration = new Configuration();
        configuration.setString("execution.runtime-mode", "BATCH");
        TableConfig tableConfig = new TableConfig();
        tableConfig.addConfiguration(configuration);
        return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
        return create(executionEnvironment, settings, new TableConfig());
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment,
                                                    EnvironmentSettings settings, TableConfig tableConfig) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        ModuleManager moduleManager = new ModuleManager();
        CatalogManager catalogManager = CatalogManager.newBuilder()
                .classLoader(classLoader)
                .config(tableConfig.getConfiguration())
                .defaultCatalog(settings.getBuiltInCatalogName(),
                        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
                .executionConfig(executionEnvironment.getConfig())
                .build();
        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, executionEnvironment);
        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties))
                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor,
                functionCatalog, planner, settings.isStreamingMode(), classLoader);
    }

    private static Executor lookupExecutor(Map<String, String> executorProperties,
                                           StreamExecutionEnvironment executionEnvironment) {
        try {
            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
        } catch (Exception var4) {
            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
        }
    }

    public ObjectNode getStreamGraph(String statement) {
        List<Operation> operations = super.parser.parse(statement);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        } else {
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            for (int i = 0; i < operations.size(); i++) {
                if (operations.get(i) instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operations.get(i));
                }
            }
            List<Transformation<?>> trans = super.planner.translate(modifyOperations);
            if (execEnv instanceof ExecutorBase) {
                StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                        ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
                JSONGenerator jsonGenerator = new JSONGenerator(streamGraph);
                String json = jsonGenerator.getJSON();
                ObjectMapper mapper = new ObjectMapper();
                ObjectNode objectNode = mapper.createObjectNode();
                try {
                    objectNode = (ObjectNode) mapper.readTree(json);
                } catch (JsonProcessingException e) {
                    e.printStackTrace();
                } finally {
                    return objectNode;
                }
            } else {
                throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
            }
        }
    }

    public JobPlanInfo getJobPlanInfo(List<String> statements) {
        return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements)));
    }

    public StreamGraph getStreamGraphFromInserts(List<String> statements) {
        List<ModifyOperation> modifyOperations = new ArrayList();
        for (String statement : statements) {
            List<Operation> operations = getParser().parse(statement);
            if (operations.size() != 1) {
                throw new TableException("Only single statement is supported.");
            } else {
                Operation operation = operations.get(0);
                if (operation instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operation);
                } else {
                    throw new TableException("Only insert statement is supported now.");
                }
            }
        }
        List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
        if (execEnv instanceof ExecutorBase) {
            StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                    ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
            if (tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) {
                streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME));
            }
            return streamGraph;
        } else {
            throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
        }
    }

    public JobGraph getJobGraphFromInserts(List<String> statements) {
        return getStreamGraphFromInserts(statements).getJobGraph();
    }

    public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
        SqlExplainResult record = new SqlExplainResult();
        List<Operation> operations = parser.parse(statement);
        record.setParseTrue(true);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        }
        List<Operation> operationlist = new ArrayList<>(operations);
        for (int i = 0; i < operationlist.size(); i++) {
            Operation operation = operationlist.get(i);
            if (operation instanceof ModifyOperation) {
                record.setType("Modify DML");
            } else if (operation instanceof ExplainOperation) {
                record.setType("Explain DML");
            } else if (operation instanceof QueryOperation) {
                record.setType("Query DML");
            } else {
                record.setExplain(operation.asSummaryString());
                operationlist.remove(i);
                record.setType("DDL");
                i = i - 1;
            }
        }
        record.setExplainTrue(true);
        if (operationlist.size() == 0) {
            return record;
        }
        record.setExplain(planner.explain(operationlist, extraDetails));
        return record;
    }

    public <T> void registerFunction(String name, TableFunction<T> tableFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfTableFunction(tableFunction);
        this.functionCatalog.registerTempSystemTableFunction(name, tableFunction, typeInfo);
    }

    public <T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(aggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(aggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, aggregateFunction, typeInfo, accTypeInfo);
    }

    public <T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(tableAggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(tableAggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, tableAggregateFunction, typeInfo, accTypeInfo);
    }

    public boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
        return false;
    }
}
```
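Before its deletion here, this class was the per-Flink-version bridge the dlink app module used to turn SQL text into a Flink JobGraph. A minimal sketch of how such an environment is driven, assuming the two tables were already registered via DDL (the class name, statement, and table names below are hypothetical):

```java
import com.dlink.executor.CustomTableEnvironmentImpl;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Collections;

// Hypothetical driver; sink_table and source_table must exist in the catalog first.
public class JobGraphDemo {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        CustomTableEnvironmentImpl tableEnv = CustomTableEnvironmentImpl.create(env);
        JobGraph jobGraph = tableEnv.getJobGraphFromInserts(
                Collections.singletonList("INSERT INTO sink_table SELECT * FROM source_table"));
        System.out.println(jobGraph.getJobID());
    }
}
```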
dlink-app/dlink-app-1.11/src/main/java/com/dlink/executor/CustomTableResultImpl.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import org.apache.flink.annotation.Internal;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.*;
import org.apache.flink.table.utils.PrintUtils;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.Preconditions;

import javax.annotation.Nullable;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

/**
 * Customized TableResultImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
@Internal
class CustomTableResultImpl implements TableResult {
    public static final TableResult TABLE_RESULT_OK =
            CustomTableResultImpl.builder()
                    .resultKind(ResultKind.SUCCESS)
                    .tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
                    .data(Collections.singletonList(Row.of("OK")))
                    .build();

    private final JobClient jobClient;
    private final TableSchema tableSchema;
    private final ResultKind resultKind;
    private final CloseableIterator<Row> data;
    private final PrintStyle printStyle;

    private CustomTableResultImpl(
            @Nullable JobClient jobClient,
            TableSchema tableSchema,
            ResultKind resultKind,
            CloseableIterator<Row> data,
            PrintStyle printStyle) {
        this.jobClient = jobClient;
        this.tableSchema = Preconditions.checkNotNull(tableSchema, "tableSchema should not be null");
        this.resultKind = Preconditions.checkNotNull(resultKind, "resultKind should not be null");
        this.data = Preconditions.checkNotNull(data, "data should not be null");
        this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
    }

    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
        Builder builder = builder().resultKind(ResultKind.SUCCESS);
        if (fields.size() > 0) {
            TableSchema.Builder tableSchemaBuild = TableSchema.builder();
            for (int i = 0; i < fields.size(); i++) {
                tableSchemaBuild.field(fields.get(i).getName(), fields.get(i).getType());
            }
            builder.tableSchema(tableSchemaBuild.build()).data(rows);
        }
        return builder.build();
    }

    @Override
    public Optional<JobClient> getJobClient() {
        return Optional.ofNullable(jobClient);
    }

    @Override
    public TableSchema getTableSchema() {
        return tableSchema;
    }

    @Override
    public ResultKind getResultKind() {
        return resultKind;
    }

    @Override
    public CloseableIterator<Row> collect() {
        return data;
    }

    @Override
    public void print() {
        Iterator<Row> it = collect();
        if (printStyle instanceof TableauStyle) {
            int maxColumnWidth = ((TableauStyle) printStyle).getMaxColumnWidth();
            String nullColumn = ((TableauStyle) printStyle).getNullColumn();
            boolean deriveColumnWidthByType = ((TableauStyle) printStyle).isDeriveColumnWidthByType();
            PrintUtils.printAsTableauForm(getTableSchema(), it, new PrintWriter(System.out),
                    maxColumnWidth, nullColumn, deriveColumnWidthByType);
        } else if (printStyle instanceof RawContentStyle) {
            while (it.hasNext()) {
                System.out.println(String.join(",", PrintUtils.rowToString(it.next())));
            }
        } else {
            throw new TableException("Unsupported print style: " + printStyle);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder for creating a {@link CustomTableResultImpl}.
     */
    public static class Builder {
        private JobClient jobClient = null;
        private TableSchema tableSchema = null;
        private ResultKind resultKind = null;
        private CloseableIterator<Row> data = null;
        private PrintStyle printStyle = PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false);

        private Builder() {
        }

        /**
         * Specifies job client which associates the submitted Flink job.
         *
         * @param jobClient a {@link JobClient} for the submitted Flink job.
         */
        public Builder jobClient(JobClient jobClient) {
            this.jobClient = jobClient;
            return this;
        }

        /**
         * Specifies table schema of the execution result.
         *
         * @param tableSchema a {@link TableSchema} for the execution result.
         */
        public Builder tableSchema(TableSchema tableSchema) {
            Preconditions.checkNotNull(tableSchema, "tableSchema should not be null");
            this.tableSchema = tableSchema;
            return this;
        }

        /**
         * Specifies result kind of the execution result.
         *
         * @param resultKind a {@link ResultKind} for the execution result.
         */
        public Builder resultKind(ResultKind resultKind) {
            Preconditions.checkNotNull(resultKind, "resultKind should not be null");
            this.resultKind = resultKind;
            return this;
        }

        /**
         * Specifies an row iterator as the execution result.
         *
         * @param rowIterator a row iterator as the execution result.
         */
        public Builder data(CloseableIterator<Row> rowIterator) {
            Preconditions.checkNotNull(rowIterator, "rowIterator should not be null");
            this.data = rowIterator;
            return this;
        }

        /**
         * Specifies an row list as the execution result.
         *
         * @param rowList a row list as the execution result.
         */
        public Builder data(List<Row> rowList) {
            Preconditions.checkNotNull(rowList, "listRows should not be null");
            this.data = CloseableIterator.adapterForIterator(rowList.iterator());
            return this;
        }

        /**
         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
         */
        public Builder setPrintStyle(PrintStyle printStyle) {
            Preconditions.checkNotNull(printStyle, "printStyle should not be null");
            this.printStyle = printStyle;
            return this;
        }

        /**
         * Returns a {@link TableResult} instance.
         */
        public TableResult build() {
            return new CustomTableResultImpl(jobClient, tableSchema, resultKind, data, printStyle);
        }
    }

    /**
     * Root interface for all print styles.
     */
    public interface PrintStyle {
        /**
         * Create a tableau print style with given max column width, null column, and a flag to
         * indicate whether the column width is derived from type (true) or content (false), which
         * prints the result schema and content as tableau form.
         */
        static PrintStyle tableau(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType) {
            Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0");
            Preconditions.checkNotNull(nullColumn, "nullColumn should not be null");
            return new TableauStyle(maxColumnWidth, nullColumn, deriveColumnWidthByType);
        }

        /**
         * Create a raw content print style, which only print the result content as raw form. column
         * delimiter is ",", row delimiter is "\n".
         */
        static PrintStyle rawContent() {
            return new RawContentStyle();
        }
    }

    /**
     * print the result schema and content as tableau form.
     */
    private static final class TableauStyle implements PrintStyle {
        /**
         * A flag to indicate whether the column width is derived from type (true) or content
         * (false).
         */
        private final boolean deriveColumnWidthByType;
        private final int maxColumnWidth;
        private final String nullColumn;

        private TableauStyle(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType) {
            this.deriveColumnWidthByType = deriveColumnWidthByType;
            this.maxColumnWidth = maxColumnWidth;
            this.nullColumn = nullColumn;
        }

        public boolean isDeriveColumnWidthByType() {
            return deriveColumnWidthByType;
        }

        int getMaxColumnWidth() {
            return maxColumnWidth;
        }

        String getNullColumn() {
            return nullColumn;
        }
    }

    /**
     * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
     */
    private static final class RawContentStyle implements PrintStyle {
    }
}
```
dlink-app/dlink-app-1.11/src/main/java/com/dlink/executor/TableSchemaField.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import org.apache.flink.table.types.DataType;

/**
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class TableSchemaField {
    private String name;
    private DataType type;

    public TableSchemaField(String name, DataType type) {
        this.name = name;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public DataType getType() {
        return type;
    }

    public void setType(DataType type) {
        this.type = type;
    }
}
```
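These three deleted classes worked together: TableSchemaField described a column, and CustomTableResultImpl.buildTableResult assembled a schema and row list into a TableResult. A minimal same-package sketch (CustomTableResultImpl is package-private, so the hypothetical demo class below must live in com.dlink.executor):

```java
package com.dlink.executor; // required: the result class is package-private

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;

import java.util.Arrays;
import java.util.Collections;

public class TableResultDemo { // hypothetical demo class
    public static void main(String[] args) {
        TableResult result = CustomTableResultImpl.buildTableResult(
                Arrays.asList(
                        new TableSchemaField("id", DataTypes.INT()),
                        new TableSchemaField("name", DataTypes.STRING())),
                Collections.singletonList(Row.of(1, "dlink")));
        result.print(); // tableau-style output via the default PrintStyle
    }
}
```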
dlink-app/dlink-app-1.12/pom.xml:

```diff
@@ -33,6 +33,10 @@
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-client-1.12</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.12</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
```
dlink-app/dlink-app-1.12/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.delegation.PlannerFactory;
import org.apache.flink.table.factories.ComponentFactoryService;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.TableAggregateFunction;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.planner.delegation.ExecutorBase;
import org.apache.flink.table.planner.utils.ExecutorUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Customized TableEnvironmentImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager,
                                         TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog,
                                         Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
        return create(executionEnvironment, EnvironmentSettings.newInstance().build());
    }

    public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
        Configuration configuration = new Configuration();
        configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
        TableConfig tableConfig = new TableConfig();
        tableConfig.addConfiguration(configuration);
        return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
        return create(executionEnvironment, settings, new TableConfig());
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment,
                                                    EnvironmentSettings settings, TableConfig tableConfig) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        ModuleManager moduleManager = new ModuleManager();
        CatalogManager catalogManager = CatalogManager.newBuilder()
                .classLoader(classLoader)
                .config(tableConfig.getConfiguration())
                .defaultCatalog(settings.getBuiltInCatalogName(),
                        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
                .executionConfig(executionEnvironment.getConfig())
                .build();
        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, executionEnvironment);
        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties))
                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor,
                functionCatalog, planner, settings.isStreamingMode(), classLoader);
    }

    private static Executor lookupExecutor(Map<String, String> executorProperties,
                                           StreamExecutionEnvironment executionEnvironment) {
        try {
            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
        } catch (Exception var4) {
            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
        }
    }

    public ObjectNode getStreamGraph(String statement) {
        List<Operation> operations = super.parser.parse(statement);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        } else {
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            for (int i = 0; i < operations.size(); i++) {
                if (operations.get(i) instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operations.get(i));
                }
            }
            List<Transformation<?>> trans = super.planner.translate(modifyOperations);
            if (execEnv instanceof ExecutorBase) {
                StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                        ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
                JSONGenerator jsonGenerator = new JSONGenerator(streamGraph);
                String json = jsonGenerator.getJSON();
                ObjectMapper mapper = new ObjectMapper();
                ObjectNode objectNode = mapper.createObjectNode();
                try {
                    objectNode = (ObjectNode) mapper.readTree(json);
                } catch (JsonProcessingException e) {
                    e.printStackTrace();
                } finally {
                    return objectNode;
                }
            } else {
                throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
            }
        }
    }

    public JobPlanInfo getJobPlanInfo(List<String> statements) {
        return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements)));
    }

    public StreamGraph getStreamGraphFromInserts(List<String> statements) {
        List<ModifyOperation> modifyOperations = new ArrayList();
        for (String statement : statements) {
            List<Operation> operations = getParser().parse(statement);
            if (operations.size() != 1) {
                throw new TableException("Only single statement is supported.");
            } else {
                Operation operation = operations.get(0);
                if (operation instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operation);
                } else {
                    throw new TableException("Only insert statement is supported now.");
                }
            }
        }
        List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
        if (execEnv instanceof ExecutorBase) {
            StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                    ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
            if (tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) {
                streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME));
            }
            return streamGraph;
        } else {
            throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
        }
    }

    public JobGraph getJobGraphFromInserts(List<String> statements) {
        return getStreamGraphFromInserts(statements).getJobGraph();
    }

    public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
        SqlExplainResult record = new SqlExplainResult();
        List<Operation> operations = parser.parse(statement);
        record.setParseTrue(true);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        }
        List<Operation> operationlist = new ArrayList<>(operations);
        for (int i = 0; i < operationlist.size(); i++) {
            Operation operation = operationlist.get(i);
            if (operation instanceof ModifyOperation) {
                record.setType("Modify DML");
            } else if (operation instanceof ExplainOperation) {
                record.setType("Explain DML");
            } else if (operation instanceof QueryOperation) {
                record.setType("Query DML");
            } else {
                record.setExplain(operation.asSummaryString());
                operationlist.remove(i);
                record.setType("DDL");
                i = i - 1;
            }
        }
        record.setExplainTrue(true);
        if (operationlist.size() == 0) {
            return record;
        }
        record.setExplain(planner.explain(operationlist, extraDetails));
        return record;
    }

    public <T> void registerFunction(String name, TableFunction<T> tableFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfTableFunction(tableFunction);
        this.functionCatalog.registerTempSystemTableFunction(name, tableFunction, typeInfo);
    }

    public <T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(aggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(aggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, aggregateFunction, typeInfo, accTypeInfo);
    }

    public <T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(tableAggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(tableAggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, tableAggregateFunction, typeInfo, accTypeInfo);
    }

    public boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
        return false;
    }
}
```

(No newline at end of file.)
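Apart from the extra imports, the only body difference from the deleted 1.11 copy is createBatch: RuntimeExecutionMode and ExecutionOptions.RUNTIME_MODE were introduced in Flink 1.12 (FLIP-134), so the 1.11 variant has to fall back to the raw string key. A minimal comparison, with a hypothetical demo class name:

```java
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;

public class RuntimeModeDemo { // hypothetical demo class
    public static void main(String[] args) {
        // Flink 1.11 style: raw string key, as in the deleted 1.11 file.
        Configuration legacy = new Configuration();
        legacy.setString("execution.runtime-mode", "BATCH");

        // Flink 1.12+ style: typed ConfigOption, as in the deleted 1.12 file.
        Configuration typed = new Configuration();
        typed.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

        System.out.println(typed.get(ExecutionOptions.RUNTIME_MODE)); // BATCH
    }
}
```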
dlink-app/dlink-app-1.12/src/main/java/com/dlink/executor/CustomTableResultImpl.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import org.apache.flink.annotation.Internal;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.*;
import org.apache.flink.table.utils.PrintUtils;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.Preconditions;

import javax.annotation.Nullable;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.*;

/**
 * Customized CustomTableResultImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
@Internal
class CustomTableResultImpl implements TableResult {
    public static final TableResult TABLE_RESULT_OK =
            CustomTableResultImpl.builder()
                    .resultKind(ResultKind.SUCCESS)
                    .tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
                    .data(Collections.singletonList(Row.of("OK")))
                    .build();

    private final JobClient jobClient;
    private final TableSchema tableSchema;
    private final ResultKind resultKind;
    private final CloseableRowIteratorWrapper data;
    private final PrintStyle printStyle;

    private CustomTableResultImpl(
            @Nullable JobClient jobClient,
            TableSchema tableSchema,
            ResultKind resultKind,
            CloseableIterator<Row> data,
            PrintStyle printStyle) {
        this.jobClient = jobClient;
        this.tableSchema = Preconditions.checkNotNull(tableSchema, "tableSchema should not be null");
        this.resultKind = Preconditions.checkNotNull(resultKind, "resultKind should not be null");
        Preconditions.checkNotNull(data, "data should not be null");
        this.data = new CloseableRowIteratorWrapper(data);
        this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
    }

    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
        Builder builder = builder().resultKind(ResultKind.SUCCESS);
        if (fields.size() > 0) {
            TableSchema.Builder tableSchemaBuild = TableSchema.builder();
            for (int i = 0; i < fields.size(); i++) {
                tableSchemaBuild.field(fields.get(i).getName(), fields.get(i).getType());
            }
            builder.tableSchema(tableSchemaBuild.build()).data(rows);
        }
        return builder.build();
    }

    @Override
    public Optional<JobClient> getJobClient() {
        return Optional.ofNullable(jobClient);
    }

    @Override
    public void await() throws InterruptedException, ExecutionException {
        try {
            awaitInternal(-1, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
            // do nothing
        }
    }

    @Override
    public void await(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        awaitInternal(timeout, unit);
    }

    private void awaitInternal(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        if (jobClient == null) {
            return;
        }
        ExecutorService executor =
                Executors.newFixedThreadPool(1, r -> new Thread(r, "TableResult-await-thread"));
        try {
            CompletableFuture<Void> future = CompletableFuture.runAsync(
                    () -> {
                        while (!data.isFirstRowReady()) {
                            try {
                                Thread.sleep(100);
                            } catch (InterruptedException e) {
                                throw new TableException("Thread is interrupted");
                            }
                        }
                    },
                    executor);
            if (timeout >= 0) {
                future.get(timeout, unit);
            } else {
                future.get();
            }
        } finally {
            executor.shutdown();
        }
    }

    @Override
    public TableSchema getTableSchema() {
        return tableSchema;
    }

    @Override
    public ResultKind getResultKind() {
        return resultKind;
    }

    @Override
    public CloseableIterator<Row> collect() {
        return data;
    }

    @Override
    public void print() {
        Iterator<Row> it = collect();
        if (printStyle instanceof TableauStyle) {
            int maxColumnWidth = ((TableauStyle) printStyle).getMaxColumnWidth();
            String nullColumn = ((TableauStyle) printStyle).getNullColumn();
            boolean deriveColumnWidthByType = ((TableauStyle) printStyle).isDeriveColumnWidthByType();
            boolean printRowKind = ((TableauStyle) printStyle).isPrintRowKind();
            PrintUtils.printAsTableauForm(getTableSchema(), it, new PrintWriter(System.out),
                    maxColumnWidth, nullColumn, deriveColumnWidthByType, printRowKind);
        } else if (printStyle instanceof RawContentStyle) {
            while (it.hasNext()) {
                System.out.println(String.join(",", PrintUtils.rowToString(it.next())));
            }
        } else {
            throw new TableException("Unsupported print style: " + printStyle);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder for creating a {@link CustomTableResultImpl}.
     */
    public static class Builder {
        private JobClient jobClient = null;
        private TableSchema tableSchema = null;
        private ResultKind resultKind = null;
        private CloseableIterator<Row> data = null;
        private PrintStyle printStyle =
                PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);

        private Builder() {
        }

        /**
         * Specifies job client which associates the submitted Flink job.
         *
         * @param jobClient a {@link JobClient} for the submitted Flink job.
         */
        public Builder jobClient(JobClient jobClient) {
            this.jobClient = jobClient;
            return this;
        }

        /**
         * Specifies table schema of the execution result.
         *
         * @param tableSchema a {@link TableSchema} for the execution result.
         */
        public Builder tableSchema(TableSchema tableSchema) {
            Preconditions.checkNotNull(tableSchema, "tableSchema should not be null");
            this.tableSchema = tableSchema;
            return this;
        }

        /**
         * Specifies result kind of the execution result.
         *
         * @param resultKind a {@link ResultKind} for the execution result.
         */
        public Builder resultKind(ResultKind resultKind) {
            Preconditions.checkNotNull(resultKind, "resultKind should not be null");
            this.resultKind = resultKind;
            return this;
        }

        /**
         * Specifies an row iterator as the execution result.
         *
         * @param rowIterator a row iterator as the execution result.
         */
        public Builder data(CloseableIterator<Row> rowIterator) {
            Preconditions.checkNotNull(rowIterator, "rowIterator should not be null");
            this.data = rowIterator;
            return this;
        }

        /**
         * Specifies an row list as the execution result.
         *
         * @param rowList a row list as the execution result.
         */
        public Builder data(List<Row> rowList) {
            Preconditions.checkNotNull(rowList, "listRows should not be null");
            this.data = CloseableIterator.adapterForIterator(rowList.iterator());
            return this;
        }

        /**
         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
         */
        public Builder setPrintStyle(PrintStyle printStyle) {
            Preconditions.checkNotNull(printStyle, "printStyle should not be null");
            this.printStyle = printStyle;
            return this;
        }

        /**
         * Returns a {@link TableResult} instance.
         */
        public TableResult build() {
            return new CustomTableResultImpl(jobClient, tableSchema, resultKind, data, printStyle);
        }
    }

    /**
     * Root interface for all print styles.
     */
    public interface PrintStyle {
        /**
         * Create a tableau print style with given max column width, null column, change mode
         * indicator and a flag to indicate whether the column width is derived from type (true) or
         * content (false), which prints the result schema and content as tableau form.
         */
        static PrintStyle tableau(int maxColumnWidth, String nullColumn,
                                  boolean deriveColumnWidthByType, boolean printRowKind) {
            Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0");
            Preconditions.checkNotNull(nullColumn, "nullColumn should not be null");
            return new TableauStyle(maxColumnWidth, nullColumn, deriveColumnWidthByType, printRowKind);
        }

        /**
         * Create a raw content print style, which only print the result content as raw form. column
         * delimiter is ",", row delimiter is "\n".
         */
        static PrintStyle rawContent() {
            return new RawContentStyle();
        }
    }

    /**
     * print the result schema and content as tableau form.
     */
    private static final class TableauStyle implements PrintStyle {
        /**
         * A flag to indicate whether the column width is derived from type (true) or content
         * (false).
         */
        private final boolean deriveColumnWidthByType;
        private final int maxColumnWidth;
        private final String nullColumn;
        /**
         * A flag to indicate whether print row kind info.
         */
        private final boolean printRowKind;

        private TableauStyle(int maxColumnWidth, String nullColumn,
                             boolean deriveColumnWidthByType, boolean printRowKind) {
            this.deriveColumnWidthByType = deriveColumnWidthByType;
            this.maxColumnWidth = maxColumnWidth;
            this.nullColumn = nullColumn;
            this.printRowKind = printRowKind;
        }

        public boolean isDeriveColumnWidthByType() {
            return deriveColumnWidthByType;
        }

        int getMaxColumnWidth() {
            return maxColumnWidth;
        }

        String getNullColumn() {
            return nullColumn;
        }

        public boolean isPrintRowKind() {
            return printRowKind;
        }
    }

    /**
     * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
     */
    private static final class RawContentStyle implements PrintStyle {
    }

    /**
     * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
     *
     * <p>The first row is ready when {@link #hasNext} method returns true or {@link #next()} method
     * returns a row. The execution order of {@link TableResult#collect} method and {@link
     * TableResult#await()} may be arbitrary, this class will record whether the first row is ready
     * (or accessed).
     */
    private static final class CloseableRowIteratorWrapper implements CloseableIterator<Row> {
        private final CloseableIterator<Row> iterator;
        private boolean isFirstRowReady = false;

        private CloseableRowIteratorWrapper(CloseableIterator<Row> iterator) {
            this.iterator = iterator;
        }

        @Override
        public void close() throws Exception {
            iterator.close();
        }

        @Override
        public boolean hasNext() {
            boolean hasNext = iterator.hasNext();
            isFirstRowReady = isFirstRowReady || hasNext;
            return hasNext;
        }

        @Override
        public Row next() {
            Row next = iterator.next();
            isFirstRowReady = true;
            return next;
        }

        public boolean isFirstRowReady() {
            return isFirstRowReady || hasNext();
        }
    }
}
```
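Unlike the 1.11 copy, this version implements the await()/collect() contract: CloseableRowIteratorWrapper records whether the first row has been observed, and awaitInternal polls that flag on a dedicated thread until it flips or the timeout expires. A same-package sketch of the intended call order (hypothetical demo class; with no JobClient attached, await returns immediately):

```java
package com.dlink.executor; // required: the result class is package-private

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

import java.util.Collections;

public class AwaitDemo { // hypothetical demo class
    public static void main(String[] args) throws Exception {
        TableResult result = CustomTableResultImpl.buildTableResult(
                Collections.singletonList(new TableSchemaField("msg", DataTypes.STRING())),
                Collections.singletonList(Row.of("hello")));
        result.await(); // jobClient == null here, so this is a no-op
        try (CloseableIterator<Row> rows = result.collect()) {
            rows.forEachRemaining(System.out::println); // marks the first row as ready
        }
    }
}
```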
dlink-app/dlink-app-1.12/src/main/java/com/dlink/executor/TableSchemaField.java (deleted, 100644 → 0):

```java
package com.dlink.executor;

import org.apache.flink.table.types.DataType;

/**
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class TableSchemaField {
    private String name;
    private DataType type;

    public TableSchemaField(String name, DataType type) {
        this.name = name;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public DataType getType() {
        return type;
    }

    public void setType(DataType type) {
        this.type = type;
    }
}
```
dlink-app/dlink-app-1.13/pom.xml:

```diff
@@ -33,6 +33,10 @@
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-client-1.13</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.13</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
```
dlink-app/dlink-app-1.13/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java (deleted, 100644 → 0; the capture ends mid-file below):

```java
package com.dlink.executor;

import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.delegation.PlannerFactory;
import org.apache.flink.table.factories.ComponentFactoryService;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.TableAggregateFunction;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.command.ResetOperation;
import org.apache.flink.table.operations.command.SetOperation;
import org.apache.flink.table.planner.delegation.ExecutorBase;
import org.apache.flink.table.planner.utils.ExecutorUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Customized TableEnvironmentImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager,
                                         TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog,
                                         Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
        return create(executionEnvironment, EnvironmentSettings.newInstance().build());
    }

    public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
        Configuration configuration = new Configuration();
        configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
        TableConfig tableConfig = new TableConfig();
        tableConfig.addConfiguration(configuration);
        return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
        return create(executionEnvironment, settings, new TableConfig());
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment,
                                                    EnvironmentSettings settings, TableConfig tableConfig) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        ModuleManager moduleManager = new ModuleManager();
        CatalogManager catalogManager = CatalogManager.newBuilder()
                .classLoader(classLoader)
                .config(tableConfig.getConfiguration())
                .defaultCatalog(settings.getBuiltInCatalogName(),
                        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
                .executionConfig(executionEnvironment.getConfig())
                .build();
        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, executionEnvironment);
        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties))
                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor,
                functionCatalog, planner, settings.isStreamingMode(), classLoader);
    }

    private static Executor lookupExecutor(Map<String, String> executorProperties,
                                           StreamExecutionEnvironment executionEnvironment) {
        try {
            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
        } catch (Exception var4) {
            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
        }
    }

    public ObjectNode getStreamGraph(String statement) {
        List<Operation> operations = super.getParser().parse(statement);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        } else {
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            for (int i = 0; i < operations.size(); i++) {
                if (operations.get(i) instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operations.get(i));
                }
            }
            List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
            if (execEnv instanceof ExecutorBase) {
                StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                        ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
                JSONGenerator jsonGenerator = new JSONGenerator(streamGraph);
                String json = jsonGenerator.getJSON();
                ObjectMapper mapper = new ObjectMapper();
                ObjectNode objectNode = mapper.createObjectNode();
                try {
                    objectNode = (ObjectNode) mapper.readTree(json);
                } catch (JsonProcessingException e) {
                    e.printStackTrace();
                } finally {
                    return objectNode;
                }
            } else {
                throw new TableException("Unsupported SQL query! explainSql() need a single SQL to query.");
            }
        }
    }

    public JobPlanInfo getJobPlanInfo(List<String> statements) {
        return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements)));
    }

    public StreamGraph getStreamGraphFromInserts(List<String> statements) {
        List<ModifyOperation> modifyOperations = new ArrayList();
        for (String statement : statements) {
            List<Operation> operations = getParser().parse(statement);
            if (operations.size() != 1) {
                throw new TableException("Only single statement is supported.");
            } else {
                Operation operation = operations.get(0);
                if (operation instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operation);
                } else {
                    throw new TableException("Only insert statement is supported now.");
                }
            }
        }
        List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
        if (execEnv instanceof ExecutorBase) {
            StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(
                    ((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
            if (tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) {
                streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME));
            }
            return streamGraph;
        } else {
            throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
        }
    }

    public JobGraph getJobGraphFromInserts(List<String> statements) {
        return getStreamGraphFromInserts(statements).getJobGraph();
    }

    public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
        SqlExplainResult record = new SqlExplainResult();
        List<Operation> operations = getParser().parse(statement);
        record.setParseTrue(true);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        }
        List<Operation> operationlist = new ArrayList<>(operations);
        for (int i = 0; i < operationlist.size(); i++) {
            Operation operation = operationlist.get(i);
            if (operation instanceof ModifyOperation) {
                record.setType("Modify DML");
            } else if (operation instanceof ExplainOperation) {
                record.setType("Explain DML");
            } else if (operation instanceof QueryOperation) {
                record.setType("Query DML");
            } else {
                record
```
.
setExplain
(
operation
.
asSummaryString
());
operationlist
.
remove
(
i
);
record
.
setType
(
"DDL"
);
i
=
i
-
1
;
}
}
record
.
setExplainTrue
(
true
);
if
(
operationlist
.
size
()
==
0
)
{
//record.setExplain("DDL语句不进行解释。");
return
record
;
}
record
.
setExplain
(
planner
.
explain
(
operationlist
,
extraDetails
));
return
record
;
}
public
<
T
>
void
registerFunction
(
String
name
,
TableFunction
<
T
>
tableFunction
)
{
TypeInformation
<
T
>
typeInfo
=
UserDefinedFunctionHelper
.
getReturnTypeOfTableFunction
(
tableFunction
);
this
.
functionCatalog
.
registerTempSystemTableFunction
(
name
,
tableFunction
,
typeInfo
);
}
public
<
T
,
ACC
>
void
registerFunction
(
String
name
,
AggregateFunction
<
T
,
ACC
>
aggregateFunction
)
{
TypeInformation
<
T
>
typeInfo
=
UserDefinedFunctionHelper
.
getReturnTypeOfAggregateFunction
(
aggregateFunction
);
TypeInformation
<
ACC
>
accTypeInfo
=
UserDefinedFunctionHelper
.
getAccumulatorTypeOfAggregateFunction
(
aggregateFunction
);
this
.
functionCatalog
.
registerTempSystemAggregateFunction
(
name
,
aggregateFunction
,
typeInfo
,
accTypeInfo
);
}
public
<
T
,
ACC
>
void
registerFunction
(
String
name
,
TableAggregateFunction
<
T
,
ACC
>
tableAggregateFunction
)
{
TypeInformation
<
T
>
typeInfo
=
UserDefinedFunctionHelper
.
getReturnTypeOfAggregateFunction
(
tableAggregateFunction
);
TypeInformation
<
ACC
>
accTypeInfo
=
UserDefinedFunctionHelper
.
getAccumulatorTypeOfAggregateFunction
(
tableAggregateFunction
);
this
.
functionCatalog
.
registerTempSystemAggregateFunction
(
name
,
tableAggregateFunction
,
typeInfo
,
accTypeInfo
);
}
public
boolean
parseAndLoadConfiguration
(
String
statement
,
StreamExecutionEnvironment
environment
,
Map
<
String
,
Object
>
setMap
)
{
List
<
Operation
>
operations
=
getParser
().
parse
(
statement
);
for
(
Operation
operation
:
operations
)
{
if
(
operation
instanceof
SetOperation
)
{
callSet
((
SetOperation
)
operation
,
environment
,
setMap
);
return
true
;
}
else
if
(
operation
instanceof
ResetOperation
)
{
callReset
((
ResetOperation
)
operation
,
environment
,
setMap
);
return
true
;
}
}
return
false
;
}
private
void
callSet
(
SetOperation
setOperation
,
StreamExecutionEnvironment
environment
,
Map
<
String
,
Object
>
setMap
)
{
if
(
setOperation
.
getKey
().
isPresent
()
&&
setOperation
.
getValue
().
isPresent
())
{
String
key
=
setOperation
.
getKey
().
get
().
trim
();
String
value
=
setOperation
.
getValue
().
get
().
trim
();
Map
<
String
,
String
>
confMap
=
new
HashMap
<>();
confMap
.
put
(
key
,
value
);
setMap
.
put
(
key
,
value
);
Configuration
configuration
=
Configuration
.
fromMap
(
confMap
);
environment
.
getConfig
().
configure
(
configuration
,
null
);
getConfig
().
addConfiguration
(
configuration
);
}
}
private
void
callReset
(
ResetOperation
resetOperation
,
StreamExecutionEnvironment
environment
,
Map
<
String
,
Object
>
setMap
)
{
if
(
resetOperation
.
getKey
().
isPresent
())
{
String
key
=
resetOperation
.
getKey
().
get
().
trim
();
Map
<
String
,
String
>
confMap
=
new
HashMap
<>();
confMap
.
put
(
key
,
null
);
setMap
.
remove
(
key
);
Configuration
configuration
=
Configuration
.
fromMap
(
confMap
);
environment
.
getConfig
().
configure
(
configuration
,
null
);
getConfig
().
addConfiguration
(
configuration
);
}
else
{
setMap
.
clear
();
}
}
}
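For orientation, a minimal usage sketch of the environment above (not part of the commit; the INSERT statement and table names are hypothetical, and both tables are assumed to have been registered in the catalog beforehand):

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
CustomTableEnvironmentImpl tableEnv = CustomTableEnvironmentImpl.create(env);
// getStreamGraph parses exactly one statement and returns its stream graph
// as a Jackson ObjectNode (Dlink renders this JSON as the job DAG).
ObjectNode plan = tableEnv.getStreamGraph("INSERT INTO sink_t SELECT * FROM source_t");
System.out.println(plan);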
dlink-app/dlink-app-1.13/src/main/java/com/dlink/executor/CustomTableResultImpl.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.executor;

import org.apache.flink.annotation.Internal;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ResultKind;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.utils.PrintUtils;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.Preconditions;

import javax.annotation.Nullable;
import java.io.PrintWriter;
import java.time.ZoneId;
import java.util.*;
import java.util.concurrent.*;

/**
 * Customized CustomTableResultImpl
 *
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
@Internal
public class CustomTableResultImpl implements TableResult {
    public static final TableResult TABLE_RESULT_OK =
            CustomTableResultImpl.builder()
                    .resultKind(ResultKind.SUCCESS)
                    .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
                    .data(Collections.singletonList(Row.of("OK")))
                    .build();
    private final JobClient jobClient;
    private final ResolvedSchema resolvedSchema;
    private final ResultKind resultKind;
    private final CloseableRowIteratorWrapper data;
    private final PrintStyle printStyle;
    private final ZoneId sessionTimeZone;

    private CustomTableResultImpl(
            @Nullable JobClient jobClient,
            ResolvedSchema resolvedSchema,
            ResultKind resultKind,
            CloseableIterator<Row> data,
            PrintStyle printStyle,
            ZoneId sessionTimeZone) {
        this.jobClient = jobClient;
        this.resolvedSchema = Preconditions.checkNotNull(resolvedSchema, "resolvedSchema should not be null");
        this.resultKind = Preconditions.checkNotNull(resultKind, "resultKind should not be null");
        Preconditions.checkNotNull(data, "data should not be null");
        this.data = new CloseableRowIteratorWrapper(data);
        this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
        this.sessionTimeZone = Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
    }

    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
        Builder builder = builder().resultKind(ResultKind.SUCCESS);
        if (fields.size() > 0) {
            List<String> columnNames = new ArrayList<>();
            List<DataType> columnTypes = new ArrayList<>();
            for (int i = 0; i < fields.size(); i++) {
                columnNames.add(fields.get(i).getName());
                columnTypes.add(fields.get(i).getType());
            }
            builder.schema(ResolvedSchema.physical(columnNames, columnTypes)).data(rows);
        }
        return builder.build();
    }

    @Override
    public Optional<JobClient> getJobClient() {
        return Optional.ofNullable(jobClient);
    }

    @Override
    public void await() throws InterruptedException, ExecutionException {
        try {
            awaitInternal(-1, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
            // do nothing
        }
    }

    @Override
    public void await(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        awaitInternal(timeout, unit);
    }

    private void awaitInternal(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        if (jobClient == null) {
            return;
        }
        ExecutorService executor = Executors.newFixedThreadPool(1, r -> new Thread(r, "TableResult-await-thread"));
        try {
            CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                while (!data.isFirstRowReady()) {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        throw new TableException("Thread is interrupted");
                    }
                }
            }, executor);
            if (timeout >= 0) {
                future.get(timeout, unit);
            } else {
                future.get();
            }
        } finally {
            executor.shutdown();
        }
    }

    @Override
    public ResolvedSchema getResolvedSchema() {
        return resolvedSchema;
    }

    @Override
    public ResultKind getResultKind() {
        return resultKind;
    }

    @Override
    public CloseableIterator<Row> collect() {
        return data;
    }

    @Override
    public void print() {
        Iterator<Row> it = collect();
        if (printStyle instanceof TableauStyle) {
            int maxColumnWidth = ((TableauStyle) printStyle).getMaxColumnWidth();
            String nullColumn = ((TableauStyle) printStyle).getNullColumn();
            boolean deriveColumnWidthByType = ((TableauStyle) printStyle).isDeriveColumnWidthByType();
            boolean printRowKind = ((TableauStyle) printStyle).isPrintRowKind();
            PrintUtils.printAsTableauForm(
                    getResolvedSchema(),
                    it,
                    new PrintWriter(System.out),
                    maxColumnWidth,
                    nullColumn,
                    deriveColumnWidthByType,
                    printRowKind,
                    sessionTimeZone);
        } else if (printStyle instanceof RawContentStyle) {
            while (it.hasNext()) {
                System.out.println(String.join(",", PrintUtils.rowToString(it.next(), getResolvedSchema(), sessionTimeZone)));
            }
        } else {
            throw new TableException("Unsupported print style: " + printStyle);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder for creating a {@link CustomTableResultImpl}.
     */
    public static class Builder {
        private JobClient jobClient = null;
        private ResolvedSchema resolvedSchema = null;
        private ResultKind resultKind = null;
        private CloseableIterator<Row> data = null;
        private PrintStyle printStyle = PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);
        private ZoneId sessionTimeZone = ZoneId.of("UTC");

        private Builder() {
        }

        /**
         * Specifies job client which associates the submitted Flink job.
         *
         * @param jobClient a {@link JobClient} for the submitted Flink job.
         */
        public Builder jobClient(JobClient jobClient) {
            this.jobClient = jobClient;
            return this;
        }

        /**
         * Specifies schema of the execution result.
         *
         * @param resolvedSchema a {@link ResolvedSchema} for the execution result.
         */
        public Builder schema(ResolvedSchema resolvedSchema) {
            Preconditions.checkNotNull(resolvedSchema, "resolvedSchema should not be null");
            this.resolvedSchema = resolvedSchema;
            return this;
        }

        /**
         * Specifies result kind of the execution result.
         *
         * @param resultKind a {@link ResultKind} for the execution result.
         */
        public Builder resultKind(ResultKind resultKind) {
            Preconditions.checkNotNull(resultKind, "resultKind should not be null");
            this.resultKind = resultKind;
            return this;
        }

        /**
         * Specifies a row iterator as the execution result.
         *
         * @param rowIterator a row iterator as the execution result.
         */
        public Builder data(CloseableIterator<Row> rowIterator) {
            Preconditions.checkNotNull(rowIterator, "rowIterator should not be null");
            this.data = rowIterator;
            return this;
        }

        /**
         * Specifies a row list as the execution result.
         *
         * @param rowList a row list as the execution result.
         */
        public Builder data(List<Row> rowList) {
            Preconditions.checkNotNull(rowList, "listRows should not be null");
            this.data = CloseableIterator.adapterForIterator(rowList.iterator());
            return this;
        }

        /**
         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
         */
        public Builder setPrintStyle(PrintStyle printStyle) {
            Preconditions.checkNotNull(printStyle, "printStyle should not be null");
            this.printStyle = printStyle;
            return this;
        }

        /**
         * Specifies session time zone.
         */
        public Builder setSessionTimeZone(ZoneId sessionTimeZone) {
            Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
            this.sessionTimeZone = sessionTimeZone;
            return this;
        }

        /**
         * Returns a {@link TableResult} instance.
         */
        public TableResult build() {
            return new CustomTableResultImpl(jobClient, resolvedSchema, resultKind, data, printStyle, sessionTimeZone);
        }
    }

    /**
     * Root interface for all print styles.
     */
    public interface PrintStyle {
        /**
         * Create a tableau print style with given max column width, null column, change mode
         * indicator and a flag to indicate whether the column width is derived from type (true) or
         * content (false), which prints the result schema and content as tableau form.
         */
        static PrintStyle tableau(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType, boolean printRowKind) {
            Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0");
            Preconditions.checkNotNull(nullColumn, "nullColumn should not be null");
            return new TableauStyle(maxColumnWidth, nullColumn, deriveColumnWidthByType, printRowKind);
        }

        /**
         * Create a raw content print style, which only prints the result content as raw form. Column
         * delimiter is ",", row delimiter is "\n".
         */
        static PrintStyle rawContent() {
            return new RawContentStyle();
        }
    }

    /**
     * Prints the result schema and content as tableau form.
     */
    private static final class TableauStyle implements PrintStyle {
        /**
         * A flag to indicate whether the column width is derived from type (true) or content
         * (false).
         */
        private final boolean deriveColumnWidthByType;
        private final int maxColumnWidth;
        private final String nullColumn;
        /**
         * A flag to indicate whether to print row kind info.
         */
        private final boolean printRowKind;

        private TableauStyle(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType, boolean printRowKind) {
            this.deriveColumnWidthByType = deriveColumnWidthByType;
            this.maxColumnWidth = maxColumnWidth;
            this.nullColumn = nullColumn;
            this.printRowKind = printRowKind;
        }

        public boolean isDeriveColumnWidthByType() {
            return deriveColumnWidthByType;
        }

        int getMaxColumnWidth() {
            return maxColumnWidth;
        }

        String getNullColumn() {
            return nullColumn;
        }

        public boolean isPrintRowKind() {
            return printRowKind;
        }
    }

    /**
     * Only prints the result content as raw form. Column delimiter is ",", row delimiter is "\n".
     */
    private static final class RawContentStyle implements PrintStyle {
    }

    /**
     * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
     *
     * <p>The first row is ready when {@link #hasNext} method returns true or {@link #next()} method
     * returns a row. The execution order of {@link TableResult#collect} method and {@link
     * TableResult#await()} may be arbitrary, this class will record whether the first row is ready
     * (or accessed).
     */
    private static final class CloseableRowIteratorWrapper implements CloseableIterator<Row> {
        private final CloseableIterator<Row> iterator;
        private boolean isFirstRowReady = false;

        private CloseableRowIteratorWrapper(CloseableIterator<Row> iterator) {
            this.iterator = iterator;
        }

        @Override
        public void close() throws Exception {
            iterator.close();
        }

        @Override
        public boolean hasNext() {
            boolean hasNext = iterator.hasNext();
            isFirstRowReady = isFirstRowReady || hasNext;
            return hasNext;
        }

        @Override
        public Row next() {
            Row next = iterator.next();
            isFirstRowReady = true;
            return next;
        }

        public boolean isFirstRowReady() {
            return isFirstRowReady || hasNext();
        }
    }
}
\ No newline at end of file
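A short sketch of how buildTableResult above might be used (not part of the commit; assumes the Flink table classes and java.util.Arrays are imported):

List<TableSchemaField> fields = Arrays.asList(
        new TableSchemaField("id", DataTypes.INT()),
        new TableSchemaField("name", DataTypes.STRING()));
List<Row> rows = Arrays.asList(Row.of(1, "a"), Row.of(2, "b"));
// Wraps the rows in a SUCCESS TableResult with a physical two-column schema.
TableResult result = CustomTableResultImpl.buildTableResult(fields, rows);
result.print(); // default tableau style, UTC session time zone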
dlink-app/dlink-app-1.13/src/main/java/com/dlink/executor/TableSchemaField.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.executor;

import org.apache.flink.table.types.DataType;

/**
 * @author wenmo
 * @since 2021/6/7 22:06
 **/
public class TableSchemaField {
    private String name;
    private DataType type;

    public TableSchemaField(String name, DataType type) {
        this.name = name;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public DataType getType() {
        return type;
    }

    public void setType(DataType type) {
        this.type = type;
    }
}
dlink-app/dlink-app-1.14/pom.xml
View file @
429a25da
...
@@ -33,6 +33,10 @@
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-client-1.14</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.14</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
...
dlink-app/dlink-app-1.14/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.executor;

import com.dlink.assertion.Asserts;
import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.factories.PlannerFactoryUtil;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.TableAggregateFunction;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.command.ResetOperation;
import org.apache.flink.table.operations.command.SetOperation;
import org.apache.flink.table.planner.delegation.DefaultExecutor;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Customized TableEnvironmentImpl
 *
 * @author wenmo
 * @since 2021/10/22 10:02
 **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

    protected CustomTableEnvironmentImpl(
            CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor,
            FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
    }

    public CustomTableEnvironmentImpl(
            CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, TableConfig tableConfig,
            StreamExecutionEnvironment executionEnvironment, Planner planner, Executor executor, boolean isStreamingMode,
            ClassLoader userClassLoader) {
        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
        return create(executionEnvironment, EnvironmentSettings.newInstance().build(), TableConfig.getDefault());
    }

    public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
        Configuration configuration = new Configuration();
        configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
        TableConfig tableConfig = new TableConfig();
        tableConfig.addConfiguration(configuration);
        return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
        // temporary solution until FLINK-15635 is fixed
        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        final ModuleManager moduleManager = new ModuleManager();
        final CatalogManager catalogManager = CatalogManager.newBuilder()
                .classLoader(classLoader)
                .config(tableConfig.getConfiguration())
                .defaultCatalog(settings.getBuiltInCatalogName(),
                        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
                .executionConfig(executionEnvironment.getConfig())
                .build();
        final FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
        final Executor executor = lookupExecutor(classLoader, settings.getExecutor(), executionEnvironment);
        final Planner planner = PlannerFactoryUtil.createPlanner(settings.getPlanner(), executor, tableConfig, catalogManager, functionCatalog);
        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, functionCatalog, tableConfig,
                executionEnvironment, planner, executor, settings.isStreamingMode(), classLoader);
    }

    private static Executor lookupExecutor(ClassLoader classLoader, String executorIdentifier, StreamExecutionEnvironment executionEnvironment) {
        try {
            final ExecutorFactory executorFactory = FactoryUtil.discoverFactory(classLoader, ExecutorFactory.class, executorIdentifier);
            final Method createMethod = executorFactory.getClass().getMethod("create", StreamExecutionEnvironment.class);
            return (Executor) createMethod.invoke(executorFactory, executionEnvironment);
        } catch (Exception e) {
            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", e);
        }
    }

    public ObjectNode getStreamGraph(String statement) {
        List<Operation> operations = super.getParser().parse(statement);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        } else {
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            for (int i = 0; i < operations.size(); i++) {
                if (operations.get(i) instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operations.get(i));
                }
            }
            List<Transformation<?>> trans = super.planner.translate(modifyOperations);
            if (execEnv instanceof DefaultExecutor) {
                StreamGraph streamGraph = ((DefaultExecutor) execEnv).getExecutionEnvironment().generateStreamGraph(trans);
                JSONGenerator jsonGenerator = new JSONGenerator(streamGraph);
                String json = jsonGenerator.getJSON();
                ObjectMapper mapper = new ObjectMapper();
                ObjectNode objectNode = mapper.createObjectNode();
                try {
                    objectNode = (ObjectNode) mapper.readTree(json);
                } catch (JsonProcessingException e) {
                    e.printStackTrace();
                } finally {
                    return objectNode;
                }
            } else {
                throw new TableException("Unsupported SQL query! explainSql() need a single SQL to query.");
            }
        }
    }

    @Override
    public JobPlanInfo getJobPlanInfo(List<String> statements) {
        return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements)));
    }

    public StreamGraph getStreamGraphFromInserts(List<String> statements) {
        List<ModifyOperation> modifyOperations = new ArrayList<>();
        for (String statement : statements) {
            List<Operation> operations = getParser().parse(statement);
            if (operations.size() != 1) {
                throw new TableException("Only single statement is supported.");
            } else {
                Operation operation = operations.get(0);
                if (operation instanceof ModifyOperation) {
                    modifyOperations.add((ModifyOperation) operation);
                } else {
                    throw new TableException("Only insert statement is supported now.");
                }
            }
        }
        List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
        if (execEnv instanceof DefaultExecutor) {
            StreamGraph streamGraph = ((DefaultExecutor) execEnv).getExecutionEnvironment().generateStreamGraph(trans);
            if (tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) {
                streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME));
            }
            return streamGraph;
        } else {
            throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
        }
    }

    public JobGraph getJobGraphFromInserts(List<String> statements) {
        return getStreamGraphFromInserts(statements).getJobGraph();
    }

    public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
        SqlExplainResult record = new SqlExplainResult();
        List<Operation> operations = getParser().parse(statement);
        record.setParseTrue(true);
        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
        }
        List<Operation> operationlist = new ArrayList<>(operations);
        for (int i = 0; i < operationlist.size(); i++) {
            Operation operation = operationlist.get(i);
            if (operation instanceof ModifyOperation) {
                record.setType("Modify DML");
            } else if (operation instanceof ExplainOperation) {
                record.setType("Explain DML");
            } else if (operation instanceof QueryOperation) {
                record.setType("Query DML");
            } else {
                record.setExplain(operation.asSummaryString());
                operationlist.remove(i);
                record.setType("DDL");
                i = i - 1;
            }
        }
        record.setExplainTrue(true);
        if (operationlist.size() == 0) {
            //record.setExplain("DDL statements are not explained.");
            return record;
        }
        record.setExplain(planner.explain(operationlist, extraDetails));
        return record;
    }

    public <T> void registerFunction(String name, TableFunction<T> tableFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfTableFunction(tableFunction);
        this.functionCatalog.registerTempSystemTableFunction(name, tableFunction, typeInfo);
    }

    public <T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(aggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(aggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, aggregateFunction, typeInfo, accTypeInfo);
    }

    public <T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggregateFunction) {
        TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfAggregateFunction(tableAggregateFunction);
        TypeInformation<ACC> accTypeInfo = UserDefinedFunctionHelper.getAccumulatorTypeOfAggregateFunction(tableAggregateFunction);
        this.functionCatalog.registerTempSystemAggregateFunction(name, tableAggregateFunction, typeInfo, accTypeInfo);
    }

    public boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
        List<Operation> operations = getParser().parse(statement);
        for (Operation operation : operations) {
            if (operation instanceof SetOperation) {
                callSet((SetOperation) operation, environment, setMap);
                return true;
            } else if (operation instanceof ResetOperation) {
                callReset((ResetOperation) operation, environment, setMap);
                return true;
            }
        }
        return false;
    }

    private void callSet(SetOperation setOperation, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
        if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) {
            String key = setOperation.getKey().get().trim();
            String value = setOperation.getValue().get().trim();
            if (Asserts.isNullString(key) || Asserts.isNullString(value)) {
                return;
            }
            Map<String, String> confMap = new HashMap<>();
            confMap.put(key, value);
            setMap.put(key, value);
            Configuration configuration = Configuration.fromMap(confMap);
            environment.getConfig().configure(configuration, null);
            getConfig().addConfiguration(configuration);
        }
    }

    private void callReset(ResetOperation resetOperation, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
        if (resetOperation.getKey().isPresent()) {
            String key = resetOperation.getKey().get().trim();
            if (Asserts.isNullString(key)) {
                return;
            }
            Map<String, String> confMap = new HashMap<>();
            confMap.put(key, null);
            setMap.remove(key);
            Configuration configuration = Configuration.fromMap(confMap);
            environment.getConfig().configure(configuration, null);
            getConfig().addConfiguration(configuration);
        } else {
            setMap.clear();
        }
    }
}
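A sketch of the SET/RESET interception above (not part of the commit; env and tableEnv are assumed to be a StreamExecutionEnvironment and a CustomTableEnvironmentImpl created from it, and the key/value are illustrative):

Map<String, Object> setMap = new HashMap<>();
// SET statements are handled here and never reach the planner; the pair is
// recorded in setMap and applied to both the execution and table configs.
boolean handled = tableEnv.parseAndLoadConfiguration(
        "SET 'pipeline.name' = 'demo-job'", env, setMap);
// handled == true, setMap = {pipeline.name=demo-job}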
dlink-app/dlink-app-1.14/src/main/java/com/dlink/executor/CustomTableResultImpl.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.executor;

import org.apache.flink.annotation.Internal;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ResultKind;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.utils.PrintUtils;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.Preconditions;

import javax.annotation.Nullable;
import java.io.PrintWriter;
import java.time.ZoneId;
import java.util.*;
import java.util.concurrent.*;

/**
 * Customized TableResultImpl
 *
 * @author wenmo
 * @since 2021/10/22 10:02
 **/
@Internal
public class CustomTableResultImpl implements TableResult {
    public static final TableResult TABLE_RESULT_OK =
            CustomTableResultImpl.builder()
                    .resultKind(ResultKind.SUCCESS)
                    .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
                    .data(Collections.singletonList(Row.of("OK")))
                    .build();
    private final JobClient jobClient;
    private final ResolvedSchema resolvedSchema;
    private final ResultKind resultKind;
    private final CloseableRowIteratorWrapper data;
    private final PrintStyle printStyle;
    private final ZoneId sessionTimeZone;

    private CustomTableResultImpl(
            @Nullable JobClient jobClient,
            ResolvedSchema resolvedSchema,
            ResultKind resultKind,
            CloseableIterator<Row> data,
            PrintStyle printStyle,
            ZoneId sessionTimeZone) {
        this.jobClient = jobClient;
        this.resolvedSchema = Preconditions.checkNotNull(resolvedSchema, "resolvedSchema should not be null");
        this.resultKind = Preconditions.checkNotNull(resultKind, "resultKind should not be null");
        Preconditions.checkNotNull(data, "data should not be null");
        this.data = new CloseableRowIteratorWrapper(data);
        this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
        this.sessionTimeZone = Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
    }

    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
        Builder builder = builder().resultKind(ResultKind.SUCCESS);
        if (fields.size() > 0) {
            List<String> columnNames = new ArrayList<>();
            List<DataType> columnTypes = new ArrayList<>();
            for (int i = 0; i < fields.size(); i++) {
                columnNames.add(fields.get(i).getName());
                columnTypes.add(fields.get(i).getType());
            }
            builder.schema(ResolvedSchema.physical(columnNames, columnTypes)).data(rows);
        }
        return builder.build();
    }

    @Override
    public Optional<JobClient> getJobClient() {
        return Optional.ofNullable(jobClient);
    }

    @Override
    public void await() throws InterruptedException, ExecutionException {
        try {
            awaitInternal(-1, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
            // do nothing
        }
    }

    @Override
    public void await(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        awaitInternal(timeout, unit);
    }

    private void awaitInternal(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        if (jobClient == null) {
            return;
        }
        ExecutorService executor = Executors.newFixedThreadPool(1, r -> new Thread(r, "TableResult-await-thread"));
        try {
            CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                while (!data.isFirstRowReady()) {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        throw new TableException("Thread is interrupted");
                    }
                }
            }, executor);
            if (timeout >= 0) {
                future.get(timeout, unit);
            } else {
                future.get();
            }
        } finally {
            executor.shutdown();
        }
    }

    @Override
    public ResolvedSchema getResolvedSchema() {
        return resolvedSchema;
    }

    @Override
    public ResultKind getResultKind() {
        return resultKind;
    }

    @Override
    public CloseableIterator<Row> collect() {
        return data;
    }

    @Override
    public void print() {
        Iterator<Row> it = collect();
        if (printStyle instanceof TableauStyle) {
            int maxColumnWidth = ((TableauStyle) printStyle).getMaxColumnWidth();
            String nullColumn = ((TableauStyle) printStyle).getNullColumn();
            boolean deriveColumnWidthByType = ((TableauStyle) printStyle).isDeriveColumnWidthByType();
            boolean printRowKind = ((TableauStyle) printStyle).isPrintRowKind();
            PrintUtils.printAsTableauForm(
                    getResolvedSchema(),
                    it,
                    new PrintWriter(System.out),
                    maxColumnWidth,
                    nullColumn,
                    deriveColumnWidthByType,
                    printRowKind,
                    sessionTimeZone);
        } else if (printStyle instanceof RawContentStyle) {
            while (it.hasNext()) {
                System.out.println(String.join(",", PrintUtils.rowToString(it.next(), getResolvedSchema(), sessionTimeZone)));
            }
        } else {
            throw new TableException("Unsupported print style: " + printStyle);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder for creating a {@link CustomTableResultImpl}.
     */
    public static class Builder {
        private JobClient jobClient = null;
        private ResolvedSchema resolvedSchema = null;
        private ResultKind resultKind = null;
        private CloseableIterator<Row> data = null;
        private PrintStyle printStyle = PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);
        private ZoneId sessionTimeZone = ZoneId.of("UTC");

        private Builder() {
        }

        /**
         * Specifies job client which associates the submitted Flink job.
         *
         * @param jobClient a {@link JobClient} for the submitted Flink job.
         */
        public Builder jobClient(JobClient jobClient) {
            this.jobClient = jobClient;
            return this;
        }

        /**
         * Specifies schema of the execution result.
         *
         * @param resolvedSchema a {@link ResolvedSchema} for the execution result.
         */
        public Builder schema(ResolvedSchema resolvedSchema) {
            Preconditions.checkNotNull(resolvedSchema, "resolvedSchema should not be null");
            this.resolvedSchema = resolvedSchema;
            return this;
        }

        /**
         * Specifies result kind of the execution result.
         *
         * @param resultKind a {@link ResultKind} for the execution result.
         */
        public Builder resultKind(ResultKind resultKind) {
            Preconditions.checkNotNull(resultKind, "resultKind should not be null");
            this.resultKind = resultKind;
            return this;
        }

        /**
         * Specifies a row iterator as the execution result.
         *
         * @param rowIterator a row iterator as the execution result.
         */
        public Builder data(CloseableIterator<Row> rowIterator) {
            Preconditions.checkNotNull(rowIterator, "rowIterator should not be null");
            this.data = rowIterator;
            return this;
        }

        /**
         * Specifies a row list as the execution result.
         *
         * @param rowList a row list as the execution result.
         */
        public Builder data(List<Row> rowList) {
            Preconditions.checkNotNull(rowList, "listRows should not be null");
            this.data = CloseableIterator.adapterForIterator(rowList.iterator());
            return this;
        }

        /**
         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
         */
        public Builder setPrintStyle(PrintStyle printStyle) {
            Preconditions.checkNotNull(printStyle, "printStyle should not be null");
            this.printStyle = printStyle;
            return this;
        }

        /**
         * Specifies session time zone.
         */
        public Builder setSessionTimeZone(ZoneId sessionTimeZone) {
            Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
            this.sessionTimeZone = sessionTimeZone;
            return this;
        }

        /**
         * Returns a {@link TableResult} instance.
         */
        public TableResult build() {
            return new CustomTableResultImpl(jobClient, resolvedSchema, resultKind, data, printStyle, sessionTimeZone);
        }
    }

    /**
     * Root interface for all print styles.
     */
    public interface PrintStyle {
        /**
         * Create a tableau print style with given max column width, null column, change mode
         * indicator and a flag to indicate whether the column width is derived from type (true) or
         * content (false), which prints the result schema and content as tableau form.
         */
        static PrintStyle tableau(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType, boolean printRowKind) {
            Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0");
            Preconditions.checkNotNull(nullColumn, "nullColumn should not be null");
            return new TableauStyle(maxColumnWidth, nullColumn, deriveColumnWidthByType, printRowKind);
        }

        /**
         * Create a raw content print style, which only prints the result content as raw form. Column
         * delimiter is ",", row delimiter is "\n".
         */
        static PrintStyle rawContent() {
            return new RawContentStyle();
        }
    }

    /**
     * Prints the result schema and content as tableau form.
     */
    private static final class TableauStyle implements PrintStyle {
        /**
         * A flag to indicate whether the column width is derived from type (true) or content
         * (false).
         */
        private final boolean deriveColumnWidthByType;
        private final int maxColumnWidth;
        private final String nullColumn;
        /**
         * A flag to indicate whether to print row kind info.
         */
        private final boolean printRowKind;

        private TableauStyle(int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType, boolean printRowKind) {
            this.deriveColumnWidthByType = deriveColumnWidthByType;
            this.maxColumnWidth = maxColumnWidth;
            this.nullColumn = nullColumn;
            this.printRowKind = printRowKind;
        }

        public boolean isDeriveColumnWidthByType() {
            return deriveColumnWidthByType;
        }

        int getMaxColumnWidth() {
            return maxColumnWidth;
        }

        String getNullColumn() {
            return nullColumn;
        }

        public boolean isPrintRowKind() {
            return printRowKind;
        }
    }

    /**
     * Only prints the result content as raw form. Column delimiter is ",", row delimiter is "\n".
     */
    private static final class RawContentStyle implements PrintStyle {
    }

    /**
     * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
     *
     * <p>The first row is ready when {@link #hasNext} method returns true or {@link #next()} method
     * returns a row. The execution order of {@link TableResult#collect} method and {@link
     * TableResult#await()} may be arbitrary, this class will record whether the first row is ready
     * (or accessed).
     */
    private static final class CloseableRowIteratorWrapper implements CloseableIterator<Row> {
        private final CloseableIterator<Row> iterator;
        private boolean isFirstRowReady = false;

        private CloseableRowIteratorWrapper(CloseableIterator<Row> iterator) {
            this.iterator = iterator;
        }

        @Override
        public void close() throws Exception {
            iterator.close();
        }

        @Override
        public boolean hasNext() {
            boolean hasNext = iterator.hasNext();
            isFirstRowReady = isFirstRowReady || hasNext;
            return hasNext;
        }

        @Override
        public Row next() {
            Row next = iterator.next();
            isFirstRowReady = true;
            return next;
        }

        public boolean isFirstRowReady() {
            return isFirstRowReady || hasNext();
        }
    }
}
\ No newline at end of file
dlink-app/dlink-app-1.14/src/main/java/com/dlink/executor/TableSchemaField.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.executor;

import org.apache.flink.table.types.DataType;

/**
 * @author wenmo
 * @since 2021/10/22 10:02
 **/
public class TableSchemaField {
    private String name;
    private DataType type;

    public TableSchemaField(String name, DataType type) {
        this.name = name;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public DataType getType() {
        return type;
    }

    public void setType(DataType type) {
        this.type = type;
    }
}
dlink-app/dlink-app-base/pom.xml
View file @
429a25da
...
@@ -25,7 +25,12 @@
         </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-1.13</artifactId>
+            <artifactId>dlink-client-${dlink.flink.version}</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-${dlink.flink.version}</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
...
dlink-client/dlink-client-1.11/pom.xml
View file @
429a25da
...
@@ -13,11 +13,8 @@
     <properties>
         <java.version>1.8</java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <flink.version>1.11.6</flink.version>
-        <flinkcdc.version>1.1.0</flinkcdc.version>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <junit.version>4.12</junit.version>
     </properties>
     <dependencies>
...
@@ -26,81 +23,14 @@
             <artifactId>dlink-client-base</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-yarn_2.11</artifactId>
-            <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-hdfs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-client</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-mapreduce-client-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-hadoop</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.alibaba.ververica</groupId>
-            <artifactId>flink-connector-mysql-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
+            <artifactId>dlink-common</artifactId>
         </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-common</artifactId>
+            <artifactId>dlink-flink-1.11</artifactId>
+            <scope>provided</scope>
         </dependency>
     </dependencies>
 </project>
\ No newline at end of file
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
View file @
429a25da
 package com.dlink.cdc;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import com.dlink.assertion.Asserts;
+import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
 
 /**
...
@@ -26,4 +32,35 @@ public abstract class AbstractCDCBuilder {
     public void setConfig(FlinkCDCConfig config) {
         this.config = config;
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getSchema();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        List<String> tableList = getTableList();
+        for (String tableName : tableList) {
+            if (Asserts.isNotNullString(tableName) && tableName.contains(".")) {
+                // split takes a regex, so the dot must be escaped
+                String[] names = tableName.split("\\.");
+                if (!schemaList.contains(names[0])) {
+                    schemaList.add(names[0]);
+                }
+            }
+        }
+        return schemaList;
+    }
+
+    public List<String> getTableList() {
+        List<String> tableList = new ArrayList<>();
+        String table = config.getTable();
+        if (Asserts.isNullString(table)) {
+            return tableList;
+        }
+        String[] tables = table.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(tableList, tables);
+        return tableList;
+    }
 }
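The new helpers reduce to comma-splitting plus inferring extra schemas from qualified "db.table" names. A self-contained sketch of that logic (not part of the commit, and assuming FlinkParamConstant.SPLIT is the comma separator used elsewhere in Dlink):

// Note: String.split takes a regex, so the dot must be escaped as "\\.".
String table = "db1.t1,db3.t2";                // what config.getTable() might hold
List<String> schemaList = new ArrayList<>(Arrays.asList("db1", "db2"));
for (String tableName : table.split(",")) {
    if (tableName.contains(".")) {
        String schemaName = tableName.split("\\.")[0];
        if (!schemaList.contains(schemaName)) {
            schemaList.add(schemaName);        // picks up db3
        }
    }
}
System.out.println(schemaList);                // [db1, db2, db3]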
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
0 → 100644
View file @
429a25da
package com.dlink.cdc;

import com.dlink.model.FlinkCDCConfig;

/**
 * AbstractSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:28
 **/
public abstract class AbstractSinkBuilder {

    protected FlinkCDCConfig config;

    public AbstractSinkBuilder() {
    }

    public AbstractSinkBuilder(FlinkCDCConfig config) {
        this.config = config;
    }

    public FlinkCDCConfig getConfig() {
        return config;
    }

    public void setConfig(FlinkCDCConfig config) {
        this.config = config;
    }
}
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/CDCBuilder.java
View file @
429a25da
...
@@ -3,7 +3,11 @@ package com.dlink.cdc;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
+import java.util.List;
+import java.util.Map;
+
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;
 
 /**
  * CDCBuilder
...
@@ -17,5 +21,13 @@ public interface CDCBuilder {
 
     CDCBuilder create(FlinkCDCConfig config);
 
-    DataStreamSource build(StreamExecutionEnvironment env);
+    DataStreamSource<String> build(StreamExecutionEnvironment env);
+
+    List<String> getSchemaList();
+
+    List<String> getTableList();
+
+    Map<String, Map<String, String>> parseMetaDataConfigs();
+
+    String getInsertSQL(Table table, String sourceName);
 }
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
deleted
100644 → 0
View file @
bf048afa
package com.dlink.cdc;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import com.dlink.assertion.Asserts;
import com.dlink.model.FlinkCDCConfig;

/**
 * FlinkCDCMergeBuilder
 *
 * @author wenmo
 * @since 2022/1/29 22:37
 */
public class FlinkCDCMergeBuilder {

    public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
        if (Asserts.isNotNull(config.getParallelism())) {
            env.setParallelism(config.getParallelism());
        }
        if (Asserts.isNotNull(config.getCheckpoint())) {
            env.enableCheckpointing(config.getCheckpoint());
        }
        DataStreamSource<String> streamSource = CDCBuilderFactory.buildCDCBuilder(config).build(env);
        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
    }

    private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
        return new FlinkKafkaProducer<String>(brokers, topic, new SimpleStringSchema());
    }
}
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/SinkBuilder.java
0 → 100644
View file @
429a25da
package com.dlink.cdc;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:09
 **/
public interface SinkBuilder {

    String getHandle();

    SinkBuilder create(FlinkCDCConfig config);

    DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env,
                           CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource);
}
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
0 → 100644
View file @
429a25da
package com.dlink.cdc;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.kafka.KafkaSinkBuilder;
import com.dlink.exception.FlinkClientException;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilderFactory
 *
 * @author wenmo
 * @since 2022/4/12 21:12
 **/
public class SinkBuilderFactory {

    private static SinkBuilder[] sinkBuilders = {
        new KafkaSinkBuilder(),
    };

    public static SinkBuilder buildSinkBuilder(FlinkCDCConfig config) {
        if (Asserts.isNull(config) || Asserts.isNullString(config.getSink().get("connector"))) {
            throw new FlinkClientException("Please specify the sink connector.");
        }
        for (int i = 0; i < sinkBuilders.length; i++) {
            if (config.getSink().get("connector").equals(sinkBuilders[i].getHandle())) {
                return sinkBuilders[i].create(config);
            }
        }
        throw new FlinkClientException("No sink builder matched the connector type ["
                + config.getSink().get("connector") + "].");
    }
}
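The dispatch rule the factory implements, as a self-contained sketch (not part of the commit; assumes SinkBuilder and KafkaSinkBuilder are imported): match the sink map's "connector" value against each registered builder's handle, first match wins.

String connector = "kafka";                    // value of config.getSink().get("connector")
SinkBuilder[] candidates = {new KafkaSinkBuilder()};
SinkBuilder chosen = null;
for (SinkBuilder candidate : candidates) {
    if (connector.equals(candidate.getHandle())) {
        chosen = candidate;                    // the factory then calls create(config)
        break;
    }
}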
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
0 → 100644
View file @
429a25da
package com.dlink.cdc.kafka;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;

/**
 * KafkaSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class KafkaSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "kafka";

    public KafkaSinkBuilder() {
    }

    public KafkaSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new KafkaSinkBuilder(config);
    }

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource) {
        if (Asserts.isNotNullString(config.getSink().get("topic"))) {
            // An explicit topic is configured: write the whole change stream to it.
            dataStreamSource.addSink(new FlinkKafkaProducer<String>(
                config.getSink().get("brokers"),
                config.getSink().get("topic"),
                new SimpleStringSchema()));
        } else {
            // No topic given: fan the stream out to one topic per table,
            // named by the schema-qualified table name.
            final List<Schema> schemaList = config.getSchemaList();
            if (Asserts.isNotNullCollection(schemaList)) {
                // Parse each JSON record into a Map once, up front.
                SingleOutputStreamOperator<Map> mapOperator = dataStreamSource.map(new MapFunction<String, Map>() {
                    @Override
                    public Map map(String value) throws Exception {
                        ObjectMapper objectMapper = new ObjectMapper();
                        return objectMapper.readValue(value, Map.class);
                    }
                });
                for (Schema schema : schemaList) {
                    for (Table table : schema.getTables()) {
                        final String tableName = table.getName();
                        final String schemaName = table.getSchema();
                        // Keep only records whose Debezium source matches this table.
                        SingleOutputStreamOperator<Map> filterOperator = mapOperator.filter(new FilterFunction<Map>() {
                            @Override
                            public boolean filter(Map value) throws Exception {
                                LinkedHashMap source = (LinkedHashMap) value.get("source");
                                return tableName.equals(source.get("table").toString())
                                    && schemaName.equals(source.get("db").toString());
                            }
                        });
                        // Re-serialize and write to the per-table topic.
                        SingleOutputStreamOperator<String> stringOperator = filterOperator.map(new MapFunction<Map, String>() {
                            @Override
                            public String map(Map value) throws Exception {
                                ObjectMapper objectMapper = new ObjectMapper();
                                return objectMapper.writeValueAsString(value);
                            }
                        });
                        stringOperator.addSink(new FlinkKafkaProducer<String>(
                            config.getSink().get("brokers"),
                            table.getSchemaTableName(),
                            new SimpleStringSchema()));
                    }
                }
            }
        }
        return dataStreamSource;
    }
}
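The else branch above is the heart of the topic-per-table sync: the JSON stream is parsed once, filtered per table on the Debezium source.db / source.table fields, and re-serialized to a topic named by Table#getSchemaTableName(). A standalone sketch of that routing predicate, assuming the upstream records are Debezium-style JSON (which is what the map/filter chain expects):

import java.util.Map;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

public class DebeziumRoutingExample {
    public static void main(String[] args) throws Exception {
        // Trimmed Debezium-style change record; the field layout is assumed for illustration.
        String value = "{\"source\":{\"db\":\"shop\",\"table\":\"orders\"},\"op\":\"c\"}";
        Map record = new ObjectMapper().readValue(value, Map.class);
        Map source = (Map) record.get("source");
        // Same predicate as the FilterFunction above:
        boolean matches = "orders".equals(source.get("table").toString())
            && "shop".equals(source.get("db").toString());
        System.out.println(matches); // true -> forwarded to the topic from getSchemaTableName(), e.g. "shop.orders"
    }
}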
dlink-client/dlink-client-1.11/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
@@ -3,13 +3,21 @@ package com.dlink.cdc.mysql;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
 import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
 import com.dlink.assertion.Asserts;
 import com.dlink.cdc.AbstractCDCBuilder;
 import com.dlink.cdc.CDCBuilder;
+import com.dlink.constant.ClientConstant;
 import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;

 /**
  * MysqlCDCBuilder
@@ -20,6 +28,7 @@ import com.dlink.model.FlinkCDCConfig;
 public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {

     private String KEY_WORD = "mysql-cdc";
+    private final static String METADATA_TYPE = "MySql";

     public MysqlCDCBuilder() {
     }
@@ -55,4 +64,58 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
                 .deserializer(new StringDebeziumDeserializationSchema());
         return env.addSource(sourceBuilder.build(), "MySQL CDC Source");
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getDatabase();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        return schemaList;
+    }
+
+    public Map<String, Map<String, String>> parseMetaDataConfigs() {
+        Map<String, Map<String, String>> allConfigMap = new HashMap<>();
+        List<String> schemaList = getSchemaList();
+        for (String schema : schemaList) {
+            Map<String, String> configMap = new HashMap<>();
+            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
+            StringBuilder sb = new StringBuilder("jdbc:mysql://");
+            sb.append(config.getHostname());
+            sb.append(":");
+            sb.append(config.getPort());
+            sb.append("/");
+            sb.append(schema);
+            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
+            configMap.put(ClientConstant.METADATA_URL, sb.toString());
+            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
+            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
+            allConfigMap.put(schema, configMap);
+        }
+        return allConfigMap;
+    }
+
+    @Override
+    public String getInsertSQL(Table table, String sourceName) {
+        StringBuilder sb = new StringBuilder("INSERT INTO ");
+        sb.append(table.getName());
+        sb.append(" SELECT\n");
+        for (int i = 0; i < table.getColumns().size(); i++) {
+            sb.append(" ");
+            if (i > 0) {
+                sb.append(",");
+            }
+            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
+        }
+        sb.append(" FROM ");
+        sb.append(sourceName);
+        /* sb.append(" WHERE database_name = '");
+        sb.append(table.getSchema());
+        sb.append("' and table_name = '");
+        sb.append(table.getName());
+        sb.append("'");*/
+        return sb.toString();
+    }
 }
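As a concrete illustration of the getInsertSQL added above: for a hypothetical table orders with columns id and amount, and a source view registered as orders_source, the builder would emit roughly the following (the exact run of spaces is not recoverable from this view):

INSERT INTO orders SELECT
 `id` 
 ,`amount` 
 FROM orders_source

The commented-out WHERE clause shows a per-table predicate on database_name and table_name that was left disabled in this commit.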
dlink-client/dlink-client-1.11/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java
 package com.dlink.executor;

-import com.dlink.result.SqlExplainResult;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.dag.Transformation;
 import org.apache.flink.configuration.Configuration;
@@ -11,11 +7,13 @@ import org.apache.flink.configuration.PipelineOptions;
 import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
 import org.apache.flink.runtime.rest.messages.JobPlanInfo;
+import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.graph.JSONGenerator;
 import org.apache.flink.streaming.api.graph.StreamGraph;
 import org.apache.flink.table.api.EnvironmentSettings;
 import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableConfig;
 import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.api.internal.TableEnvironmentImpl;
@@ -38,12 +36,19 @@ import org.apache.flink.table.operations.Operation;
 import org.apache.flink.table.operations.QueryOperation;
 import org.apache.flink.table.planner.delegation.ExecutorBase;
 import org.apache.flink.table.planner.utils.ExecutorUtils;
+import org.apache.flink.types.Row;

 import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

+import com.dlink.exception.FlinkClientException;
+import com.dlink.result.SqlExplainResult;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
 /**
  * Customized TableEnvironmentImpl
  *
@@ -52,10 +57,31 @@ import java.util.Map;
 **/
 public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

-    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
-        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+    private final StreamExecutionEnvironment executionEnvironment;
+
+    public CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, TableConfig tableConfig, StreamExecutionEnvironment executionEnvironment, Planner planner, Executor executor, boolean isStreamingMode, ClassLoader userClassLoader) {
+        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+        this.executionEnvironment = executionEnvironment;
     }

     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
         return create(executionEnvironment, EnvironmentSettings.newInstance().build());
     }
@@ -72,26 +98,79 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         return create(executionEnvironment, settings, new TableConfig());
     }

     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
+        if (!settings.isStreamingMode()) {
+            throw new TableException("StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
+        }
+
+        // temporary solution until FLINK-15635 is fixed
         ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
         ModuleManager moduleManager = new ModuleManager();
-        CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build();
-        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
+        CatalogManager catalogManager = CatalogManager.newBuilder()
+            .classLoader(classLoader)
+            .config(tableConfig.getConfiguration())
+            .defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
+            .executionConfig(executionEnvironment.getConfig())
+            .build();
+        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
         Map<String, String> executorProperties = settings.toExecutorProperties();
         Executor executor = lookupExecutor(executorProperties, executionEnvironment);
         Map<String, String> plannerProperties = settings.toPlannerProperties();
-        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
-        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader);
+        Planner planner =
+            ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
+                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
+        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, functionCatalog, tableConfig, executionEnvironment, planner, executor, settings.isStreamingMode(), classLoader);
     }

     private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) {
         try {
-            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
-            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
-            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
-        } catch (Exception var4) {
-            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
+            ExecutorFactory executorFactory =
+                ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
+            Method createMethod =
+                executorFactory
+                    .getClass()
+                    .getMethod("create", Map.class, StreamExecutionEnvironment.class);
+            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
+        } catch (Exception e) {
+            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", e);
         }
     }
@@ -167,7 +246,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         record.setParseTrue(true);
         if (operations.size() != 1) {
             throw new TableException(
                 "Unsupported SQL query! explainSql() only accepts a single SQL query.");
         }
         List<Operation> operationlist = new ArrayList<>(operations);
         for (int i = 0; i < operationlist.size(); i++) {
@@ -213,4 +292,20 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
     public boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
         return false;
     }
+
+    @Override
+    public Table fromChangelogStream(DataStream<Row> dataStream) {
+        throw new FlinkClientException("Flink 1.11 not support");
+    }
+
+    @Override
+    public <T> void registerDataStream(String name, DataStream<T> dataStream) {
+        throw new FlinkClientException("Flink 1.11 not support");
+    }
+
+    @Override
+    public <T> void createTemporaryView(String path, DataStream<T> dataStream) {
+        throw new FlinkClientException("Flink 1.11 not support");
+    }
 }
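A minimal sketch of how the reworked environment is obtained; create(env) and the streaming-mode check are taken directly from the diff above:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironmentImpl;

public class CustomEnvExample {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Streaming settings only: batch-mode settings now fail fast with a TableException.
        CustomTableEnvironmentImpl tableEnv = CustomTableEnvironmentImpl.create(env);
        // The new constructor also keeps a reference to env itself, so later
        // DataStream-related calls can reach the underlying execution environment.
    }
}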
dlink-client/dlink-client-1.11/src/main/java/com/dlink/executor/CustomTableResultImpl.java
@@ -2,19 +2,24 @@ package com.dlink.executor;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.core.execution.JobClient;
-import org.apache.flink.table.api.*;
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.ResultKind;
+import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.api.TableSchema;
 import org.apache.flink.table.utils.PrintUtils;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.CloseableIterator;
 import org.apache.flink.util.Preconditions;

-import javax.annotation.Nullable;
 import java.io.PrintWriter;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Optional;

+import javax.annotation.Nullable;
+
 /**
  * Customized TableResultImpl
  *
@@ -24,11 +29,11 @@ import java.util.Optional;
 @Internal
 class CustomTableResultImpl implements TableResult {

     public static final TableResult TABLE_RESULT_OK =
         CustomTableResultImpl.builder()
             .resultKind(ResultKind.SUCCESS)
             .tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
             .data(Collections.singletonList(Row.of("OK")))
             .build();

     private final JobClient jobClient;
     private final TableSchema tableSchema;
@@ -37,14 +42,14 @@ class CustomTableResultImpl implements TableResult {
     private final PrintStyle printStyle;

     private CustomTableResultImpl(
         @Nullable JobClient jobClient,
         TableSchema tableSchema,
         ResultKind resultKind,
         CloseableIterator<Row> data,
         PrintStyle printStyle) {
         this.jobClient = jobClient;
         this.tableSchema =
             Preconditions.checkNotNull(tableSchema, "tableSchema should not be null");
         this.resultKind = Preconditions.checkNotNull(resultKind, "resultKind should not be null");
         this.data = Preconditions.checkNotNull(data, "data should not be null");
         this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
@@ -89,14 +94,14 @@ class CustomTableResultImpl implements TableResult {
             int maxColumnWidth = ((TableauStyle) printStyle).getMaxColumnWidth();
             String nullColumn = ((TableauStyle) printStyle).getNullColumn();
             boolean deriveColumnWidthByType =
                 ((TableauStyle) printStyle).isDeriveColumnWidthByType();
             PrintUtils.printAsTableauForm(
                 getTableSchema(),
                 it,
                 new PrintWriter(System.out),
                 maxColumnWidth,
                 nullColumn,
                 deriveColumnWidthByType);
         } else if (printStyle instanceof RawContentStyle) {
             while (it.hasNext()) {
                 System.out.println(String.join(",", PrintUtils.rowToString(it.next())));
@@ -119,7 +124,7 @@ class CustomTableResultImpl implements TableResult {
         private ResultKind resultKind = null;
         private CloseableIterator<Row> data = null;
         private PrintStyle printStyle =
             PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false);

         private Builder() {
         }
@@ -205,9 +210,9 @@ class CustomTableResultImpl implements TableResult {
          * prints the result schema and content as tableau form.
          */
         static PrintStyle tableau(
             int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType) {
             Preconditions.checkArgument(
                 maxColumnWidth > 0, "maxColumnWidth should be greater than 0");
             Preconditions.checkNotNull(nullColumn, "nullColumn should not be null");
             return new TableauStyle(maxColumnWidth, nullColumn, deriveColumnWidthByType);
         }
@@ -235,7 +240,7 @@ class CustomTableResultImpl implements TableResult {
         private final String nullColumn;

         private TableauStyle(
             int maxColumnWidth, String nullColumn, boolean deriveColumnWidthByType) {
             this.deriveColumnWidthByType = deriveColumnWidthByType;
             this.maxColumnWidth = maxColumnWidth;
             this.nullColumn = nullColumn;
dlink-client/dlink-client-1.12/pom.xml
@@ -14,11 +14,8 @@
     <properties>
         <java.version>1.8</java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <flink.version>1.12.7</flink.version>
-        <flinkcdc.version>1.3.0</flinkcdc.version>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <junit.version>4.12</junit.version>
     </properties>

     <dependencies>
@@ -27,81 +24,14 @@
             <artifactId>dlink-client-base</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-yarn_2.11</artifactId>
-            <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-hdfs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-client</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-mapreduce-client-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-hadoop</artifactId>
+            <artifactId>dlink-common</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.alibaba.ververica</groupId>
-            <artifactId>flink-connector-mysql-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-common</artifactId>
+            <artifactId>dlink-flink-1.12</artifactId>
+            <scope>provided</scope>
         </dependency>
     </dependencies>
 </project>
\ No newline at end of file
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 package com.dlink.cdc;

+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import com.dlink.assertion.Asserts;
+import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;

 /**
@@ -26,4 +32,35 @@ public abstract class AbstractCDCBuilder {
     public void setConfig(FlinkCDCConfig config) {
         this.config = config;
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getSchema();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        List<String> tableList = getTableList();
+        for (String tableName : tableList) {
+            if (Asserts.isNotNullString(tableName) && tableName.contains(".")) {
+                // String#split takes a regex, so the dot must be escaped.
+                String[] names = tableName.split("\\.");
+                if (!schemaList.contains(names[0])) {
+                    schemaList.add(names[0]);
+                }
+            }
+        }
+        return schemaList;
+    }
+
+    public List<String> getTableList() {
+        List<String> tableList = new ArrayList<>();
+        String table = config.getTable();
+        if (Asserts.isNullString(table)) {
+            return tableList;
+        }
+        String[] tables = table.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(tableList, tables);
+        return tableList;
+    }
 }
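A self-contained sketch of the schema derivation added above, covering only the loop over schema-qualified table names (the FlinkParamConstant.SPLIT pass over config values is assumed to have produced the list already):

import java.util.ArrayList;
import java.util.List;

public class SchemaListExample {
    public static void main(String[] args) {
        // Mirrors AbstractCDCBuilder#getSchemaList: each qualified name such as
        // "shop.orders" contributes its schema part exactly once.
        List<String> tableList = new ArrayList<>();
        tableList.add("shop.orders");
        tableList.add("shop.users");
        tableList.add("crm.leads");
        List<String> schemaList = new ArrayList<>();
        for (String tableName : tableList) {
            if (tableName != null && tableName.contains(".")) {
                String[] names = tableName.split("\\."); // regex split: dot escaped
                if (!schemaList.contains(names[0])) {
                    schemaList.add(names[0]);
                }
            }
        }
        System.out.println(schemaList); // [shop, crm]
    }
}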
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java (new file, mode 100644)
package com.dlink.cdc;

import com.dlink.model.FlinkCDCConfig;

/**
 * AbstractSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:28
 **/
public abstract class AbstractSinkBuilder {

    protected FlinkCDCConfig config;

    public AbstractSinkBuilder() {
    }

    public AbstractSinkBuilder(FlinkCDCConfig config) {
        this.config = config;
    }

    public FlinkCDCConfig getConfig() {
        return config;
    }

    public void setConfig(FlinkCDCConfig config) {
        this.config = config;
    }
}
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/CDCBuilder.java
@@ -3,7 +3,11 @@ package com.dlink.cdc;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

+import java.util.List;
+import java.util.Map;
+
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;

 /**
  * CDCBuilder
@@ -17,5 +21,13 @@ public interface CDCBuilder {

     CDCBuilder create(FlinkCDCConfig config);

-    DataStreamSource build(StreamExecutionEnvironment env);
+    DataStreamSource<String> build(StreamExecutionEnvironment env);
+
+    List<String> getSchemaList();
+
+    List<String> getTableList();
+
+    Map<String, Map<String, String>> parseMetaDataConfigs();
+
+    String getInsertSQL(Table table, String sourceName);
 }
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java (deleted file, mode 100644)
package com.dlink.cdc;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import com.dlink.assertion.Asserts;
import com.dlink.constant.FlinkParamConstant;
import com.dlink.model.FlinkCDCConfig;

/**
 * FlinkCDCMergeBuilder
 *
 * @author wenmo
 * @since 2022/1/29 22:37
 */
public class FlinkCDCMergeBuilder {

    public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
        if (Asserts.isNotNull(config.getParallelism())) {
            env.setParallelism(config.getParallelism());
        }
        if (Asserts.isNotNull(config.getCheckpoint())) {
            env.enableCheckpointing(config.getCheckpoint());
        }
        DataStreamSource<String> streamSource = CDCBuilderFactory.buildCDCBuilder(config).build(env);
        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
    }

    private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
        return new FlinkKafkaProducer<String>(brokers, topic, new SimpleStringSchema());
    }
}
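The deletion above retires the hard-wired MySQL-to-Kafka pipeline in favor of the factory pair. A hedged sketch of the equivalent wiring under the new scheme; CDCBuilderFactory is referenced by the removed code, and its continued availability is assumed here:

package com.dlink.cdc;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

public class CdcPipelineSketch {
    public static void run(StreamExecutionEnvironment env, CustomTableEnvironment tableEnv, FlinkCDCConfig config) {
        // Source side: same factory the removed code used.
        CDCBuilder cdcBuilder = CDCBuilderFactory.buildCDCBuilder(config);
        DataStreamSource<String> streamSource = cdcBuilder.build(env);
        // Sink side: the fixed Kafka producer is replaced by SinkBuilder dispatch,
        // so Kafka is just one of the pluggable sinks.
        SinkBuilderFactory.buildSinkBuilder(config).build(cdcBuilder, env, tableEnv, streamSource);
    }
}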
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/SinkBuilder.java (new file, mode 100644)
package com.dlink.cdc;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:09
 **/
public interface SinkBuilder {

    String getHandle();

    SinkBuilder create(FlinkCDCConfig config);

    DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource);
}
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/SinkBuilderFactory.java (new file, mode 100644)
package com.dlink.cdc;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.kafka.KafkaSinkBuilder;
import com.dlink.exception.FlinkClientException;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilderFactory
 *
 * @author wenmo
 * @since 2022/4/12 21:12
 **/
public class SinkBuilderFactory {

    private static SinkBuilder[] sinkBuilders = {
        new KafkaSinkBuilder(),
    };

    public static SinkBuilder buildSinkBuilder(FlinkCDCConfig config) {
        if (Asserts.isNull(config) || Asserts.isNullString(config.getSink().get("connector"))) {
            throw new FlinkClientException("Please specify the sink connector.");
        }
        for (int i = 0; i < sinkBuilders.length; i++) {
            if (config.getSink().get("connector").equals(sinkBuilders[i].getHandle())) {
                return sinkBuilders[i].create(config);
            }
        }
        throw new FlinkClientException("No matching sink type for connector [" + config.getSink().get("connector") + "].");
    }
}
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java (new file, mode 100644)
package com.dlink.cdc.kafka;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;

/**
 * KafkaSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class KafkaSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "kafka";

    public KafkaSinkBuilder() {
    }

    public KafkaSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new KafkaSinkBuilder(config);
    }

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource) {
        if (Asserts.isNotNullString(config.getSink().get("topic"))) {
            // An explicit topic is configured: write the whole change stream to it.
            dataStreamSource.addSink(new FlinkKafkaProducer<String>(
                config.getSink().get("brokers"),
                config.getSink().get("topic"),
                new SimpleStringSchema()));
        } else {
            // No topic given: fan the stream out to one topic per table,
            // named by the schema-qualified table name.
            final List<Schema> schemaList = config.getSchemaList();
            if (Asserts.isNotNullCollection(schemaList)) {
                // Parse each JSON record into a Map once, up front.
                SingleOutputStreamOperator<Map> mapOperator = dataStreamSource.map(new MapFunction<String, Map>() {
                    @Override
                    public Map map(String value) throws Exception {
                        ObjectMapper objectMapper = new ObjectMapper();
                        return objectMapper.readValue(value, Map.class);
                    }
                });
                for (Schema schema : schemaList) {
                    for (Table table : schema.getTables()) {
                        final String tableName = table.getName();
                        final String schemaName = table.getSchema();
                        // Keep only records whose Debezium source matches this table.
                        SingleOutputStreamOperator<Map> filterOperator = mapOperator.filter(new FilterFunction<Map>() {
                            @Override
                            public boolean filter(Map value) throws Exception {
                                LinkedHashMap source = (LinkedHashMap) value.get("source");
                                return tableName.equals(source.get("table").toString())
                                    && schemaName.equals(source.get("db").toString());
                            }
                        });
                        // Re-serialize and write to the per-table topic.
                        SingleOutputStreamOperator<String> stringOperator = filterOperator.map(new MapFunction<Map, String>() {
                            @Override
                            public String map(Map value) throws Exception {
                                ObjectMapper objectMapper = new ObjectMapper();
                                return objectMapper.writeValueAsString(value);
                            }
                        });
                        stringOperator.addSink(new FlinkKafkaProducer<String>(
                            config.getSink().get("brokers"),
                            table.getSchemaTableName(),
                            new SimpleStringSchema()));
                    }
                }
            }
        }
        return dataStreamSource;
    }
}
dlink-client/dlink-client-1.12/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
@@ -3,14 +3,22 @@ package com.dlink.cdc.mysql;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
 import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
 import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
 import com.dlink.assertion.Asserts;
 import com.dlink.cdc.AbstractCDCBuilder;
 import com.dlink.cdc.CDCBuilder;
+import com.dlink.constant.ClientConstant;
 import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;

 /**
  * MysqlCDCBuilder
@@ -21,6 +29,7 @@ import com.dlink.model.FlinkCDCConfig;
 public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {

     private String KEY_WORD = "mysql-cdc";
+    private final static String METADATA_TYPE = "MySql";

     public MysqlCDCBuilder() {
     }
@@ -73,4 +82,58 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
         }
         return env.addSource(sourceBuilder.build(), "MySQL CDC Source");
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getDatabase();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        return schemaList;
+    }
+
+    public Map<String, Map<String, String>> parseMetaDataConfigs() {
+        Map<String, Map<String, String>> allConfigMap = new HashMap<>();
+        List<String> schemaList = getSchemaList();
+        for (String schema : schemaList) {
+            Map<String, String> configMap = new HashMap<>();
+            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
+            StringBuilder sb = new StringBuilder("jdbc:mysql://");
+            sb.append(config.getHostname());
+            sb.append(":");
+            sb.append(config.getPort());
+            sb.append("/");
+            sb.append(schema);
+            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
+            configMap.put(ClientConstant.METADATA_URL, sb.toString());
+            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
+            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
+            allConfigMap.put(schema, configMap);
+        }
+        return allConfigMap;
+    }
+
+    @Override
+    public String getInsertSQL(Table table, String sourceName) {
+        StringBuilder sb = new StringBuilder("INSERT INTO ");
+        sb.append(table.getName());
+        sb.append(" SELECT\n");
+        for (int i = 0; i < table.getColumns().size(); i++) {
+            sb.append(" ");
+            if (i > 0) {
+                sb.append(",");
+            }
+            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
+        }
+        sb.append(" FROM ");
+        sb.append(sourceName);
+        /* sb.append(" WHERE database_name = '");
+        sb.append(table.getSchema());
+        sb.append("' and table_name = '");
+        sb.append(table.getName());
+        sb.append("'");*/
+        return sb.toString();
+    }
 }
dlink-client/dlink-client-1.12/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java
 package com.dlink.executor;

-import com.dlink.result.SqlExplainResult;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.RuntimeExecutionMode;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.dag.Transformation;
@@ -13,11 +9,13 @@ import org.apache.flink.configuration.PipelineOptions;
 import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
 import org.apache.flink.runtime.rest.messages.JobPlanInfo;
+import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.graph.JSONGenerator;
 import org.apache.flink.streaming.api.graph.StreamGraph;
 import org.apache.flink.table.api.EnvironmentSettings;
 import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableConfig;
 import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.api.internal.TableEnvironmentImpl;
@@ -40,12 +38,19 @@ import org.apache.flink.table.operations.Operation;
 import org.apache.flink.table.operations.QueryOperation;
 import org.apache.flink.table.planner.delegation.ExecutorBase;
 import org.apache.flink.table.planner.utils.ExecutorUtils;
+import org.apache.flink.types.Row;

 import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

+import com.dlink.exception.FlinkClientException;
+import com.dlink.result.SqlExplainResult;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
 /**
  * Customized TableEnvironmentImpl
  *
@@ -54,8 +59,28 @@ import java.util.Map;
 **/
 public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

-    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
-        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+    private final StreamExecutionEnvironment executionEnvironment;
+
+    public CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, TableConfig tableConfig, StreamExecutionEnvironment executionEnvironment, Planner planner, Executor executor, boolean isStreamingMode, ClassLoader userClassLoader) {
+        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+        this.executionEnvironment = executionEnvironment;
     }

     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
@@ -74,29 +99,83 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         return create(executionEnvironment, settings, new TableConfig());
     }

     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
+        if (!settings.isStreamingMode()) {
+            throw new TableException("StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
+        }
+
+        // temporary solution until FLINK-15635 is fixed
         ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
         ModuleManager moduleManager = new ModuleManager();
-        CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build();
-        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
+        CatalogManager catalogManager = CatalogManager.newBuilder()
+            .classLoader(classLoader)
+            .config(tableConfig.getConfiguration())
+            .defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
+            .executionConfig(executionEnvironment.getConfig())
+            .build();
+        FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
         Map<String, String> executorProperties = settings.toExecutorProperties();
         Executor executor = lookupExecutor(executorProperties, executionEnvironment);
         Map<String, String> plannerProperties = settings.toPlannerProperties();
-        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
-        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader);
+        Planner planner =
+            ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
+                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
+        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, functionCatalog, tableConfig, executionEnvironment, planner, executor, settings.isStreamingMode(), classLoader);
     }

     private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) {
         try {
-            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
-            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
-            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
-        } catch (Exception var4) {
-            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
+            ExecutorFactory executorFactory =
+                ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
+            Method createMethod =
+                executorFactory
+                    .getClass()
+                    .getMethod("create", Map.class, StreamExecutionEnvironment.class);
+            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
+        } catch (Exception e) {
+            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", e);
         }
     }

     public ObjectNode getStreamGraph(String statement) {
         List<Operation> operations = super.parser.parse(statement);
         if (operations.size() != 1) {
@@ -169,7 +248,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         record.setParseTrue(true);
         if (operations.size() != 1) {
             throw new TableException(
                 "Unsupported SQL query! explainSql() only accepts a single SQL query.");
         }
         List<Operation> operationlist = new ArrayList<>(operations);
         for (int i = 0; i < operationlist.size(); i++) {
@@ -215,4 +294,19 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
     public boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
         return false;
     }
+
+    @Override
+    public Table fromChangelogStream(DataStream<Row> dataStream) {
+        throw new FlinkClientException("Flink 1.12 not support");
+    }
+
+    @Override
+    public <T> void registerDataStream(String name, DataStream<T> dataStream) {
+        throw new FlinkClientException("Flink 1.12 not support");
+    }
+
+    @Override
+    public <T> void createTemporaryView(String path, DataStream<T> dataStream) {
+        throw new FlinkClientException("Flink 1.12 not support");
+    }
 }
\ No newline at end of file
dlink-client/dlink-client-1.13/pom.xml
@@ -14,114 +14,23 @@
     <properties>
         <java.version>1.8</java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <flink.version>1.13.6</flink.version>
-        <flinkcdc.version>2.2.0</flinkcdc.version>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <junit.version>4.12</junit.version>
     </properties>

     <dependencies>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-client-base</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-yarn_2.11</artifactId>
-            <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-hdfs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-client</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-mapreduce-client-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-hadoop</artifactId>
+            <artifactId>dlink-common</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.ververica</groupId>
-            <artifactId>flink-connector-mysql-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.ververica</groupId>
-            <artifactId>flink-connector-oracle-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-common</artifactId>
+            <artifactId>dlink-flink-1.13</artifactId>
+            <scope>provided</scope>
         </dependency>
     </dependencies>

     <!--<build>
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 package com.dlink.cdc;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import com.dlink.assertion.Asserts;
+import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
 
 /**
@@ -26,4 +32,35 @@ public abstract class AbstractCDCBuilder {
     public void setConfig(FlinkCDCConfig config) {
         this.config = config;
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getSchema();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        List<String> tableList = getTableList();
+        for (String tableName : tableList) {
+            if (Asserts.isNotNullString(tableName) && tableName.contains(".")) {
+                // String.split takes a regex, so the dot must be escaped
+                String[] names = tableName.split("\\.");
+                if (!schemaList.contains(names[0])) {
+                    schemaList.add(names[0]);
+                }
+            }
+        }
+        return schemaList;
+    }
+
+    public List<String> getTableList() {
+        List<String> tableList = new ArrayList<>();
+        String table = config.getTable();
+        if (Asserts.isNullString(table)) {
+            return tableList;
+        }
+        String[] tables = table.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(tableList, tables);
+        return tableList;
+    }
 }
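To make the splitting behavior concrete, here is a minimal standalone sketch of what getSchemaList() computes for a hypothetical config; it assumes FlinkParamConstant.SPLIT is a comma and inlines literal strings in place of the FlinkCDCConfig getters.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SchemaListDemo {
    public static void main(String[] args) {
        // Mirrors AbstractCDCBuilder.getSchemaList() for a config such as
        // schema = "db1" and table = "db1.users,db1.orders,db2.items".
        String schema = "db1";                           // config.getSchema()
        String table = "db1.users,db1.orders,db2.items"; // config.getTable()

        List<String> schemaList = new ArrayList<>(Arrays.asList(schema.split(",")));
        for (String tableName : table.split(",")) {
            if (tableName.contains(".")) {
                // the escaped dot matters: split(".") would return an empty array
                String[] names = tableName.split("\\.");
                if (!schemaList.contains(names[0])) {
                    schemaList.add(names[0]);
                }
            }
        }
        System.out.println(schemaList); // [db1, db2]
    }
}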
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
0 → 100644
package com.dlink.cdc;

import com.dlink.model.FlinkCDCConfig;

/**
 * AbstractSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:28
 **/
public abstract class AbstractSinkBuilder {

    protected FlinkCDCConfig config;

    public AbstractSinkBuilder() {
    }

    public AbstractSinkBuilder(FlinkCDCConfig config) {
        this.config = config;
    }

    public FlinkCDCConfig getConfig() {
        return config;
    }

    public void setConfig(FlinkCDCConfig config) {
        this.config = config;
    }
}
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/CDCBuilder.java
@@ -3,7 +3,11 @@ package com.dlink.cdc;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
+import java.util.List;
+import java.util.Map;
+
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;
 
 /**
  * CDCBuilder
@@ -17,5 +21,13 @@ public interface CDCBuilder {
     CDCBuilder create(FlinkCDCConfig config);
 
-    DataStreamSource build(StreamExecutionEnvironment env);
+    DataStreamSource<String> build(StreamExecutionEnvironment env);
+
+    List<String> getSchemaList();
+
+    List<String> getTableList();
+
+    Map<String, Map<String, String>> parseMetaDataConfigs();
+
+    String getInsertSQL(Table table, String sourceName);
 }
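For orientation, a small sketch of the Map<String, Map<String, String>> shape that parseMetaDataConfigs() returns; the key names and values below are hypothetical placeholders, only the nesting (one flat config map per schema) comes from the interface.

import java.util.HashMap;
import java.util.Map;

public class MetaDataConfigDemo {
    public static void main(String[] args) {
        // The nesting returned by CDCBuilder.parseMetaDataConfigs():
        // schema name -> flat config map for that schema's metadata connection.
        Map<String, Map<String, String>> allConfigs = new HashMap<>();

        Map<String, String> db1 = new HashMap<>();
        db1.put("type", "MySql");                          // hypothetical keys; the
        db1.put("url", "jdbc:mysql://localhost:3306/db1"); // real ones live in ClientConstant
        db1.put("username", "root");
        allConfigs.put("db1", db1);

        allConfigs.forEach((schema, config) -> System.out.println(schema + " -> " + config));
    }
}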
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
deleted 100644 → 0
package com.dlink.cdc;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import com.dlink.assertion.Asserts;
import com.dlink.model.FlinkCDCConfig;

/**
 * FlinkCDCMergeBuilder
 *
 * @author wenmo
 * @since 2022/1/29 22:37
 */
public class FlinkCDCMergeBuilder {

    public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
        if (Asserts.isNotNull(config.getParallelism())) {
            env.setParallelism(config.getParallelism());
        }
        if (Asserts.isNotNull(config.getCheckpoint())) {
            env.enableCheckpointing(config.getCheckpoint());
        }
        DataStreamSource<String> streamSource = CDCBuilderFactory.buildCDCBuilder(config).build(env);
        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
    }

    private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
        return new FlinkKafkaProducer<String>(brokers, topic, new SimpleStringSchema());
    }
}
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/SinkBuilder.java
0 → 100644
package com.dlink.cdc;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:09
 **/
public interface SinkBuilder {

    String getHandle();

    SinkBuilder create(FlinkCDCConfig config);

    DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource);
}
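The interface is small enough that a new sink target only needs four members. Below is a sketch of a hypothetical "print" implementation against these project types (PrintSinkBuilder and its package are invented for illustration); to take effect it would also have to be added to the sinkBuilders array of the SinkBuilderFactory shown next.

package com.dlink.cdc.print; // hypothetical package

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

/**
 * A hypothetical SinkBuilder that just prints every change record,
 * showing the minimum an implementation must provide.
 */
public class PrintSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "print"; // matched against the sink 'connector' value

    public PrintSinkBuilder() {
    }

    public PrintSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new PrintSinkBuilder(config);
    }

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env,
                                  CustomTableEnvironment customTableEnvironment,
                                  DataStreamSource<String> dataStreamSource) {
        dataStreamSource.print(); // DataStream.print() is a standard Flink sink
        return dataStreamSource;
    }
}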
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
0 → 100644
package com.dlink.cdc;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.jdbc.JdbcSinkBuilder;
import com.dlink.cdc.kafka.KafkaSinkBuilder;
import com.dlink.exception.FlinkClientException;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilderFactory
 *
 * @author wenmo
 * @since 2022/4/12 21:12
 **/
public class SinkBuilderFactory {

    private static SinkBuilder[] sinkBuilders = {
        new KafkaSinkBuilder(),
        new JdbcSinkBuilder(),
    };

    public static SinkBuilder buildSinkBuilder(FlinkCDCConfig config) {
        if (Asserts.isNull(config) || Asserts.isNullString(config.getSink().get("connector"))) {
            throw new FlinkClientException("Please specify a Sink connector.");
        }
        for (int i = 0; i < sinkBuilders.length; i++) {
            if (config.getSink().get("connector").equals(sinkBuilders[i].getHandle())) {
                return sinkBuilders[i].create(config);
            }
        }
        throw new FlinkClientException("No matching Sink type for [" + config.getSink().get("connector") + "].");
    }
}
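The dispatch itself is just a linear scan over the registered builders, keyed on the sink map's connector entry; a self-contained sketch of that logic (with a stand-in Sink interface instead of the real SinkBuilder):

import java.util.HashMap;
import java.util.Map;

public class SinkDispatchDemo {
    interface Sink { String handle(); }

    public static void main(String[] args) {
        // Mirrors SinkBuilderFactory.buildSinkBuilder(): scan the registered
        // builders and match each handle against the sink map's 'connector'.
        Sink[] sinks = { () -> "kafka", () -> "jdbc" };

        Map<String, String> sinkConfig = new HashMap<>();
        sinkConfig.put("connector", "kafka");

        for (Sink sink : sinks) {
            if (sinkConfig.get("connector").equals(sink.handle())) {
                System.out.println("matched builder: " + sink.handle());
                return;
            }
        }
        throw new IllegalStateException("No matching Sink type for [" + sinkConfig.get("connector") + "].");
    }
}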
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/jdbc/JdbcSinkBuilder.java
0 → 100644
package com.dlink.cdc.jdbc;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * JdbcSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class JdbcSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "jdbc";
    private final static String TABLE_NAME = "cdc_table";

    public JdbcSinkBuilder() {
    }

    public JdbcSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new JdbcSinkBuilder(config);
    }

    /*@Override
    public DataStreamSource build(StreamExecutionEnvironment env, DataStreamSource<String> dataStreamSource) {
        final List<Schema> schemaList = config.getSchemaList();
        if (Asserts.isNotNullCollection(schemaList)) {
            for (Schema schema : schemaList) {
                for (Table table : schema.getTables()) {
                    *//*dataStreamSource.filter(new FilterFunction<Map>() {
                        @Override
                        public boolean filter(Map value) throws Exception {
                            return value.containsKey("table_name") && table.getName().equals(value.get("table_name"));
                        }
                    });
                    dataStreamSource.addSink(
                        JdbcSink.sink(
                            "insert into books (id, title, authors, year) values (?, ?, ?, ?)",
                            (statement, book) -> {
                                statement.setLong(1, book.id);
                                statement.setString(2, book.title);
                                statement.setString(3, book.authors);
                                statement.setInt(4, book.year);
                            },
                            JdbcExecutionOptions.builder()
                                .withBatchSize(1000)
                                .withBatchIntervalMs(200)
                                .withMaxRetries(5)
                                .build(),
                            new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                                .withUrl("jdbc:postgresql://dbhost:5432/postgresdb")
                                .withDriverName("org.postgresql.Driver")
                                .withUsername("someUser")
                                .withPassword("somePassword")
                                .build()
                        ));*//*
                }
            }
        }
        return dataStreamSource;
    }*/

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource) {
        final List<Schema> schemaList = config.getSchemaList();
        if (Asserts.isNotNullCollection(schemaList)) {
            /*org.apache.flink.table.api.Table table = env.fromChangelogStream(dataStreamSource);
            env.registerTable("cdc_table", table);*/
            customTableEnvironment.registerDataStream(TABLE_NAME, dataStreamSource);
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            StringBuilder sb = new StringBuilder();
            for (Map.Entry<String, String> entry : config.getSink().entrySet()) {
                sb.append("'");
                sb.append(entry.getKey());
                sb.append("' = '");
                sb.append(entry.getValue());
                sb.append("',\n");
            }
            for (Schema schema : schemaList) {
                for (Table item : schema.getTables()) {
                    customTableEnvironment.executeSql(item.getFlinkTableSql(sb.toString() + "'table-name' = '" + item.getSchemaTableName() + "'\n"));
                    List<Operation> operations = customTableEnvironment.getParser().parse(cdcBuilder.getInsertSQL(item, TABLE_NAME));
                    if (operations.size() > 0) {
                        Operation operation = operations.get(0);
                        if (operation instanceof ModifyOperation) {
                            modifyOperations.add((ModifyOperation) operation);
                        }
                    }
                }
            }
            List<Transformation<?>> trans = customTableEnvironment.getPlanner().translate(modifyOperations);
            for (Transformation<?> item : trans) {
                env.addOperator(item);
            }
        }
        return dataStreamSource;
    }
}
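The StringBuilder loop above turns every sink option into a 'key' = 'value' line of a Flink WITH clause, and the per-table 'table-name' option is appended last. A standalone sketch with hypothetical sink options:

import java.util.LinkedHashMap;
import java.util.Map;

public class WithClauseDemo {
    public static void main(String[] args) {
        // Mirrors the StringBuilder loop in JdbcSinkBuilder.build(): every sink
        // option becomes a 'key' = 'value' line, then the table name is appended.
        Map<String, String> sink = new LinkedHashMap<>();
        sink.put("connector", "jdbc");
        sink.put("url", "jdbc:mysql://localhost:3306/target"); // hypothetical values
        sink.put("username", "root");

        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> entry : sink.entrySet()) {
            sb.append("'").append(entry.getKey()).append("' = '").append(entry.getValue()).append("',\n");
        }
        String withFragment = sb.toString() + "'table-name' = '" + "db1.users" + "'\n";
        System.out.println(withFragment);
    }
}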
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
0 → 100644
package com.dlink.cdc.kafka;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;

/**
 * KafkaSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class KafkaSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "kafka";

    public KafkaSinkBuilder() {
    }

    public KafkaSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new KafkaSinkBuilder(config);
    }

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource) {
        if (Asserts.isNotNullString(config.getSink().get("topic"))) {
            dataStreamSource.addSink(new FlinkKafkaProducer<String>(config.getSink().get("brokers"), config.getSink().get("topic"), new SimpleStringSchema()));
        } else {
            final List<Schema> schemaList = config.getSchemaList();
            if (Asserts.isNotNullCollection(schemaList)) {
                SingleOutputStreamOperator<Map> mapOperator = dataStreamSource.map(new MapFunction<String, Map>() {
                    @Override
                    public Map map(String value) throws Exception {
                        ObjectMapper objectMapper = new ObjectMapper();
                        return objectMapper.readValue(value, Map.class);
                    }
                });
                for (Schema schema : schemaList) {
                    for (Table table : schema.getTables()) {
                        final String tableName = table.getName();
                        final String schemaName = table.getSchema();
                        SingleOutputStreamOperator<Map> filterOperator = mapOperator.filter(new FilterFunction<Map>() {
                            @Override
                            public boolean filter(Map value) throws Exception {
                                LinkedHashMap source = (LinkedHashMap) value.get("source");
                                return tableName.equals(source.get("table").toString()) && schemaName.equals(source.get("db").toString());
                            }
                        });
                        SingleOutputStreamOperator<String> stringOperator = filterOperator.map(new MapFunction<Map, String>() {
                            @Override
                            public String map(Map value) throws Exception {
                                ObjectMapper objectMapper = new ObjectMapper();
                                return objectMapper.writeValueAsString(value);
                            }
                        });
                        stringOperator.addSink(new FlinkKafkaProducer<String>(config.getSink().get("brokers"), table.getSchemaTableName(), new SimpleStringSchema()));
                    }
                }
            }
        }
        return dataStreamSource;
    }
}
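This is the heart of the "sync kafka topics" feature: when no fixed topic is configured, each Debezium record is routed by its source.db and source.table fields to a topic per table. A standalone sketch of that routing decision, assuming getSchemaTableName() yields the usual schema.table form:

import java.util.LinkedHashMap;
import java.util.Map;

public class TopicRoutingDemo {
    public static void main(String[] args) {
        // A Debezium-style change record as deserialized by the ObjectMapper in
        // KafkaSinkBuilder: the 'source' struct carries the origin db and table.
        Map<String, Object> value = new LinkedHashMap<>();
        LinkedHashMap<String, String> source = new LinkedHashMap<>();
        source.put("db", "db1");
        source.put("table", "users");
        value.put("source", source);
        value.put("op", "c");

        LinkedHashMap<?, ?> src = (LinkedHashMap<?, ?>) value.get("source");
        // Matches the FilterFunction: records are kept when db and table agree,
        // then produced to a topic named after Table.getSchemaTableName().
        String topic = src.get("db") + "." + src.get("table");
        System.out.println("route to topic: " + topic); // db1.users
    }
}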
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java
@@ -4,11 +4,19 @@ import org.apache.flink.api.common.eventtime.WatermarkStrategy;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import com.dlink.assertion.Asserts;
 import com.dlink.cdc.AbstractCDCBuilder;
 import com.dlink.cdc.CDCBuilder;
+import com.dlink.constant.ClientConstant;
 import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;
 import com.ververica.cdc.connectors.mysql.source.MySqlSource;
 import com.ververica.cdc.connectors.mysql.source.MySqlSourceBuilder;
 import com.ververica.cdc.connectors.mysql.table.StartupOptions;
@@ -22,7 +30,8 @@ import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
 **/
 public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
 
-    private String KEY_WORD = "mysql-cdc";
+    private final static String KEY_WORD = "mysql-cdc";
+    private final static String METADATA_TYPE = "MySql";
 
     public MysqlCDCBuilder() {
     }
@@ -77,4 +86,58 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
         }
         return env.fromSource(sourceBuilder.build(), WatermarkStrategy.noWatermarks(), "MySQL CDC Source");
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getDatabase();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        return schemaList;
+    }
+
+    public Map<String, Map<String, String>> parseMetaDataConfigs() {
+        Map<String, Map<String, String>> allConfigMap = new HashMap<>();
+        List<String> schemaList = getSchemaList();
+        for (String schema : schemaList) {
+            Map<String, String> configMap = new HashMap<>();
+            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
+            StringBuilder sb = new StringBuilder("jdbc:mysql://");
+            sb.append(config.getHostname());
+            sb.append(":");
+            sb.append(config.getPort());
+            sb.append("/");
+            sb.append(schema);
+            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
+            configMap.put(ClientConstant.METADATA_URL, sb.toString());
+            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
+            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
+            allConfigMap.put(schema, configMap);
+        }
+        return allConfigMap;
+    }
+
+    @Override
+    public String getInsertSQL(Table table, String sourceName) {
+        StringBuilder sb = new StringBuilder("INSERT INTO ");
+        sb.append(table.getName());
+        sb.append(" SELECT\n");
+        for (int i = 0; i < table.getColumns().size(); i++) {
+            sb.append(" ");
+            if (i > 0) {
+                sb.append(",");
+            }
+            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
+        }
+        sb.append(" FROM ");
+        sb.append(sourceName);
+        sb.append(" WHERE database_name = '");
+        sb.append(table.getSchema());
+        sb.append("' and table_name = '");
+        sb.append(table.getName());
+        sb.append("'");
+        return sb.toString();
+    }
 }
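A standalone sketch reproducing what getInsertSQL() emits for a hypothetical table db1.users(id, name) read from the registered source view cdc_table:

import java.util.Arrays;
import java.util.List;

public class InsertSqlDemo {
    public static void main(String[] args) {
        // Same string assembly as MysqlCDCBuilder.getInsertSQL(), with literal
        // values standing in for the Table getters.
        String schema = "db1";
        String tableName = "users";
        List<String> columns = Arrays.asList("id", "name");

        StringBuilder sb = new StringBuilder("INSERT INTO ");
        sb.append(tableName);
        sb.append(" SELECT\n");
        for (int i = 0; i < columns.size(); i++) {
            sb.append(" ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + columns.get(i) + "` \n");
        }
        sb.append(" FROM cdc_table");
        sb.append(" WHERE database_name = '");
        sb.append(schema);
        sb.append("' and table_name = '");
        sb.append(tableName);
        sb.append("'");
        System.out.println(sb);
        // INSERT INTO users SELECT
        //  `id`
        //  ,`name`
        //  FROM cdc_table WHERE database_name = 'db1' and table_name = 'users'
    }
}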
dlink-client/dlink-client-1.13/src/main/java/com/dlink/cdc/oracle/OracleCDCBuilder.java
@@ -3,10 +3,16 @@ package com.dlink.cdc.oracle;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import com.dlink.assertion.Asserts;
 import com.dlink.cdc.AbstractCDCBuilder;
 import com.dlink.cdc.CDCBuilder;
+import com.dlink.constant.ClientConstant;
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;
 import com.ververica.cdc.connectors.oracle.OracleSource;
 import com.ververica.cdc.connectors.oracle.table.StartupOptions;
 import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
@@ -19,7 +25,8 @@ import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
 **/
 public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
 
-    private String KEY_WORD = "oracle-cdc";
+    private final static String KEY_WORD = "oracle-cdc";
+    private final static String METADATA_TYPE = "Oracle";
 
     public OracleCDCBuilder() {
     }
@@ -71,4 +78,47 @@ public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
         }
         return env.addSource(sourceBuilder.build(), "Oracle CDC Source");
     }
+
+    public Map<String, Map<String, String>> parseMetaDataConfigs() {
+        Map<String, Map<String, String>> allConfigList = new HashMap<>();
+        List<String> schemaList = getSchemaList();
+        for (String schema : schemaList) {
+            Map<String, String> configMap = new HashMap<>();
+            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
+            StringBuilder sb = new StringBuilder("jdbc:oracle:thin:@");
+            sb.append(config.getHostname());
+            sb.append(":");
+            sb.append(config.getPort());
+            sb.append(":");
+            sb.append(config.getDatabase());
+            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
+            configMap.put(ClientConstant.METADATA_URL, sb.toString());
+            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
+            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
+            allConfigList.put(schema, configMap);
+        }
+        return allConfigList;
+    }
+
+    @Override
+    public String getInsertSQL(Table table, String sourceName) {
+        StringBuilder sb = new StringBuilder("INSERT INTO ");
+        sb.append(table.getName());
+        sb.append(" SELECT\n");
+        for (int i = 0; i < table.getColumns().size(); i++) {
+            sb.append(" ");
+            if (i > 0) {
+                sb.append(",");
+            }
+            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
+        }
+        sb.append(" FROM ");
+        sb.append(sourceName);
+        sb.append(" WHERE schema_name = '");
+        sb.append(table.getSchema());
+        sb.append("' and table_name = '");
+        sb.append(table.getName());
+        sb.append("'");
+        return sb.toString();
+    }
 }
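The only Oracle-specific part of parseMetaDataConfigs() is the URL shape; a small sketch with hypothetical connection details:

public class OracleUrlDemo {
    public static void main(String[] args) {
        // Same concatenation as OracleCDCBuilder.parseMetaDataConfigs(); Oracle thin
        // URLs take host:port:database, unlike the host:port/database form for MySQL.
        String hostname = "10.0.0.5"; // hypothetical connection details
        int port = 1521;
        String database = "ORCL";
        String url = "jdbc:oracle:thin:@" + hostname + ":" + port + ":" + database;
        System.out.println(url); // jdbc:oracle:thin:@10.0.0.5:1521:ORCL
    }
}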
dlink-client/dlink-client-1.13/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java
 package com.dlink.executor;
 
-import com.dlink.assertion.Asserts;
-import com.dlink.result.SqlExplainResult;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.RuntimeExecutionMode;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.dag.Transformation;
@@ -14,21 +9,31 @@ import org.apache.flink.configuration.PipelineOptions;
 import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
 import org.apache.flink.runtime.rest.messages.JobPlanInfo;
+import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.graph.JSONGenerator;
 import org.apache.flink.streaming.api.graph.StreamGraph;
 import org.apache.flink.table.api.EnvironmentSettings;
 import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableConfig;
 import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.api.internal.TableEnvironmentImpl;
 import org.apache.flink.table.catalog.CatalogManager;
+import org.apache.flink.table.catalog.ExternalSchemaTranslator;
 import org.apache.flink.table.catalog.FunctionCatalog;
 import org.apache.flink.table.catalog.GenericInMemoryCatalog;
+import org.apache.flink.table.catalog.ObjectIdentifier;
+import org.apache.flink.table.catalog.ResolvedSchema;
+import org.apache.flink.table.catalog.SchemaResolver;
+import org.apache.flink.table.catalog.UnresolvedIdentifier;
+import org.apache.flink.table.connector.ChangelogMode;
 import org.apache.flink.table.delegation.Executor;
 import org.apache.flink.table.delegation.ExecutorFactory;
 import org.apache.flink.table.delegation.Planner;
 import org.apache.flink.table.delegation.PlannerFactory;
+import org.apache.flink.table.expressions.ApiExpressionUtils;
 import org.apache.flink.table.factories.ComponentFactoryService;
 import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.TableAggregateFunction;
@@ -36,19 +41,32 @@ import org.apache.flink.table.functions.TableFunction;
 import org.apache.flink.table.functions.UserDefinedFunctionHelper;
 import org.apache.flink.table.module.ModuleManager;
 import org.apache.flink.table.operations.ExplainOperation;
+import org.apache.flink.table.operations.JavaExternalQueryOperation;
 import org.apache.flink.table.operations.ModifyOperation;
 import org.apache.flink.table.operations.Operation;
 import org.apache.flink.table.operations.QueryOperation;
 import org.apache.flink.table.operations.command.ResetOperation;
 import org.apache.flink.table.operations.command.SetOperation;
+import org.apache.flink.table.operations.utils.OperationTreeBuilder;
 import org.apache.flink.table.planner.delegation.ExecutorBase;
 import org.apache.flink.table.planner.utils.ExecutorUtils;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
 
 import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+
+import javax.annotation.Nullable;
+
+import com.dlink.assertion.Asserts;
+import com.dlink.result.SqlExplainResult;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 
 /**
  * Customized TableEnvironmentImpl
@@ -58,8 +76,28 @@
 **/
 public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {
 
-    protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
-        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+    private final StreamExecutionEnvironment executionEnvironment;
+
+    public CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, TableConfig tableConfig, StreamExecutionEnvironment executionEnvironment, Planner planner, Executor executor, boolean isStreamingMode, ClassLoader userClassLoader) {
+        super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
+        this.executionEnvironment = executionEnvironment;
     }
 
     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {
@@ -74,30 +112,77 @@
         return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
     }
 
     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
         return create(executionEnvironment, settings, new TableConfig());
     }
 
     public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
         ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
         ModuleManager moduleManager = new ModuleManager();
-        CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build();
+        CatalogManager catalogManager = CatalogManager.newBuilder()
+            .classLoader(classLoader)
+            .config(tableConfig.getConfiguration())
+            .defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
+            .executionConfig(executionEnvironment.getConfig())
+            .build();
         FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
         Map<String, String> executorProperties = settings.toExecutorProperties();
         Executor executor = lookupExecutor(executorProperties, executionEnvironment);
         Map<String, String> plannerProperties = settings.toPlannerProperties();
-        Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
-        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader);
+        Planner planner =
+            ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
+                .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
+        return new CustomTableEnvironmentImpl(catalogManager, moduleManager, functionCatalog, tableConfig, executionEnvironment, planner, executor, settings.isStreamingMode(), classLoader);
     }
 
     private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) {
         try {
-            ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
-            Method createMethod = executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
-            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
-        } catch (Exception var4) {
-            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", var4);
+            ExecutorFactory executorFactory =
+                ComponentFactoryService.find(ExecutorFactory.class, executorProperties);
+            Method createMethod =
+                executorFactory.getClass().getMethod("create", Map.class, StreamExecutionEnvironment.class);
+            return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment);
+        } catch (Exception e) {
+            throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", e);
         }
     }
@@ -173,7 +258,7 @@
         record.setParseTrue(true);
         if (operations.size() != 1) {
             throw new TableException(
                 "Unsupported SQL query! explainSql() only accepts a single SQL query.");
         }
         List<Operation> operationlist = new ArrayList<>(operations);
         for (int i = 0; i < operationlist.size(); i++) {
@@ -263,4 +348,72 @@
             setMap.clear();
         }
     }
+
+    @Override
+    public Table fromChangelogStream(DataStream<Row> dataStream) {
+        return fromStreamInternal(dataStream, null, null, ChangelogMode.all());
+    }
+
+    @Override
+    public <T> void registerDataStream(String name, DataStream<T> dataStream) {
+        createTemporaryView(name, dataStream);
+    }
+
+    @Override
+    public <T> void createTemporaryView(String path, DataStream<T> dataStream) {
+        createTemporaryView(path, fromStreamInternal(dataStream, null, path, ChangelogMode.insertOnly()));
+    }
+
+    private <T> Table fromStreamInternal(DataStream<T> dataStream, @Nullable Schema schema, @Nullable String viewPath, ChangelogMode changelogMode) {
+        Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
+        Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
+        final CatalogManager catalogManager = getCatalogManager();
+        final SchemaResolver schemaResolver = catalogManager.getSchemaResolver();
+        final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();
+        final UnresolvedIdentifier unresolvedIdentifier;
+        if (viewPath != null) {
+            unresolvedIdentifier = getParser().parseIdentifier(viewPath);
+        } else {
+            unresolvedIdentifier = UnresolvedIdentifier.of("Unregistered_DataStream_Source_" + dataStream.getId());
+        }
+        final ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
+        final ExternalSchemaTranslator.InputResult schemaTranslationResult =
+            ExternalSchemaTranslator.fromExternal(catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
+        final ResolvedSchema resolvedSchema = schemaTranslationResult.getSchema().resolve(schemaResolver);
+        final QueryOperation scanOperation =
+            new JavaExternalQueryOperation<>(
+                objectIdentifier,
+                dataStream,
+                schemaTranslationResult.getPhysicalDataType(),
+                schemaTranslationResult.isTopLevelRecord(),
+                changelogMode,
+                resolvedSchema);
+        final List<String> projections = schemaTranslationResult.getProjections();
+        if (projections == null) {
+            return createTable(scanOperation);
+        }
+        final QueryOperation projectOperation =
+            operationTreeBuilder.project(
+                projections.stream()
+                    .map(ApiExpressionUtils::unresolvedRef)
+                    .collect(Collectors.toList()),
+                scanOperation);
+        return createTable(projectOperation);
+    }
 }
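A sketch of how these new bridge methods are intended to be used from a job, assuming a Flink 1.13 runtime and this client module on the classpath; the stream contents are placeholders:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironmentImpl;

public class RegisterStreamDemo {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        CustomTableEnvironmentImpl tableEnv = CustomTableEnvironmentImpl.create(env);

        DataStream<String> jsonStream = env.fromElements("{\"id\":1}", "{\"id\":2}");

        // registerDataStream() delegates to createTemporaryView(), which wraps the
        // stream as an insert-only table via fromStreamInternal().
        tableEnv.registerDataStream("demo_stream", jsonStream);
        tableEnv.executeSql("SELECT * FROM demo_stream").print();
    }
}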
dlink-client/dlink-client-1.14/pom.xml
@@ -13,12 +13,8 @@
     <properties>
         <java.version>1.8</java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <flink.version>1.14.3</flink.version>
-        <flinkcdc.version>2.2.0</flinkcdc.version>
-        <commons.version>1.3.1</commons.version>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <junit.version>4.12</junit.version>
     </properties>
     <dependencies>
@@ -27,91 +23,14 @@
             <artifactId>dlink-client-base</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-            </exclusions>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-yarn_2.11</artifactId>
-            <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-hdfs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-yarn-client</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-mapreduce-client-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-hadoop</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.ververica</groupId>
-            <artifactId>flink-connector-mysql-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.ververica</groupId>
-            <artifactId>flink-connector-oracle-cdc</artifactId>
-            <version>${flinkcdc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-common</artifactId>
         </dependency>
         <dependency>
-            <groupId>commons-cli</groupId>
-            <artifactId>commons-cli</artifactId>
-            <version>${commons.version}</version>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.14</artifactId>
+            <!-- <scope>provided</scope>-->
         </dependency>
     </dependencies>
 </project>
\ No newline at end of file
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/AbstractCDCBuilder.java
 package com.dlink.cdc;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import com.dlink.assertion.Asserts;
+import com.dlink.constant.FlinkParamConstant;
 import com.dlink.model.FlinkCDCConfig;
 
 /**
@@ -26,4 +32,35 @@ public abstract class AbstractCDCBuilder {
     public void setConfig(FlinkCDCConfig config) {
         this.config = config;
     }
+
+    public List<String> getSchemaList() {
+        List<String> schemaList = new ArrayList<>();
+        String schema = config.getSchema();
+        if (Asserts.isNullString(schema)) {
+            return schemaList;
+        }
+        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(schemaList, schemas);
+        List<String> tableList = getTableList();
+        for (String tableName : tableList) {
+            if (Asserts.isNotNullString(tableName) && tableName.contains(".")) {
+                // String.split takes a regex, so the dot must be escaped
+                String[] names = tableName.split("\\.");
+                if (!schemaList.contains(names[0])) {
+                    schemaList.add(names[0]);
+                }
+            }
+        }
+        return schemaList;
+    }
+
+    public List<String> getTableList() {
+        List<String> tableList = new ArrayList<>();
+        String table = config.getTable();
+        if (Asserts.isNullString(table)) {
+            return tableList;
+        }
+        String[] tables = table.split(FlinkParamConstant.SPLIT);
+        Collections.addAll(tableList, tables);
+        return tableList;
+    }
 }
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/AbstractSinkBuilder.java
0 → 100644
package com.dlink.cdc;

import com.dlink.model.FlinkCDCConfig;

/**
 * AbstractSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:28
 **/
public abstract class AbstractSinkBuilder {

    protected FlinkCDCConfig config;

    public AbstractSinkBuilder() {
    }

    public AbstractSinkBuilder(FlinkCDCConfig config) {
        this.config = config;
    }

    public FlinkCDCConfig getConfig() {
        return config;
    }

    public void setConfig(FlinkCDCConfig config) {
        this.config = config;
    }
}
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/CDCBuilder.java
@@ -3,7 +3,11 @@ package com.dlink.cdc;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
+import java.util.List;
+import java.util.Map;
+
 import com.dlink.model.FlinkCDCConfig;
+import com.dlink.model.Table;
 
 /**
  * CDCBuilder
@@ -17,5 +21,13 @@ public interface CDCBuilder {
     CDCBuilder create(FlinkCDCConfig config);
 
-    DataStreamSource build(StreamExecutionEnvironment env);
+    DataStreamSource<String> build(StreamExecutionEnvironment env);
+
+    List<String> getSchemaList();
+
+    List<String> getTableList();
+
+    Map<String, Map<String, String>> parseMetaDataConfigs();
+
+    String getInsertSQL(Table table, String sourceName);
 }
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/FlinkCDCMergeBuilder.java
deleted 100644 → 0
package com.dlink.cdc;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.assertion.Asserts;
import com.dlink.model.FlinkCDCConfig;

/**
 * FlinkCDCMergeBuilder
 *
 * @author wenmo
 * @since 2022/1/29 22:37
 */
public class FlinkCDCMergeBuilder {

    public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
        if (Asserts.isNotNull(config.getParallelism())) {
            env.setParallelism(config.getParallelism());
        }
        if (Asserts.isNotNull(config.getCheckpoint())) {
            env.enableCheckpointing(config.getCheckpoint());
        }
        DataStreamSource<String> streamSource = CDCBuilderFactory.buildCDCBuilder(config).build(env);
        streamSource.sinkTo(getKafkaProducer(config.getBrokers(), config.getTopic()));
    }

    private static KafkaSink<String> getKafkaProducer(String brokers, String topic) {
        return KafkaSink.<String>builder()
            .setBootstrapServers(brokers)
            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                .setTopic(topic)
                .setValueSerializationSchema(new SimpleStringSchema())
                .build())
            .build();
    }
}
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/SinkBuilder.java
0 → 100644
package com.dlink.cdc;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:09
 **/
public interface SinkBuilder {

    String getHandle();

    SinkBuilder create(FlinkCDCConfig config);

    DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource);
}
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/SinkBuilderFactory.java
0 → 100644
package com.dlink.cdc;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.jdbc.JdbcSinkBuilder;
import com.dlink.cdc.kafka.KafkaSinkBuilder;
import com.dlink.exception.FlinkClientException;
import com.dlink.model.FlinkCDCConfig;

/**
 * SinkBuilderFactory
 *
 * @author wenmo
 * @since 2022/4/12 21:12
 **/
public class SinkBuilderFactory {

    private static SinkBuilder[] sinkBuilders = {
        new KafkaSinkBuilder(),
        new JdbcSinkBuilder(),
    };

    public static SinkBuilder buildSinkBuilder(FlinkCDCConfig config) {
        if (Asserts.isNull(config) || Asserts.isNullString(config.getSink().get("connector"))) {
            throw new FlinkClientException("Please specify a Sink connector.");
        }
        for (int i = 0; i < sinkBuilders.length; i++) {
            if (config.getSink().get("connector").equals(sinkBuilders[i].getHandle())) {
                return sinkBuilders[i].create(config);
            }
        }
        throw new FlinkClientException("No matching Sink type for [" + config.getSink().get("connector") + "].");
    }
}
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/jdbc/JdbcSinkBuilder.java
0 → 100644
package com.dlink.cdc.jdbc;

import org.apache.flink.api.dag.Transformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;

/**
 * JdbcSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class JdbcSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "jdbc";
    private final static String TABLE_NAME = "cdc_table";

    public JdbcSinkBuilder() {
    }

    public JdbcSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new JdbcSinkBuilder(config);
    }

    /*@Override
    public DataStreamSource build(StreamExecutionEnvironment env, DataStreamSource<String> dataStreamSource) {
        final List<Schema> schemaList = config.getSchemaList();
        if (Asserts.isNotNullCollection(schemaList)) {
            for (Schema schema : schemaList) {
                for (Table table : schema.getTables()) {
                    *//*dataStreamSource.filter(new FilterFunction<Map>() {
                        @Override
                        public boolean filter(Map value) throws Exception {
                            return value.containsKey("table_name") && table.getName().equals(value.get("table_name"));
                        }
                    });
                    dataStreamSource.addSink(
                        JdbcSink.sink(
                            "insert into books (id, title, authors, year) values (?, ?, ?, ?)",
                            (statement, book) -> {
                                statement.setLong(1, book.id);
                                statement.setString(2, book.title);
                                statement.setString(3, book.authors);
                                statement.setInt(4, book.year);
                            },
                            JdbcExecutionOptions.builder()
                                .withBatchSize(1000)
                                .withBatchIntervalMs(200)
                                .withMaxRetries(5)
                                .build(),
                            new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                                .withUrl("jdbc:postgresql://dbhost:5432/postgresdb")
                                .withDriverName("org.postgresql.Driver")
                                .withUsername("someUser")
                                .withPassword("somePassword")
                                .build()
                        ));*//*
                }
            }
        }
        return dataStreamSource;
    }*/

    @Override
    public DataStreamSource build(CDCBuilder cdcBuilder, StreamExecutionEnvironment env, CustomTableEnvironment customTableEnvironment, DataStreamSource<String> dataStreamSource) {
        final List<Schema> schemaList = config.getSchemaList();
        if (Asserts.isNotNullCollection(schemaList)) {
            /*org.apache.flink.table.api.Table table = env.fromChangelogStream(dataStreamSource);
            env.registerTable("cdc_table", table);*/
            customTableEnvironment.registerDataStream(TABLE_NAME, dataStreamSource);
            List<ModifyOperation> modifyOperations = new ArrayList<>();
            StringBuilder sb = new StringBuilder();
            for (Map.Entry<String, String> entry : config.getSink().entrySet()) {
                sb.append("'");
                sb.append(entry.getKey());
                sb.append("' = '");
                sb.append(entry.getValue());
                sb.append("',\n");
            }
            for (Schema schema : schemaList) {
                for (Table item : schema.getTables()) {
                    customTableEnvironment.executeSql(item.getFlinkTableSql(sb.toString() + "'table-name' = '" + item.getSchemaTableName() + "'\n"));
                    List<Operation> operations = customTableEnvironment.getParser().parse(cdcBuilder.getInsertSQL(item, TABLE_NAME));
                    if (operations.size() > 0) {
                        Operation operation = operations.get(0);
                        if (operation instanceof ModifyOperation) {
                            modifyOperations.add((ModifyOperation) operation);
                        }
                    }
                }
            }
            List<Transformation<?>> trans = customTableEnvironment.getPlanner().translate(modifyOperations);
            for (Transformation<?> item : trans) {
                env.addOperator(item);
            }
        }
        return dataStreamSource;
    }
}
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/kafka/KafkaSinkBuilder.java
0 → 100644
View file @
429a25da
package com.dlink.cdc.kafka;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;

/**
 * KafkaSinkBuilder
 *
 * @author wenmo
 * @since 2022/4/12 21:29
 **/
public class KafkaSinkBuilder extends AbstractSinkBuilder implements SinkBuilder {

    private final static String KEY_WORD = "kafka";

    public KafkaSinkBuilder() {
    }

    public KafkaSinkBuilder(FlinkCDCConfig config) {
        super(config);
    }

    @Override
    public String getHandle() {
        return KEY_WORD;
    }

    @Override
    public SinkBuilder create(FlinkCDCConfig config) {
        return new KafkaSinkBuilder(config);
    }

    @Override
    public DataStreamSource build(
        CDCBuilder cdcBuilder,
        StreamExecutionEnvironment env,
        CustomTableEnvironment customTableEnvironment,
        DataStreamSource<String> dataStreamSource) {
        if (Asserts.isNotNullString(config.getSink().get("topic"))) {
            dataStreamSource.sinkTo(KafkaSink.<String>builder()
                .setBootstrapServers(config.getSink().get("brokers"))
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                    .setTopic(config.getSink().get("topic"))
                    .setValueSerializationSchema(new SimpleStringSchema())
                    .build())
                .build());
        } else {
            final List<Schema> schemaList = config.getSchemaList();
            if (Asserts.isNotNullCollection(schemaList)) {
                SingleOutputStreamOperator<Map> mapOperator = dataStreamSource.map(new MapFunction<String, Map>() {
                    @Override
                    public Map map(String value) throws Exception {
                        ObjectMapper objectMapper = new ObjectMapper();
                        return objectMapper.readValue(value, Map.class);
                    }
                });
                for (Schema schema : schemaList) {
                    for (Table table : schema.getTables()) {
                        final String tableName = table.getName();
                        final String schemaName = table.getSchema();
                        SingleOutputStreamOperator<Map> filterOperator = mapOperator.filter(new FilterFunction<Map>() {
                            @Override
                            public boolean filter(Map value) throws Exception {
                                LinkedHashMap source = (LinkedHashMap) value.get("source");
                                return tableName.equals(source.get("table").toString())
                                    && schemaName.equals(source.get("db").toString());
                            }
                        });
                        SingleOutputStreamOperator<String> stringOperator = filterOperator.map(new MapFunction<Map, String>() {
                            @Override
                            public String map(Map value) throws Exception {
                                ObjectMapper objectMapper = new ObjectMapper();
                                return objectMapper.writeValueAsString(value);
                            }
                        });
                        stringOperator.sinkTo(KafkaSink.<String>builder()
                            .setBootstrapServers(config.getSink().get("brokers"))
                            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                                .setTopic(table.getSchemaTableName())
                                .setValueSerializationSchema(new SimpleStringSchema())
                                .build())
                            .build());
                    }
                }
            }
        }
        return dataStreamSource;
    }
}
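KafkaSinkBuilder has two modes: with an explicit 'sink.topic' everything goes to a single topic, otherwise each table's change stream is filtered out of the Debezium JSON and written to a topic named by getSchemaTableName(), i.e. "schema.table". Here is a small sketch of the routing predicate against a hand-written envelope; the field names follow Debezium's MySQL connector, and the record value itself is an assumption for illustration.

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

import java.util.LinkedHashMap;
import java.util.Map;

public class TopicRoutingSketch {
    public static void main(String[] args) throws Exception {
        String debeziumJson = "{\"source\":{\"db\":\"inventory\",\"table\":\"orders\"},\"op\":\"c\"}";
        Map value = new ObjectMapper().readValue(debeziumJson, Map.class);
        LinkedHashMap source = (LinkedHashMap) value.get("source");
        // Mirrors the FilterFunction above: a match means this record is
        // re-serialized and sent to the topic "inventory.orders".
        boolean matches = "orders".equals(source.get("table").toString())
            && "inventory".equals(source.get("db").toString());
        System.out.println(matches);
    }
}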
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/mysql/MysqlCDCBuilder.java

@@ -4,11 +4,19 @@ import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractCDCBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.constant.ClientConstant;
import com.dlink.constant.FlinkParamConstant;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Table;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.source.MySqlSourceBuilder;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;

@@ -23,6 +31,7 @@ import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {

    private String KEY_WORD = "mysql-cdc";
    private final static String METADATA_TYPE = "MySql";

    public MysqlCDCBuilder() {
    }

@@ -77,4 +86,58 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
        }
        return env.fromSource(sourceBuilder.build(), WatermarkStrategy.noWatermarks(), "MySQL CDC Source");
    }

    public List<String> getSchemaList() {
        List<String> schemaList = new ArrayList<>();
        String schema = config.getDatabase();
        if (Asserts.isNullString(schema)) {
            return schemaList;
        }
        String[] schemas = schema.split(FlinkParamConstant.SPLIT);
        Collections.addAll(schemaList, schemas);
        return schemaList;
    }

    public Map<String, Map<String, String>> parseMetaDataConfigs() {
        Map<String, Map<String, String>> allConfigMap = new HashMap<>();
        List<String> schemaList = getSchemaList();
        for (String schema : schemaList) {
            Map<String, String> configMap = new HashMap<>();
            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
            StringBuilder sb = new StringBuilder("jdbc:mysql://");
            sb.append(config.getHostname());
            sb.append(":");
            sb.append(config.getPort());
            sb.append("/");
            sb.append(schema);
            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
            configMap.put(ClientConstant.METADATA_URL, sb.toString());
            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
            allConfigMap.put(schema, configMap);
        }
        return allConfigMap;
    }

    @Override
    public String getInsertSQL(Table table, String sourceName) {
        StringBuilder sb = new StringBuilder("INSERT INTO ");
        sb.append(table.getName());
        sb.append(" SELECT\n");
        for (int i = 0; i < table.getColumns().size(); i++) {
            sb.append("    ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
        }
        sb.append(" FROM ");
        sb.append(sourceName);
        /* sb.append(" WHERE database_name = '");
        sb.append(table.getSchema());
        sb.append("' and table_name = '");
        sb.append(table.getName());
        sb.append("'");*/
        return sb.toString();
    }
}
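Note that getSchemaList splits the 'database' option on FlinkParamConstant.SPLIT, which this commit changes from ";" to "," (see FlinkParamConstant.java below), and parseMetaDataConfigs then derives one JDBC metadata config per schema. A hedged sketch of the resulting map for 'database' = 'db1,db2'; the host, port, and credentials are placeholders, and the literal keys stand in for the ClientConstant values:

import java.util.HashMap;
import java.util.Map;

public class MysqlMetaConfigSketch {
    public static void main(String[] args) {
        Map<String, Map<String, String>> allConfigMap = new HashMap<>();
        for (String schema : "db1,db2".split(",")) {
            Map<String, String> configMap = new HashMap<>();
            configMap.put("type", "MySql");
            configMap.put("name", "jdbc:mysql://127.0.0.1:3306/" + schema);
            configMap.put("url", "jdbc:mysql://127.0.0.1:3306/" + schema);
            configMap.put("username", "root");
            configMap.put("password", "***");
            allConfigMap.put(schema, configMap);
        }
        // Expected shape: {db1={type=MySql, url=..., ...}, db2={...}}
        System.out.println(allConfigMap);
    }
}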
dlink-client/dlink-client-1.14/src/main/java/com/dlink/cdc/oracle/OracleCDCBuilder.java

@@ -3,10 +3,16 @@ package com.dlink.cdc.oracle;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractCDCBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.constant.ClientConstant;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Table;
import com.ververica.cdc.connectors.oracle.OracleSource;
import com.ververica.cdc.connectors.oracle.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;

@@ -20,6 +26,8 @@ import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {

    private String KEY_WORD = "oracle-cdc";
    private final static String METADATA_TYPE = "Oracle";

    public OracleCDCBuilder() {
    }

@@ -71,4 +79,47 @@ public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
        }
        return env.addSource(sourceBuilder.build(), "Oracle CDC Source");
    }

    public Map<String, Map<String, String>> parseMetaDataConfigs() {
        Map<String, Map<String, String>> allConfigList = new HashMap<>();
        List<String> schemaList = getSchemaList();
        for (String schema : schemaList) {
            Map<String, String> configMap = new HashMap<>();
            configMap.put(ClientConstant.METADATA_TYPE, METADATA_TYPE);
            StringBuilder sb = new StringBuilder("jdbc:oracle:thin:@");
            sb.append(config.getHostname());
            sb.append(":");
            sb.append(config.getPort());
            sb.append(":");
            sb.append(config.getDatabase());
            configMap.put(ClientConstant.METADATA_NAME, sb.toString());
            configMap.put(ClientConstant.METADATA_URL, sb.toString());
            configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
            configMap.put(ClientConstant.METADATA_PASSWORD, config.getPassword());
            allConfigList.put(schema, configMap);
        }
        return allConfigList;
    }

    @Override
    public String getInsertSQL(Table table, String sourceName) {
        StringBuilder sb = new StringBuilder("INSERT INTO ");
        sb.append(table.getName());
        sb.append(" SELECT\n");
        for (int i = 0; i < table.getColumns().size(); i++) {
            sb.append("    ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + table.getColumns().get(i).getName() + "` \n");
        }
        sb.append(" FROM ");
        sb.append(sourceName);
        sb.append(" WHERE schema_name = '");
        sb.append(table.getSchema());
        sb.append("' and table_name = '");
        sb.append(table.getName());
        sb.append("'");
        return sb.toString();
    }
}
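Unlike the MySQL variant, where the WHERE clause is commented out, the Oracle getInsertSQL keeps it. A small runnable sketch that replays the string-building above for a hypothetical HR.EMP(ID, NAME) table, so the generated statement's shape is visible:

import java.util.Arrays;
import java.util.List;

public class OracleInsertSqlSketch {
    public static void main(String[] args) {
        // Hypothetical columns; the real code reads them from table.getColumns().
        List<String> columns = Arrays.asList("ID", "NAME");
        StringBuilder sb = new StringBuilder("INSERT INTO EMP SELECT\n");
        for (int i = 0; i < columns.size(); i++) {
            sb.append("    ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + columns.get(i) + "` \n");
        }
        sb.append(" FROM cdc_source WHERE schema_name = 'HR' and table_name = 'EMP'");
        System.out.println(sb);
    }
}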
dlink-client/dlink-client-1.14/src/main/java/com/dlink/executor/CustomTableEnvironmentImpl.java

@@ -14,20 +14,31 @@ import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.SchemaResolver;
import org.apache.flink.table.catalog.SchemaTranslator;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.expressions.ApiExpressionUtils;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.factories.PlannerFactoryUtil;
import org.apache.flink.table.functions.AggregateFunction;

@@ -36,18 +47,24 @@ import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.JavaExternalQueryOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.command.ResetOperation;
import org.apache.flink.table.operations.command.SetOperation;
import org.apache.flink.table.operations.utils.OperationTreeBuilder;
import org.apache.flink.table.planner.delegation.DefaultExecutor;
import org.apache.flink.types.Row;
import org.apache.flink.util.Preconditions;

import javax.annotation.Nullable;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Customized TableEnvironmentImpl
 * ...

@@ -57,29 +74,28 @@ import java.util.Map;
 **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {

    private final StreamExecutionEnvironment executionEnvironment;

    public CustomTableEnvironmentImpl(
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog,
        TableConfig tableConfig,
        StreamExecutionEnvironment executionEnvironment,
        Planner planner,
        Executor executor,
        boolean isStreamingMode,
        ClassLoader userClassLoader) {
        super(
            catalogManager,
            moduleManager,
            tableConfig,
            executor,
            functionCatalog,
            planner,
            isStreamingMode,
            userClassLoader);
        this.executionEnvironment = executionEnvironment;
    }

    public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) {

@@ -95,9 +111,9 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
    }

    public static CustomTableEnvironmentImpl create(
        StreamExecutionEnvironment executionEnvironment,
        EnvironmentSettings settings,
        TableConfig tableConfig) {
        // temporary solution until FLINK-15635 is fixed
        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

@@ -105,61 +121,61 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
        final ModuleManager moduleManager = new ModuleManager();
        final CatalogManager catalogManager =
            CatalogManager.newBuilder()
                .classLoader(classLoader)
                .config(tableConfig.getConfiguration())
                .defaultCatalog(
                    settings.getBuiltInCatalogName(),
                    new GenericInMemoryCatalog(
                        settings.getBuiltInCatalogName(),
                        settings.getBuiltInDatabaseName()))
                .executionConfig(executionEnvironment.getConfig())
                .build();
        final FunctionCatalog functionCatalog =
            new FunctionCatalog(tableConfig, catalogManager, moduleManager);
        final Executor executor =
            lookupExecutor(classLoader, settings.getExecutor(), executionEnvironment);
        final Planner planner =
            PlannerFactoryUtil.createPlanner(
                settings.getPlanner(),
                executor,
                tableConfig,
                catalogManager,
                functionCatalog);
        return new CustomTableEnvironmentImpl(
            catalogManager,
            moduleManager,
            functionCatalog,
            tableConfig,
            executionEnvironment,
            planner,
            executor,
            settings.isStreamingMode(),
            classLoader);
    }

    private static Executor lookupExecutor(
        ClassLoader classLoader,
        String executorIdentifier,
        StreamExecutionEnvironment executionEnvironment) {
        try {
            final ExecutorFactory executorFactory =
                FactoryUtil.discoverFactory(
                    classLoader, ExecutorFactory.class, executorIdentifier);
            final Method createMethod =
                executorFactory
                    .getClass()
                    .getMethod("create", StreamExecutionEnvironment.class);
            return (Executor) createMethod.invoke(executorFactory, executionEnvironment);
        } catch (Exception e) {
            throw new TableException(
                "Could not instantiate the executor. Make sure a planner module is on the classpath",
                e);
        }
    }

@@ -236,7 +252,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
        record.setParseTrue(true);
        if (operations.size() != 1) {
            throw new TableException(
                "Unsupported SQL query! explainSql() only accepts a single SQL query.");
        }
        List<Operation> operationlist = new ArrayList<>(operations);
        for (int i = 0; i < operationlist.size(); i++) {

@@ -326,4 +342,80 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
            setMap.clear();
        }
    }

    @Override
    public Table fromChangelogStream(DataStream<Row> dataStream) {
        return fromStreamInternal(dataStream, null, null, ChangelogMode.all());
    }

    @Override
    public <T> void registerDataStream(String name, DataStream<T> dataStream) {
        createTemporaryView(name, dataStream);
    }

    @Override
    public <T> void createTemporaryView(String path, DataStream<T> dataStream) {
        createTemporaryView(path, fromStreamInternal(dataStream, null, path, ChangelogMode.insertOnly()));
    }

    private <T> Table fromStreamInternal(
        DataStream<T> dataStream,
        @Nullable Schema schema,
        @Nullable String viewPath,
        ChangelogMode changelogMode) {
        Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
        Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
        if (dataStream.getExecutionEnvironment() != executionEnvironment) {
            throw new ValidationException(
                "The DataStream's StreamExecutionEnvironment must be identical to the one that "
                    + "has been passed to the StreamTableEnvironment during instantiation.");
        }
        final CatalogManager catalogManager = getCatalogManager();
        final SchemaResolver schemaResolver = catalogManager.getSchemaResolver();
        final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();
        final UnresolvedIdentifier unresolvedIdentifier;
        if (viewPath != null) {
            unresolvedIdentifier = getParser().parseIdentifier(viewPath);
        } else {
            unresolvedIdentifier =
                UnresolvedIdentifier.of("Unregistered_DataStream_Source_" + dataStream.getId());
        }
        final ObjectIdentifier objectIdentifier =
            catalogManager.qualifyIdentifier(unresolvedIdentifier);
        final SchemaTranslator.ConsumingResult schemaTranslationResult =
            SchemaTranslator.createConsumingResult(
                catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
        final ResolvedSchema resolvedSchema =
            schemaTranslationResult.getSchema().resolve(schemaResolver);
        final QueryOperation scanOperation =
            new JavaExternalQueryOperation<>(
                objectIdentifier,
                dataStream,
                schemaTranslationResult.getPhysicalDataType(),
                schemaTranslationResult.isTopLevelRecord(),
                changelogMode,
                resolvedSchema);
        final List<String> projections = schemaTranslationResult.getProjections();
        if (projections == null) {
            return createTable(scanOperation);
        }
        final QueryOperation projectOperation =
            operationTreeBuilder.project(
                projections.stream()
                    .map(ApiExpressionUtils::unresolvedRef)
                    .collect(Collectors.toList()),
                scanOperation);
        return createTable(projectOperation);
    }
}
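The fromStreamInternal logic added here reproduces what Flink 1.14's stock StreamTableEnvironment bridge does for changelog and insert-only streams, so dlink's shared CustomTableEnvironment interface can expose it. For reference, the equivalent calls through the official bridge API look like this; the row values are illustrative, following the pattern in the Flink documentation:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

public class ChangelogSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // A changelog stream carrying explicit row kinds (+I, +U, ...).
        Table table = tableEnv.fromChangelogStream(
            env.fromElements(
                Row.ofKind(RowKind.INSERT, "orders", 1),
                Row.ofKind(RowKind.UPDATE_AFTER, "orders", 2)));
        tableEnv.createTemporaryView("changelog_table", table);
    }
}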
dlink-client/dlink-client-base/src/main/java/com/dlink/constant/ClientConstant.java (new file)
package com.dlink.constant;

/**
 * ClientConstant
 *
 * @author wenmo
 * @since 2022/4/14 23:23
 **/
public final class ClientConstant {

    public static final String METADATA_NAME = "name";
    public static final String METADATA_TYPE = "type";
    public static final String METADATA_URL = "url";
    public static final String METADATA_USERNAME = "username";
    public static final String METADATA_PASSWORD = "password";
}
dlink-client/dlink-client-base/src/main/java/com/dlink/constant/FlinkParamConstant.java

@@ -13,5 +13,5 @@ public final class FlinkParamConstant {
    public static final String USERNAME = "username";
    public static final String PASSWORD = "password";
    public static final String SPLIT = ",";
}
dlink-client/dlink-client-base/src/main/java/com/dlink/executor/CustomTableEnvironment.java
package com.dlink.executor;

import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.types.Row;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * CustomTableEnvironment
 * ...

@@ -53,4 +62,14 @@ public interface CustomTableEnvironment {
    boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment config, Map<String, Object> setMap);

    StatementSet createStatementSet();

    Table fromChangelogStream(DataStream<Row> dataStream);

    <T> void registerDataStream(String name, DataStream<T> dataStream);

    <T> void createTemporaryView(String path, DataStream<T> dataStream);

    Parser getParser();

    Planner getPlanner();
}
dlink-client/dlink-client-base/src/main/java/com/dlink/model/FlinkCDCConfig.java
package com.dlink.model;

import java.util.List;
import java.util.Map;

/**
 * FlinkCDCConfig
 * ...

@@ -19,14 +22,14 @@ public class FlinkCDCConfig {
    private String schema;
    private String table;
    private String startupMode;
    private Map<String, String> sink;
    private List<Schema> schemaList;

    public FlinkCDCConfig() {
    }

    public FlinkCDCConfig(String type, String hostname, int port, String username, String password, int checkpoint, int parallelism, String database, String schema, String table, String startupMode,
                          Map<String, String> sink) {
        this.type = type;
        this.hostname = hostname;
        this.port = port;

@@ -38,8 +41,7 @@ public class FlinkCDCConfig {
        this.schema = schema;
        this.table = table;
        this.startupMode = startupMode;
        this.sink = sink;
    }

    public String getType() {

@@ -122,20 +124,12 @@ public class FlinkCDCConfig {
        this.table = table;
    }

    public Map<String, String> getSink() {
        return sink;
    }

    public void setSink(Map<String, String> sink) {
        this.sink = sink;
    }

    public String getStartupMode() {

@@ -145,4 +139,12 @@ public class FlinkCDCConfig {
    public void setStartupMode(String startupMode) {
        this.startupMode = startupMode;
    }

    public List<Schema> getSchemaList() {
        return schemaList;
    }

    public void setSchemaList(List<Schema> schemaList) {
        this.schemaList = schemaList;
    }
}
dlink-common/src/main/java/com/dlink/model/Schema.java
package com.dlink.model;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import lombok.Getter;
import lombok.Setter;

/**
 * Schema
 * ...

@@ -31,6 +31,10 @@ public class Schema implements Serializable, Comparable<Schema> {
        this.tables = tables;
    }

    public static Schema build(String name) {
        return new Schema(name);
    }

    @Override
    public int compareTo(Schema o) {
        return this.name.compareTo(o.getName());
    ...
dlink-common/src/main/java/com/dlink/model/Table.java
package com.dlink.model;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.dlink.assertion.Asserts;
import com.dlink.utils.SqlUtil;
import lombok.Getter;
import lombok.Setter;

/**
 * Table
 * ...

@@ -43,6 +44,10 @@ public class Table implements Serializable, Comparable<Table> {
        this.columns = columns;
    }

    public String getSchemaTableName() {
        return Asserts.isNullString(schema) ? name : schema + "." + name;
    }

    @Override
    public int compareTo(Table o) {
        return this.name.compareTo(o.getName());

@@ -69,6 +74,55 @@ public class Table implements Serializable, Comparable<Table> {
        return tableWithSql;
    }

    public String getFlinkTableSql(String flinkConfig) {
        StringBuilder sb = new StringBuilder();
        sb.append("CREATE TABLE IF NOT EXISTS " + name + " (\n");
        List<String> pks = new ArrayList<>();
        for (int i = 0; i < columns.size(); i++) {
            String type = columns.get(i).getJavaType().getFlinkType();
            sb.append("    ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + columns.get(i).getName() + "` " + type);
            if (Asserts.isNotNullString(columns.get(i).getComment())) {
                if (columns.get(i).getComment().contains("'") || columns.get(i).getComment().contains("\"")) {
                    sb.append(" COMMENT '" + columns.get(i).getComment().replaceAll("\"|'", "") + "'");
                } else {
                    sb.append(" COMMENT '" + columns.get(i).getComment() + "'");
                }
            }
            sb.append("\n");
            if (columns.get(i).isKeyFlag()) {
                pks.add(columns.get(i).getName());
            }
        }
        StringBuilder pksb = new StringBuilder("PRIMARY KEY ( ");
        for (int i = 0; i < pks.size(); i++) {
            if (i > 0) {
                pksb.append(",");
            }
            pksb.append("`" + pks.get(i) + "`");
        }
        pksb.append(" ) NOT ENFORCED\n");
        if (pks.size() > 0) {
            sb.append("    ,");
            sb.append(pksb);
        }
        sb.append(")");
        if (Asserts.isNotNullString(comment)) {
            if (comment.contains("'") || comment.contains("\"")) {
                sb.append(" COMMENT '" + comment.replaceAll("\"|'", "") + "'\n");
            } else {
                sb.append(" COMMENT '" + comment + "'\n");
            }
        }
        sb.append(" WITH (\n");
        sb.append(getFlinkTableWith(flinkConfig));
        sb.append(")\n");
        return sb.toString();
    }

    public String getFlinkTableSql(String catalogName, String flinkConfig) {
        StringBuilder sb = new StringBuilder("DROP TABLE IF EXISTS ");
        String fullSchemaName = catalogName + "." + schema + "." + name;

@@ -120,7 +174,6 @@ public class Table implements Serializable, Comparable<Table> {
        return sb.toString();
    }

    public String getSqlSelect(String catalogName) {
        StringBuilder sb = new StringBuilder("SELECT\n");
        for (int i = 0; i < columns.size(); i++) {

@@ -146,4 +199,25 @@ public class Table implements Serializable, Comparable<Table> {
        }
        return sb.toString();
    }

    public String getCDCSqlInsertIntoBySourceName(String sourceName, String schemaName, String tableName) {
        StringBuilder sb = new StringBuilder("INSERT INTO ");
        sb.append(name);
        sb.append(" SELECT\n");
        for (int i = 0; i < columns.size(); i++) {
            sb.append("    ");
            if (i > 0) {
                sb.append(",");
            }
            sb.append("`" + columns.get(i).getName() + "` \n");
        }
        sb.append(" FROM ");
        sb.append(sourceName);
        sb.append(" WHERE database_name = '");
        sb.append(schemaName);
        sb.append("' and table_name = '");
        sb.append(tableName);
        sb.append("'");
        return sb.toString();
    }
}
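To make the new getFlinkTableSql(String) concrete: for a hypothetical table inventory.orders with columns id INT (key) and name STRING, and the sink options the builder passes through, the generated DDL is expected to have the shape printed below. The column types and options are placeholders, and getFlinkTableWith is assumed to echo the passed-in config into the WITH clause.

public class FlinkTableSqlSketch {
    public static void main(String[] args) {
        // Assumed output of getFlinkTableSql("'connector' = 'kafka',\n'table-name' = 'inventory.orders'\n"):
        String ddl =
            "CREATE TABLE IF NOT EXISTS orders (\n"
                + "    `id` INT\n"
                + "    ,`name` STRING\n"
                + "    ,PRIMARY KEY ( `id` ) NOT ENFORCED\n"
                + ") WITH (\n"
                + "'connector' = 'kafka',\n"
                + "'table-name' = 'inventory.orders'\n"
                + ")\n";
        System.out.println(ddl);
    }
}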
dlink-core/pom.xml
@@ -47,7 +47,7 @@
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-client-${dlink.flink.version}</artifactId>
            <scope>${scope.runtime}</scope>
        </dependency>
        <dependency>

@@ -57,7 +57,12 @@
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-connector-jdbc-${dlink.flink.version}</artifactId>
            <scope>${scope.runtime}</scope>
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-flink-${dlink.flink.version}</artifactId>
            <scope>${scope.runtime}</scope>
        </dependency>
        <dependency>
        ...
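Together with the dlink-executor change below, dlink-core now resolves its client and connector artifacts through the dlink.flink.version property instead of a hard-coded 1.13 suffix, and pulls the matching dlink-flink-${dlink.flink.version} module for the actual Flink and CDC connector jars at runtime scope.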
dlink-executor/pom.xml
@@ -29,7 +29,12 @@
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-client-${dlink.flink.version}</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-flink-${dlink.flink.version}</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>

@@ -37,6 +42,11 @@
            <artifactId>dlink-function</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.dlink</groupId>
            <artifactId>dlink-metadata-base</artifactId>
            <scope>provided</scope>
        </dependency>
    </dependencies>
</project>
dlink-executor/src/main/java/com/dlink/trans/ddl/CDCSource.java
@@ -30,11 +30,10 @@ public class CDCSource {
    private String schema;
    private String table;
    private String startupMode;
    private Map<String, String> sink;

    public CDCSource(String type, String statement, String name, String hostname, Integer port, String username, String password, Integer checkpoint, Integer parallelism, String startupMode,
                     Map<String, String> sink) {
        this.type = type;
        this.statement = statement;
        this.name = name;

@@ -45,13 +44,22 @@ public class CDCSource {
        this.checkpoint = checkpoint;
        this.parallelism = parallelism;
        this.startupMode = startupMode;
        this.sink = sink;
    }

    public static CDCSource build(String statement) {
        Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
        Map<String, String> config = getKeyValue(map.get("WITH"));
        Map<String, String> sink = new HashMap<>();
        for (Map.Entry<String, String> entry : config.entrySet()) {
            if (entry.getKey().startsWith("sink.")) {
                String key = entry.getKey();
                key = key.replace("sink.", "");
                if (!sink.containsKey(key)) {
                    sink.put(entry.getKey().replace("sink.", ""), entry.getValue());
                }
            }
        }
        CDCSource cdcSource = new CDCSource(
            config.get("type"),
            statement,

@@ -63,8 +71,7 @@ public class CDCSource {
            Integer.valueOf(config.get("checkpoint")),
            Integer.valueOf(config.get("parallelism")),
            config.get("startup"),
            sink
        );
        if (Asserts.isNotNullString(config.get("database"))) {
            cdcSource.setDatabase(config.get("database"));

@@ -186,20 +193,12 @@ public class CDCSource {
        this.table = table;
    }

    public Map<String, String> getSink() {
        return sink;
    }

    public void setSink(Map<String, String> sink) {
        this.sink = sink;
    }

    public String getStartupMode() {
    ...
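A hedged usage sketch: every 'sink.*' option of a CDCSOURCE statement is collected into the sink map with its prefix stripped, which is what KafkaSinkBuilder later reads back as sink.get("brokers") and sink.get("topic"). The statement text below follows dlink's documented CDCSOURCE form, the concrete values are placeholders, and running it assumes dlink-executor is on the classpath.

import com.dlink.trans.ddl.CDCSource;

public class CdcSourceSketch {
    public static void main(String[] args) {
        String statement =
            "EXECUTE CDCSOURCE demo WITH (\n"
                + " 'type' = 'mysql-cdc',\n"
                + " 'hostname' = '127.0.0.1',\n"
                + " 'port' = '3306',\n"
                + " 'username' = 'root',\n"
                + " 'password' = '123456',\n"
                + " 'checkpoint' = '3000',\n"
                + " 'parallelism' = '1',\n"
                + " 'database' = 'inventory',\n"
                + " 'startup' = 'initial',\n"
                + " 'sink.connector' = 'kafka',\n"
                + " 'sink.brokers' = 'kafka:9092'\n"
                + ")";
        CDCSource cdcSource = CDCSource.build(statement);
        // Expected: {connector=kafka, brokers=kafka:9092}
        System.out.println(cdcSource.getSink());
    }
}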
dlink-executor/src/main/java/com/dlink/trans/ddl/CreateCDCSourceOperation.java
package com.dlink.trans.ddl;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.dlink.assertion.Asserts;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.CDCBuilderFactory;
import com.dlink.cdc.SinkBuilderFactory;
import com.dlink.executor.Executor;
import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation;

@@ -40,9 +53,41 @@ public class CreateCDCSourceOperation extends AbstractOperation implements Operation {
        CDCSource cdcSource = CDCSource.build(statement);
        FlinkCDCConfig config = new FlinkCDCConfig(cdcSource.getType(), cdcSource.getHostname(), cdcSource.getPort(), cdcSource.getUsername()
            , cdcSource.getPassword(), cdcSource.getCheckpoint(), cdcSource.getParallelism(), cdcSource.getDatabase(), cdcSource.getSchema()
            , cdcSource.getTable(), cdcSource.getStartupMode(), cdcSource.getSink());
        try {
            CDCBuilder cdcBuilder = CDCBuilderFactory.buildCDCBuilder(config);
            Map<String, Map<String, String>> allConfigMap = cdcBuilder.parseMetaDataConfigs();
            List<Schema> schemaList = new ArrayList<>();
            final List<String> schemaNameList = cdcBuilder.getSchemaList();
            final List<String> tableRegList = cdcBuilder.getTableList();
            for (String schemaName : schemaNameList) {
                Schema schema = Schema.build(schemaName);
                if (!allConfigMap.containsKey(schemaName)) {
                    continue;
                }
                DriverConfig driverConfig = DriverConfig.build(allConfigMap.get(schemaName));
                Driver driver = Driver.build(driverConfig);
                final List<Table> tables = driver.getTablesAndColumns(schemaName);
                for (Table table : tables) {
                    for (String tableReg : tableRegList) {
                        if (table.getSchemaTableName().matches(tableReg) && !schema.getTables().contains(Table.build(table.getName()))) {
                            schema.getTables().add(table);
                            break;
                        }
                    }
                }
                schemaList.add(schema);
            }
            config.setSchemaList(schemaList);
            StreamExecutionEnvironment streamExecutionEnvironment = executor.getStreamExecutionEnvironment();
            if (Asserts.isNotNull(config.getParallelism())) {
                streamExecutionEnvironment.setParallelism(config.getParallelism());
            }
            if (Asserts.isNotNull(config.getCheckpoint())) {
                streamExecutionEnvironment.enableCheckpointing(config.getCheckpoint());
            }
            DataStreamSource<String> streamSource = cdcBuilder.build(streamExecutionEnvironment);
            SinkBuilderFactory.buildSinkBuilder(config).build(cdcBuilder, streamExecutionEnvironment, executor.getCustomTableEnvironment(), streamSource);
        } catch (Exception e) {
            e.printStackTrace();
        }
    ...
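Note that each entry from cdcBuilder.getTableList() is applied as a Java regular expression against the qualified "schema.table" name, so an unescaped dot also matches any character. A hedged one-liner showing the intended escaping:

public class TableRegSketch {
    public static void main(String[] args) {
        // "inventory\\.orders" matches the name produced by getSchemaTableName().
        System.out.println("inventory.orders".matches("inventory\\.orders")); // true
    }
}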
dlink-flink/dlink-flink-1.11/pom.xml (new file)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink-flink</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>dlink-flink-1.11</artifactId>

    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.11.6</flink.version>
        <flinkcdc.version>1.1.0</flinkcdc.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <junit.version>4.12</junit.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-yarn_2.11</artifactId>
            <version>${flink.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-client</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-core</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
    </dependencies>
</project>
dlink-flink/dlink-flink-1.12/pom.xml (new file)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink-flink</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>dlink-flink-1.12</artifactId>

    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.12.7</flink.version>
        <flinkcdc.version>1.3.0</flinkcdc.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <junit.version>4.12</junit.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-yarn_2.11</artifactId>
            <version>${flink.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-client</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-core</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
    </dependencies>
</project>
dlink-flink/dlink-flink-1.13/pom.xml (new file)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink-flink</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>dlink-flink-1.13</artifactId>

    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.13.6</flink.version>
        <flinkcdc.version>2.2.0</flinkcdc.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <junit.version>4.12</junit.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-yarn_2.11</artifactId>
            <version>${flink.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-client</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-core</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-oracle-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
    </dependencies>
</project>
dlink-flink/dlink-flink-1.14/pom.xml (new file)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink-flink</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dlink-flink-1.14</artifactId>
    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.14.4</flink.version>
        <flinkcdc.version>2.2.0</flinkcdc.version>
        <commons.version>1.3.1</commons.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <junit.version>4.12</junit.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-yarn_2.11</artifactId>
            <version>${flink.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-yarn-client</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-core</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-kubernetes_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-oracle-cdc</artifactId>
            <version>${flinkcdc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
            <version>${commons.version}</version>
        </dependency>
    </dependencies>
</project>
\ No newline at end of file
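Taken together, the dependencies in this module cover an end-to-end CDC-to-Kafka pipeline on Flink 1.14: the Ververica connectors read database changelogs and the Kafka connector writes the change events out. The following is a minimal sketch of what they enable, not Dlink's own CDCBuilder/SinkBuilder implementation; the hostname, port, credentials, database/table names, and topic are placeholders.

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;

public class MySqlCdcToKafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // The MySQL CDC source relies on checkpointing to commit binlog offsets.
        env.enableCheckpointing(10_000);

        // Emit each change event as a Debezium-style JSON string (placeholder connection values).
        MySqlSource<String> source = MySqlSource.<String>builder()
                .hostname("127.0.0.1")
                .port(3306)
                .databaseList("test")
                .tableList("test.products")
                .username("root")
                .password("123456")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        // Write the raw JSON change events to a Kafka topic.
        KafkaSink<String> sink = KafkaSink.<String>builder()
                .setBootstrapServers("127.0.0.1:9092")
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic("test.products")
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build())
                .build();

        env.fromSource(source, WatermarkStrategy.noWatermarks(), "MySQL CDC Source")
                .sinkTo(sink);
        env.execute("MySQL CDC to Kafka");
    }
}

Keeping these connectors in per-version dlink-flink-* modules lets each Flink release pin its own matching connector versions instead of forcing one set on the whole build.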
dlink-flink/pom.xml  0 → 100644
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dlink</artifactId>
        <groupId>com.dlink</groupId>
        <version>0.6.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dlink-flink</artifactId>
    <packaging>pom</packaging>
    <modules>
        <module>dlink-flink-1.11</module>
        <module>dlink-flink-1.12</module>
        <module>dlink-flink-1.13</module>
        <module>dlink-flink-1.14</module>
    </modules>
    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>
</project>
\ No newline at end of file
dlink-function/pom.xml
...
@@ -17,6 +17,11 @@
             <artifactId>dlink-client-1.13</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.13</artifactId>
+            <scope>provided</scope>
+        </dependency>
     </dependencies>
 </project>
\ No newline at end of file
dlink-gateway/pom.xml
...
@@ -41,8 +41,18 @@
         </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
-            <artifactId>dlink-client-1.13</artifactId>
-            <scope>provided</scope>
+            <artifactId>dlink-client-${dlink.flink.version}</artifactId>
+            <scope>${scope.runtime}</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-${dlink.flink.version}</artifactId>
+            <scope>${scope.runtime}</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-client-hadoop</artifactId>
+            <scope>${scope.runtime}</scope>
         </dependency>
     </dependencies>
...
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/driver/DriverConfig.java
 package com.dlink.metadata.driver;

+import com.dlink.assertion.Asserts;
 import lombok.Getter;
 import lombok.Setter;
+
+import java.util.Map;

 /**
  * DriverConfig
  *
...
@@ -33,4 +36,10 @@ public class DriverConfig {
         this.username = username;
         this.password = password;
     }
+
+    public static DriverConfig build(Map<String, String> confMap) {
+        Asserts.checkNull(confMap, "数据源配置不能为空");
+        return new DriverConfig(confMap.get("name"), confMap.get("type"), confMap.get("url"),
+                confMap.get("username"), confMap.get("password"));
+    }
 }
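The new build(Map) factory decouples driver construction from any particular configuration format: anything that can produce a flat key/value map can create a DriverConfig. A minimal usage sketch follows; the connection values are hypothetical, and getName() is assumed to come from the class's Lombok @Getter annotation.

import java.util.HashMap;
import java.util.Map;

import com.dlink.metadata.driver.DriverConfig;

public class DriverConfigDemo {
    public static void main(String[] args) {
        // Hypothetical connection values; any flat key/value source can feed this map.
        Map<String, String> confMap = new HashMap<>();
        confMap.put("name", "mysql_test");
        confMap.put("type", "MySQL");
        confMap.put("url", "jdbc:mysql://127.0.0.1:3306/test");
        confMap.put("username", "root");
        confMap.put("password", "123456");

        // build(...) rejects a null map via Asserts.checkNull with the message
        // "数据源配置不能为空" ("the data source configuration must not be empty").
        DriverConfig config = DriverConfig.build(confMap);
        System.out.println(config.getName());
    }
}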
pom.xml
...
@@ -10,6 +10,7 @@
     <version>0.6.2</version>
     <modules>
+        <module>dlink-flink</module>
         <module>dlink-common</module>
         <module>dlink-client</module>
         <module>dlink-gateway</module>
...
@@ -55,6 +56,8 @@
     <junit.version>4.12</junit.version>
     <!-- `provided` for production environment, `compile` for dev environment -->
     <scope.runtime>provided</scope.runtime>
+    <!-- modify flink version -->
+    <dlink.flink.version>1.13</dlink.flink.version>
     <scala.binary.version>2.11</scala.binary.version>
     <protobuf-java.version>2.5.0</protobuf-java.version>
     <jedis.version>2.9.0</jedis.version>
...
@@ -207,6 +210,11 @@
             <artifactId>dlink-client-1.11</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-connector-jdbc-1.11</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-connector-jdbc-1.12</artifactId>
...
@@ -217,6 +225,11 @@
             <artifactId>dlink-connector-jdbc-1.13</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-connector-jdbc-1.14</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-connector-phoenix-1.13</artifactId>
...
@@ -311,7 +324,7 @@
             <groupId>com.dlink</groupId>
             <artifactId>dlink-alert-feishu</artifactId>
             <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-alert-email</artifactId>
...
@@ -327,6 +340,26 @@
             <artifactId>dlink-app-base</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.11</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.12</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.13</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-flink-1.14</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
...