Commit f6d81b81 authored by wenmo

client format

parent 2a471036
@@ -21,10 +21,10 @@ import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 public class FlinkCDCMergeBuilder {
     public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             env.setParallelism(config.getParallelism());
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             env.enableCheckpointing(config.getCheckpoint());
         }
         MySqlSourceBuilder<String> sourceBuilder = MySqlSource.<String>builder()
@@ -32,16 +32,16 @@ public class FlinkCDCMergeBuilder {
                 .port(config.getPort())
                 .username(config.getUsername())
                 .password(config.getPassword());
-        if(Asserts.isNotNull(config.getDatabase())&&config.getDatabase().size()>0){
+        if (Asserts.isNotNull(config.getDatabase()) && config.getDatabase().size() > 0) {
             sourceBuilder.databaseList(config.getDatabase().toArray(new String[0]));
         }
-        if(Asserts.isNotNull(config.getTable())&&config.getTable().size()>0){
+        if (Asserts.isNotNull(config.getTable()) && config.getTable().size() > 0) {
             sourceBuilder.tableList(config.getTable().toArray(new String[0]));
         }
         MySqlSourceBuilder<String> builder = sourceBuilder
                 .deserializer(new JsonDebeziumDeserializationSchema());
-        if(Asserts.isNotNullString(config.getStartupMode())){
-            switch (config.getStartupMode().toUpperCase()){
+        if (Asserts.isNotNullString(config.getStartupMode())) {
+            switch (config.getStartupMode().toUpperCase()) {
                 case "INITIAL":
                     builder.startupOptions(StartupOptions.initial());
                     break;
@@ -54,12 +54,12 @@ public class FlinkCDCMergeBuilder {
                 default:
                     builder.startupOptions(StartupOptions.latest());
             }
-        }else {
+        } else {
             builder.startupOptions(StartupOptions.latest());
         }
         MySqlSource<String> sourceFunction = builder.build();
         DataStreamSource<String> streamSource = env.fromSource(sourceFunction, WatermarkStrategy.noWatermarks(), "MySQL Source");
-        streamSource.addSink(getKafkaProducer(config.getBrokers(),config.getTopic()));
+        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
     }
     private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
......
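For orientation, a minimal driver sketch for the method above; FlinkCDCConfig's no-arg constructor and setters are assumptions mirroring the getters visible in the diff:

    // Hypothetical job entry point wiring a FlinkCDCConfig into buildMySqlCDC.
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        FlinkCDCConfig config = new FlinkCDCConfig();   // no-arg constructor assumed
        config.setHostname("127.0.0.1");                // setters assumed to mirror the getters above
        config.setPort(3306);
        config.setUsername("root");
        config.setPassword("123456");
        config.setStartupMode("initial");               // upper-cased by the switch above
        config.setBrokers("localhost:9092");
        config.setTopic("dlink_cdc");
        FlinkCDCMergeBuilder.buildMySqlCDC(env, config);
        env.execute("MySQL CDC to Kafka");
    }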
@@ -17,8 +17,9 @@ import java.util.Optional;
 /**
  * Customized TableResultImpl
- * @author wenmo
- * @since 2021/6/7 22:06
+ *
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 @Internal
 class CustomTableResultImpl implements TableResult {
@@ -49,12 +50,12 @@ class CustomTableResultImpl implements TableResult {
         this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
     }
-    public static TableResult buildTableResult(List<TableSchemaField> fields,List<Row> rows){
+    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
         Builder builder = builder().resultKind(ResultKind.SUCCESS);
-        if(fields.size()>0) {
+        if (fields.size() > 0) {
             TableSchema.Builder tableSchemaBuild = TableSchema.builder();
             for (int i = 0; i < fields.size(); i++) {
-                tableSchemaBuild.field(fields.get(i).getName(),fields.get(i).getType());
+                tableSchemaBuild.field(fields.get(i).getName(), fields.get(i).getType());
             }
             builder.tableSchema(tableSchemaBuild.build()).data(rows);
         }
@@ -109,7 +110,9 @@ class CustomTableResultImpl implements TableResult {
         return new Builder();
     }
-    /** Builder for creating a {@link CustomTableResultImpl}. */
+    /**
+     * Builder for creating a {@link CustomTableResultImpl}.
+     */
     public static class Builder {
         private JobClient jobClient = null;
         private TableSchema tableSchema = null;
@@ -118,7 +121,8 @@ class CustomTableResultImpl implements TableResult {
         private PrintStyle printStyle =
                 PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false);
-        private Builder() {}
+        private Builder() {
+        }
         /**
          * Specifies job client which associates the submitted Flink job.
@@ -174,20 +178,26 @@ class CustomTableResultImpl implements TableResult {
             return this;
         }
-        /** Specifies print style. Default is {@link TableauStyle} with max integer column width. */
+        /**
+         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
+         */
         public Builder setPrintStyle(PrintStyle printStyle) {
             Preconditions.checkNotNull(printStyle, "printStyle should not be null");
             this.printStyle = printStyle;
             return this;
         }
-        /** Returns a {@link TableResult} instance. */
+        /**
+         * Returns a {@link TableResult} instance.
+         */
         public TableResult build() {
             return new CustomTableResultImpl(jobClient, tableSchema, resultKind, data, printStyle);
         }
     }
-    /** Root interface for all print styles. */
+    /**
+     * Root interface for all print styles.
+     */
     public interface PrintStyle {
         /**
          * Create a tableau print style with given max column width, null column, and a flag to
@@ -211,7 +221,9 @@ class CustomTableResultImpl implements TableResult {
         }
     }
-    /** print the result schema and content as tableau form. */
+    /**
+     * print the result schema and content as tableau form.
+     */
     private static final class TableauStyle implements PrintStyle {
         /**
          * A flag to indicate whether the column width is derived from type (true) or content
@@ -245,5 +257,6 @@ class CustomTableResultImpl implements TableResult {
     /**
      * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
      */
-    private static final class RawContentStyle implements PrintStyle {}
+    private static final class RawContentStyle implements PrintStyle {
+    }
 }
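A hedged usage sketch for buildTableResult; the TableSchemaField (name, type) constructor is an assumption based on the getters used above:

    // Build a two-column, one-row TableResult without running a query.
    List<TableSchemaField> fields = new ArrayList<>();
    fields.add(new TableSchemaField("id", DataTypes.INT()));        // constructor assumed
    fields.add(new TableSchemaField("name", DataTypes.STRING()));
    List<Row> rows = Collections.singletonList(Row.of(1, "dlink"));
    TableResult result = CustomTableResultImpl.buildTableResult(fields, rows);
    result.print();  // rendered by the default TableauStyle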
@@ -3,8 +3,8 @@ package com.dlink.executor;
 import org.apache.flink.table.types.DataType;
 /**
- * @author wenmo
- * @since 2021/6/7 22:06
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 public class TableSchemaField {
     private String name;
......
@@ -17,18 +17,18 @@ import java.util.Optional;
  */
 public class FlinkUtil {
-    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table){
+    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table) {
         Optional<CatalogManager.TableLookupResult> tableOpt = catalogManager.getTable(
                 ObjectIdentifier.of(catalog, database, table)
         );
         if (tableOpt.isPresent()) {
             return Arrays.asList(tableOpt.get().getResolvedSchema().getFieldNames());
-        }else{
+        } else {
             return new ArrayList<String>();
         }
     }
-    public static List<String> catchColumn(TableResult tableResult){
+    public static List<String> catchColumn(TableResult tableResult) {
         return Arrays.asList(tableResult.getTableSchema().getFieldNames());
     }
 }
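Both helpers return column names; a sketch of plausible call sites (the catalog, database, and table names are placeholders):

    // Columns of a registered catalog table, resolved through the CatalogManager.
    List<String> tableColumns = FlinkUtil.getFieldNamesFromCatalogManager(
            catalogManager, "default_catalog", "default_database", "orders");
    // Columns of a query result.
    List<String> resultColumns = FlinkUtil.catchColumn(tableEnv.executeSql("SELECT 1 AS x"));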
@@ -21,10 +21,10 @@ import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 public class FlinkCDCMergeBuilder {
     public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             env.setParallelism(config.getParallelism());
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             env.enableCheckpointing(config.getCheckpoint());
         }
         MySqlSourceBuilder<String> sourceBuilder = MySqlSource.<String>builder()
@@ -32,16 +32,16 @@ public class FlinkCDCMergeBuilder {
                 .port(config.getPort())
                 .username(config.getUsername())
                 .password(config.getPassword());
-        if(Asserts.isNotNull(config.getDatabase())&&config.getDatabase().size()>0){
+        if (Asserts.isNotNull(config.getDatabase()) && config.getDatabase().size() > 0) {
             sourceBuilder.databaseList(config.getDatabase().toArray(new String[0]));
         }
-        if(Asserts.isNotNull(config.getTable())&&config.getTable().size()>0){
+        if (Asserts.isNotNull(config.getTable()) && config.getTable().size() > 0) {
             sourceBuilder.tableList(config.getTable().toArray(new String[0]));
         }
         MySqlSourceBuilder<String> builder = sourceBuilder
                 .deserializer(new JsonDebeziumDeserializationSchema());
-        if(Asserts.isNotNullString(config.getStartupMode())){
-            switch (config.getStartupMode().toUpperCase()){
+        if (Asserts.isNotNullString(config.getStartupMode())) {
+            switch (config.getStartupMode().toUpperCase()) {
                 case "INITIAL":
                     builder.startupOptions(StartupOptions.initial());
                     break;
@@ -54,12 +54,12 @@ public class FlinkCDCMergeBuilder {
                 default:
                     builder.startupOptions(StartupOptions.latest());
             }
-        }else {
+        } else {
             builder.startupOptions(StartupOptions.latest());
         }
         MySqlSource<String> sourceFunction = builder.build();
         DataStreamSource<String> streamSource = env.fromSource(sourceFunction, WatermarkStrategy.noWatermarks(), "MySQL Source");
-        streamSource.addSink(getKafkaProducer(config.getBrokers(),config.getTopic()));
+        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
     }
     private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
......
@@ -2,11 +2,7 @@ package com.dlink.executor;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.core.execution.JobClient;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.ResultKind;
-import org.apache.flink.table.api.TableException;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.TableSchema;
+import org.apache.flink.table.api.*;
 import org.apache.flink.table.utils.PrintUtils;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.CloseableIterator;
@@ -18,17 +14,13 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import java.util.concurrent.*;
 /**
  * Customized CustomTableResultImpl
- * @author wenmo
- * @since 2021/6/7 22:06
+ *
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 @Internal
 class CustomTableResultImpl implements TableResult {
@@ -60,12 +52,12 @@ class CustomTableResultImpl implements TableResult {
         this.printStyle = Preconditions.checkNotNull(printStyle, "printStyle should not be null");
     }
-    public static TableResult buildTableResult(List<TableSchemaField> fields,List<Row> rows){
+    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
         Builder builder = builder().resultKind(ResultKind.SUCCESS);
-        if(fields.size()>0) {
+        if (fields.size() > 0) {
             TableSchema.Builder tableSchemaBuild = TableSchema.builder();
             for (int i = 0; i < fields.size(); i++) {
-                tableSchemaBuild.field(fields.get(i).getName(),fields.get(i).getType());
+                tableSchemaBuild.field(fields.get(i).getName(), fields.get(i).getType());
             }
             builder.tableSchema(tableSchemaBuild.build()).data(rows);
         }
@@ -169,7 +161,9 @@ class CustomTableResultImpl implements TableResult {
         return new Builder();
     }
-    /** Builder for creating a {@link CustomTableResultImpl}. */
+    /**
+     * Builder for creating a {@link CustomTableResultImpl}.
+     */
     public static class Builder {
         private JobClient jobClient = null;
         private TableSchema tableSchema = null;
@@ -178,7 +172,8 @@ class CustomTableResultImpl implements TableResult {
         private PrintStyle printStyle =
                 PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);
-        private Builder() {}
+        private Builder() {
+        }
         /**
          * Specifies job client which associates the submitted Flink job.
@@ -234,20 +229,26 @@ class CustomTableResultImpl implements TableResult {
             return this;
         }
-        /** Specifies print style. Default is {@link TableauStyle} with max integer column width. */
+        /**
+         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
+         */
         public Builder setPrintStyle(PrintStyle printStyle) {
             Preconditions.checkNotNull(printStyle, "printStyle should not be null");
             this.printStyle = printStyle;
             return this;
         }
-        /** Returns a {@link TableResult} instance. */
+        /**
+         * Returns a {@link TableResult} instance.
+         */
        public TableResult build() {
             return new CustomTableResultImpl(jobClient, tableSchema, resultKind, data, printStyle);
         }
     }
-    /** Root interface for all print styles. */
+    /**
+     * Root interface for all print styles.
+     */
     public interface PrintStyle {
         /**
          * Create a tableau print style with given max column width, null column, change mode
@@ -275,7 +276,9 @@ class CustomTableResultImpl implements TableResult {
         }
     }
-    /** print the result schema and content as tableau form. */
+    /**
+     * print the result schema and content as tableau form.
+     */
     private static final class TableauStyle implements PrintStyle {
         /**
          * A flag to indicate whether the column width is derived from type (true) or content
@@ -285,7 +288,9 @@ class CustomTableResultImpl implements TableResult {
         private final int maxColumnWidth;
         private final String nullColumn;
-        /** A flag to indicate whether print row kind info. */
+        /**
+         * A flag to indicate whether print row kind info.
+         */
         private final boolean printRowKind;
         private TableauStyle(
@@ -319,7 +324,8 @@ class CustomTableResultImpl implements TableResult {
     /**
      * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
      */
-    private static final class RawContentStyle implements PrintStyle {}
+    private static final class RawContentStyle implements PrintStyle {
+    }
     /**
      * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
......
@@ -3,8 +3,8 @@ package com.dlink.executor;
 import org.apache.flink.table.types.DataType;
 /**
- * @author wenmo
- * @since 2021/6/7 22:06
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 public class TableSchemaField {
     private String name;
......
@@ -17,18 +17,18 @@ import java.util.Optional;
  */
 public class FlinkUtil {
-    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table){
+    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table) {
         Optional<CatalogManager.TableLookupResult> tableOpt = catalogManager.getTable(
                 ObjectIdentifier.of(catalog, database, table)
         );
         if (tableOpt.isPresent()) {
             return Arrays.asList(tableOpt.get().getResolvedSchema().getFieldNames());
-        }else{
+        } else {
             return new ArrayList<String>();
         }
     }
-    public static List<String> catchColumn(TableResult tableResult){
+    public static List<String> catchColumn(TableResult tableResult) {
         return Arrays.asList(tableResult.getTableSchema().getFieldNames());
     }
 }
@@ -21,10 +21,10 @@ import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 public class FlinkCDCMergeBuilder {
     public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             env.setParallelism(config.getParallelism());
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             env.enableCheckpointing(config.getCheckpoint());
         }
         MySqlSourceBuilder<String> sourceBuilder = MySqlSource.<String>builder()
@@ -32,16 +32,16 @@ public class FlinkCDCMergeBuilder {
                 .port(config.getPort())
                 .username(config.getUsername())
                 .password(config.getPassword());
-        if(Asserts.isNotNull(config.getDatabase())&&config.getDatabase().size()>0){
+        if (Asserts.isNotNull(config.getDatabase()) && config.getDatabase().size() > 0) {
             sourceBuilder.databaseList(config.getDatabase().toArray(new String[0]));
         }
-        if(Asserts.isNotNull(config.getTable())&&config.getTable().size()>0){
+        if (Asserts.isNotNull(config.getTable()) && config.getTable().size() > 0) {
             sourceBuilder.tableList(config.getTable().toArray(new String[0]));
         }
         MySqlSourceBuilder<String> builder = sourceBuilder
                 .deserializer(new JsonDebeziumDeserializationSchema());
-        if(Asserts.isNotNullString(config.getStartupMode())){
-            switch (config.getStartupMode().toUpperCase()){
+        if (Asserts.isNotNullString(config.getStartupMode())) {
+            switch (config.getStartupMode().toUpperCase()) {
                 case "INITIAL":
                     builder.startupOptions(StartupOptions.initial());
                     break;
@@ -54,12 +54,12 @@ public class FlinkCDCMergeBuilder {
                 default:
                     builder.startupOptions(StartupOptions.latest());
             }
-        }else {
+        } else {
             builder.startupOptions(StartupOptions.latest());
         }
         MySqlSource<String> sourceFunction = builder.build();
         DataStreamSource<String> streamSource = env.fromSource(sourceFunction, WatermarkStrategy.noWatermarks(), "MySQL Source");
-        streamSource.addSink(getKafkaProducer(config.getBrokers(),config.getTopic()));
+        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
     }
     private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
......
@@ -235,7 +235,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) {
             String key = setOperation.getKey().get().trim();
             String value = setOperation.getValue().get().trim();
-            if(Asserts.isNullString(key)||Asserts.isNullString(value)){
+            if (Asserts.isNullString(key) || Asserts.isNullString(value)) {
                 return;
             }
             Map<String, String> confMap = new HashMap<>();
@@ -250,7 +250,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
     private void callReset(ResetOperation resetOperation, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
         if (resetOperation.getKey().isPresent()) {
             String key = resetOperation.getKey().get().trim();
-            if(Asserts.isNullString(key)){
+            if (Asserts.isNullString(key)) {
                 return;
             }
             Map<String, String> confMap = new HashMap<>();
......
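callSet and callReset back the SQL SET/RESET support in CustomTableEnvironmentImpl; a hedged sketch of statements that would reach them (the tableEnv handle and the exact quoting accepted by the surrounding parser are assumptions):

    // A SetOperation with both key and value present reaches callSet; the new
    // guard returns early on a blank key or value instead of storing an empty entry.
    tableEnv.executeSql("SET 'table.exec.resource.default-parallelism' = '4'");
    // A ResetOperation with a present key reaches callReset.
    tableEnv.executeSql("RESET 'table.exec.resource.default-parallelism'");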
@@ -17,22 +17,14 @@ import org.apache.flink.util.Preconditions;
 import javax.annotation.Nullable;
 import java.io.PrintWriter;
 import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import java.util.*;
+import java.util.concurrent.*;
 /**
  * Customized CustomTableResultImpl
- * @author wenmo
- * @since 2021/6/7 22:06
+ *
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 @Internal
 public class CustomTableResultImpl implements TableResult {
@@ -68,16 +60,16 @@ public class CustomTableResultImpl implements TableResult {
         Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
     }
-    public static TableResult buildTableResult(List<TableSchemaField> fields,List<Row> rows){
+    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
         Builder builder = builder().resultKind(ResultKind.SUCCESS);
-        if(fields.size()>0) {
+        if (fields.size() > 0) {
             List<String> columnNames = new ArrayList<>();
             List<DataType> columnTypes = new ArrayList<>();
             for (int i = 0; i < fields.size(); i++) {
                 columnNames.add(fields.get(i).getName());
                 columnTypes.add(fields.get(i).getType());
             }
-            builder.schema(ResolvedSchema.physical(columnNames,columnTypes)).data(rows);
+            builder.schema(ResolvedSchema.physical(columnNames, columnTypes)).data(rows);
         }
         return builder.build();
     }
@@ -184,7 +176,9 @@ public class CustomTableResultImpl implements TableResult {
         return new Builder();
     }
-    /** Builder for creating a {@link CustomTableResultImpl}. */
+    /**
+     * Builder for creating a {@link CustomTableResultImpl}.
+     */
     public static class Builder {
         private JobClient jobClient = null;
         private ResolvedSchema resolvedSchema = null;
@@ -194,7 +188,8 @@ public class CustomTableResultImpl implements TableResult {
                 PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);
         private ZoneId sessionTimeZone = ZoneId.of("UTC");
-        private Builder() {}
+        private Builder() {
+        }
         /**
          * Specifies job client which associates the submitted Flink job.
@@ -250,28 +245,36 @@ public class CustomTableResultImpl implements TableResult {
             return this;
         }
-        /** Specifies print style. Default is {@link TableauStyle} with max integer column width. */
+        /**
+         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
+         */
         public Builder setPrintStyle(PrintStyle printStyle) {
             Preconditions.checkNotNull(printStyle, "printStyle should not be null");
             this.printStyle = printStyle;
             return this;
         }
-        /** Specifies session time zone. */
+        /**
+         * Specifies session time zone.
+         */
         public Builder setSessionTimeZone(ZoneId sessionTimeZone) {
             Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
             this.sessionTimeZone = sessionTimeZone;
             return this;
         }
-        /** Returns a {@link TableResult} instance. */
+        /**
+         * Returns a {@link TableResult} instance.
+         */
         public TableResult build() {
             return new CustomTableResultImpl(
                     jobClient, resolvedSchema, resultKind, data, printStyle, sessionTimeZone);
         }
     }
-    /** Root interface for all print styles. */
+    /**
+     * Root interface for all print styles.
+     */
     public interface PrintStyle {
         /**
          * Create a tableau print style with given max column width, null column, change mode
@@ -299,7 +302,9 @@ public class CustomTableResultImpl implements TableResult {
         }
     }
-    /** print the result schema and content as tableau form. */
+    /**
+     * print the result schema and content as tableau form.
+     */
     private static final class TableauStyle implements PrintStyle {
         /**
          * A flag to indicate whether the column width is derived from type (true) or content
@@ -309,7 +314,9 @@ public class CustomTableResultImpl implements TableResult {
         private final int maxColumnWidth;
         private final String nullColumn;
-        /** A flag to indicate whether print row kind info. */
+        /**
+         * A flag to indicate whether print row kind info.
+         */
         private final boolean printRowKind;
         private TableauStyle(
@@ -343,7 +350,8 @@ public class CustomTableResultImpl implements TableResult {
     /**
      * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
     */
-    private static final class RawContentStyle implements PrintStyle {}
+    private static final class RawContentStyle implements PrintStyle {
+    }
     /**
      * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
......
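This variant targets a newer Flink client where TableSchema gives way to ResolvedSchema; a minimal sketch of the schema construction buildTableResult performs internally:

    // ResolvedSchema.physical zips parallel name/type lists into a physical schema.
    List<String> names = Arrays.asList("id", "name");
    List<DataType> types = Arrays.asList(DataTypes.INT(), DataTypes.STRING());
    ResolvedSchema schema = ResolvedSchema.physical(names, types);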
@@ -3,8 +3,8 @@ package com.dlink.executor;
 import org.apache.flink.table.types.DataType;
 /**
- * @author wenmo
- * @since 2021/6/7 22:06
+ * @author wenmo
+ * @since 2021/6/7 22:06
  **/
 public class TableSchemaField {
     private String name;
......
@@ -4,7 +4,9 @@ import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.catalog.CatalogManager;
 import org.apache.flink.table.catalog.ObjectIdentifier;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
 /**
  * FlinkUtil
@@ -14,18 +16,18 @@ import java.util.*;
  */
 public class FlinkUtil {
-    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table){
+    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table) {
         Optional<CatalogManager.TableLookupResult> tableOpt = catalogManager.getTable(
                 ObjectIdentifier.of(catalog, database, table)
         );
         if (tableOpt.isPresent()) {
             return tableOpt.get().getResolvedSchema().getColumnNames();
-        }else{
+        } else {
             return new ArrayList<>();
         }
     }
-    public static List<String> catchColumn(TableResult tableResult){
+    public static List<String> catchColumn(TableResult tableResult) {
         return tableResult.getResolvedSchema().getColumnNames();
     }
 }
@@ -21,10 +21,10 @@ import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 public class FlinkCDCMergeBuilder {
     public static void buildMySqlCDC(StreamExecutionEnvironment env, FlinkCDCConfig config) {
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             env.setParallelism(config.getParallelism());
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             env.enableCheckpointing(config.getCheckpoint());
         }
         MySqlSourceBuilder<String> sourceBuilder = MySqlSource.<String>builder()
@@ -32,16 +32,16 @@ public class FlinkCDCMergeBuilder {
                 .port(config.getPort())
                 .username(config.getUsername())
                 .password(config.getPassword());
-        if(Asserts.isNotNull(config.getDatabase())&&config.getDatabase().size()>0){
+        if (Asserts.isNotNull(config.getDatabase()) && config.getDatabase().size() > 0) {
             sourceBuilder.databaseList(config.getDatabase().toArray(new String[0]));
         }
-        if(Asserts.isNotNull(config.getTable())&&config.getTable().size()>0){
+        if (Asserts.isNotNull(config.getTable()) && config.getTable().size() > 0) {
             sourceBuilder.tableList(config.getTable().toArray(new String[0]));
         }
         MySqlSourceBuilder<String> builder = sourceBuilder
                 .deserializer(new JsonDebeziumDeserializationSchema());
-        if(Asserts.isNotNullString(config.getStartupMode())){
-            switch (config.getStartupMode().toUpperCase()){
+        if (Asserts.isNotNullString(config.getStartupMode())) {
+            switch (config.getStartupMode().toUpperCase()) {
                 case "INITIAL":
                     builder.startupOptions(StartupOptions.initial());
                     break;
@@ -54,12 +54,12 @@ public class FlinkCDCMergeBuilder {
                 default:
                     builder.startupOptions(StartupOptions.latest());
             }
-        }else {
+        } else {
             builder.startupOptions(StartupOptions.latest());
         }
         MySqlSource<String> sourceFunction = builder.build();
         DataStreamSource<String> streamSource = env.fromSource(sourceFunction, WatermarkStrategy.noWatermarks(), "MySQL Source");
-        streamSource.addSink(getKafkaProducer(config.getBrokers(),config.getTopic()));
+        streamSource.addSink(getKafkaProducer(config.getBrokers(), config.getTopic()));
     }
     private static FlinkKafkaProducer<String> getKafkaProducer(String brokers, String topic) {
......
@@ -298,7 +298,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
         if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) {
             String key = setOperation.getKey().get().trim();
             String value = setOperation.getValue().get().trim();
-            if(Asserts.isNullString(key)||Asserts.isNullString(value)){
+            if (Asserts.isNullString(key) || Asserts.isNullString(value)) {
                 return;
             }
             Map<String, String> confMap = new HashMap<>();
@@ -313,7 +313,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements
     private void callReset(ResetOperation resetOperation, StreamExecutionEnvironment environment, Map<String, Object> setMap) {
         if (resetOperation.getKey().isPresent()) {
             String key = resetOperation.getKey().get().trim();
-            if(Asserts.isNullString(key)){
+            if (Asserts.isNullString(key)) {
                 return;
             }
             Map<String, String> confMap = new HashMap<>();
......
@@ -17,22 +17,14 @@ import org.apache.flink.util.Preconditions;
 import javax.annotation.Nullable;
 import java.io.PrintWriter;
 import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import java.util.*;
+import java.util.concurrent.*;
 /**
  * Customized TableResultImpl
- * @author wenmo
- * @since 2021/10/22 10:02
+ *
+ * @author wenmo
+ * @since 2021/10/22 10:02
 **/
 @Internal
 public class CustomTableResultImpl implements TableResult {
@@ -68,16 +60,16 @@ public class CustomTableResultImpl implements TableResult {
         Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
     }
-    public static TableResult buildTableResult(List<TableSchemaField> fields,List<Row> rows){
+    public static TableResult buildTableResult(List<TableSchemaField> fields, List<Row> rows) {
         Builder builder = builder().resultKind(ResultKind.SUCCESS);
-        if(fields.size()>0) {
+        if (fields.size() > 0) {
             List<String> columnNames = new ArrayList<>();
             List<DataType> columnTypes = new ArrayList<>();
             for (int i = 0; i < fields.size(); i++) {
                 columnNames.add(fields.get(i).getName());
                 columnTypes.add(fields.get(i).getType());
             }
-            builder.schema(ResolvedSchema.physical(columnNames,columnTypes)).data(rows);
+            builder.schema(ResolvedSchema.physical(columnNames, columnTypes)).data(rows);
         }
         return builder.build();
     }
@@ -184,7 +176,9 @@ public class CustomTableResultImpl implements TableResult {
         return new Builder();
     }
-    /** Builder for creating a {@link CustomTableResultImpl}. */
+    /**
+     * Builder for creating a {@link CustomTableResultImpl}.
+     */
     public static class Builder {
         private JobClient jobClient = null;
         private ResolvedSchema resolvedSchema = null;
@@ -194,7 +188,8 @@ public class CustomTableResultImpl implements TableResult {
                 PrintStyle.tableau(Integer.MAX_VALUE, PrintUtils.NULL_COLUMN, false, false);
         private ZoneId sessionTimeZone = ZoneId.of("UTC");
-        private Builder() {}
+        private Builder() {
+        }
         /**
          * Specifies job client which associates the submitted Flink job.
@@ -250,28 +245,36 @@ public class CustomTableResultImpl implements TableResult {
             return this;
         }
-        /** Specifies print style. Default is {@link TableauStyle} with max integer column width. */
+        /**
+         * Specifies print style. Default is {@link TableauStyle} with max integer column width.
+         */
         public Builder setPrintStyle(PrintStyle printStyle) {
             Preconditions.checkNotNull(printStyle, "printStyle should not be null");
             this.printStyle = printStyle;
             return this;
         }
-        /** Specifies session time zone. */
+        /**
+         * Specifies session time zone.
+         */
         public Builder setSessionTimeZone(ZoneId sessionTimeZone) {
             Preconditions.checkNotNull(sessionTimeZone, "sessionTimeZone should not be null");
             this.sessionTimeZone = sessionTimeZone;
             return this;
         }
-        /** Returns a {@link TableResult} instance. */
+        /**
+         * Returns a {@link TableResult} instance.
+         */
         public TableResult build() {
             return new CustomTableResultImpl(
                     jobClient, resolvedSchema, resultKind, data, printStyle, sessionTimeZone);
         }
     }
-    /** Root interface for all print styles. */
+    /**
+     * Root interface for all print styles.
+     */
     public interface PrintStyle {
         /**
         * Create a tableau print style with given max column width, null column, change mode
@@ -299,7 +302,9 @@ public class CustomTableResultImpl implements TableResult {
         }
     }
-    /** print the result schema and content as tableau form. */
+    /**
+     * print the result schema and content as tableau form.
+     */
     private static final class TableauStyle implements PrintStyle {
         /**
          * A flag to indicate whether the column width is derived from type (true) or content
@@ -309,7 +314,9 @@ public class CustomTableResultImpl implements TableResult {
         private final int maxColumnWidth;
         private final String nullColumn;
-        /** A flag to indicate whether print row kind info. */
+        /**
+         * A flag to indicate whether print row kind info.
+         */
         private final boolean printRowKind;
         private TableauStyle(
@@ -343,7 +350,8 @@ public class CustomTableResultImpl implements TableResult {
     /**
      * only print the result content as raw form. column delimiter is ",", row delimiter is "\n".
     */
-    private static final class RawContentStyle implements PrintStyle {}
+    private static final class RawContentStyle implements PrintStyle {
+    }
     /**
      * A {@link CloseableIterator} wrapper class that can return whether the first row is ready.
......
@@ -3,8 +3,8 @@ package com.dlink.executor;
 import org.apache.flink.table.types.DataType;
 /**
- * @author wenmo
- * @since 2021/10/22 10:02
+ * @author wenmo
+ * @since 2021/10/22 10:02
  **/
 public class TableSchemaField {
     private String name;
......
@@ -16,19 +16,19 @@ import java.util.Optional;
  */
 public class FlinkUtil {
-    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table){
+    public static List<String> getFieldNamesFromCatalogManager(CatalogManager catalogManager, String catalog, String database, String table) {
         Optional<CatalogManager.TableLookupResult> tableOpt = catalogManager.getTable(
                 ObjectIdentifier.of(catalog, database, table)
         );
         if (tableOpt.isPresent()) {
             return tableOpt.get().getResolvedSchema().getColumnNames();
-        }else{
+        } else {
             return new ArrayList<String>();
         }
     }
-    public static List<String> catchColumn(TableResult tableResult){
+    public static List<String> catchColumn(TableResult tableResult) {
         return tableResult.getResolvedSchema().getColumnNames();
     }
 }
@@ -14,14 +14,14 @@ import java.util.Map;
  */
 public class FlinkBaseUtil {
-    public static Map<String,String> getParamsFromArgs(String[] args){
-        Map<String,String> params = new HashMap<>();
+    public static Map<String, String> getParamsFromArgs(String[] args) {
+        Map<String, String> params = new HashMap<>();
         ParameterTool parameters = ParameterTool.fromArgs(args);
-        params.put(FlinkParamConstant.ID,parameters.get(FlinkParamConstant.ID, null));
-        params.put(FlinkParamConstant.DRIVER,parameters.get(FlinkParamConstant.DRIVER, null));
-        params.put(FlinkParamConstant.URL,parameters.get(FlinkParamConstant.URL, null));
-        params.put(FlinkParamConstant.USERNAME,parameters.get(FlinkParamConstant.USERNAME, null));
-        params.put(FlinkParamConstant.PASSWORD,parameters.get(FlinkParamConstant.PASSWORD, null));
+        params.put(FlinkParamConstant.ID, parameters.get(FlinkParamConstant.ID, null));
+        params.put(FlinkParamConstant.DRIVER, parameters.get(FlinkParamConstant.DRIVER, null));
+        params.put(FlinkParamConstant.URL, parameters.get(FlinkParamConstant.URL, null));
+        params.put(FlinkParamConstant.USERNAME, parameters.get(FlinkParamConstant.USERNAME, null));
+        params.put(FlinkParamConstant.PASSWORD, parameters.get(FlinkParamConstant.PASSWORD, null));
         return params;
     }
 }
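A usage sketch; ParameterTool.fromArgs consumes --key value pairs, so the expected CLI shape is assumed:

    // e.g. args = {"--id", "1", "--url", "jdbc:mysql://127.0.0.1:3306/dlink"}
    Map<String, String> params = FlinkBaseUtil.getParamsFromArgs(args);
    String url = params.get(FlinkParamConstant.URL);  // null when the flag is absent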
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:dubbo="http://dubbo.apache.org/schema/dubbo"
xmlns="http://www.springframework.org/schema/beans" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://dubbo.apache.org/schema/dubbo http://dubbo.apache.org/schema/dubbo/dubbo.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd">
<context:property-placeholder/>
<dubbo:application name="demo-consumer"/>
<dubbo:registry address="zookeeper://${zookeeper.address:127.0.0.1}:2181"/>
<!-- <dubbo:reference id="demoService" check="true" interface="com.dlink.service.DemoService" version="1.0.0"/>-->
</beans>
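A hedged bootstrap sketch for this consumer config; the classpath location is assumed, and demoService is hypothetical while the dubbo:reference above stays commented out:

    // Start the Spring context holding the dubbo:application and dubbo:registry beans.
    ClassPathXmlApplicationContext context =
            new ClassPathXmlApplicationContext("spring/dubbo-demo-consumer.xml");  // path assumed
    context.start();
    // Resolves only once the demoService reference is uncommented.
    DemoService demoService = (DemoService) context.getBean("demoService");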