Unverified Commit 84d7797e authored by aiwenmo's avatar aiwenmo Committed by GitHub

[Feature-666][client] Capture column type conversion exception details in CDCSOURCE (#667)

* [Feature-654][web] Add task info tab

* [Feature-666][client] Capture column type conversion exception details in CDCSOURCE
Co-authored-by: 's avatarwenmo <32723967+wenmo@users.noreply.github.com>
parent 11b97adb
......@@ -49,6 +49,7 @@ import com.dlink.model.ColumnType;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.JSONUtil;
/**
* AbstractCDCBuilder
......@@ -118,11 +119,13 @@ public abstract class AbstractSinkBuilder {
protected DataStream<RowData> buildRowData(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
return filterOperator
.flatMap(new FlatMapFunction<Map, RowData>() {
@Override
public void flatMap(Map value, Collector<RowData> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -160,6 +163,10 @@ public abstract class AbstractSinkBuilder {
out.collect(uagenericRowData);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
});
}
......@@ -191,7 +198,7 @@ public abstract class AbstractSinkBuilder {
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList);
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList, table.getSchemaTableName());
addSink(env, rowDataDataStream, table, columnNameList, columnTypeList);
}
......@@ -230,9 +237,9 @@ public abstract class AbstractSinkBuilder {
case JAVA_LANG_DOUBLE:
return new DoubleType();
case DECIMAL:
if(columnType.getPrecision() == null || columnType.getPrecision() == 0){
if (columnType.getPrecision() == null || columnType.getPrecision() == 0) {
return new DecimalType(38, columnType.getScale());
}else{
} else {
return new DecimalType(columnType.getPrecision(), columnType.getScale());
}
case INT:
......@@ -303,13 +310,13 @@ public abstract class AbstractSinkBuilder {
return tableName;
}
protected List<String> getPKList(Table table){
protected List<String> getPKList(Table table) {
List<String> pks = new ArrayList<>();
if(Asserts.isNullCollection(table.getColumns())){
if (Asserts.isNullCollection(table.getColumns())) {
return pks;
}
for(Column column: table.getColumns()){
if(column.isKeyFlag()){
for (Column column : table.getColumns()) {
if (column.isKeyFlag()) {
pks.add(column.getName());
}
}
......
package com.dlink.cdc.sql;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
......@@ -22,7 +12,11 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
......@@ -36,6 +30,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.JSONUtil;
import com.dlink.utils.LogUtil;
/**
* SQLSinkBuilder
*
......@@ -62,7 +68,8 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
private DataStream<Row> buildRow(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
final String[] columnNames = columnNameList.toArray(new String[columnNameList.size()]);
final LogicalType[] columnTypes = columnTypeList.toArray(new LogicalType[columnTypeList.size()]);
......@@ -73,6 +80,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
.flatMap(new FlatMapFunction<Map, Row>() {
@Override
public void flatMap(Map value, Collector<Row> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -106,6 +114,10 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
out.collect(uarow);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
}, rowTypeInfo);
}
......@@ -161,18 +173,19 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
logger.info("Build deserialize successful...");
for (Schema schema : schemaList) {
for (Table table : schema.getTables()) {
final String schemaTableName = table.getSchemaTableName();
try {
SingleOutputStreamOperator<Map> filterOperator = shunt(mapOperator, table, schemaFieldName);
logger.info("Build " + table.getSchemaTableName() + " shunt successful...");
logger.info("Build " + schemaTableName + " shunt successful...");
List<String> columnNameList = new ArrayList<>();
List<LogicalType> columnTypeList = new ArrayList<>();
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList);
logger.info("Build " + table.getSchemaTableName() + " flatMap successful...");
logger.info("Start build " + table.getSchemaTableName() + " sink...");
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList, schemaTableName);
logger.info("Build " + schemaTableName + " flatMap successful...");
logger.info("Start build " + schemaTableName + " sink...");
addTableSink(customTableEnvironment, rowDataDataStream, table, columnNameList);
} catch (Exception e) {
logger.error("Build " + table.getSchemaTableName() + " cdc sync failed...");
logger.error("Build " + schemaTableName + " cdc sync failed...");
logger.error(LogUtil.getError(e));
}
}
......@@ -191,23 +204,23 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
return null;
}
if (logicalType instanceof DateType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return Instant.ofEpochMilli(((Integer) value).longValue()).atZone(ZoneId.systemDefault()).toLocalDate();
}else {
} else {
return Instant.ofEpochMilli((long) value).atZone(ZoneId.systemDefault()).toLocalDate();
}
} else if (logicalType instanceof TimestampType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return Instant.ofEpochMilli(((Integer) value).longValue()).atZone(ZoneId.systemDefault()).toLocalDateTime();
}else {
} else {
return Instant.ofEpochMilli((long) value).atZone(ZoneId.systemDefault()).toLocalDateTime();
}
} else if (logicalType instanceof DecimalType) {
return new BigDecimal((String) value);
} else if (logicalType instanceof BigIntType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return ((Integer) value).longValue();
}else {
} else {
return value;
}
} else {
......
......@@ -49,6 +49,7 @@ import com.dlink.model.ColumnType;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.JSONUtil;
/**
* AbstractCDCBuilder
......@@ -118,11 +119,13 @@ public abstract class AbstractSinkBuilder {
protected DataStream<RowData> buildRowData(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
return filterOperator
.flatMap(new FlatMapFunction<Map, RowData>() {
@Override
public void flatMap(Map value, Collector<RowData> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -160,6 +163,10 @@ public abstract class AbstractSinkBuilder {
out.collect(uagenericRowData);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
});
}
......@@ -191,7 +198,7 @@ public abstract class AbstractSinkBuilder {
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList);
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList, table.getSchemaTableName());
addSink(env, rowDataDataStream, table, columnNameList, columnTypeList);
}
......@@ -230,9 +237,9 @@ public abstract class AbstractSinkBuilder {
case JAVA_LANG_DOUBLE:
return new DoubleType();
case DECIMAL:
if(columnType.getPrecision() == null || columnType.getPrecision() == 0){
if (columnType.getPrecision() == null || columnType.getPrecision() == 0) {
return new DecimalType(38, columnType.getScale());
}else{
} else {
return new DecimalType(columnType.getPrecision(), columnType.getScale());
}
case INT:
......@@ -303,13 +310,13 @@ public abstract class AbstractSinkBuilder {
return tableName;
}
protected List<String> getPKList(Table table){
protected List<String> getPKList(Table table) {
List<String> pks = new ArrayList<>();
if(Asserts.isNullCollection(table.getColumns())){
if (Asserts.isNullCollection(table.getColumns())) {
return pks;
}
for(Column column: table.getColumns()){
if(column.isKeyFlag()){
for (Column column : table.getColumns()) {
if (column.isKeyFlag()) {
pks.add(column.getName());
}
}
......
package com.dlink.cdc.sql;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
......@@ -22,7 +12,11 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
......@@ -36,6 +30,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.JSONUtil;
import com.dlink.utils.LogUtil;
/**
* SQLSinkBuilder
*
......@@ -62,7 +68,8 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
private DataStream<Row> buildRow(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
final String[] columnNames = columnNameList.toArray(new String[columnNameList.size()]);
final LogicalType[] columnTypes = columnTypeList.toArray(new LogicalType[columnTypeList.size()]);
......@@ -73,6 +80,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
.flatMap(new FlatMapFunction<Map, Row>() {
@Override
public void flatMap(Map value, Collector<Row> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -106,6 +114,10 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
out.collect(uarow);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
}, rowTypeInfo);
}
......@@ -161,18 +173,19 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
logger.info("Build deserialize successful...");
for (Schema schema : schemaList) {
for (Table table : schema.getTables()) {
final String schemaTableName = table.getSchemaTableName();
try {
SingleOutputStreamOperator<Map> filterOperator = shunt(mapOperator, table, schemaFieldName);
logger.info("Build " + table.getSchemaTableName() + " shunt successful...");
logger.info("Build " + schemaTableName + " shunt successful...");
List<String> columnNameList = new ArrayList<>();
List<LogicalType> columnTypeList = new ArrayList<>();
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList);
logger.info("Build " + table.getSchemaTableName() + " flatMap successful...");
logger.info("Start build " + table.getSchemaTableName() + " sink...");
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList, schemaTableName);
logger.info("Build " + schemaTableName + " flatMap successful...");
logger.info("Start build " + schemaTableName + " sink...");
addTableSink(customTableEnvironment, rowDataDataStream, table, columnNameList);
} catch (Exception e) {
logger.error("Build " + table.getSchemaTableName() + " cdc sync failed...");
logger.error("Build " + schemaTableName + " cdc sync failed...");
logger.error(LogUtil.getError(e));
}
}
......@@ -191,23 +204,23 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
return null;
}
if (logicalType instanceof DateType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return Instant.ofEpochMilli(((Integer) value).longValue()).atZone(ZoneId.systemDefault()).toLocalDate();
}else {
} else {
return Instant.ofEpochMilli((long) value).atZone(ZoneId.systemDefault()).toLocalDate();
}
} else if (logicalType instanceof TimestampType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return Instant.ofEpochMilli(((Integer) value).longValue()).atZone(ZoneId.systemDefault()).toLocalDateTime();
}else {
} else {
return Instant.ofEpochMilli((long) value).atZone(ZoneId.systemDefault()).toLocalDateTime();
}
} else if (logicalType instanceof DecimalType) {
return new BigDecimal((String) value);
} else if (logicalType instanceof BigIntType) {
if(value instanceof Integer){
if (value instanceof Integer) {
return ((Integer) value).longValue();
}else {
} else {
return value;
}
} else {
......
......@@ -49,6 +49,7 @@ import com.dlink.model.ColumnType;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.JSONUtil;
/**
* AbstractCDCBuilder
......@@ -118,11 +119,13 @@ public abstract class AbstractSinkBuilder {
protected DataStream<RowData> buildRowData(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
return filterOperator
.flatMap(new FlatMapFunction<Map, RowData>() {
@Override
public void flatMap(Map value, Collector<RowData> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -160,6 +163,10 @@ public abstract class AbstractSinkBuilder {
out.collect(uagenericRowData);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
});
}
......@@ -191,7 +198,7 @@ public abstract class AbstractSinkBuilder {
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList);
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList, table.getSchemaTableName());
addSink(env, rowDataDataStream, table, columnNameList, columnTypeList);
}
......@@ -230,9 +237,9 @@ public abstract class AbstractSinkBuilder {
case JAVA_LANG_DOUBLE:
return new DoubleType();
case DECIMAL:
if(columnType.getPrecision() == null || columnType.getPrecision() == 0){
if (columnType.getPrecision() == null || columnType.getPrecision() == 0) {
return new DecimalType(38, columnType.getScale());
}else{
} else {
return new DecimalType(columnType.getPrecision(), columnType.getScale());
}
case INT:
......@@ -303,13 +310,13 @@ public abstract class AbstractSinkBuilder {
return tableName;
}
protected List<String> getPKList(Table table){
protected List<String> getPKList(Table table) {
List<String> pks = new ArrayList<>();
if(Asserts.isNullCollection(table.getColumns())){
if (Asserts.isNullCollection(table.getColumns())) {
return pks;
}
for(Column column: table.getColumns()){
if(column.isKeyFlag()){
for (Column column : table.getColumns()) {
if (column.isKeyFlag()) {
pks.add(column.getName());
}
}
......
package com.dlink.cdc.sql;
import com.dlink.model.*;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
......@@ -16,7 +12,11 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
......@@ -35,7 +35,12 @@ import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.utils.SqlUtil;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.JSONUtil;
import com.dlink.utils.LogUtil;
/**
* SQLSinkBuilder
......@@ -63,7 +68,8 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
private DataStream<Row> buildRow(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
final String[] columnNames = columnNameList.toArray(new String[columnNameList.size()]);
final LogicalType[] columnTypes = columnTypeList.toArray(new LogicalType[columnTypeList.size()]);
......@@ -74,6 +80,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
.flatMap(new FlatMapFunction<Map, Row>() {
@Override
public void flatMap(Map value, Collector<Row> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -107,6 +114,10 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
out.collect(uarow);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
}, rowTypeInfo);
}
......@@ -162,18 +173,19 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
logger.info("Build deserialize successful...");
for (Schema schema : schemaList) {
for (Table table : schema.getTables()) {
final String schemaTableName = table.getSchemaTableName();
try {
SingleOutputStreamOperator<Map> filterOperator = shunt(mapOperator, table, schemaFieldName);
logger.info("Build " + table.getSchemaTableName() + " shunt successful...");
logger.info("Build " + schemaTableName + " shunt successful...");
List<String> columnNameList = new ArrayList<>();
List<LogicalType> columnTypeList = new ArrayList<>();
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList);
logger.info("Build " + table.getSchemaTableName() + " flatMap successful...");
logger.info("Start build " + table.getSchemaTableName() + " sink...");
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList, schemaTableName);
logger.info("Build " + schemaTableName + " flatMap successful...");
logger.info("Start build " + schemaTableName + " sink...");
addTableSink(customTableEnvironment, rowDataDataStream, table, columnNameList);
} catch (Exception e) {
logger.error("Build " + table.getSchemaTableName() + " cdc sync failed...");
logger.error("Build " + schemaTableName + " cdc sync failed...");
logger.error(LogUtil.getError(e));
}
}
......
......@@ -49,6 +49,7 @@ import com.dlink.model.ColumnType;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.JSONUtil;
/**
* AbstractCDCBuilder
......@@ -118,11 +119,13 @@ public abstract class AbstractSinkBuilder {
protected DataStream<RowData> buildRowData(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
return filterOperator
.flatMap(new FlatMapFunction<Map, RowData>() {
@Override
public void flatMap(Map value, Collector<RowData> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -160,6 +163,10 @@ public abstract class AbstractSinkBuilder {
out.collect(uagenericRowData);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
});
}
......@@ -191,7 +198,7 @@ public abstract class AbstractSinkBuilder {
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList);
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList, table.getSchemaTableName());
addSink(env, rowDataDataStream, table, columnNameList, columnTypeList);
}
......@@ -230,9 +237,9 @@ public abstract class AbstractSinkBuilder {
case JAVA_LANG_DOUBLE:
return new DoubleType();
case DECIMAL:
if(columnType.getPrecision() == null || columnType.getPrecision() == 0){
if (columnType.getPrecision() == null || columnType.getPrecision() == 0) {
return new DecimalType(38, columnType.getScale());
}else{
} else {
return new DecimalType(columnType.getPrecision(), columnType.getScale());
}
case INT:
......@@ -303,13 +310,13 @@ public abstract class AbstractSinkBuilder {
return tableName;
}
protected List<String> getPKList(Table table){
protected List<String> getPKList(Table table) {
List<String> pks = new ArrayList<>();
if(Asserts.isNullCollection(table.getColumns())){
if (Asserts.isNullCollection(table.getColumns())) {
return pks;
}
for(Column column: table.getColumns()){
if(column.isKeyFlag()){
for (Column column : table.getColumns()) {
if (column.isKeyFlag()) {
pks.add(column.getName());
}
}
......
package com.dlink.cdc.sql;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
......@@ -22,7 +12,11 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
......@@ -36,6 +30,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.JSONUtil;
import com.dlink.utils.LogUtil;
/**
* SQLSinkBuilder
*
......@@ -62,7 +68,8 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
private DataStream<Row> buildRow(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
final String[] columnNames = columnNameList.toArray(new String[columnNameList.size()]);
final LogicalType[] columnTypes = columnTypeList.toArray(new LogicalType[columnTypeList.size()]);
......@@ -73,6 +80,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
.flatMap(new FlatMapFunction<Map, Row>() {
@Override
public void flatMap(Map value, Collector<Row> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -106,6 +114,10 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
out.collect(uarow);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
}, rowTypeInfo);
}
......@@ -161,18 +173,19 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
logger.info("Build deserialize successful...");
for (Schema schema : schemaList) {
for (Table table : schema.getTables()) {
final String schemaTableName = table.getSchemaTableName();
try {
SingleOutputStreamOperator<Map> filterOperator = shunt(mapOperator, table, schemaFieldName);
logger.info("Build " + table.getSchemaTableName() + " shunt successful...");
logger.info("Build " + schemaTableName + " shunt successful...");
List<String> columnNameList = new ArrayList<>();
List<LogicalType> columnTypeList = new ArrayList<>();
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList);
logger.info("Build " + table.getSchemaTableName() + " flatMap successful...");
logger.info("Start build " + table.getSchemaTableName() + " sink...");
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList, schemaTableName);
logger.info("Build " + schemaTableName + " flatMap successful...");
logger.info("Start build " + schemaTableName + " sink...");
addTableSink(customTableEnvironment, rowDataDataStream, table, columnNameList);
} catch (Exception e) {
logger.error("Build " + table.getSchemaTableName() + " cdc sync failed...");
logger.error("Build " + schemaTableName + " cdc sync failed...");
logger.error(LogUtil.getError(e));
}
}
......
......@@ -49,6 +49,7 @@ import com.dlink.model.ColumnType;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.JSONUtil;
/**
* AbstractCDCBuilder
......@@ -118,11 +119,13 @@ public abstract class AbstractSinkBuilder {
protected DataStream<RowData> buildRowData(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
return filterOperator
.flatMap(new FlatMapFunction<Map, RowData>() {
@Override
public void flatMap(Map value, Collector<RowData> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -160,6 +163,10 @@ public abstract class AbstractSinkBuilder {
out.collect(uagenericRowData);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
});
}
......@@ -191,7 +198,7 @@ public abstract class AbstractSinkBuilder {
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList);
DataStream<RowData> rowDataDataStream = buildRowData(filterOperator, columnNameList, columnTypeList, table.getSchemaTableName());
addSink(env, rowDataDataStream, table, columnNameList, columnTypeList);
}
......@@ -230,9 +237,9 @@ public abstract class AbstractSinkBuilder {
case JAVA_LANG_DOUBLE:
return new DoubleType();
case DECIMAL:
if(columnType.getPrecision() == null || columnType.getPrecision() == 0){
if (columnType.getPrecision() == null || columnType.getPrecision() == 0) {
return new DecimalType(38, columnType.getScale());
}else{
} else {
return new DecimalType(columnType.getPrecision(), columnType.getScale());
}
case INT:
......@@ -303,13 +310,13 @@ public abstract class AbstractSinkBuilder {
return tableName;
}
protected List<String> getPKList(Table table){
protected List<String> getPKList(Table table) {
List<String> pks = new ArrayList<>();
if(Asserts.isNullCollection(table.getColumns())){
if (Asserts.isNullCollection(table.getColumns())) {
return pks;
}
for(Column column: table.getColumns()){
if(column.isKeyFlag()){
for (Column column : table.getColumns()) {
if (column.isKeyFlag()) {
pks.add(column.getName());
}
}
......
package com.dlink.cdc.sql;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
......@@ -22,7 +12,11 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
......@@ -36,6 +30,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.dlink.cdc.AbstractSinkBuilder;
import com.dlink.cdc.CDCBuilder;
import com.dlink.cdc.SinkBuilder;
import com.dlink.executor.CustomTableEnvironment;
import com.dlink.model.FlinkCDCConfig;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.FlinkBaseUtil;
import com.dlink.utils.JSONUtil;
import com.dlink.utils.LogUtil;
/**
* SQLSinkBuilder
*
......@@ -62,7 +68,8 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
private DataStream<Row> buildRow(
SingleOutputStreamOperator<Map> filterOperator,
List<String> columnNameList,
List<LogicalType> columnTypeList) {
List<LogicalType> columnTypeList,
String schemaTableName) {
final String[] columnNames = columnNameList.toArray(new String[columnNameList.size()]);
final LogicalType[] columnTypes = columnTypeList.toArray(new LogicalType[columnTypeList.size()]);
......@@ -73,6 +80,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
.flatMap(new FlatMapFunction<Map, Row>() {
@Override
public void flatMap(Map value, Collector<Row> out) throws Exception {
try {
switch (value.get("op").toString()) {
case "r":
case "c":
......@@ -106,6 +114,10 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
out.collect(uarow);
break;
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
throw e;
}
}
}, rowTypeInfo);
}
......@@ -161,18 +173,19 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
logger.info("Build deserialize successful...");
for (Schema schema : schemaList) {
for (Table table : schema.getTables()) {
final String schemaTableName = table.getSchemaTableName();
try {
SingleOutputStreamOperator<Map> filterOperator = shunt(mapOperator, table, schemaFieldName);
logger.info("Build " + table.getSchemaTableName() + " shunt successful...");
logger.info("Build " + schemaTableName + " shunt successful...");
List<String> columnNameList = new ArrayList<>();
List<LogicalType> columnTypeList = new ArrayList<>();
buildColumn(columnNameList, columnTypeList, table.getColumns());
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList);
logger.info("Build " + table.getSchemaTableName() + " flatMap successful...");
logger.info("Start build " + table.getSchemaTableName() + " sink...");
DataStream<Row> rowDataDataStream = buildRow(filterOperator, columnNameList, columnTypeList, schemaTableName);
logger.info("Build " + schemaTableName + " flatMap successful...");
logger.info("Start build " + schemaTableName + " sink...");
addTableSink(customTableEnvironment, rowDataDataStream, table, columnNameList);
} catch (Exception e) {
logger.error("Build " + table.getSchemaTableName() + " cdc sync failed...");
logger.error("Build " + schemaTableName + " cdc sync failed...");
logger.error(LogUtil.getError(e));
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment