Unverified commit ec3ae8b2 authored by xiaoguaiguai, committed by GitHub

Merge branch 'DataLinkDC:dev' into dev

parents 337e12a3 684d7fe8
......@@ -39,7 +39,7 @@ public class ClusterConfigurationController {
@PutMapping
public Result saveOrUpdate(@RequestBody ClusterConfiguration clusterConfiguration) {
TestResult testResult = clusterConfigurationService.testGateway(clusterConfiguration);
- clusterConfiguration.setAvailable(testResult.isAvailable());
+ clusterConfiguration.setIsAvailable(testResult.isAvailable());
if (clusterConfigurationService.saveOrUpdate(clusterConfiguration)) {
return Result.succeed(Asserts.isNotNull(clusterConfiguration.getId()) ? "修改成功" : "新增成功");
} else {
......
......@@ -35,7 +35,7 @@ public class Cluster extends SuperEntity {
private String note;
- private boolean autoRegisters;
+ private Boolean autoRegisters;
private Integer clusterConfigurationId;
......
......@@ -33,7 +33,7 @@ public class ClusterConfiguration extends SuperEntity {
private String configJson;
- private boolean isAvailable;
+ private Boolean isAvailable;
private String note;
......
......@@ -44,7 +44,7 @@ public class DataBase extends SuperEntity {
private String dbVersion;
- private boolean status;
+ private Boolean status;
private LocalDateTime healthTime;
......
......@@ -44,11 +44,11 @@ public class Task extends SuperEntity {
private Integer parallelism;
- private boolean fragment;
+ private Boolean fragment;
- private boolean statementSet;
+ private Boolean statementSet;
- private boolean batchModel;
+ private Boolean batchModel;
private Integer clusterId;
......
......@@ -38,9 +38,9 @@ public class User implements Serializable {
private String mobile;
- private boolean enabled;
+ private Boolean enabled;
- private boolean isDelete;
+ private Boolean isDelete;
@TableField(fill = FieldFill.INSERT)
private LocalDateTime createTime;
......@@ -49,5 +49,5 @@ public class User implements Serializable {
private LocalDateTime updateTime;
@TableField(exist = false)
- private boolean isAdmin;
+ private Boolean isAdmin;
}
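
Note: the accessor renames throughout this commit (setAvailable → setIsAvailable, isFragment() → getFragment(), setDelete → setIsDelete) follow directly from Lombok's naming rules once these fields become wrapper types, assuming the entities generate their accessors via Lombok (as the SuperEntity subclasses in this repository appear to). A minimal sketch:

```java
import lombok.Getter;
import lombok.Setter;

// For a primitive boolean field named isDelete, Lombok folds in the "is"
// prefix: accessors are isDelete() / setDelete(boolean).
// For a wrapper Boolean field, the field name is kept whole:
// accessors are getIsDelete() / setIsDelete(Boolean).
@Getter
@Setter
class AccessorNamingSketch {
    private boolean fragment;  // isFragment() / setFragment(boolean)
    private Boolean isDelete;  // getIsDelete() / setIsDelete(Boolean)
    private Boolean enabled;   // getEnabled() / setEnabled(Boolean)
}
```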
......@@ -487,7 +487,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private JobConfig buildJobConfig(Task task) {
boolean isJarTask = Dialect.FLINKJAR.equalsVal(task.getDialect());
- if (!isJarTask && task.isFragment()) {
+ if (!isJarTask && task.getFragment()) {
String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
if (Asserts.isNotNullString(flinkWithSql)) {
task.setStatement(flinkWithSql + "\r\n" + task.getStatement());
......
......@@ -33,7 +33,7 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
}
user.setPassword(SaSecureUtil.md5(user.getPassword()));
user.setEnabled(true);
- user.setDelete(false);
+ user.setIsDelete(false);
if (save(user)) {
return Result.succeed("注册成功");
} else {
......@@ -69,7 +69,7 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
public boolean removeUser(Integer id) {
User user = new User();
user.setId(id);
- user.setDelete(true);
+ user.setIsDelete(true);
return updateById(user);
}
......@@ -84,10 +84,10 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
return Result.failed("密码不能为空");
}
if (Asserts.isEquals(SaSecureUtil.md5(password), userPassword)) {
- if (user.isDelete()) {
+ if (user.getIsDelete()) {
return Result.failed("账号不存在");
}
- if (!user.isEnabled()) {
+ if (!user.getEnabled()) {
return Result.failed("账号已被禁用");
}
StpUtil.login(user.getId(), isRemember);
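
One caveat with the move to wrapper types: user.getIsDelete() and !user.getEnabled() auto-unbox, so a NULL column value now surfaces as a NullPointerException at the comparison rather than a failed login. A null-safe variant (illustrative, not what this commit does):

```java
// Boolean.TRUE.equals(...) treats a NULL column as false instead of
// throwing on auto-unboxing.
if (Boolean.TRUE.equals(user.getIsDelete())) {
    return Result.failed("账号不存在");
}
if (!Boolean.TRUE.equals(user.getEnabled())) {
    return Result.failed("账号已被禁用");
}
```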
......@@ -102,7 +102,7 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
public User getUserByUsername(String username) {
User user = getOne(new QueryWrapper<User>().eq("username", username).eq("is_delete", 0));
if (Asserts.isNotNull(user)) {
- user.setAdmin(Asserts.isEqualsIgnoreCase(username, "admin"));
+ user.setIsAdmin(Asserts.isEqualsIgnoreCase(username, "admin"));
}
return user;
}
......
......@@ -136,7 +136,7 @@
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc8</artifactId>
- <scope>test</scope>
+ <scope>provided</scope>
</dependency>
<!-- SQLServer test dependencies -->
<dependency>
......
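
Switching ojdbc8 from test to provided keeps the Oracle driver on the compile classpath (the converter below references oracle.sql.TIMESTAMP directly) without bundling it into the shipped artifact, so the jar must be supplied at runtime, e.g. in Flink's lib/ directory. A small fail-fast check of that assumption (illustrative):

```java
// Fails with a clear message when the 'provided' ojdbc8 jar is missing at runtime.
try {
    Class.forName("oracle.jdbc.OracleDriver");
} catch (ClassNotFoundException e) {
    throw new IllegalStateException(
            "ojdbc8 has scope 'provided'; add the Oracle JDBC jar to the runtime classpath", e);
}
```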
......@@ -18,8 +18,20 @@
package org.apache.flink.connector.jdbc.internal.converter;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
/**
* Runtime converter responsible for converting between JDBC objects and Flink
* internal objects for Oracle.
......@@ -39,4 +51,67 @@ public class OracleRowConverter extends AbstractJdbcRowConverter {
public OracleRowConverter(RowType rowType) {
super(rowType);
}
@Override
protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
switch (type.getTypeRoot()) {
case NULL:
return val -> null;
case BOOLEAN:
case FLOAT:
case DOUBLE:
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
return val -> val;
case TINYINT:
return val -> ((Integer) val).byteValue();
case SMALLINT:
// Converter for SMALLINT: casts the value to int and returns its short value,
// since JDBC 1.0 uses int for small values.
return val -> val instanceof Integer ? ((Integer) val).shortValue() : val;
case INTEGER:
case BIGINT:
return val -> val;
case DECIMAL:
final int precision = ((DecimalType) type).getPrecision();
final int scale = ((DecimalType) type).getScale();
// decimal(20, 0) is used to support the db type bigint unsigned; users should
// define decimal(20, 0) in SQL, but other precisions such as decimal(30, 0)
// work too, as a lenient interpretation.
return val ->
val instanceof BigInteger
? DecimalData.fromBigDecimal(
new BigDecimal((BigInteger) val, 0), precision, scale)
: DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
case DATE:
return val -> (int) (((Date) val).toLocalDate().toEpochDay());
case TIME_WITHOUT_TIME_ZONE:
return val -> (int) (((Time) val).toLocalTime().toNanoOfDay() / 1_000_000L);
case TIMESTAMP_WITH_TIME_ZONE:
case TIMESTAMP_WITHOUT_TIME_ZONE:
return val -> {
    if (val instanceof LocalDateTime) {
        return TimestampData.fromLocalDateTime((LocalDateTime) val);
    } else if (val instanceof oracle.sql.TIMESTAMP) {
        // The Oracle driver returns its own oracle.sql.TIMESTAMP for TIMESTAMP columns.
        return TimestampData.fromTimestamp(
                Timestamp.valueOf(((oracle.sql.TIMESTAMP) val).stringValue()));
    } else {
        return TimestampData.fromTimestamp((Timestamp) val);
    }
};
case CHAR:
case VARCHAR:
return val -> StringData.fromString((String) val);
case BINARY:
case VARBINARY:
return val -> (byte[]) val;
case ARRAY:
case ROW:
case MAP:
case MULTISET:
case RAW:
default:
throw new UnsupportedOperationException("Unsupported type:" + type);
}
}
}
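
The extra instanceof branch in the TIMESTAMP case exists because the Oracle driver can hand back its own oracle.sql.TIMESTAMP rather than java.sql.Timestamp. The same dispatch, factored into a plain helper (a sketch; oracle.sql.TIMESTAMP.timestampValue() would convert directly, while the converter instead round-trips through stringValue()):

```java
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDateTime;

import org.apache.flink.table.data.TimestampData;

// Normalizes whatever the JDBC driver returned into Flink's internal TimestampData.
static TimestampData toTimestampData(Object val) throws SQLException {
    if (val instanceof LocalDateTime) {
        return TimestampData.fromLocalDateTime((LocalDateTime) val);
    }
    if (val instanceof oracle.sql.TIMESTAMP) {
        // stringValue() renders the value in a form Timestamp.valueOf can parse back.
        return TimestampData.fromTimestamp(
                Timestamp.valueOf(((oracle.sql.TIMESTAMP) val).stringValue()));
    }
    return TimestampData.fromTimestamp((Timestamp) val);
}
```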
......@@ -136,7 +136,7 @@
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc8</artifactId>
- <scope>test</scope>
+ <scope>provided</scope>
</dependency>
<!-- SQLServer test dependencies -->
<dependency>
......
......@@ -18,8 +18,20 @@
package org.apache.flink.connector.jdbc.internal.converter;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
/**
* Runtime converter responsible for converting between JDBC objects and Flink
* internal objects for Oracle.
......@@ -39,4 +51,67 @@ public class OracleRowConverter extends AbstractJdbcRowConverter {
public OracleRowConverter(RowType rowType) {
super(rowType);
}
@Override
protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
switch (type.getTypeRoot()) {
case NULL:
return val -> null;
case BOOLEAN:
case FLOAT:
case DOUBLE:
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
return val -> val;
case TINYINT:
return val -> ((Integer) val).byteValue();
case SMALLINT:
// Converter for SMALLINT: casts the value to int and returns its short value,
// since JDBC 1.0 uses int for small values.
return val -> val instanceof Integer ? ((Integer) val).shortValue() : val;
case INTEGER:
case BIGINT:
return val -> val;
case DECIMAL:
final int precision = ((DecimalType) type).getPrecision();
final int scale = ((DecimalType) type).getScale();
// decimal(20, 0) is used to support the db type bigint unsigned; users should
// define decimal(20, 0) in SQL, but other precisions such as decimal(30, 0)
// work too, as a lenient interpretation.
return val ->
val instanceof BigInteger
? DecimalData.fromBigDecimal(
new BigDecimal((BigInteger) val, 0), precision, scale)
: DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
case DATE:
return val -> (int) (((Date) val).toLocalDate().toEpochDay());
case TIME_WITHOUT_TIME_ZONE:
return val -> (int) (((Time) val).toLocalTime().toNanoOfDay() / 1_000_000L);
case TIMESTAMP_WITH_TIME_ZONE:
case TIMESTAMP_WITHOUT_TIME_ZONE:
return val -> {
    if (val instanceof LocalDateTime) {
        return TimestampData.fromLocalDateTime((LocalDateTime) val);
    } else if (val instanceof oracle.sql.TIMESTAMP) {
        // The Oracle driver returns its own oracle.sql.TIMESTAMP for TIMESTAMP columns.
        return TimestampData.fromTimestamp(
                Timestamp.valueOf(((oracle.sql.TIMESTAMP) val).stringValue()));
    } else {
        return TimestampData.fromTimestamp((Timestamp) val);
    }
};
case CHAR:
case VARCHAR:
return val -> StringData.fromString((String) val);
case BINARY:
case VARBINARY:
return val -> (byte[]) val;
case ARRAY:
case ROW:
case MAP:
case MULTISET:
case RAW:
default:
throw new UnsupportedOperationException("Unsupported type:" + type);
}
}
}
......@@ -128,7 +128,7 @@
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc8</artifactId>
- <scope>test</scope>
+ <scope>provided</scope>
</dependency>
<!-- SQLServer test dependencies -->
......
package org.apache.flink.connector.jdbc.internal.converter;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
/**
* Runtime converter responsible for converting between JDBC objects and Flink
* internal objects for Oracle.
......@@ -21,4 +33,67 @@ public class OracleRowConverter extends AbstractJdbcRowConverter {
public OracleRowConverter(RowType rowType) {
super(rowType);
}
@Override
protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
switch (type.getTypeRoot()) {
case NULL:
return val -> null;
case BOOLEAN:
case FLOAT:
case DOUBLE:
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
return val -> val;
case TINYINT:
return val -> ((Integer) val).byteValue();
case SMALLINT:
// Converter for SMALLINT: casts the value to int and returns its short value,
// since JDBC 1.0 uses int for small values.
return val -> val instanceof Integer ? ((Integer) val).shortValue() : val;
case INTEGER:
case BIGINT:
return val -> val;
case DECIMAL:
final int precision = ((DecimalType) type).getPrecision();
final int scale = ((DecimalType) type).getScale();
// decimal(20, 0) is used to support the db type bigint unsigned; users should
// define decimal(20, 0) in SQL, but other precisions such as decimal(30, 0)
// work too, as a lenient interpretation.
return val ->
val instanceof BigInteger
? DecimalData.fromBigDecimal(
new BigDecimal((BigInteger) val, 0), precision, scale)
: DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
case DATE:
return val -> (int) (((Date) val).toLocalDate().toEpochDay());
case TIME_WITHOUT_TIME_ZONE:
return val -> (int) (((Time) val).toLocalTime().toNanoOfDay() / 1_000_000L);
case TIMESTAMP_WITH_TIME_ZONE:
case TIMESTAMP_WITHOUT_TIME_ZONE:
return val -> {
    if (val instanceof LocalDateTime) {
        return TimestampData.fromLocalDateTime((LocalDateTime) val);
    } else if (val instanceof oracle.sql.TIMESTAMP) {
        // The Oracle driver returns its own oracle.sql.TIMESTAMP for TIMESTAMP columns.
        return TimestampData.fromTimestamp(
                Timestamp.valueOf(((oracle.sql.TIMESTAMP) val).stringValue()));
    } else {
        return TimestampData.fromTimestamp((Timestamp) val);
    }
};
case CHAR:
case VARCHAR:
return val -> StringData.fromString((String) val);
case BINARY:
case VARBINARY:
return val -> (byte[]) val;
case ARRAY:
case ROW:
case MAP:
case MULTISET:
case RAW:
default:
throw new UnsupportedOperationException("Unsupported type:" + type);
}
}
}
......@@ -113,7 +113,7 @@
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc8</artifactId>
- <scope>test</scope>
+ <scope>provided</scope>
</dependency>
<!-- SQLServer test dependencies -->
......
package org.apache.flink.connector.jdbc.internal.converter;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
/**
* Runtime converter responsible for converting between JDBC objects and Flink
* internal objects for Oracle.
......@@ -21,4 +33,67 @@ public class OracleRowConverter extends AbstractJdbcRowConverter {
public OracleRowConverter(RowType rowType) {
super(rowType);
}
@Override
protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
switch (type.getTypeRoot()) {
case NULL:
return val -> null;
case BOOLEAN:
case FLOAT:
case DOUBLE:
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
return val -> val;
case TINYINT:
return val -> ((Integer) val).byteValue();
case SMALLINT:
// Converter for SMALLINT: casts the value to int and returns its short value,
// since JDBC 1.0 uses int for small values.
return val -> val instanceof Integer ? ((Integer) val).shortValue() : val;
case INTEGER:
case BIGINT:
return val -> val;
case DECIMAL:
final int precision = ((DecimalType) type).getPrecision();
final int scale = ((DecimalType) type).getScale();
// decimal(20, 0) is used to support the db type bigint unsigned; users should
// define decimal(20, 0) in SQL, but other precisions such as decimal(30, 0)
// work too, as a lenient interpretation.
return val ->
val instanceof BigInteger
? DecimalData.fromBigDecimal(
new BigDecimal((BigInteger) val, 0), precision, scale)
: DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
case DATE:
return val -> (int) (((Date) val).toLocalDate().toEpochDay());
case TIME_WITHOUT_TIME_ZONE:
return val -> (int) (((Time) val).toLocalTime().toNanoOfDay() / 1_000_000L);
case TIMESTAMP_WITH_TIME_ZONE:
case TIMESTAMP_WITHOUT_TIME_ZONE:
return val -> {
    if (val instanceof LocalDateTime) {
        return TimestampData.fromLocalDateTime((LocalDateTime) val);
    } else if (val instanceof oracle.sql.TIMESTAMP) {
        // The Oracle driver returns its own oracle.sql.TIMESTAMP for TIMESTAMP columns.
        return TimestampData.fromTimestamp(
                Timestamp.valueOf(((oracle.sql.TIMESTAMP) val).stringValue()));
    } else {
        return TimestampData.fromTimestamp((Timestamp) val);
    }
};
case CHAR:
case VARCHAR:
return val -> StringData.fromString((String) val);
case BINARY:
case VARBINARY:
return val -> val;
case ARRAY:
case ROW:
case MAP:
case MULTISET:
case RAW:
default:
throw new UnsupportedOperationException("Unsupported type:" + type);
}
}
}
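
For the DATE and TIME_WITHOUT_TIME_ZONE branches above, Flink's internal representations are an int count of days since the epoch and an int count of milliseconds within the day. A quick standalone check of those two conversions (illustrative):

```java
import java.sql.Date;
import java.sql.Time;

public class DateTimeInternalsSketch {
    public static void main(String[] args) {
        // DATE -> days since 1970-01-01
        int epochDay = (int) Date.valueOf("1970-01-02").toLocalDate().toEpochDay();
        // TIME -> milliseconds since midnight
        int millisOfDay = (int) (Time.valueOf("00:00:01").toLocalTime().toNanoOfDay() / 1_000_000L);
        System.out.println(epochDay + " " + millisOfDay); // prints: 1 1000
    }
}
```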
package com.dlink.daemon.task;
+ import java.util.Optional;
+ import java.util.ServiceLoader;
import com.dlink.assertion.Asserts;
import com.dlink.daemon.exception.DaemonTaskException;
- import sun.misc.Service;
- import java.util.Iterator;
- import java.util.Optional;
public interface DaemonTask {
static Optional<DaemonTask> get(DaemonTaskConfig config) {
Asserts.checkNotNull(config, "线程任务配置不能为空");
- Iterator<DaemonTask> providers = Service.providers(DaemonTask.class);
- while (providers.hasNext()) {
-     DaemonTask daemonTask = providers.next();
+ ServiceLoader<DaemonTask> daemonTasks = ServiceLoader.load(DaemonTask.class);
+ for (DaemonTask daemonTask : daemonTasks) {
if (daemonTask.canHandle(config.getType())) {
return Optional.of(daemonTask.setConfig(config));
}
......
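
The DaemonTask change swaps the JDK-internal sun.misc.Service (removed in newer JDKs) for the standard java.util.ServiceLoader. ServiceLoader discovers implementations through a provider-configuration file on the classpath; a minimal sketch (the provider class name is illustrative):

```java
import java.util.ServiceLoader;

import com.dlink.daemon.task.DaemonTask;

public class DaemonTaskDiscoverySketch {
    public static void main(String[] args) {
        // Implementations are listed one fully qualified class name per line in
        // src/main/resources/META-INF/services/com.dlink.daemon.task.DaemonTask
        // e.g.: com.dlink.daemon.task.FlinkJobTask
        ServiceLoader<DaemonTask> tasks = ServiceLoader.load(DaemonTask.class);
        tasks.forEach(t -> System.out.println(t.getClass().getName()));
    }
}
```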