Unverified Commit 35810a97 authored by aiwenmo, committed by GitHub

[Fix] [admin] Fix job monitoring bugs caused by multi tenancy (#1158)

Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com>
parent 8f5e5b8f
......@@ -19,7 +19,7 @@
package com.dlink.configure;
import com.dlink.context.RequestContext;
import com.dlink.context.TenantContextHolder;
import java.util.List;
......@@ -46,34 +46,21 @@ import net.sf.jsqlparser.expression.NullValue;
public class MybatisPlusConfig {
private static final List<String> IGNORE_TABLE_NAMES = Lists.newArrayList(
"dlink_namespace"
, "dlink_alert_group"
, "dlink_alert_history"
, "dlink_alert_instance"
, "dlink_catalogue"
, "dlink_cluster"
, "dlink_cluster_configuration"
, "dlink_database"
//,"dlink_fragment"
, "dlink_history"
, "dlink_jar"
, "dlink_job_history"
, "dlink_job_instance"
,"dlink_role"
, "dlink_savepoints"
, "dlink_task"
, "dlink_task_statement"
, "dlink_task_version"
);
"dlink_namespace", "dlink_alert_group", "dlink_alert_history", "dlink_alert_instance", "dlink_catalogue",
"dlink_cluster", "dlink_cluster_configuration", "dlink_database"
// ,"dlink_fragment"
, "dlink_history", "dlink_jar", "dlink_job_history", "dlink_job_instance", "dlink_role", "dlink_savepoints",
"dlink_task", "dlink_task_statement", "dlink_task_version");
@Bean
public MybatisPlusInterceptor mybatisPlusInterceptor() {
log.info("mybatis plus interceptor execute");
MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
interceptor.addInnerInterceptor(new TenantLineInnerInterceptor(new TenantLineHandler() {
@Override
public Expression getTenantId() {
Integer tenantId = (Integer) RequestContext.get();
Integer tenantId = (Integer) TenantContextHolder.get();
if (tenantId == null) {
log.warn("request context tenant id is null");
return new NullValue();
......@@ -90,4 +77,4 @@ public class MybatisPlusConfig {
return interceptor;
}
}
\ No newline at end of file
}
......@@ -20,20 +20,21 @@
package com.dlink.context;
/**
* request context
* TenantContextHolder
*/
public class RequestContext {
private static final ThreadLocal<Object> threadLocal = new ThreadLocal<>();
public class TenantContextHolder {
private static final ThreadLocal<Object> TENANT_CONTEXT = new ThreadLocal<>();
public static void set(Object value) {
threadLocal.set(value);
TENANT_CONTEXT.set(value);
}
public static Object get() {
return threadLocal.get();
return TENANT_CONTEXT.get();
}
public static void remove() {
threadLocal.remove();
public static void clear() {
TENANT_CONTEXT.remove();
}
}
\ No newline at end of file
}
......@@ -20,7 +20,6 @@
package com.dlink.init;
import com.dlink.assertion.Asserts;
import com.dlink.context.RequestContext;
import com.dlink.daemon.task.DaemonFactory;
import com.dlink.daemon.task.DaemonTaskConfig;
import com.dlink.job.FlinkJobTask;
......@@ -76,7 +75,6 @@ public class SystemInit implements ApplicationRunner {
List<Tenant> tenants = tenantService.list();
sysConfigService.initSysConfig();
for (Tenant tenant : tenants) {
RequestContext.set(tenant.getId());
taskService.initDefaultFlinkSQLEnv(tenant.getId());
}
initTaskMonitor();
......
......@@ -19,7 +19,7 @@
package com.dlink.interceptor;
import com.dlink.context.RequestContext;
import com.dlink.context.TenantContextHolder;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
......@@ -41,7 +41,7 @@ public class TenantInterceptor implements HandlerInterceptor {
Object handler) throws Exception {
String tenantId = request.getHeader("tenantId");
if (!StringUtils.isNullOrEmpty(tenantId)) {
RequestContext.set(Integer.valueOf(tenantId));
TenantContextHolder.set(Integer.valueOf(tenantId));
}
return HandlerInterceptor.super.preHandle(request, response, handler);
}
......
......@@ -74,7 +74,7 @@ public class FlinkJobTask implements DaemonTask {
preDealTime = System.currentTimeMillis();
JobInstance jobInstance = taskService.refreshJobInstance(config.getId(), false);
if ((!JobStatus.isDone(jobInstance.getStatus())) || (Asserts.isNotNull(jobInstance.getFinishTime())
&& Duration.between(jobInstance.getFinishTime(), LocalDateTime.now()).toMinutes() < 1)) {
&& Duration.between(jobInstance.getFinishTime(), LocalDateTime.now()).toMinutes() < 1)) {
DefaultThreadPool.getInstance().execute(this);
} else {
taskService.handleJobDone(jobInstance);
......
......@@ -24,6 +24,8 @@ import com.dlink.model.JobHistory;
import org.apache.ibatis.annotations.Mapper;
import com.baomidou.mybatisplus.annotation.InterceptorIgnore;
/**
* JobHistoryMapper
*
......@@ -33,6 +35,9 @@ import org.apache.ibatis.annotations.Mapper;
@Mapper
public interface JobHistoryMapper extends SuperMapper<JobHistory> {
@InterceptorIgnore(tenantLine = "true")
JobHistory getByIdWithoutTenant(Integer id);
int insert(JobHistory jobHistory);
}
......@@ -27,6 +27,8 @@ import org.apache.ibatis.annotations.Mapper;
import java.util.List;
import com.baomidou.mybatisplus.annotation.InterceptorIgnore;
/**
* JobInstanceMapper
*
......@@ -36,6 +38,9 @@ import java.util.List;
@Mapper
public interface JobInstanceMapper extends SuperMapper<JobInstance> {
@InterceptorIgnore(tenantLine = "true")
JobInstance getByIdWithoutTenant(Integer id);
List<JobInstanceCount> countStatus();
List<JobInstanceCount> countHistoryStatus();
......
......@@ -27,6 +27,8 @@ import org.apache.ibatis.annotations.Param;
import java.util.List;
import com.baomidou.mybatisplus.annotation.InterceptorIgnore;
/**
* 作业 Mapper 接口
*
......@@ -38,9 +40,11 @@ public interface TaskMapper extends SuperMapper<Task> {
Integer queryAllSizeByName(String name);
List<Task> queryOnLineTaskByDoneStatus(@Param("parentIds") List<Integer> parentIds
, @Param("stepIds") List<Integer> stepIds, @Param("includeNull") boolean includeNull
, @Param("jobStatuses") List<String> jobStatuses);
List<Task> queryOnLineTaskByDoneStatus(@Param("parentIds") List<Integer> parentIds,
@Param("stepIds") List<Integer> stepIds,
@Param("includeNull") boolean includeNull,
@Param("jobStatuses") List<String> jobStatuses);
@InterceptorIgnore(tenantLine = "true")
Task getTaskByNameAndTenantId(@Param("name") String name, @Param("tenantId") Integer tenantId);
}
......@@ -30,9 +30,12 @@ import com.dlink.model.JobHistory;
**/
public interface JobHistoryService extends ISuperService<JobHistory> {
JobHistory getByIdWithoutTenant(Integer id);
JobHistory getJobHistory(Integer id);
JobHistory getJobHistoryInfo(JobHistory jobHistory);
JobHistory refreshJobHistory(Integer id, String jobManagerHost, String jobId, boolean needSave);
}
......@@ -38,6 +38,8 @@ import com.fasterxml.jackson.databind.JsonNode;
*/
public interface JobInstanceService extends ISuperService<JobInstance> {
JobInstance getByIdWithoutTenant(Integer id);
JobInstanceStatus getStatusCount(boolean isHistory);
List<JobInstance> listJobInstanceActive();
......
......@@ -43,6 +43,11 @@ import com.fasterxml.jackson.databind.JsonNode;
@Service
public class JobHistoryServiceImpl extends SuperServiceImpl<JobHistoryMapper, JobHistory> implements JobHistoryService {
@Override
public JobHistory getByIdWithoutTenant(Integer id) {
return baseMapper.getByIdWithoutTenant(id);
}
@Override
public JobHistory getJobHistory(Integer id) {
return getJobHistoryInfo(getById(id));
......@@ -111,11 +116,10 @@ public class JobHistoryServiceImpl extends SuperServiceImpl<JobHistoryMapper, Jo
jobHistory.setCheckpointsConfigJson(JSONUtil.toJsonString(checkPointsConfig));
jobHistory.setConfigJson(JSONUtil.toJsonString(jobsConfig));
if (needSave) {
if (Asserts.isNotNull(getById(id))) {
updateById(jobHistory);
} else {
save(jobHistory);
}
updateById(jobHistory);
/*
* if (Asserts.isNotNull(getById(id))) { updateById(jobHistory); } else { save(jobHistory); }
*/
}
} catch (Exception e) {
e.printStackTrace();
......
......@@ -59,7 +59,9 @@ import com.fasterxml.jackson.databind.ObjectMapper;
* @since 2022/2/2 13:52
*/
@Service
public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper, JobInstance> implements JobInstanceService {
public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper, JobInstance>
implements
JobInstanceService {
@Autowired
private HistoryService historyService;
......@@ -70,6 +72,11 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
@Autowired
private JobHistoryService jobHistoryService;
@Override
public JobInstance getByIdWithoutTenant(Integer id) {
return baseMapper.getByIdWithoutTenant(id);
}
@Override
public JobInstanceStatus getStatusCount(boolean isHistory) {
List<JobInstanceCount> jobInstanceCounts = null;
......@@ -153,7 +160,8 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
history.setConfig(JSONUtil.parseObject(history.getConfigJson()));
jobInfoDetail.setHistory(history);
if (Asserts.isNotNull(history.getClusterConfigurationId())) {
jobInfoDetail.setClusterConfiguration(clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
jobInfoDetail.setClusterConfiguration(
clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
}
return jobInfoDetail;
}
......@@ -174,7 +182,8 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
history.setConfig(JSONUtil.parseObject(history.getConfigJson()));
jobInfoDetail.setHistory(history);
if (Asserts.isNotNull(history) && Asserts.isNotNull(history.getClusterConfigurationId())) {
jobInfoDetail.setClusterConfiguration(clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
jobInfoDetail.setClusterConfiguration(
clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
}
if (pool.exist(key)) {
pool.refresh(jobInfoDetail);
......@@ -219,7 +228,8 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
list.get(i).setDuration(pool.get(list.get(i).getId().toString()).getInstance().getDuration());
}
}
return ProTableResult.<JobInstance>builder().success(true).data(list).total(page.getTotal()).current(current).pageSize(pageSize).build();
return ProTableResult.<JobInstance>builder().success(true).data(list).total(page.getTotal()).current(current)
.pageSize(pageSize).build();
}
}
......@@ -166,7 +166,7 @@ public class StudioServiceImpl implements StudioService {
public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) {
if (Dialect.notFlinkSql(studioExecuteDTO.getDialect())) {
return executeCommonSql(SqlDTO.build(studioExecuteDTO.getStatement(),
studioExecuteDTO.getDatabaseId(), studioExecuteDTO.getMaxRowNum()));
studioExecuteDTO.getDatabaseId(), studioExecuteDTO.getMaxRowNum()));
} else {
return executeFlinkSql(studioExecuteDTO);
}
......@@ -177,7 +177,7 @@ public class StudioServiceImpl implements StudioService {
JobConfig config = studioExecuteDTO.getJobConfig();
buildSession(config);
// To initialize java udf, but it only support local mode.
UDFPath udfPath = udfService.initUDF(studioExecuteDTO.getStatement(), config.getGatewayConfig() == null ? null : config.getGatewayConfig().getType());
UDFPath udfPath = udfService.initUDF(studioExecuteDTO.getStatement(), GatewayType.get(config.getType()));
config.setJarFiles(udfPath.getJarPaths());
config.setPyFiles(udfPath.getPyPaths());
JobManager jobManager = JobManager.build(config);
......@@ -233,7 +233,7 @@ public class StudioServiceImpl implements StudioService {
JobConfig config = studioDDLDTO.getJobConfig();
if (!config.isUseSession()) {
config.setAddress(
clusterService.buildEnvironmentAddress(config.isUseRemote(), studioDDLDTO.getClusterId()));
clusterService.buildEnvironmentAddress(config.isUseRemote(), studioDDLDTO.getClusterId()));
}
JobManager jobManager = JobManager.build(config);
return jobManager.executeDDL(studioDDLDTO.getStatement());
......@@ -252,7 +252,7 @@ public class StudioServiceImpl implements StudioService {
Map<String, ProcessEntity> map = ProcessPool.getInstance().getMap();
Map<String, StringBuilder> map2 = ConsolePool.getInstance().getMap();
ProcessEntity process = ProcessContextHolder.registerProcess(
ProcessEntity.init(ProcessType.FLINKEXPLAIN, SaManager.getStpLogic(null).getLoginIdAsInt(), "admin"));
ProcessEntity.init(ProcessType.FLINKEXPLAIN, SaManager.getStpLogic(null).getLoginIdAsInt(), "admin"));
addFlinkSQLEnv(studioExecuteDTO);
......@@ -269,7 +269,7 @@ public class StudioServiceImpl implements StudioService {
process.start();
JobManager jobManager = JobManager.buildPlanMode(config);
List<SqlExplainResult> sqlExplainResults =
jobManager.explainSql(studioExecuteDTO.getStatement()).getSqlExplainResults();
jobManager.explainSql(studioExecuteDTO.getStatement()).getSqlExplainResults();
process.finish();
return sqlExplainResults;
}
......@@ -343,15 +343,15 @@ public class StudioServiceImpl implements StudioService {
if (sessionDTO.isUseRemote()) {
Cluster cluster = clusterService.getById(sessionDTO.getClusterId());
SessionConfig sessionConfig = SessionConfig.build(
sessionDTO.getType(), true,
cluster.getId(), cluster.getAlias(),
clusterService.buildEnvironmentAddress(true, sessionDTO.getClusterId()));
sessionDTO.getType(), true,
cluster.getId(), cluster.getAlias(),
clusterService.buildEnvironmentAddress(true, sessionDTO.getClusterId()));
return JobManager.createSession(sessionDTO.getSession(), sessionConfig, createUser);
} else {
SessionConfig sessionConfig = SessionConfig.build(
sessionDTO.getType(), false,
null, null,
clusterService.buildEnvironmentAddress(false, null));
sessionDTO.getType(), false,
null, null,
clusterService.buildEnvironmentAddress(false, null));
return JobManager.createSession(sessionDTO.getSession(), sessionConfig, createUser);
}
}
......@@ -369,7 +369,7 @@ public class StudioServiceImpl implements StudioService {
@Override
public LineageResult getLineage(StudioCADTO studioCADTO) {
if (Asserts.isNotNullString(studioCADTO.getDialect())
&& !studioCADTO.getDialect().equalsIgnoreCase("flinksql")) {
&& !studioCADTO.getDialect().equalsIgnoreCase("flinksql")) {
if (Asserts.isNull(studioCADTO.getDatabaseId())) {
return null;
}
......@@ -379,10 +379,10 @@ public class StudioServiceImpl implements StudioService {
}
if (studioCADTO.getDialect().equalsIgnoreCase("doris")) {
return com.dlink.explainer.sqllineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(), "mysql",
dataBase.getDriverConfig());
dataBase.getDriverConfig());
} else {
return com.dlink.explainer.sqllineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(),
studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
}
} else {
addFlinkSQLEnv(studioCADTO);
......@@ -414,7 +414,7 @@ public class StudioServiceImpl implements StudioService {
jobConfig.setAddress(cluster.getJobManagerHost());
if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
Map<String, Object> gatewayConfig =
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
jobConfig.buildGatewayConfig(gatewayConfig);
}
JobManager jobManager = JobManager.build(jobConfig);
......@@ -433,7 +433,7 @@ public class StudioServiceImpl implements StudioService {
// 如果用户选择用dlink平台来托管集群信息 说明任务一定是从dlink发起提交的
if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
Map<String, Object> gatewayConfig =
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
jobConfig.buildGatewayConfig(gatewayConfig);
jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
jobConfig.setTaskId(cluster.getTaskId());
......@@ -492,7 +492,7 @@ public class StudioServiceImpl implements StudioService {
}
for (Catalog catalog : catalogs) {
String statement = FlinkQuery.useCatalog(catalog.getName()) + FlinkQuery.separator()
+ FlinkQuery.showDatabases();
+ FlinkQuery.showDatabases();
studioMetaStoreDTO.setStatement(statement);
IResult tableResult = executeMSFlinkSql(studioMetaStoreDTO);
if (result instanceof DDLResult) {
......@@ -526,7 +526,7 @@ public class StudioServiceImpl implements StudioService {
}
} else {
String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog()) + FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
// show tables
String tableStatement = baseStatement + FlinkQuery.showTables();
studioMetaStoreDTO.setStatement(tableStatement);
......@@ -564,7 +564,7 @@ public class StudioServiceImpl implements StudioService {
// nothing to do
} else {
String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog()) + FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
// desc tables
String tableStatement = baseStatement + FlinkQuery.descTable(studioMetaStoreDTO.getTable());
studioMetaStoreDTO.setStatement(tableStatement);
......@@ -575,12 +575,12 @@ public class StudioServiceImpl implements StudioService {
int i = 1;
for (Map<String, Object> item : rowData) {
FlinkColumn column = FlinkColumn.build(i,
item.get(FlinkQuery.columnName()).toString(),
item.get(FlinkQuery.columnType()).toString(),
item.get(FlinkQuery.columnKey()).toString(),
item.get(FlinkQuery.columnNull()).toString(),
item.get(FlinkQuery.columnExtras()).toString(),
item.get(FlinkQuery.columnWatermark()).toString());
item.get(FlinkQuery.columnName()).toString(),
item.get(FlinkQuery.columnType()).toString(),
item.get(FlinkQuery.columnKey()).toString(),
item.get(FlinkQuery.columnNull()).toString(),
item.get(FlinkQuery.columnExtras()).toString(),
item.get(FlinkQuery.columnWatermark()).toString());
columns.add(column);
i++;
}
......
......@@ -31,6 +31,7 @@ import com.dlink.common.result.Result;
import com.dlink.config.Dialect;
import com.dlink.constant.FlinkRestResultConstant;
import com.dlink.constant.NetConstant;
import com.dlink.context.TenantContextHolder;
import com.dlink.daemon.task.DaemonFactory;
import com.dlink.daemon.task.DaemonTaskConfig;
import com.dlink.db.service.impl.SuperServiceImpl;
......@@ -197,7 +198,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private String buildParas(Integer id) {
return "--id " + id + " --driver " + driver + " --url " + url + " --username " + username + " --password "
+ password;
+ password;
}
@Override
......@@ -206,10 +207,10 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
Asserts.checkNull(task, Tips.TASK_NOT_EXIST);
if (Dialect.notFlinkSql(task.getDialect())) {
return executeCommonSql(SqlDTO.build(task.getStatement(),
task.getDatabaseId(), null));
task.getDatabaseId(), null));
}
JobConfig config = buildJobConfig(task);
UDFPath udfPath = udfService.initUDF(task.getStatement(), config.getGatewayConfig().getType());
UDFPath udfPath = udfService.initUDF(task.getStatement(), GatewayType.get(config.getType()));
config.setJarFiles(udfPath.getJarPaths());
config.setPyFiles(udfPath.getPyPaths());
JobManager jobManager = JobManager.build(config);
......@@ -227,7 +228,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
task.setStep(JobLifeCycle.ONLINE.getValue());
if (Dialect.notFlinkSql(task.getDialect())) {
return executeCommonSql(SqlDTO.build(task.getStatement(),
task.getDatabaseId(), null));
task.getDatabaseId(), null));
}
JobConfig config = buildJobConfig(task);
JobManager jobManager = JobManager.build(config);
......@@ -247,7 +248,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
}
if (Dialect.notFlinkSql(task.getDialect())) {
return executeCommonSql(SqlDTO.build(task.getStatement(),
task.getDatabaseId(), null));
task.getDatabaseId(), null));
}
if (StringUtils.isBlank(savePointPath)) {
task.setSavePointStrategy(SavePointStrategy.LATEST.getValue());
......@@ -371,14 +372,15 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
Map<String, String> config = task.getConfig().get(0);
UDFTemplate template = udfTemplateService.getById(config.get("templateId"));
if (template != null) {
String code = UDFUtil.templateParse(task.getDialect(), template.getTemplateCode(), config.get("className"));
String code = UDFUtil.templateParse(task.getDialect(), template.getTemplateCode(),
config.get("className"));
task.setStatement(code);
}
}
}
// to compiler udf
if (Asserts.isNotNullString(task.getDialect()) && Dialect.JAVA.equalsVal(task.getDialect())
&& Asserts.isNotNullString(task.getStatement())) {
&& Asserts.isNotNullString(task.getStatement())) {
CustomStringJavaCompiler compiler = new CustomStringJavaCompiler(task.getStatement());
task.setSavePointPath(compiler.getFullClassName());
} else if (Dialect.PYTHON.equalsVal(task.getDialect())) {
......@@ -392,8 +394,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
Task taskInfo = getById(task.getId());
Assert.check(taskInfo);
if (JobLifeCycle.RELEASE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) {
|| JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) {
throw new BusException("该作业已" + JobLifeCycle.get(taskInfo.getStep()).getLabel() + ",禁止修改!");
}
task.setStep(JobLifeCycle.DEVELOP.getValue());
......@@ -441,6 +443,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
Task defaultFlinkSQLEnvTask = getTaskByNameAndTenantId(name, tenantId);
if (null == defaultFlinkSQLEnvTask) {
defaultFlinkSQLEnvTask = new Task();
} else {
return defaultFlinkSQLEnvTask;
}
// defaultFlinkSQLEnvTask.setId(1);
defaultFlinkSQLEnvTask.setName(name);
......@@ -502,7 +506,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
@Override
public Task getUDFByClassName(String className) {
Task task = getOne(
new QueryWrapper<Task>().in("dialect", Dialect.JAVA, Dialect.SCALA, Dialect.PYTHON).eq("enabled", 1).eq("save_point_path", className));
new QueryWrapper<Task>().in("dialect", Dialect.JAVA, Dialect.SCALA, Dialect.PYTHON).eq("enabled", 1)
.eq("save_point_path", className));
Assert.check(task);
task.setStatement(statementService.getById(task.getId()).getStatement());
return task;
......@@ -511,7 +516,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
@Override
public List<Task> getAllUDF() {
List<Task> tasks =
list(new QueryWrapper<Task>().in("dialect", Dialect.JAVA, Dialect.SCALA, Dialect.PYTHON).eq("enabled", 1).isNotNull("save_point_path"));
list(new QueryWrapper<Task>().in("dialect", Dialect.JAVA, Dialect.SCALA, Dialect.PYTHON)
.eq("enabled", 1).isNotNull("save_point_path"));
return tasks.stream().peek(task -> {
Assert.check(task);
task.setStatement(statementService.getById(task.getId()).getStatement());
......@@ -548,7 +554,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
List<TaskVersion> taskVersions = taskVersionService.getTaskVersionByTaskId(task.getId());
List<Integer> versionIds = taskVersions.stream().map(TaskVersion::getVersionId).collect(Collectors.toList());
Map<Integer, TaskVersion> versionMap =
taskVersions.stream().collect(Collectors.toMap(TaskVersion::getVersionId, t -> t));
taskVersions.stream().collect(Collectors.toMap(TaskVersion::getVersionId, t -> t));
TaskVersion taskVersion = new TaskVersion();
BeanUtil.copyProperties(task, taskVersion);
......@@ -587,14 +593,14 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
Task taskInfo = getTaskInfoById(dto.getId());
if (JobLifeCycle.RELEASE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) {
|| JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep())
|| JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) {
// throw new BusException("该作业已" + JobLifeCycle.get(taskInfo.getStep()).getLabel() + ",禁止回滚!");
return Result.failed("该作业已" + JobLifeCycle.get(taskInfo.getStep()).getLabel() + ",禁止回滚!");
}
LambdaQueryWrapper<TaskVersion> queryWrapper = new LambdaQueryWrapper<TaskVersion>()
.eq(TaskVersion::getTaskId, dto.getId()).eq(TaskVersion::getVersionId, dto.getVersionId());
.eq(TaskVersion::getTaskId, dto.getId()).eq(TaskVersion::getVersionId, dto.getVersionId());
TaskVersion taskVersion = taskVersionService.getOne(queryWrapper);
......@@ -741,7 +747,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
}
if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
Map<String, Object> gatewayConfig =
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
jobConfig.buildGatewayConfig(gatewayConfig);
jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
useGateway = true;
......@@ -777,7 +783,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private JobConfig buildJobConfig(Task task) {
boolean isJarTask = Dialect.FLINKJAR.equalsVal(task.getDialect())
|| Dialect.KUBERNETES_APPLICATION.equalsVal(task.getDialect());
|| Dialect.KUBERNETES_APPLICATION.equalsVal(task.getDialect());
if (!isJarTask && Asserts.isNotNull(task.getFragment()) ? task.getFragment() : false) {
String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
if (Asserts.isNotNullString(flinkWithSql)) {
......@@ -797,20 +803,20 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
}
// support custom K8s app submit, rather than clusterConfiguration
else if (Dialect.KUBERNETES_APPLICATION.equalsVal(task.getDialect())
&& GatewayType.KUBERNETES_APPLICATION.equalsValue(config.getType())) {
&& GatewayType.KUBERNETES_APPLICATION.equalsValue(config.getType())) {
Map<String, Object> gatewayConfig = JSONUtil.toMap(task.getStatement(), String.class, Object.class);
config.buildGatewayConfig(gatewayConfig);
} else {
Map<String, Object> gatewayConfig =
clusterConfigurationService.getGatewayConfig(task.getClusterConfigurationId());
clusterConfigurationService.getGatewayConfig(task.getClusterConfigurationId());
// submit application type with clusterConfiguration
if (GatewayType.YARN_APPLICATION.equalsValue(config.getType())
|| GatewayType.KUBERNETES_APPLICATION.equalsValue(config.getType())) {
|| GatewayType.KUBERNETES_APPLICATION.equalsValue(config.getType())) {
if (!isJarTask) {
SystemConfiguration systemConfiguration = SystemConfiguration.getInstances();
gatewayConfig.put("userJarPath", systemConfiguration.getSqlSubmitJarPath());
gatewayConfig.put("userJarParas",
systemConfiguration.getSqlSubmitJarParas() + buildParas(config.getTaskId()));
systemConfiguration.getSqlSubmitJarParas() + buildParas(config.getTaskId()));
gatewayConfig.put("userJarMainAppClass", systemConfiguration.getSqlSubmitJarMainAppClass());
} else {
Jar jar = jarService.getById(task.getJarId());
......@@ -858,8 +864,9 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
jobInfoDetail = pool.get(key);
} else {
jobInfoDetail = new JobInfoDetail(id);
JobInstance jobInstance = jobInstanceService.getById(id);
JobInstance jobInstance = jobInstanceService.getByIdWithoutTenant(id);
Asserts.checkNull(jobInstance, "该任务实例不存在");
TenantContextHolder.set(jobInstance.getTenantId());
jobInfoDetail.setInstance(jobInstance);
Cluster cluster = clusterService.getById(jobInstance.getClusterId());
jobInfoDetail.setCluster(cluster);
......@@ -867,7 +874,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
history.setConfig(JSONUtil.parseObject(history.getConfigJson()));
if (Asserts.isNotNull(history) && Asserts.isNotNull(history.getClusterConfigurationId())) {
jobInfoDetail.setClusterConfiguration(
clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
clusterConfigurationService.getClusterConfigById(history.getClusterConfigurationId()));
}
jobInfoDetail.setHistory(history);
jobInfoDetail.setJobHistory(jobHistoryService.getJobHistory(id));
......@@ -877,12 +884,12 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
return jobInfoDetail.getInstance();
}
JobHistory jobHistoryJson =
jobHistoryService.refreshJobHistory(id, jobInfoDetail.getCluster().getJobManagerHost(),
jobInfoDetail.getInstance().getJid(), jobInfoDetail.isNeedSave());
jobHistoryService.refreshJobHistory(id, jobInfoDetail.getCluster().getJobManagerHost(),
jobInfoDetail.getInstance().getJid(), jobInfoDetail.isNeedSave());
JobHistory jobHistory = jobHistoryService.getJobHistoryInfo(jobHistoryJson);
jobInfoDetail.setJobHistory(jobHistory);
if (JobStatus.isDone(jobInfoDetail.getInstance().getStatus())
&& (Asserts.isNull(jobHistory.getJob()) || jobHistory.isError())) {
&& (Asserts.isNull(jobHistory.getJob()) || jobHistory.isError())) {
return jobInfoDetail.getInstance();
}
String status = jobInfoDetail.getInstance().getStatus();
......@@ -891,12 +898,12 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
jobInfoDetail.getInstance().setStatus(JobStatus.UNKNOWN.getValue());
} else {
jobInfoDetail.getInstance().setDuration(
jobInfoDetail.getJobHistory().getJob().get(FlinkRestResultConstant.JOB_DURATION).asLong() / 1000);
jobInfoDetail.getJobHistory().getJob().get(FlinkRestResultConstant.JOB_DURATION).asLong() / 1000);
jobInfoDetail.getInstance()
.setStatus(jobInfoDetail.getJobHistory().getJob().get(FlinkRestResultConstant.JOB_STATE).asText());
.setStatus(jobInfoDetail.getJobHistory().getJob().get(FlinkRestResultConstant.JOB_STATE).asText());
}
if (JobStatus.isDone(jobInfoDetail.getInstance().getStatus())
&& !status.equals(jobInfoDetail.getInstance().getStatus())) {
&& !status.equals(jobInfoDetail.getInstance().getStatus())) {
jobStatusChanged = true;
jobInfoDetail.getInstance().setFinishTime(LocalDateTime.now());
// handleJobDone(jobInfoDetail.getInstance());
......@@ -913,7 +920,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private boolean inRefreshPlan(JobInstance jobInstance) {
if ((!JobStatus.isDone(jobInstance.getStatus())) || (Asserts.isNotNull(jobInstance.getFinishTime())
&& Duration.between(jobInstance.getFinishTime(), LocalDateTime.now()).toMinutes() < 1)) {
&& Duration.between(jobInstance.getFinishTime(), LocalDateTime.now()).toMinutes() < 1)) {
return true;
} else {
return false;
......@@ -960,9 +967,9 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
// clusterConfigurationName
if (Asserts.isNotNull(task.getClusterConfigurationId())) {
ClusterConfiguration clusterConfiguration =
clusterConfigurationService.getById(task.getClusterConfigurationId());
clusterConfigurationService.getById(task.getClusterConfigurationId());
((ObjectNode) jsonNode).put("clusterConfigurationName",
Asserts.isNotNull(clusterConfiguration) ? clusterConfiguration.getName() : null);
Asserts.isNotNull(clusterConfiguration) ? clusterConfiguration.getName() : null);
}
// databaseName
if (Asserts.isNotNull(task.getDatabaseId())) {
......@@ -1038,15 +1045,15 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
}
if (Asserts.isNotNull(task.getClusterConfigurationName())) {
ClusterConfiguration clusterConfiguration = clusterConfigurationService
.getOne(new QueryWrapper<ClusterConfiguration>().eq("name",
task.getClusterConfigurationName()));
.getOne(new QueryWrapper<ClusterConfiguration>().eq("name",
task.getClusterConfigurationName()));
if (Asserts.isNotNull(clusterConfiguration)) {
task.setClusterConfigurationId(clusterConfiguration.getId());
}
}
if (Asserts.isNotNull(task.getDatabaseName())) {
DataBase dataBase =
dataBaseService.getOne(new QueryWrapper<DataBase>().eq("name", task.getDatabaseName()));
dataBaseService.getOne(new QueryWrapper<DataBase>().eq("name", task.getDatabaseName()));
if (Asserts.isNotNull(dataBase)) {
task.setDatabaseId(dataBase.getId());
}
......@@ -1063,7 +1070,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
*/
if (Asserts.isNotNull(task.getAlertGroupName())) {
AlertGroup alertGroup =
alertGroupService.getOne(new QueryWrapper<AlertGroup>().eq("name", task.getAlertGroupName()));
alertGroupService.getOne(new QueryWrapper<AlertGroup>().eq("name", task.getAlertGroupName()));
if (Asserts.isNotNull(alertGroup)) {
task.setAlertGroupId(alertGroup.getId());
}
......@@ -1099,7 +1106,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
return Result.failed("一共" + jsonNodes.size() + "个作业,全部导入失败");
} else if (errorNumber > 0) {
return Result.failed("一共" + jsonNodes.size() + "个作业,其中成功导入" + (jsonNode.size() - errorNumber) + "个,失败"
+ errorNumber + "个");
+ errorNumber + "个");
}
return Result.succeed("成功导入" + jsonNodes.size() + "个作业");
}
......@@ -1150,7 +1157,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
long minutes = ChronoUnit.MINUTES.between(startTime, endTime);
long seconds = ChronoUnit.SECONDS.between(startTime, endTime);
String duration = days + "天 " + (hours - (days * 24)) + "小时 " + (minutes - (hours * 60)) + "分 "
+ (seconds - (minutes * 60)) + "秒";
+ (seconds - (minutes * 60)) + "秒";
return duration;
}
......@@ -1230,7 +1237,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private void sendAlert(AlertInstance alertInstance, JobInstance jobInstance, Task task, AlertMsg alertMsg) {
AlertConfig alertConfig = AlertConfig.build(alertInstance.getName(), alertInstance.getType(),
JSONUtil.toMap(alertInstance.getParams()));
JSONUtil.toMap(alertInstance.getParams()));
Alert alert = Alert.build(alertConfig);
String title = "任务【" + task.getAlias() + "】:" + jobInstance.getStatus();
String content = alertMsg.toString();
......@@ -1249,10 +1256,10 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
@Override
public Result queryAllCatalogue() {
final LambdaQueryWrapper<Catalogue> queryWrapper = new LambdaQueryWrapper<Catalogue>()
.select(Catalogue::getId, Catalogue::getName, Catalogue::getParentId)
.eq(Catalogue::getIsLeaf, 0)
.eq(Catalogue::getEnabled, 1)
.isNull(Catalogue::getTaskId);
.select(Catalogue::getId, Catalogue::getName, Catalogue::getParentId)
.eq(Catalogue::getIsLeaf, 0)
.eq(Catalogue::getEnabled, 1)
.isNull(Catalogue::getTaskId);
final List<Catalogue> catalogueList = catalogueService.list(queryWrapper);
return Result.succeed(TreeUtil.build(dealWithCatalogue(catalogueList), -1).get(0));
}
......@@ -1278,7 +1285,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
public Result<List<Task>> queryOnLineTaskByDoneStatus(List<JobLifeCycle> jobLifeCycle, List<JobStatus> jobStatuses,
boolean includeNull, Integer catalogueId) {
final Tree<Integer> node = ((Tree<Integer>) queryAllCatalogue().getDatas())
.getNode(Objects.isNull(catalogueId) ? 0 : catalogueId);
.getNode(Objects.isNull(catalogueId) ? 0 : catalogueId);
final List<Integer> parentIds = new ArrayList<>(0);
parentIds.add(node.getId());
childrenNodeParse(node, parentIds);
......@@ -1289,8 +1296,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private List<Task> getTasks(List<JobLifeCycle> jobLifeCycle, List<JobStatus> jobStatuses, boolean includeNull,
List<Integer> parentIds) {
return this.baseMapper.queryOnLineTaskByDoneStatus(parentIds,
jobLifeCycle.stream().filter(Objects::nonNull).map(JobLifeCycle::getValue).collect(Collectors.toList()),
includeNull, jobStatuses.stream().map(JobStatus::name).collect(Collectors.toList()));
jobLifeCycle.stream().filter(Objects::nonNull).map(JobLifeCycle::getValue).collect(Collectors.toList()),
includeNull, jobStatuses.stream().map(JobStatus::name).collect(Collectors.toList()));
}
private void childrenNodeParse(Tree<Integer> node, List<Integer> parentIds) {
......@@ -1310,7 +1317,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
@Override
public void selectSavepointOnLineTask(TaskOperatingResult taskOperatingResult) {
final JobInstance jobInstanceByTaskId =
jobInstanceService.getJobInstanceByTaskId(taskOperatingResult.getTask().getId());
jobInstanceService.getJobInstanceByTaskId(taskOperatingResult.getTask().getId());
if (jobInstanceByTaskId == null) {
startGoingLiveTask(taskOperatingResult, null);
return;
......@@ -1329,8 +1336,8 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
private void findTheConditionSavePointToOnline(TaskOperatingResult taskOperatingResult,
JobInstance jobInstanceByTaskId) {
final LambdaQueryWrapper<JobHistory> queryWrapper = new LambdaQueryWrapper<JobHistory>()
.select(JobHistory::getId, JobHistory::getCheckpointsJson)
.eq(JobHistory::getId, jobInstanceByTaskId.getId());
.select(JobHistory::getId, JobHistory::getCheckpointsJson)
.eq(JobHistory::getId, jobInstanceByTaskId.getId());
final JobHistory jobHistory = jobHistoryService.getOne(queryWrapper);
if (jobHistory != null && StringUtils.isNotBlank(jobHistory.getCheckpointsJson())) {
final ObjectNode jsonNodes = JSONUtil.parseObject(jobHistory.getCheckpointsJson());
......
......@@ -21,7 +21,7 @@ package com.dlink.service.impl;
import com.dlink.assertion.Asserts;
import com.dlink.common.result.Result;
import com.dlink.context.RequestContext;
import com.dlink.context.TenantContextHolder;
import com.dlink.db.service.impl.SuperServiceImpl;
import com.dlink.mapper.TenantMapper;
import com.dlink.model.Namespace;
......@@ -66,7 +66,7 @@ public class TenantServiceImpl extends SuperServiceImpl<TenantMapper, Tenant> im
}
tenant.setIsDelete(false);
if (save(tenant)) {
RequestContext.set(tenant.getId());
TenantContextHolder.set(tenant.getId());
return Result.succeed("新增成功");
}
return Result.failed("新增失败");
......@@ -101,12 +101,14 @@ public class TenantServiceImpl extends SuperServiceImpl<TenantMapper, Tenant> im
return Result.failed("租户不存在");
}
Long tenantRoleCount = roleService.getBaseMapper().selectCount(new QueryWrapper<Role>().eq("tenant_id", id));
Long tenantRoleCount =
roleService.getBaseMapper().selectCount(new QueryWrapper<Role>().eq("tenant_id", id));
if (tenantRoleCount > 0) {
return Result.failed("删除租户失败,该租户已绑定角色");
}
Long tenantNamespaceCount = namespaceService.getBaseMapper().selectCount(new QueryWrapper<Namespace>().eq("tenant_id", id));
Long tenantNamespaceCount =
namespaceService.getBaseMapper().selectCount(new QueryWrapper<Namespace>().eq("tenant_id", id));
if (tenantNamespaceCount > 0) {
return Result.failed("删除租户失败,该租户已绑定名称空间");
}
......@@ -162,12 +164,12 @@ public class TenantServiceImpl extends SuperServiceImpl<TenantMapper, Tenant> im
public Result switchTenant(JsonNode para) {
if (para.size() > 0) {
Integer tenantId = para.get("tenantId").asInt();
RequestContext.remove();
RequestContext.set(tenantId);
TenantContextHolder.clear();
TenantContextHolder.set(tenantId);
return Result.succeed("切换租户成功");
} else {
return Result.failed("无法切换租户,获取不到租户信息");
}
}
}
\ No newline at end of file
}
......@@ -21,7 +21,7 @@ package com.dlink.service.impl;
import com.dlink.assertion.Asserts;
import com.dlink.common.result.Result;
import com.dlink.context.RequestContext;
import com.dlink.context.TenantContextHolder;
import com.dlink.db.service.impl.SuperServiceImpl;
import com.dlink.dto.LoginUTO;
import com.dlink.dto.UserDTO;
......@@ -146,7 +146,7 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
}
// 将前端入参 租户id 放入上下文
RequestContext.set(loginUTO.getTenantId());
TenantContextHolder.set(loginUTO.getTenantId());
// get user tenants and roles
UserDTO userDTO = getUserALLBaseInfo(loginUTO, user);
......@@ -177,7 +177,8 @@ public class UserServiceImpl extends SuperServiceImpl<UserMapper, User> implemen
});
userTenants.forEach(userTenant -> {
Tenant tenant = tenantService.getBaseMapper().selectOne(new QueryWrapper<Tenant>().eq("id", userTenant.getTenantId()));
Tenant tenant = tenantService.getBaseMapper()
.selectOne(new QueryWrapper<Tenant>().eq("id", userTenant.getTenantId()));
if (Asserts.isNotNull(tenant)) {
tenantList.add(tenant);
}
......
......@@ -2,6 +2,12 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.dlink.mapper.JobHistoryMapper">
<select id="getByIdWithoutTenant" resultType="com.dlink.model.JobHistory">
select *
from dlink_job_history
where id = #{id}
limit 1
</select>
<insert id="insert">
insert into dlink_job_history (id,job_json,exceptions_json,checkpoints_json,checkpoints_config_json,config_json,
......
......@@ -2,6 +2,13 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.dlink.mapper.JobInstanceMapper">
<select id="getByIdWithoutTenant" resultType="com.dlink.model.JobInstance">
select *
from dlink_job_instance
where id = #{id}
limit 1
</select>
<select id="selectForProTable" resultType="com.dlink.model.JobInstance">
select
a.*,
......
......@@ -59,8 +59,8 @@
select *
from dlink_task
where 1 = 1
and name = "${name}"
and tenant_id = ${tenantId}
and name = #{name}
and tenant_id = #{tenantId}
</select>
<select id="queryOnLineTaskByDoneStatus" resultType="com.dlink.model.Task">
......
......@@ -15,16 +15,14 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>dlink-client</artifactId>
<groupId>com.dlink</groupId>
<artifactId>dlink-client</artifactId>
<version>0.6.8-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-client-1.14</artifactId>
<properties>
......@@ -54,5 +52,11 @@
<artifactId>jackson-datatype-jsr310</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>flink-connector-starrocks</artifactId>
<version>1.2.3_flink-1.14_2.12</version>
<scope>provided</scope>
</dependency>
</dependencies>
</project>
\ No newline at end of file
</project>
......@@ -15,16 +15,14 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>dlink-flink</artifactId>
<groupId>com.dlink</groupId>
<artifactId>dlink-flink</artifactId>
<version>0.6.8-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-flink-1.14</artifactId>
......@@ -45,17 +43,19 @@
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-runtime_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
......@@ -66,8 +66,6 @@
<artifactId>flink-shaded-guava</artifactId>
</exclusion>
</exclusions>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
......@@ -77,13 +75,13 @@
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
......@@ -143,8 +141,8 @@
<version>${flinkcdc.version}</version>
<exclusions>
<exclusion>
<artifactId>flink-shaded-guava</artifactId>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-guava</artifactId>
</exclusion>
</exclusions>
</dependency>
......@@ -183,10 +181,5 @@
<artifactId>flink-doris-connector-1.14_${scala.binary.version}</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>flink-connector-starrocks</artifactId>
<version>1.2.3_flink-1.14_${scala.binary.version}</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
</project>
......@@ -55,7 +55,8 @@ public class ConsolePool extends AbstractPool<StringBuilder> {
if (consoleEntityMap.containsKey(user)) {
consoleEntityMap.get(user).append(str);
} else {
consoleEntityMap.put(user, new StringBuilder(str));
StringBuilder sb = new StringBuilder("Dinky User Console:");
consoleEntityMap.put(user, sb.append(str));
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment