Commit 692d55f9 authored by wenmo

Optimize SET handling and add low-priority loading of the job's additional configuration

parent 439a8eae
@@ -3,12 +3,18 @@ package com.dlink.model;
 import com.baomidou.mybatisplus.annotation.FieldFill;
 import com.baomidou.mybatisplus.annotation.TableField;
 import com.baomidou.mybatisplus.annotation.TableName;
+import com.dlink.assertion.Asserts;
 import com.dlink.db.model.SuperEntity;
 import com.dlink.job.JobConfig;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import lombok.Data;
 import lombok.EqualsAndHashCode;

+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;

 /**
  * 任务
@@ -46,7 +52,7 @@ public class Task extends SuperEntity{
     private Integer jarId;

-    private String config;
+    private String configJson;

     private String note;
@@ -59,6 +65,21 @@ public class Task extends SuperEntity{
     @TableField(exist = false)
     private List<Savepoints> savepoints;

+    @TableField(exist = false)
+    private List<Map<String,String>> config = new ArrayList<>();
+
+    public List<Map<String,String>> parseConfig(){
+        ObjectMapper objectMapper = new ObjectMapper();
+        try {
+            if(Asserts.isNotNullString(configJson)) {
+                config = objectMapper.readValue(configJson, ArrayList.class);
+            }
+        } catch (JsonProcessingException e) {
+            e.printStackTrace();
+        }
+        return config;
+    }
+
     /*public ExecutorSetting buildExecutorSetting(){
         HashMap configMap = new HashMap();
         if(config!=null&&!"".equals(clusterName)) {
@@ -72,7 +93,11 @@ public class Task extends SuperEntity{
         if(clusterId==null||clusterId==0){
             useRemote = false;
         }
-        return new JobConfig(type,false,false,useRemote,clusterId,clusterConfigurationId,jarId,getId(),alias,fragment,statementSet,checkPoint,parallelism,savePointStrategy,savePointPath);
+        Map<String,String> map = new HashMap<>();
+        for(Map<String,String> item : config){
+            map.put(item.get("key"),item.get("value"));
+        }
+        return new JobConfig(type,false,false,useRemote,clusterId,clusterConfigurationId,jarId,getId(),alias,fragment,statementSet,checkPoint,parallelism,savePointStrategy,savePointPath,map);
     }
 }
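The new configJson column is assumed to hold a JSON array of key/value pairs written by the Studio UI (the option key below is illustrative only, not taken from this commit). A minimal, self-contained sketch of how Task.parseConfig() deserializes that string and how Task.buildSubmitConfig() flattens it into the Map<String,String> handed to JobConfig:

    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class TaskConfigJsonSketch {
        public static void main(String[] args) throws Exception {
            // Assumed shape of configJson: [{"key": "...", "value": "..."}, ...]
            String configJson = "[{\"key\":\"table.exec.resource.default-parallelism\",\"value\":\"2\"}]";

            // Same deserialization as Task.parseConfig()
            ObjectMapper objectMapper = new ObjectMapper();
            List<Map<String, String>> config = objectMapper.readValue(configJson, ArrayList.class);

            // Same flattening as Task.buildSubmitConfig(): the pairs become one flat map
            Map<String, String> map = new HashMap<>();
            for (Map<String, String> item : config) {
                map.put(item.get("key"), item.get("value"));
            }
            System.out.println(map); // {table.exec.resource.default-parallelism=2}
        }
    }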
@@ -51,14 +51,14 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
     @Override
     public JobResult submitByTaskId(Integer id) {
-        Task task = this.getById(id);
+        Task task = this.getTaskInfoById(id);
         Assert.check(task);
         boolean isJarTask = isJarTask(task);
-        Statement statement = null;
+        /*Statement statement = null;
         if(!isJarTask){
             statement = statementService.getById(id);
             Assert.check(statement);
-        }
+        }*/
         JobConfig config = task.buildSubmitConfig();
         if (!JobManager.useGateway(config.getType())) {
             config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), task.getClusterId()));
@@ -100,7 +100,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
         }
         JobManager jobManager = JobManager.build(config);
         if(!isJarTask) {
-            return jobManager.executeSql(statement.getStatement());
+            return jobManager.executeSql(task.getStatement());
         }else{
             return jobManager.executeJar();
         }
@@ -114,6 +114,7 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
     public Task getTaskInfoById(Integer id) {
         Task task = this.getById(id);
         if (task != null) {
+            task.parseConfig();
             Statement statement = statementService.getById(id);
             if (task.getClusterId() != null) {
                 Cluster cluster = clusterService.getById(task.getClusterId());
...
@@ -17,7 +17,7 @@
         <result column="cluster_id" property="clusterId" />
         <result column="cluster_configuration_id" property="clusterConfigurationId" />
         <result column="jar_id" property="jarId" />
-        <result column="config" property="config" />
+        <result column="config_json" property="configJson" />
         <result column="note" property="note" />
         <result column="enabled" property="enabled" />
         <result column="create_time" property="createTime" />
@@ -26,7 +26,7 @@
     <!-- 通用查询结果列 -->
     <sql id="Base_Column_List">
-        id, name, alias, type,check_point,save_point_strategy,save_point_path, parallelism,fragment,statement_set,cluster_id,cluster_configuration_id,jar_id,config,note, enabled, create_time, update_time
+        id, name, alias, type,check_point,save_point_strategy,save_point_path, parallelism,fragment,statement_set,cluster_id,cluster_configuration_id,jar_id,config_json,note, enabled, create_time, update_time
     </sql>
...
@@ -83,7 +83,8 @@ public class JobConfig {

     public JobConfig(String type,boolean useResult, boolean useSession, boolean useRemote, Integer clusterId,
                      Integer clusterConfigurationId, Integer jarId, Integer taskId, String jobName, boolean useSqlFragment,
-                     boolean useStatementSet,Integer checkpoint, Integer parallelism, Integer savePointStrategyValue, String savePointPath) {
+                     boolean useStatementSet,Integer checkpoint, Integer parallelism, Integer savePointStrategyValue,
+                     String savePointPath,Map<String,String> config) {
         this.type = type;
         this.useResult = useResult;
         this.useSession = useSession;
@@ -99,10 +100,11 @@ public class JobConfig {
         this.parallelism = parallelism;
         this.savePointStrategy = SavePointStrategy.get(savePointStrategyValue);
         this.savePointPath = savePointPath;
+        this.config = config;
     }

     public ExecutorSetting getExecutorSetting(){
-        return new ExecutorSetting(checkpoint,parallelism,useSqlFragment,savePointPath,jobName);
+        return new ExecutorSetting(checkpoint,parallelism,useSqlFragment,savePointPath,jobName,config);
     }

     public void setSessionConfig(SessionConfig sessionConfig){
...
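A minimal sketch (hypothetical values, fragment only) of the ExecutorSetting construction that getExecutorSetting() now performs, with the job's config map as the new last argument; only the argument order shown in this hunk is assumed:

    // (checkpoint, parallelism, useSqlFragment, savePointPath, jobName, config)
    Map<String, String> config = new HashMap<>();
    config.put("pipeline.object-reuse", "true");   // illustrative key/value
    ExecutorSetting setting = new ExecutorSetting(1000, 1, true, null, "demo-job", config);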
@@ -231,7 +231,7 @@ CREATE TABLE `dlink_task` (
   `cluster_id` int(11) NULL DEFAULT NULL COMMENT 'Flink集群ID',
   `cluster_configuration_id` int(11) NULL DEFAULT NULL COMMENT '集群配置ID',
   `jar_id` int(11) NULL DEFAULT NULL COMMENT 'jarID',
-  `config` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '配置',
+  `config_json` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '配置JSON',
   `note` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '注释',
   `enabled` tinyint(1) NOT NULL DEFAULT 1 COMMENT '是否启用',
   `create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
...
@@ -465,4 +465,10 @@ CREATE TABLE `dlink_user` (
 ) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

 INSERT INTO `dlink_user`(`id`, `username`, `password`, `nickname`, `worknum`, `avatar`, `mobile`, `enabled`, `is_delete`, `create_time`, `update_time`) VALUES (1, 'admin', '21232f297a57a5a743894a0e4a801fc3', 'Admin', NULL, NULL, NULL, 1, 0, '2021-11-28 17:19:27', '2021-11-28 17:19:31');

+-- ----------------------------
+-- 0.4.0 2021-11-29
+-- ----------------------------
+ALTER TABLE `dlink_task`
+CHANGE COLUMN `config` `config_json` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '配置JSON' AFTER `jar_id`;
+
 SET FOREIGN_KEY_CHECKS = 1;
@@ -6,6 +6,7 @@ import com.dlink.executor.custom.CustomTableResultImpl;
 import com.dlink.interceptor.FlinkInterceptor;
 import com.dlink.result.SqlExplainResult;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.flink.configuration.Configuration;
 import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.table.api.ExplainDetail;
@@ -14,7 +15,11 @@ import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.catalog.CatalogManager;
 import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.UserDefinedFunction;
+import org.apache.flink.table.operations.Operation;
+import org.apache.flink.table.operations.command.ResetOperation;
+import org.apache.flink.table.operations.command.SetOperation;

+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -99,6 +104,10 @@ public abstract class Executor {
         if(executorSetting.getParallelism()!=null&&executorSetting.getParallelism()>0){
             environment.setParallelism(executorSetting.getParallelism());
         }
+        if(executorSetting.getConfig()!=null) {
+            Configuration configuration = Configuration.fromMap(executorSetting.getConfig());
+            environment.getConfig().configure(configuration, null);
+        }
     }

     private void updateEnvironment(ExecutorSetting executorSetting){
@@ -108,6 +117,10 @@ public abstract class Executor {
         if(executorSetting.getParallelism()!=null&&executorSetting.getParallelism()>0){
             environment.setParallelism(executorSetting.getParallelism());
         }
+        if(executorSetting.getConfig()!=null) {
+            Configuration configuration = Configuration.fromMap(executorSetting.getConfig());
+            environment.getConfig().configure(configuration, null);
+        }
     }

     private void initStreamExecutionEnvironment(){
@@ -227,4 +240,34 @@ public abstract class Executor {
     public void submitStatementSet(List<String> statements){
         executeStatementSet(statements);
     }
+
+    public boolean parseAndLoadConfiguration(String statement){
+        List<Operation> operations = stEnvironment.getParser().parse(statement);
+        for(Operation operation : operations){
+            if(operation instanceof SetOperation){
+                callSet((SetOperation)operation);
+                return true;
+            } else if (operation instanceof ResetOperation){
+                callReset((ResetOperation)operation);
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private void callSet(SetOperation setOperation){
+        if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) {
+            String key = setOperation.getKey().get().trim();
+            String value = setOperation.getValue().get().trim();
+            Map<String,String> confMap = new HashMap<>();
+            confMap.put(key,value);
+            Configuration configuration = Configuration.fromMap(confMap);
+            environment.getConfig().configure(configuration,null);
+            stEnvironment.getConfig().addConfiguration(configuration);
+        }
+    }
+
+    private void callReset(ResetOperation resetOperation) {
+        // to do nothing
+    }
 }
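Both the ExecutorSetting path and callSet() apply options through org.apache.flink.configuration.Configuration. A standalone sketch of that pattern, assuming Flink 1.13.x on the classpath (the option key is illustrative; ExecutionConfig.configure only reads the options it knows and ignores the rest):

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    import java.util.HashMap;
    import java.util.Map;

    public class ApplyConfigSketch {
        public static void main(String[] args) {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // The job's "other configuration", e.g. as produced by Task.buildSubmitConfig()
            Map<String, String> config = new HashMap<>();
            config.put("pipeline.object-reuse", "true");

            // Same pattern as the new Executor code: wrap the map in a Configuration and let
            // ExecutionConfig pick up the options it understands. The commit passes null as the
            // class loader, which works because a class loader is only needed for options that
            // load user classes; a real loader is used here to keep the sketch general.
            Configuration configuration = Configuration.fromMap(config);
            env.getConfig().configure(configuration, Thread.currentThread().getContextClassLoader());
        }
    }

Because the map from ExecutorSetting is applied when the environment is built or updated, a SET statement handled later by callSet() overrides it, which appears to be the low-priority loading the commit message refers to.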
@@ -34,7 +34,11 @@ public class FlinkInterceptor {
         return statement.trim();
     }

+    // return false to continue with executeSql
     public static boolean build(Executor executor, String statement) {
+        if(executor.parseAndLoadConfiguration(statement)){
+            return true;
+        }
         Operation operation = Operations.buildOperation(statement);
         if (Asserts.isNotNull(operation)) {
             operation.build(executor.getCustomTableEnvironmentImpl());
...
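The comment added above documents the contract: build() returns true when a statement has been fully consumed (for example a SET or RESET handled by parseAndLoadConfiguration), and false when the caller should go on and submit the statement as SQL. A hypothetical call-site pattern (the real caller lives in the executor/JobManager code, which this diff does not touch):

    // Hypothetical caller; "executor" and "statement" are assumed to be in scope.
    if (!FlinkInterceptor.build(executor, statement)) {
        // Not intercepted (no SET/RESET, no custom dlink operation): hand it to Flink as SQL.
        executor.executeSql(statement);
    }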
@@ -13,8 +13,8 @@ import com.dlink.trans.ddl.SetOperation;
 public class Operations {

     private static Operation[] operations = {
-            new CreateAggTableOperation(),
-            new SetOperation()
+            new CreateAggTableOperation()
+            // , new SetOperation()
     };

     public static SqlType getSqlTypeFromStatements(String statement){
...
@@ -18,6 +18,7 @@ import java.util.Map;
  * @author wenmo
  * @since 2021/10/21 19:56
  **/
+@Deprecated
 public class SetOperation extends AbstractOperation implements Operation {

     private String KEY_WORD = "SET";
...
@@ -47,17 +47,9 @@ const StudioMenu = (props: any) => {
     let param = {
       useSession: useSession,
       session: currentSession.session,
-      useRemote: current.task.useRemote,
-      clusterId: current.task.clusterId,
-      useResult: current.task.useResult,
-      maxRowNum: current.task.maxRowNum,
       statement: selectsql,
-      fragment: current.task.fragment,
-      jobName: current.task.jobName,
-      parallelism: current.task.parallelism,
-      checkPoint: current.task.checkPoint,
-      savePointStrategy: current.task.savePointStrategy,
-      savePointPath: current.task.savePointPath,
+      configJson: JSON.stringify(current.task.config),
+      ...current.task,
     };
     const key = current.key;
     const taskKey = (Math.random() * 1000) + '';
@@ -82,11 +74,6 @@ const StudioMenu = (props: any) => {
     let newTabs = tabs;
     for (let i = 0; i < newTabs.panes.length; i++) {
       if (newTabs.panes[i].key == key) {
-        /*let newResult = newTabs.panes[i].console.result;
-        newResult.unshift(res.datas);
-        newTabs.panes[i].console={
-          result:newResult,
-        };*/
         newTabs.panes[i].console.result = res.datas;
         break;
       }
...
@@ -146,11 +146,10 @@ const StudioTree: React.FC<StudioTreeProps> = (props) => {
         session:'',
         maxRowNum: 100,
         jobName:node.name,
-        config: [],
         useResult:false,
         useSession:false,
         useRemote:true,
-        ...result.datas
+        ...result.datas,
       },
       console:{
         result:[],
...
@@ -271,7 +271,9 @@ const Model: ModelType = {
   effects: {
     * saveTask({payload}, {call, put}) {
-      yield call(handleAddOrUpdate, 'api/task', payload);
+      let para = payload;
+      para.configJson = JSON.stringify(payload.config);
+      yield call(handleAddOrUpdate, 'api/task', para);
       yield put({
         type: 'saveTaskData',
         payload,
...
@@ -423,6 +423,12 @@ export default (): React.ReactNode => {
             <li>
               <Link>新增用户管理模块</Link>
             </li>
+            <li>
+              <Link>优化 SET 语法</Link>
+            </li>
+            <li>
+              <Link>新增作业配置的其他配置低优先级加载实现</Link>
+            </li>
           </ul>
         </Paragraph>
       </Timeline.Item>
...