Commit 5a5d9e24 authored by gaogao110's avatar gaogao110

Merge remote-tracking branch 'Upstream/dev' into dev

# Conflicts:
#	pom.xml
parents 606d9430 80913511
......@@ -88,6 +88,7 @@ Dinky(原 Dlink):
| 运维中心 | 主页 | 新增 任务实例列表 | 0.6.0 |
| | 作业监控 | 新增 作业总览 | 0.6.0 |
| | | 新增 实时监控 Flink Job | 0.6.0 |
| | | 新增 作业实例与历史切换 | 0.6.0 |
| | | 新增 自动告警 | 0.6.0 |
| | | 新增 FlinkWebUI 跳转 | 0.6.0 |
| | | 新增 智能重启(重新上线) | 0.6.0 |
......@@ -97,14 +98,14 @@ Dinky(原 Dlink):
| | | 新增 配置信息 | 0.6.0 |
| | | 新增 集群信息 | dev |
| | | 新增 作业快照 | dev |
| | | 新增 异常信息 | dev |
| | | 新增 异常信息 | 0.6.0 |
| | | 新增 作业日志 | dev |
| | | 新增 自动调优 | dev |
| | | 新增 FlinkSQL | dev |
| | | 新增 FlinkSQL | 0.6.0 |
| | | 新增 数据地图 | dev |
| | | 新增 即席查询 | dev |
| | | 新增 历史版本 | dev |
| | | 新增 告警记录 | dev |
| | | 新增 告警记录 | 0.6.0 |
| 注册中心 | Flink 集群实例 | 新增 外部 Flink 集群实例注册 | 0.4.0 |
| | | 新增 外部 Flink 集群实例心跳检测与版本获取 | 0.4.0 |
| | | 新增 外部 Flink 集群手动一键回收 | 0.4.0 |
......
......@@ -105,6 +105,19 @@
<groupId>cn.dev33</groupId>
<artifactId>sa-token-spring-boot-starter</artifactId>
</dependency>
<!-- sa-token 持久化 -->
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-data-redis</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>cn.dev33</groupId>-->
<!-- <artifactId>sa-token-dao-redis-jackson</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>redis.clients</groupId>-->
<!-- <artifactId>jedis</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
......
......@@ -3,7 +3,9 @@ package com.dlink.controller;
import com.dlink.common.result.ProTableResult;
import com.dlink.common.result.Result;
import com.dlink.model.AlertGroup;
import com.dlink.model.AlertHistory;
import com.dlink.service.AlertGroupService;
import com.dlink.service.AlertHistoryService;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -30,6 +32,8 @@ import java.util.List;
public class AlertGroupController {
@Autowired
private AlertGroupService alertGroupService;
@Autowired
private AlertHistoryService alertHistoryService;
/**
* 新增或者更新
......@@ -90,4 +94,12 @@ public class AlertGroupController {
public Result listEnabledAll() {
return Result.succeed(alertGroupService.listEnabledAll(),"获取成功");
}
/**
* 动态查询列表
*/
@PostMapping("/history")
public ProTableResult<AlertHistory> listAlertHistory(@RequestBody JsonNode para) {
return alertHistoryService.selectForProTable(para);
}
}
......@@ -12,6 +12,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
......@@ -74,7 +75,10 @@ public class JobInstanceController {
*/
@GetMapping("/getStatusCount")
public Result getStatusCount() {
return Result.succeed(jobInstanceService.getStatusCount(), "获取成功");
HashMap<String,Object> result = new HashMap<>();
result.put("history",jobInstanceService.getStatusCount(true));
result.put("instance",jobInstanceService.getStatusCount(false));
return Result.succeed(result, "获取成功");
}
/**
......@@ -92,4 +96,12 @@ public class JobInstanceController {
public Result refreshJobInfoDetail(@RequestParam Integer id) {
return Result.succeed(taskService.refreshJobInfoDetail(id), "刷新成功");
}
/**
* 获取单表的血缘分析
*/
@GetMapping("/getOneTableColumnCA")
public Result getOneTableColumnCA(@RequestParam Integer id) {
return Result.succeed(jobInstanceService.getOneTableColumnCA(id), "刷新成功");
}
}
......@@ -18,5 +18,7 @@ public interface JobInstanceMapper extends SuperMapper<JobInstance> {
List<JobInstanceCount> countStatus();
List<JobInstanceCount> countHistoryStatus();
List<JobInstance> listJobInstanceActive();
}
package com.dlink.service;
import com.dlink.db.service.ISuperService;
import com.dlink.explainer.ca.TableCANode;
import com.dlink.model.JobInfoDetail;
import com.dlink.model.JobInstance;
import com.dlink.model.JobInstanceStatus;
......@@ -15,7 +16,7 @@ import java.util.List;
*/
public interface JobInstanceService extends ISuperService<JobInstance> {
JobInstanceStatus getStatusCount();
JobInstanceStatus getStatusCount(boolean isHistory);
List<JobInstance> listJobInstanceActive();
......@@ -23,4 +24,5 @@ public interface JobInstanceService extends ISuperService<JobInstance> {
JobInfoDetail getJobInfoDetailInfo(JobInstance jobInstance);
List<TableCANode> getOneTableColumnCA(Integer id);
}
......@@ -4,6 +4,8 @@ import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.dlink.assertion.Asserts;
import com.dlink.constant.FlinkRestResultConstant;
import com.dlink.db.service.impl.SuperServiceImpl;
import com.dlink.explainer.ca.CABuilder;
import com.dlink.explainer.ca.TableCANode;
import com.dlink.mapper.JobInstanceMapper;
import com.dlink.model.Cluster;
import com.dlink.model.History;
......@@ -44,8 +46,13 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
private JobHistoryService jobHistoryService;
@Override
public JobInstanceStatus getStatusCount() {
List<JobInstanceCount> jobInstanceCounts = baseMapper.countStatus();
public JobInstanceStatus getStatusCount(boolean isHistory) {
List<JobInstanceCount> jobInstanceCounts = null;
if(isHistory){
jobInstanceCounts = baseMapper.countHistoryStatus();
}else{
jobInstanceCounts = baseMapper.countStatus();
}
JobInstanceStatus jobInstanceStatus = new JobInstanceStatus();
Integer total = 0;
for (JobInstanceCount item : jobInstanceCounts) {
......@@ -119,4 +126,9 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
return jobInfoDetail;
}
@Override
public List<TableCANode> getOneTableColumnCA(Integer id) {
return CABuilder.getOneTableColumnCAByStatement(getJobInfoDetail(id).getHistory().getStatement());
}
}
......@@ -11,7 +11,25 @@ spring:
# clean-disabled: true
## baseline-on-migrate: true
# table: dlink_schema_history
# Redis配置
#sa-token如需依赖redis,请打开redis配置和pom.xml、dlink-admin/pom.xml中依赖
# redis:
# host: localhost
# port: 6379
# password:
# database: 10
# jedis:
# pool:
# # 连接池最大连接数(使用负值表示没有限制)
# max-active: 50
# # 连接池最大阻塞等待时间(使用负值表示没有限制)
# max-wait: 3000
# # 连接池中的最大空闲连接数
# max-idle: 20
# # 连接池中的最小空闲连接数
# min-idle: 5
# # 连接超时时间(毫秒)
# timeout: 5000
server:
port: 8888
......
......@@ -9,6 +9,9 @@
dlink_alert_history a
<where>
1=1
<if test='param.jobInstanceId!=null and param.jobInstanceId!=""'>
and a.job_instance_id = #{param.jobInstanceId}
</if>
<if test='param.createTime!=null and param.createTime!=""'>
and a.create_time <![CDATA[>=]]> str_to_date( #{param.createTime},'%Y-%m-%d %H:%i:%s')
</if>
......
......@@ -61,6 +61,6 @@
from
dlink_cluster a
where enabled = 1
and (type = 'standalone' or type = 'yarn-session')
and (type = 'standalone' or type = 'yarn-session'or type = 'kubernetes-session')
</select>
</mapper>
......@@ -9,6 +9,12 @@
(select dc.alias FROM dlink_cluster dc where dc.id=a.cluster_id) as clusterAlias
from
dlink_job_instance a
<if test='!(param.isHistory!=null and param.isHistory==true)'>
inner join (
select max(ji.id) as id from dlink_job_instance ji
group by ji.task_id
) snap on snap.id = a.id
</if>
left join dlink_history dh on a.history_id = dh.id
<where>
1=1
......@@ -45,6 +51,19 @@
</select>
<select id="countStatus" resultType="com.dlink.model.JobInstanceCount">
select
a.status,
count(1) as counts
from
dlink_job_instance a
inner join (
select max(ji.id) as id from dlink_job_instance ji
group by ji.task_id
) snap on snap.id = a.id
group by status
</select>
<select id="countHistoryStatus" resultType="com.dlink.model.JobInstanceCount">
select
status,
count(1) as counts
......
......@@ -46,17 +46,18 @@ public class FlinkAPI {
return result;
}
private JsonNode get(String route){
private JsonNode get(String route) {
String res = HttpUtil.get(NetConstant.HTTP + address + NetConstant.SLASH + route, NetConstant.SERVER_TIME_OUT_ACTIVE);
return parse(res);
}
/**
* get请求获取 JobManager/TaskManager 的日志 (结果为字符串并不是json格式)
*
* @param route
* @return
*/
private String getResult(String route){
private String getResult(String route) {
String res = HttpUtil.get(NetConstant.HTTP + address + NetConstant.SLASH + route, NetConstant.SERVER_TIME_OUT_ACTIVE);
return res;
}
......@@ -67,7 +68,7 @@ public class FlinkAPI {
}
private JsonNode patch(String route, String body) {
String res = HttpUtil.createRequest(Method.PATCH,NetConstant.HTTP + address + NetConstant.SLASH + route).timeout(NetConstant.SERVER_TIME_OUT_ACTIVE).body(body).execute().body();
String res = HttpUtil.createRequest(Method.PATCH, NetConstant.HTTP + address + NetConstant.SLASH + route).timeout(NetConstant.SERVER_TIME_OUT_ACTIVE).body(body).execute().body();
return parse(res);
}
......@@ -83,27 +84,27 @@ public class FlinkAPI {
return joblist;
}
public boolean stop(String jobId){
get(FlinkRestAPIConstant.JOBS+jobId+FlinkRestAPIConstant.CANCEL);
public boolean stop(String jobId) {
get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.CANCEL);
return true;
}
public SavePointResult savepoints(String jobId, String savePointType){
public SavePointResult savepoints(String jobId, String savePointType) {
SavePointType type = SavePointType.get(savePointType);
String paramType = null;
SavePointResult result = SavePointResult.build(GatewayType.YARN_PER_JOB);
JobInfo jobInfo = new JobInfo(jobId);
Map<String, Object> paramMap = new HashMap<>();
switch (type){
switch (type) {
case CANCEL:
jobInfo.setStatus(JobInfo.JobStatus.CANCEL);
case STOP:
paramMap.put("drain",false);
paramMap.put("drain", false);
paramType = FlinkRestAPIConstant.STOP;
jobInfo.setStatus(JobInfo.JobStatus.STOP);
break;
case TRIGGER:
paramMap.put("cancel-job",false);
paramMap.put("cancel-job", false);
//paramMap.put("target-directory","hdfs:///flink13/ss1");
paramType = FlinkRestAPIConstant.SAVEPOINTS;
jobInfo.setStatus(JobInfo.JobStatus.RUN);
......@@ -122,17 +123,17 @@ public class FlinkAPI {
Thread.sleep(1000);
JsonNode node = get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.SAVEPOINTS + NetConstant.SLASH + triggerid);
String status = node.get("status").get("id").asText();
if(Asserts.isEquals(status,"IN_PROGRESS")){
if (Asserts.isEquals(status, "IN_PROGRESS")) {
continue;
}
if(node.get("operation").has("failure-cause")) {
if (node.get("operation").has("failure-cause")) {
String failureCause = node.get("operation").get("failure-cause").asText();
if (Asserts.isNotNullString(failureCause)) {
result.fail(failureCause);
break;
}
}
if(node.get("operation").has("location")) {
if (node.get("operation").has("location")) {
String location = node.get("operation").get("location").asText();
List<JobInfo> jobInfos = new ArrayList<>();
jobInfo.setSavePoint(location);
......@@ -154,24 +155,28 @@ public class FlinkAPI {
return result.get("flink-version").asText();
}
public JsonNode getOverview() {
return get(FlinkRestAPIConstant.OVERVIEW);
}
public JsonNode getJobInfo(String jobId) {
return get(FlinkRestAPIConstant.JOBS+jobId);
return get(FlinkRestAPIConstant.JOBS + jobId);
}
public JsonNode getException(String jobId) {
return get(FlinkRestAPIConstant.JOBS+jobId+FlinkRestAPIConstant.EXCEPTIONS);
return get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.EXCEPTIONS);
}
public JsonNode getCheckPoints(String jobId) {
return get(FlinkRestAPIConstant.JOBS+jobId+FlinkRestAPIConstant.CHECKPOINTS);
return get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.CHECKPOINTS);
}
public JsonNode getCheckPointsConfig(String jobId) {
return get(FlinkRestAPIConstant.JOBS+jobId+FlinkRestAPIConstant.CHECKPOINTS_CONFIG);
return get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.CHECKPOINTS_CONFIG);
}
public JsonNode getJobsConfig(String jobId) {
return get(FlinkRestAPIConstant.JOBS+jobId+FlinkRestAPIConstant.CONFIG);
return get(FlinkRestAPIConstant.JOBS + jobId + FlinkRestAPIConstant.CONFIG);
}
public JsonNode getJobManagerMetrics() {
......@@ -202,7 +207,6 @@ public class FlinkAPI {
return get(FlinkRestAPIConstant.TASK_MANAGER + containerId + FlinkRestAPIConstant.METRICS);
}
public String getTaskManagerLog(String containerId) {
return getResult(FlinkRestAPIConstant.TASK_MANAGER + containerId + FlinkRestAPIConstant.LOG);
}
......@@ -218,6 +222,4 @@ public class FlinkAPI {
public JsonNode getTaskManagerThreadDump(String containerId) {
return get(FlinkRestAPIConstant.TASK_MANAGER + containerId + FlinkRestAPIConstant.THREAD_DUMP);
}
}
......@@ -8,6 +8,8 @@ package com.dlink.constant;
**/
public final class FlinkRestAPIConstant {
public static final String OVERVIEW = "overview";
public static final String FLINK_CONFIG = "config";
public static final String CONFIG = "/config";
......
package com.dlink.explainer.lineage;
import com.dlink.explainer.ca.ColumnCAResult;
import com.dlink.plus.FlinkSqlPlus;
import java.util.List;
/**
* LineageBuilder
*
* @author wenmo
* @since 2022/3/15 22:58
*/
/**
 * LineageBuilder
 *
 * <p>Builds a {@link LineageResult} lineage graph from a FlinkSQL statement.
 *
 * @author wenmo
 * @since 2022/3/15 22:58
 */
public class LineageBuilder {

    /**
     * Runs the column-level CA analysis for the given statement.
     *
     * <p>NOTE(review): this is still a stub — it triggers the analysis (so parse
     * errors surface) but always returns {@code null}; the mapping from
     * {@code ColumnCAResult} to {@link LineageResult} is not implemented yet.
     * The original code iterated over the results without using them; that dead
     * loop has been removed.
     *
     * @param statement one or more FlinkSQL statements to analyze
     * @return always {@code null} until the result mapping is implemented
     */
    public static LineageResult getLineage(String statement) {
        FlinkSqlPlus plus = FlinkSqlPlus.build();
        // Trigger the analysis; the results are not yet consumed.
        List<ColumnCAResult> columnCAResults = plus.explainSqlColumnCA(statement);
        return null;
    }
}
package com.dlink.explainer.lineage;
/**
* LineageColumn
*
* @author wenmo
* @since 2022/3/15 22:55
*/
/**
 * A column node of a table in the lineage graph.
 *
 * @author wenmo
 * @since 2022/3/15 22:55
 */
public class LineageColumn {

    // Physical column name.
    private String columnName;
    // Display label for the column.
    private String columnTitle;

    public LineageColumn() {
    }

    public String getName() {
        return columnName;
    }

    public void setName(String name) {
        this.columnName = name;
    }

    public String getTitle() {
        return columnTitle;
    }

    public void setTitle(String title) {
        this.columnTitle = title;
    }
}
package com.dlink.explainer.lineage;
/**
* LineageRelation
*
* @author wenmo
* @since 2022/3/15 23:00
*/
/**
 * A directed column-level edge between two tables in the lineage graph.
 *
 * @author wenmo
 * @since 2022/3/15 23:00
 */
public class LineageRelation {

    // Unique identifier of this edge.
    private String relationId;
    // Source / target table identifiers.
    private String sourceTableId;
    private String targetTableId;
    // Source / target column names within those tables.
    private String sourceColumnName;
    private String targetColumnName;

    public LineageRelation() {
    }

    public String getId() {
        return relationId;
    }

    public void setId(String id) {
        this.relationId = id;
    }

    public String getSrcTableId() {
        return sourceTableId;
    }

    public void setSrcTableId(String srcTableId) {
        this.sourceTableId = srcTableId;
    }

    public String getTgtTableId() {
        return targetTableId;
    }

    public void setTgtTableId(String tgtTableId) {
        this.targetTableId = tgtTableId;
    }

    public String getSrcTableColName() {
        return sourceColumnName;
    }

    public void setSrcTableColName(String srcTableColName) {
        this.sourceColumnName = srcTableColName;
    }

    public String getTgtTableColName() {
        return targetColumnName;
    }

    public void setTgtTableColName(String tgtTableColName) {
        this.targetColumnName = tgtTableColName;
    }
}
package com.dlink.explainer.lineage;
import java.util.List;
/**
* LineageResult
*
* @author wenmo
* @since 2022/3/15 22:59
*/
/**
 * The full lineage graph: the set of tables plus the edges between them.
 *
 * @author wenmo
 * @since 2022/3/15 22:59
 */
public class LineageResult {

    // Nodes of the graph.
    private List<LineageTable> tableList;
    // Edges of the graph.
    private List<LineageRelation> relationList;

    public LineageResult() {
    }

    public List<LineageTable> getTables() {
        return tableList;
    }

    public void setTables(List<LineageTable> tables) {
        this.tableList = tables;
    }

    public List<LineageRelation> getRelations() {
        return relationList;
    }

    public void setRelations(List<LineageRelation> relations) {
        this.relationList = relations;
    }
}
package com.dlink.explainer.lineage;
import java.util.List;
/**
* LineageTable
*
* @author wenmo
* @since 2022/3/15 22:55
*/
/**
 * A table node in the lineage graph, with its columns.
 *
 * @author wenmo
 * @since 2022/3/15 22:55
 */
public class LineageTable {

    // Unique identifier of this table node.
    private String tableId;
    // Table name shown in the graph.
    private String tableName;
    // Columns belonging to this table.
    private List<LineageColumn> columnList;

    public LineageTable() {
    }

    public String getId() {
        return tableId;
    }

    public void setId(String id) {
        this.tableId = id;
    }

    public String getName() {
        return tableName;
    }

    public void setName(String name) {
        this.tableName = name;
    }

    public List<LineageColumn> getColumns() {
        return columnList;
    }

    public void setColumns(List<LineageColumn> columns) {
        this.columnList = columns;
    }
}
package com.dlink.core;
import com.dlink.explainer.ca.CABuilder;
import com.dlink.explainer.ca.ColumnCANode;
import com.dlink.explainer.ca.TableCANode;
import com.dlink.explainer.lineage.LineageBuilder;
import com.dlink.explainer.lineage.LineageResult;
import org.junit.Test;
import java.util.List;
/**
* LineageTest
*
* @author wenmo
* @since 2022/3/15 23:08
*/
/**
 * LineageTest
 *
 * <p>Smoke test for {@link LineageBuilder#getLineage(String)}.
 *
 * @author wenmo
 * @since 2022/3/15 23:08
 */
public class LineageTest {

    @Test
    public void sumTest() {
        // A datagen source, a print sink, and an INSERT whose SELECT feeds the analysis.
        String statement = "CREATE TABLE ST (\n"
                + " a STRING,\n"
                + " b STRING,\n"
                + " c STRING\n"
                + ") WITH (\n"
                + " 'connector' = 'datagen',\n"
                + " 'rows-per-second' = '1'\n"
                + ");\n"
                + "CREATE TABLE TT (\n"
                + " A STRING,\n"
                + " B STRING\n"
                + ") WITH (\n"
                + " 'connector' = 'print'\n"
                + ");\n"
                + "insert into TT select a||c A ,b B from ST";
        LineageResult result = LineageBuilder.getLineage(statement);
        System.out.println("end");
    }
}
......@@ -387,4 +387,6 @@ create table dlink_job_history
)
comment 'Job历史详情';
CREATE INDEX dlink_job_instance_task_id_IDX USING BTREE ON dlink_job_instance (task_id);
SET FOREIGN_KEY_CHECKS = 1;
......@@ -634,5 +634,9 @@ ALTER TABLE `dlink_task`
-- ----------------------------
ALTER TABLE `dlink_job_instance`
ADD COLUMN `step` INT NULL COMMENT '生命周期' AFTER `task_id`;
-- ----------------------------
-- 0.6.0-SNAPSHOT 2022-03-15
-- ----------------------------
CREATE INDEX dlink_job_instance_task_id_IDX USING BTREE ON dlink_job_instance (task_id);
SET FOREIGN_KEY_CHECKS = 1;
......@@ -84,6 +84,7 @@
"react-dom": "^17.0.0",
"react-helmet-async": "^1.0.4",
"react-highlight-words": "^0.17.0",
"react-lineage-dag": "^1.0.0",
"react-monaco-editor": "^0.43.0",
"sql-formatter": "^4.0.2",
"umi": "^3.5.0",
......
import MonacoEditor from "react-monaco-editor";
import * as _monaco from "monaco-editor";
export type CodeShowFormProps = {
height?: string;
width?: string;
language: string;
theme?: string;
options?: any;
code: string;
};
const CodeShow = (props: CodeShowFormProps) => {
const {
height = '100%',
width = '100%',
language = 'sql',
theme = 'vs',
options = {
selectOnLineNumbers: true,
renderSideBySide: false,
autoIndent:'None',
readOnly:true ,
},
code,
} = props;
return (<>
<MonacoEditor
width={width}
height={height}
language={language}
value={code}
options={options}
theme={theme}
/>
</>)
};
export default CodeShow;
......@@ -67,7 +67,7 @@ const ClusterForm: React.FC<ClusterFormProps> = (props) => {
<Option value={RUN_MODE.STANDALONE}>Standalone</Option>
<Option value={RUN_MODE.YARN_SESSION}>Yarn Session</Option>
<Option value={RUN_MODE.YARN_PER_JOB}>Yarn Per-Job</Option>
<Option value={RUN_MODE.YARN_SESSION}>Yarn Application</Option>
<Option value={RUN_MODE.YARN_APPLICATION}>Yarn Application</Option>
<Option value={RUN_MODE.KUBERNETES_SESSION}>Kubernetes Session</Option>
<Option value={RUN_MODE.KUBERNETES_APPLICATION}>Kubernetes Application</Option>
</Select>
......
......@@ -68,7 +68,7 @@ const UpdateForm: React.FC<UpdateFormProps> = (props) => {
<Option value={RUN_MODE.STANDALONE}>Standalone</Option>
<Option value={RUN_MODE.YARN_SESSION}>Yarn Session</Option>
<Option value={RUN_MODE.YARN_PER_JOB}>Yarn Per-Job</Option>
<Option value={RUN_MODE.YARN_SESSION}>Yarn Application</Option>
<Option value={RUN_MODE.YARN_APPLICATION}>Yarn Application</Option>
<Option value={RUN_MODE.KUBERNETES_SESSION}>Kubernetes Session</Option>
<Option value={RUN_MODE.KUBERNETES_APPLICATION}>Kubernetes Application</Option>
</Select>
......
import { Typography} from 'antd';
import ProTable from '@ant-design/pro-table';
import {ProColumns} from "@ant-design/pro-table";
import {queryData} from "@/components/Common/crud";
const { Text } = Typography;
type AlertHistoryTableListItem = {
  // NOTE(review): `id` assumed present on dlink_alert_history records — used as the
  // row key below; confirm against the backend AlertHistory model.
  id: number,
  title: string,
  content: string,
  status: number,
  log: string,
  createTime: string,
}

/**
 * Alert-history tab of the DevOps job detail page.
 * Lists alert records for the current job instance via POST /api/alertGroup/history.
 */
const Alert = (props: any) => {

  const url = '/api/alertGroup';
  const {job} = props;

  const columns: ProColumns<AlertHistoryTableListItem>[] = [
    {
      title: '标题',
      dataIndex: 'title',
      render: (dom, entity) => {
        return <Text style={{ width: 200 }} ellipsis={{ tooltip:entity.title }}>{entity.title}</Text>;
      },
    },
    {
      title: '正文',
      dataIndex: 'content',
      render: (dom, entity) => {
        return <Text style={{ width: 500 }} ellipsis={{ tooltip:entity.content }}>{entity.content}</Text>;
      },
    },
    {
      title: '状态',
      dataIndex: 'status',
      sorter: true,
      render: (dom, entity) => {
        return entity.status === 1?<Text type="success">成功</Text>:<Text type="danger">失败</Text>;
      },
    },
    {
      title: '日志',
      dataIndex: 'log',
      render: (dom, entity) => {
        return <Text style={{ width: 500 }} ellipsis={{ tooltip:entity.log }}>{entity.log}</Text>;
      },
    },
    {
      title: '报警时间',
      dataIndex: 'createTime',
      valueType: 'dateTime',
    },
  ];

  return (<>
    <ProTable
      columns={columns}
      style={{width: '100%'}}
      request={(params, sorter, filter) => queryData(url+'/history', {...params, jobInstanceId:job.instance?.id,sorter, filter})}
      // fix: rows have no `name` field — keying by it made every React key undefined;
      // key by the record id instead so rows are uniquely identified.
      rowKey="id"
      pagination={{
        pageSize: 10,
      }}
      toolBarRender={false}
      search={false}
      size="small"
    />
  </>)
};

export default Alert;
import {Tabs, Empty} from 'antd';
import CodeShow from "@/components/Common/CodeShow";
import {LineageTable} from 'react-lineage-dag';
const {TabPane} = Tabs;
/**
 * DataMap tab of the DevOps job detail page.
 *
 * NOTE(review): the lineage graph below is hard-coded demo data — presumably a
 * placeholder until it is wired to the backend lineage API; confirm before release.
 */
const DataMap = (props: any) => {

  const {job} = props; // currently unused by the demo rendering

  // Static sample graph: three tables and two table-level edges.
  const demoData = {
    tables: [
      {
        id: '1',
        name: 'table-1',
        columns: [
          {
            name: 'id',
            title: 'id'
          },
          {
            name: 'age',
            title: 'age'
          }
        ]
      },
      {
        id: '2',
        name: 'table-2',
        columns: [
          {
            name: 'id',
            title: 'id'
          },
          {
            name: 'age',
            title: 'age'
          }
        ]
      },
      {
        id: '3',
        name: 'table-3',
        columns: [
          {
            name: 'id',
            title: 'id'
          },
          {
            name: 'age',
            title: 'age'
          }
        ]
      }
    ],
    relations: [
      {
        srcTableId: '1',
        tgtTableId: '2',
        // srcTableColName: 'id',
        // tgtTableColName: 'age'
      },
      {
        srcTableId: '1',
        tgtTableId: '3',
        // srcTableColName: 'id',
        // tgtTableColName: 'age'
      }
    ]
  };

  return (
    <>
      <Tabs defaultActiveKey="OneCA" size="small" tabPosition="top" style={{
        border: "1px solid #f0f0f0"
      }}>
        <TabPane tab={<span>血缘分析</span>} key="OneCA">
          <LineageTable {...demoData} onEachFrame={() => { }}/>
        </TabPane>
      </Tabs>
    </>
  );
};

export default DataMap;
import {Tabs, Empty} from 'antd';
import CodeShow from "@/components/Common/CodeShow";
const {TabPane} = Tabs;
/**
 * Exception tab of the DevOps job detail page: shows the Flink root exception
 * captured in the job history; the history tab is a placeholder for now.
 */
const Exception = (props: any) => {

  const {job} = props;

  return (<>
    <Tabs defaultActiveKey="RootException" size="small" tabPosition="top" style={{
      border: "1px solid #f0f0f0"
    }}>
      <TabPane tab={<span>Root Exception</span>} key="RootException">
        {/* fix: guard both levels — `jobHistory` may exist while its `exceptions`
            payload is still null/undefined before the first REST poll completes;
            the original chain threw in that case. */}
        <CodeShow code={job.jobHistory?.exceptions?.['root-exception'] as string} language='java' height='500px'/>
      </TabPane>
      <TabPane tab={<span>Exception History</span>} key="ExceptionHistory">
        <Empty image={Empty.PRESENTED_IMAGE_SIMPLE}/>
      </TabPane>
    </Tabs>
  </>)
};

export default Exception;
import CodeShow from "@/components/Common/CodeShow";
const FlinkSQL = (props: any) => {
const {job} = props;
return (<>
<CodeShow code={job.history?.statement} language='sql' height='500px'/>
</>)
};
export default FlinkSQL;
......@@ -16,6 +16,10 @@ import JobStatus, {isStatusDone} from "@/components/Common/JobStatus";
import {cancelJob, offLineTask, restartJob} from "@/components/Studio/StudioEvent/DDL";
import {CODE} from "@/components/Common/crud";
import JobLifeCycle from "@/components/Common/JobLifeCycle";
import Exception from "@/pages/DevOps/JobInfo/Exception";
import FlinkSQL from "@/pages/DevOps/JobInfo/FlinkSQL";
import Alert from "@/pages/DevOps/JobInfo/Alert";
import DataMap from "@/pages/DevOps/JobInfo/DataMap";
const {Link} = Typography;
......@@ -254,14 +258,14 @@ const JobInfo = (props: any) => {
{tabKey === 'config' ? <Config job={job}/> : undefined}
{tabKey === 'cluster' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'snapshot' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'exception' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'exception' ? <Exception job={job}/> : undefined}
{tabKey === 'log' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'optimize' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'flinksql' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'datamap' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'flinksql' ? <FlinkSQL job={job}/> : undefined}
{tabKey === 'datamap' ? <DataMap job={job} /> : undefined}
{tabKey === 'olap' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'version' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'alert' ? <Empty image={Empty.PRESENTED_IMAGE_SIMPLE} /> : undefined}
{tabKey === 'alert' ? <Alert job={job} /> : undefined}
</ProCard>
</PageContainer>
);
......
import { history } from 'umi';
import {queryData} from "@/components/Common/crud";
import React, {useState} from "react";
import type { ProColumns } from '@ant-design/pro-table';
import React, {useState, useRef, useEffect} from "react";
import type { ProColumns,ActionType } from '@ant-design/pro-table';
import ProTable from "@ant-design/pro-table";
import {JobInstanceTableListItem} from "@/pages/DevOps/data";
import moment from 'moment';
......@@ -14,6 +14,11 @@ const JobInstanceTable = (props: any) => {
const {status, activeKey,isHistory, dispatch} = props;
const [time, setTime] = useState(() => Date.now());
const ref = useRef<ActionType>();
useEffect(() => {
ref?.current?.reload();
}, [isHistory]);
const getColumns = () => {
const columns: ProColumns<JobInstanceTableListItem>[] = [{
......@@ -133,6 +138,7 @@ const JobInstanceTable = (props: any) => {
return (
<><ProTable
actionRef={ref}
request={(params, sorter, filter) => {
setTime(Date.now());
return queryData(url, {...params,status,isHistory, sorter: {id: 'descend'}, filter});
......
......@@ -37,12 +37,29 @@ const DevOps = (props:any) => {
{ key: JOB_STATUS.UNKNOWN, status: 'default', title: '未知', value: 0 },
];
const [statusCount, setStatusCount] = useState<any[]>(statusCountDefault);
const [statusHistoryCount, setStatusHistoryCount] = useState<any[]>(statusCountDefault);
const [activeKey, setActiveKey] = useState<string>('');
const refreshStatusCount = () => {
const res = getStatusCount();
res.then((result)=>{
const statusCountData: StatusCount = result.datas;
const statusHistoryCountData: StatusCount = result.datas.history;
const historyItems: any = [
{ key: '', title: renderSwitch(), value: statusHistoryCountData.all, total: true },
{ key: JOB_STATUS.CREATED, status: 'default', title: '已创建', value: statusHistoryCountData.created },
{ key: JOB_STATUS.INITIALIZING, status: 'default', title: '初始化', value: statusHistoryCountData.initializing },
{ key: JOB_STATUS.RUNNING, status: 'success', title: '运行中', value: statusHistoryCountData.running },
{ key: JOB_STATUS.FINISHED, status: 'processing', title: '已完成', value: statusHistoryCountData.finished },
{ key: JOB_STATUS.FAILING, status: 'error', title: '异常中', value: statusHistoryCountData.failing },
{ key: JOB_STATUS.FAILED, status: 'error', title: '已异常', value: statusHistoryCountData.failed },
{ key: JOB_STATUS.SUSPENDED, status: 'warning', title: '已暂停', value: statusHistoryCountData.suspended },
{ key: JOB_STATUS.CANCELLING, status: 'warning', title: '停止中', value: statusHistoryCountData.cancelling },
{ key: JOB_STATUS.CANCELED, status: 'warning', title: '停止', value: statusHistoryCountData.canceled },
{ key: JOB_STATUS.RESTARTING, status: 'default', title: '重启中', value: statusHistoryCountData.restarting },
{ key: JOB_STATUS.UNKNOWN, status: 'default', title: '未知', value: statusHistoryCountData.unknown },
];
setStatusHistoryCount(historyItems);
const statusCountData: StatusCount = result.datas.instance;
const items: any = [
{ key: '', title: renderSwitch(), value: statusCountData.all, total: true },
{ key: JOB_STATUS.CREATED, status: 'default', title: '已创建', value: statusCountData.created },
......@@ -77,7 +94,7 @@ const DevOps = (props:any) => {
},
}}
>
{statusCount.map((item) => (
{(isHistory?statusHistoryCount:statusCount).map((item) => (
<ProCard.TabPane
style={{ width: '100%' }}
key={item.key}
......
......@@ -749,6 +749,21 @@ export default (): React.ReactNode => {
<li>
<Link>修复 用户逻辑删除bug</Link>
</li>
<li>
<Link>新增 运维中心的作业实例与历史切换</Link>
</li>
<li>
<Link>新增 运维中心的异常信息实现</Link>
</li>
<li>
<Link>新增 运维中心的FlinkSQL实现</Link>
</li>
<li>
<Link>新增 运维中心的报警记录实现</Link>
</li>
<li>
<Link>修复 kubernetes集群配置相关显示bug</Link>
</li>
</ul>
</Paragraph>
</Timeline.Item>
......
......@@ -57,6 +57,7 @@
<scope.runtime>provided</scope.runtime>
<scala.binary.version>2.11</scala.binary.version>
<protobuf-java.version>2.5.0</protobuf-java.version>
<jedis.version>2.9.0</jedis.version>
</properties>
<dependencyManagement>
......@@ -164,6 +165,18 @@
<artifactId>sa-token-spring-boot-starter</artifactId>
<version>${sa-token.version}</version>
</dependency>
<!--sa-token redis持久化 start-->
<dependency>
<groupId>cn.dev33</groupId>
<artifactId>sa-token-dao-redis-jackson</artifactId>
<version>${sa-token.version}</version>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>${jedis.version}</version>
</dependency>
<!--sa-token redis持久化 end-->
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-core</artifactId>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment