Commit 4fcc452c authored by godkaikai's avatar godkaikai

Improve alias defaulting and exception handling when registering Flink clusters

parent 2ea1d40b
@@ -42,7 +42,7 @@ The DataLink open source project and community are under construction; we hope this project helps you …
| | Table-level lineage analysis | 0.3.0 |
| | Task processes | 0.3.0 |
| | Remote task termination | 0.3.0 |
| | Function auto-completion | Coming soon |
| | Function auto-completion | 0.3.1 |
| | Task auditing | Coming soon |
| | FlinkSQL runtime metrics | Coming soon |
| | Column-level lineage analysis | Coming soon |
@@ -113,8 +113,6 @@ extends/ -- extensions
|- flink-sql-connector-hbase-2.2_2.11-1.12.5.jar
|- flink-sql-connector-kafka_2.11-1.12.5.jar
|- ojdbc8-12.2.0.1.jar
|- otj-pg-embedded-0.13.3.jar
|- postgresql-42.2.10.jar
lib/ -- external dependencies and connectors
|- dlink-client-1.12.jar -- required
|- dlink-connector-jdbc.jar
@@ -135,6 +133,8 @@ dlink-admin.jar -- application jar
```
After extraction, the layout is as shown above; edit the configuration files as needed.
The lib folder holds Dlink's own extension jars, while the plugins folder holds the official Flink and Hadoop extension jars.
The extends folder is only a backup area for extension plugins and is never loaded by Dlink.
Create a database in MySQL and run the initialization script, e.g.:
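A minimal sketch, assuming the script ships as sql/dlink.sql and the database is named dlink (both names are illustrative, not taken from this commit):
```shell
# Illustrative commands; adjust the user, database name, and script path to your setup.
mysql -uroot -p -e "CREATE DATABASE IF NOT EXISTS dlink DEFAULT CHARACTER SET utf8mb4;"
mysql -uroot -p dlink < sql/dlink.sql
```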
@@ -146,6 +146,51 @@ sh auto.sh stop
sh auto.sh restart
sh auto.sh status
```
At this point the Dlink frontend can be reached directly on port 8888. However, once Hadoop dependencies are placed under plugins, the page can no longer be served this way, so deploying behind Nginx is recommended.
Frontend Nginx deployment:
```shell
server {
listen 9999;
server_name localhost;
# gzip config
gzip on;
gzip_min_length 1k;
gzip_comp_level 9;
gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png;
gzip_vary on;
gzip_disable "MSIE [1-6]\.";
#charset koi8-r;
#access_log logs/host.access.log main;
location / {
root html;
index index.html index.htm;
try_files $uri $uri/ /index.html;
}
#error_page 404 /404.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root html;
}
location ^~ /api {
proxy_pass http://127.0.0.1:8888;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Real-IP $remote_addr;
}
}
```
1. Set server.listen to the port the frontend will be served on.
2. Set proxy_pass to the backend address, e.g. http://127.0.0.1:8888.
3. Upload the packaged assets from the dist folder into Nginx's html folder, restart Nginx, and open the site; a quick sanity check is sketched below.
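A quick sanity check of the proxy, assuming the sample ports above (9999 for Nginx, 8888 for the backend):
```shell
# The root should return the SPA's index.html via Nginx
curl -I http://127.0.0.1:9999/
# /api requests should be forwarded to the backend on port 8888
curl -I http://127.0.0.1:9999/api/
```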
### Building from Source
......
package com.dlink.controller;
import com.dlink.api.FlinkAPI;
import com.dlink.cluster.FlinkClusterInfo;
import com.dlink.common.result.ProTableResult;
import com.dlink.common.result.Result;
import com.dlink.model.Cluster;
@@ -105,14 +106,14 @@ public class ClusterController {
}
private void checkHealth(Cluster cluster){
String jobManagerHost = clusterService.checkHeartBeat(cluster.getHosts(), cluster.getJobManagerHost());
if(jobManagerHost==null){
FlinkClusterInfo info = clusterService.checkHeartBeat(cluster.getHosts(), cluster.getJobManagerHost());
if(!info.isEffective()){
cluster.setJobManagerHost("");
cluster.setStatus(0);
}else{
cluster.setJobManagerHost(jobManagerHost);
cluster.setJobManagerHost(info.getJobManagerAddress());
cluster.setStatus(1);
cluster.setVersion(FlinkAPI.build(jobManagerHost).getVersion());
cluster.setVersion(info.getVersion());
}
}
}
@@ -33,12 +33,17 @@ public class DateMetaObjectHandler implements MetaObjectHandler {
public void insertFill(MetaObject metaObject) {
Object createTime = getFieldValByName(mybatisPlusFillProperties.getCreateTimeField(), metaObject);
Object updateTime = getFieldValByName(mybatisPlusFillProperties.getUpdateTimeField(), metaObject);
Object alias = getFieldValByName(mybatisPlusFillProperties.getAlias(), metaObject);
Object name = getFieldValByName(mybatisPlusFillProperties.getName(), metaObject);
if (createTime == null) {
setFieldValByName(mybatisPlusFillProperties.getCreateTimeField(), LocalDateTime.now(), metaObject);
}
if (updateTime == null) {
setFieldValByName(mybatisPlusFillProperties.getUpdateTimeField(), LocalDateTime.now(), metaObject);
}
if (alias == null) {
setFieldValByName(mybatisPlusFillProperties.getAlias(), name, metaObject);
}
}
@Override
......
@@ -24,4 +24,8 @@ public class MybatisPlusFillProperties {
private String createTimeField = "createTime";
private String updateTimeField = "updateTime";
private String name = "name";
private String alias = "alias";
}
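The net effect of the two additions above: on insert, MyBatis-Plus now fills a blank alias with the entity's name, alongside the existing createTime/updateTime defaults. A standalone sketch of just that rule (plain Java, no MyBatis-Plus wiring; class and method names are illustrative):
```java
import java.time.LocalDateTime;

// Standalone sketch of the fill rule added above, outside MyBatis-Plus.
public class AliasFillSketch {

    // Mirrors insertFill: a null alias falls back to the name.
    static String fillAlias(String name, String alias) {
        return alias == null ? name : alias;
    }

    public static void main(String[] args) {
        System.out.println(fillAlias("test-cluster", null)); // -> test-cluster
        System.out.println(fillAlias("test-cluster", "tc")); // -> tc
        System.out.println(LocalDateTime.now());             // what the time fields default to
    }
}
```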
package com.dlink.model;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.dlink.db.model.SuperEntity;
import lombok.Data;
@@ -18,6 +20,7 @@ public class Cluster extends SuperEntity {
private static final long serialVersionUID = 3104721227014487321L;
@TableField(fill = FieldFill.INSERT)
private String alias;
private String type;
......
package com.dlink.model;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.dlink.db.model.SuperEntity;
import com.dlink.metadata.driver.DriverConfig;
@@ -21,6 +23,7 @@ public class DataBase extends SuperEntity {
private static final long serialVersionUID = -5002272138861566408L;
@TableField(fill = FieldFill.INSERT)
private String alias;
private String groupName;
......
package com.dlink.model;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.dlink.db.model.SuperEntity;
@@ -25,6 +26,7 @@ public class Task extends SuperEntity{
private static final long serialVersionUID = 5988972129893667154L;
@TableField(fill = FieldFill.INSERT)
private String alias;
private String type;
......
package com.dlink.service;
import com.dlink.cluster.FlinkClusterInfo;
import com.dlink.db.service.ISuperService;
import com.dlink.model.Cluster;
@@ -13,7 +14,7 @@ import java.util.List;
**/
public interface ClusterService extends ISuperService<Cluster> {
String checkHeartBeat(String hosts,String host);
FlinkClusterInfo checkHeartBeat(String hosts, String host);
String getJobManagerAddress(Cluster cluster);
......
@@ -3,6 +3,7 @@ package com.dlink.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.dlink.assertion.Assert;
import com.dlink.cluster.FlinkCluster;
import com.dlink.cluster.FlinkClusterInfo;
import com.dlink.constant.FlinkConstant;
import com.dlink.constant.NetConstant;
import com.dlink.db.service.impl.SuperServiceImpl;
@@ -25,14 +26,18 @@ import java.util.List;
public class ClusterServiceImpl extends SuperServiceImpl<ClusterMapper, Cluster> implements ClusterService {
@Override
public String checkHeartBeat(String hosts,String host) {
public FlinkClusterInfo checkHeartBeat(String hosts, String host) {
return FlinkCluster.testFlinkJobManagerIP(hosts,host);
}
@Override
public String getJobManagerAddress(Cluster cluster) {
Assert.check(cluster);
String host = FlinkCluster.testFlinkJobManagerIP(cluster.getHosts(), cluster.getJobManagerHost());
FlinkClusterInfo info = FlinkCluster.testFlinkJobManagerIP(cluster.getHosts(), cluster.getJobManagerHost());
String host = null;
if(info.isEffective()){
host = info.getJobManagerAddress();
}
Assert.checkHost(host);
if(!host.equals(cluster.getJobManagerHost())){
cluster.setJobManagerHost(host);
......
@@ -75,14 +75,6 @@
<include>dlink-connector-jdbc-1.12-${project.version}.jar</include>
</includes>
</fileSet>
<!-- Copy the flink-shaded-hadoop-3-uber jar from the dlink-connectors module into the extends directory of the distribution -->
<fileSet>
<directory>${project.parent.basedir}/dlink-connectors/dlink-flink-shaded-hadoop-3-uber/target</directory>
<outputDirectory>extends</outputDirectory>
<includes>
<include>dlink-flink-shaded-hadoop-3-uber.jar</include>
</includes>
</fileSet>
<!-- Copy the dlink-function module's jar into the /lib directory of the distribution -->
<fileSet>
@@ -153,11 +145,7 @@
</directory>
<outputDirectory>extends</outputDirectory>
<includes>
<include>flink-sql-connector-hbase-1.4_2.11-*.jar</include>
<include>flink-sql-connector-hbase-2.2_2.11-*.jar</include>
<include>flink-sql-connector-kafka_2.11-*.jar</include>
<include>postgresql-*.jar</include>
<include>otj-pg-embedded-*.jar</include>
<include>flink-sql-connector-*.jar</include>
<include>ojdbc8-*.jar</include>
<include>clickhouse-jdbc-*.jar</include>
</includes>
......
@@ -3,7 +3,7 @@
<parent>
<artifactId>dlink-client</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.1</version>
<version>0.3.2-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-client-1.11</artifactId>
......
@@ -3,7 +3,7 @@
<parent>
<artifactId>dlink-client</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.1</version>
<version>0.3.2-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-client-1.12</artifactId>
......
@@ -3,7 +3,7 @@
<parent>
<artifactId>dlink-client</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.1</version>
<version>0.3.2-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-client-1.13</artifactId>
......
@@ -17,6 +17,10 @@ public interface NetConstant {
* Connect timeout (ms) for a running server: 1000
*/
Integer SERVER_TIME_OUT_ACTIVE = 1000;
/**
* Read timeout (ms) for a server: 3000
*/
Integer READ_TIME_OUT = 3000;
/**
* Connect timeout (ms) for the Flink history server: 2000
*/
......
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<artifactId>dlink-connectors</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-flink-shaded-hadoop-3-uber</artifactId>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer>
<resource>reference.conf</resource>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
<properties>
<java.version>1.8</java.version>
<scala.binary.version>2.11</scala.binary.version>
<flink.version>1.12.5</flink.version>
<maven.compiler.source>1.8</maven.compiler.source>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
</project>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dlink-connectors</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.2-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-flink-shaded-hadoop-3-uber</artifactId>
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.12.5</flink.version>
<scala.binary.version>2.11</scala.binary.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-hadoop-3-uber</artifactId>
<version>3.1.1.7.2.9.0-173-9.0</version>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<!-- build the jar with dependencies -->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<finalName>${project.artifactId}</finalName>
</build>
</project>
\ No newline at end of file
@@ -12,7 +12,6 @@
<modules>
<module>dlink-connector-jdbc-1.12</module>
<module>dlink-connector-jdbc-1.13</module>
<module>dlink-flink-shaded-hadoop-3-uber</module>
</modules>
<artifactId>dlink-connectors</artifactId>
</project>
\ No newline at end of file
@@ -41,8 +41,7 @@ public class FlinkAPI {
return result;
}
private JsonNode get(String route) {
private JsonNode get(String route){
String res = HttpUtil.get(NetConstant.HTTP + address + NetConstant.SLASH + route, NetConstant.SERVER_TIME_OUT_ACTIVE);
return parse(res);
}
@@ -75,7 +74,7 @@ FlinkAPI
return true;
}
public String getVersion(){
public String getVersion() {
JsonNode result = get(FlinkRestAPIConstant.CONFIG);
return result.get("flink-version").asText();
}
......
package com.dlink.cluster;
import cn.hutool.http.HttpUtil;
import cn.hutool.core.io.IORuntimeException;
import com.dlink.api.FlinkAPI;
import com.dlink.assertion.Asserts;
import com.dlink.constant.FlinkConstant;
import com.dlink.constant.FlinkHistoryConstant;
import com.dlink.constant.NetConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.net.SocketTimeoutException;
/**
* FlinkCluster
@@ -22,27 +18,35 @@ public class FlinkCluster {
private static Logger logger = LoggerFactory.getLogger(FlinkCluster.class);
public static String testFlinkJobManagerIP(String hosts,String host) {
public static FlinkClusterInfo testFlinkJobManagerIP(String hosts,String host) {
if(Asserts.isNotNullString(host)) {
FlinkClusterInfo info = executeSocketTest(host);
if(info.isEffective()){
return info;
}
}
String[] servers = hosts.split(",");
for (String server : servers) {
FlinkClusterInfo info = executeSocketTest(server);
if(info.isEffective()){
return info;
}
}
return FlinkClusterInfo.INEFFECTIVE;
}
private static FlinkClusterInfo executeSocketTest(String host){
try {
String res = FlinkAPI.build(host).getVersion();
if (Asserts.isNotNullString(res)) {
return host;
return FlinkClusterInfo.build(host,res);
}
} catch (IORuntimeException e) {
logger.info("Flink jobManager 地址排除 -- "+ host);
} catch (Exception e) {
logger.error(e.getMessage(),e);
}
String[] servers = hosts.split(",");
for (String server : servers) {
try {
String res = FlinkAPI.build(server).getVersion();
if (Asserts.isNotNullString(res)) {
return server;
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
}
}
return null;
return FlinkClusterInfo.INEFFECTIVE;
}
}
package com.dlink.cluster;
import lombok.Getter;
import lombok.Setter;
/**
* FlinkClusterInfo
*
* @author wenmo
* @since 2021/10/20 9:10
**/
@Getter
@Setter
public class FlinkClusterInfo {
private boolean isEffective;
private String jobManagerAddress;
private String version;
public static final FlinkClusterInfo INEFFECTIVE = new FlinkClusterInfo(false);
public FlinkClusterInfo(boolean isEffective) {
this.isEffective = isEffective;
}
public FlinkClusterInfo(boolean isEffective, String jobManagerAddress, String version) {
this.isEffective = isEffective;
this.jobManagerAddress = jobManagerAddress;
this.version = version;
}
public static FlinkClusterInfo build(String jobManagerAddress, String version){
return new FlinkClusterInfo(true, jobManagerAddress, version);
}
}
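With the nullable String return gone, callers branch on a single result object. A hypothetical standalone caller of the new API (the addresses are invented for illustration):
```java
import com.dlink.cluster.FlinkCluster;
import com.dlink.cluster.FlinkClusterInfo;

// Hypothetical usage of the API introduced in this commit; addresses are examples.
public class HeartbeatDemo {
    public static void main(String[] args) {
        FlinkClusterInfo info = FlinkCluster.testFlinkJobManagerIP(
                "192.168.0.1:8081,192.168.0.2:8081", // hosts: candidate JobManager list
                "192.168.0.1:8081");                 // host: last known-good address
        if (info.isEffective()) {
            System.out.println(info.getJobManagerAddress() + " -> Flink " + info.getVersion());
        } else {
            System.out.println("No reachable JobManager among the candidates");
        }
    }
}
```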
@@ -3,7 +3,7 @@
# Path of the jar to run; quotes are optional. NOTE: no spaces around the equals sign, or the shell reports "command not found".
JAR_NAME="./dlink-admin-*.jar"
#java -Djava.ext.dirs=$JAVA_HOME/jre/lib/ext:$JAVA_HOME/jre/lib:./lib -classpath ."/lib/*.jar" -jar dlink-admin-*.jar
SETTING="-Dloader.path=./lib"
SETTING="-Dloader.path=./lib,./plugins"
# Print a usage hint when the arguments are malformed.
tips() {
echo ""
......
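The practical effect of widening loader.path: Spring Boot's PropertiesLauncher (which -Dloader.path requires; this assumes dlink-admin is packaged for it) now puts both ./lib and ./plugins on the classpath, which is what lets the Hadoop and Flink jars under plugins be picked up at startup. The launch line amounts to roughly:
```shell
# Roughly what auto.sh now runs; exact flags per the script.
java -Dloader.path=./lib,./plugins -jar ./dlink-admin-*.jar
```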
@@ -3,7 +3,7 @@
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.1</version>
<version>0.3.2-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-extends</artifactId>
@@ -64,13 +64,11 @@
<java.version>1.8</java.version>
<clickhouse-jdbc.version>0.2.6</clickhouse-jdbc.version>
<mariadb-java-client.version>2.5.4</mariadb-java-client.version>
<postgres.version>42.2.10</postgres.version>
<otj-pg-embedded.version>0.13.3</otj-pg-embedded.version>
<maven.compiler.target>1.8</maven.compiler.target>
<scala.binary.version>2.11</scala.binary.version>
<derby.version>10.14.2.0</derby.version>
<mariaDB4j.version>2.4.0</mariaDB4j.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<mariaDB4j.version>2.4.0</mariaDB4j.version>
<mysql-connector-java.version>8.0.22</mysql-connector-java.version>
<maven.compiler.source>1.8</maven.compiler.source>
<ojdbc8.version>12.2.0.1</ojdbc8.version>
......
@@ -15,10 +15,8 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.12.5</flink.version>
<scala.binary.version>2.11</scala.binary.version>
<postgres.version>42.2.10</postgres.version>
<mysql-connector-java.version>8.0.22</mysql-connector-java.version>
<ojdbc8.version>12.2.0.1</ojdbc8.version>
<otj-pg-embedded.version>0.13.3</otj-pg-embedded.version>
<mariaDB4j.version>2.4.0</mariaDB4j.version>
<mariadb-java-client.version>2.5.4</mariadb-java-client.version>
<derby.version>10.14.2.0</derby.version>
@@ -58,16 +56,6 @@
<artifactId>flink-sql-connector-kafka_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgres.version}</version>
</dependency>
<dependency>
<groupId>com.opentable.components</groupId>
<artifactId>otj-pg-embedded</artifactId>
<version>${otj-pg-embedded.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
......
@@ -6,7 +6,7 @@ import com.dlink.metadata.rules.IColumnType;
/**
* PostgreSqlTypeConvert
*
* @author qiwenkai
* @author wenmo
* @since 2021/7/22 9:33
**/
public class PostgreSqlTypeConvert implements ITypeConvert {
......
@@ -9,7 +9,7 @@ import com.dlink.model.Table;
/**
* PostgreSqlDriver
*
* @author qiwenkai
* @author wenmo
* @since 2021/7/22 9:28
**/
public class PostgreSqlDriver extends AbstractJdbcDriver {
......
@@ -3,7 +3,7 @@ package com.dlink.metadata.query;
/**
* PostgreSqlQuery
*
* @author qiwenkai
* @author wenmo
* @since 2021/7/22 9:29
**/
public class PostgreSqlQuery extends AbstractDBQuery {
......
export function getDBImage(type:string) {
let imageUrl = '/database/';
let imageUrl = 'database/';
switch (type.toLowerCase()){
case 'mysql':
imageUrl += 'mysql.jpg';
@@ -15,7 +15,7 @@ export function getDBImage(type:string) {
imageUrl += 'clickhouse.png';
break;
default:
imageUrl += 'db.png';
imageUrl += 'db.jpg';
}
return imageUrl;
}
@@ -314,15 +314,21 @@ export default (): React.ReactNode => {
<li>
<Link>Optimized switching between multiple Flink versions and pushed table-field retrieval down into the client modules</Link>
</li>
<li>
<Link>Added a flink-shaded-hadoop-3-uber build tailored for Dlink</Link>
</li>
<li>
<Link>Added the plugins class-loading path for Flink-related dependencies</Link>
</li>
<li>
<Link>Added the dlink-extends module for packaging extended dependencies</Link>
</li>
<li>
<Link>Added a more stable Nginx deployment that separates frontend and backend</Link>
</li>
<li>
<Link>On every create form, a blank alias now defaults to the name</Link>
</li>
<li>
<Link>Improved connection checking and exception output when registering a Flink cluster</Link>
</li>
</ul>
</Paragraph>
</Timeline.Item>
......
@@ -178,16 +178,6 @@
<artifactId>dlink-connector-jdbc-1.13</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-connector-hbase-1.4.3-1.12</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-flink-shaded-hadoop-3-uber</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-function</artifactId>
......