Commit 59c7b06d authored by godkaikai

yarn application test

parent 8ee08a32
@@ -114,6 +114,12 @@
     <groupId>com.dlink</groupId>
     <artifactId>dlink-metadata-base</artifactId>
 </dependency>
+<dependency>
+    <groupId>com.dlink</groupId>
+    <artifactId>dlink-gateway</artifactId>
+    <scope>provided</scope>
+</dependency>
 <!--<dependency>
     <groupId>com.dlink</groupId>
     <artifactId>dlink-metadata-mysql</artifactId>
...
@@ -5,6 +5,7 @@ import com.dlink.dto.SessionDTO;
 import com.dlink.dto.StudioCADTO;
 import com.dlink.dto.StudioDDLDTO;
 import com.dlink.dto.StudioExecuteDTO;
+import com.dlink.gateway.GatewayConfig;
 import com.dlink.job.JobResult;
 import com.dlink.result.IResult;
 import com.dlink.service.StudioService;
@@ -139,4 +140,11 @@ public class StudioController {
     public Result cancel(@RequestParam Integer clusterId, @RequestParam String jobId) {
         return Result.succeed(studioService.cancel(clusterId, jobId), "stopped successfully");
     }
+
+    /**
+     * Submit a jar
+     */
+    @PostMapping("/submitJar")
+    public Result submitJar(@RequestBody JsonNode para) {
+        return Result.succeed(studioService.submitJar(GatewayConfig.build(para)), "executed successfully");
+    }
 }
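For reference, a minimal sketch of the request body this endpoint expects. The field names are the ones GatewayConfig.build (in dlink-gateway below) actually reads; the paths, job name, and class name here are made up:

import com.dlink.gateway.GatewayConfig;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SubmitJarPayloadExample {
    public static void main(String[] args) throws Exception {
        // Only "type" is read unconditionally; the remaining fields are optional.
        JsonNode para = new ObjectMapper().readTree("{"
                + "\"type\":\"yarn-application\","
                + "\"jobName\":\"myFlinkSqlJob\","
                + "\"configDir\":\"/opt/flink/conf\","
                + "\"userJarPath\":\"hdfs:///flink/jar/dlink-app.jar\","
                + "\"userJarParas\":\"--id 2\","
                + "\"userJarMainAppClass\":\"com.dlink.app.MainApp\"}");
        GatewayConfig config = GatewayConfig.build(para);
        System.out.println(config); // GatewayConfig{type=YARN_APPLICATION, ...}
    }
}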
@@ -5,6 +5,8 @@ import com.dlink.dto.StudioDDLDTO;
 import com.dlink.dto.StudioExecuteDTO;
 import com.dlink.explainer.ca.ColumnCANode;
 import com.dlink.explainer.ca.TableCANode;
+import com.dlink.gateway.GatewayConfig;
+import com.dlink.gateway.result.GatewayResult;
 import com.dlink.job.JobResult;
 import com.dlink.result.IResult;
 import com.dlink.result.SelectResult;
@@ -48,4 +50,6 @@ public interface StudioService {
     List<JsonNode> listJobs(Integer clusterId);
     boolean cancel(Integer clusterId, String jobId);
+
+    GatewayResult submitJar(GatewayConfig config);
 }
@@ -8,6 +8,9 @@ import com.dlink.dto.StudioExecuteDTO;
 import com.dlink.explainer.ca.CABuilder;
 import com.dlink.explainer.ca.ColumnCANode;
 import com.dlink.explainer.ca.TableCANode;
+import com.dlink.gateway.Gateway;
+import com.dlink.gateway.GatewayConfig;
+import com.dlink.gateway.result.GatewayResult;
 import com.dlink.job.JobConfig;
 import com.dlink.job.JobManager;
 import com.dlink.job.JobResult;
@@ -159,4 +162,9 @@ public class StudioServiceImpl implements StudioService {
         Asserts.checkNotNull(cluster, "the cluster does not exist");
         return FlinkAPI.build(cluster.getJobManagerHost()).stop(jobId);
     }
+
+    @Override
+    public GatewayResult submitJar(GatewayConfig config) {
+        return Gateway.build(config).submitJar();
+    }
 }
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-app</artifactId>
<properties>
<mainClass>com.dlink.app.MainApp</mainClass>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.12.5</flink.version>
<slf4j.version>1.7.7</slf4j.version>
<log4j.version>1.2.17</log4j.version>
<scala.binary.version>2.11</scala.binary.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<dependencies>
<!-- Apache Flink dependencies -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.21</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>*.properties</include>
</includes>
</resource>
</resources>
<plugins>
<!-- Compiler plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.6.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<!-- Jar assembly plugin (bundles all dependencies) -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.6</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<!-- Optionally set the jar's entry class -->
<mainClass>${mainClass}</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
\ No newline at end of file
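With the assembly plugin above, running mvn package on this module should yield dlink-app-0.3.2-jar-with-dependencies.jar with com.dlink.app.MainApp as its entry point; presumably this is the jar a GatewayConfig.userJarPath points at.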
package com.dlink.app;
import com.dlink.app.assertion.Asserts;
import com.dlink.app.db.DBConfig;
import com.dlink.app.executor.Executor;
import com.dlink.app.flinksql.FlinkSQLFactory;
import org.apache.flink.api.java.utils.ParameterTool;
import java.io.IOException;
import java.time.LocalDateTime;
/**
* MainApp
*
* @author qiwenkai
* @since 2021/10/27 11:10
**/
public class MainApp {
public static void main(String[] args) throws IOException {
System.out.println(LocalDateTime.now() + " Job started");
ParameterTool parameters = ParameterTool.fromArgs(args);
String id = parameters.get("id", null);
if (Asserts.isNotNullString(id)) {
Executor.build().submit(FlinkSQLFactory.getStatements(Integer.valueOf(id), DBConfig.build(parameters)));
}
}
}
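Assuming the jar is launched with the usual ParameterTool flags, an invocation would look like the sketch below. The values are hypothetical; MainApp reads id, and DBConfig.build (further down) reads driver, url, username, and password:

import com.dlink.app.MainApp;

public class MainAppLaunchExample {
    public static void main(String[] unused) throws Exception {
        // Hypothetical arguments: --id selects the dlink_task_statement row,
        // the rest configure DBConfig (driver/url/username/password).
        MainApp.main(new String[]{
                "--id", "2",
                "--driver", "com.mysql.cj.jdbc.Driver",
                "--url", "jdbc:mysql://127.0.0.1:3306/dlink",
                "--username", "dlink",
                "--password", "dlink"
        });
    }
}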
package com.dlink.app.assertion;
import java.util.Collection;
import java.util.Map;
/**
* Asserts
*
* @author wenmo
* @since 2021/7/5 21:57
*/
public class Asserts {
public static boolean isNotNull(Object object){
return object!=null;
}
public static boolean isNull(Object object){
return object==null;
}
public static boolean isNullString(String str){
return isNull(str)||"".equals(str);
}
public static boolean isNotNullString(String str){
return !isNullString(str);
}
public static boolean isEquals(String str1,String str2){
if(isNull(str1)&&isNull(str2)){
return true;
}else if(isNull(str1)||isNull(str2)){
return false;
}else{
return str1.equals(str2);
}
}
public static boolean isEqualsIgnoreCase(String str1,String str2){
if(isNull(str1)&&isNull(str2)){
return true;
}else if(isNull(str1)||isNull(str2)){
return false;
}else{
return str1.equalsIgnoreCase(str2);
}
}
public static boolean isNullCollection(Collection collection) {
if (isNull(collection)||collection.size()==0) {
return true;
}
return false;
}
public static boolean isNotNullCollection(Collection collection) {
return !isNullCollection(collection);
}
public static boolean isNullMap(Map map) {
if (isNull(map)||map.size()==0) {
return true;
}
return false;
}
public static boolean isNotNullMap(Map map) {
return !isNullMap(map);
}
public static void checkNull(String key,String msg) {
if (key == null||"".equals(key)) {
throw new RuntimeException(msg);
}
}
public static void checkNotNull(Object object,String msg) {
if (isNull(object)) {
throw new RuntimeException(msg);
}
}
public static void checkNullString(String key,String msg) {
if (isNull(key)||isEquals("",key)) {
throw new RuntimeException(msg);
}
}
public static void checkNullCollection(Collection collection,String msg) {
if(isNullCollection(collection)){
throw new RuntimeException(msg);
}
}
public static void checkNullMap(Map map,String msg) {
if(isNullMap(map)){
throw new RuntimeException(msg);
}
}
}
package com.dlink.app.constant;
/**
* AppConstant
*
* @author qiwenkai
* @since 2021/10/27 15:24
**/
public class AppConstant {
public static final String FLINKSQL_SEPARATOR = ";";
}
package com.dlink.app.db;
import org.apache.flink.api.java.utils.ParameterTool;
/**
* DBConfig
*
* @author qiwenkai
* @since 2021/10/27 14:46
**/
public class DBConfig {
private String driver;
private String url;
private String username;
private String password;
public DBConfig(String driver, String url, String username, String password) {
this.driver = driver;
this.url = url;
this.username = username;
this.password = password;
}
public static DBConfig build(String driver, String url, String username, String password){
return new DBConfig(driver,url,username,password);
}
public static DBConfig build(ParameterTool parameters){
return new DBConfig(parameters.get("driver", null),
parameters.get("url", null),
parameters.get("username", null),
parameters.get("password", null));
}
public String getDriver() {
return driver;
}
public String getUrl() {
return url;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
@Override
public String toString() {
return "DBConfig{" +
"driver='" + driver + '\'' +
", url='" + url + '\'' +
", username='" + username + '\'' +
", password='" + password + '\'' +
'}';
}
}
package com.dlink.app.db;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
* DBUtil
*
* @author qiwenkai
* @since 2021/10/27 11:25
**/
public class DBUtil {
private static Connection getConnection(DBConfig config) throws IOException {
Connection conn = null;
try {
Class.forName(config.getDriver());
conn = DriverManager.getConnection(config.getUrl(), config.getUsername(), config.getPassword());
} catch (SQLException | ClassNotFoundException e) {
e.printStackTrace();
close(conn);
}
return conn;
}
private static void close(Connection conn) {
try {
if (conn != null) {
conn.close();
}
} catch (SQLException e) {
e.printStackTrace();
}
}
public static String getOneByID(String sql, DBConfig config) throws SQLException, IOException {
    Connection conn = getConnection(config);
    if (conn == null) {
        throw new SQLException("Failed to open a connection to " + config.getUrl());
    }
    String result = null;
    // try-with-resources closes the statement and result set;
    // the connection itself is closed in the finally block
    try (Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(sql)) {
        if (rs.next()) {
            result = rs.getString(1);
        }
    } finally {
        close(conn);
    }
    return result;
}
}
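A hypothetical usage of the helper above, mirroring how FlinkSQLFactory (below) fetches a statement; the connection settings are made up:

import com.dlink.app.db.DBConfig;
import com.dlink.app.db.DBUtil;

public class DBUtilExample {
    public static void main(String[] args) throws Exception {
        // getOneByID returns the first column of the first row,
        // or null when the query matches nothing.
        DBConfig config = DBConfig.build("com.mysql.cj.jdbc.Driver",
                "jdbc:mysql://127.0.0.1:3306/dlink", "dlink", "dlink");
        String statement = DBUtil.getOneByID(
                "select statement from dlink_task_statement where id = 1", config);
        System.out.println(statement);
    }
}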
package com.dlink.app.executor;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import java.util.List;
/**
* Executor
*
* @author qiwenkai
* @since 2021/10/27 15:52
**/
public class Executor {
private StreamExecutionEnvironment environment;
private StreamTableEnvironment stEnvironment;
private ExecutorSetting executorSetting;
private SqlManager sqlManager;
public static Executor build(){
return new Executor(ExecutorSetting.DEFAULT);
}
public static Executor build(ExecutorSetting setting){
return new Executor(setting);
}
private Executor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting;
this.sqlManager = new SqlManager();
init(executorSetting);
}
private void init(ExecutorSetting setting){
    this.environment = StreamExecutionEnvironment.getExecutionEnvironment();
    // apply parallelism and checkpointing when configured (DEFAULT's checkpoint of 0 means disabled)
    if (setting.getParallelism() != null && setting.getParallelism() > 0) environment.setParallelism(setting.getParallelism());
    if (setting.getCheckpoint() != null && setting.getCheckpoint() > 0) environment.enableCheckpointing(setting.getCheckpoint());
    this.stEnvironment = StreamTableEnvironment.create(this.environment);
}
private void executeSql(String statement){
    if (executorSetting.isUseSqlFragment()) {
        statement = sqlManager.parseVariable(statement);
        // run everything except the SHOW FRAGMENTS meta statement,
        // and skip statements that parse down to nothing
        if (statement.length() > 0 && !checkShowFragments(statement)) {
            stEnvironment.executeSql(statement);
        }
    } else {
        stEnvironment.executeSql(statement);
    }
}
public void submit(List<String> statements){
for(String statement : statements){
if(statement==null||"".equals(statement.trim())){
continue;
}
executeSql(statement);
}
}
private boolean checkShowFragments(String sql){
return sqlManager.checkShowFragments(sql);
}
}
package com.dlink.app.executor;
import org.apache.flink.api.java.utils.ParameterTool;
import java.util.Map;
/**
* ExecutorSetting
*
* @author wenmo
* @since 2021/5/25 13:43
**/
public class ExecutorSetting {
private Integer checkpoint;
private Integer parallelism;
private boolean useSqlFragment;
private String savePointPath;
private String jobName;
private Map<String,String> config;
public static final ExecutorSetting DEFAULT = new ExecutorSetting(0,1,true);
public ExecutorSetting(boolean useSqlFragment) {
this.useSqlFragment = useSqlFragment;
}
public ExecutorSetting(Integer checkpoint) {
this.checkpoint = checkpoint;
}
public ExecutorSetting(Integer checkpoint, boolean useSqlFragment) {
this.checkpoint = checkpoint;
this.useSqlFragment = useSqlFragment;
}
public ExecutorSetting(Integer checkpoint, Integer parallelism, boolean useSqlFragment) {
this.checkpoint = checkpoint;
this.parallelism = parallelism;
this.useSqlFragment = useSqlFragment;
}
public ExecutorSetting(Integer checkpoint, Integer parallelism, boolean useSqlFragment, String savePointPath, String jobName) {
this.checkpoint = checkpoint;
this.parallelism = parallelism;
this.useSqlFragment = useSqlFragment;
this.savePointPath = savePointPath;
this.jobName = jobName;
}
public ExecutorSetting(Integer checkpoint, Integer parallelism, boolean useSqlFragment, String savePointPath) {
this.checkpoint = checkpoint;
this.parallelism = parallelism;
this.useSqlFragment = useSqlFragment;
this.savePointPath = savePointPath;
}
public ExecutorSetting(Integer checkpoint, Integer parallelism, boolean useSqlFragment, String savePointPath, String jobName, Map<String, String> config) {
this.checkpoint = checkpoint;
this.parallelism = parallelism;
this.useSqlFragment = useSqlFragment;
this.savePointPath = savePointPath;
this.jobName = jobName;
this.config = config;
}
public Integer getCheckpoint() {
return checkpoint;
}
public Integer getParallelism() {
return parallelism;
}
public boolean isUseSqlFragment() {
return useSqlFragment;
}
public String getSavePointPath() {
return savePointPath;
}
public String getJobName() {
return jobName;
}
public Map<String, String> getConfig() {
return config;
}
}
package com.dlink.app.executor;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.ExpressionParserException;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.util.StringUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.lang.String.format;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* Flink Sql Fragment Manager
* @author wenmo
* @since 2021/6/7 22:06
**/
@Internal
public final class SqlManager {
private Map<String, String> sqlFragments;
static final String SHOW_FRAGMENTS = "SHOW FRAGMENTS";
public SqlManager() {
sqlFragments = new HashMap<>();
}
/**
* Get names of sql fragments loaded.
*
* @return a list of names of sql fragments loaded
*/
public List<String> listSqlFragments() {
return new ArrayList<>(sqlFragments.keySet());
}
/**
 * Registers a fragment of sql under the given name. The sql fragment name is meant to be
 * unique (the duplicate check below is currently commented out).
 *
 * @param sqlFragmentName name under which to register the given sql fragment
 * @param sqlFragment a fragment of sql to register
 * @throws CatalogException if the registration of the sql fragment under the given name fails
 *         (currently a CatalogException rather than a dedicated SqlException)
 */
public void registerSqlFragment(String sqlFragmentName, String sqlFragment) {
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(sqlFragmentName),
"sql fragment name cannot be null or empty.");
checkNotNull(sqlFragment, "sql fragment cannot be null");
/*if (sqlFragments.containsKey(sqlFragmentName)) {
throw new CatalogException(
format("The fragment of sql %s already exists.", sqlFragmentName));
}*/
sqlFragments.put(sqlFragmentName, sqlFragment);
}
/**
 * Unregisters the fragment of sql registered under the given name.
 *
 * @param sqlFragmentName name under which the sql fragment was registered
 * @param ignoreIfNotExists if false, a CatalogException is thrown when no fragment
 *        with the given name exists
 * @throws CatalogException if the fragment does not exist and ignoreIfNotExists is false
 *         (currently a CatalogException rather than a dedicated SqlException)
 */
public void unregisterSqlFragment(String sqlFragmentName, boolean ignoreIfNotExists) {
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(sqlFragmentName),
"sql fragmentName name cannot be null or empty.");
if (sqlFragments.containsKey(sqlFragmentName)) {
sqlFragments.remove(sqlFragmentName);
} else if (!ignoreIfNotExists) {
throw new CatalogException(
format("The fragment of sql %s does not exist.", sqlFragmentName));
}
}
/**
 * Gets the fragment of sql registered under the given name. The fragment must exist.
 *
 * @param sqlFragmentName name under which the sql fragment was registered
 * @throws CatalogException if no fragment with the given name exists
 */
public String getSqlFragment(String sqlFragmentName) {
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(sqlFragmentName),
"sql fragmentName name cannot be null or empty.");
if (sqlFragments.containsKey(sqlFragmentName)) {
return sqlFragments.get(sqlFragmentName);
} else {
throw new CatalogException(
format("The fragment of sql %s does not exist.", sqlFragmentName));
}
}
/**
 * Gets all registered sql fragments as a name-to-fragment map.
 */
public Map<String, String> getSqlFragment() {
return sqlFragments;
}
public Iterator getSqlFragmentsIterator() {
return sqlFragments.entrySet().iterator();
}
public boolean checkShowFragments(String sql){
return SHOW_FRAGMENTS.equals(sql.trim().toUpperCase());
}
/**
 * Parses fragment definitions ("name:=value") out of the given sql and replaces
 * ${name} references in the remaining statements.
 *
 * @param statement the sql to parse
 * @throws ExpressionParserException if a fragment definition has an empty or missing name
 */
public String parseVariable(String statement) {
if (statement == null || "".equals(statement)) {
return statement;
}
String[] strs = statement.split(";");
StringBuilder sb = new StringBuilder();
for (int i = 0; i < strs.length; i++) {
String str = strs[i].trim();
if (str.length() == 0) {
continue;
}
if (str.contains(":=")) {
String[] strs2 = str.split(":=");
if (strs2.length >= 2) {
if (strs2[0].length() == 0) {
throw new ExpressionParserException("Illegal variable name.");
}
String valueString = str.substring(str.indexOf(":=") + 2);
this.registerSqlFragment(strs2[0], replaceVariable(valueString));
} else {
throw new ExpressionParserException("Illegal variable definition.");
}
} else {
sb.append(replaceVariable(str));
}
}
return sb.toString();
}
/**
 * Replaces ${name} references in the given sql with the registered fragments.
 *
 * @param statement the sql in which to replace variables
 */
private String replaceVariable(String statement) {
String pattern = "\\$\\{(.+?)\\}";
Pattern p = Pattern.compile(pattern);
Matcher m = p.matcher(statement);
StringBuffer sb = new StringBuffer();
while (m.find()) {
String key = m.group(1);
String value = this.getSqlFragment(key);
m.appendReplacement(sb, value == null ? "" : value);
}
m.appendTail(sb);
return sb.toString();
}
}
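A minimal sketch of the fragment syntax parseVariable implements: "name:=value" registers a fragment, and ${name} is substituted in later statements. Note that split(":=") keeps any surrounding spaces in the fragment name, so the definition below is written without spaces:

import com.dlink.app.executor.SqlManager;

public class SqlFragmentExample {
    public static void main(String[] args) {
        SqlManager manager = new SqlManager();
        // "tb:=..." registers the fragment; the definition itself emits no SQL.
        // "${tb}" in the second statement is expanded by replaceVariable.
        String sql = manager.parseVariable(
                "tb:=default_catalog.default_database.my_table;select * from ${tb}");
        System.out.println(sql); // select * from default_catalog.default_database.my_table
    }
}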
package com.dlink.app.flinksql;
import com.dlink.app.constant.AppConstant;
import com.dlink.app.db.DBConfig;
import com.dlink.app.db.DBUtil;
import java.io.IOException;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.List;
/**
* FlinkSQLFactory
*
* @author qiwenkai
* @since 2021/10/27 11:15
**/
public class FlinkSQLFactory {
private static String getQuerySQL(Integer id) throws SQLException {
if (id == null) {
throw new SQLException("请指定任务ID");
}
return "select statement from dlink_task_statement where id = " + id;
}
private static String getFlinkSQLStatement(Integer id, DBConfig config) {
String statement = "";
try {
statement = DBUtil.getOneByID(getQuerySQL(id),config);
} catch (IOException | SQLException e) {
e.printStackTrace();
System.err.println(LocalDateTime.now().toString() + " --> 获取 FlinkSQL 异常,ID 为"+ id );
System.err.println(LocalDateTime.now().toString() + "连接信息为:"+ config.toString() );
System.err.println(LocalDateTime.now().toString() + "异常信息为:"+ e.getMessage() );
}
return statement;
}
public static List<String> getStatements(Integer id, DBConfig config){
return Arrays.asList(getFlinkSQLStatement(id, config).split(AppConstant.FLINKSQL_SEPARATOR));
}
}
@@ -150,5 +150,13 @@
         <include>clickhouse-jdbc-*.jar</include>
     </includes>
 </fileSet>
+<fileSet>
+    <directory>${project.parent.basedir}/dlink-gateway/target</directory>
+    <outputDirectory>lib</outputDirectory>
+    <includes>
+        <include>dlink-gateway-${project.version}.jar</include>
+    </includes>
+</fileSet>
 </fileSets>
</assembly>
\ No newline at end of file
@@ -44,6 +44,11 @@
     </exclusions>
     <version>${flink.version}</version>
 </dependency>
+<dependency>
+    <groupId>org.apache.flink</groupId>
+    <artifactId>flink-yarn_2.11</artifactId>
+    <version>${flink.version}</version>
+</dependency>
 <dependency>
     <groupId>org.slf4j</groupId>
     <artifactId>slf4j-api</artifactId>
...
@@ -39,21 +39,21 @@
     <artifactId>junit</artifactId>
     <scope>provided</scope>
 </dependency>
-<!--<dependency>
+<dependency>
     <groupId>com.dlink</groupId>
     <artifactId>dlink-client-1.12</artifactId>
-    &lt;!&ndash;<scope>provided</scope>&ndash;&gt;
+    <!--<scope>provided</scope>-->
 </dependency>
 <dependency>
     <groupId>com.dlink</groupId>
     <artifactId>dlink-connector-jdbc-1.12</artifactId>
-    &lt;!&ndash;<scope>provided</scope>&ndash;&gt;
+    <!--<scope>provided</scope>-->
 </dependency>
 <dependency>
     <groupId>com.dlink</groupId>
     <artifactId>dlink-function</artifactId>
-    &lt;!&ndash;<scope>provided</scope>&ndash;&gt;
-</dependency>-->
+    <!--<scope>provided</scope>-->
+</dependency>
 </dependencies>
</project>
\ No newline at end of file
package com.dlink.executor;
import com.dlink.assertion.Asserts;
/**
* AbstractExecutor
*
* @author wenmo
* @since 2021/10/22 11:19
**/
public abstract class AbstractExecutor implements Executor {
protected EnvironmentSetting environmentSetting;
protected ExecutorSetting executorSetting;
public Executor setEnvironmentSetting(EnvironmentSetting setting) {
this.environmentSetting=setting;
return this;
}
public EnvironmentSetting getEnvironmentSetting() {
return environmentSetting;
}
public ExecutorSetting getExecutorSetting() {
return executorSetting;
}
public void setExecutorSetting(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting;
}
public boolean canHandle(String version) {
return Asserts.isEqualsIgnoreCase(getVersion(),version);
}
}
@@ -18,7 +18,6 @@ public class EnvironmentSetting {
     private String host;
     private int port;
     private boolean useRemote;
-    private String version;
     public static final EnvironmentSetting LOCAL = new EnvironmentSetting(false);
     public EnvironmentSetting(boolean useRemote) {
@@ -31,20 +30,13 @@
         this.useRemote = true;
     }
-    public EnvironmentSetting(String host, int port, boolean useRemote, String version) {
-        this.host = host;
-        this.port = port;
-        this.useRemote = useRemote;
-        this.version = version;
-    }
     public static EnvironmentSetting build(String address){
         Asserts.checkNull(address, "Flink address cannot be empty");
         String[] strs = address.split(NetConstant.COLON);
         if (strs.length >= 2) {
             return new EnvironmentSetting(strs[0], Integer.parseInt(strs[1]));
         } else {
             return new EnvironmentSetting(strs[0], FlinkConstant.FLINK_REST_DEFAULT_PORT);
         }
     }
...
 package com.dlink.executor;
-import com.dlink.assertion.Asserts;
-import com.dlink.exception.FlinkException;
-import sun.misc.Service;
-import java.util.Iterator;
-import java.util.Optional;
+import com.dlink.executor.custom.CustomTableEnvironmentImpl;
+import com.dlink.result.SqlExplainResult;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.flink.api.common.JobExecutionResult;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.catalog.CatalogManager;
+import org.apache.flink.table.functions.ScalarFunction;
+import org.apache.flink.table.functions.UserDefinedFunction;
+import java.util.Map;
 /**
  * Executor
+ *
  * @author wenmo
- * @since 2021/10/22 11:01
+ * @since 2021/5/25 13:39
  **/
-public interface Executor {
-    static Optional<Executor> get(EnvironmentSetting setting) {
-        Asserts.checkNotNull(setting, "Flink execution config cannot be null");
-        Iterator<Executor> providers = Service.providers(Executor.class);
-        while (providers.hasNext()) {
-            Executor executor = providers.next();
-            if (executor.canHandle(setting.getVersion())) {
-                return Optional.of(executor.setEnvironmentSetting(setting));
-            }
-        }
-        return Optional.empty();
-    }
-    static Executor build(EnvironmentSetting config) {
-        Optional<Executor> optionalExecutor = Executor.get(config);
-        if (!optionalExecutor.isPresent()) {
-            throw new FlinkException("Unsupported Flink version [" + config.getVersion() + "]");
-        }
-        return optionalExecutor.get();
-    }
-    Executor setEnvironmentSetting(EnvironmentSetting setting);
-    EnvironmentSetting getEnvironmentSetting();
-    boolean canHandle(String type);
-    String getVersion();
-    Executor build();
-    Executor build(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting);
-    Executor buildLocalExecutor();
-    Executor buildRemoteExecutor(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting);
+public abstract class Executor {
+    protected StreamExecutionEnvironment environment;
+    protected CustomTableEnvironmentImpl stEnvironment;
+    protected EnvironmentSetting environmentSetting;
+    protected ExecutorSetting executorSetting;
+
+    public static Executor build() {
+        return new LocalStreamExecutor(ExecutorSetting.DEFAULT);
+    }
+    public static Executor build(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting) {
+        if (environmentSetting.isUseRemote()) {
+            return buildRemoteExecutor(environmentSetting, executorSetting);
+        } else {
+            return buildLocalExecutor(executorSetting);
+        }
+    }
+    public static Executor buildLocalExecutor(ExecutorSetting executorSetting) {
+        return new LocalStreamExecutor(executorSetting);
+    }
+    public static Executor buildRemoteExecutor(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting) {
+        environmentSetting.setUseRemote(true);
+        return new RemoteStreamExecutor(environmentSetting, executorSetting);
+    }
+    public StreamExecutionEnvironment getEnvironment() {
+        return environment;
+    }
+    public CustomTableEnvironmentImpl getCustomTableEnvironmentImpl() {
+        return stEnvironment;
+    }
+    public ExecutorSetting getExecutorSetting() {
+        return executorSetting;
+    }
+    public EnvironmentSetting getEnvironmentSetting() {
+        return environmentSetting;
+    }
+    protected void init() {
+        initEnvironment();
+        initStreamExecutionEnvironment();
+    }
+    public void update(ExecutorSetting executorSetting) {
+        updateEnvironment(executorSetting);
+        updateStreamExecutionEnvironment(executorSetting);
+    }
+    private void initEnvironment() {
+        if (executorSetting.getCheckpoint() != null && executorSetting.getCheckpoint() > 0) {
+            environment.enableCheckpointing(executorSetting.getCheckpoint());
+        }
+        if (executorSetting.getParallelism() != null && executorSetting.getParallelism() > 0) {
+            environment.setParallelism(executorSetting.getParallelism());
+        }
+    }
+    private void updateEnvironment(ExecutorSetting executorSetting) {
+        if (executorSetting.getCheckpoint() != null && executorSetting.getCheckpoint() > 0) {
+            environment.enableCheckpointing(executorSetting.getCheckpoint());
+        }
+        if (executorSetting.getParallelism() != null && executorSetting.getParallelism() > 0) {
+            environment.setParallelism(executorSetting.getParallelism());
+        }
+    }
+    private void initStreamExecutionEnvironment() {
+        stEnvironment = CustomTableEnvironmentImpl.create(environment);
+        if (executorSetting.isUseSqlFragment()) {
+            stEnvironment.useSqlFragment();
+        } else {
+            stEnvironment.unUseSqlFragment();
+        }
+        if (executorSetting.getJobName() != null && !"".equals(executorSetting.getJobName())) {
+            stEnvironment.getConfig().getConfiguration().setString("pipeline.name", executorSetting.getJobName());
+        }
+        if (executorSetting.getConfig() != null) {
+            for (Map.Entry<String, String> entry : executorSetting.getConfig().entrySet()) {
+                stEnvironment.getConfig().getConfiguration().setString(entry.getKey(), entry.getValue());
+            }
+        }
+    }
+    private void updateStreamExecutionEnvironment(ExecutorSetting executorSetting) {
+        copyCatalog();
+        if (executorSetting.isUseSqlFragment()) {
+            stEnvironment.useSqlFragment();
+        } else {
+            stEnvironment.unUseSqlFragment();
+        }
+        if (executorSetting.getJobName() != null && !"".equals(executorSetting.getJobName())) {
+            stEnvironment.getConfig().getConfiguration().setString("pipeline.name", executorSetting.getJobName());
+        }
+        if (executorSetting.getConfig() != null) {
+            for (Map.Entry<String, String> entry : executorSetting.getConfig().entrySet()) {
+                stEnvironment.getConfig().getConfiguration().setString(entry.getKey(), entry.getValue());
+            }
+        }
+    }
+    private void copyCatalog() {
+        String[] catalogs = stEnvironment.listCatalogs();
+        CustomTableEnvironmentImpl newstEnvironment = CustomTableEnvironmentImpl.create(environment);
+        for (int i = 0; i < catalogs.length; i++) {
+            if (stEnvironment.getCatalog(catalogs[i]).isPresent()) {
+                newstEnvironment.getCatalogManager().unregisterCatalog(catalogs[i], true);
+                newstEnvironment.registerCatalog(catalogs[i], stEnvironment.getCatalog(catalogs[i]).get());
+            }
+        }
+        stEnvironment = newstEnvironment;
+    }
+    public JobExecutionResult execute(String jobName) throws Exception {
+        return stEnvironment.execute(jobName);
+    }
+    public TableResult executeSql(String statement) {
+        return stEnvironment.executeSql(statement);
+    }
+    public Table sqlQuery(String statement) {
+        return stEnvironment.sqlQuery(statement);
+    }
+    public String explainSql(String statement, ExplainDetail... extraDetails) {
+        return stEnvironment.explainSql(statement, extraDetails);
+    }
+    public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
+        return stEnvironment.explainSqlRecord(statement, extraDetails);
+    }
+    public ObjectNode getStreamGraph(String statement) {
+        return stEnvironment.getStreamGraph(statement);
+    }
+    public void registerFunction(String name, ScalarFunction function) {
+        stEnvironment.registerFunction(name, function);
+    }
+    public void createTemporarySystemFunction(String name, Class<? extends UserDefinedFunction> var2) {
+        stEnvironment.createTemporarySystemFunction(name, var2);
+    }
+    public CatalogManager getCatalogManager() {
+        return stEnvironment.getCatalogManager();
+    }
 }
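Since build() now routes on EnvironmentSetting rather than an SPI lookup, a short usage sketch of the reworked API (the address and settings are placeholders):

import com.dlink.executor.EnvironmentSetting;
import com.dlink.executor.Executor;
import com.dlink.executor.ExecutorSetting;

public class ExecutorBuildExample {
    public static void main(String[] args) {
        // Local executor with the default settings.
        Executor local = Executor.build();
        // Remote executor against a hypothetical JobManager address;
        // build(...) dispatches on EnvironmentSetting.isUseRemote().
        Executor remote = Executor.build(
                EnvironmentSetting.build("127.0.0.1:8081"), ExecutorSetting.DEFAULT);
        remote.executeSql("SHOW DATABASES");
    }
}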
package com.dlink.executor;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
* LocalStreamExecutor
*
* @author wenmo
* @since 2021/5/25 13:48
**/
public class LocalStreamExecutor extends Executor {
public LocalStreamExecutor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting;
this.environment = StreamExecutionEnvironment.createLocalEnvironment();
init();
}
}
package com.dlink.executor;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
* RemoteStreamExecutor
*
* @author wenmo
* @since 2021/5/25 14:05
**/
public class RemoteStreamExecutor extends Executor {
public RemoteStreamExecutor(EnvironmentSetting environmentSetting,ExecutorSetting executorSetting) {
this.environmentSetting = environmentSetting;
this.executorSetting = executorSetting;
this.environment = StreamExecutionEnvironment.createRemoteEnvironment(environmentSetting.getHost(), environmentSetting.getPort());
init();
}
}
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
<version>0.3.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-gateway</artifactId>
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-common</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-client-1.12</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package com.dlink.gateway;
import org.apache.flink.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AbstractGateway
*
* @author wenmo
* @since 2021/10/29
**/
public abstract class AbstractGateway implements Gateway {
protected static final Logger logger = LoggerFactory.getLogger(AbstractGateway.class);
protected GatewayConfig config;
protected Configuration configuration;
public AbstractGateway() {
}
public AbstractGateway(GatewayConfig config) {
this.config = config;
}
@Override
public boolean canHandle(GatewayType type) {
return type == getType();
}
@Override
public void setGatewayConfig(GatewayConfig config) {
this.config = config;
}
protected abstract void init();
}
package com.dlink.gateway;
import com.dlink.assertion.Asserts;
import com.dlink.gateway.exception.GatewayException;
import com.dlink.gateway.result.GatewayResult;
import org.apache.flink.runtime.jobgraph.JobGraph;
import sun.misc.Service;
import java.util.Iterator;
import java.util.Optional;
/**
* Gateway
*
* @author wenmo
* @since 2021/10/29
**/
public interface Gateway {
static Optional<Gateway> get(GatewayConfig config){
Asserts.checkNotNull(config, "GatewayConfig cannot be null");
Iterator<Gateway> providers = Service.providers(Gateway.class);
while(providers.hasNext()) {
Gateway gateway = providers.next();
if(gateway.canHandle(config.getType())){
gateway.setGatewayConfig(config);
return Optional.of(gateway);
}
}
return Optional.empty();
}
static Gateway build(GatewayConfig config){
Optional<Gateway> optionalGateway = Gateway.get(config);
if(!optionalGateway.isPresent()){
throw new GatewayException("Unsupported Flink Gateway type [" + config.getType().getLongValue() + "]; please add the corresponding extension package");
}
return optionalGateway.get();
}
boolean canHandle(GatewayType type);
GatewayType getType();
void setGatewayConfig(GatewayConfig config);
GatewayResult submitJobGraph(JobGraph jobGraph);
GatewayResult submitJar();
}
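One caveat worth noting: sun.misc.Service is a JDK-internal API that no longer exists on JDK 9+; java.util.ServiceLoader is the supported equivalent, and both read the same META-INF/services registration file shown near the end of this commit.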
package com.dlink.gateway;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.Getter;
import lombok.Setter;
import java.util.Arrays;
/**
* GatewayConfig
*
* @author wenmo
* @since 2021/10/29
**/
@Getter
@Setter
public class GatewayConfig {
private GatewayType type;
private String jobName;
private String configDir;
private String userJarPath;
private String[] userJarParas;
private String userJarMainAppClass;
private String savePoint;
public GatewayConfig() {
}
public GatewayConfig(GatewayType type, String jobName, String configDir, String userJarPath, String[] userJarParas, String userJarMainAppClass, String savePoint) {
this.type = type;
this.jobName = jobName;
this.configDir = configDir;
this.userJarPath = userJarPath;
this.userJarParas = userJarParas;
this.userJarMainAppClass = userJarMainAppClass;
this.savePoint = savePoint;
}
public static GatewayConfig build(JsonNode para){
GatewayConfig config = new GatewayConfig();
config.setType(GatewayType.get(para.get("type").asText()));
if(para.has("jobName")) {
config.setJobName(para.get("jobName").asText());
}
if(para.has("configDir")) {
config.setConfigDir(para.get("configDir").asText());
}
if(para.has("userJarPath")) {
config.setUserJarPath(para.get("userJarPath").asText());
}
if(para.has("userJarParas")) {
config.setUserJarParas(para.get("userJarParas").asText().split("\\s+"));
}
if(para.has("userJarMainAppClass")) {
config.setUserJarMainAppClass(para.get("userJarMainAppClass").asText());
}
if(para.has("savePoint")) {
config.setSavePoint(para.get("savePoint").asText());
}
return config;
}
@Override
public String toString() {
return "GatewayConfig{" +
"type=" + type +
", jobName='" + jobName + '\'' +
", configDir='" + configDir + '\'' +
", userJarPath='" + userJarPath + '\'' +
", userJarParas=" + Arrays.toString(userJarParas) +
", userJarMainAppClass='" + userJarMainAppClass + '\'' +
", savePoint='" + savePoint + '\'' +
'}';
}
}
package com.dlink.gateway;
import com.dlink.assertion.Asserts;
/**
* GatewayType
*
* @author wenmo
* @since 2021/10/29
**/
public enum GatewayType {
YARN_APPLICATION("ya","yarn-application");
private String value;
private String longValue;
GatewayType(String value, String longValue){
this.value = value;
this.longValue = longValue;
}
public String getValue() {
return value;
}
public String getLongValue() {
return longValue;
}
public static GatewayType get(String value){
for (GatewayType type : GatewayType.values()) {
if(Asserts.isEquals(type.getValue(),value)||Asserts.isEquals(type.getLongValue(),value)){
return type;
}
}
return GatewayType.YARN_APPLICATION;
}
}
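Note that get() never fails: any unrecognized value silently falls back to YARN_APPLICATION, as this sketch shows:

import com.dlink.gateway.GatewayType;

public class GatewayTypeExample {
    public static void main(String[] args) {
        System.out.println(GatewayType.get("ya"));               // YARN_APPLICATION (short value)
        System.out.println(GatewayType.get("yarn-application")); // YARN_APPLICATION (long value)
        System.out.println(GatewayType.get("k8s-session"));      // also YARN_APPLICATION: unknown values fall through
    }
}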
package com.dlink.gateway.exception;
/**
* GatewayException
*
* @author wenmo
* @since 2021/10/29
**/
public class GatewayException extends RuntimeException {
public GatewayException(String message, Throwable cause) {
super(message, cause);
}
public GatewayException(String message) {
super(message);
}
}
\ No newline at end of file
package com.dlink.gateway.result;
import com.dlink.gateway.GatewayType;
import lombok.Getter;
import lombok.Setter;
import java.time.LocalDateTime;
/**
* AbstractGatewayResult
*
* @author qiwenkai
* @since 2021/10/29 15:44
**/
@Setter
@Getter
public abstract class AbstractGatewayResult implements GatewayResult {
protected String jobId;
protected GatewayType type;
protected String savePointPath;
protected LocalDateTime startTime;
protected LocalDateTime endTime;
protected boolean isSuccess;
protected String exceptionMsg;
public AbstractGatewayResult(GatewayType type, LocalDateTime startTime) {
this.type = type;
this.startTime = startTime;
}
public AbstractGatewayResult(String jobId, String savePointPath, LocalDateTime startTime, LocalDateTime endTime, boolean isSuccess, String exceptionMsg) {
this.jobId = jobId;
this.savePointPath = savePointPath;
this.startTime = startTime;
this.endTime = endTime;
this.isSuccess = isSuccess;
this.exceptionMsg = exceptionMsg;
}
public void success(){
this.isSuccess = true;
this.endTime = LocalDateTime.now();
}
public void fail(String error){
this.isSuccess = false;
this.endTime = LocalDateTime.now();
this.exceptionMsg = error;
}
}
package com.dlink.gateway.result;
/**
* GatewayResult
*
* @author qiwenkai
* @since 2021/10/29 15:39
**/
public interface GatewayResult {
}
package com.dlink.gateway.result;
import com.dlink.gateway.GatewayType;
import lombok.Getter;
import lombok.Setter;
import java.time.LocalDateTime;
/**
* YarnResult
*
* @author qiwenkai
* @since 2021/10/29 15:49
**/
@Getter
@Setter
public class YarnResult extends AbstractGatewayResult {
private String appId;
private String webURL;
public YarnResult(GatewayType type, LocalDateTime startTime) {
super(type, startTime);
}
public YarnResult(String appId, String jobId, String savePointPath, LocalDateTime startTime, LocalDateTime endTime, boolean isSuccess, String exceptionMsg) {
super(jobId, savePointPath, startTime, endTime, isSuccess, exceptionMsg);
this.appId = appId;
}
public static YarnResult build(GatewayType type){
return new YarnResult(type,LocalDateTime.now());
}
}
package com.dlink.gateway.yarn;
import com.dlink.assertion.Asserts;
import com.dlink.gateway.GatewayConfig;
import com.dlink.gateway.GatewayType;
import com.dlink.gateway.result.GatewayResult;
import com.dlink.gateway.result.YarnResult;
import org.apache.flink.client.deployment.ClusterClientFactory;
import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
import org.apache.flink.client.deployment.application.ApplicationConfiguration;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.configuration.DeploymentOptions;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.SavepointConfigOptions;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.entrypoint.YarnApplicationClusterEntryPoint;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import java.util.Collections;
/**
* YarnApplicationGateway
*
* @author wenmo
* @since 2021/10/29
**/
public class YarnApplicationGateway extends YarnGateway {
public YarnApplicationGateway(GatewayConfig config) {
super(config);
}
public YarnApplicationGateway() {
}
@Override
public GatewayType getType() {
return GatewayType.YARN_APPLICATION;
}
@Override
public void init() {
configuration = GlobalConfiguration.loadConfiguration(config.getConfigDir());
configuration.set(DeploymentOptions.TARGET, getType().getLongValue());
if(Asserts.isNotNullString(config.getSavePoint())) {
configuration.setString(SavepointConfigOptions.SAVEPOINT_PATH, config.getSavePoint());
}
clientServiceLoader = new DefaultClusterClientServiceLoader();
}
@Override
public GatewayResult submitJobGraph(JobGraph jobGraph) {
init();
YarnResult result = YarnResult.build(getType());
final ClusterClientFactory clientFactory = clientServiceLoader.getClusterClientFactory(configuration);
try (final YarnClusterDescriptor clusterDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(configuration)) {
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
ClusterClientProvider<ApplicationId> clusterClientProvider = clusterDescriptor.deployInternal(
clusterSpecification,
config.getJobName(),
YarnApplicationClusterEntryPoint.class.getName(),
jobGraph,
false);
ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
ApplicationId applicationId = clusterClient.getClusterId();
result.setAppId(applicationId.toString());
result.setWebURL(clusterClient.getWebInterfaceURL());
result.success();
}catch (Exception e){
e.printStackTrace();
logger.error(e.getMessage());
result.fail(e.getMessage());
}
return result;
}
@Override
public GatewayResult submitJar() {
init();
YarnResult result = YarnResult.build(getType());
logger.warn(config.toString());
configuration.set(PipelineOptions.JARS, Collections.singletonList(config.getUserJarPath()));
configuration.set(YarnConfigOptions.APPLICATION_NAME, config.getJobName());
ApplicationConfiguration appConfig = new ApplicationConfiguration(config.getUserJarParas(), config.getUserJarMainAppClass());
final ClusterClientFactory clientFactory = clientServiceLoader.getClusterClientFactory(configuration);
try (final YarnClusterDescriptor clusterDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(configuration)) {
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
ClusterClientProvider<ApplicationId> clusterClientProvider = clusterDescriptor.deployApplicationCluster(
clusterSpecification,
appConfig);
ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
ApplicationId applicationId = clusterClient.getClusterId();
result.setAppId(applicationId.toString());
result.setWebURL(clusterClient.getWebInterfaceURL());
result.success();
}catch (Exception e){
e.printStackTrace();
logger.error(e.getMessage());
result.fail(e.getMessage());
}
return result;
}
}
package com.dlink.gateway.yarn;
import com.dlink.gateway.AbstractGateway;
import com.dlink.gateway.GatewayConfig;
import org.apache.flink.client.deployment.ClusterClientFactory;
import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
/**
* YarnGateway
*
* @author wenmo
* @since 2021/10/29
**/
public abstract class YarnGateway extends AbstractGateway {
protected DefaultClusterClientServiceLoader clientServiceLoader;
public YarnGateway() {
}
public YarnGateway(GatewayConfig config) {
super(config);
}
public void init(){}
}
com.dlink.gateway.yarn.YarnApplicationGateway
\ No newline at end of file
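The single line above is presumably the service registration file (META-INF/services/com.dlink.gateway.Gateway) through which Gateway.get discovers YarnApplicationGateway via Service.providers(Gateway.class).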
package com.dlink.gateway;
import org.junit.Test;
/**
* GatewayTest
*
* @author qiwenkai
* @since 2021/10/29 17:06
**/
public class GatewayTest {
@Test
public void getTest(){
GatewayConfig config = new GatewayConfig();
config.setJobName("apptest");
config.setType(GatewayType.get("yarn-application"));
config.setConfigDir("/opt/src/flink-1.12.2_pj/conf");
config.setUserJarPath("hdfs:///flink12/jar/currencyAppJar.jar");
config.setUserJarParas("--id 2410,2412,2411".split("\\s+"));
config.setUserJarMainAppClass("com.app.MainApp");
String longValue = Gateway.build(config).getType().getLongValue();
System.out.println(longValue);
}
}
@@ -50,6 +50,9 @@ const StudioProcess = (props: any) => {
     (row.state == 'INITIALIZE') ?
       (<Tag icon={<ClockCircleOutlined />} color="default">
         INITIALIZE
+      </Tag>) : (row.state == 'RESTARTING') ?
+      (<Tag icon={<ClockCircleOutlined />} color="default">
+        RESTARTING
       </Tag>) :
       (<Tag color="default">
         UNKNOWEN
...
@@ -10,17 +10,20 @@
     <version>0.3.2</version>
     <modules>
-        <module>dlink-core</module>
-        <module>dlink-connectors</module>
-        <module>dlink-executor</module>
+        <module>dlink-common</module>
         <module>dlink-client</module>
         <module>dlink-function</module>
-        <module>dlink-common</module>
         <module>dlink-metadata</module>
+        <module>dlink-gateway</module>
+        <module>dlink-connectors</module>
+        <module>dlink-executor</module>
         <module>dlink-extends</module>
+        <module>dlink-core</module>
+        <module>dlink-app</module>
         <module>dlink-web</module>
         <module>dlink-admin</module>
         <module>dlink-assembly</module>
     </modules>
     <properties>
@@ -204,6 +207,11 @@
         <artifactId>dlink-metadata-mysql</artifactId>
         <version>${project.version}</version>
     </dependency>
+    <dependency>
+        <groupId>com.dlink</groupId>
+        <artifactId>dlink-gateway</artifactId>
+        <version>${project.version}</version>
+    </dependency>
 </dependencies>
 </dependencyManagement>
 <build>
...