Commit 527e57d6 authored by godkaikai

yarn-application and yarn per-job

parent cdc26038
@@ -118,8 +118,19 @@
         <dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-gateway</artifactId>
-            <!--<scope>provided</scope>-->
+            <scope>provided</scope>
         </dependency>
+        <!--<dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-yarn_2.11</artifactId>
+            <version>1.12.5</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-yarn-common</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>-->
         <!--<dependency>
             <groupId>com.dlink</groupId>
             <artifactId>dlink-metadata-mysql</artifactId>
...
@@ -105,5 +105,13 @@ public class TaskController {
         Task task = taskService.getTaskInfoById(id);
         return Result.succeed(task,"Fetched successfully");
     }
+
+    /**
+     * Submit a job
+     */
+    @GetMapping(value = "/submitApplication")
+    public Result submitApplicationByTaskId(@RequestParam Integer id) {
+        return taskService.submitApplicationByTaskId(id);
+    }
 }
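
The new endpoint simply delegates to TaskService. A hedged client-side sketch of calling it over HTTP -- the host, port, and the "/api/task" prefix are assumptions, since the controller's class-level @RequestMapping is not shown in this diff:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SubmitApplicationDemo {
        public static void main(String[] args) throws Exception {
            // Assumed URL: adjust host, port, and path prefix to the actual deployment.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8888/api/task/submitApplication?id=1"))
                    .GET()
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body()); // JSON-serialized Result wrapping a SubmitResult
        }
    }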
 package com.dlink.service;

+import com.dlink.common.result.Result;
 import com.dlink.db.service.ISuperService;
 import com.dlink.job.JobResult;
 import com.dlink.model.Task;
@@ -16,6 +17,8 @@ public interface TaskService extends ISuperService<Task> {

     JobResult submitByTaskId(Integer id);

+    Result submitApplicationByTaskId(Integer id);
+
     Task getTaskInfoById(Integer id);

     boolean saveOrUpdateTask(Task task);
...
@@ -3,11 +3,14 @@ package com.dlink.service.impl;

 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.dlink.assertion.Assert;
 import com.dlink.cluster.FlinkCluster;
+import com.dlink.common.result.Result;
 import com.dlink.constant.FlinkConstant;
 import com.dlink.db.service.impl.SuperServiceImpl;
 import com.dlink.exception.BusException;
 import com.dlink.executor.Executor;
 import com.dlink.executor.ExecutorSetting;
+import com.dlink.gateway.GatewayConfig;
+import com.dlink.gateway.GatewayType;
 import com.dlink.job.JobConfig;
 import com.dlink.job.JobManager;
 import com.dlink.job.JobResult;
@@ -51,6 +54,24 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
         return jobManager.executeSql(statement.getStatement());
     }

+    @Override
+    public Result submitApplicationByTaskId(Integer id) {
+        Task task = this.getById(id);
+        Assert.check(task);
+        Statement statement = statementService.getById(id);
+        Assert.check(statement);
+        JobConfig config = task.buildSubmitConfig();
+        GatewayConfig gatewayConfig = new GatewayConfig();
+        gatewayConfig.setJobName(config.getJobName());
+        gatewayConfig.setType(GatewayType.YARN_PER_JOB);
+        gatewayConfig.setFlinkConfigPath("/opt/src/flink-1.12.2_pj/conf");
+        gatewayConfig.setFlinkLibs("hdfs:///flink12/lib/flinklib");
+        gatewayConfig.setYarnConfigPath("/usr/local/hadoop/hadoop-2.7.7/etc/hadoop/yarn-site.xml");
+        JobManager jobManager = JobManager.build(config);
+        SubmitResult result = jobManager.submitGraph(statement.getStatement(), gatewayConfig);
+        return Result.succeed(result,"Submitted successfully");
+    }
+
     @Override
     public Task getTaskInfoById(Integer id) {
         Task task = this.getById(id);
...
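
The three gateway paths above are hard-coded to one specific environment. A hedged sketch of how they could be externalized in a Spring application instead -- the property names and this class are hypothetical, not part of this commit:

    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class GatewayProperties {

        // Hypothetical property keys; defaults mirror the hard-coded values above.
        @Value("${gateway.flink.config-path:/opt/src/flink-1.12.2_pj/conf}")
        private String flinkConfigPath;

        @Value("${gateway.flink.libs:hdfs:///flink12/lib/flinklib}")
        private String flinkLibs;

        @Value("${gateway.yarn.config-path:/usr/local/hadoop/hadoop-2.7.7/etc/hadoop/yarn-site.xml}")
        private String yarnConfigPath;

        public String getFlinkConfigPath() { return flinkConfigPath; }
        public String getFlinkLibs() { return flinkLibs; }
        public String getYarnConfigPath() { return yarnConfigPath; }
    }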
@@ -48,7 +48,55 @@
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-yarn_2.11</artifactId>
             <version>${flink.version}</version>
+            <!--<scope>provided</scope>-->
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-yarn-common</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-common</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-hdfs</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-yarn-client</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-mapreduce-client-core</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
+        <!--<dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-yarn_2.11</artifactId>
+            <version>${flink.version}</version>
+            &lt;!&ndash;<exclusions>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>servlet-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.servlet.jsp</groupId>
+                    <artifactId>jsp-api</artifactId>
+                </exclusion>
+            </exclusions>&ndash;&gt;
+            &lt;!&ndash;<exclusions>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-common</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-yarn-client</artifactId>
+                </exclusion>
+            </exclusions>&ndash;&gt;
+        </dependency>-->
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
...
@@ -7,11 +7,13 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.dag.Transformation;
+import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.graph.JSONGenerator;
 import org.apache.flink.streaming.api.graph.StreamGraph;
 import org.apache.flink.table.api.EnvironmentSettings;
 import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.StatementSet;
 import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableConfig;
 import org.apache.flink.table.api.TableException;
@@ -22,6 +24,7 @@ import org.apache.flink.table.catalog.FunctionCatalog;
 import org.apache.flink.table.catalog.GenericInMemoryCatalog;
 import org.apache.flink.table.delegation.Executor;
 import org.apache.flink.table.delegation.ExecutorFactory;
+import org.apache.flink.table.delegation.Parser;
 import org.apache.flink.table.delegation.Planner;
 import org.apache.flink.table.delegation.PlannerFactory;
 import org.apache.flink.table.factories.ComponentFactoryService;
@@ -157,6 +160,39 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
         }
     }

+    public JobGraph getJobGraphFromInserts(List<String> statements) {
+        List<ModifyOperation> modifyOperations = new ArrayList<>();
+        for (String statement : statements) {
+            if (useSqlFragment) {
+                statement = sqlManager.parseVariable(statement);
+                if (statement.length() == 0) {
+                    throw new TableException("This is a sql fragment.");
+                }
+            }
+            if (checkShowFragments(statement)) {
+                throw new TableException("'SHOW FRAGMENTS' can't be added as an insert.");
+            }
+            List<Operation> operations = getParser().parse(statement);
+            if (operations.size() != 1) {
+                throw new TableException("Only single statement is supported.");
+            } else {
+                Operation operation = operations.get(0);
+                if (operation instanceof ModifyOperation) {
+                    modifyOperations.add((ModifyOperation) operation);
+                } else {
+                    throw new TableException("Only insert statement is supported now.");
+                }
+            }
+        }
+        List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
+        if (execEnv instanceof ExecutorBase) {
+            StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
+            return streamGraph.getJobGraph();
+        } else {
+            throw new TableException("Unsupported SQL query! execEnv must be an ExecutorBase.");
+        }
+    }
+
     public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
         SqlExplainResult record = new SqlExplainResult();
         if(useSqlFragment) {
@@ -277,4 +313,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
         this.functionCatalog.registerTempSystemAggregateFunction(name, tableAggregateFunction, typeInfo, accTypeInfo);
     }

+    public Parser getParser(){
+        return super.parser;
+    }
 }
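
The new method is the heart of the per-job submission path: each INSERT is parsed into a ModifyOperation, the planner translates the whole batch into Transformations, and the resulting StreamGraph yields a JobGraph that is never executed locally. A hedged usage sketch, assuming env is an initialized CustomTableEnvironmentImpl with the referenced tables already registered:

    // Build one JobGraph covering several INSERTs (parse -> translate -> StreamGraph -> JobGraph).
    List<String> inserts = Arrays.asList(
            "INSERT INTO sink_a SELECT * FROM source_a",
            "INSERT INTO sink_b SELECT * FROM source_b");
    JobGraph jobGraph = env.getJobGraphFromInserts(inserts);
    // The graph is then handed to a Gateway for YARN submission (see JobManager.submitGraph below).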
@@ -3,9 +3,13 @@ package com.dlink.executor.custom;

 import org.apache.flink.annotation.Internal;
 import org.apache.flink.table.api.DataTypes;
 import org.apache.flink.table.api.ExpressionParserException;
+import org.apache.flink.table.api.StatementSet;
 import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.catalog.exceptions.CatalogException;
+import org.apache.flink.table.operations.ModifyOperation;
+import org.apache.flink.table.operations.Operation;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.StringUtils;
@@ -26,6 +30,8 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 public final class SqlManager {

     private Map<String, String> sqlFragments;
+    private List<ModifyOperation> operations = new ArrayList<>();
+
     static final String SHOW_FRAGMENTS = "SHOW FRAGMENTS";

     public SqlManager() {
@@ -191,4 +197,18 @@ public final class SqlManager {
         m.appendTail(sb);
         return sb.toString();
     }
+
+    public void addInsertSql(String statement, CustomTableEnvironmentImpl tableEnvironment) {
+        List<Operation> operations = tableEnvironment.getParser().parse(statement);
+        if (operations.size() != 1) {
+            throw new TableException("Only single statement is supported.");
+        } else {
+            Operation operation = operations.get(0);
+            if (operation instanceof ModifyOperation) {
+                this.operations.add((ModifyOperation) operation);
+            } else {
+                throw new TableException("Only insert statement is supported now.");
+            }
+        }
+    }
 }
@@ -55,5 +55,10 @@
             <artifactId>dlink-function</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>com.dlink</groupId>
+            <artifactId>dlink-gateway</artifactId>
+            <scope>provided</scope>
+        </dependency>
     </dependencies>
 </project>
\ No newline at end of file
@@ -4,8 +4,10 @@ import com.dlink.executor.custom.CustomTableEnvironmentImpl;
 import com.dlink.result.SqlExplainResult;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.JobExecutionResult;
+import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.table.api.ExplainDetail;
+import org.apache.flink.table.api.StatementSet;
 import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.catalog.CatalogManager;
@@ -13,6 +15,7 @@ import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.UserDefinedFunction;

 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;

 /**
@@ -173,4 +176,8 @@ public abstract class Executor {
     public CatalogManager getCatalogManager(){
         return stEnvironment.getCatalogManager();
     }
+
+    public JobGraph getJobGraphFromInserts(List<String> statements){
+        return stEnvironment.getJobGraphFromInserts(statements);
+    }
 }
@@ -7,6 +7,9 @@ import com.dlink.executor.Executor;
 import com.dlink.executor.ExecutorSetting;
 import com.dlink.executor.custom.CustomTableEnvironmentImpl;
 import com.dlink.explainer.Explainer;
+import com.dlink.gateway.Gateway;
+import com.dlink.gateway.GatewayConfig;
+import com.dlink.gateway.result.GatewayResult;
 import com.dlink.interceptor.FlinkInterceptor;
 import com.dlink.parser.SqlType;
 import com.dlink.result.*;
@@ -17,9 +20,12 @@ import com.dlink.session.SessionPool;
 import com.dlink.trans.Operations;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.flink.api.common.JobID;
+import org.apache.flink.runtime.jobgraph.JobGraph;
+import org.apache.flink.table.api.StatementSet;
 import org.apache.flink.table.api.TableResult;

 import java.time.LocalDateTime;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -104,7 +110,9 @@ public class JobManager extends RunTime {
     }

     private void initEnvironmentSetting(){
-        environmentSetting = EnvironmentSetting.build(config.getAddress());
+        if(Asserts.isNotNullString(config.getAddress())) {
+            environmentSetting = EnvironmentSetting.build(config.getAddress());
+        }
     }

     private void initExecutorSetting(){
@@ -203,6 +211,68 @@ public class JobManager extends RunTime {
         return result;
     }

+    public SubmitResult submitGraph(String statement, GatewayConfig gatewayConfig) {
+        if (statement == null || "".equals(statement)) {
+            return SubmitResult.error("The FlinkSql statement does not exist");
+        }
+        String[] statements = statement.split(FlinkSQLConstant.SEPARATOR);
+        List<String> sqlList = Arrays.asList(statements);
+        SubmitResult result = new SubmitResult(null, sqlList, null, executorSetting.getJobName());
+        int currentIndex = 0;
+        try {
+            if (Asserts.isNullCollection(sqlList)) {
+                result.setSuccess(false);
+                result.setMsg(LocalDateTime.now().toString() + ": the SQL statements to execute are empty.");
+                return result;
+            }
+            Executor executor = createExecutor();
+            List<String> inserts = new ArrayList<>();
+            long start = System.currentTimeMillis();
+            for (String sqlText : sqlList) {
+                currentIndex++;
+                SqlType operationType = Operations.getOperationType(sqlText);
+                CustomTableEnvironmentImpl stEnvironment = executor.getCustomTableEnvironmentImpl();
+                if (operationType.equals(SqlType.INSERT)) {
+                    if (!FlinkInterceptor.build(stEnvironment, sqlText)) {
+                        inserts.add(sqlText);
+                    }
+                } else if (operationType.equals(SqlType.SET)) {
+                    // SET statements are currently ignored here.
+                } else {
+                    if (!FlinkInterceptor.build(stEnvironment, sqlText)) {
+                        executor.executeSql(sqlText);
+                    }
+                }
+            }
+            JobGraph jobGraph = executor.getJobGraphFromInserts(inserts);
+            GatewayResult gatewayResult = Gateway.build(gatewayConfig).submitJobGraph(jobGraph);
+            long finish = System.currentTimeMillis();
+            long timeElapsed = finish - start;
+            InsertResult insertResult = new InsertResult(gatewayResult.getAppId(), true);
+            result.setResult(insertResult);
+            result.setJobId(gatewayResult.getAppId());
+            result.setTime(timeElapsed);
+            result.setSuccess(true);
+            result.setFinishDate(LocalDateTime.now());
+        } catch (Exception e) {
+            e.printStackTrace();
+            StackTraceElement[] trace = e.getStackTrace();
+            StringBuilder resMsg = new StringBuilder();
+            for (StackTraceElement s : trace) {
+                resMsg.append(" \n " + s + " ");
+            }
+            result.setSuccess(false);
+            result.setError(LocalDateTime.now().toString() + ": an exception occurred while running statement " + currentIndex + ": " + e.getMessage() + "\n >>>Stack Trace<<<" + resMsg.toString());
+            return result;
+        }
+        result.setSuccess(true);
+        result.setMsg(LocalDateTime.now().toString() + ": job submitted successfully!");
+        return result;
+    }
+
     public JobResult executeSql(String statement) {
         Job job = new Job(config,environmentSetting.getAddress(),
                 Job.JobStatus.INITIALIZE,statement,executorSetting, LocalDateTime.now(),executor);
...
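
submitGraph is the counterpart on the JobManager side: it splits the script on the statement separator, executes DDL locally, collects the INSERTs, and ships the combined JobGraph through the gateway. A hedged usage sketch -- paths are illustrative, and config is assumed to be a prepared JobConfig:

    GatewayConfig gatewayConfig = new GatewayConfig();
    gatewayConfig.setType(GatewayType.YARN_PER_JOB);
    gatewayConfig.setJobName("demo");
    gatewayConfig.setFlinkConfigPath("/opt/flink/conf");               // assumed
    gatewayConfig.setFlinkLibs("hdfs:///flink/lib");                   // assumed
    gatewayConfig.setYarnConfigPath("/etc/hadoop/conf/yarn-site.xml"); // assumed

    JobManager jobManager = JobManager.build(config);   // config: a prepared JobConfig
    SubmitResult result = jobManager.submitGraph(
            "INSERT INTO sink SELECT * FROM source;", gatewayConfig);
    System.out.println(result.getJobId());              // assumed getter: the YARN application id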
@@ -44,7 +44,18 @@
             <artifactId>dlink-client-1.12</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-yarn_2.11</artifactId>
+            <version>1.12.5</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-yarn-common</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
 </project>
\ No newline at end of file
package com.dlink.gateway;

/**
 * ConfigPara
 *
 * @author wenmo
 * @since 2021/11/2
 **/
public class ConfigPara {

    private String key;
    private String value;

    public ConfigPara() {
    }

    public ConfigPara(String key, String value) {
        this.key = key;
        this.value = value;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
@@ -8,6 +8,7 @@ import sun.misc.Service;

 import java.util.Iterator;
 import java.util.Optional;
+import java.util.ServiceLoader;

 /**
  * Submitter
@@ -19,9 +20,10 @@ public interface Gateway {

     static Optional<Gateway> get(GatewayConfig config){
         Asserts.checkNotNull(config,"Config cannot be null");
-        Iterator<Gateway> providers = Service.providers(Gateway.class);
-        while(providers.hasNext()) {
-            Gateway gateway = providers.next();
+        ServiceLoader<Gateway> loader = ServiceLoader.load(Gateway.class);
+        Iterator<Gateway> iterator = loader.iterator();
+        while(iterator.hasNext()) {
+            Gateway gateway = iterator.next();
             if(gateway.canHandle(config.getType())){
                 gateway.setGatewayConfig(config);
                 return Optional.of(gateway);
@@ -48,4 +50,6 @@ public interface Gateway {

     GatewayResult submitJar();

+    GatewayResult savepoint();
+
 }
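
Replacing sun.misc.Service (a JDK-internal API) with java.util.ServiceLoader keeps the same SPI contract: implementations are discovered from META-INF/services/com.dlink.gateway.Gateway, which this commit extends with the per-job gateway (see the services file near the end of the diff). A minimal caller sketch, assuming a config whose type matches a registered provider:

    // Resolve a gateway through the JDK SPI; JobManager.submitGraph uses the
    // (presumably throwing) Gateway.build(config) variant instead of get().
    GatewayConfig config = new GatewayConfig();
    config.setType(GatewayType.YARN_PER_JOB);
    Optional<Gateway> gateway = Gateway.get(config);
    gateway.ifPresent(g -> System.out.println("Resolved gateway: " + g.getType()));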
 package com.dlink.gateway;

+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import lombok.Getter;
 import lombok.Setter;

+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
+import java.util.Map;

 /**
  * SubmitConfig
@@ -18,23 +23,18 @@ public class GatewayConfig {

     private GatewayType type;
     private String jobName;
-    private String configDir;
+    private String flinkConfigPath;
     private String userJarPath;
     private String[] userJarParas;
     private String userJarMainAppClass;
     private String savePoint;
+    private String flinkLibs;
+    private String yarnConfigPath;
+    private List<ConfigPara> configParas;

-    public GatewayConfig() {
-    }
+    private static final ObjectMapper mapper = new ObjectMapper();

-    public GatewayConfig(GatewayType type, String jobName, String configDir, String userJarPath, String[] userJarParas, String userJarMainAppClass, String savePoint) {
-        this.type = type;
-        this.jobName = jobName;
-        this.configDir = configDir;
-        this.userJarPath = userJarPath;
-        this.userJarParas = userJarParas;
-        this.userJarMainAppClass = userJarMainAppClass;
-        this.savePoint = savePoint;
+    public GatewayConfig() {
     }

     public static GatewayConfig build(JsonNode para){
@@ -43,8 +43,8 @@ public class GatewayConfig {
         if(para.has("jobName")) {
             config.setJobName(para.get("jobName").asText());
         }
-        if(para.has("configDir")) {
-            config.setConfigDir(para.get("configDir").asText());
+        if(para.has("flinkConfigPath")) {
+            config.setFlinkConfigPath(para.get("flinkConfigPath").asText());
         }
         if(para.has("userJarPath")) {
             config.setUserJarPath(para.get("userJarPath").asText());
@@ -58,6 +58,25 @@ public class GatewayConfig {
         if(para.has("savePoint")) {
             config.setSavePoint(para.get("savePoint").asText());
         }
+        if(para.has("flinkLibs")) {
+            config.setFlinkLibs(para.get("flinkLibs").asText());
+        }
+        if(para.has("yarnConfigPath")) {
+            config.setYarnConfigPath(para.get("yarnConfigPath").asText());
+        }
+        if(para.has("configParas")) {
+            try {
+                List<ConfigPara> configParas = new ArrayList<>();
+                JsonNode paras = mapper.readTree(para.get("configParas").asText());
+                paras.forEach((JsonNode node)-> {
+                    configParas.add(new ConfigPara(node.get("key").asText(), node.get("value").asText()));
+                });
+                config.setConfigParas(configParas);
+            } catch (JsonProcessingException e) {
+                e.printStackTrace();
+            }
+        }
         return config;
     }
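
Because configParas is read with mapper.readTree(para.get("configParas").asText()), it is expected to arrive as a JSON array encoded inside a string field, not as a nested array. A hedged sketch of a payload build() would accept -- values are illustrative, and the "type" handling sits in the elided part of this hunk:

    ObjectMapper mapper = new ObjectMapper();
    JsonNode para = mapper.readTree(
            "{\"type\":\"yarn-per-job\"," +
            " \"jobName\":\"demo\"," +
            " \"flinkConfigPath\":\"/opt/flink/conf\"," +
            " \"flinkLibs\":\"hdfs:///flink/lib\"," +
            " \"yarnConfigPath\":\"/etc/hadoop/conf/yarn-site.xml\"," +
            // The inner quotes are escaped: configParas is a JSON string holding an array.
            " \"configParas\":\"[{\\\"key\\\":\\\"parallelism.default\\\",\\\"value\\\":\\\"2\\\"}]\"}");
    GatewayConfig config = GatewayConfig.build(para);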
@@ -66,11 +85,14 @@ public class GatewayConfig {
         return "GatewayConfig{" +
                 "type=" + type +
                 ", jobName='" + jobName + '\'' +
-                ", configDir='" + configDir + '\'' +
+                ", flinkConfigPath='" + flinkConfigPath + '\'' +
                 ", userJarPath='" + userJarPath + '\'' +
                 ", userJarParas=" + Arrays.toString(userJarParas) +
                 ", userJarMainAppClass='" + userJarMainAppClass + '\'' +
                 ", savePoint='" + savePoint + '\'' +
+                ", flinkLibs='" + flinkLibs + '\'' +
+                ", yarnConfigPath='" + yarnConfigPath + '\'' +
+                ", configParas='" + configParas + '\'' +
                 '}';
     }
 }
@@ -10,7 +10,7 @@ import com.dlink.assertion.Asserts;
 **/
 public enum GatewayType {

-    YARN_APPLICATION("ya","yarn-application");
+    YARN_APPLICATION("ya","yarn-application"), YARN_PER_JOB("ypj","yarn-per-job");

     private String value;
     private String longValue;
...
@@ -8,5 +8,5 @@ package com.dlink.gateway.result;
 **/
 public interface GatewayResult {

-
+    String getAppId();
 }
@@ -28,6 +28,13 @@ public class YarnResult extends AbstractGatewayResult {
         this.appId = appId;
     }

+    public String getAppId() {
+        return appId;
+    }
+
+    public String getWebURL() {
+        return webURL;
+    }
+
     public static YarnResult build(GatewayType type){
         return new YarnResult(type,LocalDateTime.now());
...
@@ -3,22 +3,17 @@ package com.dlink.gateway.yarn;

 import com.dlink.assertion.Asserts;
 import com.dlink.gateway.GatewayConfig;
 import com.dlink.gateway.GatewayType;
+import com.dlink.gateway.exception.GatewayException;
 import com.dlink.gateway.result.GatewayResult;
 import com.dlink.gateway.result.YarnResult;
-import org.apache.flink.client.deployment.ClusterClientFactory;
 import org.apache.flink.client.deployment.ClusterSpecification;
-import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
 import org.apache.flink.client.deployment.application.ApplicationConfiguration;
 import org.apache.flink.client.program.ClusterClient;
 import org.apache.flink.client.program.ClusterClientProvider;
-import org.apache.flink.configuration.DeploymentOptions;
-import org.apache.flink.configuration.GlobalConfiguration;
 import org.apache.flink.configuration.PipelineOptions;
 import org.apache.flink.runtime.jobgraph.JobGraph;
-import org.apache.flink.runtime.jobgraph.SavepointConfigOptions;
+import org.apache.flink.yarn.YarnClientYarnClusterInformationRetriever;
 import org.apache.flink.yarn.YarnClusterDescriptor;
-import org.apache.flink.yarn.configuration.YarnConfigOptions;
-import org.apache.flink.yarn.entrypoint.YarnApplicationClusterEntryPoint;
 import org.apache.hadoop.yarn.api.records.ApplicationId;

 import java.util.Collections;
@@ -43,58 +38,24 @@ public class YarnApplicationGateway extends YarnGateway {
         return GatewayType.YARN_APPLICATION;
     }

-    @Override
-    public void init() {
-        configuration = GlobalConfiguration.loadConfiguration(config.getConfigDir());
-        configuration.set(DeploymentOptions.TARGET, getType().getLongValue());
-        if(Asserts.isNotNullString(config.getSavePoint())) {
-            configuration.setString(SavepointConfigOptions.SAVEPOINT_PATH, config.getSavePoint());
-        }
-        clientServiceLoader = new DefaultClusterClientServiceLoader();
-    }
-
     @Override
     public GatewayResult submitJobGraph(JobGraph jobGraph) {
-        init();
-        YarnResult result = YarnResult.build(getType());
-        final ClusterClientFactory clientFactory = clientServiceLoader.getClusterClientFactory(configuration);
-        try (final YarnClusterDescriptor clusterDescriptor =
-                     (YarnClusterDescriptor) clientFactory.createClusterDescriptor(configuration)) {
-            final ClusterSpecification clusterSpecification =
-                    clientFactory.getClusterSpecification(configuration);
-            ClusterClientProvider<ApplicationId> clusterClientProvider = clusterDescriptor.deployInternal(
-                    clusterSpecification,
-                    config.getJobName(),
-                    YarnApplicationClusterEntryPoint.class.getName(),
-                    jobGraph,
-                    false);
-            ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
-            ApplicationId applicationId = clusterClient.getClusterId();
-            result.setAppId(applicationId.toString());
-            result.setWebURL(clusterClient.getWebInterfaceURL());
-            result.success();
-        }catch (Exception e){
-            e.printStackTrace();
-            logger.error(e.getMessage());
-            result.fail(e.getMessage());
-        }
-        return result;
+        throw new GatewayException("Couldn't deploy Yarn Application Cluster with job graph.");
     }

     @Override
     public GatewayResult submitJar() {
-        init();
+        if(Asserts.isNull(yarnClient)){
+            init();
+        }
         YarnResult result = YarnResult.build(getType());
+        logger.warn(config.toString());
         configuration.set(PipelineOptions.JARS, Collections.singletonList(config.getUserJarPath()));
-        configuration.set(YarnConfigOptions.APPLICATION_NAME, config.getJobName());
+        ClusterSpecification clusterSpecification = new ClusterSpecification.ClusterSpecificationBuilder().createClusterSpecification();
         ApplicationConfiguration appConfig = new ApplicationConfiguration(config.getUserJarParas(), config.getUserJarMainAppClass());
-        final ClusterClientFactory clientFactory = clientServiceLoader.getClusterClientFactory(configuration);
-        try (final YarnClusterDescriptor clusterDescriptor =
-                     (YarnClusterDescriptor) clientFactory.createClusterDescriptor(configuration)) {
-            final ClusterSpecification clusterSpecification =
-                    clientFactory.getClusterSpecification(configuration);
-            ClusterClientProvider<ApplicationId> clusterClientProvider = clusterDescriptor.deployApplicationCluster(
+        YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor(
+                configuration, yarnConfiguration, yarnClient, YarnClientYarnClusterInformationRetriever.create(yarnClient), true);
+        try {
+            ClusterClientProvider<ApplicationId> clusterClientProvider = yarnClusterDescriptor.deployApplicationCluster(
                     clusterSpecification,
                     appConfig);
             ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
...
 package com.dlink.gateway.yarn;

+import com.dlink.assertion.Asserts;
 import com.dlink.gateway.AbstractGateway;
+import com.dlink.gateway.ConfigPara;
 import com.dlink.gateway.GatewayConfig;
+import com.dlink.gateway.result.GatewayResult;
 import org.apache.flink.client.deployment.ClusterClientFactory;
 import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
+import org.apache.flink.configuration.DeploymentOptions;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.apache.flink.runtime.jobgraph.SavepointConfigOptions;
+import org.apache.flink.yarn.configuration.YarnConfigOptions;
+import org.apache.flink.yarn.configuration.YarnLogConfigUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+import java.util.Collections;
+import java.util.List;

 /**
  * YarnSubmitter
@@ -13,7 +27,8 @@ import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
 **/
 public abstract class YarnGateway extends AbstractGateway {

-    protected DefaultClusterClientServiceLoader clientServiceLoader;
+    protected YarnConfiguration yarnConfiguration;
+    protected YarnClient yarnClient;

     public YarnGateway() {
     }
@@ -22,5 +37,40 @@ public abstract class YarnGateway extends AbstractGateway {
         super(config);
     }

-    public void init(){}
+    public void init(){
+        initConfig();
+        initYarnClient();
+    }
+
+    private void initConfig(){
+        configuration = GlobalConfiguration.loadConfiguration(config.getFlinkConfigPath());
+        addConfigParas(config.getConfigParas());
+        configuration.set(DeploymentOptions.TARGET, getType().getLongValue());
+        if(Asserts.isNotNullString(config.getSavePoint())) {
+            configuration.setString(SavepointConfigOptions.SAVEPOINT_PATH, config.getSavePoint());
+        }
+        configuration.set(YarnConfigOptions.PROVIDED_LIB_DIRS, Collections.singletonList(config.getFlinkLibs()));
+        configuration.set(YarnConfigOptions.APPLICATION_NAME, config.getJobName());
+        YarnLogConfigUtil.setLogConfigFileInConfig(configuration, config.getFlinkConfigPath());
+    }
+
+    private void initYarnClient(){
+        yarnConfiguration = new YarnConfiguration();
+        yarnConfiguration.addResource(new Path(config.getYarnConfigPath()));
+        yarnClient = YarnClient.createYarnClient();
+        yarnClient.init(yarnConfiguration);
+        yarnClient.start();
+    }
+
+    private void addConfigParas(List<ConfigPara> configParas){
+        if(Asserts.isNotNull(configParas)) {
+            for (ConfigPara configPara : configParas) {
+                configuration.setString(configPara.getKey(), configPara.getValue());
+            }
+        }
+    }
+
+    public GatewayResult savepoint(){
+        // TODO: savepoint triggering is not implemented yet; a method declared to
+        // return GatewayResult needs a return statement to compile.
+        return null;
+    }
 }
package com.dlink.gateway.yarn;

import com.dlink.assertion.Asserts;
import com.dlink.gateway.GatewayConfig;
import com.dlink.gateway.GatewayType;
import com.dlink.gateway.exception.GatewayException;
import com.dlink.gateway.result.GatewayResult;
import com.dlink.gateway.result.YarnResult;
import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.yarn.YarnClientYarnClusterInformationRetriever;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.hadoop.yarn.api.records.ApplicationId;

/**
 * YarnPerJobGateway
 *
 * @author wenmo
 * @since 2021/10/29
 **/
public class YarnPerJobGateway extends YarnGateway {

    public YarnPerJobGateway(GatewayConfig config) {
        super(config);
    }

    public YarnPerJobGateway() {
    }

    @Override
    public GatewayType getType() {
        return GatewayType.YARN_PER_JOB;
    }

    @Override
    public GatewayResult submitJobGraph(JobGraph jobGraph) {
        if (Asserts.isNull(yarnClient)) {
            init();
        }
        YarnResult result = YarnResult.build(getType());
        ClusterSpecification clusterSpecification = new ClusterSpecification.ClusterSpecificationBuilder().createClusterSpecification();
        YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor(
                configuration, yarnConfiguration, yarnClient, YarnClientYarnClusterInformationRetriever.create(yarnClient), true);
        try {
            ClusterClientProvider<ApplicationId> clusterClientProvider = yarnClusterDescriptor.deployJobCluster(clusterSpecification, jobGraph, false);
            ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
            ApplicationId applicationId = clusterClient.getClusterId();
            result.setAppId(applicationId.toString());
            result.setWebURL(clusterClient.getWebInterfaceURL());
            result.success();
        } catch (Exception e) {
            e.printStackTrace();
            logger.error(e.getMessage());
            result.fail(e.getMessage());
        }
        return result;
    }

    @Override
    public GatewayResult submitJar() {
        throw new GatewayException("Couldn't deploy Yarn Per-Job Cluster with User Application Jar.");
    }
}
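
End to end, the per-job path is: JobManager.submitGraph builds a JobGraph from the accumulated INSERT statements, Gateway.build resolves YarnPerJobGateway via the SPI, and deployJobCluster spins up a dedicated YARN cluster for that one graph. A hedged sketch of driving the gateway directly -- paths are illustrative, and jobGraph is assumed to come from Executor.getJobGraphFromInserts:

    GatewayConfig config = new GatewayConfig();
    config.setType(GatewayType.YARN_PER_JOB);
    config.setJobName("perjob-demo");
    config.setFlinkConfigPath("/opt/flink/conf");               // assumed local Flink conf dir
    config.setFlinkLibs("hdfs:///flink/lib");                   // assumed provided-lib dir on HDFS
    config.setYarnConfigPath("/etc/hadoop/conf/yarn-site.xml"); // assumed yarn-site.xml location

    Gateway gateway = new YarnPerJobGateway(config);
    GatewayResult result = gateway.submitJobGraph(jobGraph);    // jobGraph: a prebuilt JobGraph
    System.out.println("YARN application: " + result.getAppId());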
-com.dlink.gateway.yarn.YarnApplicationGateway
\ No newline at end of file
+com.dlink.gateway.yarn.YarnApplicationGateway
+com.dlink.gateway.yarn.YarnPerJobGateway
\ No newline at end of file
@@ -15,7 +15,7 @@ public class GatewayTest {
         GatewayConfig config = new GatewayConfig();
         config.setJobName("apptest");
         config.setType(GatewayType.get("yarn-application"));
-        config.setConfigDir("/opt/src/flink-1.12.2_pj/conf");
+        config.setFlinkConfigPath("/opt/src/flink-1.12.2_pj/conf");
         config.setUserJarPath("hdfs:///flink12/jar/currencyAppJar.jar");
         config.setUserJarParas("--id 2410,2412,2411".split("\\s+"));
         config.setUserJarMainAppClass("com.app.MainApp");
...
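
A matching per-job test would exercise submitJobGraph instead of submitJar. A hedged sketch, reusing the environment paths from the application test above; the JobGraph construction is elided, since it needs a fully initialized table environment:

    @Test
    public void submitJobGraphTest() {
        GatewayConfig config = new GatewayConfig();
        config.setJobName("perjobtest");
        config.setType(GatewayType.get("yarn-per-job"));
        config.setFlinkConfigPath("/opt/src/flink-1.12.2_pj/conf");
        config.setFlinkLibs("hdfs:///flink12/lib/flinklib");
        config.setYarnConfigPath("/usr/local/hadoop/hadoop-2.7.7/etc/hadoop/yarn-site.xml");
        // JobGraph jobGraph = ... built via Executor.getJobGraphFromInserts(...)
        // GatewayResult result = Gateway.build(config).submitJobGraph(jobGraph);
    }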