Commit 1b459fbc authored by wenmo's avatar wenmo

新增 Batch 引擎

parent 179cd24e
...@@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; ...@@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation; import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.PipelineOptions; import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator; import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
...@@ -13,7 +14,10 @@ import org.apache.flink.runtime.rest.messages.JobPlanInfo; ...@@ -13,7 +14,10 @@ import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator; import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph; import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.*; import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl; import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager; import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog; import org.apache.flink.table.catalog.FunctionCatalog;
...@@ -34,7 +38,6 @@ import org.apache.flink.table.operations.Operation; ...@@ -34,7 +38,6 @@ import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation; import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.planner.delegation.ExecutorBase; import org.apache.flink.table.planner.delegation.ExecutorBase;
import org.apache.flink.table.planner.utils.ExecutorUtils; import org.apache.flink.table.planner.utils.ExecutorUtils;
import org.apache.flink.types.Row;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
...@@ -47,7 +50,7 @@ import java.util.Map; ...@@ -47,7 +50,7 @@ import java.util.Map;
* @author wenmo * @author wenmo
* @since 2021/6/7 22:06 * @since 2021/6/7 22:06
**/ **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {
protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) { protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader); super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
...@@ -57,14 +60,19 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -57,14 +60,19 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
return create(executionEnvironment, EnvironmentSettings.newInstance().build()); return create(executionEnvironment, EnvironmentSettings.newInstance().build());
} }
static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) { public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
Configuration configuration = new Configuration();
configuration.setString("execution.runtime-mode", "BATCH");
TableConfig tableConfig = new TableConfig();
tableConfig.addConfiguration(configuration);
return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
}
public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
return create(executionEnvironment, settings, new TableConfig()); return create(executionEnvironment, settings, new TableConfig());
} }
public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) { public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
if (!settings.isStreamingMode()) {
throw new TableException("StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
} else {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
ModuleManager moduleManager = new ModuleManager(); ModuleManager moduleManager = new ModuleManager();
CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build(); CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build();
...@@ -72,9 +80,9 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -72,9 +80,9 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
Map<String, String> executorProperties = settings.toExecutorProperties(); Map<String, String> executorProperties = settings.toExecutorProperties();
Executor executor = lookupExecutor(executorProperties, executionEnvironment); Executor executor = lookupExecutor(executorProperties, executionEnvironment);
Map<String, String> plannerProperties = settings.toPlannerProperties(); Map<String, String> plannerProperties = settings.toPlannerProperties();
Planner planner = ( ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager); Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader); return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader);
}
} }
private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) { private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) {
...@@ -94,25 +102,25 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -94,25 +102,25 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
} else { } else {
List<ModifyOperation> modifyOperations = new ArrayList<>(); List<ModifyOperation> modifyOperations = new ArrayList<>();
for (int i = 0; i < operations.size(); i++) { for (int i = 0; i < operations.size(); i++) {
if(operations.get(i) instanceof ModifyOperation){ if (operations.get(i) instanceof ModifyOperation) {
modifyOperations.add((ModifyOperation)operations.get(i)); modifyOperations.add((ModifyOperation) operations.get(i));
} }
} }
List<Transformation<?>> trans = super.planner.translate(modifyOperations); List<Transformation<?>> trans = super.planner.translate(modifyOperations);
if(execEnv instanceof ExecutorBase){ if (execEnv instanceof ExecutorBase) {
StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(((ExecutorBase) execEnv).getExecutionEnvironment(), trans); StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
JSONGenerator jsonGenerator = new JSONGenerator(streamGraph); JSONGenerator jsonGenerator = new JSONGenerator(streamGraph);
String json = jsonGenerator.getJSON(); String json = jsonGenerator.getJSON();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
ObjectNode objectNode =mapper.createObjectNode(); ObjectNode objectNode = mapper.createObjectNode();
try { try {
objectNode = (ObjectNode) mapper.readTree(json); objectNode = (ObjectNode) mapper.readTree(json);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
e.printStackTrace(); e.printStackTrace();
}finally { } finally {
return objectNode; return objectNode;
} }
}else{ } else {
throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase."); throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
} }
} }
...@@ -124,27 +132,27 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -124,27 +132,27 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
public StreamGraph getStreamGraphFromInserts(List<String> statements) { public StreamGraph getStreamGraphFromInserts(List<String> statements) {
List<ModifyOperation> modifyOperations = new ArrayList(); List<ModifyOperation> modifyOperations = new ArrayList();
for(String statement : statements){ for (String statement : statements) {
List<Operation> operations = getParser().parse(statement); List<Operation> operations = getParser().parse(statement);
if (operations.size() != 1) { if (operations.size() != 1) {
throw new TableException("Only single statement is supported."); throw new TableException("Only single statement is supported.");
} else { } else {
Operation operation = operations.get(0); Operation operation = operations.get(0);
if (operation instanceof ModifyOperation) { if (operation instanceof ModifyOperation) {
modifyOperations.add((ModifyOperation)operation); modifyOperations.add((ModifyOperation) operation);
} else { } else {
throw new TableException("Only insert statement is supported now."); throw new TableException("Only insert statement is supported now.");
} }
} }
} }
List<Transformation<?>> trans = getPlanner().translate(modifyOperations); List<Transformation<?>> trans = getPlanner().translate(modifyOperations);
if(execEnv instanceof ExecutorBase){ if (execEnv instanceof ExecutorBase) {
StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(((ExecutorBase) execEnv).getExecutionEnvironment(), trans); StreamGraph streamGraph = ExecutorUtils.generateStreamGraph(((ExecutorBase) execEnv).getExecutionEnvironment(), trans);
if(tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) { if (tableConfig.getConfiguration().containsKey(PipelineOptions.NAME.key())) {
streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME)); streamGraph.setJobName(tableConfig.getConfiguration().getString(PipelineOptions.NAME));
} }
return streamGraph; return streamGraph;
}else{ } else {
throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase."); throw new TableException("Unsupported SQL query! ExecEnv need a ExecutorBase.");
} }
} }
...@@ -174,11 +182,11 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -174,11 +182,11 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
record.setExplain(operation.asSummaryString()); record.setExplain(operation.asSummaryString());
operationlist.remove(i); operationlist.remove(i);
record.setType("DDL"); record.setType("DDL");
i=i-1; i = i - 1;
} }
} }
record.setExplainTrue(true); record.setExplainTrue(true);
if(operationlist.size()==0){ if (operationlist.size() == 0) {
return record; return record;
} }
record.setExplain(planner.explain(operationlist, extraDetails)); record.setExplain(planner.explain(operationlist, extraDetails));
......
...@@ -4,8 +4,11 @@ import com.dlink.result.SqlExplainResult; ...@@ -4,8 +4,11 @@ import com.dlink.result.SqlExplainResult;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation; import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.configuration.PipelineOptions; import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator; import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
...@@ -13,7 +16,10 @@ import org.apache.flink.runtime.rest.messages.JobPlanInfo; ...@@ -13,7 +16,10 @@ import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.JSONGenerator; import org.apache.flink.streaming.api.graph.JSONGenerator;
import org.apache.flink.streaming.api.graph.StreamGraph; import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.*; import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.internal.TableEnvironmentImpl; import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogManager; import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog; import org.apache.flink.table.catalog.FunctionCatalog;
...@@ -34,7 +40,6 @@ import org.apache.flink.table.operations.Operation; ...@@ -34,7 +40,6 @@ import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation; import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.planner.delegation.ExecutorBase; import org.apache.flink.table.planner.delegation.ExecutorBase;
import org.apache.flink.table.planner.utils.ExecutorUtils; import org.apache.flink.table.planner.utils.ExecutorUtils;
import org.apache.flink.types.Row;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
...@@ -47,7 +52,7 @@ import java.util.Map; ...@@ -47,7 +52,7 @@ import java.util.Map;
* @author wenmo * @author wenmo
* @since 2021/6/7 22:06 * @since 2021/6/7 22:06
**/ **/
public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { public class CustomTableEnvironmentImpl extends TableEnvironmentImpl implements CustomTableEnvironment {
protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) { protected CustomTableEnvironmentImpl(CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) {
super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader); super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode, userClassLoader);
...@@ -57,14 +62,19 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -57,14 +62,19 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
return create(executionEnvironment, EnvironmentSettings.newInstance().build()); return create(executionEnvironment, EnvironmentSettings.newInstance().build());
} }
static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) { public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment executionEnvironment) {
Configuration configuration = new Configuration();
configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
TableConfig tableConfig = new TableConfig();
tableConfig.addConfiguration(configuration);
return create(executionEnvironment, EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
}
public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) {
return create(executionEnvironment, settings, new TableConfig()); return create(executionEnvironment, settings, new TableConfig());
} }
public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) { public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
if (!settings.isStreamingMode()) {
throw new TableException("StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
} else {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
ModuleManager moduleManager = new ModuleManager(); ModuleManager moduleManager = new ModuleManager();
CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build(); CatalogManager catalogManager = CatalogManager.newBuilder().classLoader(classLoader).config(tableConfig.getConfiguration()).defaultCatalog(settings.getBuiltInCatalogName(), new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())).executionConfig(executionEnvironment.getConfig()).build();
...@@ -74,7 +84,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl { ...@@ -74,7 +84,7 @@ public class CustomTableEnvironmentImpl extends TableEnvironmentImpl {
Map<String, String> plannerProperties = settings.toPlannerProperties(); Map<String, String> plannerProperties = settings.toPlannerProperties();
Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager); Planner planner = (ComponentFactoryService.find(PlannerFactory.class, plannerProperties)).create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);
return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader); return new CustomTableEnvironmentImpl(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader);
}
} }
private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) { private static Executor lookupExecutor(Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) {
......
package com.dlink.core; package com.dlink.core;
import com.dlink.executor.CustomBatchTableEnvironmentImpl; import com.dlink.executor.CustomTableEnvironmentImpl;
import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.java.LocalEnvironment; import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult; import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.internal.BatchTableEnvironmentImpl;
import org.junit.Test; import org.junit.Test;
/** /**
...@@ -17,7 +19,7 @@ import org.junit.Test; ...@@ -17,7 +19,7 @@ import org.junit.Test;
*/ */
public class BatchTest { public class BatchTest {
@Test @Test
public void batchTest(){ public void batchTest() {
String source = "CREATE TABLE Orders (\n" + String source = "CREATE TABLE Orders (\n" +
" order_number BIGINT,\n" + " order_number BIGINT,\n" +
" price DECIMAL(32,2),\n" + " price DECIMAL(32,2),\n" +
...@@ -42,7 +44,7 @@ public class BatchTest { ...@@ -42,7 +44,7 @@ public class BatchTest {
} }
@Test @Test
public void batchTest2(){ public void batchTest2() {
String source = "CREATE TABLE Orders (\n" + String source = "CREATE TABLE Orders (\n" +
" order_number BIGINT,\n" + " order_number BIGINT,\n" +
" price DECIMAL(32,2),\n" + " price DECIMAL(32,2),\n" +
...@@ -53,29 +55,18 @@ public class BatchTest { ...@@ -53,29 +55,18 @@ public class BatchTest {
" 'number-of-rows' = '100'\n" + " 'number-of-rows' = '100'\n" +
")"; ")";
String select = "select order_number,price,order_time from Orders"; String select = "select order_number,price,order_time from Orders";
LocalEnvironment environment = ExecutionEnvironment.createLocalEnvironment(); // LocalEnvironment environment = ExecutionEnvironment.createLocalEnvironment();
CustomBatchTableEnvironmentImpl batchTableEnvironment = CustomBatchTableEnvironmentImpl.create(environment); StreamExecutionEnvironment environment = StreamExecutionEnvironment.createLocalEnvironment();
batchTableEnvironment.executeSql(source); Configuration configuration = new Configuration();
TableResult tableResult = batchTableEnvironment.executeSql(select); configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
tableResult.print(); // configuration.setString("execution.runtime-mode", "STREAMING");
} TableConfig tableConfig = new TableConfig();
tableConfig.addConfiguration(configuration);
@Test CustomTableEnvironmentImpl batchTableEnvironment = CustomTableEnvironmentImpl.create(environment,
public void batchTest3(){ EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(), tableConfig);
String source = "CREATE TABLE Orders (\n" +
" order_number BIGINT,\n" +
" price DECIMAL(32,2),\n" +
" buyer ROW<first_name STRING, last_name STRING>,\n" +
" order_time TIMESTAMP(3)\n" +
") WITH (\n" +
" 'connector' = 'datagen',\n" +
" 'number-of-rows' = '100'\n" +
")";
String select = "select order_number,price,order_time from Orders";
LocalEnvironment environment = ExecutionEnvironment.createLocalEnvironment();
CustomBatchTableEnvironmentImpl batchTableEnvironment = CustomBatchTableEnvironmentImpl.create(environment);
batchTableEnvironment.executeSql(source); batchTableEnvironment.executeSql(source);
TableResult tableResult = batchTableEnvironment.executeSql(select); batchTableEnvironment.executeSql(select);
tableResult.print(); // TableResult tableResult = batchTableEnvironment.executeSql(select);
// tableResult.print();
} }
} }
package com.dlink.executor;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.client.program.OptimizerPlanEnvironment;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.table.api.TableException;
/**
* AbstractBatchExecutor
*
* @author wenmo
* @since 2022/2/7 20:05
*/
public abstract class AbstractBatchExecutor extends Executor {

    // Batch execution environment; assigned by concrete subclasses (local/remote/app).
    protected ExecutionEnvironment environment;

    /**
     * Applies the inherited {@code executorSetting} to the environment on first initialization.
     * Delegates to {@link #updateEnvironment(ExecutorSetting)} so the parallelism/config
     * logic lives in exactly one place.
     */
    public void initEnvironment() {
        updateEnvironment(executorSetting);
    }

    /**
     * Applies parallelism and extra configuration from the given setting to the environment.
     *
     * @param executorSetting settings to apply; parallelism is applied only when non-null and
     *                        positive, the config map only when non-null
     */
    public void updateEnvironment(ExecutorSetting executorSetting) {
        if (executorSetting.getParallelism() != null && executorSetting.getParallelism() > 0) {
            environment.setParallelism(executorSetting.getParallelism());
        }
        if (executorSetting.getConfig() != null) {
            Configuration configuration = Configuration.fromMap(executorSetting.getConfig());
            // null ClassLoader: no user classloader is needed to resolve these options here.
            environment.getConfig().configure(configuration, null);
        }
    }

    /**
     * Submits and executes the batch job under the given name.
     *
     * @param jobName display name of the job
     * @return the job execution result
     * @throws Exception if submission or execution fails
     */
    public JobExecutionResult execute(String jobName) throws Exception {
        return environment.execute(jobName);
    }

    /**
     * Batch mode has no stream graph.
     *
     * @throws TableException always — a batch environment cannot produce a StreamGraph
     */
    public StreamGraph getStreamGraph() {
        throw new TableException("Batch model can't get StreamGraph.");
    }

    /** @return always {@code null}; a batch executor owns no StreamExecutionEnvironment */
    public StreamExecutionEnvironment getStreamExecutionEnvironment() {
        return null;
    }

    /** @return the execution config of the underlying batch environment */
    public ExecutionConfig getExecutionConfig() {
        return environment.getConfig();
    }

    /**
     * Delegates SET-statement parsing/loading to the wrapped table environment.
     *
     * @param statement the SQL statement to parse
     * @return whether the statement was recognized and loaded as configuration
     */
    public boolean parseAndLoadConfiguration(String statement) {
        return stEnvironment.parseAndLoadConfiguration(statement, getExecutionConfig(), setConfig);
    }
}
package com.dlink.executor;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamGraph;
/**
* AbstractStreamExecutor
*
* @author wenmo
* @since 2022/2/7 20:03
*/
public abstract class AbstractStreamExecutor extends Executor {

    // Streaming execution environment; assigned by concrete subclasses (local/remote/app).
    protected StreamExecutionEnvironment environment;

    /**
     * Applies the inherited {@code executorSetting} to the environment on first initialization.
     * Delegates to {@link #updateEnvironment(ExecutorSetting)} so the checkpoint/parallelism/
     * config logic lives in exactly one place.
     */
    public void initEnvironment() {
        updateEnvironment(executorSetting);
    }

    /**
     * Applies checkpointing, parallelism and extra configuration from the given setting.
     *
     * @param executorSetting settings to apply; checkpoint interval and parallelism are applied
     *                        only when non-null and positive, the config map only when non-null
     */
    public void updateEnvironment(ExecutorSetting executorSetting) {
        if (executorSetting.getCheckpoint() != null && executorSetting.getCheckpoint() > 0) {
            environment.enableCheckpointing(executorSetting.getCheckpoint());
        }
        if (executorSetting.getParallelism() != null && executorSetting.getParallelism() > 0) {
            environment.setParallelism(executorSetting.getParallelism());
        }
        if (executorSetting.getConfig() != null) {
            Configuration configuration = Configuration.fromMap(executorSetting.getConfig());
            // null ClassLoader: no user classloader is needed to resolve these options here.
            environment.getConfig().configure(configuration, null);
        }
    }

    /**
     * Submits and executes the streaming job under the given name.
     *
     * @param jobName display name of the job
     * @return the job execution result
     * @throws Exception if submission or execution fails
     */
    public JobExecutionResult execute(String jobName) throws Exception {
        return environment.execute(jobName);
    }

    /** @return the StreamGraph built from the current environment's transformations */
    public StreamGraph getStreamGraph() {
        return environment.getStreamGraph();
    }

    /** @return the underlying StreamExecutionEnvironment */
    public StreamExecutionEnvironment getStreamExecutionEnvironment() {
        return environment;
    }

    /** @return the execution config of the underlying streaming environment */
    public ExecutionConfig getExecutionConfig() {
        return environment.getConfig();
    }

    /**
     * Delegates SET-statement parsing/loading to the wrapped table environment.
     *
     * @param statement the SQL statement to parse
     * @return whether the statement was recognized and loaded as configuration
     */
    public boolean parseAndLoadConfiguration(String statement) {
        return stEnvironment.parseAndLoadConfiguration(statement, getExecutionConfig(), setConfig);
    }
}
package com.dlink.executor; package com.dlink.executor;
import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/** /**
* AppBatchExecutor * AppBatchExecutor
...@@ -8,16 +8,16 @@ import org.apache.flink.api.java.ExecutionEnvironment; ...@@ -8,16 +8,16 @@ import org.apache.flink.api.java.ExecutionEnvironment;
* @author wenmo * @author wenmo
* @since 2022/2/7 22:14 * @since 2022/2/7 22:14
*/ */
public class AppBatchExecutor extends AbstractBatchExecutor { public class AppBatchExecutor extends Executor {
public AppBatchExecutor(ExecutorSetting executorSetting) { public AppBatchExecutor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
this.environment = ExecutionEnvironment.createLocalEnvironment(); this.environment = StreamExecutionEnvironment.createLocalEnvironment();
init(); init();
} }
@Override @Override
CustomTableEnvironment createCustomTableEnvironment() { CustomTableEnvironment createCustomTableEnvironment() {
return CustomBatchTableEnvironmentImpl.create(environment); return CustomTableEnvironmentImpl.createBatch(environment);
} }
} }
...@@ -8,7 +8,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; ...@@ -8,7 +8,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* @author wenmo * @author wenmo
* @since 2021/11/18 * @since 2021/11/18
*/ */
public class AppStreamExecutor extends AbstractStreamExecutor{ public class AppStreamExecutor extends Executor {
public AppStreamExecutor(ExecutorSetting executorSetting) { public AppStreamExecutor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
......
package com.dlink.executor; package com.dlink.executor;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
/** /**
* LocalBatchExecutor * LocalBatchExecutor
...@@ -12,16 +8,16 @@ import org.apache.flink.table.api.bridge.java.BatchTableEnvironment; ...@@ -12,16 +8,16 @@ import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
* @author wenmo * @author wenmo
* @since 2022/2/4 0:04 * @since 2022/2/4 0:04
*/ */
public class LocalBatchExecutor extends AbstractBatchExecutor { public class LocalBatchExecutor extends Executor {
public LocalBatchExecutor(ExecutorSetting executorSetting) { public LocalBatchExecutor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
this.environment = ExecutionEnvironment.createLocalEnvironment(); this.environment = StreamExecutionEnvironment.createLocalEnvironment();
init(); init();
} }
@Override @Override
CustomTableEnvironment createCustomTableEnvironment() { CustomTableEnvironment createCustomTableEnvironment() {
return CustomBatchTableEnvironmentImpl.create(environment); return CustomTableEnvironmentImpl.createBatch(environment);
} }
} }
...@@ -8,7 +8,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; ...@@ -8,7 +8,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* @author wenmo * @author wenmo
* @since 2021/5/25 13:48 * @since 2021/5/25 13:48
**/ **/
public class LocalStreamExecutor extends AbstractStreamExecutor { public class LocalStreamExecutor extends Executor {
public LocalStreamExecutor(ExecutorSetting executorSetting) { public LocalStreamExecutor(ExecutorSetting executorSetting) {
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
......
package com.dlink.executor; package com.dlink.executor;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/** /**
...@@ -9,17 +8,17 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; ...@@ -9,17 +8,17 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* @author wenmo * @author wenmo
* @since 2022/2/7 22:10 * @since 2022/2/7 22:10
*/ */
public class RemoteBatchExecutor extends AbstractBatchExecutor { public class RemoteBatchExecutor extends Executor {
public RemoteBatchExecutor(EnvironmentSetting environmentSetting,ExecutorSetting executorSetting) { public RemoteBatchExecutor(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting) {
this.environmentSetting = environmentSetting; this.environmentSetting = environmentSetting;
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
this.environment = ExecutionEnvironment.createRemoteEnvironment(environmentSetting.getHost(), environmentSetting.getPort()); this.environment = StreamExecutionEnvironment.createRemoteEnvironment(environmentSetting.getHost(), environmentSetting.getPort());
init(); init();
} }
@Override @Override
CustomTableEnvironment createCustomTableEnvironment() { CustomTableEnvironment createCustomTableEnvironment() {
return CustomBatchTableEnvironmentImpl.create(environment); return CustomTableEnvironmentImpl.createBatch(environment);
} }
} }
...@@ -8,9 +8,9 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; ...@@ -8,9 +8,9 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* @author wenmo * @author wenmo
* @since 2021/5/25 14:05 * @since 2021/5/25 14:05
**/ **/
public class RemoteStreamExecutor extends AbstractStreamExecutor { public class RemoteStreamExecutor extends Executor {
public RemoteStreamExecutor(EnvironmentSetting environmentSetting,ExecutorSetting executorSetting) { public RemoteStreamExecutor(EnvironmentSetting environmentSetting, ExecutorSetting executorSetting) {
this.environmentSetting = environmentSetting; this.environmentSetting = environmentSetting;
this.executorSetting = executorSetting; this.executorSetting = executorSetting;
this.environment = StreamExecutionEnvironment.createRemoteEnvironment(environmentSetting.getHost(), environmentSetting.getPort()); this.environment = StreamExecutionEnvironment.createRemoteEnvironment(environmentSetting.getHost(), environmentSetting.getPort());
......
...@@ -647,6 +647,9 @@ export default (): React.ReactNode => { ...@@ -647,6 +647,9 @@ export default (): React.ReactNode => {
<li> <li>
<Link>新增 FlinkJar Dialect 的管理</Link> <Link>新增 FlinkJar Dialect 的管理</Link>
</li> </li>
<li>
<Link>新增 Batch 引擎</Link>
</li>
</ul> </ul>
</Paragraph> </Paragraph>
</Timeline.Item> </Timeline.Item>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment