Commit bfdac7ca authored by wenmo

Data source fragment configuration and fragment query syntax

parent 9aac98c6
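The change set below adds a fragment flag to the shared statement DTO, prepends the enabled data source "Flink with" SQL when that flag is set, and introduces a "show fragment <name>" statement. As a minimal, hypothetical sketch of exercising the new syntax (it mirrors the showFragmentTest case added further down; the demo class name is invented, and SingleSqlParserFactory is assumed to live in com.dlink.parser next to the new ShowFragmentParser):

import java.util.List;
import java.util.Map;

import com.dlink.parser.SingleSqlParserFactory;

public class ShowFragmentDemo {
    public static void main(String[] args) {
        // "show fragment test" is routed to the new ShowFragmentParser by generateParser
        Map<String, List<String>> segments = SingleSqlParserFactory.generateParser("show fragment test");
        System.out.println(segments);
    }
}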
@@ -20,7 +20,7 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class APIExecuteSqlDTO extends AbstractStatementDTO{
+public class APIExecuteSqlDTO extends AbstractStatementDTO {
     // RUN_MODE
     private String type;
     private boolean useResult = false;
@@ -28,8 +28,6 @@ public class APIExecuteSqlDTO extends AbstractStatementDTO{
     private boolean useAutoCancel = false;
     private boolean useStatementSet = false;
     private String address;
-    private boolean fragment = false;
-    // private String statement;
     private String jobName;
     private Integer maxRowNum = 100;
     private Integer checkPoint = 0;
@@ -44,8 +42,8 @@ public class APIExecuteSqlDTO extends AbstractStatementDTO{
             savePointStrategy = 3;
         }
         return new JobConfig(
-                type, useResult,useChangeLog, useChangeLog, false, null, true, address, jobName,
-                fragment, useStatementSet, maxRowNum, checkPoint, parallelism, savePointStrategy,
+                type, useResult, useChangeLog, useChangeLog, false, null, true, address, jobName,
+                isFragment(), useStatementSet, maxRowNum, checkPoint, parallelism, savePointStrategy,
                 savePointPath, configuration, gatewayConfig);
     }
 }
@@ -17,14 +17,12 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class APIExplainSqlDTO extends AbstractStatementDTO{
+public class APIExplainSqlDTO extends AbstractStatementDTO {
     private boolean useStatementSet = false;
-    private boolean fragment = false;
-    // private String statement;
     private Integer parallelism;
     private Map<String, String> configuration;

     public JobConfig getJobConfig() {
-        return new JobConfig("local", false, false, fragment, useStatementSet, parallelism, configuration);
+        return new JobConfig("local", false, false, isFragment(), useStatementSet, parallelism, configuration);
     }
 }
@@ -10,6 +10,8 @@ public class AbstractStatementDTO {
     private String statement;
     private Integer envId;
+    private boolean fragment = false;

     public String getStatement() {
         return statement;
@@ -26,4 +28,12 @@ public class AbstractStatementDTO {
     public void setEnvId(Integer envId) {
         this.envId = envId;
     }
+
+    public boolean isFragment() {
+        return fragment;
+    }
+
+    public void setFragment(boolean fragment) {
+        this.fragment = fragment;
+    }
 }
@@ -19,7 +19,7 @@ import java.util.Map;
  */
 @Getter
 @Setter
-public class StudioExecuteDTO extends AbstractStatementDTO{
+public class StudioExecuteDTO extends AbstractStatementDTO {
     // RUN_MODE
     private String type;
     private String dialect;
@@ -34,7 +34,6 @@ public class StudioExecuteDTO extends AbstractStatementDTO{
     private Integer clusterConfigurationId;
     private Integer databaseId;
     private Integer jarId;
-    private boolean fragment;
     private String jobName;
     private Integer taskId;
     private Integer maxRowNum;
@@ -46,22 +45,22 @@ public class StudioExecuteDTO extends AbstractStatementDTO{
     private static final ObjectMapper mapper = new ObjectMapper();

     public JobConfig getJobConfig() {
-        Map<String,String> config = new HashMap<>();
+        Map<String, String> config = new HashMap<>();
         JsonNode paras = null;
-        if(Asserts.isNotNullString(configJson)) {
+        if (Asserts.isNotNullString(configJson)) {
             try {
                 paras = mapper.readTree(configJson);
             } catch (JsonProcessingException e) {
                 e.printStackTrace();
             }
             paras.forEach((JsonNode node) -> {
                     config.put(node.get("key").asText(), node.get("value").asText());
                 }
             );
         }
         return new JobConfig(
-                type,useResult,useChangeLog,useAutoCancel, useSession, session, clusterId,
-                clusterConfigurationId,jarId, taskId, jobName, fragment,statementSet,batchModel,
-                maxRowNum, checkPoint, parallelism,savePointStrategy, savePointPath,config);
+                type, useResult, useChangeLog, useAutoCancel, useSession, session, clusterId,
+                clusterConfigurationId, jarId, taskId, jobName, isFragment(), statementSet, batchModel,
+                maxRowNum, checkPoint, parallelism, savePointStrategy, savePointPath, config);
     }
 }
@@ -61,28 +61,32 @@ public class StudioServiceImpl implements StudioService {
     @Autowired
     private TaskService taskService;

-    private void addFlinkSQLEnv(AbstractStatementDTO statementDTO){
-        if(Asserts.isNotNull(statementDTO.getEnvId())&&statementDTO.getEnvId()!=0){
+    private void addFlinkSQLEnv(AbstractStatementDTO statementDTO) {
+        String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
+        if (statementDTO.isFragment() && Asserts.isNotNullString(flinkWithSql)) {
+            statementDTO.setStatement(flinkWithSql + "\r\n" + statementDTO.getStatement());
+        }
+        if (Asserts.isNotNull(statementDTO.getEnvId()) && statementDTO.getEnvId() != 0) {
             Task task = taskService.getTaskInfoById(statementDTO.getEnvId());
-            if(Asserts.isNotNull(task)&&Asserts.isNotNullString(task.getStatement())) {
+            if (Asserts.isNotNull(task) && Asserts.isNotNullString(task.getStatement())) {
                 statementDTO.setStatement(task.getStatement() + "\r\n" + statementDTO.getStatement());
             }
         }
     }

-    private void buildSession(JobConfig config){
+    private void buildSession(JobConfig config) {
         // If you are using a shared session, configure the current jobmanager address
-        if(!config.isUseSession()) {
+        if (!config.isUseSession()) {
             config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), config.getClusterId()));
         }
     }

     @Override
     public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) {
-        if(Dialect.isSql(studioExecuteDTO.getDialect())){
+        if (Dialect.isSql(studioExecuteDTO.getDialect())) {
             return executeCommonSql(SqlDTO.build(studioExecuteDTO.getStatement(),
-                    studioExecuteDTO.getDatabaseId(),studioExecuteDTO.getMaxRowNum()));
-        }else{
+                    studioExecuteDTO.getDatabaseId(), studioExecuteDTO.getMaxRowNum()));
+        } else {
             return executeFlinkSql(studioExecuteDTO);
         }
     }
@@ -103,26 +107,26 @@ public class StudioServiceImpl implements StudioService {
         JobResult result = new JobResult();
         result.setStatement(sqlDTO.getStatement());
         result.setStartTime(LocalDateTime.now());
-        if(Asserts.isNull(sqlDTO.getDatabaseId())){
+        if (Asserts.isNull(sqlDTO.getDatabaseId())) {
             result.setSuccess(false);
             result.setError("请指定数据源");
             result.setEndTime(LocalDateTime.now());
             return result;
-        }else{
+        } else {
             DataBase dataBase = dataBaseService.getById(sqlDTO.getDatabaseId());
-            if(Asserts.isNull(dataBase)){
+            if (Asserts.isNull(dataBase)) {
                 result.setSuccess(false);
                 result.setError("数据源不存在");
                 result.setEndTime(LocalDateTime.now());
                 return result;
             }
             Driver driver = Driver.build(dataBase.getDriverConfig());
-            JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(),sqlDTO.getMaxRowNum());
+            JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
             driver.close();
             result.setResult(selectResult);
-            if(selectResult.isSuccess()){
+            if (selectResult.isSuccess()) {
                 result.setSuccess(true);
-            }else{
+            } else {
                 result.setSuccess(false);
                 result.setError(selectResult.getError());
             }
@@ -134,7 +138,7 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public IResult executeDDL(StudioDDLDTO studioDDLDTO) {
         JobConfig config = studioDDLDTO.getJobConfig();
-        if(!config.isUseSession()) {
+        if (!config.isUseSession()) {
             config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioDDLDTO.getClusterId()));
         }
         JobManager jobManager = JobManager.build(config);
@@ -143,9 +147,9 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public List<SqlExplainResult> explainSql(StudioExecuteDTO studioExecuteDTO) {
-        if( Dialect.isSql(studioExecuteDTO.getDialect())){
+        if (Dialect.isSql(studioExecuteDTO.getDialect())) {
             return explainCommonSql(studioExecuteDTO);
-        }else{
+        } else {
             return explainFlinkSql(studioExecuteDTO);
         }
     }
@@ -163,15 +167,15 @@ public class StudioServiceImpl implements StudioService {
     }

     private List<SqlExplainResult> explainCommonSql(StudioExecuteDTO studioExecuteDTO) {
-        if(Asserts.isNull(studioExecuteDTO.getDatabaseId())){
-            return new ArrayList<SqlExplainResult>(){{
-                add(SqlExplainResult.fail(studioExecuteDTO.getStatement(),"请指定数据源"));
+        if (Asserts.isNull(studioExecuteDTO.getDatabaseId())) {
+            return new ArrayList<SqlExplainResult>() {{
+                add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
             }};
-        }else{
+        } else {
             DataBase dataBase = dataBaseService.getById(studioExecuteDTO.getDatabaseId());
-            if(Asserts.isNull(dataBase)){
-                return new ArrayList<SqlExplainResult>(){{
-                    add(SqlExplainResult.fail(studioExecuteDTO.getStatement(),"数据源不存在"));
+            if (Asserts.isNull(dataBase)) {
+                return new ArrayList<SqlExplainResult>() {{
+                    add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
                 }};
             }
             Driver driver = Driver.build(dataBase.getDriverConfig());
@@ -202,12 +206,12 @@ public class StudioServiceImpl implements StudioService {
         JobManager jobManager = JobManager.buildPlanMode(config);
         String planJson = jobManager.getJobPlanJson(studioExecuteDTO.getStatement());
         ObjectMapper mapper = new ObjectMapper();
-        ObjectNode objectNode =mapper.createObjectNode();
+        ObjectNode objectNode = mapper.createObjectNode();
         try {
             objectNode = (ObjectNode) mapper.readTree(planJson);
         } catch (JsonProcessingException e) {
             e.printStackTrace();
-        }finally {
+        } finally {
             return objectNode;
         }
     }
@@ -219,14 +223,14 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public SessionInfo createSession(SessionDTO sessionDTO, String createUser) {
-        if(sessionDTO.isUseRemote()) {
+        if (sessionDTO.isUseRemote()) {
             Cluster cluster = clusterService.getById(sessionDTO.getClusterId());
             SessionConfig sessionConfig = SessionConfig.build(
                     sessionDTO.getType(), true,
                     cluster.getId(), cluster.getAlias(),
                     clusterService.buildEnvironmentAddress(true, sessionDTO.getClusterId()));
             return JobManager.createSession(sessionDTO.getSession(), sessionConfig, createUser);
-        }else{
+        } else {
             SessionConfig sessionConfig = SessionConfig.build(
                     sessionDTO.getType(), false,
                     null, null,
@@ -237,9 +241,9 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public boolean clearSession(String session) {
-        if(SessionPool.remove(session)>0){
+        if (SessionPool.remove(session) > 0) {
             return true;
-        }else{
+        } else {
             return false;
         }
     }
@@ -267,8 +271,8 @@ public class StudioServiceImpl implements StudioService {
     @Override
     public List<JsonNode> listJobs(Integer clusterId) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
-        try{
+        Asserts.checkNotNull(cluster, "该集群不存在");
+        try {
             return FlinkAPI.build(cluster.getJobManagerHost()).listJobs();
         } catch (Exception e) {
             logger.info("查询作业时集群不存在");
@@ -277,12 +281,12 @@ public class StudioServiceImpl implements StudioService {
     }

     @Override
-    public boolean cancel(Integer clusterId,String jobId) {
+    public boolean cancel(Integer clusterId, String jobId) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
+        Asserts.checkNotNull(cluster, "该集群不存在");
         JobConfig jobConfig = new JobConfig();
         jobConfig.setAddress(cluster.getJobManagerHost());
-        if(Asserts.isNotNull(cluster.getClusterConfigurationId())){
+        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
             Map<String, Object> gatewayConfig = clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
             jobConfig.buildGatewayConfig(gatewayConfig);
         }
@@ -291,13 +295,13 @@ public class StudioServiceImpl implements StudioService {
     }

     @Override
-    public boolean savepoint(Integer clusterId, String jobId, String savePointType,String name) {
+    public boolean savepoint(Integer clusterId, String jobId, String savePointType, String name) {
         Cluster cluster = clusterService.getById(clusterId);
-        Asserts.checkNotNull(cluster,"该集群不存在");
+        Asserts.checkNotNull(cluster, "该集群不存在");
         JobConfig jobConfig = new JobConfig();
         jobConfig.setAddress(cluster.getJobManagerHost());
         jobConfig.setType(cluster.getType());
-        if(Asserts.isNotNull(cluster.getClusterConfigurationId())){
+        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
             Map<String, Object> gatewayConfig = clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
             jobConfig.buildGatewayConfig(gatewayConfig);
             jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
@@ -305,10 +309,10 @@ public class StudioServiceImpl implements StudioService {
         }
         JobManager jobManager = JobManager.build(jobConfig);
         jobManager.setUseGateway(true);
-        SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType,null);
-        if(Asserts.isNotNull(savePointResult)){
-            for(JobInfo item : savePointResult.getJobInfos()){
-                if(Asserts.isEqualsIgnoreCase(jobId,item.getJobId())){
+        SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null);
+        if (Asserts.isNotNull(savePointResult)) {
+            for (JobInfo item : savePointResult.getJobInfos()) {
+                if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId())) {
                     Savepoints savepoints = new Savepoints();
                     savepoints.setName(name);
                     savepoints.setType(savePointType);
@@ -322,14 +326,14 @@ public class StudioServiceImpl implements StudioService {
         return false;
     }

-    private void initUDF(JobConfig config,String statement){
-        if(!GatewayType.LOCAL.equalsValue(config.getType())){
+    private void initUDF(JobConfig config, String statement) {
+        if (!GatewayType.LOCAL.equalsValue(config.getType())) {
             return;
         }
         List<String> udfClassNameList = JobManager.getUDFClassName(statement);
-        for(String item : udfClassNameList){
+        for (String item : udfClassNameList) {
             Task task = taskService.getUDFByClassName(item);
-            JobManager.initUDF(item,task.getStatement());
+            JobManager.initUDF(item, task.getStatement());
         }
     }
 }
@@ -276,6 +276,12 @@ public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implemen
     private JobConfig buildJobConfig(Task task) {
         boolean isJarTask = Dialect.FLINKJAR.equalsVal(task.getDialect());
+        if (!isJarTask && task.isFragment()) {
+            String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
+            if (Asserts.isNotNullString(flinkWithSql)) {
+                task.setStatement(flinkWithSql + "\r\n" + task.getStatement());
+            }
+        }
         if (!isJarTask && Asserts.isNotNull(task.getEnvId()) && task.getEnvId() != 0) {
             Task envTask = getTaskInfoById(task.getEnvId());
             if (Asserts.isNotNull(envTask) && Asserts.isNotNullString(envTask.getStatement())) {
...
@@ -17,6 +17,7 @@ import com.dlink.gateway.result.GatewayResult;
 import com.dlink.gateway.result.SavePointResult;
 import com.dlink.gateway.result.TestResult;
 import com.dlink.interceptor.FlinkInterceptor;
+import com.dlink.interceptor.FlinkInterceptorResult;
 import com.dlink.model.SystemConfiguration;
 import com.dlink.parser.SqlType;
 import com.dlink.pool.ClassEntity;
@@ -162,7 +163,7 @@ public class JobManager {
     private Executor createExecutor() {
         initEnvironmentSetting();
-        if (!runMode.equals(GatewayType.LOCAL)&& !useGateway && config.isUseRemote()) {
+        if (!runMode.equals(GatewayType.LOCAL) && !useGateway && config.isUseRemote()) {
             executor = Executor.buildRemoteExecutor(environmentSetting, config.getExecutorSetting());
             return executor;
         } else {
@@ -200,7 +201,7 @@ public class JobManager {
     }

     public boolean init() {
-        if(!isPlanMode) {
+        if (!isPlanMode) {
             runMode = GatewayType.get(config.getType());
             useGateway = useGateway(config.getType());
             handler = JobHandler.build();
@@ -239,7 +240,7 @@ public class JobManager {
         ready();
         String currentSql = "";
         // JobParam jobParam = pretreatStatements(SqlUtil.getStatements(statement));
-        JobParam jobParam = Explainer.build(executor,useStatementSet,sqlSeparator).pretreatStatements(SqlUtil.getStatements(statement,sqlSeparator));
+        JobParam jobParam = Explainer.build(executor, useStatementSet, sqlSeparator).pretreatStatements(SqlUtil.getStatements(statement, sqlSeparator));
         try {
             for (StatementParam item : jobParam.getDdl()) {
                 currentSql = item.getValue();
@@ -254,12 +255,12 @@ public class JobManager {
                 currentSql = String.join(sqlSeparator, inserts);
                 JobGraph jobGraph = executor.getJobGraphFromInserts(inserts);
                 GatewayResult gatewayResult = null;
-                if (GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
+                if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                     config.addGatewayConfig(executor.getSetConfig());
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                 } else {
                     config.addGatewayConfig(executor.getSetConfig());
-                    if(Asserts.isNotNullString(config.getSavePointPath())) {
+                    if (Asserts.isNotNullString(config.getSavePointPath())) {
                         jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                     }
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -281,7 +282,7 @@ public class JobManager {
                         job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
                     }
                     if (config.isUseResult()) {
-                        IResult result = ResultBuilder.build(SqlType.INSERT, config.getMaxRowNum(), config.isUseChangeLog(),config.isUseAutoCancel()).getResult(tableResult);
+                        IResult result = ResultBuilder.build(SqlType.INSERT, config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(tableResult);
                         job.setResult(result);
                     }
                 }
@@ -294,12 +295,12 @@ public class JobManager {
                 currentSql = String.join(sqlSeparator, inserts);
                 JobGraph jobGraph = executor.getJobGraphFromInserts(inserts);
                 GatewayResult gatewayResult = null;
-                if (GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
+                if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                     config.addGatewayConfig(executor.getSetConfig());
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                 } else {
                     config.addGatewayConfig(executor.getSetConfig());
-                    if(Asserts.isNotNullString(config.getSavePointPath())) {
+                    if (Asserts.isNotNullString(config.getSavePointPath())) {
                         jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                     }
                     gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -310,15 +311,23 @@ public class JobManager {
             } else {
                 for (StatementParam item : jobParam.getTrans()) {
                     currentSql = item.getValue();
-                    if (!FlinkInterceptor.build(executor, item.getValue())) {
-                        TableResult tableResult = executor.executeSql(item.getValue());
-                        if (tableResult.getJobClient().isPresent()) {
-                            job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
-                        }
+                    FlinkInterceptorResult flinkInterceptorResult = FlinkInterceptor.build(executor, item.getValue());
+                    if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) {
                         if (config.isUseResult()) {
-                            IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(),config.isUseAutoCancel()).getResult(tableResult);
+                            IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(flinkInterceptorResult.getTableResult());
                             job.setResult(result);
                         }
+                    } else {
+                        if (!flinkInterceptorResult.isNoExecute()) {
+                            TableResult tableResult = executor.executeSql(item.getValue());
+                            if (tableResult.getJobClient().isPresent()) {
+                                job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
+                            }
+                            if (config.isUseResult()) {
+                                IResult result = ResultBuilder.build(item.getType(), config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(tableResult);
+                                job.setResult(result);
+                            }
+                        }
                     }
                 }
             }
@@ -327,7 +336,7 @@ public class JobManager {
             if (useGateway) {
                 for (StatementParam item : jobParam.getExecute()) {
                     executor.executeSql(item.getValue());
-                    if(!useStatementSet) {
+                    if (!useStatementSet) {
                         break;
                     }
                 }
@@ -335,12 +344,12 @@ public class JobManager {
                 streamGraph.setJobName(config.getJobName());
                 JobGraph jobGraph = streamGraph.getJobGraph();
                 GatewayResult gatewayResult = null;
-                if (GatewayType.YARN_APPLICATION.equals(runMode)||GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
+                if (GatewayType.YARN_APPLICATION.equals(runMode) || GatewayType.KUBERNETES_APPLICATION.equals(runMode)) {
                    config.addGatewayConfig(executor.getSetConfig());
                    gatewayResult = Gateway.build(config.getGatewayConfig()).submitJar();
                 } else {
                    config.addGatewayConfig(executor.getSetConfig());
-                   if(Asserts.isNotNullString(config.getSavePointPath())) {
+                   if (Asserts.isNotNullString(config.getSavePointPath())) {
                        jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(config.getSavePointPath()));
                    }
                    gatewayResult = Gateway.build(config.getGatewayConfig()).submitJobGraph(jobGraph);
@@ -351,7 +360,7 @@ public class JobManager {
             } else {
                 for (StatementParam item : jobParam.getExecute()) {
                     executor.executeSql(item.getValue());
-                    if(!useStatementSet) {
+                    if (!useStatementSet) {
                         break;
                     }
                 }
@@ -360,7 +369,7 @@ public class JobManager {
                     job.setJobId(jobExecutionResult.getJobID().toHexString());
                 }
                 if (config.isUseResult()) {
-                    IResult result = ResultBuilder.build(SqlType.EXECUTE, config.getMaxRowNum(), config.isUseChangeLog(),config.isUseAutoCancel()).getResult(null);
+                    IResult result = ResultBuilder.build(SqlType.EXECUTE, config.getMaxRowNum(), config.isUseChangeLog(), config.isUseAutoCancel()).getResult(null);
                     job.setResult(result);
                 }
             }
@@ -369,7 +378,7 @@ public class JobManager {
             job.setStatus(Job.JobStatus.SUCCESS);
             success();
         } catch (Exception e) {
-            String error = LogUtil.getError("Exception in executing FlinkSQL:\n" + currentSql,e);
+            String error = LogUtil.getError("Exception in executing FlinkSQL:\n" + currentSql, e);
             job.setEndTime(LocalDateTime.now());
             job.setStatus(Job.JobStatus.FAILED);
             job.setError(error);
@@ -389,7 +398,7 @@ public class JobManager {
     }

     public IResult executeDDL(String statement) {
-        String[] statements = SqlUtil.getStatements(statement,sqlSeparator);
+        String[] statements = SqlUtil.getStatements(statement, sqlSeparator);
         try {
             for (String item : statements) {
                 String newStatement = executor.pretreatStatement(item);
@@ -402,7 +411,7 @@ public class JobManager {
                 }
                 LocalDateTime startTime = LocalDateTime.now();
                 TableResult tableResult = executor.executeSql(newStatement);
-                IResult result = ResultBuilder.build(operationType, config.getMaxRowNum(), false,false).getResult(tableResult);
+                IResult result = ResultBuilder.build(operationType, config.getMaxRowNum(), false, false).getResult(tableResult);
                 result.setStartTime(startTime);
                 return result;
             }
@@ -436,15 +445,15 @@ public class JobManager {
     }

     public ExplainResult explainSql(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).explainSql(statement);
+        return Explainer.build(executor, useStatementSet, sqlSeparator).explainSql(statement);
     }

     public ObjectNode getStreamGraph(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).getStreamGraph(statement);
+        return Explainer.build(executor, useStatementSet, sqlSeparator).getStreamGraph(statement);
     }

     public String getJobPlanJson(String statement) {
-        return Explainer.build(executor,useStatementSet,sqlSeparator).getJobPlanInfo(statement).getJsonPlan();
+        return Explainer.build(executor, useStatementSet, sqlSeparator).getJobPlanInfo(statement).getJsonPlan();
     }

     public boolean cancel(String jobId) {
@@ -454,22 +463,22 @@ public class JobManager {
             Gateway.build(config.getGatewayConfig()).savepointJob();
             return true;
         } else {
-            try{
+            try {
                 return FlinkAPI.build(config.getAddress()).stop(jobId);
-            }catch (Exception e){
+            } catch (Exception e) {
                 logger.info("停止作业时集群不存在");
             }
             return false;
         }
     }

-    public SavePointResult savepoint(String jobId,String savePointType,String savePoint) {
+    public SavePointResult savepoint(String jobId, String savePointType, String savePoint) {
         if (useGateway && !useRestAPI) {
             config.getGatewayConfig().setFlinkConfig(FlinkConfig.build(jobId, ActionType.SAVEPOINT.getValue(),
                     savePointType, null));
             return Gateway.build(config.getGatewayConfig()).savepointJob(savePoint);
         } else {
-            return FlinkAPI.build(config.getAddress()).savepoints(jobId,savePointType);
+            return FlinkAPI.build(config.getAddress()).savepoints(jobId, savePointType);
         }
     }
@@ -486,7 +495,7 @@ public class JobManager {
             job.setStatus(Job.JobStatus.SUCCESS);
             success();
         } catch (Exception e) {
-            String error = LogUtil.getError("Exception in executing Jar:\n" + config.getGatewayConfig().getAppConfig().getUserJarPath(),e);
+            String error = LogUtil.getError("Exception in executing Jar:\n" + config.getGatewayConfig().getAppConfig().getUserJarPath(), e);
             job.setEndTime(LocalDateTime.now());
             job.setStatus(Job.JobStatus.FAILED);
             job.setError(error);
@@ -497,39 +506,39 @@ public class JobManager {
         return job.getJobResult();
     }

-    public static TestResult testGateway(GatewayConfig gatewayConfig){
+    public static TestResult testGateway(GatewayConfig gatewayConfig) {
         return Gateway.build(gatewayConfig).test();
     }

-    public String exportSql(String sql){
+    public String exportSql(String sql) {
         String statement = executor.pretreatStatement(sql);
         StringBuilder sb = new StringBuilder();
-        if(Asserts.isNotNullString(config.getJobName())){
+        if (Asserts.isNotNullString(config.getJobName())) {
             sb.append("set " + PipelineOptions.NAME.key() + " = " + config.getJobName() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getParallelism())){
+        if (Asserts.isNotNull(config.getParallelism())) {
             sb.append("set " + CoreOptions.DEFAULT_PARALLELISM.key() + " = " + config.getParallelism() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getCheckpoint())){
+        if (Asserts.isNotNull(config.getCheckpoint())) {
             sb.append("set " + ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL.key() + " = " + config.getCheckpoint() + ";\r\n");
         }
-        if(Asserts.isNotNullString(config.getSavePointPath())){
+        if (Asserts.isNotNullString(config.getSavePointPath())) {
             sb.append("set " + SavepointConfigOptions.SAVEPOINT_PATH + " = " + config.getSavePointPath() + ";\r\n");
         }
-        if(Asserts.isNotNull(config.getGatewayConfig())&&Asserts.isNotNull(config.getGatewayConfig().getFlinkConfig().getConfiguration())) {
+        if (Asserts.isNotNull(config.getGatewayConfig()) && Asserts.isNotNull(config.getGatewayConfig().getFlinkConfig().getConfiguration())) {
             for (Map.Entry<String, String> entry : config.getGatewayConfig().getFlinkConfig().getConfiguration().entrySet()) {
                 sb.append("set " + entry.getKey() + " = " + entry.getValue() + ";\r\n");
             }
         }
-        switch (GatewayType.get(config.getType())){
+        switch (GatewayType.get(config.getType())) {
             case YARN_PER_JOB:
             case YARN_APPLICATION:
                 sb.append("set " + DeploymentOptions.TARGET.key() + " = " + GatewayType.get(config.getType()).getLongValue() + ";\r\n");
-                if(Asserts.isNotNull(config.getGatewayConfig())) {
+                if (Asserts.isNotNull(config.getGatewayConfig())) {
                     sb.append("set " + YarnConfigOptions.PROVIDED_LIB_DIRS.key() + " = " + Collections.singletonList(config.getGatewayConfig().getClusterConfig().getFlinkLibPath()) + ";\r\n");
                 }
-                if(Asserts.isNotNull(config.getGatewayConfig())&&Asserts.isNotNullString(config.getGatewayConfig().getFlinkConfig().getJobName())) {
+                if (Asserts.isNotNull(config.getGatewayConfig()) && Asserts.isNotNullString(config.getGatewayConfig().getFlinkConfig().getJobName())) {
                     sb.append("set " + YarnConfigOptions.APPLICATION_NAME.key() + " = " + config.getGatewayConfig().getFlinkConfig().getJobName() + ";\r\n");
                 }
         }
@@ -537,7 +546,7 @@ public class JobManager {
         return sb.toString();
     }

-    public static List<String> getUDFClassName(String statement){
+    public static List<String> getUDFClassName(String statement) {
         Pattern pattern = Pattern.compile("function (.*?)'(.*?)'", Pattern.CASE_INSENSITIVE);
         Matcher matcher = pattern.matcher(statement);
         List<String> classNameList = new ArrayList<>();
@@ -547,10 +556,10 @@ public class JobManager {
         return classNameList;
     }

-    public static void initUDF(String className,String code){
-        if(ClassPool.exist(ClassEntity.build(className,code))){
+    public static void initUDF(String className, String code) {
+        if (ClassPool.exist(ClassEntity.build(className, code))) {
             UDFUtil.initClassLoader(className);
-        }else{
+        } else {
             UDFUtil.buildClass(code);
         }
     }
...
@@ -78,4 +78,12 @@ public class SqlParserTest {
         Map<String,List<String>> lists = SingleSqlParserFactory.generateParser(sql);
         System.out.println(lists.toString());
     }
+
+    @Test
+    public void showFragmentTest(){
+        String sql = "show fragment test";
+        Map<String,List<String>> lists = SingleSqlParserFactory.generateParser(sql);
+        System.out.println(lists.toString());
+    }
 }
@@ -2,6 +2,7 @@ package com.dlink.executor;
 import com.dlink.assertion.Asserts;
 import com.dlink.interceptor.FlinkInterceptor;
+import com.dlink.interceptor.FlinkInterceptorResult;
 import com.dlink.result.SqlExplainResult;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -197,8 +198,8 @@ public abstract class Executor {
         return FlinkInterceptor.pretreatStatement(this, statement);
     }

-    private boolean pretreatExecute(String statement) {
-        return !FlinkInterceptor.build(this, statement);
+    private FlinkInterceptorResult pretreatExecute(String statement) {
+        return FlinkInterceptor.build(this, statement);
     }

     public JobExecutionResult execute(String jobName) throws Exception {
@@ -207,7 +208,11 @@ public abstract class Executor {
     public TableResult executeSql(String statement) {
         statement = pretreatStatement(statement);
-        if (pretreatExecute(statement)) {
+        FlinkInterceptorResult flinkInterceptorResult = pretreatExecute(statement);
+        if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) {
+            return flinkInterceptorResult.getTableResult();
+        }
+        if (!flinkInterceptorResult.isNoExecute()) {
             return stEnvironment.executeSql(statement);
         } else {
             return CustomTableResultImpl.TABLE_RESULT_OK;
@@ -216,7 +221,7 @@ public abstract class Executor {
     public String explainSql(String statement, ExplainDetail... extraDetails) {
         statement = pretreatStatement(statement);
-        if (pretreatExecute(statement)) {
+        if (!pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.explainSql(statement, extraDetails);
         } else {
             return "";
@@ -225,7 +230,7 @@ public abstract class Executor {
     public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) {
         statement = pretreatStatement(statement);
-        if (Asserts.isNotNullString(statement) && pretreatExecute(statement)) {
+        if (Asserts.isNotNullString(statement) && !pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.explainSqlRecord(statement, extraDetails);
         } else {
             return null;
@@ -234,7 +239,7 @@ public abstract class Executor {
     public ObjectNode getStreamGraph(String statement) {
         statement = pretreatStatement(statement);
-        if (pretreatExecute(statement)) {
+        if (!pretreatExecute(statement).isNoExecute()) {
             return stEnvironment.getStreamGraph(statement);
         } else {
             return null;
...
@@ -11,12 +11,7 @@ import org.apache.flink.table.catalog.exceptions.CatalogException;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.StringUtils;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -26,8 +21,9 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 /**
  * Flink Sql Fragment Manager
- * @author wenmo
- * @since 2021/6/7 22:06
+ *
+ * @author wenmo
+ * @since 2021/6/7 22:06
 **/
 public final class SqlManager {
@@ -111,6 +107,16 @@ public final class SqlManager {
         }
     }

+    public TableResult getSqlFragmentResult(String sqlFragmentName) {
+        if (Asserts.isNullString(sqlFragmentName)) {
+            return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragment", DataTypes.STRING()))), new ArrayList<>());
+        }
+        String sqlFragment = getSqlFragment(sqlFragmentName);
+        List<Row> rows = new ArrayList<>();
+        rows.add(Row.of(sqlFragment));
+        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragment", DataTypes.STRING()))), rows);
+    }
+
     /**
      * Get a fragment of sql under the given name. The sql fragment name must be existed.
      *
@@ -126,7 +132,7 @@ public final class SqlManager {
         for (String key : sqlFragments.keySet()) {
             rows.add(Row.of(key));
         }
-        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("sql fragment name", DataTypes.STRING()))), rows);
+        return CustomTableResultImpl.buildTableResult(new ArrayList<>(Arrays.asList(new TableSchemaField("fragmentName", DataTypes.STRING()))), rows);
     }

     public Iterator getSqlFragmentsIterator() {
@@ -141,9 +147,10 @@ public final class SqlManager {
         return environment.fromValues(keys);
     }

-    public boolean checkShowFragments(String sql){
+    public boolean checkShowFragments(String sql) {
         return SHOW_FRAGMENTS.equals(sql.trim().toUpperCase());
     }
+
     /**
      * Parse some variables under the given sql.
      *
...
@@ -3,11 +3,12 @@ package com.dlink.interceptor;
 import com.dlink.assertion.Asserts;
 import com.dlink.catalog.function.FunctionManager;
 import com.dlink.catalog.function.UDFunction;
-import com.dlink.executor.Executor;
 import com.dlink.executor.CustomTableEnvironmentImpl;
+import com.dlink.executor.Executor;
 import com.dlink.trans.Operation;
 import com.dlink.trans.Operations;
 import com.dlink.utils.SqlUtil;
+import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.TableAggregateFunction;
@@ -27,7 +28,7 @@ public class FlinkInterceptor {
     public static String pretreatStatement(Executor executor, String statement) {
         statement = SqlUtil.removeNote(statement);
-        if(executor.isUseSqlFragment()) {
+        if (executor.isUseSqlFragment()) {
             statement = executor.getSqlManager().parseVariable(statement);
         }
         // initFunctions(executor.getCustomTableEnvironmentImpl(), statement);
@@ -35,13 +36,15 @@ public class FlinkInterceptor {
     }

     // return false to continue with executeSql
-    public static boolean build(Executor executor, String statement) {
+    public static FlinkInterceptorResult build(Executor executor, String statement) {
+        boolean noExecute = false;
+        TableResult tableResult = null;
         Operation operation = Operations.buildOperation(statement);
         if (Asserts.isNotNull(operation)) {
-            operation.build(executor);
-            return operation.noExecute();
+            tableResult = operation.build(executor);
+            noExecute = operation.noExecute();
         }
-        return false;
+        return FlinkInterceptorResult.build(noExecute, tableResult);
     }

     @Deprecated
...
package com.dlink.interceptor;
import org.apache.flink.table.api.TableResult;
/**
* FlinkInterceptorResult
*
* @author wenmo
* @since 2022/2/17 16:36
**/
public class FlinkInterceptorResult {
private boolean noExecute;
private TableResult tableResult;
public FlinkInterceptorResult() {
}
public FlinkInterceptorResult(boolean noExecute, TableResult tableResult) {
this.noExecute = noExecute;
this.tableResult = tableResult;
}
public boolean isNoExecute() {
return noExecute;
}
public void setNoExecute(boolean noExecute) {
this.noExecute = noExecute;
}
public TableResult getTableResult() {
return tableResult;
}
public void setTableResult(TableResult tableResult) {
this.tableResult = tableResult;
}
public static FlinkInterceptorResult buildResult(TableResult tableResult) {
return new FlinkInterceptorResult(false, tableResult);
}
public static FlinkInterceptorResult build(boolean noExecute, TableResult tableResult) {
return new FlinkInterceptorResult(noExecute, tableResult);
}
}
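For orientation, a rough sketch of how callers are expected to consume this result object; it mirrors the new branches in Executor.executeSql and JobManager above, and the wrapper class InterceptorResultSketch exists only for illustration:

package com.dlink.interceptor;

import com.dlink.assertion.Asserts;
import org.apache.flink.table.api.TableResult;

class InterceptorResultSketch {
    // Three possible outcomes after FlinkInterceptor.build(executor, statement) has run.
    static void handle(FlinkInterceptorResult result) {
        if (Asserts.isNotNull(result.getTableResult())) {
            TableResult ready = result.getTableResult(); // e.g. the rows produced by "show fragment xxx"
            System.out.println("operation already produced a result: " + ready);
        } else if (!result.isNoExecute()) {
            System.out.println("hand the statement to stEnvironment.executeSql(...)");
        } else {
            System.out.println("nothing to execute, return TABLE_RESULT_OK");
        }
    }
}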
package com.dlink.parser;
/**
* ShowFragmentsParser
*
* @author wenmo
* @since 2022/2/17 16:19
**/
public class ShowFragmentParser extends BaseSingleSqlParser {
public ShowFragmentParser(String originalSql) {
super(originalSql);
}
@Override
protected void initializeSegments() {
//SHOW FRAGMENT (.+)
segments.add(new SqlSegment("FRAGMENT", "(show\\s+fragment)\\s+(.*)( ENDOFSQL)", ","));
}
}
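The single segment above is what extracts the fragment name: everything between "show fragment" and the ENDOFSQL sentinel appended by SingleSqlParserFactory. A self-contained illustration of that capture (the demo class is hypothetical; case-insensitive matching is assumed here, as in the factory's other patterns):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ShowFragmentRegexDemo {
    public static void main(String[] args) {
        // Same pattern as the SqlSegment registered above, applied to the statement used in SqlParserTest.
        Pattern pattern = Pattern.compile("(show\\s+fragment)\\s+(.*)( ENDOFSQL)", Pattern.CASE_INSENSITIVE);
        Matcher matcher = pattern.matcher("show fragment test ENDOFSQL");
        if (matcher.find()) {
            System.out.println(matcher.group(2)); // prints "test", the fragment name to resolve
        }
    }
}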
@@ -13,10 +13,10 @@ import java.util.regex.Pattern;
  */
 public class SingleSqlParserFactory {

-    public static Map<String,List<String>> generateParser(String sql) {
+    public static Map<String, List<String>> generateParser(String sql) {
         BaseSingleSqlParser tmp = null;
         // sql = sql.replace("\n"," ").replaceAll("\\s{1,}", " ") +" ENDOFSQL";
-        sql = sql.replace("\r\n"," ").replace("\n"," ") +" ENDOFSQL";
+        sql = sql.replace("\r\n", " ").replace("\n", " ") + " ENDOFSQL";
         if (contains(sql, "(insert\\s+into)(.+)(select)(.+)(from)(.+)")) {
             tmp = new InsertSelectSqlParser(sql);
         } else if (contains(sql, "(create\\s+aggtable)(.+)(as\\s+select)(.+)")) {
@@ -37,6 +37,8 @@ public class SingleSqlParserFactory {
         } else if (contains(sql, "(use)(.+)")) {
         } else if (contains(sql, "(set)(.+)")) {
             tmp = new SetSqlParser(sql);
+        } else if (contains(sql, "(show\\s+fragment)\\s+(.+)")) {
+            tmp = new ShowFragmentParser(sql);
         } else {
         }
         return tmp.splitSql2Segment();
...
 package com.dlink.trans;

 import com.dlink.executor.Executor;
+import org.apache.flink.table.api.TableResult;

 /**
  * Operation
@@ -14,7 +15,7 @@ public interface Operation {
     Operation create(String statement);

-    void build(Executor executor);
+    TableResult build(Executor executor);

     boolean noExecute();
 }
package com.dlink.trans; package com.dlink.trans;
import com.dlink.parser.SqlType; import com.dlink.parser.SqlType;
import com.dlink.trans.ddl.CreateAggTableOperation; import com.dlink.trans.ddl.*;
import com.dlink.trans.ddl.CreateCDCSourceOperation;
import com.dlink.trans.ddl.SetOperation;
/** /**
* Operations * Operations
...@@ -14,12 +12,14 @@ import com.dlink.trans.ddl.SetOperation; ...@@ -14,12 +12,14 @@ import com.dlink.trans.ddl.SetOperation;
public class Operations { public class Operations {
private static Operation[] operations = { private static Operation[] operations = {
new CreateAggTableOperation() new CreateAggTableOperation()
, new SetOperation() , new SetOperation()
, new CreateCDCSourceOperation() , new CreateCDCSourceOperation()
, new ShowFragmentsOperation()
, new ShowFragmentOperation()
}; };
public static SqlType getSqlTypeFromStatements(String statement){ public static SqlType getSqlTypeFromStatements(String statement) {
String[] statements = statement.split(";"); String[] statements = statement.split(";");
SqlType sqlType = SqlType.UNKNOWN; SqlType sqlType = SqlType.UNKNOWN;
for (String item : statements) { for (String item : statements) {
...@@ -27,7 +27,7 @@ public class Operations { ...@@ -27,7 +27,7 @@ public class Operations {
continue; continue;
} }
sqlType = Operations.getOperationType(item); sqlType = Operations.getOperationType(item);
if(sqlType == SqlType.INSERT ||sqlType == SqlType.SELECT){ if (sqlType == SqlType.INSERT || sqlType == SqlType.SELECT) {
return sqlType; return sqlType;
} }
} }
...@@ -46,10 +46,10 @@ public class Operations { ...@@ -46,10 +46,10 @@ public class Operations {
return type; return type;
} }
public static Operation buildOperation(String statement){ public static Operation buildOperation(String statement) {
String sql = statement.replace("\n"," ").replaceAll("\\s{1,}", " ").trim().toUpperCase(); String sql = statement.replace("\n", " ").replaceAll("\\s{1,}", " ").trim().toUpperCase();
for (int i = 0; i < operations.length; i++) { for (int i = 0; i < operations.length; i++) {
if(sql.startsWith(operations[i].getHandle())){ if (sql.startsWith(operations[i].getHandle())) {
return operations[i].create(statement); return operations[i].create(statement);
} }
} }
......
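The handle list now includes the two fragment operations. Dispatch stays prefix-based: the statement is uppercased and whitespace-collapsed, then matched with `startsWith`, and the trailing space in the `SHOW FRAGMENT ` handle keeps it from also swallowing `SHOW FRAGMENTS`. An illustrative sketch under those assumptions:

```java
import com.dlink.trans.Operation;
import com.dlink.trans.Operations;

// Illustrative sketch of the prefix dispatch; the class names in the comments are the
// expected matches given the handles defined in this commit.
public class OperationsDispatchDemo {
    public static void main(String[] args) {
        Operation showAll = Operations.buildOperation("show fragments");        // ShowFragmentsOperation
        Operation showOne = Operations.buildOperation("show fragment my_frag"); // ShowFragmentOperation
        System.out.println(showAll.getClass().getSimpleName());
        System.out.println(showOne.getClass().getSimpleName());
    }
}
```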
...@@ -4,6 +4,7 @@ import com.dlink.executor.Executor; ...@@ -4,6 +4,7 @@ import com.dlink.executor.Executor;
import com.dlink.trans.AbstractOperation; import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation; import com.dlink.trans.Operation;
import org.apache.flink.table.api.Table; import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import java.util.List; import java.util.List;
...@@ -13,7 +14,7 @@ import java.util.List; ...@@ -13,7 +14,7 @@ import java.util.List;
* @author wenmo * @author wenmo
* @since 2021/6/13 19:24 * @since 2021/6/13 19:24
*/ */
public class CreateAggTableOperation extends AbstractOperation implements Operation{ public class CreateAggTableOperation extends AbstractOperation implements Operation {
private String KEY_WORD = "CREATE AGGTABLE"; private String KEY_WORD = "CREATE AGGTABLE";
...@@ -35,11 +36,11 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat ...@@ -35,11 +36,11 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat
} }
@Override @Override
public void build(Executor executor) { public TableResult build(Executor executor) {
AggTable aggTable = AggTable.build(statement); AggTable aggTable = AggTable.build(statement);
Table source = executor.getCustomTableEnvironment().sqlQuery("select * from "+ aggTable.getTable()); Table source = executor.getCustomTableEnvironment().sqlQuery("select * from " + aggTable.getTable());
List<String> wheres = aggTable.getWheres(); List<String> wheres = aggTable.getWheres();
if(wheres!=null&&wheres.size()>0) { if (wheres != null && wheres.size() > 0) {
for (String s : wheres) { for (String s : wheres) {
source = source.filter(s); source = source.filter(s);
} }
...@@ -48,5 +49,6 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat ...@@ -48,5 +49,6 @@ public class CreateAggTableOperation extends AbstractOperation implements Operat
.flatAggregate(aggTable.getAggBy()) .flatAggregate(aggTable.getAggBy())
.select(aggTable.getColumns()); .select(aggTable.getColumns());
executor.getCustomTableEnvironment().registerTable(aggTable.getName(), sink); executor.getCustomTableEnvironment().registerTable(aggTable.getName(), sink);
return null;
} }
} }
...@@ -5,9 +5,10 @@ import com.dlink.executor.Executor; ...@@ -5,9 +5,10 @@ import com.dlink.executor.Executor;
import com.dlink.model.FlinkCDCConfig; import com.dlink.model.FlinkCDCConfig;
import com.dlink.trans.AbstractOperation; import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation; import com.dlink.trans.Operation;
import org.apache.flink.table.api.TableResult;
/** /**
* TODO * CreateCDCSourceOperation
* *
* @author wenmo * @author wenmo
* @since 2022/1/29 23:25 * @since 2022/1/29 23:25
...@@ -34,15 +35,16 @@ public class CreateCDCSourceOperation extends AbstractOperation implements Opera ...@@ -34,15 +35,16 @@ public class CreateCDCSourceOperation extends AbstractOperation implements Opera
} }
@Override @Override
public void build(Executor executor) { public TableResult build(Executor executor) {
CDCSource cdcSource = CDCSource.build(statement); CDCSource cdcSource = CDCSource.build(statement);
FlinkCDCConfig config = new FlinkCDCConfig(cdcSource.getHostname(),cdcSource.getPort(),cdcSource.getUsername() FlinkCDCConfig config = new FlinkCDCConfig(cdcSource.getHostname(), cdcSource.getPort(), cdcSource.getUsername()
,cdcSource.getPassword(),cdcSource.getCheckpoint(),cdcSource.getParallelism(),cdcSource.getDatabase(),cdcSource.getTable() , cdcSource.getPassword(), cdcSource.getCheckpoint(), cdcSource.getParallelism(), cdcSource.getDatabase(), cdcSource.getTable()
,cdcSource.getStartupMode(),cdcSource.getTopic(),cdcSource.getBrokers()); , cdcSource.getStartupMode(), cdcSource.getTopic(), cdcSource.getBrokers());
try { try {
FlinkCDCMergeBuilder.buildMySqlCDC(executor.getStreamExecutionEnvironment(),config); FlinkCDCMergeBuilder.buildMySqlCDC(executor.getStreamExecutionEnvironment(), config);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
return null;
} }
} }
...@@ -7,6 +7,7 @@ import com.dlink.trans.AbstractOperation; ...@@ -7,6 +7,7 @@ import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation; import com.dlink.trans.Operation;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.TableResult;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
...@@ -40,22 +41,23 @@ public class SetOperation extends AbstractOperation implements Operation { ...@@ -40,22 +41,23 @@ public class SetOperation extends AbstractOperation implements Operation {
} }
@Override @Override
public void build(Executor executor) { public TableResult build(Executor executor) {
try { try {
if(null != Class.forName("org.apache.log4j.Logger")){ if (null != Class.forName("org.apache.log4j.Logger")) {
executor.parseAndLoadConfiguration(statement); executor.parseAndLoadConfiguration(statement);
return; return null;
} }
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
} }
Map<String,List<String>> map = SingleSqlParserFactory.generateParser(statement); Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
if(Asserts.isNotNullMap(map)&&map.size()==2) { if (Asserts.isNotNullMap(map) && map.size() == 2) {
Map<String, String> confMap = new HashMap<>(); Map<String, String> confMap = new HashMap<>();
confMap.put(StringUtils.join(map.get("SET"), "."), StringUtils.join(map.get("="), ",")); confMap.put(StringUtils.join(map.get("SET"), "."), StringUtils.join(map.get("="), ","));
executor.getCustomTableEnvironment().getConfig().addConfiguration(Configuration.fromMap(confMap)); executor.getCustomTableEnvironment().getConfig().addConfiguration(Configuration.fromMap(confMap));
Configuration configuration = Configuration.fromMap(confMap); Configuration configuration = Configuration.fromMap(confMap);
executor.getExecutionConfig().configure(configuration,null); executor.getExecutionConfig().configure(configuration, null);
executor.getCustomTableEnvironment().getConfig().addConfiguration(configuration); executor.getCustomTableEnvironment().getConfig().addConfiguration(configuration);
} }
return null;
} }
} }
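For reference, the map produced by `SingleSqlParserFactory` for a `SET` statement is folded into a Flink `Configuration`: the tokens under the `SET` key are joined with `.` to form the option name and the tokens under `=` are joined with `,` to form the value. A hedged sketch of that folding; the exact segment shape comes from `SetSqlParser`, which is not part of this diff, so the lists below are assumptions:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;

// Hedged sketch of how SetOperation folds parsed SET segments into a Configuration.
// The lists mimic an assumed parse of "set table.exec.resource.default-parallelism = 2".
public class SetOperationConfDemo {
    public static void main(String[] args) {
        Map<String, String> confMap = new HashMap<>();
        confMap.put(
                StringUtils.join(Arrays.asList("table.exec.resource.default-parallelism"), "."),
                StringUtils.join(Arrays.asList("2"), ","));
        Configuration configuration = Configuration.fromMap(confMap);
        System.out.println(configuration.toMap()); // {table.exec.resource.default-parallelism=2}
    }
}
```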
package com.dlink.trans.ddl;
import com.dlink.assertion.Asserts;
import com.dlink.executor.Executor;
import com.dlink.parser.SingleSqlParserFactory;
import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.table.api.TableResult;
import java.util.List;
import java.util.Map;
/**
* ShowFragmentOperation
*
* @author wenmo
* @since 2022/2/17 17:08
**/
public class ShowFragmentOperation extends AbstractOperation implements Operation {
private String KEY_WORD = "SHOW FRAGMENT ";
public ShowFragmentOperation() {
}
public ShowFragmentOperation(String statement) {
super(statement);
}
@Override
public String getHandle() {
return KEY_WORD;
}
@Override
public Operation create(String statement) {
return new ShowFragmentOperation(statement);
}
@Override
public TableResult build(Executor executor) {
Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
if (Asserts.isNotNullMap(map)) {
if (map.containsKey("FRAGMENT")) {
return executor.getSqlManager().getSqlFragmentResult(StringUtils.join(map.get("FRAGMENT"), ""));
}
}
return executor.getSqlManager().getSqlFragmentResult(null);
}
}
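A hedged usage sketch for the new single-fragment statement. The `Executor` construction and the behaviour of `SqlManager#getSqlFragmentResult` (returning the fragment's value as a result set) are assumed, since neither appears in this diff, and the fragment name is illustrative:

```java
import com.dlink.executor.Executor;
import com.dlink.trans.ddl.ShowFragmentOperation;
import org.apache.flink.table.api.TableResult;

// Hedged sketch: looking up one fragment by name. The fragment "my_frag" is assumed
// to have been registered beforehand, and "executor" to be an already-built Executor.
public class ShowFragmentDemo {
    public static void show(Executor executor) {
        TableResult result = new ShowFragmentOperation("show fragment my_frag").build(executor);
        result.print(); // assumed: a row containing the value of fragment "my_frag"
    }
}
```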
package com.dlink.trans.ddl;
import com.dlink.executor.Executor;
import com.dlink.trans.AbstractOperation;
import com.dlink.trans.Operation;
import org.apache.flink.table.api.TableResult;
/**
* ShowFragmentsOperation
*
* @author wenmo
* @since 2022/2/17 16:31
**/
public class ShowFragmentsOperation extends AbstractOperation implements Operation {
private String KEY_WORD = "SHOW FRAGMENTS";
public ShowFragmentsOperation() {
}
public ShowFragmentsOperation(String statement) {
super(statement);
}
@Override
public String getHandle() {
return KEY_WORD;
}
@Override
public Operation create(String statement) {
return new ShowFragmentsOperation(statement);
}
@Override
public TableResult build(Executor executor) {
return executor.getSqlManager().getSqlFragments();
}
}
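And the companion statement for listing every registered fragment; again a hedged sketch, assuming `SqlManager#getSqlFragments()` returns the full fragment table:

```java
import com.dlink.executor.Executor;
import com.dlink.trans.ddl.ShowFragmentsOperation;
import org.apache.flink.table.api.TableResult;

// Hedged sketch: listing all registered fragments through the new operation.
public class ShowFragmentsDemo {
    public static void showAll(Executor executor) {
        TableResult result = new ShowFragmentsOperation("SHOW FRAGMENTS").build(executor);
        result.print(); // assumed: one row per registered fragment
    }
}
```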