Unverified Commit 0459b324 authored by Licho, committed by GitHub

refactor: improve code readability, eliminate hidden problems; wait for 0.7 (#1155)

* [WIP] refactor: improve code readability.

* refactor: have constructors delegate to other constructors

* refactor: simplify and reformat Asserts.java.

* refactor: simplify code

* refactor: simplify code

* chore: reformat code

* chore: remove unused import

* feat: improve concurrency strictness of shared variables

* feat: encapsulate start/end time setter functions.

* feat: add AutoCloseable to the Driver interface.

* refactor: remove double-brace initialization; it is a hidden danger.
parent 30fad324
......@@ -31,6 +31,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
/**
* StudioExecuteDTO
......@@ -40,6 +41,7 @@ import lombok.Setter;
*/
@Getter
@Setter
@Slf4j
public class StudioExecuteDTO extends AbstractStatementDTO {
// RUN_MODE
private String type;
......@@ -67,10 +69,9 @@ public class StudioExecuteDTO extends AbstractStatementDTO {
public JobConfig getJobConfig() {
Map<String, String> config = new HashMap<>();
JsonNode paras = null;
if (Asserts.isNotNullString(configJson)) {
try {
paras = mapper.readTree(configJson);
JsonNode paras = mapper.readTree(configJson);
paras.forEach((JsonNode node) -> {
if (!node.isNull()) {
config.put(node.get("key").asText(), node.get("value").asText());
......@@ -78,7 +79,7 @@ public class StudioExecuteDTO extends AbstractStatementDTO {
}
);
} catch (JsonProcessingException e) {
e.printStackTrace();
log.error(e.getMessage());
}
}
return new JobConfig(
......
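The hunk above narrows the parsed JsonNode to the try block and swaps printStackTrace for slf4j logging. A minimal standalone sketch of the same parsing pattern, assuming a configJson string holding an array of {"key": ..., "value": ...} objects and Jackson plus Lombok on the classpath (both imported in the diff); the class and method names here are hypothetical:

```java
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.HashMap;
import java.util.Map;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class ConfigJsonExample {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Parses a JSON array of {"key": ..., "value": ...} objects into a flat map. */
    public static Map<String, String> parseConfig(String configJson) {
        Map<String, String> config = new HashMap<>();
        if (configJson == null || configJson.isEmpty()) {
            return config;
        }
        try {
            JsonNode paras = MAPPER.readTree(configJson);
            paras.forEach(node -> {
                // Each element is expected to carry "key" and "value" fields.
                if (!node.isNull()) {
                    config.put(node.get("key").asText(), node.get("value").asText());
                }
            });
        } catch (JsonProcessingException e) {
            // Log through slf4j instead of printing the stack trace to stderr.
            log.error("Failed to parse config JSON: {}", e.getMessage(), e);
        }
        return config;
    }
}
```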
......@@ -171,9 +171,8 @@ public class DataBaseServiceImpl extends SuperServiceImpl<DataBaseMapper, DataBa
@Override
public List<String> listEnabledFlinkWith() {
List<DataBase> dataBases = listEnabledAll();
List<String> list = new ArrayList<>();
for (DataBase dataBase : dataBases) {
for (DataBase dataBase : listEnabledAll()) {
if (Asserts.isNotNullString(dataBase.getFlinkConfig())) {
list.add(dataBase.getName() + ":=" + dataBase.getFlinkConfig() + "\n;\n");
}
......
......@@ -48,9 +48,8 @@ public class FragmentVariableServiceImpl extends SuperServiceImpl<FragmentVariab
@Override
public Map<String, String> listEnabledVariables() {
List<FragmentVariable> fragmentVariables = listEnabledAll();
Map<String, String> variables = new LinkedHashMap<>();
for (FragmentVariable fragmentVariable : fragmentVariables) {
for (FragmentVariable fragmentVariable : listEnabledAll()) {
variables.put(fragmentVariable.getName(), fragmentVariable.getFragmentValue());
}
return variables;
......
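Both service hunks above drop the intermediate local and iterate directly over listEnabledAll(). A stream-based collection is another way to express the variables map; a sketch under the assumption that it lives in the same service class and that FragmentVariable exposes the getters used in the diff:

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

// Hypothetical alternative inside the same service class.
public Map<String, String> listEnabledVariables() {
    return listEnabledAll().stream()
            .collect(Collectors.toMap(
                    FragmentVariable::getName,
                    FragmentVariable::getFragmentValue,
                    (first, second) -> second,    // keep the later value if a name repeats
                    LinkedHashMap::new));         // preserve insertion order like the original loop
}
```

Note that Collectors.toMap rejects null values, so the plain loop in the hunk remains the safer choice if getFragmentValue() can return null.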
......@@ -52,8 +52,6 @@ import com.dlink.model.UDFPath;
import com.dlink.process.context.ProcessContextHolder;
import com.dlink.process.model.ProcessEntity;
import com.dlink.process.model.ProcessType;
import com.dlink.process.pool.ConsolePool;
import com.dlink.process.pool.ProcessPool;
import com.dlink.result.DDLResult;
import com.dlink.result.IResult;
import com.dlink.result.SelectResult;
......@@ -72,15 +70,13 @@ import com.dlink.session.SessionPool;
import com.dlink.sql.FlinkQuery;
import com.dlink.utils.RunTimeUtil;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.JsonProcessingException;
......@@ -101,22 +97,29 @@ public class StudioServiceImpl implements StudioService {
private static final Logger logger = LoggerFactory.getLogger(StudioServiceImpl.class);
@Autowired
private ClusterService clusterService;
@Autowired
private ClusterConfigurationService clusterConfigurationService;
@Autowired
private SavepointsService savepointsService;
@Autowired
private DataBaseService dataBaseService;
@Autowired
private TaskService taskService;
@Autowired
private FragmentVariableService fragmentVariableService;
@Autowired
private UDFService udfService;
private final ClusterService clusterService;
private final ClusterConfigurationService clusterConfigurationService;
private final SavepointsService savepointsService;
private final DataBaseService dataBaseService;
private final TaskService taskService;
private final FragmentVariableService fragmentVariableService;
private final UDFService udfService;
public StudioServiceImpl(ClusterService clusterService,
ClusterConfigurationService clusterConfigurationService,
SavepointsService savepointsService,
DataBaseService dataBaseService,
TaskService taskService,
FragmentVariableService fragmentVariableService,
UDFService udfService) {
this.clusterService = clusterService;
this.clusterConfigurationService = clusterConfigurationService;
this.savepointsService = savepointsService;
this.dataBaseService = dataBaseService;
this.taskService = taskService;
this.fragmentVariableService = fragmentVariableService;
this.udfService = udfService;
}
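The hunk above replaces field @Autowired injection with constructor injection, which lets the dependencies be declared final and makes the service straightforward to construct in tests. A minimal sketch of the same style, assuming Spring; with a single constructor, Spring injects it without an explicit @Autowired (since 4.3). The class name is hypothetical:

```java
import org.springframework.stereotype.Service;

@Service
public class GreetingService {

    // Final dependencies: assigned once in the constructor, never reassigned.
    private final ClusterService clusterService;
    private final TaskService taskService;

    // Spring autowires the single constructor automatically.
    public GreetingService(ClusterService clusterService, TaskService taskService) {
        this.clusterService = clusterService;
        this.taskService = taskService;
    }
}
```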
private void addFlinkSQLEnv(AbstractStatementDTO statementDTO) {
ProcessEntity process = ProcessContextHolder.getProcess();
......@@ -156,7 +159,7 @@ public class StudioServiceImpl implements StudioService {
}
private void buildSession(JobConfig config) {
// If you are using a shared session, configure the current jobmanager address
// If you are using a shared session, configure the current jobManager address
if (!config.isUseSession()) {
config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), config.getClusterId()));
}
......@@ -165,8 +168,10 @@ public class StudioServiceImpl implements StudioService {
@Override
public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) {
if (Dialect.notFlinkSql(studioExecuteDTO.getDialect())) {
return executeCommonSql(SqlDTO.build(studioExecuteDTO.getStatement(),
studioExecuteDTO.getDatabaseId(), studioExecuteDTO.getMaxRowNum()));
return executeCommonSql(SqlDTO.build(
studioExecuteDTO.getStatement(),
studioExecuteDTO.getDatabaseId(),
studioExecuteDTO.getMaxRowNum()));
} else {
return executeFlinkSql(studioExecuteDTO);
}
......@@ -176,6 +181,7 @@ public class StudioServiceImpl implements StudioService {
addFlinkSQLEnv(studioExecuteDTO);
JobConfig config = studioExecuteDTO.getJobConfig();
buildSession(config);
// To initialize the Java UDF, but it only supports local mode.
UDFPath udfPath = udfService.initUDF(studioExecuteDTO.getStatement(), GatewayType.get(config.getType()));
config.setJarFiles(udfPath.getJarPaths());
......@@ -199,23 +205,27 @@ public class StudioServiceImpl implements StudioService {
public JobResult executeCommonSql(SqlDTO sqlDTO) {
JobResult result = new JobResult();
result.setStatement(sqlDTO.getStatement());
result.setStartTime(LocalDateTime.now());
result.setStartTimeNow();
if (Asserts.isNull(sqlDTO.getDatabaseId())) {
result.setSuccess(false);
result.setError("请指定数据源");
result.setEndTime(LocalDateTime.now());
result.setEndTimeNow();
return result;
} else {
}
DataBase dataBase = dataBaseService.getById(sqlDTO.getDatabaseId());
if (Asserts.isNull(dataBase)) {
result.setSuccess(false);
result.setError("数据源不存在");
result.setEndTime(LocalDateTime.now());
result.setEndTimeNow();
return result;
}
Driver driver = Driver.build(dataBase.getDriverConfig());
JdbcSelectResult selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
driver.close();
JdbcSelectResult selectResult;
try (Driver driver = Driver.build(dataBase.getDriverConfig())) {
selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum());
}
result.setResult(selectResult);
if (selectResult.isSuccess()) {
result.setSuccess(true);
......@@ -223,10 +233,9 @@ public class StudioServiceImpl implements StudioService {
result.setSuccess(false);
result.setError(selectResult.getError());
}
result.setEndTime(LocalDateTime.now());
result.setEndTimeNow();
return result;
}
}
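The rewritten executeCommonSql returns early on each failure instead of nesting the happy path in an else branch, and records timestamps through the new JobResult helpers. A reduced sketch of the guard-clause shape; the error strings and method names are placeholders:

```java
// Nested shape: every new check pushes the happy path one level deeper.
JobResult nested(SqlDTO dto) {
    JobResult result = new JobResult();
    result.setStartTimeNow();
    if (dto.getDatabaseId() != null) {
        // ... run the statement here, at one extra indentation level ...
        result.setSuccess(true);
    } else {
        result.setSuccess(false);
        result.setError("no data source specified");   // placeholder message
    }
    result.setEndTimeNow();
    return result;
}

// Guard-clause shape used in the hunk above: fail fast, keep the happy path flat.
JobResult flat(SqlDTO dto) {
    JobResult result = new JobResult();
    result.setStartTimeNow();
    if (dto.getDatabaseId() == null) {
        result.setSuccess(false);
        result.setError("no data source specified");    // placeholder message
        result.setEndTimeNow();
        return result;
    }
    // ... run the statement here, at the top level ...
    result.setSuccess(true);
    result.setEndTimeNow();
    return result;
}
```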
@Override
public IResult executeDDL(StudioDDLDTO studioDDLDTO) {
......@@ -249,8 +258,6 @@ public class StudioServiceImpl implements StudioService {
}
private List<SqlExplainResult> explainFlinkSql(StudioExecuteDTO studioExecuteDTO) {
Map<String, ProcessEntity> map = ProcessPool.getInstance().getMap();
Map<String, StringBuilder> map2 = ConsolePool.getInstance().getMap();
ProcessEntity process = ProcessContextHolder.registerProcess(
ProcessEntity.init(ProcessType.FLINKEXPLAIN, SaManager.getStpLogic(null).getLoginIdAsInt(), "admin"));
......@@ -262,11 +269,13 @@ public class StudioServiceImpl implements StudioService {
config.buildLocal();
buildSession(config);
process.infoSuccess();
// To initialize the Java UDF, but it currently has a bug in the production environment.
UDFPath udfPath = udfService.initUDF(studioExecuteDTO.getStatement(), GatewayType.get(config.getType()));
config.setJarFiles(udfPath.getJarPaths());
config.setPyFiles(udfPath.getPyPaths());
process.start();
JobManager jobManager = JobManager.buildPlanMode(config);
List<SqlExplainResult> sqlExplainResults =
jobManager.explainSql(studioExecuteDTO.getStatement()).getSqlExplainResults();
......@@ -276,26 +285,16 @@ public class StudioServiceImpl implements StudioService {
private List<SqlExplainResult> explainCommonSql(StudioExecuteDTO studioExecuteDTO) {
if (Asserts.isNull(studioExecuteDTO.getDatabaseId())) {
return new ArrayList<SqlExplainResult>() {
{
add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
return Collections.singletonList(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
}
};
} else {
DataBase dataBase = dataBaseService.getById(studioExecuteDTO.getDatabaseId());
if (Asserts.isNull(dataBase)) {
return new ArrayList<SqlExplainResult>() {
{
add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
return Collections.singletonList(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
}
};
}
Driver driver = Driver.build(dataBase.getDriverConfig());
List<SqlExplainResult> sqlExplainResults = driver.explain(studioExecuteDTO.getStatement());
driver.close();
return sqlExplainResults;
try (Driver driver = Driver.build(dataBase.getDriverConfig())) {
return driver.explain(studioExecuteDTO.getStatement());
}
}
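The double-brace initializer removed above is the "hidden danger" named in the commit message: written inside an instance method, it creates an anonymous ArrayList subclass that holds a reference to the enclosing object, adds one synthetic class per call site, and can surprise equals and serialization. A small sketch of both styles, inside any instance method, with placeholder strings:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Double-brace initialization: an anonymous ArrayList subclass with an
// instance initializer block. It captures the enclosing instance and emits
// an extra synthetic class per call site.
List<String> before = new ArrayList<String>() {
    {
        add("no data source specified");   // placeholder message
    }
};

// Replacement used in the hunk above: a fixed-size, single-element list
// with no anonymous class and no captured outer reference.
List<String> after = Collections.singletonList("no data source specified");
```

Collections.singletonList is immutable, which is fine for a value that is only returned; callers that need to append elements later should build a regular ArrayList instead.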
......@@ -430,17 +429,15 @@ public class StudioServiceImpl implements StudioService {
JobConfig jobConfig = new JobConfig();
jobConfig.setAddress(cluster.getJobManagerHost());
jobConfig.setType(cluster.getType());
// If the user lets the dlink platform manage the cluster information, the job must have been submitted from dlink
if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
Map<String, Object> gatewayConfig =
clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
// If the user lets the dlink platform manage the cluster information, the job must have been submitted from dlink
Map<String, Object> gatewayConfig = clusterConfigurationService.getGatewayConfig(cluster.getClusterConfigurationId());
jobConfig.buildGatewayConfig(gatewayConfig);
jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
jobConfig.setTaskId(cluster.getTaskId());
useGateway = true;
}
} else {
// The user manages the cluster information on an external platform, so jobs on the cluster are not necessarily submitted through dlink
else {
jobConfig.setTaskId(taskId);
}
JobManager jobManager = JobManager.build(jobConfig);
......@@ -451,6 +448,7 @@ public class StudioServiceImpl implements StudioService {
if (jobConfig.getTaskId().equals(0)) {
return true;
}
for (JobInfo item : savePointResult.getJobInfos()) {
if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId()) && Asserts.isNotNull(jobConfig.getTaskId())) {
Savepoints savepoints = new Savepoints();
......@@ -480,34 +478,29 @@ public class StudioServiceImpl implements StudioService {
} else {
studioMetaStoreDTO.setStatement(FlinkQuery.showCatalogs());
IResult result = executeMSFlinkSql(studioMetaStoreDTO);
if (result instanceof DDLResult) {
DDLResult ddlResult = (DDLResult) result;
Iterator<String> iterator = ddlResult.getColumns().iterator();
if (iterator.hasNext()) {
String key = iterator.next();
List<Map<String, Object>> rowData = ddlResult.getRowData();
for (Map<String, Object> item : rowData) {
ddlResult.getColumns().stream().findFirst().ifPresent(key -> {
for (Map<String, Object> item : ddlResult.getRowData()) {
catalogs.add(Catalog.build(item.get(key).toString()));
}
}
);
for (Catalog catalog : catalogs) {
String statement = FlinkQuery.useCatalog(catalog.getName()) + FlinkQuery.separator()
+ FlinkQuery.showDatabases();
String statement = FlinkQuery.useCatalog(catalog.getName()) + FlinkQuery.separator() + FlinkQuery.showDatabases();
studioMetaStoreDTO.setStatement(statement);
IResult tableResult = executeMSFlinkSql(studioMetaStoreDTO);
if (result instanceof DDLResult) {
DDLResult tableDDLResult = (DDLResult) tableResult;
Iterator<String> tableIterator = tableDDLResult.getColumns().iterator();
if (tableIterator.hasNext()) {
String key = tableIterator.next();
tableDDLResult.getColumns().stream().findFirst().ifPresent(key -> {
List<Map<String, Object>> rowData = tableDDLResult.getRowData();
List<Schema> schemas = new ArrayList<>();
for (Map<String, Object> item : rowData) {
schemas.add(Schema.build(item.get(key).toString()));
}
catalog.setSchemas(schemas);
}
}
});
}
}
}
......@@ -525,24 +518,25 @@ public class StudioServiceImpl implements StudioService {
tables.addAll(driver.listTables(studioMetaStoreDTO.getDatabase()));
}
} else {
String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog()) + FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog())
+ FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase())
+ FlinkQuery.separator();
// show tables
String tableStatement = baseStatement + FlinkQuery.showTables();
studioMetaStoreDTO.setStatement(tableStatement);
IResult result = executeMSFlinkSql(studioMetaStoreDTO);
if (result instanceof DDLResult) {
DDLResult ddlResult = (DDLResult) result;
Iterator<String> iterator = ddlResult.getColumns().iterator();
if (iterator.hasNext()) {
String key = iterator.next();
ddlResult.getColumns().stream().findFirst().ifPresent(key -> {
List<Map<String, Object>> rowData = ddlResult.getRowData();
for (Map<String, Object> item : rowData) {
Table table = Table.build(item.get(key).toString(), studioMetaStoreDTO.getDatabase());
table.setCatalog(studioMetaStoreDTO.getCatalog());
tables.add(table);
}
}
});
}
// show views
schema.setViews(showInfo(studioMetaStoreDTO, baseStatement, FlinkQuery.showViews()));
......@@ -560,11 +554,13 @@ public class StudioServiceImpl implements StudioService {
@Override
public List<FlinkColumn> getMSFlinkColumns(StudioMetaStoreDTO studioMetaStoreDTO) {
List<FlinkColumn> columns = new ArrayList<>();
if (Dialect.notFlinkSql(studioMetaStoreDTO.getDialect())) {
// nothing to do
} else {
String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog()) + FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) + FlinkQuery.separator();
if (!Dialect.notFlinkSql(studioMetaStoreDTO.getDialect())) {
String baseStatement = FlinkQuery.useCatalog(
studioMetaStoreDTO.getCatalog())
+ FlinkQuery.separator()
+ FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase())
+ FlinkQuery.separator();
// desc tables
String tableStatement = baseStatement + FlinkQuery.descTable(studioMetaStoreDTO.getTable());
studioMetaStoreDTO.setStatement(tableStatement);
......@@ -591,19 +587,15 @@ public class StudioServiceImpl implements StudioService {
private List<String> showInfo(StudioMetaStoreDTO studioMetaStoreDTO, String baseStatement, String statement) {
List<String> infos = new ArrayList<>();
String tableStatement = baseStatement + statement;
studioMetaStoreDTO.setStatement(tableStatement);
studioMetaStoreDTO.setStatement(baseStatement + statement);
IResult result = executeMSFlinkSql(studioMetaStoreDTO);
if (result instanceof DDLResult) {
DDLResult ddlResult = (DDLResult) result;
Iterator<String> iterator = ddlResult.getColumns().iterator();
if (iterator.hasNext()) {
String key = iterator.next();
List<Map<String, Object>> rowData = ddlResult.getRowData();
for (Map<String, Object> item : rowData) {
ddlResult.getColumns().stream().findFirst().ifPresent(key -> {
for (Map<String, Object> item : ddlResult.getRowData()) {
infos.add(item.get(key).toString());
}
}
});
}
return infos;
}
......
......@@ -21,8 +21,10 @@ package com.dlink.assertion;
import com.dlink.exception.RunTimeException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
/**
* Asserts
......@@ -44,17 +46,11 @@ public class Asserts {
}
public static boolean isNullString(String str) {
return isNull(str) || "".equals(str);
return isNull(str) || str.isEmpty();
}
public static boolean isAllNullString(String... str) {
boolean isNull = true;
for (String item : str) {
if (isNotNullString(item)) {
isNull = false;
}
}
return isNull;
return Arrays.stream(str).allMatch(Asserts::isNullString);
}
public static boolean isNotNullString(String str) {
......@@ -62,33 +58,15 @@ public class Asserts {
}
public static boolean isAllNotNullString(String... str) {
boolean isNotNull = true;
for (String item : str) {
if (isNullString(item)) {
isNotNull = false;
}
}
return isNotNull;
return Arrays.stream(str).noneMatch(Asserts::isNullString);
}
public static boolean isEquals(String str1, String str2) {
if (isNull(str1) && isNull(str2)) {
return true;
} else if (isNull(str1) || isNull(str2)) {
return false;
} else {
return str1.equals(str2);
}
return Objects.equals(str1, str2);
}
public static boolean isEqualsIgnoreCase(String str1, String str2) {
if (isNull(str1) && isNull(str2)) {
return true;
} else if (isNull(str1) || isNull(str2)) {
return false;
} else {
return str1.equalsIgnoreCase(str2);
}
return (str1 == null && str2 == null) || (str1 != null && str1.equalsIgnoreCase(str2));
}
public static boolean isNullCollection(Collection<?> collection) {
......@@ -100,7 +78,7 @@ public class Asserts {
}
public static boolean isNullMap(Map<?, ?> map) {
return isNull(map) || map.size() == 0;
return isNull(map) || map.isEmpty();
}
public static boolean isNotNullMap(Map<?, ?> map) {
......
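The rewritten Asserts methods lean on the standard library: Arrays.stream(...).allMatch and noneMatch for the varargs checks, Objects.equals for null-safe comparison. A self-contained sketch of the same idioms with hypothetical names, plus a few expected results:

```java
import java.util.Arrays;
import java.util.Objects;

final class StringChecks {

    private StringChecks() {
    }

    static boolean isNullString(String s) {
        return s == null || s.isEmpty();
    }

    static boolean isAllNullString(String... values) {
        // true only if every element is null or empty
        return Arrays.stream(values).allMatch(StringChecks::isNullString);
    }

    static boolean isAllNotNullString(String... values) {
        // true only if no element is null or empty
        return Arrays.stream(values).noneMatch(StringChecks::isNullString);
    }

    static boolean isEquals(String a, String b) {
        // null-safe: two nulls are equal, a single null is not
        return Objects.equals(a, b);
    }

    public static void main(String[] args) {
        System.out.println(isAllNullString("", null));     // true
        System.out.println(isAllNotNullString("a", ""));   // false
        System.out.println(isEquals(null, null));          // true
    }
}
```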
......@@ -66,4 +66,12 @@ public class JobResult {
this.startTime = startTime;
this.endTime = endTime;
}
public void setStartTimeNow() {
this.setStartTime(LocalDateTime.now());
}
public void setEndTimeNow() {
this.setEndTime(LocalDateTime.now());
}
}
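The two helpers added above wrap the LocalDateTime.now() calls so callers stamp start and end times with a single method each. A brief usage sketch, assuming Lombok-style getters on JobResult as elsewhere in the project:

```java
import java.time.Duration;

JobResult result = new JobResult();
result.setStartTimeNow();          // replaces result.setStartTime(LocalDateTime.now())
// ... do the work ...
result.setEndTimeNow();            // replaces result.setEndTime(LocalDateTime.now())

// Hypothetical follow-up: the two timestamps make it easy to report duration.
long millis = Duration.between(result.getStartTime(), result.getEndTime()).toMillis();
```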
......@@ -53,6 +53,7 @@ import cn.hutool.core.io.FileUtil;
import cn.hutool.core.lang.Dict;
import cn.hutool.core.lang.Opt;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.text.CharSequenceUtil;
import cn.hutool.core.util.ReUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.crypto.digest.MD5;
......@@ -69,6 +70,9 @@ import groovy.lang.GroovyClassLoader;
*/
public class UDFUtil {
private UDFUtil() {
}
protected static final Logger log = LoggerFactory.getLogger(UDFUtil.class);
/**
* Stores the key/value mapping between the UDF md5 and its version
......@@ -130,7 +134,7 @@ public class UDFUtil {
}).collect(Collectors.toList());
List<String> classNameList = udfList.stream().map(UDF::getClassName).collect(Collectors.toList());
process.info(StringUtils.join(",", classNameList));
process.info(StrUtil.format("A total of {} UDF have been Parsed.", classNameList.size()));
process.info(CharSequenceUtil.format("A total of {} UDF have been Parsed.", classNameList.size()));
return udfList;
}
......
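The UDFUtil hunk adds a private constructor so the utility class cannot be instantiated, and moves the format call to CharSequenceUtil, the CharSequence-based parent of hutool's StrUtil. A generic sketch of the utility-class convention; the class name and format call here are placeholders:

```java
public final class TextUtil {

    // Private constructor: a class with only static members
    // should never be instantiated.
    private TextUtil() {
    }

    public static String describeCount(int count) {
        // Simple stand-in for the hutool format call in the diff.
        return String.format("A total of %d UDFs have been parsed.", count);
    }
}
```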
......@@ -41,7 +41,7 @@ import java.util.Set;
* @author wenmo
* @since 2021/7/19 23:15
*/
public interface Driver {
public interface Driver extends AutoCloseable {
static Optional<Driver> get(DriverConfig config) {
Asserts.checkNotNull(config, "数据源配置不能为空");
......
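Extending AutoCloseable is what enables the try-with-resources blocks introduced earlier in this commit. One caveat: AutoCloseable.close() is declared throws Exception, so unless Driver overrides close() with a narrower signature, every try-with-resources caller has to handle a checked exception. A reduced stand-in sketch (the real Driver interface has a much richer API):

```java
interface Driver extends AutoCloseable {

    String executeSql(String statement);

    // Narrow the signature so callers are not forced to catch a checked Exception.
    @Override
    void close();
}

class InMemoryDriver implements Driver {

    @Override
    public String executeSql(String statement) {
        return "ok: " + statement;
    }

    @Override
    public void close() {
        // release connections, sessions, etc.
    }
}

class DriverDemo {
    public static void main(String[] args) {
        // close() is invoked automatically when the block exits,
        // whether executeSql returns normally or throws.
        try (Driver driver = new InMemoryDriver()) {
            System.out.println(driver.executeSql("SELECT 1"));
        }
    }
}
```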
......@@ -27,7 +27,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.text.CharSequenceUtil;
/**
* Process
......@@ -121,7 +121,7 @@ public class ProcessEntity {
if (isNullProcess()) {
return;
}
String message = StrUtil.format("\n[{}] {} CONFIG: {}", type.getValue(), LocalDateTime.now(), str);
String message = CharSequenceUtil.format("\n[{}] {} CONFIG: {}", type.getValue(), LocalDateTime.now(), str);
steps.get(stepIndex - 1).appendInfo(message);
ConsolePool.write(message, userId);
}
......@@ -130,7 +130,7 @@ public class ProcessEntity {
if (isNullProcess()) {
return;
}
String message = StrUtil.format("\n[{}] {} INFO: {}", type.getValue(), LocalDateTime.now(), str);
String message = CharSequenceUtil.format("\n[{}] {} INFO: {}", type.getValue(), LocalDateTime.now(), str);
steps.get(stepIndex - 1).appendInfo(message);
ConsolePool.write(message, userId);
}
......@@ -155,7 +155,7 @@ public class ProcessEntity {
if (isNullProcess()) {
return;
}
String message = StrUtil.format("\n[{}] {} ERROR: {}", type.getValue(), LocalDateTime.now(), str);
String message = CharSequenceUtil.format("\n[{}] {} ERROR: {}", type.getValue(), LocalDateTime.now(), str);
steps.get(stepIndex - 1).appendInfo(message);
steps.get(stepIndex - 1).appendError(message);
ConsolePool.write(message, userId);
......
......@@ -41,11 +41,10 @@ public class ProcessStep {
}
public ProcessStep(ProcessStatus stepStatus, LocalDateTime startTime) {
this.stepStatus = stepStatus;
this.startTime = startTime;
this(stepStatus, startTime, null, 0, null, null);
}
public ProcessStep(int index, ProcessStatus stepStatus, LocalDateTime startTime, LocalDateTime endTime, long time,
public ProcessStep(ProcessStatus stepStatus, LocalDateTime startTime, LocalDateTime endTime, long time,
StringBuilder info, StringBuilder error) {
this.stepStatus = stepStatus;
this.startTime = startTime;
......
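The ProcessStep change above replaces duplicated field assignments with constructor delegation: the short constructor calls the canonical one via this(...), so defaulting logic is written once. A generic sketch of the pattern with hypothetical fields:

```java
import java.time.LocalDateTime;

class Step {

    private final String status;
    private final LocalDateTime startTime;
    private final LocalDateTime endTime;
    private final StringBuilder info;

    // Convenience constructor delegates to the canonical one,
    // so defaults live in exactly one place.
    Step(String status, LocalDateTime startTime) {
        this(status, startTime, null, new StringBuilder());
    }

    // Canonical constructor: the single place where fields are assigned.
    Step(String status, LocalDateTime startTime, LocalDateTime endTime, StringBuilder info) {
        this.status = status;
        this.startTime = startTime;
        this.endTime = endTime;
        this.info = info;
    }
}
```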
......@@ -32,9 +32,9 @@ import java.util.concurrent.ConcurrentHashMap;
*/
public class ConsolePool extends AbstractPool<StringBuilder> {
private static volatile Map<String, StringBuilder> consoleEntityMap = new ConcurrentHashMap<>();
private static final Map<String, StringBuilder> consoleEntityMap = new ConcurrentHashMap<>();
private static ConsolePool instance = new ConsolePool();
private static final ConsolePool instance = new ConsolePool();
public static ConsolePool getInstance() {
return instance;
......@@ -51,13 +51,8 @@ public class ConsolePool extends AbstractPool<StringBuilder> {
}
public static void write(String str, Integer userId) {
String user = userId.toString();
if (consoleEntityMap.containsKey(user)) {
consoleEntityMap.get(user).append(str);
} else {
StringBuilder sb = new StringBuilder("Dinky User Console:");
consoleEntityMap.put(user, sb.append(str));
}
String user = String.valueOf(userId);
consoleEntityMap.getOrDefault(user, new StringBuilder("Dinky User Console:")).append(str);
}
}
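Worth noting for the write method above: Map.getOrDefault returns the fallback builder without storing it, whereas ConcurrentHashMap.computeIfAbsent both creates and registers the entry, so later writes for the same user keep appending to the stored builder. A small sketch of that variant, mirroring the map shape from the hunk:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class ConsoleBuffer {

    private static final Map<String, StringBuilder> CONSOLE = new ConcurrentHashMap<>();

    static void write(String str, Integer userId) {
        String user = String.valueOf(userId);
        // computeIfAbsent creates the builder on first use and keeps it in the map,
        // so every write for the same user appends to the stored builder.
        CONSOLE.computeIfAbsent(user, k -> new StringBuilder("Dinky User Console:"))
               .append(str);
    }
}
```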
......@@ -33,9 +33,9 @@ import java.util.concurrent.ConcurrentHashMap;
*/
public class ProcessPool extends AbstractPool<ProcessEntity> {
private static volatile Map<String, ProcessEntity> processEntityMap = new ConcurrentHashMap<>();
private static final Map<String, ProcessEntity> processEntityMap = new ConcurrentHashMap<>();
private static ProcessPool instance = new ProcessPool();
private static final ProcessPool instance = new ProcessPool();
public static ProcessPool getInstance() {
return instance;
......
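Both pool hunks make the backing map and the singleton static final. For an eagerly created singleton, final fields plus the JVM's class-initialization guarantees give safe publication without volatile or locking; a minimal sketch with a hypothetical pool:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class RegistryPool {

    // Eager singleton: created once during class initialization,
    // safely published to every thread by the JVM.
    private static final RegistryPool INSTANCE = new RegistryPool();

    private static final Map<String, String> ENTRIES = new ConcurrentHashMap<>();

    private RegistryPool() {
    }

    public static RegistryPool getInstance() {
        return INSTANCE;
    }

    public void put(String key, String value) {
        ENTRIES.put(key, value);
    }
}
```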