Commit 1cb25054 authored by wenmo

Packaging optimization

parent 0c7f966b
@@ -46,9 +46,45 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<!--<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
</dependency>-->
<!--<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.30</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.30</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>1.1.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.14</version>
<scope>provided</scope>
</dependency>-->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -70,10 +106,10 @@
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
</dependency>
<dependency>
<!--<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
</dependency>-->
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-core</artifactId>
@@ -90,7 +126,7 @@
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata-mysql</artifactId>
<!--<scope>provided</scope>-->
<scope>provided</scope>
</dependency>
</dependencies>
<build>
......
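The dependency changes above route commons-logging through SLF4J (jcl-over-slf4j) and bind SLF4J to logback-classic. As a sanity check that only one logging binding survives in the packaged app, the dependency tree can be filtered for the relevant groups; a hedged sketch, assuming the module is named dlink-admin:

# List logging-related artifacts; expect logback-classic and jcl-over-slf4j,
# and no competing SLF4J binding such as slf4j-simple.
mvn -pl dlink-admin dependency:tree -Dincludes=org.slf4j,ch.qos.logback,commons-logging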
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>service</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<directory>src/main/bin</directory>
<includes>
<include>auto.sh</include>
</includes>
<fileMode>0775</fileMode>
<directoryMode>775</directoryMode>
<outputDirectory>bin/</outputDirectory>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<includes>
<include>application.properties</include>
<include>application.yml</include>
</includes>
<outputDirectory>config/</outputDirectory>
</fileSet>
<fileSet>
<directory>target/</directory>
<includes>
<include>*.jar</include>
</includes>
<outputDirectory>.</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<outputDirectory>lib</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
</assembly>
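The descriptor above unpacks the build into a directory holding bin/, config/, lib/, and the application jar at the root. A minimal sketch of producing and inspecting that layout, assuming the descriptor is wired to the maven-assembly-plugin in the module's pom and the assembly id is service:

# Build the module and materialize the 'service' assembly under target/.
mvn clean package
ls target/*-service/    # expected: bin/ config/ lib/ dlink-admin-*.jar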
#!/bin/bash
# Define variables
# Path of the jar to run; quoting it is optional. Note: no spaces around '=', or the shell reports "command not found".
JAR_NAME="./dlink-admin.jar"
SETTING="-Djava.ext.dirs=./lib"
# Print a usage hint when the arguments are invalid.
tips() {
    echo ""
    echo "WARNING! Please use: sh auto.sh [start|stop|restart|status]. For example: sh auto.sh start"
    echo ""
    exit 1
}
# Start function
start() {
    # Re-read the pid, because other operations such as stop change the process state
    pid=`ps -ef | grep $JAR_NAME | grep -v grep | awk '{print $2}'`
    # -z runs the branch when $pid is empty
    if [ -z "$pid" ]; then
        nohup java $SETTING -jar -Xms512M -Xmx2048M -XX:PermSize=512M -XX:MaxPermSize=1024M $JAR_NAME > dlink.log 2>&1 &
        pid=`ps -ef | grep $JAR_NAME | grep -v grep | awk '{print $2}'`
        echo ""
        echo "Service ${JAR_NAME} is starting! pid=${pid}"
        echo "........................Start successfully!........................."
    else
        echo ""
        echo "Service ${JAR_NAME} is already running, its pid = ${pid}. If necessary, please use: sh auto.sh restart."
        echo ""
    fi
}
# Stop function
stop() {
    # Re-read the pid, because other operations such as start change the process state
    pid=`ps -ef | grep $JAR_NAME | grep -v grep | awk '{print $2}'`
    # -z runs the branch when $pid is empty. Note: keep spaces around every command and variable, or the shell reports "command not found".
    if [ -z "$pid" ]; then
        echo ""
        echo "Service ${JAR_NAME} is not running! There is no need to stop it!"
        echo ""
    else
        kill -9 $pid
        echo ""
        echo "Service stopped successfully! pid ${pid} has been killed forcibly!"
        echo ""
    fi
}
# Status function
status() {
    # Re-read the pid, because operations such as stop, restart, and start change the process state
    pid=`ps -ef | grep $JAR_NAME | grep -v grep | awk '{print $2}'`
    # -z runs the branch when $pid is empty
    if [ -z "$pid" ]; then
        echo ""
        echo "Service ${JAR_NAME} is not running!"
        echo ""
    else
        echo ""
        echo "Service ${JAR_NAME} is running. Its pid=${pid}"
        echo ""
    fi
}
# Restart function
restart() {
    echo ""
    echo ".............................Restarting.............................."
    echo "....................................................................."
    # Re-read the pid, because other operations such as start change the process state
    pid=`ps -ef | grep $JAR_NAME | grep -v grep | awk '{print $2}'`
    # Kill the old process only when $pid is non-empty
    if [ ! -z "$pid" ]; then
        kill -9 $pid
    fi
    start
    echo "....................Restart successfully!..........................."
}
# Dispatch to the matching function based on the first argument; fall back to tips when it does not match
case "$1" in
"start")
start
;;
"stop")
stop
;;
"status")
status
;;
"restart")
restart
;;
*)
tips
;;
esac
\ No newline at end of file
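A typical session with auto.sh, run from the unpacked assembly directory (the pid shown in the output will differ):

sh auto.sh start     # launches the jar in the background via nohup, logging to dlink.log
sh auto.sh status    # prints the pid when the service is running
sh auto.sh restart   # kills any existing pid, then calls start
sh auto.sh stop      # kill -9 on the recorded pid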
@@ -6,11 +6,11 @@ spring:
driver-class-name: com.mysql.cj.jdbc.Driver
application:
name: dlink
flyway:
enabled: true
clean-disabled: true
# baseline-on-migrate: true
table: dlink_schema_history
# flyway:
# enabled: false
# clean-disabled: true
## baseline-on-migrate: true
# table: dlink_schema_history
server:
port: 8888
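With Flyway commented out, the schema has to be created by hand (see the SQL script further down) before the service starts. A quick smoke test once it is up, assuming the default port 8888 on a local deployment:

# Any HTTP status code back from the port means the service answered.
curl -s -o /dev/null -w "%{http_code}\n" http://localhost:8888/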
@@ -24,8 +24,8 @@ mybatis-plus:
id-type: auto
configuration:
##### mybatis-plus prints the complete SQL (development environment only)
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
# log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
# log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
##### Flink cluster configuration
......
/*
Navicat Premium Data Transfer
Source Server : 10.1.51.25
Source Server Type : MySQL
Source Server Version : 80013
Source Host : 10.1.51.25:3306
Source Schema : dlink
Target Server Type : MySQL
Target Server Version : 80013
File Encoding : 65001
Date: 28/05/2021 18:56:01
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for dlink_catalogue
-- ----------------------------
DROP TABLE IF EXISTS `dlink_catalogue`;
CREATE TABLE `dlink_catalogue`
(
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
    `task_id` int(11) NULL DEFAULT NULL COMMENT 'Task ID',
    `name` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT 'Name',
    `type` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci COMMENT 'Type',
    `parent_id` int(11) NOT NULL DEFAULT 0 COMMENT 'Parent ID',
    `enabled` tinyint(1) NOT NULL DEFAULT 1 COMMENT 'Enabled',
    `is_leaf` tinyint(1) NOT NULL COMMENT 'Whether this node is a leaf',
    `create_time` datetime(0) NULL DEFAULT NULL COMMENT 'Create time',
    `update_time` datetime(0) NULL DEFAULT NULL COMMENT 'Last update time',
    PRIMARY KEY (`id`) USING BTREE,
    UNIQUE INDEX `idx_name`(`name`, `parent_id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'Catalogue' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for dlink_cluster
-- ----------------------------
DROP TABLE IF EXISTS `dlink_cluster`;
CREATE TABLE `dlink_cluster`
(
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
    `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT 'Name',
    `alias` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Alias',
    `type` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Type',
    `hosts` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT 'Hosts',
    `job_manager_host` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'JobManager host',
    `status` int(1) NULL DEFAULT NULL COMMENT 'Status',
    `note` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Note',
    `enabled` tinyint(1) NOT NULL DEFAULT 1 COMMENT 'Enabled',
    `create_time` datetime(0) NULL DEFAULT NULL COMMENT 'Create time',
    `update_time` datetime(0) NULL DEFAULT NULL COMMENT 'Update time',
    PRIMARY KEY (`id`) USING BTREE,
    UNIQUE INDEX `idx_name`(`name`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'Cluster' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for dlink_task
-- ----------------------------
DROP TABLE IF EXISTS `dlink_task`;
CREATE TABLE `dlink_task`
(
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
    `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT 'Name',
    `alias` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Alias',
    `type` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Type',
    `check_point` int(11) NULL DEFAULT NULL COMMENT 'CheckPoint',
    `save_point_path` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'SavePointPath',
    `parallelism` int(4) NULL DEFAULT NULL COMMENT 'Parallelism',
    `fragment` tinyint(255) NULL DEFAULT NULL COMMENT 'Fragment',
    `cluster_id` int(11) NULL DEFAULT NULL COMMENT 'Flink cluster ID',
    `note` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Note',
    `enabled` tinyint(1) NOT NULL DEFAULT 1 COMMENT 'Enabled',
    `create_time` datetime(0) NULL DEFAULT NULL COMMENT 'Create time',
    `update_time` datetime(0) NULL DEFAULT NULL COMMENT 'Update time',
    PRIMARY KEY (`id`) USING BTREE,
    UNIQUE INDEX `idx_name`(`name`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'Job' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for dlink_task_statement
-- ----------------------------
DROP TABLE IF EXISTS `dlink_task_statement`;
CREATE TABLE `dlink_task_statement`
(
    `id` int(11) NOT NULL COMMENT 'ID',
    `statement` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT 'Statement',
    PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'Statement' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for dlink_flink_document
-- ----------------------------
DROP TABLE IF EXISTS `dlink_flink_document`;
CREATE TABLE `dlink_flink_document`
(
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
    `category` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Document category',
    `type` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Type',
    `subtype` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Subtype',
    `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Name',
    `description` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Description',
    `version` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'Version',
    `like_num` int(255) NULL DEFAULT 0 COMMENT 'Like count',
    `enabled` tinyint(1) NOT NULL DEFAULT 0 COMMENT 'Enabled',
    `create_time` datetime(0) NULL DEFAULT NULL COMMENT 'Create time',
    `update_time` datetime(0) NULL DEFAULT NULL COMMENT 'Update time',
    PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 263 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'Document management' ROW_FORMAT = Dynamic;
SET FOREIGN_KEY_CHECKS = 1;
COMMIT;
\ No newline at end of file
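A sketch of applying this script by hand, now that Flyway no longer manages the schema; host, credentials, and file name are illustrative:

# Create the target database, then import the DDL above.
mysql -h 127.0.0.1 -P 3306 -u root -p -e "CREATE DATABASE IF NOT EXISTS dlink DEFAULT CHARACTER SET utf8;"
mysql -h 127.0.0.1 -P 3306 -u root -p dlink < dlink.sql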
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<contextName>${APP_NAME}</contextName>
<springProperty name="APP_NAME" scope="context" source="spring.application.name"/>
<springProperty name="LOG_FILE" scope="context" source="logging.file" defaultValue="./logs/application/${APP_NAME}"/>
<springProperty name="LOG_POINT_FILE" scope="context" source="logging.file" defaultValue="./logs/point"/>
<springProperty name="LOG_AUDIT_FILE" scope="context" source="logging.file" defaultValue="./logs/audit"/>
<springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="50MB"/>
<springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="7"/>
<springProperty name="ServerIP" scope="context" source="spring.cloud.client.ip-address" defaultValue="0.0.0.0"/>
<springProperty name="ServerPort" scope="context" source="server.port" defaultValue="0000"/>
<!-- Colored logs -->
<!-- Converter classes that colored log output depends on -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- Colored log pattern -->
<property name="CONSOLE_LOG_PATTERN"
value="[${APP_NAME}:${ServerIP}:${ServerPort}] %clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(%level){blue} %clr(${PID}){magenta} %clr([%X{traceId}]){yellow} %clr([%thread]){orange} %clr(%-40.40logger{39}){cyan} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
<property name="CONSOLE_LOG_PATTERN_NO_COLOR" value="[${APP_NAME}:${ServerIP}:${ServerPort}] %d{yyyy-MM-dd HH:mm:ss.SSS} %level ${PID} [%X{traceId}] [%thread] %-40.40logger{39} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
<!-- Console appender -->
<appender name="StdoutAppender" class="ch.qos.logback.core.ConsoleAppender">
<withJansi>true</withJansi>
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!-- Roll the regular log file daily -->
<appender name="FileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}/${APP_NAME}.log</file>
<encoder>
<pattern>${CONSOLE_LOG_PATTERN_NO_COLOR}</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- Time-based rolling policy -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE}/${APP_NAME}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!-- Retention, in days -->
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
</filter>
</appender>
<appender name="point_log" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_POINT_FILE}/point.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS}|${APP_NAME}|%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- Time-based rolling policy -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_POINT_FILE}/point.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!-- Retention, in days -->
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
</appender>
<appender name="audit_log" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_AUDIT_FILE}/audit.log</file>
<encoder>
<pattern>%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- Time-based rolling policy -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_AUDIT_FILE}/audit.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!-- Retention, in days -->
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
</appender>
<appender name="point_log_async" class="ch.qos.logback.classic.AsyncAppender">
<discardingThreshold>0</discardingThreshold>
<appender-ref ref="point_log"/>
</appender>
<appender name="file_async" class="ch.qos.logback.classic.AsyncAppender">
<discardingThreshold>0</discardingThreshold>
<appender-ref ref="FileAppender"/>
</appender>
<root level="INFO">
<appender-ref ref="StdoutAppender"/>
<appender-ref ref="file_async"/>
</root>
</configuration>
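Because every tunable in this file is read through springProperty, it can be overridden at launch without touching the XML; a hedged example using the property names defined above:

# Raise the rolling size to 100MB and keep 14 days of history.
java -jar dlink-admin.jar --spring.application.name=dlink --logback.filesize=100MB --logback.filemaxday=14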
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
    <!-- Define the log storage path. Do not use relative paths in Logback configuration. -->
    <property name="LOG_HOME" value="./logs" />
    <!-- Console appender -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
        </encoder>
    </appender>
    <!-- File appender, rolled daily -->
    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Name pattern of the output log files -->
            <FileNamePattern>${LOG_HOME}/dlink.%d{yyyy-MM-dd}.log</FileNamePattern>
            <!-- Days of log history to keep -->
            <MaxHistory>30</MaxHistory>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
        </filter>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
        </encoder>
        <!-- Maximum size of a single log file -->
        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
            <MaxFileSize>10MB</MaxFileSize>
        </triggeringPolicy>
    </appender>
    <!--<appender name="dailyRollingFileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <File>/usr/local/log/app.log</File>
        <encoder>
            <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{35} - %msg %n</Pattern>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            daily rollover
            <FileNamePattern>logback.%d{yyyy-MM-dd}.log</FileNamePattern>
            keep 30 days' worth of history
            <maxHistory>30</maxHistory>
        </rollingPolicy>
    </appender>-->
    <!-- MyBatis log configuration -->
    <!--<logger name="com.apache.ibatis" level="TRACE"/>
    <logger name="java.sql.Connection" level="ERROR"/>
    <logger name="java.sql.Statement" level="ERROR"/>
    <logger name="java.sql.PreparedStatement" level="ERROR"/>-->
    <!-- Root log level -->
    <root level="INFO">
        <appender-ref ref="STDOUT" />
        <appender-ref ref="FILE"/>
    </root>
</configuration>
@@ -8,7 +8,7 @@
<version>0.3.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>dlink-client-1.12</artifactId>
<properties>
@@ -48,6 +48,10 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-common</artifactId>
</dependency>
</dependencies>
<!--<build>
<plugins>
......
@@ -8,7 +8,7 @@
<version>0.3.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>dlink-client-1.13</artifactId>
<properties>
@@ -48,6 +48,10 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-common</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
......
package com.dlink.result;
import java.util.Date;
/**
* Explain result of a SQL statement
*
* @author wenmo
* @since 2021/6/7 22:06
**/
public class SqlExplainResult {
private Integer index;
private String type;
private String sql;
private String parse;
private String explain;
private String error;
private boolean parseTrue;
private boolean explainTrue;
private Date explainTime;
public Integer getIndex() {
return index;
}
public void setIndex(Integer index) {
this.index = index;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getSql() {
return sql;
}
public void setSql(String sql) {
this.sql = sql;
}
public String getParse() {
return parse;
}
public void setParse(String parse) {
this.parse = parse;
}
public String getExplain() {
return explain;
}
public void setExplain(String explain) {
this.explain = explain;
}
public String getError() {
return error;
}
public void setError(String error) {
this.error = error;
}
public boolean isParseTrue() {
return parseTrue;
}
public void setParseTrue(boolean parseTrue) {
this.parseTrue = parseTrue;
}
public boolean isExplainTrue() {
return explainTrue;
}
public void setExplainTrue(boolean explainTrue) {
this.explainTrue = explainTrue;
}
public Date getExplainTime() {
return explainTime;
}
public void setExplainTime(Date explainTime) {
this.explainTime = explainTime;
}
@Override
public String toString() {
return "SqlExplainRecord{" +
"index=" + index +
", type='" + type + '\'' +
", sql='" + sql + '\'' +
", parse='" + parse + '\'' +
", explain='" + explain + '\'' +
", error='" + error + '\'' +
", parseTrue=" + parseTrue +
", explainTrue=" + explainTrue +
", explainTime=" + explainTime +
'}';
}
}
@@ -43,17 +43,17 @@
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-client-1.12</artifactId>
<!--<scope>provided</scope>-->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-connector-jdbc</artifactId>
<!--<scope>provided</scope>-->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-function</artifactId>
<!--<scope>provided</scope>-->
<scope>provided</scope>
</dependency>
</dependencies>
</project>
\ No newline at end of file
#!/bin/bash
# Define variables
# Path of the jar to run; quoting it is optional. Note: no spaces around '=', or the shell reports "command not found".
JAR_NAME="./dlink-admin.jar"
SETTING="-Djava.ext.dirs=./lib"
JAR_NAME="./dlink-admin-*.jar"
#java -Djava.ext.dirs=$JAVA_HOME/jre/lib/ext:$JAVA_HOME/jre/lib:./lib -classpath ."/lib/*.jar" -jar dlink-admin-*.jar
SETTING="-Djava.ext.dirs=$JAVA_HOME/jre/lib/ext:$JAVA_HOME/jre/lib:./lib"
# Print a usage hint when the arguments are invalid.
tips() {
......
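Note that -Djava.ext.dirs relies on the extension class loader, which was removed in Java 9, so this launch style only works on Java 8. On newer JDKs the equivalent is to put ./lib on the classpath; a hypothetical sketch (the jar name and main class are illustrative):

# JDK 9+: the extension mechanism is gone; use the classpath instead.
java -Xms512M -Xmx2048M -cp "./dlink-admin-0.3.0.jar:./lib/*" com.dlink.Dlink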
@@ -10,6 +10,7 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-metadata-postgresql</artifactId>
<dependencies>
<dependency>
<groupId>com.dlink</groupId>
......
@@ -41,7 +41,7 @@
<banner.version>1.0.2</banner.version>
<hibernate-validator.version>6.2.0.Final</hibernate-validator.version>
<versions-maven-plugin.version>2.7</versions-maven-plugin.version>
<flyway.version>6.4.4</flyway.version>
<!--<flyway.version>6.4.4</flyway.version>-->
<maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
<maven-assembly-plugin.version>3.2.0</maven-assembly-plugin.version>
<maven.resource.version>3.2.0</maven.resource.version>
@@ -56,22 +56,6 @@
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
</dependency>
<!-- mybatis-plus start -->
<!--<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus-boot-starter.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-extension</artifactId>
<version>${mybatis-plus-boot-starter.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus-boot-starter.version}</version>
</dependency>-->
<!-- druid 官方 starter -->
<dependency>
<groupId>com.alibaba</groupId>
@@ -116,6 +100,11 @@
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
@@ -153,11 +142,11 @@
<scope>provided</scope>
</dependency>
<!-- flyway -->
<dependency>
<!--<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>${flyway.version}</version>
</dependency>
</dependency>-->
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-core</artifactId>
......