Commit 5291364d by yuwei

2.0.0 project initialization

parent 99660245
......@@ -119,4 +119,16 @@ spring:
- name: Hystrix
args:
name: quartzHystrix
fallbackUri: forward:/fallback
# Workflow
- id: datax-service-workflow
uri: lb://datax-service-workflow
predicates:
- Path=/workflow/**
filters:
- SwaggerHeaderFilter
- StripPrefix=1
- name: Hystrix
args:
name: workFlowHystrix
fallbackUri: forward:/fallback
\ No newline at end of file
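Both routes above forward to /fallback through the Hystrix filter, so the gateway must expose a matching handler. That handler is not part of this diff; the sketch below is a hypothetical minimal version (package, class name and response shape are assumptions):
package cn.datax.gateway.handler; // hypothetical package
import java.util.HashMap;
import java.util.Map;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
// Hypothetical fallback endpoint: Spring Cloud Gateway forwards here when a
// routed service (e.g. datax-service-workflow) trips its Hystrix circuit.
@RestController
public class FallbackController {
    // @RequestMapping without a method restriction, so the forward matches any verb
    @RequestMapping("/fallback")
    public Map<String, Object> fallback() {
        Map<String, Object> body = new HashMap<>();
        body.put("code", 500);
        body.put("msg", "Service temporarily unavailable, please try again later");
        return body;
    }
}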
......@@ -2,62 +2,45 @@
spring:
redis:
database: 1
host: 127.0.0.1
host: 192.168.234.101
port: 6379
password: # password (empty by default)
timeout: 6000ms # connection timeout in milliseconds
password: 1234@abcd # password (empty by default)
timeout: 6000ms # connection timeout in milliseconds
lettuce:
pool:
max-active: 1000 # maximum connections in the pool (a negative value means no limit)
max-wait: -1ms # maximum blocking wait time of the pool (a negative value means no limit)
max-wait: -1ms # maximum blocking wait time of the pool (a negative value means no limit)
max-idle: 10 # maximum idle connections in the pool
min-idle: 5 # minimum idle connections in the pool
datasource:
dynamic:
type: com.zaxxer.hikari.HikariDataSource
hikari:
auto-commit: false
connection-timeout: 30000
idle-timeout: 25000
login-timeout: 5
max-lifetime: 30000
read-only: false
validation-timeout: 3000
maximum-pool-size: 15
minimum-idle: 5
pool-name: DataxHikariCP
connection-test-query: SELECT 1 FROM DUAL
data-source-properties:
cachePrepStmts: true
prepStmtCacheSize: 250
prepStmtCacheSqlLimit: 2048
useServerPrepStmts: true
useLocalSessionState: true
rewriteBatchedStatements: true
cacheResultSetMetadata: true
cacheServerConfiguration: true
elideSetAutoCommits: true
maintainTimeStats: false
primary: mysql
datasource:
mysql:
driver-class-name: com.p6spy.engine.spy.P6SpyDriver
url: jdbc:p6spy:mysql://127.0.0.1:3306/data_cloud?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
username: root
password: 1234@abcd
mybatis-plus:
mapper-locations: classpath*:mapper/*Mapper.xml
type-aliases-package: cn.datax.service.workflow.api.entity
global-config:
db-config:
id-type: ASSIGN_ID
banner: false
configuration:
map-underscore-to-camel-case: true
cache-enabled: false
call-setters-on-nulls: true
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://192.168.234.100:3306/data_cloud_workflow?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
username: root
password: 1234@abcd
type: com.zaxxer.hikari.HikariDataSource
hikari:
auto-commit: false
connection-timeout: 30000
idle-timeout: 25000
login-timeout: 5
max-lifetime: 30000
read-only: false
validation-timeout: 3000
maximum-pool-size: 15
minimum-idle: 5
pool-name: DataxHikariCP
connection-test-query: SELECT 1 FROM DUAL
data-source-properties:
cachePrepStmts: true
prepStmtCacheSize: 250
prepStmtCacheSqlLimit: 2048
useServerPrepStmts: true
useLocalSessionState: true
rewriteBatchedStatements: true
cacheResultSetMetadata: true
cacheServerConfiguration: true
elideSetAutoCommits: true
maintainTimeStats: false
# spring security configuration
security:
......@@ -70,4 +53,10 @@ security:
scope: all
resource:
loadBalanced: true
token-info-uri: http://localhost:8612/auth/oauth/check_token
\ No newline at end of file
token-info-uri: http://localhost:8612/auth/oauth/check_token
flowable:
# Disable the async executor (timer JOBs)
async-executor-activate: false
# Set databaseSchemaUpdate to true: when Flowable finds that the database tables do not match its expected schema, it automatically upgrades them to the new version.
database-schema-update: true
\ No newline at end of file
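The mybatis-plus block above sets the global db-config id-type to ASSIGN_ID, so entity primary keys are filled with snowflake-style values on insert. A minimal sketch of an entity relying on this (table and field names are hypothetical; the package comes from type-aliases-package above):
package cn.datax.service.workflow.api.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
// Hypothetical entity: with the global id-type ASSIGN_ID, MyBatis-Plus generates
// the id automatically; the explicit @TableId below only documents that default.
@TableName("demo_flow")
public class DemoFlowEntity {
    @TableId(type = IdType.ASSIGN_ID)
    private String id;
    // maps to column flow_name via map-underscore-to-camel-case: true
    private String flowName;
    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public String getFlowName() { return flowName; }
    public void setFlowName(String flowName) { this.flowName = flowName; }
}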
......@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;
@FeignClient(contextId = "dataSourceServiceFeign", value = "datax-service-data-factory", fallbackFactory = DataSourceServiceFeignFallbackFactory.class)
public interface DataSourceServiceFeign {
@GetMapping("/inner/dataSource/{id}")
@GetMapping("/dataSource/{id}")
R getDataSourceById(@PathVariable("id") String id);
}
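The path changes from /inner/dataSource/{id} to /dataSource/{id} because this commit removes the dedicated InnerController (see below); calls now go through the regular DataSourceController endpoint. A hedged usage sketch of the client (the caller class and its package are hypothetical):
package cn.datax.service.example; // hypothetical caller package
import cn.datax.common.core.R;
import cn.datax.service.data.factory.api.feign.DataSourceServiceFeign; // package assumed
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
// Hypothetical consumer of the Feign client declared above; after this commit
// the call is routed to GET /dataSource/{id} on datax-service-data-factory.
@Service
public class DataSourceLookupService {
    @Autowired
    private DataSourceServiceFeign dataSourceServiceFeign;
    public R fetchDataSource(String id) {
        return dataSourceServiceFeign.getDataSourceById(id);
    }
}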
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-sql-console"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
......@@ -2,17 +2,13 @@ package cn.datax.service.data.factory.controller;
import cn.datax.common.core.JsonPage;
import cn.datax.common.core.R;
import cn.datax.common.database.DbQuery;
import cn.datax.common.database.core.PageResult;
import cn.datax.common.validate.ValidationGroups;
import cn.datax.service.data.factory.api.dto.DataSetDto;
import cn.datax.service.data.factory.api.entity.DataSetEntity;
import cn.datax.service.data.factory.api.query.DbDataQuery;
import cn.datax.service.data.factory.api.vo.DataSetVo;
import cn.datax.service.data.factory.api.query.DataSetQuery;
import cn.datax.service.data.factory.mapstruct.DataSetMapper;
import cn.datax.service.data.factory.service.DataSetService;
import cn.datax.service.data.factory.service.DataSourceService;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
......@@ -27,7 +23,6 @@ import org.springframework.web.bind.annotation.*;
import cn.datax.common.base.BaseController;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
......@@ -49,9 +44,6 @@ public class DataSetController extends BaseController {
@Autowired
private DataSetMapper dataSetMapper;
@Autowired
private DataSourceService dataSourceService;
/**
* Query a record by ID
*
......@@ -62,8 +54,8 @@ public class DataSetController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getDataSetById(@PathVariable String id) {
DataSetVo dataSetVo = dataSetService.getDataSetById(id);
return R.ok().setData(dataSetVo);
DataSetEntity dataSetEntity = dataSetService.getDataSetById(id);
return R.ok().setData(dataSetMapper.toVO(dataSetEntity));
}
/**
......@@ -126,23 +118,4 @@ public class DataSetController extends BaseController {
dataSetService.deleteDataSetById(id);
return R.ok();
}
@ApiOperation(value = "获取SQL结果", notes = "根据数据源的id来获取SQL结果")
@ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
@GetMapping("/queryList")
public R queryList(@RequestBody @Validated DbDataQuery dbDataQuery) {
DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
List<Map<String, Object>> list = dbQuery.queryList(dbDataQuery.getSql());
return R.ok().setData(list);
}
@ApiOperation(value = "分页获取SQL结果", notes = "根据数据源的id来分页获取SQL结果")
@ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
@GetMapping("/queryByPage")
public R queryByPage(@RequestBody @Validated DbDataQuery dbDataQuery) {
DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
PageResult<Map<String, Object>> page = dbQuery.queryByPage(dbDataQuery.getSql(), dbDataQuery.getOffset(), dbDataQuery.getPageSize());
page.setPageNum(dbDataQuery.getPageNum()).setPageSize(dbDataQuery.getPageSize());
return R.ok().setData(page);
}
}
......@@ -5,10 +5,12 @@ import cn.datax.common.core.R;
import cn.datax.common.database.DbQuery;
import cn.datax.common.database.core.DbColumn;
import cn.datax.common.database.core.DbTable;
import cn.datax.common.database.core.PageResult;
import cn.datax.common.validate.ValidationGroups;
import cn.datax.service.data.factory.api.dto.DataSourceDto;
import cn.datax.service.data.factory.api.entity.DataSourceEntity;
import cn.datax.service.data.factory.api.query.DataSourceQuery;
import cn.datax.service.data.factory.api.query.DbDataQuery;
import cn.datax.service.data.factory.api.vo.DataSourceVo;
import cn.datax.service.data.factory.mapstruct.DataSourceMapper;
import cn.datax.service.data.factory.service.DataSourceService;
......@@ -26,6 +28,7 @@ import org.springframework.web.bind.annotation.*;
import cn.datax.common.base.BaseController;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
......@@ -57,8 +60,8 @@ public class DataSourceController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getDataSourceById(@PathVariable String id) {
DataSourceVo dataSourceVo = dataSourceService.getDataSourceById(id);
return R.ok().setData(dataSourceVo);
DataSourceEntity dataSourceEntity = dataSourceService.getDataSourceById(id);
return R.ok().setData(dataSourceMapper.toVO(dataSourceEntity));
}
/**
......@@ -170,4 +173,23 @@ public class DataSourceController extends BaseController {
List<DbColumn> columns = dbQuery.getTableColumns(dbName, tableName);
return R.ok().setData(columns);
}
@ApiOperation(value = "获取SQL结果", notes = "根据数据源的id来获取SQL结果")
@ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
@GetMapping("/queryList")
public R queryList(@RequestBody @Validated DbDataQuery dbDataQuery) {
DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
List<Map<String, Object>> list = dbQuery.queryList(dbDataQuery.getSql());
return R.ok().setData(list);
}
@ApiOperation(value = "分页获取SQL结果", notes = "根据数据源的id来分页获取SQL结果")
@ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
@GetMapping("/queryByPage")
public R queryByPage(@RequestBody @Validated DbDataQuery dbDataQuery) {
DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
PageResult<Map<String, Object>> page = dbQuery.queryByPage(dbDataQuery.getSql(), dbDataQuery.getOffset(), dbDataQuery.getPageSize());
page.setPageNum(dbDataQuery.getPageNum()).setPageSize(dbDataQuery.getPageSize());
return R.ok().setData(page);
}
}
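Judging from the getters used in queryByPage, the DbDataQuery body carries the data source id, the SQL text and paging fields. An illustrative payload (field names inferred from the code above, values hypothetical):
{
  "dataSourceId": "1309887283467985410",
  "sql": "select id, name from demo_table",
  "pageNum": 1,
  "pageSize": 10,
  "offset": 0
}
Note that both endpoints are declared with @GetMapping yet read the body with @RequestBody, so callers need an HTTP client that supports sending a body on GET.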
package cn.datax.service.data.factory.controller;
import cn.datax.common.base.BaseController;
import cn.datax.common.core.R;
import cn.datax.common.security.annotation.DataInner;
import cn.datax.service.data.factory.api.vo.DataSourceVo;
import cn.datax.service.data.factory.service.DataSourceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
......@@ -15,13 +8,4 @@ import org.springframework.web.bind.annotation.RestController;
@RequestMapping("/inner")
public class InnerController extends BaseController {
@Autowired
private DataSourceService dataSourceService;
@DataInner
@GetMapping("/dataSource/{id}")
public R getDataSourceById(@PathVariable String id) {
DataSourceVo dataSourceVo = dataSourceService.getDataSourceById(id);
return R.ok().setData(dataSourceVo);
}
}
......@@ -3,7 +3,6 @@ package cn.datax.service.data.factory.service;
import cn.datax.service.data.factory.api.entity.DataSetEntity;
import cn.datax.service.data.factory.api.dto.DataSetDto;
import cn.datax.common.base.BaseService;
import cn.datax.service.data.factory.api.vo.DataSetVo;
/**
* <p>
......@@ -19,7 +18,7 @@ public interface DataSetService extends BaseService<DataSetEntity> {
void updateDataSet(DataSetDto dataSet);
DataSetVo getDataSetById(String id);
DataSetEntity getDataSetById(String id);
void deleteDataSetById(String id);
}
......@@ -4,7 +4,6 @@ import cn.datax.common.database.DbQuery;
import cn.datax.service.data.factory.api.dto.DataSourceDto;
import cn.datax.service.data.factory.api.entity.DataSourceEntity;
import cn.datax.common.base.BaseService;
import cn.datax.service.data.factory.api.vo.DataSourceVo;
/**
* <p>
......@@ -20,7 +19,7 @@ public interface DataSourceService extends BaseService<DataSourceEntity> {
void updateDataSource(DataSourceDto dataSource);
DataSourceVo getDataSourceById(String id);
DataSourceEntity getDataSourceById(String id);
void deleteDataSourceById(String id);
......
......@@ -2,7 +2,6 @@ package cn.datax.service.data.factory.service.impl;
import cn.datax.service.data.factory.api.dto.DataSetDto;
import cn.datax.service.data.factory.api.entity.DataSetEntity;
import cn.datax.service.data.factory.api.vo.DataSetVo;
import cn.datax.service.data.factory.service.DataSetService;
import cn.datax.service.data.factory.mapstruct.DataSetMapper;
import cn.datax.service.data.factory.dao.DataSetDao;
......@@ -51,9 +50,10 @@ public class DataSetServiceImpl extends BaseServiceImpl<DataSetDao, DataSetEntit
}
@Cacheable(key = "#id", unless = "#result == null")
public DataSetVo getDataSetById(String id) {
@Override
public DataSetEntity getDataSetById(String id) {
DataSetEntity dataSetEntity = super.getById(id);
return dataSetMapper.toVO(dataSetEntity);
return dataSetEntity;
}
@CacheEvict(key = "#id")
......
......@@ -6,7 +6,6 @@ import cn.datax.common.database.constants.DbQueryProperty;
import cn.datax.service.data.factory.api.dto.DataSourceDto;
import cn.datax.service.data.factory.api.dto.DbSchema;
import cn.datax.service.data.factory.api.entity.DataSourceEntity;
import cn.datax.service.data.factory.api.vo.DataSourceVo;
import cn.datax.service.data.factory.dao.DataSourceDao;
import cn.datax.service.data.factory.service.DataSourceService;
import cn.datax.service.data.factory.mapstruct.DataSourceMapper;
......@@ -58,9 +57,10 @@ public class DataSourceServiceImpl extends BaseServiceImpl<DataSourceDao, DataSo
}
@Cacheable(key = "#id", unless = "#result == null")
public DataSourceVo getDataSourceById(String id) {
@Override
public DataSourceEntity getDataSourceById(String id) {
DataSourceEntity dataSourceEntity = super.getById(id);
return dataSourceMapper.toVO(dataSourceEntity);
return dataSourceEntity;
}
@CacheEvict(key = "#id")
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-factory"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-api-call"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
......@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;
@FeignClient(contextId = "apiMaskServiceFeign", value = "datax-service-data-market", fallbackFactory = ApiMaskServiceFeignFallbackFactory.class)
public interface ApiMaskServiceFeign {
@GetMapping("/inner/apiMask/{apiId}")
@GetMapping("/apiMask/api/{apiId}")
R getApiMaskByApiId(@PathVariable("apiId") String apiId);
}
......@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;
@FeignClient(contextId = "dataApiServiceFeign", value = "datax-service-data-market", fallbackFactory = DataApiServiceFeignFallbackFactory.class)
public interface DataApiServiceFeign {
@GetMapping("/inner/dataApi/{id}")
@GetMapping("/dataApi/{id}")
R getDataApiById(@PathVariable("id") String id);
}
......@@ -53,8 +53,22 @@ public class ApiMaskController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getApiMaskById(@PathVariable String id) {
ApiMaskVo apiMaskVo = apiMaskService.getApiMaskById(id);
return R.ok().setData(apiMaskVo);
ApiMaskEntity apiMaskEntity = apiMaskService.getApiMaskById(id);
return R.ok().setData(apiMaskMapper.toVO(apiMaskEntity));
}
/**
* Query a record by ID
*
* @param id
* @return
*/
@ApiOperation(value = "获取详细信息", notes = "根据url的id来获取详细信息")
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/api/{id}")
public R getApiMaskByApiId(@PathVariable String id) {
ApiMaskEntity apiMaskEntity = apiMaskService.getApiMaskByApiId(id);
return R.ok().setData(apiMaskMapper.toVO(apiMaskEntity));
}
/**
......
......@@ -56,8 +56,8 @@ public class DataApiController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getDataApiById(@PathVariable String id) {
DataApiVo dataApiVo = dataApiService.getDataApiById(id);
return R.ok().setData(dataApiVo);
DataApiEntity dataApiEntity = dataApiService.getDataApiById(id);
return R.ok().setData(dataApiMapper.toVO(dataApiEntity));
}
/**
......
package cn.datax.service.data.market.controller;
import cn.datax.common.base.BaseController;
import cn.datax.common.core.R;
import cn.datax.common.security.annotation.DataInner;
import cn.datax.service.data.market.api.vo.ApiMaskVo;
import cn.datax.service.data.market.api.vo.DataApiVo;
import cn.datax.service.data.market.service.ApiMaskService;
import cn.datax.service.data.market.service.DataApiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
......@@ -17,35 +8,4 @@ import org.springframework.web.bind.annotation.RestController;
@RequestMapping("/inner")
public class InnerController extends BaseController {
@Autowired
private DataApiService dataApiService;
@Autowired
private ApiMaskService apiMaskService;
/**
* Query a record by ID
*
* @param id
* @return
*/
@DataInner
@GetMapping("/dataApi/{id}")
public R getDataApiById(@PathVariable String id) {
DataApiVo dataApiVo = dataApiService.getDataApiById(id);
return R.ok().setData(dataApiVo);
}
/**
* Query a record by ID
*
* @param apiId
* @return
*/
@DataInner
@GetMapping("/apiMask/{apiId}")
public R getApiMaskByApiId(@PathVariable String apiId) {
ApiMaskVo apiMaskVo = apiMaskService.getApiMaskByApiId(apiId);
return R.ok().setData(apiMaskVo);
}
}
......@@ -3,7 +3,6 @@ package cn.datax.service.data.market.service;
import cn.datax.common.base.BaseService;
import cn.datax.service.data.market.api.dto.ApiMaskDto;
import cn.datax.service.data.market.api.entity.ApiMaskEntity;
import cn.datax.service.data.market.api.vo.ApiMaskVo;
/**
* <p>
......@@ -19,9 +18,9 @@ public interface ApiMaskService extends BaseService<ApiMaskEntity> {
void updateApiMask(ApiMaskDto dataApiMask);
ApiMaskVo getApiMaskById(String id);
ApiMaskEntity getApiMaskById(String id);
ApiMaskVo getApiMaskByApiId(String apiId);
ApiMaskEntity getApiMaskByApiId(String apiId);
void deleteApiMaskById(String id);
}
......@@ -4,7 +4,6 @@ import cn.datax.service.data.market.api.dto.SqlParseDto;
import cn.datax.service.data.market.api.entity.DataApiEntity;
import cn.datax.service.data.market.api.dto.DataApiDto;
import cn.datax.common.base.BaseService;
import cn.datax.service.data.market.api.vo.DataApiVo;
import cn.datax.service.data.market.api.vo.SqlParseVo;
/**
......@@ -21,7 +20,7 @@ public interface DataApiService extends BaseService<DataApiEntity> {
void updateDataApi(DataApiDto dataApi);
DataApiVo getDataApiById(String id);
DataApiEntity getDataApiById(String id);
void deleteDataApiById(String id);
......
......@@ -52,16 +52,17 @@ public class ApiMaskServiceImpl extends BaseServiceImpl<ApiMaskDao, ApiMaskEntit
}
@Cacheable(key = "#id")
public ApiMaskVo getApiMaskById(String id) {
@Override
public ApiMaskEntity getApiMaskById(String id) {
ApiMaskEntity apiMaskEntity = super.getById(id);
return apiMaskMapper.toVO(apiMaskEntity);
return apiMaskEntity;
}
@Cacheable(key = "#apiId")
@Cacheable(key = "'api:' + #apiId")
@Override
public ApiMaskVo getApiMaskByApiId(String apiId) {
public ApiMaskEntity getApiMaskByApiId(String apiId) {
ApiMaskEntity apiMaskEntity = apiMaskDao.selectOne(new QueryWrapper<ApiMaskEntity>().eq("api_id", apiId));
return apiMaskMapper.toVO(apiMaskEntity);
return apiMaskEntity;
}
@CacheEvict(key = "#id")
......
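One detail worth noting in this hunk: both lookup methods share the same cache, so the old key = "#apiId" could collide with entries written by getApiMaskById under key = "#id" whenever an entity id and an api id happened to be equal. The new key = "'api:' + #apiId" namespaces the second method. A sketch of the resulting keys (the cache name is whatever the class-level cache annotation declares, which this diff does not show):
// getApiMaskById("42")    -> cached under key "42"
// getApiMaskByApiId("42") -> cached under key "api:42" (no collision)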
......@@ -5,7 +5,6 @@ import cn.datax.common.utils.ThrowableUtil;
import cn.datax.service.data.market.api.dto.*;
import cn.datax.service.data.market.api.entity.DataApiEntity;
import cn.datax.service.data.market.api.enums.ConfigType;
import cn.datax.service.data.market.api.vo.DataApiVo;
import cn.datax.service.data.market.api.vo.SqlParseVo;
import cn.datax.service.data.market.service.DataApiService;
import cn.datax.service.data.market.mapstruct.DataApiMapper;
......@@ -90,9 +89,10 @@ public class DataApiServiceImpl extends BaseServiceImpl<DataApiDao, DataApiEntit
}
@Cacheable(key = "#id", unless = "#result == null")
public DataApiVo getDataApiById(String id) {
@Override
public DataApiEntity getDataApiById(String id) {
DataApiEntity dataApiEntity = super.getById(id);
return dataApiMapper.toVO(dataApiEntity);
return dataApiEntity;
}
@CacheEvict(key = "#id")
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-market"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-file"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
......@@ -54,8 +54,8 @@ public class QrtzJobController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getQrtzJobById(@PathVariable String id) {
QrtzJobVo qrtzJobVo = qrtzJobService.getQrtzJobById(id);
return R.ok().setData(qrtzJobVo);
QrtzJobEntity qrtzJobEntity = qrtzJobService.getQrtzJobById(id);
return R.ok().setData(qrtzJobMapper.toVO(qrtzJobEntity));
}
/**
......
......@@ -54,8 +54,8 @@ public class QrtzJobLogController extends BaseController {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R getQrtzJobLogById(@PathVariable String id) {
QrtzJobLogVo qrtzJobLogVo = qrtzJobLogService.getQrtzJobLogById(id);
return R.ok().setData(qrtzJobLogVo);
QrtzJobLogEntity qrtzJobLogEntity = qrtzJobLogService.getQrtzJobLogById(id);
return R.ok().setData(qrtzJobLogMapper.toVO(qrtzJobLogEntity));
}
/**
......
......@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service;
import cn.datax.service.quartz.api.entity.QrtzJobLogEntity;
import cn.datax.service.quartz.api.dto.QrtzJobLogDto;
import cn.datax.common.base.BaseService;
import cn.datax.service.quartz.api.vo.QrtzJobLogVo;
/**
* <p>
......@@ -19,7 +18,7 @@ public interface QrtzJobLogService extends BaseService<QrtzJobLogEntity> {
void updateQrtzJobLog(QrtzJobLogDto qrtzJobLog);
QrtzJobLogVo getQrtzJobLogById(String id);
QrtzJobLogEntity getQrtzJobLogById(String id);
void deleteQrtzJobLogById(String id);
}
......@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service;
import cn.datax.service.quartz.api.entity.QrtzJobEntity;
import cn.datax.service.quartz.api.dto.QrtzJobDto;
import cn.datax.common.base.BaseService;
import cn.datax.service.quartz.api.vo.QrtzJobVo;
/**
* <p>
......@@ -19,7 +18,7 @@ public interface QrtzJobService extends BaseService<QrtzJobEntity> {
void updateQrtzJob(QrtzJobDto qrtzJob);
QrtzJobVo getQrtzJobById(String id);
QrtzJobEntity getQrtzJobById(String id);
void deleteQrtzJobById(String id);
......
......@@ -2,7 +2,6 @@ package cn.datax.service.quartz.service.impl;
import cn.datax.service.quartz.api.entity.QrtzJobLogEntity;
import cn.datax.service.quartz.api.dto.QrtzJobLogDto;
import cn.datax.service.quartz.api.vo.QrtzJobLogVo;
import cn.datax.service.quartz.service.QrtzJobLogService;
import cn.datax.service.quartz.mapstruct.QrtzJobLogMapper;
import cn.datax.service.quartz.dao.QrtzJobLogDao;
......@@ -45,9 +44,9 @@ public class QrtzJobLogServiceImpl extends BaseServiceImpl<QrtzJobLogDao, QrtzJo
}
@Override
public QrtzJobLogVo getQrtzJobLogById(String id) {
public QrtzJobLogEntity getQrtzJobLogById(String id) {
QrtzJobLogEntity qrtzJobLogEntity = super.getById(id);
return qrtzJobLogMapper.toVO(qrtzJobLogEntity);
return qrtzJobLogEntity;
}
@Override
......
......@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service.impl;
import cn.datax.common.core.DataConstant;
import cn.datax.service.quartz.api.entity.QrtzJobEntity;
import cn.datax.service.quartz.api.dto.QrtzJobDto;
import cn.datax.service.quartz.api.vo.QrtzJobVo;
import cn.datax.service.quartz.quartz.utils.ScheduleUtil;
import cn.datax.service.quartz.service.QrtzJobService;
import cn.datax.service.quartz.mapstruct.QrtzJobMapper;
......@@ -56,9 +55,9 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit
@Cacheable(key = "#id")
@Override
public QrtzJobVo getQrtzJobById(String id) {
public QrtzJobEntity getQrtzJobById(String id) {
QrtzJobEntity qrtzJobEntity = super.getById(id);
return qrtzJobMapper.toVO(qrtzJobEntity);
return qrtzJobEntity;
}
@CacheEvict(key = "#id")
......@@ -69,17 +68,19 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit
qrtzJobDao.deleteById(id);
}
@CachePut(key = "#id")
@Override
public void pauseById(String id) {
QrtzJobEntity job = super.getById(id);
QrtzJobEntity job = getQrtzJobById(id);
job.setStatus(DataConstant.EnableState.DISABLE.getKey());
super.updateById(job);
ScheduleUtil.pauseJob(id);
}
@CachePut(key = "#id")
@Override
public void resumeById(String id) {
QrtzJobEntity job = super.getById(id);
QrtzJobEntity job = getQrtzJobById(id);
job.setStatus(DataConstant.EnableState.ENABLE.getKey());
super.updateById(job);
ScheduleUtil.resumeJob(id);
......@@ -87,7 +88,7 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit
@Override
public void runById(String id) {
QrtzJobEntity job = super.getById(id);
QrtzJobEntity job = getQrtzJobById(id);
ScheduleUtil.runJob(job);
}
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-quartz"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
......@@ -36,18 +36,18 @@ import org.springframework.web.servlet.DispatcherServlet;
"org.flowable.ui.common.rest"
},
excludeFilters = {
//Remove the flowable.common.app settings
// Remove the flowable.common.app settings
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = EditorUsersResource.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = EditorGroupsResource.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = RemoteIdmServiceImpl.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = RemoteIdmAuthenticationProvider.class),
//Remove Flowable's built-in Spring Security settings
// Remove Flowable's built-in Spring Security settings
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.ApiWebSecurityConfigurationAdapter.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.ActuatorWebSecurityConfigurationAdapter.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.FormLoginWebSecurityConfigurerAdapter.class),
//Editor i18n files: the front end supports i18n since Flowable 6.5, so this exclusion is no longer needed
//@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = StencilSetResource.class),
// Editor i18n files: the front end supports i18n since Flowable 6.5, so this exclusion is no longer needed
// @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = StencilSetResource.class),
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = org.flowable.ui.modeler.conf.ApplicationConfiguration.class) ,
// Exclude the built-in user-info resource; a custom implementation is used instead
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = RemoteAccountResource.class)
......
log4j.rootLogger=DEBUG, CONSOLE
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern= %d{hh:mm:ss,SSS} [%t] %-5p %c %x - %m%n
\ No newline at end of file
......@@ -41,10 +41,5 @@
<artifactId>datax-common-core</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>cn.datax</groupId>
<artifactId>datax-common-dictionary</artifactId>
<version>2.0.0</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package cn.datax.service.workflow.config;
import lombok.extern.slf4j.Slf4j;
import org.flowable.spring.SpringProcessEngineConfiguration;
import org.flowable.spring.boot.EngineConfigurationConfigurer;
import org.springframework.context.annotation.Configuration;
@Slf4j
@Configuration
public class ProcessEngineConfig implements EngineConfigurationConfigurer<SpringProcessEngineConfiguration> {
@Override
public void configure(SpringProcessEngineConfiguration engineConfiguration) {
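// 宋体 (SimSun) keeps Chinese activity, label and annotation text legible in generated process diagrams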
engineConfiguration.setActivityFontName("宋体");
engineConfiguration.setLabelFontName("宋体");
engineConfiguration.setAnnotationFontName("宋体");
log.info("配置字体:" + engineConfiguration.getActivityFontName());
}
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-workflow"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes log events to the given file, then rolls them over to another file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to XXX.%date%.%index and new logs still go to project_info.log -->
<!-- 2. if the date has not changed but the current file exceeds 10MB, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically, driven by fileNamePattern -->
<!-- File name: logs/project_info.2017-12-05.0.log -->
<!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory, otherwise an error is raised -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Retention period of each log file; the unit of maxHistory is inferred from the rollover pattern in fileNamePattern: yyyy-MM-dd means days, yyyy-MM means months (yyyy-MM-dd is the default) -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: overall size cap for the archived logs; once exceeded, the oldest archives are deleted even if maxHistory has not been reached -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size limit of the active file, 10MB by default; lower it (e.g. to 5KB) to watch rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
module.log=com.p6spy.engine.logging.P6LogFactory,com.p6spy.engine.outage.P6OutageFactory
# Custom log formatting
logMessageFormat=com.baomidou.mybatisplus.extension.p6spy.P6SpyLogger
# Log to the console
appender=com.baomidou.mybatisplus.extension.p6spy.StdoutLogger
# Or record SQL through the logging framework
#appender=com.p6spy.engine.spy.appender.Slf4JLogger
# Deregister the real drivers so the p6spy driver acts as the proxy
deregisterdrivers=true
# Require the jdbc:p6spy: prefix on JDBC URLs
useprefix=true
# Log categories to exclude; removable categories: error,info,batch,debug,statement,commit,rollback,result,resultset
excludecategories=info,debug,result,batch,resultset
# Date format
dateformat=yyyy-MM-dd HH:mm:ss
# Real drivers, more than one allowed
#driverlist=org.h2.Driver
# Whether to enable slow-SQL logging
outagedetection=true
# Slow-SQL threshold: 2 seconds
outagedetectioninterval=2
# Enable filtering
filter=true
# Statements that are not logged
exclude=select 1
\ No newline at end of file
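These properties only take effect when the datasource is pointed at the P6Spy proxy driver, as the pre-2.0.0 datasource block earlier in this commit did. A minimal wiring sketch (host, database and credentials are placeholders):
spring:
  datasource:
    driver-class-name: com.p6spy.engine.spy.P6SpyDriver
    # the jdbc:p6spy: prefix is required because useprefix=true
    url: jdbc:p6spy:mysql://127.0.0.1:3306/demo_db?useUnicode=true&characterEncoding=utf-8
    username: demo
    password: demo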
......@@ -64,8 +64,8 @@ public class ${table.controllerName} {
@ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
@GetMapping("/{id}")
public R get${className}ById(@PathVariable String id) {
${className}Vo ${classNameLower}Vo = ${classNameLower}Service.get${className}ById(id);
return R.ok().setData(${classNameLower}Vo);
${entity} ${classNameLower}Entity = ${classNameLower}Service.get${className}ById(id);
return R.ok().setData(${classNameLower}Mapper.toVO(${classNameLower}Entity));
}
/**
......
package ${package.Service};
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.entity.${entity};
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.vo.${className}Vo;
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.dto.${className}Dto;
import ${superServiceClassPackage};
......@@ -19,7 +18,7 @@ public interface ${table.serviceName} extends ${superServiceClass}<${entity}> {
void update${className}(${className}Dto ${classNameLower});
${className}Vo get${className}ById(String id);
${entity} get${className}ById(String id);
void delete${className}ById(String id);
}
package ${package.ServiceImpl};
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.entity.${entity};
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.vo.${className}Vo;
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.dto.${className}Dto;
import ${package.Service}.${table.serviceName};
import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.mapstruct.${className}Mapper;
......@@ -45,9 +44,9 @@ public class ${table.serviceImplName} extends ${superServiceImplClass}<${table.m
}
@Override
public ${className}Vo get${className}ById(String id) {
public ${entity} get${className}ById(String id) {
${entity} ${classNameLower}Entity = super.getById(id);
return ${classNameLower}Mapper.toVO(${classNameLower}Entity);
return ${classNameLower}Entity;
}
@Override
......
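With this template change, the generated get${className}ById returns the entity, and the controller template shown above converts it via ${classNameLower}Mapper.toVO. For a hypothetical className of DemoUser, the generated service method would expand to roughly:
// Hypothetical expansion for className = DemoUser (illustrative only)
@Override
public DemoUserEntity getDemoUserById(String id) {
    DemoUserEntity demoUserEntity = super.getById(id);
    return demoUserEntity;
}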