Commit 5291364d by yuwei

2.0.0 project initialization

parent 99660245
@@ -119,4 +119,16 @@ spring:
             - name: Hystrix
               args:
                 name: quartzHystrix
-                fallbackUri: forward:/fallback
+                fallbackUri: forward:/fallback
+        # Workflow
+        - id: datax-service-workflow
+          uri: lb://datax-service-workflow
+          predicates:
+            - Path=/workflow/**
+          filters:
+            - SwaggerHeaderFilter
+            - StripPrefix=1
+            - name: Hystrix
+              args:
+                name: workFlowHystrix
+                fallbackUri: forward:/fallback
\ No newline at end of file
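Note: the `forward:/fallback` URIs above assume the gateway itself exposes a matching fallback endpoint. A minimal sketch of such a handler, assuming an `R.error(String)` helper exists (the actual fallback controller is defined elsewhere in this project):

import cn.datax.common.core.R;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Receives requests forwarded by the Hystrix gateway filter when a
// downstream service (quartz, workflow, ...) times out or fails.
@RestController
public class FallbackController {

    @RequestMapping("/fallback")
    public R fallback() {
        // Illustrative degraded response; the payload shape is an assumption.
        return R.error("service temporarily unavailable, please retry later");
    }
}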
@@ -2,62 +2,45 @@
 spring:
   redis:
     database: 1
-    host: 127.0.0.1
+    host: 192.168.234.101
     port: 6379
-    password: # password (empty by default)
+    password: 1234@abcd # password (empty by default)
     timeout: 6000ms # connection timeout (ms)
     lettuce:
       pool:
         max-active: 1000 # maximum pool connections (negative value = no limit)
         max-wait: -1ms # maximum blocking wait time (negative value = no limit)
         max-idle: 10 # maximum idle connections in the pool
         min-idle: 5 # minimum idle connections in the pool
   datasource:
-    dynamic:
-      type: com.zaxxer.hikari.HikariDataSource
-      hikari:
-        auto-commit: false
-        connection-timeout: 30000
-        idle-timeout: 25000
-        login-timeout: 5
-        max-lifetime: 30000
-        read-only: false
-        validation-timeout: 3000
-        maximum-pool-size: 15
-        minimum-idle: 5
-        pool-name: DataxHikariCP
-        connection-test-query: SELECT 1 FROM DUAL
-        data-source-properties:
-          cachePrepStmts: true
-          prepStmtCacheSize: 250
-          prepStmtCacheSqlLimit: 2048
-          useServerPrepStmts: true
-          useLocalSessionState: true
-          rewriteBatchedStatements: true
-          cacheResultSetMetadata: true
-          cacheServerConfiguration: true
-          elideSetAutoCommits: true
-          maintainTimeStats: false
-      primary: mysql
-      datasource:
-        mysql:
-          driver-class-name: com.p6spy.engine.spy.P6SpyDriver
-          url: jdbc:p6spy:mysql://127.0.0.1:3306/data_cloud?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
-          username: root
-          password: 1234@abcd
-mybatis-plus:
-  mapper-locations: classpath*:mapper/*Mapper.xml
-  type-aliases-package: cn.datax.service.workflow.api.entity
-  global-config:
-    db-config:
-      id-type: ASSIGN_ID
-    banner: false
-  configuration:
-    map-underscore-to-camel-case: true
-    cache-enabled: false
-    call-setters-on-nulls: true
-    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
+    driver-class-name: com.mysql.cj.jdbc.Driver
+    url: jdbc:mysql://192.168.234.100:3306/data_cloud_workflow?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
+    username: root
+    password: 1234@abcd
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      auto-commit: false
+      connection-timeout: 30000
+      idle-timeout: 25000
+      login-timeout: 5
+      max-lifetime: 30000
+      read-only: false
+      validation-timeout: 3000
+      maximum-pool-size: 15
+      minimum-idle: 5
+      pool-name: DataxHikariCP
+      connection-test-query: SELECT 1 FROM DUAL
+      data-source-properties:
+        cachePrepStmts: true
+        prepStmtCacheSize: 250
+        prepStmtCacheSqlLimit: 2048
+        useServerPrepStmts: true
+        useLocalSessionState: true
+        rewriteBatchedStatements: true
+        cacheResultSetMetadata: true
+        cacheServerConfiguration: true
+        elideSetAutoCommits: true
+        maintainTimeStats: false

 # spring security configuration
 security:
@@ -70,4 +53,10 @@ security:
     scope: all
   resource:
     loadBalanced: true
-    token-info-uri: http://localhost:8612/auth/oauth/check_token
\ No newline at end of file
+    token-info-uri: http://localhost:8612/auth/oauth/check_token
+flowable:
+  # Disable the scheduled-job async executor
+  async-executor-activate: false
+  # With databaseSchemaUpdate set to true, Flowable automatically upgrades the
+  # database tables to the new version whenever they differ from the engine's.
+  database-schema-update: true
\ No newline at end of file
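For reference, `flowable.database-schema-update: true` is the Spring Boot form of the engine-level setting sketched below. A standalone sketch, assuming the MySQL coordinates from the config above:

import org.flowable.engine.ProcessEngine;
import org.flowable.engine.ProcessEngineConfiguration;

public class FlowableSchemaUpdateSketch {
    public static void main(String[] args) {
        // On startup Flowable compares its table versions with the engine
        // version and upgrades the schema automatically when they differ.
        ProcessEngine engine = ProcessEngineConfiguration
                .createStandaloneProcessEngineConfiguration()
                .setJdbcDriver("com.mysql.cj.jdbc.Driver")
                .setJdbcUrl("jdbc:mysql://192.168.234.100:3306/data_cloud_workflow")
                .setJdbcUsername("root")
                .setJdbcPassword("1234@abcd")
                .setDatabaseSchemaUpdate(ProcessEngineConfiguration.DB_SCHEMA_UPDATE_TRUE)
                .buildProcessEngine();
        engine.close();
    }
}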
@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;

 @FeignClient(contextId = "dataSourceServiceFeign", value = "datax-service-data-factory", fallbackFactory = DataSourceServiceFeignFallbackFactory.class)
 public interface DataSourceServiceFeign {

-    @GetMapping("/inner/dataSource/{id}")
+    @GetMapping("/dataSource/{id}")
     R getDataSourceById(@PathVariable("id") String id);
 }
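The `fallbackFactory` referenced above is defined elsewhere in this repo; as a rough sketch, such a factory typically has the following shape, assuming the Hystrix-era `feign.hystrix.FallbackFactory` API and an `R.error(String)` helper:

import cn.datax.common.core.R;
import feign.hystrix.FallbackFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

// Supplies a degraded DataSourceServiceFeign when datax-service-data-factory
// is unreachable; the logging and error payload are illustrative.
@Component
public class DataSourceServiceFeignFallbackFactory implements FallbackFactory<DataSourceServiceFeign> {

    private static final Logger log = LoggerFactory.getLogger(DataSourceServiceFeignFallbackFactory.class);

    @Override
    public DataSourceServiceFeign create(Throwable cause) {
        // DataSourceServiceFeign has a single method, so a lambda suffices.
        return id -> {
            log.error("getDataSourceById({}) failed", id, cause);
            return R.error("data source service unavailable");
        };
    }
}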
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-sql-console"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
@@ -2,17 +2,13 @@ package cn.datax.service.data.factory.controller;

 import cn.datax.common.core.JsonPage;
 import cn.datax.common.core.R;
-import cn.datax.common.database.DbQuery;
-import cn.datax.common.database.core.PageResult;
 import cn.datax.common.validate.ValidationGroups;
 import cn.datax.service.data.factory.api.dto.DataSetDto;
 import cn.datax.service.data.factory.api.entity.DataSetEntity;
-import cn.datax.service.data.factory.api.query.DbDataQuery;
 import cn.datax.service.data.factory.api.vo.DataSetVo;
 import cn.datax.service.data.factory.api.query.DataSetQuery;
 import cn.datax.service.data.factory.mapstruct.DataSetMapper;
 import cn.datax.service.data.factory.service.DataSetService;
-import cn.datax.service.data.factory.service.DataSourceService;
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@@ -27,7 +23,6 @@ import org.springframework.web.bind.annotation.*;
 import cn.datax.common.base.BaseController;

 import java.util.List;
-import java.util.Map;
 import java.util.stream.Collectors;

 /**
@@ -49,9 +44,6 @@ public class DataSetController extends BaseController {
     @Autowired
     private DataSetMapper dataSetMapper;

-    @Autowired
-    private DataSourceService dataSourceService;
-
     /**
      * Query info by ID
      *
@@ -62,8 +54,8 @@ public class DataSetController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getDataSetById(@PathVariable String id) {
-        DataSetVo dataSetVo = dataSetService.getDataSetById(id);
-        return R.ok().setData(dataSetVo);
+        DataSetEntity dataSetEntity = dataSetService.getDataSetById(id);
+        return R.ok().setData(dataSetMapper.toVO(dataSetEntity));
     }

     /**
@@ -126,23 +118,4 @@ public class DataSetController extends BaseController {
         dataSetService.deleteDataSetById(id);
         return R.ok();
     }
-
-    @ApiOperation(value = "获取SQL结果", notes = "根据数据源的id来获取SQL结果")
-    @ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
-    @GetMapping("/queryList")
-    public R queryList(@RequestBody @Validated DbDataQuery dbDataQuery) {
-        DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
-        List<Map<String, Object>> list = dbQuery.queryList(dbDataQuery.getSql());
-        return R.ok().setData(list);
-    }
-
-    @ApiOperation(value = "分页获取SQL结果", notes = "根据数据源的id来分页获取SQL结果")
-    @ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
-    @GetMapping("/queryByPage")
-    public R queryByPage(@RequestBody @Validated DbDataQuery dbDataQuery) {
-        DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
-        PageResult<Map<String, Object>> page = dbQuery.queryByPage(dbDataQuery.getSql(), dbDataQuery.getOffset(), dbDataQuery.getPageSize());
-        page.setPageNum(dbDataQuery.getPageNum()).setPageSize(dbDataQuery.getPageSize());
-        return R.ok().setData(page);
-    }
 }
@@ -5,10 +5,12 @@ import cn.datax.common.core.R;

 import cn.datax.common.database.DbQuery;
 import cn.datax.common.database.core.DbColumn;
 import cn.datax.common.database.core.DbTable;
+import cn.datax.common.database.core.PageResult;
 import cn.datax.common.validate.ValidationGroups;
 import cn.datax.service.data.factory.api.dto.DataSourceDto;
 import cn.datax.service.data.factory.api.entity.DataSourceEntity;
 import cn.datax.service.data.factory.api.query.DataSourceQuery;
+import cn.datax.service.data.factory.api.query.DbDataQuery;
 import cn.datax.service.data.factory.api.vo.DataSourceVo;
 import cn.datax.service.data.factory.mapstruct.DataSourceMapper;
 import cn.datax.service.data.factory.service.DataSourceService;
@@ -26,6 +28,7 @@ import org.springframework.web.bind.annotation.*;
 import cn.datax.common.base.BaseController;

 import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;

 /**
@@ -57,8 +60,8 @@ public class DataSourceController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getDataSourceById(@PathVariable String id) {
-        DataSourceVo dataSourceVo = dataSourceService.getDataSourceById(id);
-        return R.ok().setData(dataSourceVo);
+        DataSourceEntity dataSourceEntity = dataSourceService.getDataSourceById(id);
+        return R.ok().setData(dataSourceMapper.toVO(dataSourceEntity));
     }

     /**
@@ -170,4 +173,23 @@ public class DataSourceController extends BaseController {
         List<DbColumn> columns = dbQuery.getTableColumns(dbName, tableName);
         return R.ok().setData(columns);
     }
+
+    @ApiOperation(value = "获取SQL结果", notes = "根据数据源的id来获取SQL结果")
+    @ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
+    @GetMapping("/queryList")
+    public R queryList(@RequestBody @Validated DbDataQuery dbDataQuery) {
+        DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
+        List<Map<String, Object>> list = dbQuery.queryList(dbDataQuery.getSql());
+        return R.ok().setData(list);
+    }
+
+    @ApiOperation(value = "分页获取SQL结果", notes = "根据数据源的id来分页获取SQL结果")
+    @ApiImplicitParam(name = "dbDataQuery", value = "详细实体dbDataQuery", required = true, dataType = "DbDataQuery")
+    @GetMapping("/queryByPage")
+    public R queryByPage(@RequestBody @Validated DbDataQuery dbDataQuery) {
+        DbQuery dbQuery = dataSourceService.getDbQuery(dbDataQuery.getDataSourceId());
+        PageResult<Map<String, Object>> page = dbQuery.queryByPage(dbDataQuery.getSql(), dbDataQuery.getOffset(), dbDataQuery.getPageSize());
+        page.setPageNum(dbDataQuery.getPageNum()).setPageSize(dbDataQuery.getPageSize());
+        return R.ok().setData(page);
+    }
 }
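The two endpoints added above execute caller-supplied SQL through `DbQuery` and expect the page coordinates plus a precomputed offset in the request body. A hedged sketch of building that request object; the setters mirror the getters used above and are assumed to exist, and the zero-based offset convention is an assumption:

// Field names inferred from the getters used in queryByPage:
// getDataSourceId, getSql, getPageNum, getPageSize, getOffset.
DbDataQuery query = new DbDataQuery();
query.setDataSourceId("1001");                 // illustrative data source id
query.setSql("select * from t_user");          // illustrative SQL
query.setPageNum(2);
query.setPageSize(20);
query.setOffset((query.getPageNum() - 1) * query.getPageSize()); // assumed offset convention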
 package cn.datax.service.data.factory.controller;

 import cn.datax.common.base.BaseController;
-import cn.datax.common.core.R;
-import cn.datax.common.security.annotation.DataInner;
-import cn.datax.service.data.factory.api.vo.DataSourceVo;
-import cn.datax.service.data.factory.service.DataSourceService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
@@ -15,13 +8,4 @@ import org.springframework.web.bind.annotation.RestController;
 @RequestMapping("/inner")
 public class InnerController extends BaseController {
-
-    @Autowired
-    private DataSourceService dataSourceService;
-
-    @DataInner
-    @GetMapping("/dataSource/{id}")
-    public R getDataSourceById(@PathVariable String id) {
-        DataSourceVo dataSourceVo = dataSourceService.getDataSourceById(id);
-        return R.ok().setData(dataSourceVo);
-    }
 }
@@ -3,7 +3,6 @@ package cn.datax.service.data.factory.service;

 import cn.datax.service.data.factory.api.entity.DataSetEntity;
 import cn.datax.service.data.factory.api.dto.DataSetDto;
 import cn.datax.common.base.BaseService;
-import cn.datax.service.data.factory.api.vo.DataSetVo;

 /**
  * <p>
@@ -19,7 +18,7 @@ public interface DataSetService extends BaseService<DataSetEntity> {

     void updateDataSet(DataSetDto dataSet);

-    DataSetVo getDataSetById(String id);
+    DataSetEntity getDataSetById(String id);

     void deleteDataSetById(String id);
 }
@@ -4,7 +4,6 @@ import cn.datax.common.database.DbQuery;

 import cn.datax.service.data.factory.api.dto.DataSourceDto;
 import cn.datax.service.data.factory.api.entity.DataSourceEntity;
 import cn.datax.common.base.BaseService;
-import cn.datax.service.data.factory.api.vo.DataSourceVo;

 /**
  * <p>
@@ -20,7 +19,7 @@ public interface DataSourceService extends BaseService<DataSourceEntity> {

     void updateDataSource(DataSourceDto dataSource);

-    DataSourceVo getDataSourceById(String id);
+    DataSourceEntity getDataSourceById(String id);

     void deleteDataSourceById(String id);
...
@@ -2,7 +2,6 @@ package cn.datax.service.data.factory.service.impl;

 import cn.datax.service.data.factory.api.dto.DataSetDto;
 import cn.datax.service.data.factory.api.entity.DataSetEntity;
-import cn.datax.service.data.factory.api.vo.DataSetVo;
 import cn.datax.service.data.factory.service.DataSetService;
 import cn.datax.service.data.factory.mapstruct.DataSetMapper;
 import cn.datax.service.data.factory.dao.DataSetDao;
@@ -51,9 +50,10 @@ public class DataSetServiceImpl extends BaseServiceImpl<DataSetDao, DataSetEntit
     }

     @Cacheable(key = "#id", unless = "#result == null")
-    public DataSetVo getDataSetById(String id) {
+    @Override
+    public DataSetEntity getDataSetById(String id) {
         DataSetEntity dataSetEntity = super.getById(id);
-        return dataSetMapper.toVO(dataSetEntity);
+        return dataSetEntity;
     }

     @CacheEvict(key = "#id")
...
@@ -6,7 +6,6 @@ import cn.datax.common.database.constants.DbQueryProperty;

 import cn.datax.service.data.factory.api.dto.DataSourceDto;
 import cn.datax.service.data.factory.api.dto.DbSchema;
 import cn.datax.service.data.factory.api.entity.DataSourceEntity;
-import cn.datax.service.data.factory.api.vo.DataSourceVo;
 import cn.datax.service.data.factory.dao.DataSourceDao;
 import cn.datax.service.data.factory.service.DataSourceService;
 import cn.datax.service.data.factory.mapstruct.DataSourceMapper;
@@ -58,9 +57,10 @@ public class DataSourceServiceImpl extends BaseServiceImpl<DataSourceDao, DataSo
     }

     @Cacheable(key = "#id", unless = "#result == null")
-    public DataSourceVo getDataSourceById(String id) {
+    @Override
+    public DataSourceEntity getDataSourceById(String id) {
         DataSourceEntity dataSourceEntity = super.getById(id);
-        return dataSourceMapper.toVO(dataSourceEntity);
+        return dataSourceEntity;
     }

     @CacheEvict(key = "#id")
...
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-factory"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-api-call"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;

 @FeignClient(contextId = "apiMaskServiceFeign", value = "datax-service-data-market", fallbackFactory = ApiMaskServiceFeignFallbackFactory.class)
 public interface ApiMaskServiceFeign {

-    @GetMapping("/inner/apiMask/{apiId}")
+    @GetMapping("/apiMask/api/{apiId}")
     R getApiMaskByApiId(@PathVariable("apiId") String apiId);
 }
@@ -9,6 +9,6 @@ import org.springframework.web.bind.annotation.PathVariable;

 @FeignClient(contextId = "dataApiServiceFeign", value = "datax-service-data-market", fallbackFactory = DataApiServiceFeignFallbackFactory.class)
 public interface DataApiServiceFeign {

-    @GetMapping("/inner/dataApi/{id}")
+    @GetMapping("/dataApi/{id}")
     R getDataApiById(@PathVariable("id") String id);
 }
@@ -53,8 +53,22 @@ public class ApiMaskController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getApiMaskById(@PathVariable String id) {
-        ApiMaskVo apiMaskVo = apiMaskService.getApiMaskById(id);
-        return R.ok().setData(apiMaskVo);
+        ApiMaskEntity apiMaskEntity = apiMaskService.getApiMaskById(id);
+        return R.ok().setData(apiMaskMapper.toVO(apiMaskEntity));
+    }
+
+    /**
+     * Query info by ID
+     *
+     * @param id
+     * @return
+     */
+    @ApiOperation(value = "获取详细信息", notes = "根据url的id来获取详细信息")
+    @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
+    @GetMapping("/api/{id}")
+    public R getApiMaskByApiId(@PathVariable String id) {
+        ApiMaskEntity apiMaskEntity = apiMaskService.getApiMaskByApiId(id);
+        return R.ok().setData(apiMaskMapper.toVO(apiMaskEntity));
     }

     /**
...
@@ -56,8 +56,8 @@ public class DataApiController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getDataApiById(@PathVariable String id) {
-        DataApiVo dataApiVo = dataApiService.getDataApiById(id);
-        return R.ok().setData(dataApiVo);
+        DataApiEntity dataApiEntity = dataApiService.getDataApiById(id);
+        return R.ok().setData(dataApiMapper.toVO(dataApiEntity));
     }

     /**
...
 package cn.datax.service.data.market.controller;

 import cn.datax.common.base.BaseController;
-import cn.datax.common.core.R;
-import cn.datax.common.security.annotation.DataInner;
-import cn.datax.service.data.market.api.vo.ApiMaskVo;
-import cn.datax.service.data.market.api.vo.DataApiVo;
-import cn.datax.service.data.market.service.ApiMaskService;
-import cn.datax.service.data.market.service.DataApiService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
@@ -17,35 +8,4 @@ import org.springframework.web.bind.annotation.RestController;
 @RequestMapping("/inner")
 public class InnerController extends BaseController {
-
-    @Autowired
-    private DataApiService dataApiService;
-
-    @Autowired
-    private ApiMaskService apiMaskService;
-
-    /**
-     * Query info by ID
-     *
-     * @param id
-     * @return
-     */
-    @DataInner
-    @GetMapping("/dataApi/{id}")
-    public R getDataApiById(@PathVariable String id) {
-        DataApiVo dataApiVo = dataApiService.getDataApiById(id);
-        return R.ok().setData(dataApiVo);
-    }
-
-    /**
-     * Query info by ID
-     *
-     * @param apiId
-     * @return
-     */
-    @DataInner
-    @GetMapping("/apiMask/{apiId}")
-    public R getApiMaskByApiId(@PathVariable String apiId) {
-        ApiMaskVo apiMaskVo = apiMaskService.getApiMaskByApiId(apiId);
-        return R.ok().setData(apiMaskVo);
-    }
 }
@@ -3,7 +3,6 @@ package cn.datax.service.data.market.service;

 import cn.datax.common.base.BaseService;
 import cn.datax.service.data.market.api.dto.ApiMaskDto;
 import cn.datax.service.data.market.api.entity.ApiMaskEntity;
-import cn.datax.service.data.market.api.vo.ApiMaskVo;

 /**
  * <p>
@@ -19,9 +18,9 @@ public interface ApiMaskService extends BaseService<ApiMaskEntity> {

     void updateApiMask(ApiMaskDto dataApiMask);

-    ApiMaskVo getApiMaskById(String id);
+    ApiMaskEntity getApiMaskById(String id);

-    ApiMaskVo getApiMaskByApiId(String apiId);
+    ApiMaskEntity getApiMaskByApiId(String apiId);

     void deleteApiMaskById(String id);
 }
@@ -4,7 +4,6 @@ import cn.datax.service.data.market.api.dto.SqlParseDto;

 import cn.datax.service.data.market.api.entity.DataApiEntity;
 import cn.datax.service.data.market.api.dto.DataApiDto;
 import cn.datax.common.base.BaseService;
-import cn.datax.service.data.market.api.vo.DataApiVo;
 import cn.datax.service.data.market.api.vo.SqlParseVo;

 /**
@@ -21,7 +20,7 @@ public interface DataApiService extends BaseService<DataApiEntity> {

     void updateDataApi(DataApiDto dataApi);

-    DataApiVo getDataApiById(String id);
+    DataApiEntity getDataApiById(String id);

     void deleteDataApiById(String id);
...
@@ -52,16 +52,17 @@ public class ApiMaskServiceImpl extends BaseServiceImpl<ApiMaskDao, ApiMaskEntit
     }

     @Cacheable(key = "#id")
-    public ApiMaskVo getApiMaskById(String id) {
+    @Override
+    public ApiMaskEntity getApiMaskById(String id) {
         ApiMaskEntity apiMaskEntity = super.getById(id);
-        return apiMaskMapper.toVO(apiMaskEntity);
+        return apiMaskEntity;
     }

-    @Cacheable(key = "#apiId")
+    @Cacheable(key = "'api:' + #apiId")
     @Override
-    public ApiMaskVo getApiMaskByApiId(String apiId) {
+    public ApiMaskEntity getApiMaskByApiId(String apiId) {
         ApiMaskEntity apiMaskEntity = apiMaskDao.selectOne(new QueryWrapper<ApiMaskEntity>().eq("api_id", apiId));
-        return apiMaskMapper.toVO(apiMaskEntity);
+        return apiMaskEntity;
     }

     @CacheEvict(key = "#id")
...
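The key change above matters because both lookups share one cache region: with a bare `#apiId` key, an entry cached by api id could collide with the entry that `getApiMaskById` caches under the record's primary key, each silently overwriting the other. The `'api:'` prefix keeps the two key spaces apart. A condensed sketch, assuming something like `@CacheConfig(cacheNames = "apiMask")` on the class and a Redis-backed cache manager that joins names as `cacheName::key`:

@Cacheable(key = "#id")                  // stored as apiMask::<id>
ApiMaskEntity getApiMaskById(String id);

@Cacheable(key = "'api:' + #apiId")      // stored as apiMask::api:<apiId>
ApiMaskEntity getApiMaskByApiId(String apiId);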
@@ -5,7 +5,6 @@ import cn.datax.common.utils.ThrowableUtil;

 import cn.datax.service.data.market.api.dto.*;
 import cn.datax.service.data.market.api.entity.DataApiEntity;
 import cn.datax.service.data.market.api.enums.ConfigType;
-import cn.datax.service.data.market.api.vo.DataApiVo;
 import cn.datax.service.data.market.api.vo.SqlParseVo;
 import cn.datax.service.data.market.service.DataApiService;
 import cn.datax.service.data.market.mapstruct.DataApiMapper;
@@ -90,9 +89,10 @@ public class DataApiServiceImpl extends BaseServiceImpl<DataApiDao, DataApiEntit
     }

     @Cacheable(key = "#id", unless = "#result == null")
-    public DataApiVo getDataApiById(String id) {
+    @Override
+    public DataApiEntity getDataApiById(String id) {
         DataApiEntity dataApiEntity = super.getById(id);
-        return dataApiMapper.toVO(dataApiEntity);
+        return dataApiEntity;
     }

     @CacheEvict(key = "#id")
...
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-data-market"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-file"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
@@ -54,8 +54,8 @@ public class QrtzJobController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getQrtzJobById(@PathVariable String id) {
-        QrtzJobVo qrtzJobVo = qrtzJobService.getQrtzJobById(id);
-        return R.ok().setData(qrtzJobVo);
+        QrtzJobEntity qrtzJobEntity = qrtzJobService.getQrtzJobById(id);
+        return R.ok().setData(qrtzJobMapper.toVO(qrtzJobEntity));
     }

     /**
...
@@ -54,8 +54,8 @@ public class QrtzJobLogController extends BaseController {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R getQrtzJobLogById(@PathVariable String id) {
-        QrtzJobLogVo qrtzJobLogVo = qrtzJobLogService.getQrtzJobLogById(id);
-        return R.ok().setData(qrtzJobLogVo);
+        QrtzJobLogEntity qrtzJobLogEntity = qrtzJobLogService.getQrtzJobLogById(id);
+        return R.ok().setData(qrtzJobLogMapper.toVO(qrtzJobLogEntity));
     }

     /**
...
@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service;

 import cn.datax.service.quartz.api.entity.QrtzJobLogEntity;
 import cn.datax.service.quartz.api.dto.QrtzJobLogDto;
 import cn.datax.common.base.BaseService;
-import cn.datax.service.quartz.api.vo.QrtzJobLogVo;

 /**
  * <p>
@@ -19,7 +18,7 @@ public interface QrtzJobLogService extends BaseService<QrtzJobLogEntity> {

     void updateQrtzJobLog(QrtzJobLogDto qrtzJobLog);

-    QrtzJobLogVo getQrtzJobLogById(String id);
+    QrtzJobLogEntity getQrtzJobLogById(String id);

     void deleteQrtzJobLogById(String id);
 }
@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service;

 import cn.datax.service.quartz.api.entity.QrtzJobEntity;
 import cn.datax.service.quartz.api.dto.QrtzJobDto;
 import cn.datax.common.base.BaseService;
-import cn.datax.service.quartz.api.vo.QrtzJobVo;

 /**
  * <p>
@@ -19,7 +18,7 @@ public interface QrtzJobService extends BaseService<QrtzJobEntity> {

     void updateQrtzJob(QrtzJobDto qrtzJob);

-    QrtzJobVo getQrtzJobById(String id);
+    QrtzJobEntity getQrtzJobById(String id);

     void deleteQrtzJobById(String id);
...
@@ -2,7 +2,6 @@ package cn.datax.service.quartz.service.impl;

 import cn.datax.service.quartz.api.entity.QrtzJobLogEntity;
 import cn.datax.service.quartz.api.dto.QrtzJobLogDto;
-import cn.datax.service.quartz.api.vo.QrtzJobLogVo;
 import cn.datax.service.quartz.service.QrtzJobLogService;
 import cn.datax.service.quartz.mapstruct.QrtzJobLogMapper;
 import cn.datax.service.quartz.dao.QrtzJobLogDao;
@@ -45,9 +44,9 @@ public class QrtzJobLogServiceImpl extends BaseServiceImpl<QrtzJobLogDao, QrtzJo
     }

     @Override
-    public QrtzJobLogVo getQrtzJobLogById(String id) {
+    public QrtzJobLogEntity getQrtzJobLogById(String id) {
         QrtzJobLogEntity qrtzJobLogEntity = super.getById(id);
-        return qrtzJobLogMapper.toVO(qrtzJobLogEntity);
+        return qrtzJobLogEntity;
     }

     @Override
...
@@ -3,7 +3,6 @@ package cn.datax.service.quartz.service.impl;

 import cn.datax.common.core.DataConstant;
 import cn.datax.service.quartz.api.entity.QrtzJobEntity;
 import cn.datax.service.quartz.api.dto.QrtzJobDto;
-import cn.datax.service.quartz.api.vo.QrtzJobVo;
 import cn.datax.service.quartz.quartz.utils.ScheduleUtil;
 import cn.datax.service.quartz.service.QrtzJobService;
 import cn.datax.service.quartz.mapstruct.QrtzJobMapper;
@@ -56,9 +55,9 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit

     @Cacheable(key = "#id")
     @Override
-    public QrtzJobVo getQrtzJobById(String id) {
+    public QrtzJobEntity getQrtzJobById(String id) {
         QrtzJobEntity qrtzJobEntity = super.getById(id);
-        return qrtzJobMapper.toVO(qrtzJobEntity);
+        return qrtzJobEntity;
     }

     @CacheEvict(key = "#id")
@@ -69,17 +68,19 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit
         qrtzJobDao.deleteById(id);
     }

+    @CachePut(key = "#id")
     @Override
     public void pauseById(String id) {
-        QrtzJobEntity job = super.getById(id);
+        QrtzJobEntity job = getQrtzJobById(id);
         job.setStatus(DataConstant.EnableState.DISABLE.getKey());
         super.updateById(job);
         ScheduleUtil.pauseJob(id);
     }

+    @CachePut(key = "#id")
     @Override
     public void resumeById(String id) {
-        QrtzJobEntity job = super.getById(id);
+        QrtzJobEntity job = getQrtzJobById(id);
         job.setStatus(DataConstant.EnableState.ENABLE.getKey());
         super.updateById(job);
         ScheduleUtil.resumeJob(id);
@@ -87,7 +88,7 @@ public class QrtzJobServiceImpl extends BaseServiceImpl<QrtzJobDao, QrtzJobEntit

     @Override
     public void runById(String id) {
-        QrtzJobEntity job = super.getById(id);
+        QrtzJobEntity job = getQrtzJobById(id);
         ScheduleUtil.runJob(job);
     }
 }
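One caveat on the `@CachePut` additions above: Spring stores the annotated method's return value, so on a `void` method it has only `null` to store, and the in-class call to `getQrtzJobById(id)` does not pass through the caching proxy (self-invocation), so it reads the database rather than the cache. A hedged sketch of a variant that actually refreshes the cached entry; the changed return type is an assumption, not what this commit does:

// Returning the updated entity gives @CachePut something to store under the
// same key that getQrtzJobById's @Cacheable reads; a void return caches null.
@CachePut(key = "#id")
public QrtzJobEntity pauseById(String id) {
    QrtzJobEntity job = super.getById(id);
    job.setStatus(DataConstant.EnableState.DISABLE.getKey());
    super.updateById(job);
    ScheduleUtil.pauseJob(id);
    return job;
}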
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-quartz"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${log.colorPattern}</pattern>
        </encoder>
    </appender>
    <!-- File output -->
    <!-- RollingFileAppender: logs to the given file first, then rolls the log over to another file once a condition is met -->
    <!-- Roughly: 1. Logs are stored by date; when the date changes, the previous day's file is renamed to XXX.<date>.<index> while new logs keep going to project_info.log -->
    <!-- 2. If the date has not changed but the current file exceeds 10MB, the current log is split and renamed -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file path and name -->
        <File>${log.path}/info/info.log</File>
        <!-- Whether to append to the end of the file; defaults to true -->
        <append>true</append>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- The log file name changes periodically according to fileNamePattern -->
            <!-- e.g. logs/project_info.2017-12-05.0.log -->
            <!-- Note: in SizeAndTimeBasedRollingPolicy the %i and %d tokens are both mandatory; omitting either raises an error -->
            <fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
            <!-- maxHistory: each archive is kept for this many rollover periods; the unit is inferred from the date pattern
                 in fileNamePattern (yyyy-MM-dd means days, yyyy-MM means months; the default pattern is yyyy-MM-dd) -->
            <MaxHistory>${log.maxHistory}</MaxHistory>
            <!-- Total size cap for all archives: once exceeded, the oldest logs are deleted even if maxHistory has not been reached -->
            <totalSizeCap>${log.totalSizeCap}</totalSizeCap>
            <!-- maxFileSize: size of the active file, default 10MB; set it to 5KB during testing to watch the rollover -->
            <maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
@@ -36,18 +36,18 @@ import org.springframework.web.servlet.DispatcherServlet;
         "org.flowable.ui.common.rest"
     },
     excludeFilters = {
-        //Remove the flowable.common.app settings
+        // Remove the flowable.common.app settings
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = EditorUsersResource.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = EditorGroupsResource.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = RemoteIdmServiceImpl.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = RemoteIdmAuthenticationProvider.class),
-        //Remove Flowable's own spring security configuration
+        // Remove Flowable's own spring security configuration
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.ApiWebSecurityConfigurationAdapter.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.ActuatorWebSecurityConfigurationAdapter.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SecurityConfiguration.FormLoginWebSecurityConfigurerAdapter.class),
-        //Editor i18n files: as of Flowable 6.5 the front end supports internationalization, so this no longer needs excluding
-        //@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = StencilSetResource.class),
+        // Editor i18n files: as of Flowable 6.5 the front end supports internationalization, so this no longer needs excluding
+        // @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE,classes = StencilSetResource.class),
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = org.flowable.ui.modeler.conf.ApplicationConfiguration.class) ,
         // Exclude the account resource; user info is fetched through a custom implementation
         @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = RemoteAccountResource.class)
...
log4j.rootLogger=DEBUG, CONSOLE
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{HH:mm:ss,SSS} [%t] %-5p %c %x - %m%n
\ No newline at end of file
...@@ -41,10 +41,5 @@
         <artifactId>datax-common-core</artifactId>
         <version>2.0.0</version>
     </dependency>
-    <dependency>
-        <groupId>cn.datax</groupId>
-        <artifactId>datax-common-dictionary</artifactId>
-        <version>2.0.0</version>
-    </dependency>
 </dependencies>
 </project>
\ No newline at end of file
package cn.datax.service.workflow.config;
import lombok.extern.slf4j.Slf4j;
import org.flowable.spring.SpringProcessEngineConfiguration;
import org.flowable.spring.boot.EngineConfigurationConfigurer;
import org.springframework.context.annotation.Configuration;
@Slf4j
@Configuration
public class ProcessEngineConfig implements EngineConfigurationConfigurer<SpringProcessEngineConfiguration> {
@Override
public void configure(SpringProcessEngineConfiguration engineConfiguration) {
    // Use a CJK-capable font (SimSun, 宋体) so Chinese labels render correctly in generated diagrams
    engineConfiguration.setActivityFontName("宋体");
    engineConfiguration.setLabelFontName("宋体");
    engineConfiguration.setAnnotationFontName("宋体");
    log.info("Diagram font configured: " + engineConfiguration.getActivityFontName());
}
}
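Flowable's Spring Boot auto-configuration applies every EngineConfigurationConfigurer<SpringProcessEngineConfiguration> bean before the process engine is built, which is why the font settings above take effect. The same hook can adjust other engine settings; below is a minimal sketch under that assumption, with a hypothetical HistoryLevelConfig class that is not part of this commit.

package cn.datax.service.workflow.config; // hypothetical companion class, illustration only

import org.flowable.engine.impl.history.HistoryLevel;
import org.flowable.spring.SpringProcessEngineConfiguration;
import org.flowable.spring.boot.EngineConfigurationConfigurer;
import org.springframework.context.annotation.Configuration;

// Pins the history level to AUDIT before the engine is built,
// using the same configurer hook as ProcessEngineConfig above.
@Configuration
public class HistoryLevelConfig implements EngineConfigurationConfigurer<SpringProcessEngineConfiguration> {
    @Override
    public void configure(SpringProcessEngineConfiguration engineConfiguration) {
        engineConfiguration.setHistoryLevel(HistoryLevel.AUDIT);
    }
}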
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<property name="log.path" value="logs/datax-service-workflow"/>
<property name="log.maxHistory" value="15"/>
<property name="log.totalSizeCap" value="500MB"/>
<property name="log.maxFileSize" value="10MB"/>
<property name="log.colorPattern"
value="%magenta(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %boldCyan(${springAppName:-}) %yellow(%thread) %green(%logger) %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${springAppName:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.colorPattern}</pattern>
</encoder>
</appender>
<!-- File output -->
<!-- RollingFileAppender: writes to the configured file and rolls the log over to a new file once a condition is met -->
<!-- Roughly: 1. logs are stored per day; when the date changes, the previous day's file is renamed to name.%date.%index while new entries keep going to the active file -->
<!-- 2. if the date has not changed but the active file exceeds maxFileSize, the current file is split and renamed -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Log file path and name -->
<File>${log.path}/info/info.log</File>
<!-- Whether to append to the end of the file; defaults to true -->
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- The log file name changes periodically according to fileNamePattern -->
<!-- e.g. logs/project_info.2017-12-05.0.log -->
<!-- Note: with SizeAndTimeBasedRollingPolicy both the %i and %d tokens are mandatory; omitting either raises an error -->
<fileNamePattern>${log.path}/info/info.%d.%i.log</fileNamePattern>
<!-- Each rolled file is kept for ${log.maxHistory} rollover periods; maxHistory's unit is inferred from the date pattern in fileNamePattern:
     yyyy-MM-dd means days, yyyy-MM means months, and a bare %d defaults to yyyy-MM-dd -->
<MaxHistory>${log.maxHistory}</MaxHistory>
<!-- totalSizeCap: size cap across all archived files; the oldest archives are deleted once ${log.totalSizeCap} is exceeded, even inside the maxHistory window -->
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<!-- maxFileSize: size of the active file before it rolls (default 10MB); lower it to e.g. 5KB to watch the rollover while testing -->
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${log.path}/error/error.log</File>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.%d.%i.log</fileNamePattern>
<MaxHistory>${log.maxHistory}</MaxHistory>
<totalSizeCap>${log.totalSizeCap}</totalSizeCap>
<maxFileSize>${log.maxFileSize}</maxFileSize>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<root level="debug">
<appender-ref ref="console"/>
</root>
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>
\ No newline at end of file
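The comment on maxFileSize suggests lowering it to 5KB to watch the rollover behave. A quick way to exercise that is a throwaway main that floods the info logger; the class below is a hypothetical smoke test (not part of this commit), assuming the logback.xml above is on the classpath.

package cn.datax.service.workflow.demo; // hypothetical test class, illustration only

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Floods the INFO level so logs/datax-service-workflow/info/ fills with rolled info.%d.%i.log files.
public class RolloverSmokeTest {
    private static final Logger log = LoggerFactory.getLogger(RolloverSmokeTest.class);

    public static void main(String[] args) {
        for (int i = 0; i < 100_000; i++) {
            log.info("rollover test line {}", i);
        }
    }
}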
module.log=com.p6spy.engine.logging.P6LogFactory,com.p6spy.engine.outage.P6OutageFactory
# Custom log formatting
logMessageFormat=com.baomidou.mybatisplus.extension.p6spy.P6SpyLogger
# Log to stdout
appender=com.baomidou.mybatisplus.extension.p6spy.StdoutLogger
# Or record SQL through the logging framework instead
#appender=com.p6spy.engine.spy.appender.Slf4JLogger
# Deregister the real drivers so the p6spy driver proxies them
deregisterdrivers=true
# Require the jdbc:p6spy: URL prefix
useprefix=true
# Log categories to exclude; removable categories: error,info,batch,debug,statement,commit,rollback,result,resultset
excludecategories=info,debug,result,batch,resultset
# Date format
dateformat=yyyy-MM-dd HH:mm:ss
# Real drivers (several may be listed)
#driverlist=org.h2.Driver
# Enable slow-SQL detection
outagedetection=true
# Slow-SQL threshold, in seconds (2s)
outagedetectioninterval=2
# Enable statement filtering
filter=true
# Statements to skip when logging
exclude=select 1
\ No newline at end of file
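logMessageFormat may point at any class implementing p6spy's MessageFormattingStrategy, which is what the MyBatis-Plus P6SpyLogger referenced above provides. A minimal sketch of a custom formatter, assuming p6spy 3.x (where formatMessage carries the url parameter) and a hypothetical ShortSqlLogger class name:

package cn.datax.service.workflow.demo; // hypothetical class, illustration only

import com.p6spy.engine.spy.appender.MessageFormattingStrategy;

// Compact one-line SQL log: timestamp | elapsed ms | category | statement.
// Wire it in with: logMessageFormat=cn.datax.service.workflow.demo.ShortSqlLogger
public class ShortSqlLogger implements MessageFormattingStrategy {
    @Override
    public String formatMessage(int connectionId, String now, long elapsed,
                                String category, String prepared, String sql, String url) {
        return now + " | " + elapsed + "ms | " + category + " | " + sql;
    }
}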
...@@ -64,8 +64,8 @@ public class ${table.controllerName} {
     @ApiImplicitParam(name = "id", value = "ID", required = true, dataType = "String", paramType = "path")
     @GetMapping("/{id}")
     public R get${className}ById(@PathVariable String id) {
-        ${className}Vo ${classNameLower}Vo = ${classNameLower}Service.get${className}ById(id);
-        return R.ok().setData(${classNameLower}Vo);
+        ${entity} ${classNameLower}Entity = ${classNameLower}Service.get${className}ById(id);
+        return R.ok().setData(${classNameLower}Mapper.toVO(${classNameLower}Entity));
     }
     /**
......
 package ${package.Service};
 import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.entity.${entity};
-import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.vo.${className}Vo;
 import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.dto.${className}Dto;
 import ${superServiceClassPackage};
...@@ -19,7 +18,7 @@ public interface ${table.serviceName} extends ${superServiceClass}<${entity}> {
     void update${className}(${className}Dto ${classNameLower});
-    ${className}Vo get${className}ById(String id);
+    ${entity} get${className}ById(String id);
     void delete${className}ById(String id);
 }
 package ${package.ServiceImpl};
 import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.entity.${entity};
-import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.vo.${className}Vo;
 import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.api.dto.${className}Dto;
 import ${package.Service}.${table.serviceName};
 import ${cfg.PackageParent}#if(${package.ModuleName}).${package.ModuleName}#end.mapstruct.${className}Mapper;
...@@ -45,9 +44,9 @@ public class ${table.serviceImplName} extends ${superServiceImplClass}<${table.m
     }
     @Override
-    public ${className}Vo get${className}ById(String id) {
+    public ${entity} get${className}ById(String id) {
         ${entity} ${classNameLower}Entity = super.getById(id);
-        return ${classNameLower}Mapper.toVO(${classNameLower}Entity);
+        return ${classNameLower}Entity;
     }
     @Override
......
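Net effect of these template changes: the generated service layer returns the entity, and VO mapping moves to the controller edge. For a hypothetical User table (placeholder names, not generated by this commit), the controller method would expand to roughly:

// Illustrative expansion of the template above for a "User" table
@GetMapping("/{id}")
public R getUserById(@PathVariable String id) {
    UserEntity userEntity = userService.getUserById(id);  // service now returns the entity
    return R.ok().setData(userMapper.toVO(userEntity));   // MapStruct maps entity -> VO in the controller
}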