Merge branch 'dev' of gitee.com:septemyang/RuoYi into bo_dev

This commit is contained in:
Bo 2021-08-13 11:40:36 +00:00 committed by Gitee
commit a6ae06dc31
19 changed files with 763 additions and 85 deletions

View File

@ -115,6 +115,19 @@
<artifactId>jtds</artifactId>
<version>1.2.4</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
</dependency>
<!--
<dependency>
<groupId>org.apache.poi</groupId>

View File

@ -0,0 +1,195 @@
package com.ruoyi.kettle.config;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.lettuce.core.cluster.ClusterClientOptions;
import io.lettuce.core.cluster.ClusterTopologyRefreshOptions;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.data.redis.RedisProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisNode;
import org.springframework.data.redis.connection.RedisStandaloneConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import org.springframework.data.redis.connection.lettuce.LettucePoolingClientConfiguration;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import java.time.Duration;
import java.util.HashSet;
import java.util.Set;
/**
* @author zh
* @date 2020/12/1 16:49
*/
@Configuration
public class RedisConfig {
/** Redis server address */
@Value("${spring.redis.host}")
private String host;
/** Redis port */
@Value("${spring.redis.port}")
private int port;
/** Redis server password */
@Value("${spring.redis.password}")
private String password;
/** Maximum number of connections in the Redis pool (a negative value means no limit) */
@Value("${spring.redis.lettuce.pool.max-active}")
private int maxActive;
/** Maximum number of idle connections in the Redis pool */
@Value("${spring.redis.lettuce.pool.max-idle}")
private int maxIdle;
/** Minimum number of idle connections in the Redis pool */
@Value("${spring.redis.lettuce.pool.min-idle}")
private int minIdle;
/** Maximum blocking wait time for the Redis pool (a negative value means no limit) */
@Value("${spring.redis.lettuce.pool.max-wait}")
private int maxWait;
/** Redis database index (default 0) */
@Value("${spring.redis.database}")
private int database;
/** Redis timeout */
@Value("${spring.redis.timeout}")
private int timeout;
@Autowired
private RedisProperties redisProperties;
//This is the standard template configuration
//Defines a custom RedisTemplate
@Bean
@SuppressWarnings("all")
public RedisTemplate<String, Object> redisTemplate(@Qualifier("lettuceConnectionFactoryUvPv") RedisConnectionFactory factory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(factory);
//JSON serialization configuration
Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<Object>(Object.class);
ObjectMapper om = new ObjectMapper();
om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
//Enable default typing once via the non-deprecated overload (replaces the deprecated enableDefaultTyping call)
om.activateDefaultTyping(om.getPolymorphicTypeValidator(), ObjectMapper.DefaultTyping.NON_FINAL);
//Do not fail on unknown properties during deserialization
om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
jackson2JsonRedisSerializer.setObjectMapper(om);
//String serializer
StringRedisSerializer stringRedisSerializer = new StringRedisSerializer();
//Keys are serialized as plain strings
template.setKeySerializer(stringRedisSerializer);
//Hash keys are serialized as plain strings as well
template.setHashKeySerializer(stringRedisSerializer);
//Values are serialized as JSON via Jackson
template.setValueSerializer(jackson2JsonRedisSerializer);
//Hash values are serialized as JSON via Jackson
template.setHashValueSerializer(jackson2JsonRedisSerializer);
template.afterPropertiesSet();
return template;
}
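A minimal usage sketch of the template defined above (illustrative only, not part of this commit; the service name and key prefix are assumptions):
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
@Service
public class CacheDemoService {
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;
    public void cacheJobStatus(Long jobId, String status) {
        // Keys are written as plain strings; values are serialized to JSON by the Jackson2JsonRedisSerializer configured above.
        redisTemplate.opsForValue().set("kettle:job:" + jobId, status, 10, TimeUnit.MINUTES);
    }
}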
/**
* Configures the Redis connection factory implementation used by RedisTemplate.
* LettuceConnectionFactory implements the RedisConnectionFactory interface.
* Dedicated to the UV/PV Redis instance.
*
* @return the LettuceConnectionFactory
*/
@Bean(destroyMethod = "destroy")
//Note: when building the LettuceConnectionFactory, not using the built-in destroyMethod may cause the Redis connection to be destroyed before other beans
public LettuceConnectionFactory lettuceConnectionFactoryUvPv() throws Exception {
// List<String> clusterNodes = redisProperties.getCluster().getNodes();
// Set<RedisNode> nodes = new HashSet<>();
// clusterNodes.forEach(address -> nodes.add(new RedisNode(address.split(":")[0].trim(), Integer.parseInt(address.split(":")[1]))));
// RedisClusterConfiguration clusterConfiguration = new RedisClusterConfiguration();
// clusterConfiguration.setClusterNodes(nodes);
// clusterConfiguration.setPassword(RedisPassword.of(redisProperties.getPassword()));
// clusterConfiguration.setMaxRedirects(redisProperties.getCluster().getMaxRedirects());
//Standalone Redis is used here; for a Redis cluster, use the commented-out code above
Set<RedisNode> nodes = new HashSet<>();
nodes.add(new RedisNode(redisProperties.getHost(), redisProperties.getPort()));
RedisStandaloneConfiguration redisStandaloneConfiguration=new RedisStandaloneConfiguration();
redisStandaloneConfiguration.setHostName(redisProperties.getHost());
redisStandaloneConfiguration.setPassword(redisProperties.getPassword());
redisStandaloneConfiguration.setDatabase(redisProperties.getDatabase());
redisStandaloneConfiguration.setPort(redisProperties.getPort());
GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
poolConfig.setMaxIdle(redisProperties.getLettuce().getPool().getMaxIdle());
poolConfig.setMinIdle(redisProperties.getLettuce().getPool().getMinIdle());
poolConfig.setMaxTotal(redisProperties.getLettuce().getPool().getMaxActive());
return new LettuceConnectionFactory(redisStandaloneConfiguration, getLettuceClientConfiguration(poolConfig));
}
/**
* Builds the LettuceClientConfiguration, including connection-pool and security settings.
*
* @param genericObjectPoolConfig commons-pool2 pool configuration
* @return lettuceClientConfiguration
*/
private LettuceClientConfiguration getLettuceClientConfiguration(GenericObjectPoolConfig genericObjectPoolConfig) {
/*
ClusterTopologyRefreshOptions enables adaptive and periodic topology refresh; if adaptive refresh is not enabled, Redis cluster topology changes will cause connection errors.
*/
ClusterTopologyRefreshOptions topologyRefreshOptions = ClusterTopologyRefreshOptions.builder()
//Enable adaptive refresh
//.enableAdaptiveRefreshTrigger(ClusterTopologyRefreshOptions.RefreshTrigger.MOVED_REDIRECT, ClusterTopologyRefreshOptions.RefreshTrigger.PERSISTENT_RECONNECTS)
//Enable all adaptive refresh triggers (MOVED, ASK and PERSISTENT_RECONNECTS all trigger a refresh)
.enableAllAdaptiveRefreshTriggers()
// Adaptive refresh trigger timeout (disabled by default; 30 seconds once enabled)
.adaptiveRefreshTriggersTimeout(Duration.ofSeconds(25))
// Enable periodic refresh (disabled by default; 60 seconds once enabled, see ClusterTopologyRefreshOptions.DEFAULT_REFRESH_PERIOD; .enablePeriodicRefresh(Duration.ofSeconds(2)) = .enablePeriodicRefresh().refreshPeriod(Duration.ofSeconds(2)))
.enablePeriodicRefresh(Duration.ofSeconds(20))
.build();
return LettucePoolingClientConfiguration.builder()
.poolConfig(genericObjectPoolConfig)
.clientOptions(ClusterClientOptions.builder().topologyRefreshOptions(topologyRefreshOptions).build())
//Pass the app ID as the client name so the connection can be identified in Redis monitoring
//.clientName(appName + "_lettuce")
.build();
}
@Bean
public JedisPool jedisPool() {
JedisPool jedisPool = new JedisPool(getRedisConfig(), host, port, timeout,password);
return jedisPool;
}
@Bean
public JedisPoolConfig getRedisConfig(){
JedisPoolConfig config = new JedisPoolConfig();
config.setMaxTotal(maxActive);
config.setMaxIdle(maxIdle);
config.setMinIdle(minIdle);
config.setMaxWaitMillis(maxWait);
return config;
}
}
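A small sketch (assumed caller, not part of this commit) of borrowing a connection from the jedisPool() bean above; Jedis implements Closeable, so try-with-resources returns the connection to the pool:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
@Component
public class RedisPingDemo {
    @Autowired
    private JedisPool jedisPool;
    public String ping() {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.ping(); // "PONG" when the server is reachable
        }
    }
}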

View File

@ -162,9 +162,9 @@ public class KettleTransController extends BaseController
@RequiresPermissions("kettle:trans:run")
@PostMapping("/run")
@ResponseBody
public AjaxResult run(KettleTrans trans)
public AjaxResult runToQueue(KettleTrans trans)
{
AjaxResult result = kettleTransService.run(trans);
AjaxResult result = kettleTransService.runToQueue(trans);
return result;
}

View File

@ -72,10 +72,16 @@ public class KettleJob extends BaseEntity
/** */
@Excel(name = "")
private String tplKey;
@Excel(name = "最后一次成功时间")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date lastSucceedTime;
public void setId(Long id)
{
this.id = id;
public Date getLastSucceedTime() {
return lastSucceedTime;
}
public void setLastSucceedTime(Date lastSucceedTime) {
this.lastSucceedTime = lastSucceedTime;
}
public Long getId()

View File

@ -68,6 +68,17 @@ public class KettleTrans extends BaseEntity
@Excel(name = "可执行角色key")
private String roleKey;
@Excel(name = "最后一次成功时间")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date lastSucceedTime;
public Date getLastSucceedTime() {
return lastSucceedTime;
}
public void setLastSucceedTime(Date lastSucceedTime) {
this.lastSucceedTime = lastSucceedTime;
}
public void setId(Long id)
{
this.id = id;
@ -217,4 +228,5 @@ public class KettleTrans extends BaseEntity
.append("remark", getRemark())
.toString();
}
}

View File

@ -68,4 +68,6 @@ public interface IKettleJobService
Long checkQuartzExist(String checkStr);
public AjaxResult runJobQuartz(String id, String jobName);
void runJobRightNow(Long valueOf, String userId);
}

View File

@ -67,7 +67,7 @@ public interface IKettleTransService
* @param trans :
* @return: void
**/
AjaxResult run(KettleTrans trans);
AjaxResult runToQueue(KettleTrans trans);
List<String> queryTransLog(KettleTrans trans) ;
/**
@ -81,4 +81,6 @@ public interface IKettleTransService
public AjaxResult runTransQuartz(String id,String transName);
Long checkQuartzExist(String checkStr);
void runTransRightNow(Long valueOf, String userId);
}

View File

@ -2,16 +2,20 @@ package com.ruoyi.kettle.service.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.ruoyi.common.core.domain.AjaxResult;
import com.ruoyi.common.core.domain.entity.SysRole;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.security.PermissionUtils;
import com.ruoyi.kettle.domain.KettleTrans;
import com.ruoyi.kettle.domain.XRepository;
import com.ruoyi.kettle.mapper.XRepositoryMapper;
import com.ruoyi.kettle.tools.KettleUtil;
import com.ruoyi.kettle.tools.RedisStreamUtil;
import com.ruoyi.system.service.IWechatApiService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ruoyi.kettle.mapper.KettleJobMapper;
@ -28,6 +32,7 @@ import com.ruoyi.common.core.text.Convert;
@Service("kettleJobServiceImpl")
public class KettleJobServiceImpl implements IKettleJobService
{
private static final Logger log = LoggerFactory.getLogger(KettleJobServiceImpl.class);
@Autowired
private KettleJobMapper kettleJobMapper;
@Autowired
@ -36,6 +41,11 @@ public class KettleJobServiceImpl implements IKettleJobService
@Autowired
private KettleUtil kettleUtil;
@Autowired
private RedisStreamUtil redisStreamUtil;
@Autowired
IWechatApiService wechatApiService;
/**
* 查询作业调度
*
@ -84,11 +94,14 @@ public class KettleJobServiceImpl implements IKettleJobService
}
String userName = (String) PermissionUtils.getPrincipalProperty("userName");
if(kettleJob.getRoleKey()==null){
kettleJob.setRoleKey("admin");
kettleJob.setRoleKey("admin,bpsadmin");
}else{
if(!kettleJob.getRoleKey().contains("admin")){
kettleJob.setRoleKey(kettleJob.getRoleKey().concat(",admin"));
}
if(!kettleJob.getRoleKey().contains("bpsadmin")){
kettleJob.setRoleKey(kettleJob.getRoleKey().concat(",bpsadmin"));
}
}
kettleJob.setCreatedBy(userName);
kettleJob.setUpdateBy(userName);
@ -105,7 +118,21 @@ public class KettleJobServiceImpl implements IKettleJobService
@Override
public int updateKettleJob(KettleJob kettleJob)
{
String userName = (String) PermissionUtils.getPrincipalProperty("userName");
kettleJob.setUpdateTime(DateUtils.getNowDate());
kettleJob.setUpdateBy(userName);
kettleJob.setJobType("File");
if(kettleJob.getRoleKey()==null){
kettleJob.setRoleKey("admin,bpsadmin");
}else{
if(!kettleJob.getRoleKey().contains("admin")){
kettleJob.setRoleKey(kettleJob.getRoleKey().concat(",admin"));
}
if(!kettleJob.getRoleKey().contains("bpsadmin")){
kettleJob.setRoleKey(kettleJob.getRoleKey().concat(",bpsadmin"));
}
}
return kettleJobMapper.updateKettleJob(kettleJob);
}
@ -144,20 +171,63 @@ public class KettleJobServiceImpl implements IKettleJobService
if(repository==null){
return AjaxResult.error("资源库不存在!");
}
String path = kettleJob.getJobPath();
try {
kettleUtil.KETTLE_LOG_LEVEL=kettleJob.getJobLogLevel();
kettleUtil.KETTLE_REPO_ID=String.valueOf(kettleJob.getJobRepositoryId());
kettleUtil.KETTLE_REPO_NAME=repository.getRepoName();
kettleUtil.KETTLE_REPO_PATH=repository.getBaseDir();
kettleUtil.callJob(path,kettleJob.getJobName(),null,null);
} catch (Exception e) {
e.printStackTrace();
//Add the job to the queue and wait for execution
redisStreamUtil.addKettleJob(kettleJob);
//Update the status
kettleJob.setJobStatus("等待中");
kettleJobMapper.updateKettleJob(kettleJob);
return AjaxResult.success("已加入执行队列,请等待运行结果通知!");
// String path = kettleJob.getJobPath();
// try {
// kettleUtil.KETTLE_LOG_LEVEL=kettleJob.getJobLogLevel();
// kettleUtil.KETTLE_REPO_ID=String.valueOf(kettleJob.getJobRepositoryId());
// kettleUtil.KETTLE_REPO_NAME=repository.getRepoName();
// kettleUtil.KETTLE_REPO_PATH=repository.getBaseDir();
// kettleUtil.callJob(path,kettleJob.getJobName(),null,null);
// } catch (Exception e) {
// e.printStackTrace();
// }
//
}
@Override
public void runJobRightNow(Long id, String userId) {
KettleJob kettleJob = kettleJobMapper.selectKettleJobById(id);
if(kettleJob ==null){
log.error("作业不存在!");
return;
}
XRepository repository=repositoryMapper.selectXRepositoryById(kettleJob.getJobRepositoryId());
if(repository==null){
log.error("资源库不存在!");
return;
}
return AjaxResult.success("执行成功!"); }
//Update the status
kettleJob.setJobStatus("运行中");
kettleJobMapper.updateKettleJob(kettleJob);
StringBuilder title = new StringBuilder(kettleJob.getJobName()).append(".kjb 执行结果:");
StringBuilder msg = new StringBuilder(kettleJob.getJobName()).append(".kjb 执行结果:");
try {
kettleUtil.callJob(kettleJob,repository,null,null);
kettleJob.setJobStatus("成功");
kettleJob.setLastSucceedTime(DateUtils.getNowDate());
kettleJobMapper.updateKettleJob(kettleJob);
title.append("成功!");
msg.append("成功!");
} catch (Exception e) {
kettleJob.setJobStatus("异常");
kettleJobMapper.updateKettleJob(kettleJob);
title.append("异常!");
msg.append("异常!");
e.printStackTrace();
}
List<String> userIdList = new ArrayList<>();
userIdList.add(userId);
Map<String, String> resultMap = wechatApiService.SendTextCardMessageToWechatUser(userIdList,title.toString(),msg.toString(),"http://report.bpsemi.cn:8081/it_war");
log.info("job微信消息发送结果"+resultMap);
}
@Override
public List<String> queryJobLog(KettleJob kettleJob) {
List<String> logs=kettleJobMapper.queryJobLog(kettleJob.getJobName());
@ -173,4 +243,5 @@ public class KettleJobServiceImpl implements IKettleJobService
KettleJob kettleJob = kettleJobMapper.selectKettleJobById(Long.valueOf(id));
return run(kettleJob);
}
}

View File

@ -2,6 +2,7 @@ package com.ruoyi.kettle.service.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.ruoyi.common.core.domain.AjaxResult;
@ -12,6 +13,10 @@ import com.ruoyi.kettle.domain.XRepository;
import com.ruoyi.kettle.mapper.XRepositoryMapper;
import com.ruoyi.kettle.service.IKettleTransService;
import com.ruoyi.kettle.tools.KettleUtil;
import com.ruoyi.kettle.tools.RedisStreamUtil;
import com.ruoyi.system.service.IWechatApiService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ruoyi.kettle.mapper.KettleTransMapper;
@ -27,6 +32,8 @@ import com.ruoyi.common.core.text.Convert;
@Service("kettleTransServiceImpl")
public class KettleTransServiceImpl implements IKettleTransService
{
private static final Logger log = LoggerFactory.getLogger(KettleTransServiceImpl.class);
@Autowired
private KettleTransMapper kettleTransMapper;
@Autowired
@ -35,6 +42,10 @@ public class KettleTransServiceImpl implements IKettleTransService
@Autowired
private KettleUtil kettleUtil;
@Autowired
private RedisStreamUtil redisStreamUtil;
@Autowired
IWechatApiService wechatApiService;
/**
* 查询转换
*
@ -86,11 +97,14 @@ public class KettleTransServiceImpl implements IKettleTransService
}
String userName = (String) PermissionUtils.getPrincipalProperty("userName");
if(kettleTrans.getRoleKey()==null){
kettleTrans.setRoleKey("admin");
kettleTrans.setRoleKey("admin,bpsadmin");
}else{
if(!kettleTrans.getRoleKey().contains("admin")){
kettleTrans.setRoleKey(kettleTrans.getRoleKey().concat(",admin"));
}
if(!kettleTrans.getRoleKey().contains("bpsadmin")){
kettleTrans.setRoleKey(kettleTrans.getRoleKey().concat(",bpsadmin"));
}
}
kettleTrans.setCreatedBy(userName);
kettleTrans.setUpdateBy(userName);
@ -112,11 +126,14 @@ public class KettleTransServiceImpl implements IKettleTransService
kettleTrans.setUpdateTime(DateUtils.getNowDate());
kettleTrans.setTransType("File");
if(kettleTrans.getRoleKey()==null){
kettleTrans.setRoleKey("admin");
kettleTrans.setRoleKey("admin,bpsadmin");
}else{
if(!kettleTrans.getRoleKey().contains("admin")){
kettleTrans.setRoleKey(kettleTrans.getRoleKey().concat(",admin"));
}
if(!kettleTrans.getRoleKey().contains("bpsadmin")){
kettleTrans.setRoleKey(kettleTrans.getRoleKey().concat(",bpsadmin"));
}
} return kettleTransMapper.updateKettleTrans(kettleTrans);
}
@ -146,36 +163,67 @@ public class KettleTransServiceImpl implements IKettleTransService
/**
* @Description: Run the transformation once immediately
* @Description: Run the transformation once immediately by placing it in the Redis queue
* @Author: Kone.wang
* @Date: 2021/7/15 14:31
* @param trans :
* @return: void
**/
@Override
public AjaxResult run(KettleTrans trans) {
public AjaxResult runToQueue(KettleTrans trans) {
Long id = trans.getId();
KettleTrans kettleTrans = kettleTransMapper.selectKettleTransById(id);
if(kettleTrans ==null){
if(kettleTrans ==null || kettleTrans.getId()==null){
return AjaxResult.error("转换不存在!");
}
XRepository repository=repositoryMapper.selectXRepositoryById(kettleTrans.getTransRepositoryId());
if(repository==null){
return AjaxResult.error("资源库不存在!");
}
String path = kettleTrans.getTransPath();
try {
kettleUtil.KETTLE_LOG_LEVEL=kettleTrans.getTransLogLevel();
kettleUtil.KETTLE_REPO_ID=String.valueOf(kettleTrans.getTransRepositoryId());
kettleUtil.KETTLE_REPO_NAME=repository.getRepoName();
kettleUtil.KETTLE_REPO_PATH=repository.getBaseDir();
kettleUtil.callTrans(path,kettleTrans.getTransName(),null,null);
} catch (Exception e) {
e.printStackTrace();
//Add the trans to the queue and wait for execution
redisStreamUtil.addKettleTrans(kettleTrans);
//Update the status
trans.setTransStatus("等待中");
kettleTransMapper.updateKettleTrans(trans);
return AjaxResult.success("已加入执行队列,请等待运行结果通知!");
}
@Override
public void runTransRightNow(Long id, String userId) {
KettleTrans kettleTrans = kettleTransMapper.selectKettleTransById(id);
if(kettleTrans ==null || kettleTrans.getId()==null){
log.error("转换不存在!:"+id);
return;
}
XRepository repository=repositoryMapper.selectXRepositoryById(kettleTrans.getTransRepositoryId());
if(repository==null){
log.error("资源库不存在!");
return;
}
//Update the status to "running"
kettleTrans.setTransStatus("运行中");
kettleTransMapper.updateKettleTrans(kettleTrans);
StringBuilder title = new StringBuilder(kettleTrans.getTransName()).append(".ktr 执行结果:");
StringBuilder msg = new StringBuilder(kettleTrans.getTransName()).append(".ktr 执行结果:");
try {
kettleUtil.callTrans(kettleTrans,repository,null,null);
kettleTrans.setTransStatus("成功");
kettleTrans.setLastSucceedTime(DateUtils.getNowDate());
kettleTransMapper.updateKettleTrans(kettleTrans);
title.append("成功!");
msg.append("成功!");
} catch (Exception e) {
kettleTrans.setTransStatus("异常");
kettleTransMapper.updateKettleTrans(kettleTrans);
title.append("异常!");
msg.append("异常!");
log.error(id+"的trans执行失败:"+e.getMessage());
}
List<String> userIdList = new ArrayList<>();
userIdList.add(userId);
Map<String, String> resultMap = wechatApiService.SendTextCardMessageToWechatUser(userIdList,title.toString(),msg.toString(),"http://report.bpsemi.cn:8081/it_war");
log.info("trans微信消息发送结果"+resultMap);
return AjaxResult.success("执行成功!");
}
/**
* @Description: Query the transformation execution log
@ -200,7 +248,7 @@ public class KettleTransServiceImpl implements IKettleTransService
@Override
public AjaxResult runTransQuartz(String id, String transName) {
KettleTrans kettleTrans = kettleTransMapper.selectKettleTransById(Long.valueOf(id));
return run(kettleTrans);
return runToQueue(kettleTrans);
}
/**
* @Description: Check whether a scheduled task is configured for this transformation
@ -214,4 +262,5 @@ public class KettleTransServiceImpl implements IKettleTransService
return kettleTransMapper.checkQuartzExist(checkStr);
}
}

View File

@ -0,0 +1,27 @@
package com.ruoyi.kettle.tools;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
@Component
public class CommandLineRunnerImpl implements CommandLineRunner {
@Autowired
private RedisStreamUtil redisStreamUtil;
@Override
public void run(String... args) throws Exception {
new Thread(){
public void run() {
redisStreamUtil.readGroup();
}
}.start();
}
}
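The runner above launches the blocking readGroup() loop on an anonymous thread. A hedged variant (the thread name is an assumption) that names the thread and marks it as a daemon, so the consumer shows up clearly in thread dumps and does not block JVM shutdown:
@Override
public void run(String... args) {
    // Same behaviour as above, but on a named daemon consumer thread.
    Thread consumer = new Thread(redisStreamUtil::readGroup, "kettle-stream-consumer");
    consumer.setDaemon(true);
    consumer.start();
}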

View File

@ -1,6 +1,9 @@
package com.ruoyi.kettle.tools;
import com.ruoyi.common.config.datasource.DynamicDataSourceContextHolder;
import com.ruoyi.kettle.domain.KettleJob;
import com.ruoyi.kettle.domain.KettleTrans;
import com.ruoyi.kettle.domain.XRepository;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
@ -22,32 +25,28 @@ import org.springframework.stereotype.Component;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@Component
public class KettleUtil {
public static final Logger log = LoggerFactory.getLogger(KettleUtil.class);
public String KETTLE_LOG_LEVEL = "basic";
public String KETTLE_REPO_ID = "2";
public String KETTLE_REPO_NAME = "koneTest";
public String KETTLE_REPO_DESC = "DESC";
public String KETTLE_REPO_PATH = "D:\\etl";
/**
* Runs a transformation from the file repository
* @param transPath transformation path, relative to the repository
* @param transName transformation name, without the file extension
* @param namedParams named parameters
* @param clParams command-line parameters
*/
public void callTrans(String transPath, String transName, Map<String,String> namedParams, String[] clParams) throws Exception {
public void callTrans(KettleTrans kettleTrans, XRepository xrepository, Map<String, String> namedParams, String[] clParams) throws Exception {
KettleEnv.init();
DatabaseMeta databaseMeta=new DatabaseMeta("kettle_trans_log", "mysql", "Native(JDBC)",
"xxx.xxx.x.xx","bps?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8", "3306", "root", "password");
"192.168.2.18","bps?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8", "3306", "root", "abc.123");
String msg;
KettleFileRepository repo = this.fileRepositoryCon();
TransMeta transMeta = this.loadTrans(repo, transPath, transName);
KettleFileRepository repo = this.fileRepositoryCon(xrepository);
TransMeta transMeta = this.loadTrans(repo, kettleTrans.getTransPath(), kettleTrans.getTransName());
transMeta.addDatabase(databaseMeta);
VariableSpace space=new Variables();
@ -69,7 +68,7 @@ public class KettleUtil {
trans.setParameterValue(entry.getKey(), entry.getValue());
}
}
trans.setLogLevel(this.getLogerLevel(KETTLE_LOG_LEVEL));
trans.setLogLevel(this.getLogerLevel(kettleTrans.getTransLogLevel()));
//Execute
trans.execute(clParams);
trans.waitUntilFinished();
@ -87,20 +86,20 @@ public class KettleUtil {
log.error(msg);
throw new Exception(msg);
}
TimeUnit.SECONDS.sleep(10);
}
/**
* Runs a job from the file repository
* @param jobName
* @throws Exception
*/
public boolean callJob(String jobPath, String jobName, Map<String,String> variables, String[] clParams) throws Exception {
public boolean callJob(KettleJob kettleJob,XRepository xRepository, Map<String,String> variables, String[] clParams) throws Exception {
KettleEnv.init();
String msg;
DatabaseMeta databaseMeta=new DatabaseMeta("kettle_job_log", "mysql", "Native(JDBC)",
"xxx.xxx.x.xx","bps?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8", "3306", "root", "password");
KettleFileRepository repo = this.fileRepositoryCon();
JobMeta jobMeta = this.loadJob(repo, jobPath, jobName);
"192.168.2.18","bps?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8", "3306", "root", "abc.123");
KettleFileRepository repo = this.fileRepositoryCon(xRepository);
JobMeta jobMeta = this.loadJob(repo, kettleJob.getJobPath(), kettleJob.getJobName());
jobMeta.addDatabase(databaseMeta);
VariableSpace space=new Variables();
space.setVariable("test","fromDbName");
@ -118,7 +117,7 @@ public class KettleUtil {
}
}
//Set the log level
job.setLogLevel(this.getLogerLevel(KETTLE_LOG_LEVEL));
job.setLogLevel(this.getLogerLevel(kettleJob.getJobLogLevel()));
job.setArguments(clParams);
job.start();
job.waitUntilFinished();
@ -155,13 +154,6 @@ public class KettleUtil {
}
return transMeta;
}
/**
* Loads a job
* @param repo the Kettle file repository
@ -212,9 +204,9 @@ public class KettleUtil {
* @param jobName
* @throws Exception
*/
public void callNativeJob(String jobName) throws Exception {
/* public void callNativeJob(String jobName) throws Exception {
// Initialization
/*KettleEnvironment.init();*/
*//*KettleEnvironment.init();*//*
JobMeta jobMeta = new JobMeta(jobName, null);
Job job = new Job(null, jobMeta);
@ -227,7 +219,7 @@ public class KettleUtil {
if (job.getErrors() > 0) {
throw new Exception("There are errors during job exception!(执行job发生异常)");
}
}
}*/
/**
* Gets the Kettle log level
@ -257,14 +249,14 @@ public class KettleUtil {
/**
* Configures the Kettle file repository environment
**/
public KettleFileRepository fileRepositoryCon() throws KettleException {
public KettleFileRepository fileRepositoryCon(XRepository xRepository) throws KettleException {
String msg;
//Initialization
/*EnvUtil.environmentInit();
KettleEnvironment.init();*/
//Repository meta object
KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(this.KETTLE_REPO_ID, this.KETTLE_REPO_NAME, this.KETTLE_REPO_DESC, this.KETTLE_REPO_PATH);
KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(String.valueOf(xRepository.getId()), xRepository.getRepoName(), xRepository.getRemark(), xRepository.getBaseDir());
// File-based repository
KettleFileRepository repo = new KettleFileRepository();
repo.init(fileRepositoryMeta);
@ -272,11 +264,11 @@ public class KettleUtil {
repo.connect("", "");//Default username and password for connecting to the repository
if (repo.isConnected()) {
msg = "kettle文件库资源库【" + KETTLE_REPO_PATH + "】连接成功";
msg = "kettle文件库资源库【" + xRepository.getBaseDir() + "】连接成功";
log.info(msg);
return repo;
} else {
msg = "kettle文件库资源库【" + KETTLE_REPO_PATH + "】连接失败";
msg = "kettle文件库资源库【" + xRepository.getBaseDir() + "】连接失败";
log.error(msg);
throw new KettleException(msg);
}

View File

@ -0,0 +1,258 @@
package com.ruoyi.kettle.tools;
import com.ruoyi.common.utils.security.PermissionUtils;
import com.ruoyi.kettle.domain.KettleJob;
import com.ruoyi.kettle.domain.KettleTrans;
import com.ruoyi.kettle.service.IKettleJobService;
import com.ruoyi.kettle.service.IKettleTransService;
import com.ruoyi.system.service.ISysConfigService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.StreamEntry;
import redis.clients.jedis.StreamEntryID;
import java.net.InetAddress;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* @Description:
* After installing Redis, first create a stream: XADD koneStream * user kang msg Hello
* Then read it off: XREAD streams koneStream 0
* Finally create a consumer group for this stream: XGROUP CREATE koneStream koneGroup 0
* @Author: Kone.wang
* @Date: 2021/8/10 13:19
**/
@Component
public class RedisStreamUtil {
private static final Logger log = LoggerFactory.getLogger(RedisStreamUtil.class);
String koneConsumer="bpsemi_consumer";
//
// @Value("${stream.key}")
// String koneStream ;
// @Value("${stream.group}")
// String koneGroup ;
@Value("${spring.redis.timeout}")
Long waitTIme;
@Autowired
private JedisPool jedisPool;
@Autowired
private IKettleTransService transService;
@Autowired
private IKettleJobService jobService;
@Autowired
private ISysConfigService configService;
/**
* @Description: Push a trans onto the queue
* @Author: Kone.wang
* @Date: 2021/8/6 13:50
* @param trans:
* @return: com.ruoyi.common.core.domain.AjaxResult
**/
public void addKettleTrans(KettleTrans trans) {
//Get the host IP
String localAddr = configService.selectConfigByKey("sys.local.addr");
localAddr =localAddr!=null?localAddr:"192.168.2.84";
String koneStream="bpsemi_test";
try{
InetAddress addr = InetAddress.getLocalHost();
String address = addr.getHostAddress();
if(address.equals(localAddr)){
koneStream="bpsemi";
}
}catch (Exception e){
log.error("addKettleTrans()获取主机ip异常:"+e);
}
String transName=trans.getTransName();
Long trandId = trans.getId();
//This throws when executed from a scheduled task, so catch it and fall back to a default user
String userId ="";
try{
userId = String.valueOf(PermissionUtils.getPrincipalProperty("userId"));
}catch (Exception e){
log.warn("定时任务执行的,默认发送给天宁吧408");
userId="454";
}
log.info(userId+"开始增加:trans_"+trandId+"@"+userId+":::"+transName);
//More fields can be added here
Map<String,String> map = new HashMap<String,String>();
map.put("trans_"+trandId+"@"+userId, transName);
Jedis jedis = jedisPool.getResource();
try{
StreamEntryID id =jedis.xadd(koneStream, new StreamEntryID().NEW_ENTRY, map);
log.info(userId+"成功增加:trans_"+trandId+"@"+userId+":::"+transName+"[StreamEntryID:"+id+"]");
}catch (Exception e){
log.error(userId+"失败增加:trans"+trandId+"@"+userId+":::"+transName+"]");
}finally {
if (jedis != null) {
try {
jedis.close();
} catch (Exception e) {
}
}
}
}
/**
* @Description: Push a job onto the queue
* @Author: Kone.wang
* @Date: 2021/8/6 13:50
* @param job:
* @return: com.ruoyi.common.core.domain.AjaxResult
**/
public void addKettleJob(KettleJob job) {
//Get the host IP
String localAddr = configService.selectConfigByKey("sys.local.addr");
localAddr =localAddr!=null?localAddr:"192.168.2.84";
String koneStream="bpsemi_test";
try{
InetAddress addr = InetAddress.getLocalHost();
String address = addr.getHostAddress();
if(address.equals(localAddr)){
koneStream="bpsemi";
}
}catch (Exception e){
log.error("addKettleJob()获取主机ip异常:"+e);
}
String jobName=job.getJobName();
Long jobId = job.getId();
String userId ="";
try{
userId = String.valueOf(PermissionUtils.getPrincipalProperty("userId"));
}catch (Exception e){
log.warn("定时任务执行的,默认发送给天宁吧408");
userId="454";
}
log.info(userId+"开始增加:job_"+jobId+"@"+userId+":::"+jobName);
//More fields can be added here
Map<String,String> map = new HashMap<String,String>();
map.put("job_"+jobId+"@"+userId, jobName);
Jedis jedis = jedisPool.getResource();
try{
StreamEntryID id = jedis.xadd(koneStream, new StreamEntryID().NEW_ENTRY, map);
log.info(userId+"成功增加:job_"+jobId+"@"+userId+":::"+jobName+"[StreamEntryID:"+id+"]");
}catch (Exception e){
log.error(userId+"失败增加:job_"+jobId+"@"+userId+":::"+jobName+"]");
}finally {
if (jedis != null) {
try {
jedis.close();
} catch (Exception e) {
}
}
}
}
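addKettleTrans() and addKettleJob() above encode the task type, record id and WeChat user id into the stream field key (for example trans_12@454), and readGroup() below splits the key back apart with substring(). A small sketch of that convention, with hypothetical helper names:
// Illustrative helpers (names are assumptions) for the "<type>_<id>@<userId>" field-key convention used above.
static String buildFieldKey(String type, Long id, String userId) {
    return type + "_" + id + "@" + userId; // e.g. "trans_12@454"
}
static long parseRecordId(String fieldKey) {
    return Long.parseLong(fieldKey.substring(fieldKey.indexOf("_") + 1, fieldKey.indexOf("@")));
}
static String parseUserId(String fieldKey) {
    return fieldKey.substring(fieldKey.indexOf("@") + 1);
}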
/**
* @Description: Continuously read messages from the queue
* @Author: Kone.wang
* @Date: 2021/8/6 13:50
* @return: void
**/
public void readGroup() {
//Get the host IP
String localAddr = configService.selectConfigByKey("sys.local.addr");
localAddr =localAddr!=null?localAddr:"192.168.2.84";
String koneStream="bpsemi_test";
String koneGroup="bpsemi_group_test";
String koneConsumer="bpsemi_consumer";
try{
InetAddress addr = InetAddress.getLocalHost();
String address = addr.getHostAddress();
if(address.equals(localAddr)){
koneStream="bpsemi";
koneGroup="bpsemi_group";
}
}catch (Exception e){
log.error("addKettleJob()获取主机ip异常:"+e);
}
while (true){
Jedis jedis = jedisPool.getResource();
if(jedis ==null){
return;
}else{
try{
Map<String,StreamEntryID> t = new HashMap<String,StreamEntryID>();
List<java.util.Map.Entry<java.lang.String,java.util.List<redis.clients.jedis.StreamEntry>>> list = new ArrayList<java.util.Map.Entry<java.lang.String,java.util.List<redis.clients.jedis.StreamEntry>>>();
t.put(koneStream, null);//null is treated as ">" (read from the beginning); it can also be "$" to accept only new messages, or the ID of the last unread message
Map.Entry<java.lang.String,redis.clients.jedis.StreamEntryID> e = null;
for(Map.Entry<java.lang.String,redis.clients.jedis.StreamEntryID> c:t.entrySet()){
e=c;
}
//With noAck=false the message must be acked manually; with true it is acked automatically. The consumer is created implicitly by xreadGroup
log.info("开始读消息");
try{
list = jedis.xreadGroup(koneGroup, koneConsumer, 1, 30000L, false, e);
}catch (Exception ex){
log.error("超时了!!!!!!!!");
}
log.info("读消息结束!");
if(list ==null){
log.error("读到的list为空");
}else{
for (Map.Entry m : list) {
if (m.getValue() instanceof ArrayList) {
List<StreamEntry> l = (List<StreamEntry>) m.getValue();
Map<String, String> result = l.get(0).getFields();
for (Map.Entry entry : result.entrySet()) {
System.out.println(entry.getKey() + "---" + entry.getValue());
if(entry.getKey() != null){
String key = String.valueOf(entry.getKey());
String value =String.valueOf(entry.getValue());
String id=key.substring(key.indexOf("_")+1,key.indexOf("@"));
String userId=key.substring(key.indexOf("@")+1);
if(key.startsWith("trans_")){
log.info(value+"的trans:开始执行");
transService.runTransRightNow(Long.valueOf(id),userId);
log.info(value+"的trans:结束执行");
}else if(key.startsWith("job_")){
log.info(value+"的job:开始执行");
jobService.runJobRightNow(Long.valueOf(id),userId);
log.info(value+"的job:结束执行");
}
}
}
long id = jedis.xack(koneStream, koneGroup, l.get(0).getID());
log.info("消息消费成功:"+id);
}
}
}
}catch (Exception e){
log.error(e.getMessage());
}finally {
if (jedis != null) {
try {
jedis.close();
} catch (Exception e) {
}
}
}
}
}
}
}
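The class comment above assumes the stream and consumer group were created by hand with XADD / XGROUP CREATE. A hedged sketch (not part of this commit) of doing the same one-time setup from Java with Jedis, using the jedisPool bean; the final MKSTREAM argument creates the stream together with the group if it does not exist yet:
// Illustrative one-time setup for the group that readGroup() consumes from.
public void createConsumerGroupIfMissing() {
    try (Jedis jedis = jedisPool.getResource()) {
        try {
            // true = MKSTREAM: create the stream along with the group when it is missing.
            jedis.xgroupCreate("bpsemi", "bpsemi_group", StreamEntryID.LAST_ENTRY, true);
        } catch (Exception e) {
            // Usually "BUSYGROUP Consumer Group name already exists" - safe to ignore on restart.
        }
    }
}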

View File

@ -21,10 +21,11 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<result property="isMonitorEnabled" column="is_monitor_enabled" />
<result property="roleKey" column="role_key" />
<result property="tplKey" column="tpl_key" />
<result property="lastSucceedTime" column="last_succeed_time" />
</resultMap>
<sql id="selectKettleJobVo">
select id, created_time, update_time, created_by, update_by, job_name, job_description, job_type, job_path, job_repository_id, job_log_level, job_status, is_del, is_monitor_enabled, role_key, tpl_key from kettle_job
select id, created_time, update_time, created_by, update_by, job_name, job_description, job_type, job_path, job_repository_id, job_log_level, job_status, is_del, is_monitor_enabled, role_key, tpl_key,last_succeed_time from kettle_job
</sql>
<select id="selectKettleJobList" parameterType="KettleJob" resultMap="KettleJobResult">
@ -123,6 +124,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<if test="isMonitorEnabled != null">is_monitor_enabled = #{isMonitorEnabled},</if>
<if test="roleKey != null">role_key = #{roleKey},</if>
<if test="tplKey != null">tpl_key = #{tplKey},</if>
<if test="lastSucceedTime != null">last_succeed_time = #{lastSucceedTime},</if>
</trim>
where id = #{id}
</update>

View File

@ -22,10 +22,11 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<result property="tplKey" column="tpl_key" />
<result property="roleKey" column="role_key" />
<result property="remark" column="remark" />
<result property="lastSucceedTime" column="last_succeed_time" />
</resultMap>
<sql id="selectKettleTransVo">
select id, trans_name, trans_description, created_time, update_time, created_by, update_by, trans_type, trans_path, trans_repository_id, trans_log_level, trans_status, is_del, is_monitor_enabled, tpl_key, role_key, remark from kettle_trans
select id, trans_name, trans_description, created_time, update_time, created_by, update_by, trans_type, trans_path, trans_repository_id, trans_log_level, trans_status, is_del, is_monitor_enabled, tpl_key, role_key, remark,last_succeed_time from kettle_trans
</sql>
<select id="selectKettleTransList" parameterType="KettleTrans" resultMap="KettleTransResult">
@ -126,6 +127,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<if test="tplKey != null">tpl_key = #{tplKey},</if>
<if test="roleKey != null">role_key = #{roleKey},</if>
<if test="remark != null">remark = #{remark},</if>
<if test="lastSucceedTime != null">last_succeed_time = #{lastSucceedTime},</if>
</trim>
where id = #{id}
</update>

View File

@ -53,13 +53,27 @@
$("#treeId").val(treeId);
$("#treeName").val(treeName);
$("#transPath").val(path);
var pNodeId = treeId;
if(/^[0-9]+.?[0-9]*$/.test(treeId)){
$("#transRepositoryId").val(treeId);
}
console.log("pathpathpathpath::"+path);
console.log("pathpathpathpath::"+path+"_id:"+treeId);
var treeObj = $.fn.zTree.getZTreeObj( "tree");
var selectedNode = treeObj .getSelectedNodes();
var pNode = selectedNode[0].getParentNode();
while(!!pNode) {
var id = pNode.id;
pNode = pNode.getParentNode();
if(pNode == null){
pNodeId = id;
}
}
$("#transRepositoryId").val(pNodeId);
if(/^[0-9]+.?[0-9]*$/.test(treeId) && !selectedNode[0].isParent){
console.log("进来了");
qryRepoSubTree(treeId);

View File

@ -67,7 +67,8 @@
var quartzFlag=[[${@permission.hasPermi('kettle:job:setquartz')}]];
var prefix = ctx + "kettle/job";
var logLevel = [[${@dict.getType('kettle_log_level')}]];
var jobStatus = [[${@dict.getType('kettle_trans_status')}]];
$(function() {
var options = {
url: prefix + "/list",
@ -110,11 +111,21 @@
},*/
{
field: 'jobLogLevel',
title: '日志级别'
title: '日志级别',
formatter: function(value, row, index) {
return $.table.selectDictLabel(logLevel, value);
}
},
{
field: 'jobStatus',
title: '状态'
title: '状态',
formatter: function(value, row, index) {
return $.table.selectDictLabel(jobStatus, value);
}
},
{
field: 'lastSucceedTime',
title: '最后成功时间',
},
/* {
field: 'isDel',

View File

@ -109,17 +109,21 @@
return $.table.selectDictLabel(transStatus, value);
}
},
{
field: 'lastSucceedTime',
title: '最后成功时间',
},
// {
// field: 'isDel',
// title: '是否删除'
// },
{
field: 'isMonitorEnabled',
title: '是否启用',
formatter: function(value, row, index) {
return value==1?"是":"否";
}
},
// {
// field: 'isMonitorEnabled',
// title: '是否启用',
// formatter: function(value, row, index) {
// return value==1?"是":"否";
// }
// },
// {
// field: 'tplKey',
// title: '保留备用'

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

View File

@ -12,7 +12,6 @@ ruoyi:
profile: C:/bps-it/uploadPath
# Toggle for IP address resolution
addressEnabled: false
# Development environment configuration
server:
# HTTP port of the server; default is 80
@ -76,7 +75,26 @@ spring:
username: administrator@bpsemi.com
password: Bps@2831!
redis:
host: 192.168.2.88
port: 6379
password: "bpsemi2021"
timeout: 30000
database: 2
lettuce:
pool:
max-active: 100
max-idle: 10
min-idle: 0
max-wait: 30000
cluster:
refresh:
adaptive: true
# Automatically refresh every 20 seconds
period: 20
# MyBatis
mybatis:
# Package to scan for type aliases
typeAliasesPackage: com.ruoyi.**.domain
@ -176,4 +194,4 @@ express:
topgp:
webservice:
topprod: http://192.168.2.81:85/web/ws/r/aws_ttsrv2
toptest: http://192.168.2.81:85/web/ws/r/aws_ttsrv2_toptest
toptest: http://192.168.2.81:85/web/ws/r/aws_ttsrv2_toptest