# Required build dependency: org.springframework.boot:spring-boot-starter-quartz
#ID设置为自动获取 每一个必须不同 (所有调度器实例中是唯一的)
org.quartz.scheduler.instanceId=AUTO
#指定调度程序的主线程是否应该是守护线程
org.quartz.scheduler.makeSchedulerThreadDaemon=true
#ThreadPool实现的类名
org.quartz.threadPool.class=org.quartz.simpl.SimpleThreadPool
#ThreadPool配置线程守护进程
org.quartz.threadPool.makeThreadsDaemons=true
#线程数量
org.quartz.threadPool.threadCount=20
#线程优先级
org.quartz.threadPool.threadPriority=5
#数据保存方式为持久化
org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX
#StdJDBCDelegate说明支持集群
org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate
#quartz内部表的前缀
org.quartz.jobStore.tablePrefix=QRTZ_
#是否加入集群
org.quartz.jobStore.isClustered=true
#容许的最大作业延长时间
org.quartz.jobStore.misfireThreshold=25000
package com.config.quartz;
import org.quartz.spi.JobFactory;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SpringBeanJobFactory;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Properties;
/**
* Created by EalenXie on 2018/6/4 11:02
* Quartz的核心配置类
*/
@Configuration
public class ConfigureQuartz {
//配置JobFactory
@Bean
public JobFactory jobFactory(ApplicationContext applicationContext) {
AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
jobFactory.setApplicationContext(applicationContext);
return jobFactory;
}
/**
* SchedulerFactoryBean这个类的真正作用提供了对org.quartz.Scheduler的创建与配置,并且会管理它的生命周期与Spring同步。
* org.quartz.Scheduler: 调度器。所有的调度都是由它控制。
* @param dataSource 为SchedulerFactory配置数据源
* @param jobFactory 为SchedulerFactory配置JobFactory
*/
@Bean
public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource, JobFactory jobFactory) throws IOException {
SchedulerFactoryBean factory = new SchedulerFactoryBean();
//可选,QuartzScheduler启动时更新己存在的Job,这样就不用每次修改targetObject后删除qrtz_job_details表对应记录
factory.setOverwriteExistingJobs(true);
factory.setAutoStartup(true); //设置自行启动
factory.setDataSource(dataSource);
factory.setJobFactory(jobFactory);
factory.setQuartzProperties(quartzProperties());
return factory;
}
//从quartz.properties文件中读取Quartz配置属性
@Bean
public Properties quartzProperties() throws IOException {
PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
propertiesFactoryBean.setLocation(new ClassPathResource("/quartz.properties"));
propertiesFactoryBean.afterPropertiesSet();
return propertiesFactoryBean.getObject();
}
//配置JobFactory,为quartz作业添加自动连接支持
public final class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory implements
ApplicationContextAware {
private transient AutowireCapableBeanFactory beanFactory;
@Override
public void setApplicationContext(final ApplicationContext context) {
beanFactory = context.getAutowireCapableBeanFactory();
}
@Override
protected Object createJobInstance(final TriggerFiredBundle bundle) throws Exception {
final Object job = super.createJobInstance(bundle);
beanFactory.autowireBean(job);
return job;
}
}
}
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- job 持久化队列的支持 by CHENYB date 2019-08-05
-- in your Quartz properties file, you'll need to set org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate
-- 你需要在你的quartz.properties文件中设置org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate
-- StdJDBCDelegate说明支持集群,所有的任务信息都会保存到数据库中,可以控制事务,还有就是如果应用服务器关闭或者重启,任务信息都不会丢失,并且可以恢复因服务器关闭或者重启而导致执行失败的任务
-- This is the script from Quartz to create the tables in a MySQL database, modified to use INNODB instead of MYISAM
-- 这是来自quartz的脚本,在MySQL数据库中创建以下的表,修改为使用INNODB而不是MYISAM
-- 你需要在数据库中执行以下的sql脚本
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- Drop all Quartz tables (dependent/child tables first, to satisfy foreign keys)
-- plus the application's own job-definition table, so the script is re-runnable.
DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
DROP TABLE IF EXISTS QRTZ_LOCKS;
DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
DROP TABLE IF EXISTS QRTZ_CALENDARS;
DROP TABLE IF EXISTS `qrtz_job_entity`;
-- Table structure for qrtz_job_entity: the application's own job-control table
-- (one row per schedulable jar job; mirrored by the JobEntity JPA class).
CREATE TABLE `qrtz_job_entity` (
`job_id` varchar(32) NOT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_group` varchar(255) DEFAULT NULL,
`job_cron` varchar(255) DEFAULT NULL,
`job_parameter` varchar(255) NOT NULL,
`job_description` varchar(255) DEFAULT NULL,
`vm_param` varchar(255) DEFAULT NULL,
`jar_path` varchar(255) DEFAULT NULL,
`status` varchar(255) DEFAULT NULL,
PRIMARY KEY (`job_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Stores the detail of every configured Job.
CREATE TABLE QRTZ_JOB_DETAILS(
SCHED_NAME VARCHAR(120) NOT NULL,
JOB_NAME VARCHAR(200) NOT NULL,
JOB_GROUP VARCHAR(200) NOT NULL,
DESCRIPTION VARCHAR(250) NULL,
JOB_CLASS_NAME VARCHAR(250) NOT NULL,
IS_DURABLE VARCHAR(1) NOT NULL,
IS_NONCONCURRENT VARCHAR(1) NOT NULL,
IS_UPDATE_DATA VARCHAR(1) NOT NULL,
REQUESTS_RECOVERY VARCHAR(1) NOT NULL,
JOB_DATA BLOB NULL,
PRIMARY KEY (SCHED_NAME,JOB_NAME,JOB_GROUP))
ENGINE=InnoDB;
-- Stores every configured Trigger.
CREATE TABLE QRTZ_TRIGGERS (
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
JOB_NAME VARCHAR(200) NOT NULL,
JOB_GROUP VARCHAR(200) NOT NULL,
DESCRIPTION VARCHAR(250) NULL,
NEXT_FIRE_TIME BIGINT(13) NULL,
PREV_FIRE_TIME BIGINT(13) NULL,
PRIORITY INTEGER NULL,
TRIGGER_STATE VARCHAR(16) NOT NULL,
TRIGGER_TYPE VARCHAR(8) NOT NULL,
START_TIME BIGINT(13) NOT NULL,
END_TIME BIGINT(13) NULL,
CALENDAR_NAME VARCHAR(200) NULL,
MISFIRE_INSTR SMALLINT(2) NULL,
JOB_DATA BLOB NULL,
PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
FOREIGN KEY (SCHED_NAME,JOB_NAME,JOB_GROUP)
REFERENCES QRTZ_JOB_DETAILS(SCHED_NAME,JOB_NAME,JOB_GROUP))
ENGINE=InnoDB;
-- Stores configured Simple Triggers (fixed repeat count / interval).
CREATE TABLE QRTZ_SIMPLE_TRIGGERS (
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
REPEAT_COUNT BIGINT(7) NOT NULL,
REPEAT_INTERVAL BIGINT(12) NOT NULL,
TIMES_TRIGGERED BIGINT(10) NOT NULL,
PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP))
ENGINE=InnoDB;
-- Stores Cron Triggers, including the cron expression and time-zone id.
CREATE TABLE QRTZ_CRON_TRIGGERS (
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
CRON_EXPRESSION VARCHAR(120) NOT NULL,
TIME_ZONE_ID VARCHAR(80),
PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP))
ENGINE=InnoDB;
-- Generic typed-property storage used by Quartz for other trigger kinds.
CREATE TABLE QRTZ_SIMPROP_TRIGGERS
(
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
STR_PROP_1 VARCHAR(512) NULL,
STR_PROP_2 VARCHAR(512) NULL,
STR_PROP_3 VARCHAR(512) NULL,
INT_PROP_1 INT NULL,
INT_PROP_2 INT NULL,
LONG_PROP_1 BIGINT NULL,
LONG_PROP_2 BIGINT NULL,
DEC_PROP_1 NUMERIC(13,4) NULL,
DEC_PROP_2 NUMERIC(13,4) NULL,
BOOL_PROP_1 VARCHAR(1) NULL,
BOOL_PROP_2 VARCHAR(1) NULL,
PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP))
ENGINE=InnoDB;
-- Stores Triggers as Blobs (for user-defined custom Trigger types that the
-- JobStore does not know how to persist in the typed tables above).
CREATE TABLE QRTZ_BLOB_TRIGGERS (
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
BLOB_DATA BLOB NULL,
PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
INDEX (SCHED_NAME,TRIGGER_NAME, TRIGGER_GROUP),
FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP))
ENGINE=InnoDB;
-- Stores Quartz Calendar data as Blobs; a calendar restricts firing to a time range.
CREATE TABLE QRTZ_CALENDARS (
SCHED_NAME VARCHAR(120) NOT NULL,
CALENDAR_NAME VARCHAR(200) NOT NULL,
CALENDAR BLOB NOT NULL,
PRIMARY KEY (SCHED_NAME,CALENDAR_NAME))
ENGINE=InnoDB;
-- Stores the Trigger groups that are currently paused.
CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS (
SCHED_NAME VARCHAR(120) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
PRIMARY KEY (SCHED_NAME,TRIGGER_GROUP))
ENGINE=InnoDB;
-- Stores state for Triggers that have fired, plus execution info of the associated Job.
CREATE TABLE QRTZ_FIRED_TRIGGERS (
SCHED_NAME VARCHAR(120) NOT NULL,
ENTRY_ID VARCHAR(95) NOT NULL,
TRIGGER_NAME VARCHAR(200) NOT NULL,
TRIGGER_GROUP VARCHAR(200) NOT NULL,
INSTANCE_NAME VARCHAR(200) NOT NULL,
FIRED_TIME BIGINT(13) NOT NULL,
SCHED_TIME BIGINT(13) NOT NULL,
PRIORITY INTEGER NOT NULL,
STATE VARCHAR(16) NOT NULL,
JOB_NAME VARCHAR(200) NULL,
JOB_GROUP VARCHAR(200) NULL,
IS_NONCONCURRENT VARCHAR(1) NULL,
REQUESTS_RECOVERY VARCHAR(1) NULL,
PRIMARY KEY (SCHED_NAME,ENTRY_ID))
ENGINE=InnoDB;
-- Stores a small amount of Scheduler state, including check-in info of the
-- other Scheduler instances when running in a cluster.
CREATE TABLE QRTZ_SCHEDULER_STATE (
SCHED_NAME VARCHAR(120) NOT NULL,
INSTANCE_NAME VARCHAR(200) NOT NULL,
LAST_CHECKIN_TIME BIGINT(13) NOT NULL,
CHECKIN_INTERVAL BIGINT(13) NOT NULL,
PRIMARY KEY (SCHED_NAME,INSTANCE_NAME))
ENGINE=InnoDB;
-- Stores pessimistic-lock rows (used when pessimistic locking is enabled;
-- original comment's "非观锁" was a typo for 悲观锁 / pessimistic lock).
CREATE TABLE QRTZ_LOCKS (
SCHED_NAME VARCHAR(120) NOT NULL,
LOCK_NAME VARCHAR(40) NOT NULL,
PRIMARY KEY (SCHED_NAME,LOCK_NAME))
ENGINE=InnoDB;
-- Indexes shipped with the Quartz distribution to speed up JobStore queries
-- (misfire scans, trigger acquisition, per-instance recovery lookups).
CREATE INDEX IDX_QRTZ_J_REQ_RECOVERY ON QRTZ_JOB_DETAILS(SCHED_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_J_GRP ON QRTZ_JOB_DETAILS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_J ON QRTZ_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_JG ON QRTZ_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_C ON QRTZ_TRIGGERS(SCHED_NAME,CALENDAR_NAME);
CREATE INDEX IDX_QRTZ_T_G ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_T_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_G_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NEXT_FIRE_TIME ON QRTZ_TRIGGERS(SCHED_NAME,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE_GRP ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_FT_TRIG_INST_NAME ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME);
CREATE INDEX IDX_QRTZ_FT_INST_JOB_REQ_RCVRY ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_FT_J_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_JG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_T_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
commit;
package com.richfit.job.domain;
import javax.persistence.*;
import java.io.Serializable;
@Entity
// BUG FIX: was @Table(name = "JOB_ENTITY"), which does not match the DDL shipped
// with this project (`qrtz_job_entity`) and fails on case-sensitive MySQL servers.
@Table(name = "qrtz_job_entity")
public class JobEntity implements Serializable {
    private static final long serialVersionUID = 4122384962907036642L;

    @Id
    @Column(name = "job_id")
    private String id;
    @Column(name = "job_name")
    private String name; // job name
    @Column(name = "job_group")
    private String group; // job group name
    @Column(name = "job_cron")
    private String cron; // cron expression driving execution
    @Column(name = "job_parameter")
    private String parameter; // program arguments passed to the jar
    @Column(name = "job_description")
    private String description; // free-text description
    @Column(name = "vm_param")
    private String vmParam; // JVM options for the spawned process
    @Column(name = "jar_path")
    private String jarPath; // path of the executable jar this job runs
    // Execution status: OPEN/CLOSE; only OPEN jobs get scheduled.
    // No @Column needed — the implicit column name "status" matches the DDL.
    private String status;

    public JobEntity() {
    }

    // Builder-based constructor (optional; lets callers set any subset of properties).
    public JobEntity(Builder builder) {
        id = builder.id;
        name = builder.name;
        group = builder.group;
        cron = builder.cron;
        parameter = builder.parameter;
        description = builder.description;
        vmParam = builder.vmParam;
        jarPath = builder.jarPath;
        status = builder.status;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getGroup() {
        return group;
    }

    public void setGroup(String group) {
        this.group = group;
    }

    public String getCron() {
        return cron;
    }

    public void setCron(String cron) {
        this.cron = cron;
    }

    public String getParameter() {
        return parameter;
    }

    public void setParameter(String parameter) {
        this.parameter = parameter;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getVmParam() {
        return vmParam;
    }

    public void setVmParam(String vmParam) {
        this.vmParam = vmParam;
    }

    public String getJarPath() {
        return jarPath;
    }

    public void setJarPath(String jarPath) {
        this.jarPath = jarPath;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        return "JobEntity{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", group='" + group + '\'' +
                ", cron='" + cron + '\'' +
                ", parameter='" + parameter + '\'' +
                ", description='" + description + '\'' +
                ", vmParam='" + vmParam + '\'' +
                ", jarPath='" + jarPath + '\'' +
                ", status='" + status + '\'' +
                '}';
    }

    /** Fluent builder; fields default to "" except id. */
    public static class Builder {
        private String id;
        private String name = "";
        private String group = "";
        private String cron = "";
        private String parameter = "";
        private String description = "";
        private String vmParam = "";
        private String jarPath = "";
        private String status = "";

        public Builder withId(String i) {
            id = i;
            return this;
        }

        public Builder withName(String n) {
            name = n;
            return this;
        }

        public Builder withGroup(String g) {
            group = g;
            return this;
        }

        public Builder withCron(String c) {
            cron = c;
            return this;
        }

        public Builder withParameter(String p) {
            parameter = p;
            return this;
        }

        public Builder withDescription(String d) {
            description = d;
            return this;
        }

        public Builder withVMParameter(String vm) {
            vmParam = vm;
            return this;
        }

        public Builder withJarPath(String jar) {
            jarPath = jar;
            return this;
        }

        public Builder withStatus(String s) {
            status = s;
            return this;
        }

        public JobEntity newJobEntity() {
            return new JobEntity(this);
        }
    }
}
package com.richfit.job.dao;
import com.richfit.job.domain.JobEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
// Typed generics restored: the raw `extends JpaRepository` lost <JobEntity, String>,
// which makes findAll()/save() return Object and breaks typed iteration in callers.
public interface JobEntityRepository extends JpaRepository<JobEntity, String> {
    /** Load a single job definition by primary key, or null if absent. */
    JobEntity getById(String id);
}
package com.richfit.job.execute;
import com.richfit.job.util.StringUtils;
import org.quartz.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
* :@DisallowConcurrentExecution : 此标记用在实现Job的类上面,意思是不允许并发执行.
* :注意org.quartz.threadPool.threadCount线程池中线程的数量至少要多个,否则@DisallowConcurrentExecution不生效
* :假如Job的设置时间间隔为3秒,但Job执行时间是5秒,设置@DisallowConcurrentExecution以后程序会等任务执行完毕以后再去执行,否则会在3秒时再启用新的线程执行
* by CHENYB date 2019/07/31
*/
@DisallowConcurrentExecution
@Component
public class DynamicJob implements Job {
    // static final: one logger per class, not per job instance
    private static final Logger logger = LoggerFactory.getLogger(DynamicJob.class);

    /**
     * Core method: the actual execution logic of the Quartz job — spawn
     * `java [vmParam] -jar jarPath [parameter]` and log its output.
     *
     * @param executorContext carries all Quartz runtime info; getMergedJobDataMap()
     *                        returns a fresh copy (the JobDetail's map is shared)
     * @throws JobExecutionException the only exception execute() may throw
     */
    @Override
    public void execute(JobExecutionContext executorContext) throws JobExecutionException {
        JobDataMap map = executorContext.getMergedJobDataMap();
        String jarPath = map.getString("jarPath");
        String parameter = map.getString("parameter");
        String vmParam = map.getString("vmParam");
        logger.info("Running Job name : {} ", map.getString("name"));
        // NOTE(review): the other keys use entity field names ("name", "group", ...);
        // confirm "JobDescription" / "cronExpression" actually exist in the merged map,
        // otherwise these two log null.
        logger.info("Running Job description : {}", map.getString("JobDescription"));
        logger.info("Running Job group: {} ", map.getString("group"));
        logger.info("Running Job cron : {}", map.getString("cronExpression"));
        logger.info("Running Job jar path : {} ", jarPath);
        logger.info("Running Job parameter : {} ", parameter);
        logger.info("Running Job vmParam : {} ", vmParam);
        long startTime = System.currentTimeMillis();
        if (!StringUtils.getStringUtil.isEmpty(jarPath)) {
            File jar = new File(jarPath);
            if (!jar.exists()) {
                throw new JobExecutionException("Job Jar not found >> " + jarPath);
            }
            ProcessBuilder processBuilder = new ProcessBuilder();
            processBuilder.directory(jar.getParentFile());
            List<String> commands = new ArrayList<>();
            commands.add("java");
            // NOTE(review): vmParam/parameter are added as a single argv element each;
            // values containing spaces will not be split — confirm that is intended.
            if (!StringUtils.getStringUtil.isEmpty(vmParam)) commands.add(vmParam);
            commands.add("-jar");
            commands.add(jarPath);
            if (!StringUtils.getStringUtil.isEmpty(parameter)) commands.add(parameter);
            processBuilder.command(commands);
            logger.info("Running Job details as follows >>>>>>>>>>>>>>>>>>>>: ");
            logger.info("Running Job commands : {} ", StringUtils.getStringUtil.getListString(commands));
            try {
                Process process = processBuilder.start();
                logProcess(process.getInputStream(), process.getErrorStream());
            } catch (IOException e) {
                throw new JobExecutionException(e);
            }
        }
        long endTime = System.currentTimeMillis();
        logger.info(">>>>>>>>>>>>> Running Job has been completed , cost time : " + (endTime - startTime) + "ms\n");
    }

    /**
     * Logs the child process output: stdout at INFO, stderr at ERROR.
     * Readers are now closed via try-with-resources (they were leaked before).
     * NOTE(review): streams are drained sequentially — a process writing a large
     * amount to stderr before stdout closes could still block; confirm acceptable.
     */
    private void logProcess(InputStream inputStream, InputStream errorStream) throws IOException {
        try (BufferedReader inputReader = new BufferedReader(new InputStreamReader(inputStream));
             BufferedReader errorReader = new BufferedReader(new InputStreamReader(errorStream))) {
            String line;
            while ((line = inputReader.readLine()) != null) logger.info(line);
            while ((line = errorReader.readLine()) != null) logger.error(line);
        }
    }
}
package com.richfit.job.service.impl;
import com.richfit.job.dao.JobEntityRepository;
import com.richfit.job.domain.JobEntity;
import com.utils.ReflectUtils;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
* BeanJobService by CHENYB date 2019/7/31
*/
@Service
public class IJobServiceImpl {
@Autowired
private JobEntityRepository repository;
//通过Id获取Job
public JobEntity getJobEntityById(String id) {
return repository.getById(id);
}
//从数据库中加载获取到所有Job
public List loadJobs() {
List list = new ArrayList<>();
repository.findAll().forEach(list::add);
return list;
}
//Entity 转换 JobDataMap.(Job参数对象) ,博客有工具类
public JobDataMap getJobDataMap(JobEntity job) {
JobDataMap map = new JobDataMap();
map.putAll( MapUtil.objToMap( job ) );
return map;
}
//获取JobDetail,JobDetail是任务的定义,而Job是任务的执行逻辑,JobDetail里会引用一个Job Class来定义
public JobDetail getJobDetail(JobKey jobKey, String description, JobDataMap map ,Class extends Job> cls) {
return JobBuilder.newJob(cls)
.withIdentity(jobKey)
.withDescription(description)
.setJobData(map)
.storeDurably()
.build();
}
//获取Trigger (Job的触发器,执行规则)
public Trigger getTrigger(JobEntity job) {
return TriggerBuilder.newTrigger()
.withIdentity(job.getName(), job.getGroup())
.withSchedule(CronScheduleBuilder.cronSchedule(job.getCron()))
.build();
}
//获取JobKey,包含Name和Group
public JobKey getJobKey(JobEntity job) {
return JobKey.jobKey(job.getName(), job.getGroup());
}
}
/**
 * 文件名：BaseController.java
 *
 * 北京中油瑞飞信息技术有限责任公司 (http://www.richfit.com)
 * Copyright © 2017 Richfit Information Technology Co., LTD. All Right Reserved.
 */
package com.base;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.servlet.http.HttpServletRequest;
import com.richfit.job.domain.JobEntity;
import com.richfit.job.execute.DynamicJob;
import com.richfit.job.service.impl.IJobServiceImpl;
import org.apache.commons.lang3.ArrayUtils;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.web.bind.annotation.RestController;
/**
* Controller 基类 by CHENYB data 2019-07-31
*/
@RestController
public class BaseController {
    public static final Logger logger = LoggerFactory.getLogger(BaseController.class);
    // Dedicated monitor for scheduler mutations; the old code synchronized on the
    // shared public Logger, which any other class could also lock on.
    private static final Object SCHEDULER_LOCK = new Object();
    @Autowired
    private SchedulerFactoryBean schedulerFactoryBean;
    @Autowired
    private IJobServiceImpl jobService;

    /** Register and start every database-defined job once this bean is built. */
    @PostConstruct
    private void initialize() {
        try {
            reStartAllJobs();
            logger.info("INIT JOB SUCCESS");
        } catch (SchedulerException e) {
            // full stack trace via the logger instead of printStackTrace()
            logger.error("INIT JOB EXCEPTION", e);
        }
    }

    /**
     * Re-register a single job by id: pause, unschedule and delete the old
     * definition, then schedule it again if its status is OPEN.
     *
     * @param id primary key of the JobEntity
     * @return true if the job was (re)scheduled; false if missing or not OPEN
     * @throws SchedulerException on any Quartz failure
     */
    public Boolean refresh(String id) throws SchedulerException {
        JobEntity entity = jobService.getJobEntityById(id);
        if (entity == null) return false;
        boolean result;
        synchronized (SCHEDULER_LOCK) {
            JobKey jobKey = jobService.getJobKey(entity);
            Scheduler scheduler = schedulerFactoryBean.getScheduler();
            scheduler.pauseJob(jobKey);
            scheduler.unscheduleJob(TriggerKey.triggerKey(jobKey.getName(), jobKey.getGroup()));
            scheduler.deleteJob(jobKey);
            JobDataMap map = jobService.getJobDataMap(entity);
            JobDetail jobDetail = jobService.getJobDetail(jobKey, entity.getDescription(), map, DynamicJob.class);
            // "OPEN".equals(...) is null-safe, unlike entity.getStatus().equals("OPEN")
            result = "OPEN".equals(entity.getStatus());
            if (result) {
                scheduler.scheduleJob(jobDetail, jobService.getTrigger(entity));
            }
        }
        return result;
    }

    /**
     * Restart all jobs.
     *
     * @return true on success, false if a SchedulerException occurred
     */
    public Boolean refreshAll() {
        try {
            reStartAllJobs();
            return true;
        } catch (SchedulerException e) {
            logger.error("REFRESH ALL JOBS EXCEPTION", e);
            return false;
        }
    }

    /** Drop every job currently in the scheduler, then re-register all OPEN jobs from the database. */
    private void reStartAllJobs() throws SchedulerException {
        synchronized (SCHEDULER_LOCK) { // only one thread may rebuild the schedule at a time
            Scheduler scheduler = schedulerFactoryBean.getScheduler();
            Set<JobKey> keys = scheduler.getJobKeys(GroupMatcher.anyGroup()); // typed Set restored (was raw)
            scheduler.pauseJobs(GroupMatcher.anyGroup()); // pause everything before tear-down
            for (JobKey jobKey : keys) { // remove every registered job
                scheduler.unscheduleJob(TriggerKey.triggerKey(jobKey.getName(), jobKey.getGroup()));
                scheduler.deleteJob(jobKey);
            }
            for (JobEntity job : jobService.loadJobs()) { // re-register from the database
                logger.info("Job register name : {} , group : {} , cron : {}", job.getName(), job.getGroup(), job.getCron());
                JobDataMap map = jobService.getJobDataMap(job);
                JobKey jobKey = jobService.getJobKey(job);
                JobDetail jobDetail = jobService.getJobDetail(jobKey, job.getDescription(), map, DynamicJob.class);
                if ("OPEN".equals(job.getStatus())) scheduler.scheduleJob(jobDetail, jobService.getTrigger(job));
                else
                    logger.info("Job jump name : {} , Because {} status is {}", job.getName(), job.getName(), job.getStatus());
            }
        }
    }

    /**
     * Flatten the parameters of an HttpServletRequest into a Map.
     * Single-valued parameters map to their String value; multi-valued ones to
     * ArrayUtils.toString of the value array. Never returns null.
     */
    protected Map<String, Object> initRequestParams(HttpServletRequest request) {
        Map<String, Object> paramMap = new HashMap<>();
        if (request == null) {
            return paramMap;
        }
        Enumeration<String> paramNames = request.getParameterNames(); // generic restored (was garbled "Enumeration>")
        if (paramNames != null) {
            while (paramNames.hasMoreElements()) {
                String paramName = paramNames.nextElement();
                String[] paramValues = request.getParameterValues(paramName);
                if (paramValues.length == 1) {
                    paramMap.put(paramName, paramValues[0]);
                } else {
                    paramMap.put(paramName, ArrayUtils.toString(paramValues));
                }
            }
        }
        return paramMap;
    }
}
package com.richfit.job.controller;
import com.base.BaseController;
import com.exception.ExceptionEnum;
import com.exception.MyException;
import com.richfit.job.dao.JobEntityRepository;
import com.richfit.job.domain.JobEntity;
import com.richfit.job.service.impl.JobServiceImpl;
import com.utils.ConvertUtils;
import com.utils.ReflectUtils;
import com.utils.ResultVo;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.util.Map;
/**
* 此时如果在数据库中手动修改某个Job的执行cron,并不会马上生效,
* 则可以调用上面写到的业务方法,/refresh/all,则可刷新所有的Job,或/refresh/{id},刷新某个Job。
* 定时任务接口 by CHENYB date 2019-07-31
*/
@Api(description = "定时任务接口")
@RestController
@RequestMapping("/job")
public class JobController extends BaseController {
    private static final Logger logger = LoggerFactory.getLogger(JobController.class);
    @Autowired
    private JobEntityRepository repository;

    /** Restart one scheduled job by its database id. */
    @ApiOperation(value = "根据ID重启Job", notes = "根据ID重启定时任务")
    @PostMapping(value = "/refreshJobById")
    public String refreshJobById(
            HttpServletRequest request,
            @ApiParam(required = true, name = "id", value = "id") @RequestParam(name = "id", required = true) String id
    ) {
        if (!ConvertUtils.isVaild(id, 32, "", false))
            throw new MyException(ExceptionEnum.EXCEPTION_PARAMETER);
        ResultVo resultVo = new ResultVo();
        try {
            resultVo.setSuccess(super.refresh(id));
        } catch (Exception e) {
            resultVo.setSuccess(false);
            // log with stack trace instead of printStackTrace()
            logger.error("refreshJobById failed, id={}", id, e);
        }
        return resultVo.toJSONString();
    }

    /** Restart every job stored in the database. */
    @ApiOperation(value = "重启数据库中所有的Job", notes = "重启数据库中所有的定时任务")
    // BUG FIX: was mapped to "/refreshJobById" (copy-paste), colliding in meaning
    // with the endpoint above; now matches the method's own name.
    @GetMapping(value = "/refreshAllJob")
    public String refreshAllJob(
            HttpServletRequest request
    ) {
        ResultVo resultVo = new ResultVo();
        try {
            // BUG FIX: propagate refreshAll()'s outcome; previously success was
            // reported unconditionally even when the restart failed.
            resultVo.setSuccess(super.refreshAll());
        } catch (Exception e) {
            resultVo.setSuccess(false);
            logger.error("refreshAllJob failed", e);
        }
        return resultVo.toJSONString();
    }

    /** Insert or update one job definition row; call a refresh endpoint afterwards to apply it. */
    @ApiOperation(value = "添加/修改定时任务", notes = "添加/修改定时任务")
    @PostMapping(value = "/addOrUpdateJob")
    public String addOrUpdateJob(
            HttpServletRequest request,
            @ApiParam(required = true, name = "id", value = "id") @RequestParam(name = "id", required = true) String id,
            @ApiParam(required = true, name = "name", value = "任务名称") @RequestParam(name = "name", required = true) String name,
            @ApiParam(required = true, name = "group", value = "任务组名") @RequestParam(name = "group", required = true) String group,
            @ApiParam(required = true, name = "cron", value = "执行时间") @RequestParam(name = "cron", required = true) String cron,
            @ApiParam(required = true, name = "parameter", value = "任务参数") @RequestParam(name = "parameter", required = true) String parameter,
            @ApiParam(required = false, name = "description", value = "描述") @RequestParam(name = "description", required = false) String description,
            @ApiParam(required = false, name = "vmParam", value = "任务数据") @RequestParam(name = "vmParam", required = false) String vmParam,
            @ApiParam(required = false, name = "jarPath", value = "jar路径") @RequestParam(name = "jarPath", required = false) String jarPath,
            @ApiParam(required = true, name = "status", value = "状态") @RequestParam(name = "status", required = true) String status
    ) {
        Map paramsMap = super.initRequestParams(request);
        JobEntity jobEntity = new JobEntity();
        ReflectUtils.bindingPropertyValue(paramsMap, jobEntity);
        ResultVo resultVo = new ResultVo();
        try {
            this.repository.save(jobEntity);
            resultVo.setSuccess(true);
        } catch (Exception e) {
            resultVo.setSuccess(false);
            logger.error("addOrUpdateJob failed, id={}", id, e);
        }
        return resultVo.toJSONString();
    }
}
chenyb 随笔记录,方便学习
2019-07-31