This article describes how to set up a batch processing architecture with Spring Boot and Spring Batch.
Two databases are involved: one is the underlying database required by Spring Batch itself, which stores job execution metadata; the other is the business database, which the demo job reads data from and writes data to.
1. Maven dependency configuration: pom.xml (fragment)
<parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
    <version>2.0.3.RELEASE</version>
</parent>

<properties>
    <jdk.version>1.8</jdk.version>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-batch</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.batch</groupId>
        <artifactId>spring-batch-excel</artifactId>
        <version>0.5.0-SNAPSHOT</version>
    </dependency>
    <dependency>
        <groupId>org.apache.poi</groupId>
        <artifactId>poi</artifactId>
        <version>3.11</version>
    </dependency>
    <dependency>
        <groupId>org.apache.poi</groupId>
        <artifactId>poi-ooxml</artifactId>
        <version>3.11</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-data-jpa</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-oxm</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-lang3</artifactId>
        <version>3.4</version>
    </dependency>
    <dependency>
        <groupId>org.postgresql</groupId>
        <artifactId>postgresql</artifactId>
        <version>42.1.1</version>
    </dependency>
    <dependency>
        <groupId>org.glassfish.jersey.media</groupId>
        <artifactId>jersey-media-json-jackson</artifactId>
        <version>2.12</version>
    </dependency>
    <dependency>
        <groupId>com.zaxxer</groupId>
        <artifactId>HikariCP</artifactId>
        <version>3.1.0</version>
    </dependency>
</dependencies>

<build>
    <finalName>my-job-master</finalName>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.2</version>
            <configuration>
                <source>${jdk.version}</source>
                <target>${jdk.version}</target>
                <encoding>${project.build.sourceEncoding}</encoding>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-surefire-plugin</artifactId>
            <version>2.18</version>
        </plugin>
        <plugin>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-maven-plugin</artifactId>
        </plugin>
    </plugins>
</build>
2. Project package structure
conf package: data source configuration for the batch repository, the business data source, and the batch configuration
core package: classes related to launching jobs
job package: where the actual jobs are implemented (see the sketch after this list)
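For reference, the package layout could look roughly like the sketch below; the exact placement of classes is an assumption based on the package names used in the configuration classes later in this article, not something stated in the original:

com.thingple
├── ApJobMasterApplication.java
├── conf/        SpringBatchDataSourceConfig, ApTruckDataSourceConfig, BatchConfig
├── core/        job launching support classes (e.g. SimpleJobExecutionListener)
├── job/         DemoJobConfig, DemoJobDataWriter
├── dao/
│   ├── batch/   JPA repositories for the batch database
│   └── ap/      JPA repositories for the business database (TruckMilesageLogDao)
└── entity/
    ├── batch/   entities for the batch database
    └── ap/      entities for the business database (TruckDailyImportedData, TruckMilesageLog)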
3. Spring Boot entry point
@SpringBootApplication
@EnableBatchProcessing
public class ApJobMasterApplication {

    public static void main(String[] args) {
        SpringApplication.run(ApJobMasterApplication.class, args);
    }
}
4. Batch configuration
@Configuration
@EnableBatchProcessing
public class BatchConfig {

    @Bean
    public JobRepository jobRepository(@Qualifier("batchDataSource") DataSource dataSource,
            @Qualifier("batchTransactionManager") PlatformTransactionManager transactionManager) throws Exception {
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDataSource(dataSource);
        jobRepositoryFactoryBean.setTransactionManager(transactionManager);
        jobRepositoryFactoryBean.setDatabaseType(DatabaseType.POSTGRES.name());
        // the factory bean is created manually here, so initialize it explicitly before fetching the repository
        jobRepositoryFactoryBean.afterPropertiesSet();
        return jobRepositoryFactoryBean.getObject();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(@Qualifier("batchDataSource") DataSource dataSource,
            @Qualifier("batchTransactionManager") PlatformTransactionManager transactionManager) throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(this.jobRepository(dataSource, transactionManager));
        return jobLauncher;
    }

    @Bean
    public SimpleJobExecutionListener myJobListener() {
        return new SimpleJobExecutionListener();
    }
}
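The SimpleJobExecutionListener registered above is not shown in the original article. Assuming it implements Spring Batch's JobExecutionListener interface (which is what Job.listener(...) below expects), a minimal sketch could look like this:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;

public class SimpleJobExecutionListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(SimpleJobExecutionListener.class);

    @Override
    public void beforeJob(JobExecution jobExecution) {
        // called once before the job starts
        LOGGER.info("Job {} starting with parameters {}",
                jobExecution.getJobInstance().getJobName(), jobExecution.getJobParameters());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // called once after the job finishes, whatever its exit status
        LOGGER.info("Job {} finished with status {}",
                jobExecution.getJobInstance().getJobName(), jobExecution.getStatus());
    }
}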
5. Data source configuration required by Spring Batch
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = "com.thingple.dao.batch",
        entityManagerFactoryRef = "batchEntityManagerFactory",
        transactionManagerRef = "batchTransactionManager")
public class SpringBatchDataSourceConfig {

    @Bean(name = "batchDataSourceConf")
    @ConfigurationProperties("spring.batch.datasource")
    public DataSourceProperties batchDataSourceProperties() {
        return new DataSourceProperties();
    }

    @Bean(name = "batchDataSource")
    @ConfigurationProperties("spring.batch.datasource")
    public DataSource getBatchDataSource() {
        return batchDataSourceProperties().initializeDataSourceBuilder()
                .type(com.zaxxer.hikari.HikariDataSource.class).build();
    }

    @Bean(name = "batchEntityManagerFactory")
    public LocalContainerEntityManagerFactoryBean batchEntityManagerFactory(EntityManagerFactoryBuilder builder) {
        return builder
                .dataSource(getBatchDataSource())
                .packages("com.thingple.entity.batch")
                .persistenceUnit("batchDs")
                .build();
    }

    @Bean(name = "batchTransactionManager")
    public PlatformTransactionManager transactionManager(
            @Qualifier("batchEntityManagerFactory") EntityManagerFactory emf) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(emf);
        return transactionManager;
    }
}
6. Business data source configuration
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = "com.thingple.dao.ap",
        entityManagerFactoryRef = "apTruckEntityManagerFactory",
        transactionManagerRef = "apTruckTransactionManager")
public class ApTruckDataSourceConfig {

    @Bean(name = "apDataSourceConf")
    @ConfigurationProperties("ap.truck.datasource")
    @Primary
    public DataSourceProperties apTruckDataSourceProperties() {
        return new DataSourceProperties();
    }

    @Bean(name = "apDataSource")
    @ConfigurationProperties("ap.truck.datasource")
    @Primary
    public DataSource getApDataSource() {
        return apTruckDataSourceProperties().initializeDataSourceBuilder()
                .type(com.zaxxer.hikari.HikariDataSource.class).build();
        //return apTruckDataSourceProperties().initializeDataSourceBuilder().build();
    }

    @Bean(name = "apTruckEntityManagerFactory")
    @Primary
    public LocalContainerEntityManagerFactoryBean apTruckEntityManagerFactory(EntityManagerFactoryBuilder builder) {
        return builder
                .dataSource(getApDataSource())
                .packages("com.thingple.entity.ap")
                .persistenceUnit("apTruckDs")
                .build();
    }

    @Bean(name = "apTruckTransactionManager")
    @Primary
    public PlatformTransactionManager transactionManager(
            @Qualifier("apTruckEntityManagerFactory") EntityManagerFactory emf) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(emf);
        return transactionManager;
    }

    @Bean(name = "apJdbcTemplate")
    @Primary
    public JdbcTemplate jdbcTemplate() {
        return new JdbcTemplate(getApDataSource());
    }

    @Bean(name = "apNameParameterJdbcTemplate")
    @Primary
    public NamedParameterJdbcTemplate namedParameterJdbcTemplate() {
        return new NamedParameterJdbcTemplate(getApDataSource());
    }

    @Bean(name = "apTransactionTemplate")
    @Primary
    public TransactionTemplate transactionTemplate(
            @Qualifier("apTruckTransactionManager") PlatformTransactionManager platformTransactionManager) {
        return new TransactionTemplate(platformTransactionManager);
    }
}
7. Demo job configuration
@Configuration
public class DemoJobConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(DemoJobConfig.class);

    @Bean(name = "demoJobReader")
    @StepScope
    ItemReader<TruckDailyImportedData> demoJobReader(
            @Qualifier("apTruckEntityManagerFactory") EntityManagerFactory emf,
            @Value("#{jobParameters[startTime]}") String startTime,
            @Value("#{jobParameters[endTime]}") String endTime) throws Exception {
        LOGGER.info("[DemoJobConfig][demoJobReader]startTime={},endTime={}", startTime, endTime);
        String query = "from TruckDailyImportedData d where d.startTime >= :startTime and d.endTime <= :endTime";
        Map<String, Object> params = new HashMap<>();
        params.put("startTime", DateUtil.getDayofBegin(DateUtil.parse(startTime)));
        params.put("endTime", DateUtil.getDayofEnd(DateUtil.parse(endTime)));
        JpaPagingItemReader<TruckDailyImportedData> reader = new JpaPagingItemReader<>();
        reader.setEntityManagerFactory(emf);
        reader.setPageSize(100);
        reader.setQueryString(query);
        reader.setParameterValues(params);
        reader.setTransacted(false);
        // the reader must be opened explicitly here, otherwise the step cannot read data (see the notes at the end)
        reader.open(new ExecutionContext());
        return reader;
    }

    @Bean(name = "demoJobProcessor")
    @StepScope
    ItemProcessor<TruckDailyImportedData, TruckDailyImportedData> demoJobProcessor() {
        return new ItemProcessor<TruckDailyImportedData, TruckDailyImportedData>() {
            @Override
            public TruckDailyImportedData process(TruckDailyImportedData item) throws Exception {
                return item;
            }
        };
    }

    @Bean(name = "demoJobWriter")
    @StepScope
    DemoJobDataWriter demoJobWriter() {
        return new DemoJobDataWriter();
    }

    @Bean(name = "demoJobStep")
    Step demoJobStep(@Qualifier("demoJobReader") ItemReader<TruckDailyImportedData> demoJobReader,
            @Qualifier("demoJobProcessor") ItemProcessor<TruckDailyImportedData, TruckDailyImportedData> demoJobProcessor,
            @Qualifier("demoJobWriter") ItemWriter<? super TruckDailyImportedData> demoJobWriter,
            @Qualifier("apTruckTransactionManager") PlatformTransactionManager transactionManager,
            StepBuilderFactory stepBuilderFactory) throws Exception {
        return stepBuilderFactory.get("demoJobStep")
                .<TruckDailyImportedData, TruckDailyImportedData>chunk(50)
                .reader(demoJobReader)
                .processor(demoJobProcessor)
                .writer(demoJobWriter)
                // the transaction manager of the business data source must be set explicitly (see the notes at the end)
                .transactionManager(transactionManager)
                .build();
    }

    @Bean
    Job demoJob(JobBuilderFactory jobBuilderFactory,
            @Qualifier("demoJobStep") Step demoJobStep,
            SimpleJobExecutionListener listener) {
        return jobBuilderFactory.get("demoJob")
                .incrementer(new RunIdIncrementer())
                .flow(demoJobStep)
                .end()
                .listener(listener)
                .build();
    }
}
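Because spring.batch.job.enabled=false is set in the properties file (section 11), the job does not run automatically at startup and has to be triggered through the JobLauncher. The original article does not show the launch code in the core package; purely as a hypothetical sketch, a REST trigger (the controller class, request path and parameter format are assumptions) could look like this:

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DemoJobController {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    @Qualifier("demoJob")
    private Job demoJob;

    @GetMapping("/jobs/demo")
    public String runDemoJob(@RequestParam String startTime, @RequestParam String endTime) throws Exception {
        // startTime/endTime are passed as strings and parsed by DateUtil inside demoJobReader
        JobParameters params = new JobParametersBuilder()
                .addString("startTime", startTime)
                .addString("endTime", endTime)
                // extra timestamp parameter so each run creates a new JobInstance even for the same time range
                .addLong("ts", System.currentTimeMillis())
                .toJobParameters();
        JobExecution execution = jobLauncher.run(demoJob, params);
        return execution.getStatus().toString();
    }
}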
8. ItemWriter implementation
public class DemoJobDataWriter implements ItemWriter<TruckDailyImportedData> {

    private static final Logger LOGGER = LoggerFactory.getLogger(DemoJobDataWriter.class);

    @Autowired
    private TruckMilesageLogDao truckMilesageLogDao;

    @Override
    public void write(List<? extends TruckDailyImportedData> items) throws Exception {
        LOGGER.info("Received the information of {} TruckDailyData", items.size());
        items.forEach(i -> LOGGER.debug("Received the information of a TruckDailyData: {}", i));
        if (CollectionUtils.isEmpty(items)) {
            return;
        }
        for (TruckDailyImportedData item : items) {
            insertOneItem(item);
        }
    }

    // @Transactional(value="apTruckTransactionManager", propagation=Propagation.REQUIRED)
    public void insertOneItem(TruckDailyImportedData item) {
        TruckMilesageLog data = new TruckMilesageLog();
        data.setTruckId(item.getLicensePlateNum());
        data.setMilesage(item.getMilesage());
        data.setOilWear(item.getOilWear());
        data.setcTime(new Date());
        truckMilesageLogDao.save(data);
    }
}
9. JPA DAO interface
@Repository
public interface TruckMilesageLogDao extends JpaRepository<TruckMilesageLog, Long> {
    // the ID type (Long here) is an assumption; it must match the @Id field of TruckMilesageLog
}
10. The two entity classes (omitted)
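The entities are omitted in the original. Purely as an illustration, TruckMilesageLog might look roughly like the sketch below, with fields inferred from the writer in section 8; the field types, column mapping and ID strategy are assumptions, and TruckDailyImportedData is not sketched here:

import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;

@Entity
public class TruckMilesageLog {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;          // ID type assumed; must match the JpaRepository declaration above

    private String truckId;   // set from TruckDailyImportedData.getLicensePlateNum()
    private Double milesage;  // numeric types are assumptions
    private Double oilWear;
    private Date cTime;

    public void setTruckId(String truckId) { this.truckId = truckId; }
    public void setMilesage(Double milesage) { this.milesage = milesage; }
    public void setOilWear(Double oilWear) { this.oilWear = oilWear; }
    public void setcTime(Date cTime) { this.cTime = cTime; }
    // getters omitted for brevity
}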
11. Spring properties file
#Batch Configuration
#Jobs to execute at startup; by default all jobs are executed
spring.batch.job.names=
#Whether to run the defined jobs automatically at startup; default is true
spring.batch.job.enabled=false
#Whether to initialize the Spring Batch schema; default is true
spring.batch.initializer.enabled=false
spring.batch.schema=
#Prefix for the Spring Batch tables
spring.batch.table-prefix=batch_
#Database Configuration
spring.batch.datasource.url=jdbc:postgresql://192.168.1.252:5432/job
spring.batch.datasource.driver-class-name=org.postgresql.Driver
spring.batch.datasource.username=xxx
spring.batch.datasource.password=xxx
spring.batch.datasource.test-on-borrow=true
spring.batch.datasource.remove-abandoned=true
spring.batch.datasource.validation-query=SELECT 1;
spring.batch.datasource.type=com.zaxxer.hikari.HikariDataSource
spring.batch.datasource.hikari.minimum-idle=4
spring.batch.datasource.hikari.maximum-pool-size=15
spring.batch.datasource.hikari.auto-commit=true
spring.batch.datasource.hikari.idle-timeout=30000
spring.batch.datasource.hikari.pool-name=BatchHikariCP
spring.batch.datasource.hikari.max-lifetime=1800000
spring.batch.datasource.hikari.connection-timeout=30000
spring.batch.datasource.hikari.connection-test-query=SELECT 1
#database pool config
# Number of ms to wait before throwing an exception if no connection is available.
#spring.datasource.tomcat.max-wait=10000
# Maximum number of active connections that can be allocated from this pool at the same time.
#spring.datasource.tomcat.max-active=3
# Validate the connection before borrowing it from the pool.
#spring.datasource.tomcat.test-on-borrow=true
# initial pool size
#spring.datasource.tomcat.initial-size=20
ap.truck.datasource.url=jdbc:postgresql://192.168.1.252:5432/ap_mgt
ap.truck.datasource.driver-class-name=org.postgresql.Driver
ap.truck.datasource.username=xxx
ap.truck.datasource.password=xxx
ap.truck.datasource.test-on-borrow=true
ap.truck.datasource.remove-abandoned=true
ap.truck.datasource.validation-query=SELECT 1;
ap.truck.datasource.hikari.minimum-idle=3
ap.truck.datasource.hikari.maximum-pool-size=15
ap.truck.datasource.hikari.auto-commit=true
ap.truck.datasource.hikari.idle-timeout=30000
ap.truck.datasource.hikari.pool-name=BatchHikariCP
ap.truck.datasource.hikari.max-lifetime=1800000
ap.truck.datasource.hikari.connection-timeout=30000
ap.truck.datasource.hikari.connection-test-query=SELECT 1
#=====================jpa config================================
#How entity classes maintain the table schema: update/create/create-drop/validate/none
spring.jpa.hibernate.ddl-auto=none
#Print SQL statements
spring.jpa.show-sql=true
#=============jackson serialize config =========================
#Pretty-print the serialized JSON output
spring.jackson.serialization.indent_output=true
#=====================log config================================
logging.config=classpath:logback-job.xml
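The properties file points to classpath:logback-job.xml, which is not included in the original article. A minimal placeholder (the appender name and pattern are assumptions) could be:

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="CONSOLE"/>
    </root>
</configuration>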
12. Job database script (PostgreSQL)
create database job with owner=postgres ENCODING='UTF-8';
-- Autogenerated: do not edit this file
CREATE TABLE BATCH_JOB_INSTANCE (
JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT ,
JOB_NAME VARCHAR(100) NOT NULL,
JOB_KEY VARCHAR(32) NOT NULL,
constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)
) ;
CREATE TABLE BATCH_JOB_EXECUTION (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT ,
JOB_INSTANCE_ID BIGINT NOT NULL,
CREATE_TIME TIMESTAMP NOT NULL,
START_TIME TIMESTAMP DEFAULT NULL ,
END_TIME TIMESTAMP DEFAULT NULL ,
STATUS VARCHAR(10) ,
EXIT_CODE VARCHAR(2500) ,
EXIT_MESSAGE VARCHAR(2500) ,
LAST_UPDATED TIMESTAMP,
JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,
constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)
references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
) ;
CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (
JOB_EXECUTION_ID BIGINT NOT NULL ,
TYPE_CD VARCHAR(6) NOT NULL ,
KEY_NAME VARCHAR(100) NOT NULL ,
STRING_VAL VARCHAR(250) ,
DATE_VAL TIMESTAMP DEFAULT NULL ,
LONG_VAL BIGINT ,
DOUBLE_VAL DOUBLE PRECISION ,
IDENTIFYING CHAR(1) NOT NULL ,
constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE TABLE BATCH_STEP_EXECUTION (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT NOT NULL,
STEP_NAME VARCHAR(100) NOT NULL,
JOB_EXECUTION_ID BIGINT NOT NULL,
START_TIME TIMESTAMP NOT NULL ,
END_TIME TIMESTAMP DEFAULT NULL ,
STATUS VARCHAR(10) ,
COMMIT_COUNT BIGINT ,
READ_COUNT BIGINT ,
FILTER_COUNT BIGINT ,
WRITE_COUNT BIGINT ,
READ_SKIP_COUNT BIGINT ,
WRITE_SKIP_COUNT BIGINT ,
PROCESS_SKIP_COUNT BIGINT ,
ROLLBACK_COUNT BIGINT ,
EXIT_CODE VARCHAR(2500) ,
EXIT_MESSAGE VARCHAR(2500) ,
LAST_UPDATED TIMESTAMP,
constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT TEXT ,
constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)
references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)
) ;
CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT TEXT ,
constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE;
CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE;
CREATE SEQUENCE BATCH_JOB_SEQ MAXVALUE 9223372036854775807 NO CYCLE;
At this point, a simple batch framework is in place.
The demo job reads data from the table mapped by the TruckDailyImportedData entity in the business database and writes it into the table mapped by the TruckMilesageLog entity.
There are two pitfalls to watch out for; otherwise errors will be thrown and the job will not be able to read or write data properly:
1. In the demoJobReader method of DemoJobConfig, reader.open() must be called explicitly, as in the example above:
reader.open(new ExecutionContext());
2. In the demoJobStep method of DemoJobConfig, the PlatformTransactionManager must be set explicitly, as in the example above:
.transactionManager(transactionManager)