前言:springbatch是一个轻量级的批处理框架,在企业级应用中,我们常见一些批处理业务场景,借助Spring
Batch我们可以很方便的开发出健壮、易用的批处理应用。
目前我做的项目的应用场景是数据迁移,顾名思义就是把老系统的数据迁移到新系统。
代码已上传GitHub上面地址:https://github.com/FadeHub/spring-boot-learn/tree/master/spring-boot-springbatch
SpringBatch其它文章直通车:
SpringBatch读单个文件(FlatFileItemReader)和写单个文件(FlatFileItemWriter)(一)
SpringBatch顺序读取多文件(MultiResourceItemReader)和顺序写文件(MultiResourceItemWriter)(二)
SpringBatch读数据库(MyBatisPagingItemReader)(三)
SpringBatch读文件(FlatFileItemReader)写数据库(MyBatisBatchItemWriter)(四)
SpringBatch 监听器之Job监听器(JobExecutionListener)和Step监听器(StepExecutionListener)(五)
SpringBatch 监听器之Chunk监听器(ChunkListener)和Skip监听器(SkipListener)(六)
现在我们做一个简单的例子读文件和输出文件,看看Springbatch的魅力所在。
-- Autogenerated: do not edit this file
-- Tear down any existing Spring Batch metadata tables so the schema can be
-- rebuilt from scratch. Tables holding foreign keys (the *_CONTEXT tables and
-- the child execution tables) are dropped before the tables they reference,
-- so no FK constraint is violated during teardown.
DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT ;
DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT ;
DROP TABLE IF EXISTS BATCH_STEP_EXECUTION ;
DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS ;
DROP TABLE IF EXISTS BATCH_JOB_EXECUTION ;
DROP TABLE IF EXISTS BATCH_JOB_INSTANCE ;
-- Sequence-emulation tables (MySQL has no native sequences).
DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ ;
DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ ;
DROP TABLE IF EXISTS BATCH_JOB_SEQ ;
-- Autogenerated: do not edit this file
-- One row per logical job instance; uniqueness is enforced on the
-- (JOB_NAME, JOB_KEY) pair via the JOB_INST_UN constraint.
CREATE TABLE BATCH_JOB_INSTANCE (
JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT ,
JOB_NAME VARCHAR(100) NOT NULL,
JOB_KEY VARCHAR(32) NOT NULL,
constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)
) ENGINE=InnoDB;
-- One row per physical run of a job instance; references BATCH_JOB_INSTANCE.
CREATE TABLE BATCH_JOB_EXECUTION (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT ,
JOB_INSTANCE_ID BIGINT NOT NULL,
CREATE_TIME DATETIME NOT NULL,
START_TIME DATETIME DEFAULT NULL ,
END_TIME DATETIME DEFAULT NULL ,
STATUS VARCHAR(10) ,
EXIT_CODE VARCHAR(2500) ,
EXIT_MESSAGE VARCHAR(2500) ,
LAST_UPDATED DATETIME,
JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,
constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)
references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
) ENGINE=InnoDB;
-- Parameters passed to a job execution; one row per parameter, with one
-- typed value column populated according to TYPE_CD.
CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (
JOB_EXECUTION_ID BIGINT NOT NULL ,
TYPE_CD VARCHAR(6) NOT NULL ,
KEY_NAME VARCHAR(100) NOT NULL ,
STRING_VAL VARCHAR(250) ,
DATE_VAL DATETIME DEFAULT NULL ,
LONG_VAL BIGINT ,
DOUBLE_VAL DOUBLE PRECISION ,
IDENTIFYING CHAR(1) NOT NULL ,
constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ENGINE=InnoDB;
-- One row per step execution within a job execution, including the
-- read/write/skip/rollback counters maintained by the framework.
CREATE TABLE BATCH_STEP_EXECUTION (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT NOT NULL,
STEP_NAME VARCHAR(100) NOT NULL,
JOB_EXECUTION_ID BIGINT NOT NULL,
START_TIME DATETIME NOT NULL ,
END_TIME DATETIME DEFAULT NULL ,
STATUS VARCHAR(10) ,
COMMIT_COUNT BIGINT ,
READ_COUNT BIGINT ,
FILTER_COUNT BIGINT ,
WRITE_COUNT BIGINT ,
READ_SKIP_COUNT BIGINT ,
WRITE_SKIP_COUNT BIGINT ,
PROCESS_SKIP_COUNT BIGINT ,
ROLLBACK_COUNT BIGINT ,
EXIT_CODE VARCHAR(2500) ,
EXIT_MESSAGE VARCHAR(2500) ,
LAST_UPDATED DATETIME,
constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ENGINE=InnoDB;
-- Serialized ExecutionContext of a step execution (restart state).
-- SHORT_CONTEXT holds the truncated string form; SERIALIZED_CONTEXT the full blob.
CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT TEXT ,
constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)
references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)
) ENGINE=InnoDB;
-- Serialized ExecutionContext of a job execution (restart state).
-- Mirrors BATCH_STEP_EXECUTION_CONTEXT at the job level.
CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT TEXT ,
constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ENGINE=InnoDB;
-- Sequence emulation for step execution ids (MySQL has no native sequences).
CREATE TABLE BATCH_STEP_EXECUTION_SEQ (
ID BIGINT NOT NULL,
UNIQUE_KEY CHAR(1) NOT NULL,
constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)
) ENGINE=InnoDB;
-- Seed the single sequence row; the NOT EXISTS guard makes this idempotent.
INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_STEP_EXECUTION_SEQ);
-- Sequence emulation for job execution ids.
CREATE TABLE BATCH_JOB_EXECUTION_SEQ (
ID BIGINT NOT NULL,
UNIQUE_KEY CHAR(1) NOT NULL,
constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)
) ENGINE=InnoDB;
-- Seed the single sequence row; the NOT EXISTS guard makes this idempotent.
INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ);
-- Sequence emulation for job instance ids.
CREATE TABLE BATCH_JOB_SEQ (
ID BIGINT NOT NULL,
UNIQUE_KEY CHAR(1) NOT NULL,
constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)
) ENGINE=InnoDB;
-- Seed the single sequence row; the NOT EXISTS guard makes this idempotent.
INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ);
公共Reader:继承FlatFileItemReader实现的通用读文件类。
package com.sl.common;
import com.sl.common.CommonConstants;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DefaultFieldSetFactory;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.core.io.FileSystemResource;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * Generic CSV reader built on {@link FlatFileItemReader}.
 *
 * <p>Reads {@code <SimpleName>.csv} from the input directory, tokenizes each
 * comma-delimited line, and maps the columns onto a new instance of the given
 * entity class via its bean properties. Column names are derived by reflection
 * from the entity's non-static declared fields, in declaration order.
 *
 * <p>Restored the type parameter {@code <T>}: callers instantiate this class
 * with the diamond operator, which requires a generic type.
 *
 * @author shuliangzhao
 * @Title: CommonItemReader
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 16:30
 */
public class CommonFileItemReader<T> extends FlatFileItemReader<T> {

    /** Previously hard-coded input directory, kept as the default. */
    private static final String DEFAULT_INPUT_DIR = "D:\\aplus\\shuqian\\target\\";

    private final FileSystemResource fileSystemResource;

    /**
     * Reads {@code <SimpleName>.csv} from the default input directory.
     *
     * @param clz entity class the CSV rows are mapped to
     */
    public CommonFileItemReader(Class<T> clz) {
        this(clz, DEFAULT_INPUT_DIR);
    }

    /**
     * Reads {@code <SimpleName>.csv} from the given directory.
     *
     * @param clz      entity class the CSV rows are mapped to
     * @param inputDir directory containing the CSV file (must end with a separator)
     */
    public CommonFileItemReader(Class<T> clz, String inputDir) {
        setEncoding(CommonConstants.ENCODING_READ);
        fileSystemResource = new FileSystemResource(inputDir + clz.getSimpleName() + ".csv");
        setResource(fileSystemResource);

        // Tokenizer: comma-delimited, columns named after the entity's fields.
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setFieldSetFactory(new DefaultFieldSetFactory());
        tokenizer.setNames(instanceFieldNames(clz));
        tokenizer.setDelimiter(",");

        // Map each tokenized line onto a fresh entity instance by property name.
        BeanWrapperFieldSetMapper<T> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(clz);

        DefaultLineMapper<T> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        setLineMapper(lineMapper);
    }

    /**
     * Names of all non-static fields declared directly on the class, in
     * declaration order (must match the CSV column order).
     * NOTE(review): getDeclaredFields() ignores inherited fields — confirm
     * entity classes declare every CSV column themselves.
     */
    private static String[] instanceFieldNames(Class<?> clz) {
        List<String> names = new ArrayList<>();
        for (Field field : clz.getDeclaredFields()) {
            if (!Modifier.isStatic(field.getModifiers())) {
                names.add(field.getName());
            }
        }
        return names.toArray(new String[0]);
    }
}
公共的Processor:实现ItemProcessor接口的通用处理类。
package com.sl.common;
import org.springframework.batch.item.ItemProcessor;
import javax.annotation.PostConstruct;
import java.lang.reflect.ParameterizedType;
/**
 * Generic base processor: creates a fresh output instance per item and
 * delegates field mapping to the subclass's {@link #processor(Object, Object)}.
 *
 * <p>Restored the type parameters {@code <I, O>}: the original body already
 * referenced {@code I} and {@code O}, which only compiles on a generic class.
 * The concrete types are captured from the subclass's generic superclass
 * declaration, so subclasses must bind both parameters directly
 * (e.g. {@code extends CommonProcessor<People, Student>}).
 *
 * @param <I> input item type read by the step
 * @param <O> output item type; must have an accessible no-arg constructor
 * @author shuliangzhao
 * @Title: CommonProcessor
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 16:39
 */
public abstract class CommonProcessor<I, O> implements ItemProcessor<I, O> {

    private Class<I> input;
    private Class<O> output;

    /** Captures the subclass's actual type arguments after construction. */
    @PostConstruct
    @SuppressWarnings("unchecked")
    public void init() {
        // Hoisted: the original resolved getGenericSuperclass() twice.
        ParameterizedType superType = (ParameterizedType) getClass().getGenericSuperclass();
        input = (Class<I>) superType.getActualTypeArguments()[0];
        output = (Class<O>) superType.getActualTypeArguments()[1];
    }

    /**
     * Instantiates the output type reflectively and lets the subclass fill it.
     *
     * @param item the input item (never transformed to null here)
     * @return a new, populated output instance
     * @throws Exception if the output type cannot be instantiated
     */
    @Override
    public O process(I item) throws Exception {
        // getDeclaredConstructor().newInstance() replaces the deprecated
        // Class.newInstance(); it no longer swallows constructor exceptions.
        O result = output.getDeclaredConstructor().newInstance();
        processor(result, item);
        return result;
    }

    /** Subclass hook: copy/convert fields from the input onto the output. */
    public abstract void processor(O o, I i);
}
公共Writer:继承FlatFileItemWriter的通用写文件类。
package com.sl.common;
import com.sl.common.CommonConstants;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.core.io.FileSystemResource;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * Generic CSV writer built on {@link FlatFileItemWriter}.
 *
 * <p>Writes each item as one comma-delimited line to
 * {@code <SimpleName>.csv} in the output directory. Column values are
 * extracted from the entity's bean properties, named after its non-static
 * declared fields in declaration order.
 *
 * <p>Restored the type parameter {@code <T>}: callers instantiate this class
 * with the diamond operator, which requires a generic type.
 *
 * @author shuliangzhao
 * @Title: CommonFileItemWriter
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 16:48
 */
public class CommonFileItemWriter<T> extends FlatFileItemWriter<T> {

    /** Previously hard-coded output directory, kept as the default. */
    private static final String DEFAULT_OUTPUT_DIR = "D:\\aplus\\shuqian\\source\\";

    private final FileSystemResource fileSystemResource;

    /**
     * Writes {@code <SimpleName>.csv} into the default output directory.
     *
     * @param clz entity class whose properties form the CSV columns
     */
    public CommonFileItemWriter(Class<T> clz) {
        this(clz, DEFAULT_OUTPUT_DIR);
    }

    /**
     * Writes {@code <SimpleName>.csv} into the given directory.
     *
     * @param clz       entity class whose properties form the CSV columns
     * @param outputDir directory to write into (must end with a separator)
     */
    public CommonFileItemWriter(Class<T> clz, String outputDir) {
        // Extract one value per non-static field via the item's bean properties.
        BeanWrapperFieldExtractor<T> fieldExtractor = new BeanWrapperFieldExtractor<>();
        fieldExtractor.setNames(instanceFieldNames(clz));
        fieldExtractor.afterPropertiesSet();

        DelimitedLineAggregator<T> lineAggregator = new DelimitedLineAggregator<>();
        lineAggregator.setDelimiter(",");
        lineAggregator.setFieldExtractor(fieldExtractor);

        setName(clz.getSimpleName());
        // NOTE(review): reuses the read-side encoding constant (GBK) for writing.
        setEncoding(CommonConstants.ENCODING_READ);
        fileSystemResource = new FileSystemResource(outputDir + clz.getSimpleName() + ".csv");
        setResource(fileSystemResource);
        setLineAggregator(lineAggregator);
    }

    /**
     * Names of all non-static fields declared directly on the class, in
     * declaration order (defines the CSV column order).
     * NOTE(review): getDeclaredFields() ignores inherited fields — confirm
     * entity classes declare every CSV column themselves.
     */
    private static String[] instanceFieldNames(Class<?> clz) {
        List<String> names = new ArrayList<>();
        // Was a fully-qualified java.lang.reflect.Field despite the import.
        for (Field field : clz.getDeclaredFields()) {
            if (!Modifier.isStatic(field.getModifiers())) {
                names.add(field.getName());
            }
        }
        return names.toArray(new String[0]);
    }
}
package com.sl.config;
import com.sl.common.CommonFileItemWriter;
import com.sl.entity.People;
import com.sl.entity.Student;
import com.sl.common.CommonFileItemReader;
import com.sl.processor.StudentProcessor;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Batch wiring for the student job: read a People CSV, convert each People
 * item to a Student, and write the Student CSV.
 *
 * @author shuliangzhao
 * @Title: UserConfiguration
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 17:06
 */
@Configuration
@EnableBatchProcessing
public class StudentConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private StudentProcessor studentProcessor;

    /** Single-step job; no restart/incrementer configuration. */
    @Bean
    public Job studentJob() {
        return jobBuilderFactory.get("studentJob")
                .start(studentStep())
                .build();
    }

    /** Chunk-oriented step: commit interval of 10 items per transaction. */
    @Bean
    public Step studentStep() {
        return stepBuilderFactory.get("studentStep")
                // Typed chunk() replaces the raw call so reader/processor/writer
                // types are checked by the compiler.
                .<People, Student>chunk(10)
                .reader(peopleCommonFileItemReader())
                .processor(studentProcessor)
                .writer(studentCommonFileItemWriter())
                .build();
    }

    /** Reader for People.csv; step-scoped so a fresh reader is built per step run. */
    @Bean
    @StepScope
    public CommonFileItemReader<People> peopleCommonFileItemReader() {
        return new CommonFileItemReader<>(People.class);
    }

    /** Writer for Student.csv; now a @Bean for consistency with the reader. */
    @Bean
    public CommonFileItemWriter<Student> studentCommonFileItemWriter() {
        return new CommonFileItemWriter<>(Student.class);
    }
}
package com.sl.processor;
import com.sl.common.CommonProcessor;
import com.sl.entity.People;
import com.sl.entity.Student;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.stereotype.Component;
/**
 * Converts a People item into a Student item for the student job.
 *
 * <p>Binds {@code CommonProcessor<People, Student>} explicitly — the raw
 * supertype would not match the typed {@code processor(Student, People)}
 * override, and the base class resolves these type arguments reflectively.
 *
 * @author shuliangzhao
 * @Title: PeopleProcessor
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 17:12
 */
@Component
@StepScope
public class StudentProcessor extends CommonProcessor<People, Student> {

    /** Copies the source People fields onto the pre-created Student. */
    @Override
    public void processor(Student student, People people) {
        student.setName(people.getName());
        // NOTE(review): the entity accessors spell "Adress" and "IdCar" —
        // presumably typos for address/idCard in the entity classes; the calls
        // here must match whatever the entities actually declare.
        student.setAddress(people.getAdress());
        student.setAge(people.getAge());
        student.setIdCar(people.getIdCard());
    }
}
package com.sl.common;
/**
 * Shared constants for the batch readers/writers.
 *
 * @author shuliangzhao
 * @Title: CommonConstants
 * @ProjectName spring-boot-learn
 * @date 2019/9/7 16:34
 */
public final class CommonConstants {

    /** Charset used for reading (and currently also writing) the CSV files. */
    public static final String ENCODING_READ = "GBK";

    // Constants holder — not meant to be instantiated.
    private CommonConstants() {
    }
}
package com.sl;
import org.springframework.batch.core.*;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import java.util.Date;
/**
 * Boot entry point: starts the Spring context, then launches the studentJob
 * once and prints its final status.
 */
@SpringBootApplication
public class Application {

    public static void main(String[] args) throws Exception {
        ApplicationContext context = SpringApplication.run(Application.class, args);
        launchStudentJob(context);
    }

    /** Looks up the launcher and the studentJob bean and runs the job once. */
    private static void launchStudentJob(ApplicationContext ctx) throws Exception {
        JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
        // Fresh Date parameter each launch -> new JobInstance every run,
        // avoiding JobInstanceAlreadyCompleteException on restarts.
        JobParameters jobParameters = new JobParametersBuilder()
                .addDate("test", new Date())
                .toJobParameters();
        // The JobExecution was previously assigned and discarded; report it instead.
        JobExecution execution = jobLauncher.run(ctx.getBean("studentJob", Job.class), jobParameters);
        System.out.println("studentJob finished with status: " + execution.getStatus());
    }
}
运行完job后,就可以在指定目录看到写入的文件。以上代码比较多,相关基础理论可以通过阅读SpringBatch官方文档来学习。
后面章节会介绍springbatch的其他功能,比如读表入库、skip机制、异常重跑等功能点。