I have recently been using easypoi to build an Office template export feature. POI blocks while it processes files, so if the data volume grows later (it may never happen) the user experience would suffer badly. I therefore decided to optimize the export with multiple threads. (If you spot mistakes in this article, corrections are welcome.)
public Object produceWords(HttpServletRequest request, HttpServletResponse response, ModelMap modelMap){
    Map result = new HashMap();
    DatabaseUtils db = null;
    try {
        ......
        // Fetch the target data; its size is the number of files to generate
        Map map = (Map) this.getDatasByName("getDataList", new String[]{bgbhs,mbbh,"",""}, false);
        long startTime = System.currentTimeMillis();
        if(map.size()>0 && Integer.parseInt(map.get("O_CODE").toString()) > 0){
            List<Map<String,String>> list = (List<Map<String,String>>) map.get("O_RESULT");
            if(list.size()>0){
                db = new OutDB().getDB();
                for (Map<String,String> item : list) {
                    ......
                    // Generate a single Word file
                    Map wordMap = exportWord(mbbh,BGBH,CPDM,templatepath+templateName,root,ZT,uid);
                    if(wordMap.size()>0 && Integer.parseInt(wordMap.get("O_CODE").toString()) > 0){
                        String filePath = root+ZT+".docx";
                        File word = new File(filePath);
                        if(word.exists()){
                            // File upload/conversion (system business code)
                            FileUploadResult fileUploadResult = LivebosDocUtils.putDocument(tableName, "FJ", BGBH, word);
                            if (!fileUploadResult.isSuccess()) {
                                result.put("O_CODE", "-1");
                                result.put("O_NOTE", "File conversion failed");
                                return result;
                            }else{
                                // Database update
                                String sql = "UPDATE FISP."+tableName+" SET FJLJ=?,SCSJ=SYSDATE WHERE BGBH = ?";
                                db.executeUpdate(sql,new String[]{filePath,BGBH});
                            }
                        }
                    }else{
                        return wordMap;
                    }
                }
            }else{
                result.put("O_CODE", "-1");
                result.put("O_NOTE", "No data returned!");
                return result;
            }
        }else{
            result.put("O_CODE", "-1");
            result.put("O_NOTE", "Failed to fetch data!");
            return result;
        }
        System.out.println(">>>>>>>>>>>>>>>>>>>>>>>Elapsed time: "+(System.currentTimeMillis() - startTime));
    } catch (Exception e) {
        e.printStackTrace();
        result.put("O_CODE", "-1");
        result.put("O_NOTE", e.toString());
        return result;
    }finally {
        if (db != null) {
            db.close();
        }
    }
    result.put("O_CODE", "1");
    result.put("O_NOTE", "Generated successfully");
    return result;
}
Code notes
Workloads generally fall into two categories, CPU-bound and IO-bound. There is not much CPU-bound processing in this code; the main cost is the call to exportWord(mbbh,BGBH,CPDM,templatepath+templateName,root,ZT,uid). Inside exportWord(), POI fills the template with data, which involves a lot of blocking IO, so we can move exportWord() together with the file upload (LivebosDocUtils.putDocument()) into an independent task that runs on its own thread.
private class ExportWordTask implements Callable<Map> {
    private String mbbh;
    private String bgbh;
    private String cpdm;
    private String templateFile;
    private String temDir;
    private String fileName;
    private String uid;
    private String filePath;
    private String tableName;

    private ExportWordTask(String mbbh,String bgbh,String cpdm,String templateFile,String temDir,String fileName,String uid,String filePath,String tableName) {
        this.mbbh = mbbh;
        this.bgbh = bgbh;
        this.cpdm = cpdm;
        this.templateFile = templateFile;
        this.temDir = temDir;
        this.fileName = fileName;
        this.uid = uid;
        this.filePath = filePath;
        this.tableName = tableName;
    }

    @Override
    public Map call() throws Exception {
        Map result = new HashMap();
        Map wordMap = exportWord(mbbh,bgbh,cpdm,templateFile,temDir,fileName,uid);// generate the Word file
        if(wordMap.size()>0 && Integer.parseInt(wordMap.get("O_CODE").toString()) > 0){
            File word = new File(filePath);
            if(word.exists()){
                // File upload/conversion (system business code)
                FileUploadResult fileUploadResult = LivebosDocUtils.putDocument(tableName, "FJ", bgbh, word);
                if (!fileUploadResult.isSuccess()) {
                    result.put("O_CODE", "-1");
                    result.put("O_NOTE", "File conversion failed");
                    return result;
                }
            }
        }else{
            return wordMap;
        }
        result.put("O_CODE", "1");
        result.put("FILEPATH", filePath);
        result.put("BGBH", bgbh);
        return result;
    }
}
public Object produceWordsConcurrent(HttpServletRequest request, HttpServletResponse response, ModelMap modelMap){
    Map result = new HashMap();
    DatabaseUtils db = null;
    ExecutorService executorService = null;
    try {
        ......
        Map map = (Map) this.getDatasByName("getDataList", new String[]{bgbhs,mbbh,"",""}, false);
        long startTime = System.currentTimeMillis();
        if(map.size()>0 && Integer.parseInt(map.get("O_CODE").toString()) > 0){
            List<Map<String,String>> list = (List<Map<String,String>>) map.get("O_RESULT");
            if(list.size()>0){
                String sql = "UPDATE FISP."+tableName+" SET FJLJ=?,SCSJ=SYSDATE WHERE BGBH = ?";
                db = new OutDB().getDB();
                executorService = Executors.newFixedThreadPool(5);
                CompletionService<Map> ecs = new ExecutorCompletionService<Map>(executorService);// collects Futures in completion order
                for (Map<String,String> item : list) {
                    ......
                    ecs.submit(new OutReportAction.ExportWordTask(mbbh,BGBH,CPDM,templatepath+templateName,root,ZT,uid,root+ZT+".docx",tableName));
                }
                for (int i = 0; i < list.size(); ++i) {
                    // Wait at most one minute for the next completed task; give up and report a timeout otherwise
                    Future<Map> future = ecs.poll(1, TimeUnit.MINUTES);
                    if (future == null) {
                        throw new TimeoutException("Word generation timed out");
                    }
                    Map r = future.get();
                    if (r.size()>0 && Integer.parseInt(r.get("O_CODE").toString()) > 0){
                        // Database update
                        db.executeUpdate(sql,new String[]{(String)r.get("FILEPATH"),(String)r.get("BGBH")});
                    }else{
                        return r;
                    }
                }
            }else{
                result.put("O_CODE", "-1");
                result.put("O_NOTE", "No data returned!");
                return result;
            }
        }else{
            result.put("O_CODE", "-1");
            result.put("O_NOTE", "Failed to fetch data!");
            return result;
        }
        System.out.println(">>>>>>>>>>>>>>>>>>>>>>>Elapsed time: "+(System.currentTimeMillis() - startTime));
    } catch (Exception e) {
        e.printStackTrace();
        result.put("O_CODE", "-1");
        result.put("O_NOTE", e.toString());
        return result;
    }finally {
        if (db != null) db.close();
        if (executorService != null) executorService.shutdown();
    }
    result.put("O_CODE", "1");
    result.put("O_NOTE", "Generated successfully");
    return result;
}
Core logic
private class ExportWordTask implements Callable<Map> {
    private ExportWordTask(......) {
    }
    @Override
    public Map call() throws Exception {
        Map result = new HashMap();
        ......
        return result;
    }
}

ExecutorService executorService = Executors.newFixedThreadPool(5);
CompletionService<Map> ecs = new ExecutorCompletionService<Map>(executorService);// collects Futures in completion order
for (Map<String,String> item : list) {
    ......
    ecs.submit(new ExportWordTask(......));
}
for (int i = 0; i < list.size(); ++i) {
    // Wait at most one minute for the next completed task; give up and report a timeout otherwise
    Future<Map> future = ecs.poll(1, TimeUnit.MINUTES);
    if (future == null) {
        throw new TimeoutException("Word generation timed out");
    }
    Map r = future.get();
    ......
}
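For readers who want to try the pattern on its own, here is a minimal, self-contained sketch of the same submit-then-collect flow. Everything in it is illustrative and not part of the project code: the class name CompletionServiceSketch, the eight fake "documents", and the Thread.sleep() calls that stand in for the blocking IO done by exportWord() and the upload are all assumptions for the demo.

import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(5);
        CompletionService<String> ecs = new ExecutorCompletionService<String>(pool);
        try {
            // Submit eight fake "documents"; Thread.sleep() simulates the blocking IO
            // that exportWord() and the file upload would perform.
            for (int i = 0; i < 8; i++) {
                final int docNo = i;
                ecs.submit(new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        Thread.sleep(500 + (docNo % 3) * 200L);
                        return "doc-" + docNo + ".docx";
                    }
                });
            }
            // Collect results in completion order, bounding each wait so a hung
            // task cannot block the caller forever.
            for (int i = 0; i < 8; i++) {
                Future<String> f = ecs.poll(1, TimeUnit.MINUTES);
                if (f == null) {
                    throw new TimeoutException("no result within one minute");
                }
                System.out.println("finished: " + f.get());
            }
        } finally {
            pool.shutdown();
        }
    }
}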
Code notes
1. To avoid creating threads over and over and to cap the number of threads, we use a fixed-size pool created with Executors.newFixedThreadPool().
2. The tasks are mostly IO-bound and the Linux server has two cores, so the pool size is set to 5 (2 * 2 + 1); opening many more threads would only add context-switching overhead without getting more work done (a sizing and shutdown sketch follows this list).
3. To keep the database work thread-safe, the tasks never touch the connection themselves. We collect each task's Future through an ExecutorCompletionService, which hands results back in completion order (so a slow task at the head of the list does not hold the others up), and run the UPDATE on the calling thread.
4. Waiting for results is bounded by a timeout (poll(1, TimeUnit.MINUTES)), so a hung task cannot block the request indefinitely and drag the system down.
5. Always close streams and shut down the thread pool; otherwise you leak memory and threads.
6. This method is actually controller-layer code (the framework is wired up in XML, so the configuration does not appear in the code). To keep it thread-safe we can either make the bean non-singleton (@Scope(value = "prototype")) or add transaction control (@Transactional); the former costs memory, the latter costs throughput.
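As a side note to points 2 and 5, here is a minimal sketch of deriving the pool size from the number of cores instead of hard-coding 5, and of shutting the pool down gracefully. The class name PoolSizingSketch, the 2 * cores + 1 heuristic for IO-bound work and the two-minute grace period are assumptions for illustration, not part of the original project.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PoolSizingSketch {
    public static void main(String[] args) throws InterruptedException {
        // Derive the pool size from the machine instead of hard-coding 5;
        // 2 * cores + 1 is a common starting point for mostly IO-bound tasks.
        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService pool = Executors.newFixedThreadPool(2 * cores + 1);

        // ... submit ExportWordTask instances here ...

        // Graceful shutdown: stop accepting new tasks, give running tasks some
        // time to finish, then force-cancel anything still hanging.
        pool.shutdown();
        if (!pool.awaitTermination(2, TimeUnit.MINUTES)) {
            pool.shutdownNow();
        }
    }
}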
Comparison
Original code
16 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 13701
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 13847
64 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 53385
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 55818
256 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 319328
Optimized code
16 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 5124
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 4790
64 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 14984
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 13403
256 files
>>>>>>>>>>>>>>>>>>>>>>>Elapsed time: 73073
You can clearly see that the thread-pool version is roughly 3x faster (times above are in milliseconds), and the gap widens as the number of files grows: at 256 files the difference reaches about 4.3x.
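Working the ratios out from the first run of each pair: 13701 / 5124 ≈ 2.67 for 16 files, 53385 / 14984 ≈ 3.56 for 64 files, and 319328 / 73073 ≈ 4.37 for 256 files.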