Log Statistics Platform, Part 2

In ImportTask we implement downloading the log files over FTP and then batch-importing them into the database. The task is really just a Runnable implementation:
import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;

public class ImportTask implements Runnable {
    final static Logger logger = LoggerFactory.getLogger(ImportTask.class);
    private Element taskEle;
    private String taskName;

    /**
     * Construct for ImportTask.java.
     */
    public ImportTask(Element ele) {
        this.taskEle = ele;
        this.taskName = taskEle.getAttribute("name");
    }

    protected void doTask(Date preDate) {
        try {
            // batch importer: set the data source and read the task config
            BatchImport2DB bidb = new BatchImport2DB();
            bidb.setDataSource(GloalDataUtil.getDataSource());
            bidb.init(taskEle);
            // download the log files; DownloadFileLog reads its parameters from the task config
            DownloadFileLog dflog = new DownloadFileLog();
            dflog.init(taskEle);
            long logStart = System.currentTimeMillis();
            // returns the names of the log files downloaded for the given date;
            // downloadLogFile does the FTP download via Apache's library
            String[] arrFileName = dflog.downloadLogFile(preDate);
            long logEnd = System.currentTimeMillis();
            logger.info(taskName + " finding matching log files took (ms)[ " + (logEnd - logStart) + " ]");
            for (String fileName : arrFileName) {
                long batchStart = System.currentTimeMillis();
                // instantiate the CSV reader class configured for this task
                ICSVReader csvReader = (ICSVReader) Class.forName(taskEle.getAttribute("csvReader")).newInstance();
                String csvpattern = taskEle.getAttribute("csvpattern");
                // batch-import the parsed lines via BatchImport2DB
                if (csvReader.beginReader(fileName, csvpattern)) bidb.batchImport(csvReader);
                long batchCost = (System.currentTimeMillis() - batchStart);
                logger.info("importing [ " + fileName + " ] into the database took [ " + batchCost + " ]ms");
            }
        } catch (Exception e) {
            logger.error(e.toString(), e);
        }
    }

    public void run() {
        // only used for testing
        // doTask(new Date(1216098921590L - 24 * 60 * 60 * 1000L));
        // the task processes the logs of the day before the current date
        doTask(new Date(System.currentTimeMillis() - 24 * 60 * 60 * 1000L));
    }
}
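The post doesn't show how ImportTask actually gets scheduled. Since it is a Runnable, one plausible way to fire it once a day is a ScheduledExecutorService; the class and method names below are illustrative, not the author's original driver code:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.w3c.dom.Element;

// Hypothetical driver: the original scheduling code isn't shown in the post.
public class TaskScheduler {
    public static void schedule(Element taskEle) {
        ScheduledExecutorService exec = Executors.newSingleThreadScheduledExecutor();
        // run once every 24 hours; the task itself computes "yesterday" from the current time
        exec.scheduleAtFixedRate(new ImportTask(taskEle), 0, 24, TimeUnit.HOURS);
    }
}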

This class drives the SFTP/FTP download and then imports the downloaded files into the database: the configured csvReader parses each file, and BatchImport2DB writes the rows. I won't show the reading code here; it is nothing more than stream I/O, reading line by line.
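The ICSVReader interface itself isn't shown either, but its contract can be inferred from the three calls the code above makes: beginReader(fileName, pattern), readLine(), and lineIsNotNull(). A minimal line-splitting sketch might look like this; the field names and split logic are my assumptions, not the author's implementation:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

// Hypothetical ICSVReader implementation, inferred from the calls in doTask/batchImport.
public class SimpleCSVReader implements ICSVReader {
    private BufferedReader reader;
    private String pattern;        // separator regex taken from the task config
    private String[] currentLine;  // becomes null once the file is exhausted

    // open the log file; return false if it cannot be read
    public boolean beginReader(String fileName, String csvpattern) {
        try {
            this.reader = new BufferedReader(new FileReader(fileName));
            this.pattern = csvpattern;
            this.currentLine = new String[0]; // "not null" until EOF
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    // read the next line and split it into fields
    public String[] readLine() {
        try {
            String line = reader.readLine();
            if (line == null) {
                currentLine = null; // signal exhaustion
                reader.close();
            } else {
                currentLine = line.split(pattern);
            }
        } catch (IOException e) {
            currentLine = null;
        }
        return currentLine;
    }

    // true while there are still lines to consume
    public boolean lineIsNotNull() {
        return currentLine != null;
    }
}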
BatchImport2DB.java
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.InterruptibleBatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class BatchImport2DB {
    // column definitions parsed from the task config; drives the SQL parameter binding
    private List<Field> fieldList = new ArrayList<Field>();
    private String sql;
    private static int BATCH_SIZE = 100;
    protected static final Logger logger = LoggerFactory
            .getLogger(BatchImport2DB.class);

    /**
     * The data source is injected by the Spring container via setter IoC.
     */
    private JdbcTemplate jdbcTemplate; // Spring best practice

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public void init(Element ele) {
        // read the insert SQL statement and the commit batch size
        Element sqlEle = (Element) ele.getElementsByTagName("sql").item(0);
        sql = sqlEle.getAttribute("insert");
        BATCH_SIZE = Integer.parseInt(sqlEle.getAttribute("maxCommitNumber"));
        Element tabEle = (Element) XmlManager.querySingle("table", ele);
        NodeList nlc = tabEle.getElementsByTagName("column");
        for (int i = 0; i < nlc.getLength(); i++) {
            Element nmEle = (Element) nlc.item(i);
            // add each column definition to the field list
            String name = nmEle.getAttribute("name");
            String type = nmEle.getAttribute("type");
            int csvindex = Integer.parseInt(nmEle.getAttribute("csvindex"));
            String format = nmEle.getAttribute("format");
            fieldList.add(new Field(name, type, csvindex, format));
        }
    }

    /**
     * Inserts the parsed CSV rows with Spring's JdbcTemplate batch update.
     * 
     * @param csvReader
     */
    public void batchImport(final ICSVReader csvReader) {
        // binds one CSV line per call onto the prepared statement parameters
        InterruptibleBatchPreparedStatementSetter psset = new InterruptibleBatchPreparedStatementSetter() {
            public void setValues(PreparedStatement ps, int pos)
                    throws SQLException {
                try {
                    int leng = fieldList.size();
                    String[] arrvalue = null;
                    // skip lines with fewer fields than the table expects
                    do {
                        arrvalue = csvReader.readLine();
                        if (!csvReader.lineIsNotNull()) {
                            return;
                        }
                    } while (arrvalue.length < leng);
                    // map the parsed fields onto the statement parameters by type
                    for (int i = 0; i < leng; i++) {
                        Field f = fieldList.get(i);
                        if ("datetime".equals(f.type)) {
                            ps.setTimestamp(i + 1, StringUtil.parse2Timestamp(
                                    arrvalue[f.csvindex], f.format));
                        } else if ("int".equals(f.type)) {
                            ps.setInt(i + 1, Integer
                                    .parseInt(arrvalue[f.csvindex]));
                        } else if ("long".equals(f.type)) {
                            ps.setLong(i + 1, Long
                                    .parseLong(arrvalue[f.csvindex]));
                        } else if ("text".equals(f.type)) {
                            ps.setString(i + 1, arrvalue[f.csvindex].substring(
                                    0, 1));
                        } else {
                            // default: string column, truncated to fit
                            String content = arrvalue[f.csvindex];
                            if (content.length() > 100) {
                                content = content.substring(0, 99);
                            }
                            ps.setString(i + 1, content);
                        }
                    }
                } catch (SQLException ex) {
                    logger.error(ex.toString(), ex);
                    throw ex;
                } catch (Exception ex) {
                    logger.error(ex.toString(), ex);
                    throw new SQLException(ex.toString());
                }
            }

            public int getBatchSize() {
                return BATCH_SIZE;
            }

            // tells JdbcTemplate whether the current batch is exhausted
            public boolean isBatchExhausted(int pos) {
                return !csvReader.lineIsNotNull();
            }
        };
        // keep firing batches of BATCH_SIZE rows until the reader runs dry
        do {
            jdbcTemplate.batchUpdate(sql, psset);
        } while (csvReader.lineIsNotNull());
    }

    static class Field {
        String format;
        String name;
        String type;
        int csvindex;

        Field(String name, String type, int index, String format) {
            this.name = name;
            this.type = type;
            this.csvindex = index;
            this.format = format;
        }
    }
}
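The post never shows the task configuration that init() parses, but its shape can be inferred from the attributes the code reads ("name", "csvReader", "csvpattern", the "sql" element with "insert" and "maxCommitNumber", and "column" elements under "table"). A sketch of one task entry, with all concrete values invented for illustration, might be:

<!-- hypothetical task entry, reconstructed from the attributes the code reads -->
<task name="access-log" csvReader="com.example.SimpleCSVReader" csvpattern=",">
    <sql insert="INSERT INTO access_log (visit_time, status, url) VALUES (?, ?, ?)"
         maxCommitNumber="100"/>
    <table>
        <column name="visit_time" type="datetime" csvindex="0" format="yyyy-MM-dd HH:mm:ss"/>
        <column name="status"     type="int"      csvindex="1" format=""/>
        <column name="url"        type="string"   csvindex="2" format=""/>
    </table>
</task>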



That covers all of the downloading and importing. Still missing are the SQL Server stored procedures and the chart display in the web UI. The next article will cover only the JFreeChart charts, not the SQL Server stored procedures.
