public class Constant {
    static final String LOG_SOURCE_DIR = "LOG_SOURCE_DIR";
    static final String LOG_TOUPLOAD_DIR = "LOG_TOUPLOAD_DIR";
    static final String LOG_BACKUP_BASE_DIR = "LOG_BACKUP_BASE_DIR";
    static final String LOG_BACKUP_TIMEOUT = "LOG_BACKUP_TIMEOUT";
    static final String LOG_LEGAL_PREFIX = "LOG_LEGAL_PREFIX";
    static final String HDFS_URI = "HDFS_URI";
    static final String HDFS_DEST_BASE_DIR = "HDFS_DEST_BASE_DIR";
    static final String HDFS_FILE_PREFIX = "HDFS_FILE_PREFIX";
    static final String HDFS_FILE_SUFFIX = "HDFS_FILE_SUFFIX";
}
PropertyHolderLazy.java (lazy-initialization singleton that loads the configuration)
public class PropertyHolderLazy {
    // volatile is required for double-checked locking to be safe
    private static volatile Properties prop = null;

    public static Properties getProps() {
        if (prop == null) {
            synchronized (PropertyHolderLazy.class) {
                if (prop == null) {
                    prop = new Properties();
                    try {
                        prop.load(PropertyHolderLazy.class.getClassLoader()
                                .getResourceAsStream("collect.properties"));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return prop;
    }
}
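For reference, a minimal collect.properties on the classpath might look like the following; the paths and values here are illustrative assumptions, not part of the original project:
LOG_SOURCE_DIR=/logs/app/
LOG_TOUPLOAD_DIR=/logs/toupload/
LOG_BACKUP_BASE_DIR=/logs/backup/
LOG_BACKUP_TIMEOUT=24
LOG_LEGAL_PREFIX=access.log.
HDFS_URI=hdfs://node1:9000
HDFS_DEST_BASE_DIR=/logs/
HDFS_FILE_PREFIX=access_
HDFS_FILE_SUFFIX=.log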
3.3. Log collection module
CollectTask.java
public class CollectTask extends TimerTask {
    @Override
    public void run() {
        // scan the log source directory
        SimpleDateFormat sdf = new SimpleDateFormat("yy-MM-dd-HH");
        String date = sdf.format(new Date());
        Properties props = PropertyHolderLazy.getProps();
        File srcDir = new File(props.getProperty(Constant.LOG_SOURCE_DIR));
        File[] listFiles = srcDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                // only collect files whose names carry the configured prefix
                return name.startsWith(props.getProperty(Constant.LOG_LEGAL_PREFIX));
            }
        });
        if (listFiles == null || listFiles.length == 0) {
            return;
        }
        System.out.println(Arrays.toString(listFiles));
        // move the matched files into a temporary "to upload" directory
        try {
            File toupload = new File(props.getProperty(Constant.LOG_TOUPLOAD_DIR));
            for (File file : listFiles) {
                // FileUtils is org.apache.commons.io.FileUtils
                FileUtils.moveFileToDirectory(file, toupload, true);
            }
            // upload each file in the "to upload" directory to the HDFS destination path,
            // then move the uploaded file into the backup directory
            FileSystem fs = FileSystem.get(new URI(props.getProperty(Constant.HDFS_URI)), new Configuration(), "ws");
            File[] touploadFiles = toupload.listFiles();
            Path hdfsDestPath = new Path(props.getProperty(Constant.HDFS_DEST_BASE_DIR) + date);
            if (!fs.exists(hdfsDestPath)) {
                fs.mkdirs(hdfsDestPath);
            }
            for (File file : touploadFiles) {
                fs.copyFromLocalFile(new Path(file.getAbsolutePath()),
                        new Path(hdfsDestPath, props.getProperty(Constant.HDFS_FILE_PREFIX)
                                + UUID.randomUUID() + props.getProperty(Constant.HDFS_FILE_SUFFIX)));
                FileUtils.moveFileToDirectory(file, new File(props.getProperty(Constant.LOG_BACKUP_BASE_DIR) + date + "/"), true);
            }
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
3.4. Expired log cleanup module
DeleOvertimeFile.java
public class DeleOvertimeFile extends TimerTask {
    @Override
    public void run() {
        // backup directories are named with the same "yy-MM-dd-HH" pattern used by CollectTask
        SimpleDateFormat sdf = new SimpleDateFormat("yy-MM-dd-HH");
        long now = new Date().getTime();
        Properties props = PropertyHolderLazy.getProps();
        File backupBaseDir = new File(props.getProperty(Constant.LOG_BACKUP_BASE_DIR));
        File[] backupDirs = backupBaseDir.listFiles();
        if (backupDirs == null) {
            return;
        }
        try {
            for (File dir : backupDirs) {
                // parse the timestamp out of the directory name and delete anything older than 24 hours
                long time = sdf.parse(dir.getName()).getTime();
                if (now - time > 24 * 60 * 60 * 1000L) {
                    FileUtils.deleteDirectory(dir);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
3.5. Main entry point
public class DataConllectMain {
    public static void main(String[] args) {
        // load the configuration up front
        Properties props = PropertyHolderLazy.getProps();
        Timer timer = new Timer();
        // collection task
        timer.schedule(new CollectTask(), 0, 24 * 60 * 60 * 1000L);
        // expired-backup cleanup task
        timer.schedule(new DeleOvertimeFile(), 0, 24 * 60 * 60 * 1000L);
    }
}
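Note that java.util.Timer runs its tasks on a single non-daemon background thread, so the process stays alive after main returns and both tasks keep firing at the configured period.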
Mapper.java --- the interface each business-specific mapper implements
public interface Mapper {
    void map(String line, Context context);
}
Context.java --- a simple cache implemented with a HashMap
public class Context {
    private HashMap<Object, Object> contextMap = new HashMap<>();
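    // The rest of the class is truncated in the source; the methods below are a minimal
    // reconstruction inferred from how Context is used in WordCountMapper and HdfsWordCount.
    public void write(Object key, Object value) {
        contextMap.put(key, value);
    }

    public Object getValue(Object key) {
        return contextMap.get(key);
    }

    public HashMap<Object, Object> getContextMap() {
        return contextMap;
    }
}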
WordCountMapper.java --- implements the Mapper interface and carries the actual business logic
public class WordCountMapper implements Mapper {
    @Override
    public void map(String line, Context context) {
        String[] words = line.split(" ");
        for (String word : words) {
            Object value = context.getValue(word);
            if (null == value) {
                // first occurrence of this word
                context.write(word, 1);
            } else {
                // increment the existing count
                int v = (int) value;
                context.write(word, v + 1);
            }
        }
    }
}
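For example, mapping the single line "hello world hello" leaves {hello=2, world=1} in the Context.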
HdfsWordCount.java --- the main method that drives the word count
public class HdfsWordCount {
    public static void main(String[] args) throws Exception {
        // Initialization: load job.properties and instantiate the configured Mapper implementation
        Properties properties = new Properties();
        properties.load(HdfsWordCount.class.getClassLoader().getResourceAsStream("job.properties"));
        String mapperClass = properties.getProperty("MAPPER_CLASS");
        Mapper mapper = (Mapper) Class.forName(mapperClass).newInstance();
        Context context = new Context();

        // Data processing: read every file under INPUT_PATH line by line and feed it to the mapper
        FileSystem fs = FileSystem.get(new URI("hdfs://node1:9000"), new Configuration(), "ws");
        RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path(properties.getProperty("INPUT_PATH")), false);
        while (iter.hasNext()) {
            LocatedFileStatus file = iter.next();
            FSDataInputStream in = fs.open(file.getPath());
            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
            String line = null;
            while ((line = bufferedReader.readLine()) != null) {
                // invoke the business logic
                mapper.map(line, context);
            }
            bufferedReader.close();
        }

        // Output: write the accumulated counts to OUTPUT_PATH
        HashMap<Object, Object> contextMap = context.getContextMap();
        // this hard-coded directory should match the parent directory of OUTPUT_PATH
        Path outpath = new Path("/wordcount/output/");
        if (!fs.exists(outpath)) {
            fs.mkdirs(outpath);
        }
        FSDataOutputStream out = fs.create(new Path(properties.getProperty("OUTPUT_PATH")));
        Set<Map.Entry<Object, Object>> entries = contextMap.entrySet();
        for (Map.Entry<Object, Object> entry : entries) {
            out.write((entry.getKey().toString() + "\t" + entry.getValue() + "\n").getBytes());
        }
        out.close();
        fs.close();
    }
}
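For completeness, a job.properties along these lines would drive the program; the paths are illustrative assumptions, and MAPPER_CLASS must be the fully qualified class name if WordCountMapper lives in a package:
MAPPER_CLASS=WordCountMapper
INPUT_PATH=/wordcount/input/
OUTPUT_PATH=/wordcount/output/result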