How to work with Hadoop from Java on Windows

First, the setup:

Hadoop version 2.4.x; development is done locally in Eclipse on Windows, operating a Hadoop system that runs inside a virtual machine.

The first attempt will probably produce quite a few errors; if you run into something you cannot resolve, post it and I can help.
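A very common first problem on Windows is the well-known "Failed to locate the winutils binary" error, plus permission errors when the client connects as the wrong HDFS user. The full class below handles both by setting hadoop.home.dir and HADOOP_USER_NAME before touching HDFS. If you just want to verify connectivity first, here is a minimal sketch (the class name ConnectivityCheck is made up for illustration; the host hadoop01:9000, the local path E:/hadoop and the user firefly are the same values used in the full class below):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ConnectivityCheck {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop01:9000/");
        // Without these two properties the first run on Windows often fails:
        // hadoop.home.dir should point at a local Hadoop unpack (containing bin\winutils.exe),
        // and HADOOP_USER_NAME decides which HDFS user the requests run as.
        System.setProperty("hadoop.home.dir", "E:/hadoop");
        System.setProperty("HADOOP_USER_NAME", "firefly");

        FileSystem fs = FileSystem.get(URI.create("hdfs://hadoop01:9000/"), conf);
        System.out.println("Connected to: " + fs.getUri());
        fs.close();
    }
}

If this prints the NameNode URI without an exception, the environment is set up correctly and the full class below should work as well.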

package cn.itcast.hadoop.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class MyHdfsDao {

    static String hdfsPath = "hdfs://hadoop01:9000/";

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop01:9000/");
        System.setProperty("hadoop.home.dir", "E:/hadoop");   // location of the local Hadoop files
        System.setProperty("HADOOP_USER_NAME", "firefly");    // user name used for HDFS requests

        ls("/", conf);                                         // list a directory
        String javacreat = "/javaCreat/haha";
        // mkdirs(javacreat, conf);                            // create a directory
        // rmr(javacreat, conf);                               // delete a path
        // copyToHDFS("c:/movies.xml", "/movies.xml", conf);   // upload a file
        // cat("/movies.xml", conf);                           // print a file's content
        // downLoad("c:/movies2.xml", "/movies.xml", conf);    // download a file
        // createFile("Hello world!!", "/movies33.xml", conf); // create a file with the given content

        Path oldName = new Path("/movies33.xml");              // existing file
        Path newName = new Path("/movies44.xml");              // new file name
        rename(oldName, newName, conf);                        // rename
    }
	
	
    private static void rename(Path oldName, Path newName, Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.rename(oldName, newName);
        fs.close();
    }


    private static void createFile(String content, String remote, Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        byte[] buff = content.getBytes();
        FSDataOutputStream os = null;
        try {
            os = fs.create(new Path(remote));
            os.write(buff, 0, buff.length);
            System.out.println("Create: " + remote);
        } finally {
            if (os != null)
                os.close();
            fs.close();
        }
    }


    private static void downLoad(String local, String remote, Configuration conf) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }


    private static void cat(String remoteFile, Configuration conf) throws IOException {
        Path path = new Path(remoteFile);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FSDataInputStream fsdis = null;
        System.out.println("cat: " + remoteFile);
        try {
            fsdis = fs.open(path);
            IOUtils.copyBytes(fsdis, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(fsdis);
            fs.close();
        }
    }


    private static void copyToHDFS(String local, String remote, Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }


    private static void rmr(String javacreat, Configuration conf) throws IOException {
        Path path = new Path(javacreat);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.delete(path, true); // recursive delete; deleteOnExit() would only remove the path when the FileSystem is closed
        System.out.println("Delete: " + javacreat);
        fs.close();
    }


    private static void mkdirs(String javacreat, Configuration conf) throws IOException {
        Path path = new Path(javacreat);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + javacreat);
        }
        fs.close();
    }


    public static void ls(String folder, Configuration conf) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        for (FileStatus f : list) {
            System.out.printf("name: %s, folder: %s, size: %d%n", f.getPath(), f.isDirectory(), f.getLen());
        }
        System.out.println("Total entries: " + list.length);
        System.out.println("==========================================================");
        fs.close();
    }
}
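One note on the design: every helper in the class opens a FileSystem and closes it by hand. Since FileSystem implements java.io.Closeable, the same pattern can be written with try-with-resources (Java 7+), which guarantees the close even when an operation throws. Below is a sketch of the rename helper written that way; it is meant as a drop-in method inside MyHdfsDao (it reuses the hdfsPath field and the existing imports), and the method name renameWithAutoClose is just an illustrative choice.

    // Alternative version of rename() using try-with-resources (Java 7+).
    // FileSystem implements Closeable, so it is closed automatically,
    // even if rename() throws an IOException.
    private static void renameWithAutoClose(Path oldName, Path newName, Configuration conf) throws IOException {
        try (FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf)) {
            fs.rename(oldName, newName);
        }
    }

The same pattern applies to the other helpers; whether to use it is mostly a style choice, since each call closes the FileSystem at the end either way.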



