hadoop2.2 java API 操作 HDFS(1)

导入hadoop需要的jar包,本文采用Maven方式管理项目

    <!-- Unit-testing framework; test scope keeps it out of the runtime classpath. -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.7</version>
      <scope>test</scope>
    </dependency>
    <!-- In-process mini HDFS/YARN cluster — presumably for local integration
         testing; consider <scope>test</scope>. TODO confirm. -->
    <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-minicluster</artifactId>
		  <version>2.2.0</version>
    </dependency>
    <!-- Client-side Hadoop artifact aggregating the HDFS/MapReduce client libraries. -->
    <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-client</artifactId>
		  <version>2.2.0</version>
    </dependency>
    <!-- NOTE(review): hadoop-assemblies ships build assembly descriptors — it is
         normally a build-time artifact, not a compile dependency. Verify it is
         actually needed here. -->
    <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-assemblies</artifactId>
		  <version>2.2.0</version>
    </dependency>
    <!-- NOTE(review): hadoop-maven-plugins is Maven plugin code, normally declared
         under <build><plugins>, not under <dependencies>. Verify before keeping. -->
        <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-maven-plugins</artifactId>
		  <version>2.2.0</version>
    </dependency>
    <!-- Core Hadoop classes; provides org.apache.hadoop.fs / org.apache.hadoop.io
         used by the example code below. -->
        <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-common</artifactId>
		  <version>2.2.0</version>
    </dependency>
    <!-- HDFS client/server classes for the hdfs:// protocol used below. -->
        <dependency>
		  <groupId>org.apache.hadoop</groupId>
		  <artifactId>hadoop-hdfs</artifactId>
		  <version>2.2.0</version>
    </dependency>

 用 Java 的 java.net.URL 方式读取 HDFS 文件:

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class MkdirHdfs {
	private static final String HADOOP_URL="hdfs://hadoop:9000/user";
	public static void main(String[] args)throws Exception {
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
		final URL url=new URL(HADOOP_URL);
		final InputStream in = url.openStream();
		OutputStream out=new FileOutputStream("hello.txt");
		IOUtils.copyBytes(in, out, 1024,true);
		out.close();
		in.close();
	}
}

  以上通过 URL 读取的方式比较繁琐,但是大家不要担心,Hadoop 还为我们提供了更简单的 API。


你可能感兴趣的:(java,hadoop,hdfs,hadoop2.2.0)