First you'll need Apache's commons-fileupload package; you can grab it from the official site, very handy.
You'll also need hadoop-core.jar and commons-logging.jar. Download them all and drop them into the servlet's WEB-INF/lib.
A buddy of mine built the same thing with Struts; if people are interested I can post that version later so everyone can have a look.
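For reference, the webapp layout I'm assuming looks roughly like this (the jar version numbers are just examples, use whichever you downloaded):
webapp/
    upload.htm
    WEB-INF/
        web.xml
        classes/uploadServlet.class
        lib/commons-fileupload-1.0.jar
        lib/commons-logging-1.1.jar
        lib/hadoop-core-0.20.2.jar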
upload.htm source:
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<HTML>
<HEAD>
<META content="text/html; charset=gb2312" http-equiv=Content-Type>
</HEAD>
<BODY bgColor=#e6e6e6><BR>
<FORM action="uploadServlet" encType="multipart/form-data" method="post">
<TABLE><TR><TD><FONT color=#000000 face=helv,helvetica size=1>
File: </FONT> <INPUT size=60 type=file name="file"> </TD></TR></TABLE>
<INPUT type=submit value=Send name="send">
</FORM>
</BODY>
</HTML>
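One more thing: for the form's action="uploadServlet" to actually reach the servlet, it has to be mapped in web.xml. A minimal sketch, assuming the servlet class sits in the default package as written below:
<web-app>
  <servlet>
    <servlet-name>uploadServlet</servlet-name>
    <servlet-class>uploadServlet</servlet-class>
  </servlet>
  <servlet-mapping>
    <servlet-name>uploadServlet</servlet-name>
    <url-pattern>/uploadServlet</url-pattern>
  </servlet-mapping>
</web-app>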
The servlet source code:
It implements two approaches: one streams the upload straight into HDFS, the other (commented out) saves the file to local disk first and then copies it up with copyFromLocalFile...
import javax.servlet.*;
import javax.servlet.http.*;
import java.io.*;
import java.text.*;
import java.util.*;
import org.apache.commons.fileupload.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class uploadServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final int BUFFER_SIZE = 16 * 1024;
private static final String CONTENT_TYPE = "text/html; charset=GBK";
public void init() throws ServletException {
}
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Configuration conf = new Configuration();
conf.set("fs.default.name", "hdfs://192.168.1.200:9000/");
conf.set("dfs.replication","1" );
//conf.addResource(new Path("/home/grid/Hadoop/hadoop-0.20.2/conf/core-site.xml"));
//conf.addResource(new Path("/home/grid/Hadoop/hadoop-0.20.2/conf/hdfs-site.xml"));
//conf.addResource(new Path("/home/grid/Hadoop/hadoop-0.20.2/conf/mapred-site.xml");
FileSystem hdfs = FileSystem.get(conf);
String yourTempDirectory = "/home/grid/upload/"; // HDFS target dir for approach 1, local temp dir for approach 2
boolean writeToFile = true;
response.setContentType(CONTENT_TYPE);
PrintWriter out = response.getWriter();
out.println("<html>");
out.println("<head><title>uploadServlet</title></head>");
out.println("<link rel='stylesheet' href='css/turbocrm.css' type='text/css'>");
out.println("<body bgcolor=\"#ffffff\">");
out.println("<p>成功上载</p>");
out.println("<a href='upload.htm'>返回</a>");
out.println("</body></html>");
//first check whether the request is multipart
boolean isMultipart = FileUpload.isMultipartContent(request);
System.out.println(isMultipart);
if(isMultipart){
try{
// create a DiskFileUpload object (the old commons-fileupload 1.0-style API)
DiskFileUpload upload = new DiskFileUpload();
// parse the request into a list of form items
List items = upload.parseRequest(request);
// process each item
Iterator iter = items.iterator();
while (iter.hasNext()) {
FileItem item = (FileItem) iter.next();
if (item.isFormField()) {
String name = item.getFieldName();
String value = item.getString();
System.out.println(name + " "+ value );
}
//otherwise the item is an uploaded file
else {
//String fieldName = item.getFieldName();
//String contentType = item.getContentType();
//boolean isInMemory = item.isInMemory();
//long sizeInBytes = item.getSize();
String fileName = item.getName();
//extract the uploaded file's extension; guard against names without a dot
int index = fileName.lastIndexOf(".");
String temp = (index >= 0) ? fileName.substring(index) : "";
if (writeToFile) {
System.out.println("write");
//save under the configured directory, using a timestamp as the file name
SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmssSSS");
String time = dateformat1.format(new Date());
//Approach 1: stream the data directly into HDFS
FSDataOutputStream fsdo = null;
InputStream in = null;
Path dfs = new Path(yourTempDirectory + time +temp);
fsdo = hdfs.create(dfs);
in = new BufferedInputStream(item.getInputStream(), BUFFER_SIZE);
byte[] buffer = new byte[BUFFER_SIZE];
int len = 0;
while ((len = in.read(buffer)) > 0) {
fsdo.write(buffer, 0, len);
}
in.close();
fsdo.close(); // close to flush the last block, otherwise the HDFS file may stay empty
//Approach 2: save the data to the node's local disk first, then copy it up to HDFS
// File uploadedFile = new File(yourTempDirectory + time +temp);
// item.write(uploadedFile);
// Path src = new Path(yourTempDirectory + time +temp);
// Path dst = new Path("/");
// hdfs.copyFromLocalFile(src, dst);
} else {
// writeToFile == false: just read and discard the stream
InputStream uploadedStream = item.getInputStream();
uploadedStream.close();
}
}
}
}
catch(Exception e){
e.printStackTrace();
}
}
}
public void destroy() {
}
}
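By the way, before dropping this into a servlet container it's worth sanity-checking the HDFS connection with a tiny standalone program. A minimal sketch, assuming the same namenode address as above and hadoop-core.jar on the classpath (the class name HdfsWriteTest and the /test.txt path are just made up for illustration):
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HdfsWriteTest {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.1.200:9000/"); // same namenode as the servlet
        conf.set("dfs.replication", "1");
        FileSystem hdfs = FileSystem.get(conf);
        Path p = new Path("/test.txt"); // example target path
        FSDataOutputStream out = hdfs.create(p);
        out.write("hello hdfs".getBytes());
        out.close(); // flush the data to the datanode
        System.out.println("exists: " + hdfs.exists(p));
    }
}
If this prints "exists: true", the namenode address and jars are fine and any remaining problems are on the servlet side.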
I actually just wrote this and haven't tested it yet, so just have a look and play around with it for now. I'll try it out tomorrow, hehe~