A Simple Data Cloud Disk with HDFS + JavaWeb

A first pass at building a data cloud disk on top of HDFS with a JavaWeb front end.

1. index.jsp

index.jsp renders the HDFS file listing that the servlets store in the "list" request attribute, and links each entry to the download and delete servlets.

<%@ page language="java" contentType="text/html; charset=UTF-8"
    pageEncoding="UTF-8"%>
<%@ page import="org.apache.hadoop.fs.FileStatus"%>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
</head>
<body>
<form action="UploadServlet" method="post" enctype="multipart/form-data">
    <input type="file" name="file">
    <input type="submit" value="Upload">
</form>
<table border="1">
    <tr>
        <th>File name</th><th>Type</th><th>Size (KB)</th><th>Action</th><th>Action</th>
    </tr>
    <%
        FileStatus[] list = (FileStatus[]) request.getAttribute("list");
        if (list != null)
            for (int i = 0; i < list.length; i++) {
    %>
    <tr>
        <td><%=list[i].getPath().getName()%></td>
        <td><%=(list[i].isDir() ? "directory" : "file")%></td>
        <td><%=list[i].getLen() / 1024%></td>
        <td><a href="DeleteFileServlet?filePath=<%=list[i].getPath()%>" class="delete">Delete</a></td>
        <td><a href="DownloadServlet?filePath=<%=list[i].getPath()%>">Download</a></td>
    </tr>
    <%
        }
    %>
</table>
</body>
</html>

2. HDFSDao.java

HDFSDao.java wraps access to the HDFS file system.
package com.hadoop.yunpan.model;

import java.io.IOException;
import java.net.URI;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class HDFSDao {
    private final Log log = LogFactory.getLog(HDFSDao.class);

    // HDFS access address
    private static final String HDFS_PATH = "hdfs://master:9000/user/hadoop";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HDFSDao(Configuration conf) {
        this(HDFS_PATH, conf);
    }

    public HDFSDao(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Load the Hadoop configuration files
    public static JobConf getConfig() {
        JobConf conf = new JobConf(HDFSDao.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("classpath:/hadoop/core-site.xml");
        conf.addResource("classpath:/hadoop/hdfs-site.xml");
        conf.addResource("classpath:/hadoop/mapred-site.xml");
        return conf;
    }

    // Create a folder under the root directory
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the files in a folder
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.println(f.getPath().getName() + ", type: "
                        + (f.isDir() ? "directory" : "file") + ", size: "
                        + f.getLen() / 1024 + "\n");
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Upload a local file to HDFS
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // remote --- /user/<file or folder under the user directory>
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Delete a file or folder
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path); // the delete actually runs when fs.close() is called below
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file from HDFS to the local file system
    public void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }

    public static void main(String[] args) throws IOException {
        JobConf conf = getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.ls("hdfs://master:9000/user/hadoop");
    }
}
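Outside the web app, the DAO can be exercised from a plain main method. The following is a minimal sketch, assuming the cluster from the listing above; the demo folder and the local file /home/hadoop/test.txt are placeholders, not part of the original project:

import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

public class HDFSDaoSmokeTest {
    public static void main(String[] args) throws Exception {
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);

        // Placeholder paths: point these at files that actually exist.
        hdfs.mkdirs("/user/hadoop/demo");
        hdfs.copyFile("/home/hadoop/test.txt", "/user/hadoop/demo/test.txt");
        hdfs.ls("/user/hadoop/demo");           // should show test.txt
        hdfs.rmr("/user/hadoop/demo/test.txt"); // clean up
    }
}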

3. UploadServlet.java

package com.hadoop.yunpan.controller;

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class UploadServlet
 */
//@WebServlet(name="UploadServlet",urlPatterns="/UploadServlet")
public class UploadServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private final Log log = LogFactory.getLog(UploadServlet.class);
    private static final int MAX_FILE_SIZE = 50 * 1024 * 1024; // 50 MB
    private static final int MAX_MEM_SIZE = 50 * 1024 * 1024;  // 50 MB
    private String fileUploadPath;

    public UploadServlet() {
        super();
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    /**
     * @see Servlet#init(ServletConfig)
     */
    public void init(ServletConfig config) throws ServletException {
        // When overriding the servlet's init method, remember to call
        // super.init(config); otherwise getServletContext() in
        // service/doGet/doPost will throw java.lang.NullPointerException
        // when it tries to fetch the ServletContext.
        super.init(config);
        System.out.println("init UploadServlet");
        ServletContext context = getServletContext();
        // "file-upload" is the context-param configured in web.xml (section 6)
        this.fileUploadPath = context.getInitParameter("file-upload");
        System.out.println("source file path:" + fileUploadPath);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        request.setCharacterEncoding("UTF-8");
        File file;
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        // Verify the content type of the upload
        String contentType = request.getContentType();
        if (contentType != null && contentType.indexOf("multipart/form-data") >= 0) {
            DiskFileItemFactory factory = new DiskFileItemFactory();
            // Maximum size of a file kept in memory
            factory.setSizeThreshold(MAX_MEM_SIZE);
            // Files larger than the threshold are buffered on local disk
            factory.setRepository(new File("/tmp"));
            // Create a new file upload handler
            ServletFileUpload upload = new ServletFileUpload(factory);
            // Maximum allowed upload size
            upload.setSizeMax(MAX_FILE_SIZE);
            try {
                // Parse the request into file items
                List<FileItem> fileList = upload.parseRequest(request);
                Iterator<FileItem> iterator = fileList.iterator();
                System.out.println("begin to upload file to tomcat server");
                while (iterator.hasNext()) {
                    FileItem item = iterator.next();
                    if (!item.isFormField()) {
                        // The browser may send a full Windows path; keep only the name
                        String fileName = item.getName();
                        String fn = fileName.substring(fileName.lastIndexOf("\\") + 1);
                        System.out.println(fn);
                        // Write the file into the local upload directory
                        file = new File(fileUploadPath, fn);
                        item.write(file);
                        System.out.println("upload file to tomcat server success!");
                        System.out.println("begin to upload file to hadoop hdfs");
                        String name = fileUploadPath + File.separator + fn;
                        System.out.println(name);
                        hdfs.copyFile(name, "/user/hadoop/" + fn);
                        System.out.println("upload file to hadoop hdfs success!");
                    }
                }
                FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
                request.setAttribute("list", list);
                request.getRequestDispatcher("index.jsp").forward(request, response);
            } catch (Exception ex) {
                System.out.println(ex.getMessage());
            }
        } else {
            System.out.println("No file uploaded");
        }
    }
}

4. DownloadServlet.java


package com.hadoop.yunpan.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class DownloadServlet
 */
//@WebServlet("/DownloadServlet")
public class DownloadServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String local = "/home/hadoop/Downloads";
        // Tomcat decodes GET parameters as ISO-8859-1 by default;
        // re-decode as GB2312 so Chinese file names survive the round trip
        String filePath = new String(request.getParameter("filePath")
                .getBytes("ISO-8859-1"), "GB2312");
        System.out.println(filePath);
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.download(filePath, local);
        FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }
}
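Note that this servlet copies the file from HDFS to a directory on the machine running Tomcat (/home/hadoop/Downloads) and then redraws the listing; the browser itself never receives the bytes. If you want the browser to download the file directly, a sketch along the following lines would stream the HDFS file into the HTTP response instead. This is not part of the original project; HdfsStreamUtil and streamToResponse are hypothetical names, and the hdfs://master:9000 address is the one used throughout this post:

package com.hadoop.yunpan.controller;

import java.io.IOException;
import java.net.URI;

import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Hypothetical helper: streams an HDFS file into the servlet response
// so the browser downloads it directly, instead of copying it to the
// Tomcat server's local disk.
public class HdfsStreamUtil {
    public static void streamToResponse(Configuration conf, String filePath,
            HttpServletResponse response) throws IOException {
        FileSystem fs = FileSystem.get(URI.create("hdfs://master:9000"), conf);
        Path path = new Path(filePath);
        response.setContentType("application/octet-stream");
        response.setHeader("Content-Disposition",
                "attachment; filename=\"" + path.getName() + "\"");
        FSDataInputStream in = fs.open(path);
        try {
            // copyBytes(in, out, bufferSize, close): close=false, we close below
            IOUtils.copyBytes(in, response.getOutputStream(), 4096, false);
        } finally {
            in.close();
            fs.close();
        }
    }
}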

5. DeleteFileServlet.java


package com.hadoop.yunpan.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class DeleteFileServlet
 */
//@WebServlet(name="DeleteFileServlet",urlPatterns="/DeleteFileServlet")
public class DeleteFileServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Re-decode the GET parameter (Tomcat defaults to ISO-8859-1)
        // so Chinese file names are preserved
        String filePath = new String(request.getParameter("filePath")
                .getBytes("ISO-8859-1"), "GB2312");
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.rmr(filePath);
        System.out.println("=====" + filePath + "=====");
        FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }
}

6. web.xml configuration



<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns="http://java.sun.com/xml/ns/javaee" version="3.0">
  <display-name>webtest1</display-name>

  <welcome-file-list>
    <welcome-file>index.html</welcome-file>
    <welcome-file>index.htm</welcome-file>
    <welcome-file>index.jsp</welcome-file>
    <welcome-file>default.html</welcome-file>
    <welcome-file>default.htm</welcome-file>
    <welcome-file>default.jsp</welcome-file>
  </welcome-file-list>

  <context-param>
    <description>Location to store uploaded file</description>
    <param-name>file-upload</param-name>
    <param-value>/home/hadoop/tmp</param-value>
  </context-param>

  <servlet>
    <display-name>UploadServlet</display-name>
    <servlet-name>UploadServlet</servlet-name>
    <servlet-class>com.hadoop.yunpan.controller.UploadServlet</servlet-class>
  </servlet>
  <servlet-mapping>
    <servlet-name>UploadServlet</servlet-name>
    <url-pattern>/UploadServlet</url-pattern>
  </servlet-mapping>

  <servlet>
    <display-name>DeleteFileServlet</display-name>
    <servlet-name>DeleteFileServlet</servlet-name>
    <servlet-class>com.hadoop.yunpan.controller.DeleteFileServlet</servlet-class>
  </servlet>
  <servlet-mapping>
    <servlet-name>DeleteFileServlet</servlet-name>
    <url-pattern>/DeleteFileServlet</url-pattern>
  </servlet-mapping>

  <servlet>
    <display-name>DownloadServlet</display-name>
    <servlet-name>DownloadServlet</servlet-name>
    <servlet-class>com.hadoop.yunpan.controller.DownloadServlet</servlet-class>
  </servlet>
  <servlet-mapping>
    <servlet-name>DownloadServlet</servlet-name>
    <url-pattern>/DownloadServlet</url-pattern>
  </servlet-mapping>
</web-app>

When starting Tomcat, because all of Hadoop's jars were added to the WEB-INF/lib directory earlier, the project fails at startup with:

java.lang.NoSuchMethodError: org.eclipse.jdt.internal.compiler.CompilationResult.getProblems()

After some digging, I found the cause: the jetty, servlet, and other third-party jars bundled with the Hadoop jars conflict with the ones in the current project.

Solution 1:

If, as in this project, you added the jars to lib by hand, remove the following jars. This is what I did, and the project runs after removing them (the exact version numbers vary, so I have not written them all out, but the name prefixes are the same):

  jersey-server-1.9.jar
  jasper-runtime-5.5.23.jar
  jasper-compiler-5.5.23.jar
  servlet-api.jar
  jsp-api.jar
  jetty-util.jar
  jetty.jar

Solution 2:

If the project is managed with Maven, excluding the following transitive dependencies from hadoop-common in pom.xml should also work, though I have not tried it:

<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-common</artifactId>
  <version>${hadoop.version}</version>
  <exclusions>
    <exclusion>
      <groupId>org.mortbay.jetty</groupId>
      <artifactId>jetty</artifactId>
    </exclusion>
    <exclusion>
      <groupId>org.mortbay.jetty</groupId>
      <artifactId>jetty-util</artifactId>
    </exclusion>
    <exclusion>
      <groupId>org.mortbay.jetty</groupId>
      <artifactId>jsp-2.1</artifactId>
    </exclusion>
    <exclusion>
      <groupId>org.mortbay.jetty</groupId>
      <artifactId>jsp-api-2.1</artifactId>
    </exclusion>
    <exclusion>
      <groupId>org.mortbay.jetty</groupId>
      <artifactId>servlet-api-2.1</artifactId>
    </exclusion>
    <exclusion>
      <groupId>javax.servlet</groupId>
      <artifactId>servlet-api</artifactId>
    </exclusion>
    <exclusion>
      <groupId>javax.servlet.jsp</groupId>
      <artifactId>jsp-api</artifactId>
    </exclusion>
    <exclusion>
      <groupId>tomcat</groupId>
      <artifactId>jasper-compiler</artifactId>
    </exclusion>
    <exclusion>
      <groupId>tomcat</groupId>
      <artifactId>jasper-runtime</artifactId>
    </exclusion>
  </exclusions>
</dependency>
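After editing pom.xml, running mvn dependency:tree is a quick way to confirm that the jetty, servlet-api, and jasper artifacts are really gone from the dependency tree.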

Finally

The finished result looks like this:
[Screenshot: the cloud disk file listing page]

Packaged source code:
Link: https://pan.baidu.com/s/1v4eaMFRgqm1LYt-IwgzGnA
Extraction code: qzbs
