Uploading a File object to HDFS
/**
 * Uploads a local file to HDFS.
 * @param filesrc  path of the local source file
 * @param hdfsPath target directory on HDFS
 * @param filename name to give the file on HDFS
 * @return true if the upload succeeded, false otherwise
 */
public static Boolean uploadFile(String filesrc, String hdfsPath, String filename) {
    try {
        Configuration conf = new Configuration();
        // On Windows, hadoop.home.dir may additionally need to point at a local
        // Hadoop installation, e.g. System.setProperty("hadoop.home.dir", "E:\\hadoop-2.2.0");
        conf.set("fs.defaultFS", DMPConfigUtils.DMP_HDFS_URL);
        conf.set("user.name", DMPConfigUtils.DMP_HDFS_USER);
        System.setProperty("HADOOP_USER_NAME", DMPConfigUtils.DMP_HDFS_USER);
        logger.info("HADOOP_USER_NAME=" + DMPConfigUtils.DMP_HDFS_USER);
        logger.info("hadoop.home.dir=" + System.getProperty("hadoop.home.dir"));
        logger.info("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));

        FileSystem fileSystem = FileSystem.get(URI.create(hdfsPath), conf);
        Path resP = new Path(filesrc);
        Path destP = new Path(hdfsPath);
        // Create the target directory on HDFS if it does not exist yet
        if (!fileSystem.exists(destP)) {
            fileSystem.mkdirs(destP);
        }
        // Copy the local file into the target directory under the given name
        fileSystem.copyFromLocalFile(resP, new Path(hdfsPath + "/" + filename));
        return true;
    } catch (IllegalArgumentException | IOException e) {
        logger.error(e.getMessage(), e);
        return false;
    }
}
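A minimal usage sketch; the local path and HDFS directory below are placeholders for illustration, not values from the original code:

// Hypothetical paths, for illustration only
Boolean ok = uploadFile("/tmp/report.csv", "/user/dmp/upload", "report.csv");
if (!Boolean.TRUE.equals(ok)) {
    logger.warn("upload to HDFS failed");
}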
Download approach 1: return an InputStream
public static InputStream downLoadFile(String filePath) throws IllegalArgumentException, IOException, URISyntaxException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", DMPConfigUtils.DMP_HDFS_URL);
    conf.set("user.name", DMPConfigUtils.DMP_HDFS_USER);
    // Windows-only workaround: point hadoop.home.dir at a local Hadoop installation
    System.setProperty("hadoop.home.dir", "E:\\hadoop-2.2.0");
    System.setProperty("HADOOP_USER_NAME", DMPConfigUtils.DMP_HDFS_USER);
    FileSystem fs = FileSystem.get(new URI(DMPConfigUtils.DMP_HDFS_URL), conf);
    // The caller is responsible for closing the returned stream
    InputStream in = fs.open(new Path(filePath));
    return in;
}
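A minimal caller sketch; the HDFS path is a placeholder, and the checked exceptions declared by downLoadFile are assumed to be handled or declared by the enclosing method:

// Hypothetical HDFS path, for illustration only
try (InputStream in = downLoadFile("/user/dmp/data/part-00000");
     BufferedReader reader = new BufferedReader(new InputStreamReader(in, "utf-8"))) {
    String line;
    while ((line = reader.readLine()) != null) {
        System.out.println(line);
    }
}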
Download approach 2: download the file to the browser via a download API
private ResponseEntity<byte[]> downloadUserDiffusion(LabelUserDiffusionEx labelUserDiffusionEx) throws Exception {
    // Full HDFS path of the file to download
    String localPath = "";
    Configuration config = new Configuration();
    config.set("fs.defaultFS", DMPConfigUtils.DMP_HDFS_URL);
    config.set("user.name", DMPConfigUtils.DMP_HDFS_USER);
    // Temporary local file that the HDFS content is staged into
    String tmpPath = System.getProperty("user.dir") + "\\UserDiffusion.txt";
    try {
        // Build the FileSystem handle
        FileSystem fs = FileSystem.get(URI.create(localPath), config);
        // Read the HDFS file and copy it to the local temp file;
        // the last argument closes both streams when the copy finishes
        InputStream is = fs.open(new Path(localPath));
        IOUtils.copyBytes(is, new FileOutputStream(new File(tmpPath)), 2048, true);
        fs.close();
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
    File file = new File(tmpPath);
    HttpHeaders headers = new HttpHeaders();
    // Re-encode the file name to avoid garbled non-ASCII characters in the Content-Disposition header
    String filename = new String("test.txt".getBytes("utf-8"), "iso-8859-1");
    headers.setContentDispositionFormData("attachment", filename);
    headers.setContentType(MediaType.APPLICATION_OCTET_STREAM);
    ResponseEntity<byte[]> responseEntity =
            new ResponseEntity<>(org.apache.commons.io.FileUtils.readFileToByteArray(file), headers, HttpStatus.OK);
    // Clean up the temporary local copy once its bytes are in memory
    file.delete();
    return responseEntity;
}
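A minimal sketch of exposing this through a Spring MVC controller so the browser receives the file as an attachment; the mapping URL and request binding are assumptions, not from the original code:

// Hypothetical endpoint, for illustration only
@GetMapping("/userDiffusion/download")
public ResponseEntity<byte[]> download(LabelUserDiffusionEx labelUserDiffusionEx) throws Exception {
    return downloadUserDiffusion(labelUserDiffusionEx);
}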
Reading file content:
public void readhdfs() {
    // Full HDFS path of the file to read
    String path = "";
    String line;
    try {
        Configuration conf = new Configuration();
        // HDFS connection settings
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("user.name", "hdfs");
        FileSystem fs = FileSystem.get(URI.create(path), conf);
        FSDataInputStream hdfsInStream = fs.open(new Path(path));
        InputStreamReader isr = new InputStreamReader(hdfsInStream, "utf-8");
        BufferedReader br = new BufferedReader(isr);
        // Print the file line by line
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        br.close();
        fs.close();
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
}
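An alternative sketch that reads the whole file into a String with commons-io (the same library already used above for FileUtils); the path is a placeholder and IOException is assumed to be handled by the enclosing method:

// Hypothetical path, for illustration only
Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://ns1");
FileSystem fs = FileSystem.get(URI.create(path), conf);
try (FSDataInputStream in = fs.open(new Path(path))) {
    String content = org.apache.commons.io.IOUtils.toString(in, "utf-8");
    System.out.println(content);
}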