pom
<properties>
    <fileSystem>S3</fileSystem>
</properties>
<dependency>
    <groupId>com.amazonaws</groupId>
    <artifactId>aws-java-sdk-s3</artifactId>
    <version>1.11.659</version>
</dependency>
yml
fileSystem: @fileSystem@
Configuration:
// Selects the IFileService implementation at startup based on the
// "fileSystem" property (filled in from the Maven @fileSystem@ filter value).
@Configuration
public class BeanConfig {

    // Registered only when fileSystem=S3. Marked @Primary so this bean wins
    // injection if both conditions were ever satisfied at once.
    @Bean
    @Primary
    @ConditionalOnExpression("#{'S3'.equals(environment.getProperty('fileSystem'))}")
    public IFileService s3FileService() {
        return new S3FileServiceImpl();
    }

    // Registered only when fileSystem=HDFS.
    @Bean
    @ConditionalOnExpression("#{'HDFS'.equals(environment.getProperty('fileSystem'))}")
    public IFileService hdfsFileService() {
        return new HDFSFileServiceImpl();
    }
}
service:
// Storage-agnostic file operations; implemented by S3FileServiceImpl and
// HDFSFileServiceImpl, one of which is activated by BeanConfig.
public interface IFileService {

    /** Uploads the multipart file to {@code path}; returns the stored location/URL. */
    String fileUpload(MultipartFile file, String path) throws IOException;

    /** Creates a file from the DTO's path and (possibly empty) text content. */
    String fileCreate(FileCreateDTO fileCreateDTO) throws IOException;

    /** Opens the file at {@code path} for reading; the caller must close the stream. */
    InputStream fileDownload(String path) throws IOException;

    /** Moves/renames {@code oldPath} to {@code newPath}; returns the new location. */
    String rename(String oldPath, String newPath) throws IOException;

    /** Deletes the file at {@code path}; returns whether the delete succeeded. */
    boolean delete(String path) throws IOException;

    /** Overwrites an existing file's content; returns the stored location/URL. */
    String modifyContent(FileContentReqDto fileContentReqDto) throws IOException;

    /** Reads the whole file as text. */
    String readContent(String filePath) throws IOException;

    /** Replaces the file at {@code filePath} with the uploaded file's content. */
    String replace(MultipartFile file, String filePath) throws IOException;
}
package com.xinyan.mp.base.service;

import com.xinyan.mp.base.manager.S3Manager;
import com.xinyan.mp.base.manager.model.req.FileContentReqDto;
import com.xinyan.mp.base.manager.model.req.FileCreateDTO;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.multipart.MultipartFile;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
 * S3-backed implementation of {@link IFileService}; every operation delegates
 * to {@link S3Manager}. Instantiated via the @Bean method in BeanConfig when
 * fileSystem=S3, so it deliberately carries no stereotype annotation.
 */
@Slf4j
public class S3FileServiceImpl implements IFileService {

    @Autowired
    S3Manager s3Manager;

    /** Streams the uploaded file's content to S3 and returns its URL. */
    @Override
    public String fileUpload(MultipartFile file, String path) throws IOException {
        try (InputStream inputStream = file.getInputStream()) {
            return s3Manager.upload(inputStream, path);
        }
    }

    /** Creates a (possibly empty) text file at the DTO's path and returns its URL. */
    @Override
    public String fileCreate(FileCreateDTO fileCreateDTO) throws IOException {
        String content = StringUtils.isNotBlank(fileCreateDTO.getContent()) ? fileCreateDTO.getContent() : "";
        // Encode explicitly as UTF-8 instead of the platform default charset,
        // so the stored bytes do not depend on the JVM's file.encoding.
        try (ByteArrayInputStream inputStream = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8))) {
            return s3Manager.upload(inputStream, fileCreateDTO.getFilePath());
        }
    }

    /** Opens the object's content stream; the caller must close it. */
    @Override
    public InputStream fileDownload(String path) {
        return s3Manager.download(path);
    }

    /** Renames via S3 copy + delete; returns the new object's URL. */
    @Override
    public String rename(String oldPath, String newPath) {
        return s3Manager.rename(oldPath, newPath);
    }

    /** Best-effort delete; returns false on failure. */
    @Override
    public boolean delete(String path) {
        return s3Manager.delete(path);
    }

    /** Overwrites the object at the DTO's location with new text content. */
    @Override
    public String modifyContent(FileContentReqDto fileContentReqDto) throws IOException {
        return s3Manager.modifyFileContent(fileContentReqDto.getLocation(), fileContentReqDto.getContent());
    }

    /** Reads the whole object as text. */
    @Override
    public String readContent(String filePath) throws IOException {
        return s3Manager.readContent(filePath);
    }

    /** Replaces the object at {@code filePath} with the uploaded file's content. */
    @Override
    public String replace(MultipartFile file, String filePath) throws IOException {
        return s3Manager.replace(file, filePath);
    }
}
package com.xinyan.mp.base.service;

import com.xinyan.mp.base.manager.HDFSManager;
import com.xinyan.mp.base.manager.model.req.FileContentReqDto;
import com.xinyan.mp.base.manager.model.req.FileCreateDTO;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
 * HDFS-backed implementation of {@link IFileService}; every operation delegates
 * to {@link HDFSManager}. Instantiated via the @Bean method in BeanConfig when
 * fileSystem=HDFS.
 */
@Slf4j
public class HDFSFileServiceImpl implements IFileService {

    @Autowired
    HDFSManager hdfsManager;

    /** Writes the uploaded file's bytes under the manager's root path. */
    @Override
    public String fileUpload(MultipartFile file, String path) throws IOException {
        return hdfsManager.createFile(subPathSeparator(path), file.getBytes());
    }

    /** Creates a (possibly empty) text file at the DTO's path. */
    @Override
    public String fileCreate(FileCreateDTO fileCreateDTO) throws IOException {
        // Encode explicitly as UTF-8 instead of the platform default charset,
        // so the stored bytes do not depend on the JVM's file.encoding.
        byte[] content = StringUtils.isNotBlank(fileCreateDTO.getContent())
                ? fileCreateDTO.getContent().getBytes(StandardCharsets.UTF_8)
                : new byte[0];
        return hdfsManager.createFile(subPathSeparator(fileCreateDTO.getFilePath()), content);
    }

    // NOTE(review): unlike fileUpload/fileCreate/rename, the read/delete/modify
    // methods below pass the path through unchanged (no subPathSeparator and
    // no root-path prefixing in the manager) — presumably callers supply full
    // HDFS paths here; confirm against the callers.
    /** Opens the file for reading; the caller must close the stream. */
    @Override
    public InputStream fileDownload(String path) throws IOException {
        return hdfsManager.readFile(path);
    }

    /** Renames; returns the new path or throws if HDFS reports failure. */
    @Override
    public String rename(String oldPath, String newPath) throws IOException {
        return hdfsManager.rename(subPathSeparator(oldPath), subPathSeparator(newPath));
    }

    /** Non-recursive delete; returns whether HDFS reported success. */
    @Override
    public boolean delete(String path) throws IOException {
        return hdfsManager.delete(path);
    }

    /** Overwrites the file at the DTO's location with new text content. */
    @Override
    public String modifyContent(FileContentReqDto fileContentReqDto) throws IOException {
        return hdfsManager.modifyContent(fileContentReqDto.getLocation(), fileContentReqDto.getContent());
    }

    /** Reads the whole file as text. */
    @Override
    public String readContent(String path) throws IOException {
        return hdfsManager.readContent(path);
    }

    /** Replaces the file at {@code filePath} with the uploaded file's content. */
    @Override
    public String replace(MultipartFile file, String filePath) throws IOException {
        return hdfsManager.replace(filePath, file.getBytes());
    }

    /** Strips a single leading "/" so the path joins cleanly onto the HDFS root path. */
    private String subPathSeparator(String source) {
        if (source.startsWith("/")) {
            return source.substring(1);
        }
        return source;
    }
}
package com.xinyan.mp.base.manager;
import cn.hutool.core.util.StrUtil;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.PostConstruct;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
 * Low-level S3 file operations (upload/download/rename/delete/read/modify)
 * against the bucket configured via the aws.* properties. Registered only
 * when the fileSystem property is "S3".
 */
@Slf4j
@Component
@ConditionalOnExpression("#{'S3'.equals(environment.getProperty('fileSystem'))}")
public class S3Manager {

    /**
     * Target bucket name. The bucket must already exist on S3 and, per the
     * original note, a same-named bucket must start with an upper-case letter.
     */
    @Value("${aws.bucket}")
    private String awsBucket;

    /** AWS access key. */
    @Value("${aws.accessKey}")
    private String accessKey;

    /** AWS secret key. */
    @Value("${aws.secretKey}")
    private String secretKey;

    /** S3-compatible endpoint URL. */
    @Value("${aws.url}")
    private String url;

    private AmazonS3 amazonS3Client;

    /** Builds the S3 client once at startup and logs the visible buckets. */
    @PostConstruct
    public void init() {
        ClientConfiguration clientConfig = new ClientConfiguration();
        clientConfig.setProtocol(Protocol.HTTP);
        amazonS3Client = AmazonS3ClientBuilder.standard().withCredentials(
                new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey))).
                withClientConfiguration(clientConfig).
                withEndpointConfiguration(
                        new AwsClientBuilder.EndpointConfiguration(url, Regions.DEFAULT_REGION.getName())).
                build();
        List<Bucket> buckets = amazonS3Client.listBuckets();
        log.info("s3文件云平台存储桶列表,{}", buckets);
    }

    /**
     * Splits a full location (".../<awsBucket>/.../name") into {bucketName, key}:
     * everything from the bucket name up to the last "/" is the bucket path,
     * the remainder is the object key. Extracted from five identical copies.
     */
    private String[] splitLocation(String location) {
        String str = location.substring(location.indexOf(awsBucket));
        String bucketName = StrUtil.sub(str, 0, str.lastIndexOf("/"));
        String key = StrUtil.sub(str, str.lastIndexOf("/") + 1, str.length());
        return new String[]{bucketName, key};
    }

    /**
     * Resolves a caller-supplied relative path into {bucketName, key} under the
     * configured bucket; a path with no "/" goes straight into the bucket root.
     */
    private String[] resolveDestination(String path) {
        if (!path.contains("/")) {
            return new String[]{awsBucket, path};
        }
        String key = path.substring(path.lastIndexOf('/') + 1);
        String bucketName = path.startsWith("/")
                ? awsBucket + path.substring(0, path.lastIndexOf("/"))
                : awsBucket + "/" + path.substring(0, path.lastIndexOf("/"));
        return new String[]{bucketName, key};
    }

    /**
     * Stores the object and marks it public.
     * SECURITY NOTE(review): PublicReadWrite makes every stored object
     * world-readable AND world-writable; confirm this is intentional before
     * tightening the ACL.
     */
    private void putPublicObject(String bucketName, String key, InputStream inputStream) {
        amazonS3Client.putObject(bucketName, key, inputStream, new ObjectMetadata());
        amazonS3Client.setObjectAcl(bucketName, key, CannedAccessControlList.PublicReadWrite);
    }

    /**
     * Uploads a stream to {@code path} under the configured bucket and returns
     * the object's URL. The caller owns (and closes) the input stream.
     */
    public String upload(InputStream inputStream, String path) throws IOException {
        String[] dest = resolveDestination(path);
        putPublicObject(dest[0], dest[1], inputStream);
        return amazonS3Client.getUrl(dest[0], dest[1]).toString();
    }

    /** Best-effort delete; returns false (and logs) on any failure. */
    public boolean delete(String path) {
        try {
            String[] loc = splitLocation(path);
            amazonS3Client.deleteObject(loc[0], loc[1]);
            return true;
        } catch (Exception e) {
            // Previously the failure was swallowed silently; keep the
            // best-effort contract but record why the delete failed.
            log.warn("S3 delete failed, path: {}", path, e);
            return false;
        }
    }

    /** Opens the object's content stream; the caller must close it. */
    public InputStream download(String path) {
        String[] loc = splitLocation(path);
        S3Object s3Object = amazonS3Client.getObject(loc[0], loc[1]);
        return s3Object.getObjectContent();
    }

    /** Overwrites the object at {@code location} with UTF-8 encoded {@code content}. */
    public String modifyFileContent(String location, String content) throws IOException {
        String[] loc = splitLocation(location);
        try (InputStream inputStream = IOUtils.toInputStream(content, StandardCharsets.UTF_8.name())) {
            putPublicObject(loc[0], loc[1], inputStream);
            return amazonS3Client.getUrl(loc[0], loc[1]).toString();
        }
    }

    /** Reads the whole object as a UTF-8 string. */
    public String readContent(String path) throws IOException {
        try (InputStream inputStream = download(path);
             StringWriter writer = new StringWriter()) {
            IOUtils.copy(inputStream, writer, StandardCharsets.UTF_8.name());
            return writer.toString();
        }
    }

    /** "Renames" by copy + delete (S3 has no native rename); returns the new URL. */
    public String rename(String oldPath, String newPath) {
        String[] source = splitLocation(oldPath);
        String[] dest = resolveDestination(newPath);
        CopyObjectRequest copyObjectRequest = new CopyObjectRequest(source[0], source[1], dest[0], dest[1]);
        amazonS3Client.copyObject(copyObjectRequest);
        amazonS3Client.deleteObject(source[0], source[1]);
        return amazonS3Client.getUrl(dest[0], dest[1]).toString();
    }

    /** Replaces the object at {@code filePath} with the uploaded file's content. */
    public String replace(MultipartFile file, String filePath) throws IOException {
        String[] loc = splitLocation(filePath);
        try (InputStream inputStream = file.getInputStream()) {
            putPublicObject(loc[0], loc[1], inputStream);
            return amazonS3Client.getUrl(loc[0], loc[1]).toString();
        }
    }
}
package com.xinyan.mp.base.manager;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
/**
 * Low-level HDFS file operations; loads core-site.xml/hdfs-site.xml from
 * hdfs.config.path and optionally authenticates via Kerberos. Registered
 * only when the fileSystem property is "HDFS".
 */
@Slf4j
@Component
@ConditionalOnExpression("#{'HDFS'.equals(environment.getProperty('fileSystem'))}")
public class HDFSManager {

    private FileSystem fileSystem;

    /** Root directory ensured at startup; createFile/rename resolve under it. */
    @Value("${hdfs.root.path}")
    private String rootPath;

    /** HDFS user (also the Kerberos principal when Kerberos is enabled). */
    @Value("${hdfs.user}")
    private String user;

    @Value("${hdfs.kerberos.enable}")
    private Boolean kerberosEnable;

    /** Directory holding core-site.xml / hdfs-site.xml (and keytab/krb5 files). */
    @Value("${hdfs.config.path}")
    private String hdfsConfigPath;

    // Fixed: the previous placeholder default ":''" injected the literal
    // two-character string '' when the property was absent; ":" yields "".
    @Value("${hdfs.keytab.name:}")
    private String hdfsKeytabName;

    @Value("${hdfs.krb5.conf.name:}")
    private String hdfsKrb5ConfName;

    /** Loads the Hadoop configuration, logs in, and ensures the root path exists. */
    @PostConstruct
    private void init() throws IOException {
        Configuration configuration = new Configuration();
        if (!hdfsConfigPath.endsWith("/")) {
            hdfsConfigPath = hdfsConfigPath + "/";
        }
        configuration.addResource(new Path(hdfsConfigPath + "core-site.xml"));
        configuration.addResource(new Path(hdfsConfigPath + "hdfs-site.xml"));
        if (kerberosEnable) {
            System.setProperty("java.security.krb5.conf", hdfsConfigPath + hdfsKrb5ConfName);
            UserGroupInformation.setConfiguration(configuration);
            UserGroupInformation.loginUserFromKeytab(user, hdfsConfigPath + hdfsKeytabName);
        } else {
            UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(user));
        }
        log.info("当前HDFS user :{}", UserGroupInformation.getCurrentUser().getUserName());
        fileSystem = FileSystem.newInstance(configuration);
        log.info("fsDefault :{}", configuration.get(FileSystem.FS_DEFAULT_NAME_KEY));
        mkdirs(rootPath);
    }

    /** Lists the direct children of {@code path}. */
    public FileStatus[] list(String path) throws IOException {
        return fileSystem.listStatus(new Path(path));
    }

    /** Creates the directory if it does not already exist. */
    private void mkdirs(String path) throws IOException {
        Path srcPath = new Path(path);
        if (fileSystem.exists(srcPath)) {
            log.info("目录已存在");
            return;
        }
        fileSystem.mkdirs(srcPath);
    }

    /** Writes {@code b} to rootPath + path (overwriting) and returns the full path. */
    public String createFile(String path, byte[] b) throws IOException {
        String filePath = rootPath + path;
        // try-with-resources closes the stream; the original's explicit
        // close() inside the block was redundant and has been removed.
        try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(filePath))) {
            fsDataOutputStream.write(b);
            return filePath;
        }
    }

    /**
     * Renames {@code oldName} to rootPath + newName; throws on failure.
     * NOTE(review): oldName is used as-is while newName is prefixed with
     * rootPath — asymmetric; confirm callers pass a full path for oldName.
     */
    public String rename(String oldName, String newName) throws IOException {
        String newPath = rootPath + newName;
        if (fileSystem.rename(new Path(oldName), new Path(newPath))) {
            return newPath;
        }
        throw new IOException("重命名失败");
    }

    /** Opens the file for reading; the caller must close the stream. */
    public InputStream readFile(String path) throws IOException {
        return fileSystem.open(new Path(path));
    }

    /** Non-recursive delete; returns whether HDFS reported success. */
    public boolean delete(String path) throws IOException {
        return fileSystem.delete(new Path(path), false);
    }

    /** Reads the whole file as a UTF-8 string. */
    public String readContent(String path) throws IOException {
        try (InputStream inputStream = readFile(path);
             StringWriter writer = new StringWriter()) {
            IOUtils.copy(inputStream, writer, StandardCharsets.UTF_8.name());
            return writer.toString();
        }
    }

    /** Overwrites {@code location} with {@code content} and returns the location. */
    public String modifyContent(String location, String content) throws IOException {
        try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(location))) {
            // Encode explicitly as UTF-8 instead of the platform default
            // charset (readContent already decodes as UTF-8).
            fsDataOutputStream.write(content.getBytes(StandardCharsets.UTF_8));
            return location;
        }
    }

    /** Overwrites {@code filePath} with {@code b} and returns the path. */
    public String replace(String filePath, byte[] b) throws IOException {
        try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(filePath))) {
            fsDataOutputStream.write(b);
            return filePath;
        }
    }
}
Packaging command (selects the file system at build time via the Maven @fileSystem@ filter property):
mvn clean -Dmaven.test.skip=true -DfileSystem=HDFS package -U -Ptest