A Hadoop Data Compression and Decompression Example

Data Compression

The test input data (/input/kmeans/sample.txt) is:

1,1
2,2
3,3
-3,-3
-4,-4
-5,-5

The output (/output/compress.gz) is the gzip-compressed (binary) form of the input data.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;

import com.panky.tools.Tools;

public class StreamCompressor {
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        // Load the codec class by its fully qualified name.
        String codecClassname = "org.apache.hadoop.io.compress.GzipCodec";
        Class<?> codecClass = Class.forName(codecClassname);
        Configuration configuration = new Tools().getTools();
        FileSystem fileSystem = FileSystem.get(configuration);
        InputStream inputStream = fileSystem.open(new Path("/input/kmeans/sample.txt"));
        OutputStream outputStream = fileSystem.create(new Path("/output/compress.gz"));
        // Create a CompressionCodec instance via ReflectionUtils.
        CompressionCodec compressionCodec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, configuration);
        // Wrap the HDFS output stream in a CompressionOutputStream so that everything written to it is compressed.
        CompressionOutputStream compressionOutputStream = compressionCodec.createOutputStream(outputStream);
        IOUtils.copyBytes(inputStream, compressionOutputStream, 4096, false);
        // finish() flushes the remaining compressed data; closing the streams completes the HDFS file.
        compressionOutputStream.finish();
        IOUtils.closeStream(compressionOutputStream);
        IOUtils.closeStream(inputStream);
    }
}
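
The com.panky.tools.Tools helper used above is not part of Hadoop and is not shown in this post; it only builds a Configuration that points at the HDFS cluster. A minimal sketch, assuming a NameNode address of hdfs://master:9000 (replace it with your own), could look like this:

package com.panky.tools;

import org.apache.hadoop.conf.Configuration;

public class Tools {
    // Returns a Configuration pointing at the HDFS cluster.
    // hdfs://master:9000 is an assumed NameNode address; adjust it for your environment.
    public Configuration getTools() {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://master:9000");
        return configuration;
    }
}

After running StreamCompressor, the result can be checked with "hadoop fs -text /output/compress.gz", which recognizes the .gz extension and decompresses the file on the fly.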
Data Decompression

The input data (/output/compress.gz) is the output of the compression step above.

The output (/output/compress), whose name is the input file name with the compression suffix removed, is:

1,1
2,2
3,3
-3,-3
-4,-4
-5,-5

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;

import com.panky.tools.Tools;

public class FileDecompressor {
    public static void main(String[] args) throws IOException {
        // A file ending in .gz can be read with GzipCodec.
        String uriString = "/output/compress.gz";
        Configuration configuration = new Tools().getTools();
        FileSystem fileSystem = FileSystem.get(configuration);
        CompressionCodecFactory factory = new CompressionCodecFactory(configuration);
        // getCodec() maps the file extension to the matching CompressionCodec.
        CompressionCodec codec = factory.getCodec(new Path(uriString));
        if (codec == null) {
            System.out.println("No codec found for " + uriString);
            System.exit(1);
        }
        // Once the codec is found, the static removeSuffix() method strips the
        // file extension to form the output file name.
        String outString = CompressionCodecFactory.removeSuffix(uriString, codec.getDefaultExtension());
        // createInputStream(InputStream in) returns a stream that decompresses as it is read.
        InputStream inputStream = codec.createInputStream(fileSystem.open(new Path(uriString)));
        // This declaration also works for decompression:
        // CompressionInputStream inputStream = codec.createInputStream(fileSystem.open(new Path(uriString)));
        OutputStream outputStream = fileSystem.create(new Path(outString));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        IOUtils.closeStream(inputStream);
        IOUtils.closeStream(outputStream);
    }
}
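
When many files are compressed or decompressed repeatedly, Hadoop's CodecPool lets you reuse compressor and decompressor instances instead of creating a new one for every stream. The variant below is a sketch of the same decompression logic using a pooled Decompressor; PooledFileDecompressor is a hypothetical class name, and a plain new Configuration() is assumed in place of the Tools helper.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class PooledFileDecompressor {
    public static void main(String[] args) throws IOException {
        String uriString = "/output/compress.gz";
        // new Configuration() is assumed here; use your own cluster configuration.
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(configuration);
        CompressionCodecFactory factory = new CompressionCodecFactory(configuration);
        CompressionCodec codec = factory.getCodec(new Path(uriString));
        if (codec == null) {
            System.out.println("No codec found for " + uriString);
            System.exit(1);
        }
        String outString = CompressionCodecFactory.removeSuffix(uriString, codec.getDefaultExtension());
        // Borrow a Decompressor from the pool instead of allocating a new one per file.
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        try {
            InputStream inputStream = codec.createInputStream(fileSystem.open(new Path(uriString)), decompressor);
            OutputStream outputStream = fileSystem.create(new Path(outString));
            IOUtils.copyBytes(inputStream, outputStream, configuration);
            IOUtils.closeStream(inputStream);
            IOUtils.closeStream(outputStream);
        } finally {
            // Always return the decompressor so it can be reused by later streams.
            CodecPool.returnDecompressor(decompressor);
        }
    }
}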


