Hadoop Learning -- Testing Compression -- day05

import java.io.FileInputStream;
import java.io.FileOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DeflateCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestCompressDemo {

  private static Configuration conf;

  @BeforeClass
  public static void iniConf() {
    conf = new Configuration();
  }

  /**
   * Compress using the deflate algorithm, instantiating the codec via reflection.
   */
  @Test
  public void compressByDeflate() throws Exception {

    // look up the deflate codec class and instantiate it through ReflectionUtils
    String codecStr = "org.apache.hadoop.io.compress.DeflateCodec";
    @SuppressWarnings("unchecked")
    Class<DeflateCodec> clazz = (Class<DeflateCodec>) Class.forName(codecStr);
    DeflateCodec codec = ReflectionUtils.newInstance(clazz, conf);
    // wrap the raw output stream to obtain a compression stream
    FileOutputStream fos = new FileOutputStream("E:/zhaopian.deflate");
    CompressionOutputStream comOut = codec.createOutputStream(fos);
    // copy the source file into the compression stream, then finish and close it
    IOUtils.copyBytes(new FileInputStream("E:/zhaopian.jpg"), comOut, 1024);
    comOut.close();
  }

  /**
   * Compress using the deflate algorithm, instantiating the codec directly.
   */
  @Test
  public void compressByDeflate2() throws Exception {

    // instantiate the codec object directly
    DeflateCodec codec = new DeflateCodec();
    // inject the Configuration (DeflateCodec is Configurable)
    ReflectionUtils.setConf(codec, conf);
    // wrap the raw output stream to obtain a compression stream
    FileOutputStream fos = new FileOutputStream("E:/zhaopian2.deflate");
    CompressionOutputStream comOut = codec.createOutputStream(fos);
    // copy the source file into the compression stream, then finish and close it
    IOUtils.copyBytes(new FileInputStream("E:/zhaopian.jpg"), comOut, 1024);
    comOut.close();
  }

}
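
To round out the test, reading the compressed file back is a useful check. Below is a minimal decompression sketch in the same style, assuming the E:/zhaopian.deflate file produced above exists; the class name TestDecompressDemo and the output path E:/zhaopian_restored.jpg are only illustrative. It uses CompressionCodecFactory, which resolves the codec from the file name extension.

import java.io.FileInputStream;
import java.io.FileOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.junit.Test;

public class TestDecompressDemo {

  /**
   * Decompress the .deflate file produced above back to a plain file.
   */
  @Test
  public void decompressByFactory() throws Exception {
    Configuration conf = new Configuration();
    // the factory picks the codec from the file name extension (.deflate -> DeflateCodec)
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(new Path("E:/zhaopian.deflate"));
    // wrap the raw input stream to obtain a decompression stream
    CompressionInputStream comIn = codec.createInputStream(new FileInputStream("E:/zhaopian.deflate"));
    // hypothetical output path for the restored file
    FileOutputStream fos = new FileOutputStream("E:/zhaopian_restored.jpg");
    // copy the decompressed bytes into the output file, then close both streams
    IOUtils.copyBytes(comIn, fos, 1024);
    comIn.close();
    fos.close();
  }
}

The factory-based lookup is the same mechanism Hadoop's input formats use to choose a codec for compressed input files, so it generalizes beyond the deflate case tested here.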
