org.apache.hadoop.io.compress.DefaultCodec#setConf() Code Examples

Listed below is example code for org.apache.hadoop.io.compress.DefaultCodec#setConf(), collected from open-source projects; you can also follow the links to view the full source on GitHub.
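DefaultCodec implements Hadoop's Configurable interface, and setConf(Configuration) must be called before the codec creates any streams, because the configuration supplies the zlib settings the codec uses. As a minimal sketch (not taken from the projects below; the class name and data are illustrative), a compress/decompress round trip looks like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class DefaultCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);  // required before createOutputStream/createInputStream

    // Compress a small byte buffer.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(compressed);
    out.write("hello codec".getBytes(StandardCharsets.UTF_8));
    out.close();

    // Decompress it again.
    CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
    byte[] buf = new byte[64];
    int n = in.read(buf);
    in.close();
    System.out.println(new String(buf, 0, n, StandardCharsets.UTF_8));  // prints "hello codec"
  }
}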

Example 1  Project: hadoop  File: TestIFile.java

@Test
/**
 * Create an IFile.Writer using GzipCodec since this code does not
 * have a compressor when run via the tests (ie no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem)localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
    new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                 codec, null);
  writer.close();
}
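A note on the pattern above: calling setConf() by hand is only necessary because the test constructs the codec with new. Production Hadoop code typically obtains codecs through ReflectionUtils.newInstance(), which calls setConf() automatically on any Configurable class. A small sketch (the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecViaReflection {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // ReflectionUtils.newInstance() sees that DefaultCodec is Configurable
    // and calls setConf(conf) itself, so no explicit setConf is needed.
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
    System.out.println(codec.getDefaultExtension());  // ".deflate"
  }
}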
 
Example 2  Project: hadoop  File: TestIFile.java

@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem)localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
    new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
        codec, null);
  reader.close();
  
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
  
}
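The javadoc on these tests alludes to native libraries: in this era of Hadoop, GzipCodec had no pure-Java compressor and relied on libhadoop, while DefaultCodec can always fall back to the JDK's built-in zlib. If it matters which path is taken, a sketch for checking native availability (both calls are existing Hadoop APIs; the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.NativeCodeLoader;

public class NativeZlibCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // True only when libhadoop (and its zlib bindings) loaded successfully.
    System.out.println("native hadoop: " + NativeCodeLoader.isNativeCodeLoaded());
    System.out.println("native zlib:   " + ZlibFactory.isNativeZlibLoaded(conf));
  }
}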
 
Example 3  Project: big-c  File: TestIFile.java

@Test
/**
 * Create an IFile.Writer using GzipCodec since this code does not
 * have a compressor when run via the tests (ie no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem)localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
    new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                 codec, null);
  writer.close();
}
 
Example 4  Project: big-c  File: TestIFile.java

@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem)localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
    new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
        codec, null);
  reader.close();
  
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
  
}
 
Example 5  Project: hadoop  File: TestReduceTask.java

public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase: testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
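The conf passed to setConf() is what makes the call meaningful for DefaultCodec: the codec reads its zlib compression level and strategy from it. A sketch of setting those knobs before handing the conf over (the ZlibFactory setters are existing Hadoop APIs; the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;

public class TunedDefaultCodec {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Stored in the conf; picked up when the codec creates its compressors.
    ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_SPEED);
    ZlibFactory.setCompressionStrategy(conf, CompressionStrategy.DEFAULT_STRATEGY);

    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);  // codec now compresses at BEST_SPEED
  }
}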
 
Example 6  Project: big-c  File: TestReduceTask.java

public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase: testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
 

Example 7

@Test
public void testSeqFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testSeqFile(codec, SequenceFile.CompressionType.RECORD);
  testSeqFile(codec, SequenceFile.CompressionType.BLOCK);
}
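testSeqFile() is a helper private to that test class, so its body is not shown here; presumably it writes and re-reads a SequenceFile with the given codec and compression type. As a rough, hypothetical equivalent using the stock SequenceFile.createWriter() option API (the class and path names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class SeqFileWithDefaultCodec {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);

    Path path = new Path("build/test.seq");  // illustrative path
    SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(path),
        SequenceFile.Writer.keyClass(Text.class),
        SequenceFile.Writer.valueClass(Text.class),
        SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, codec));
    writer.append(new Text("k"), new Text("v"));
    writer.close();
  }
}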
 
Example 8  Project: RDFS  File: TestReduceTask.java

public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase: testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
 
Example 9  Project: hadoop-gpu  File: TestReduceTask.java

public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase: testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
 

Example 10

@Test
public void testTextFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testTextFile(codec);
}
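Finally, when the codec should be picked from a file's extension rather than hard-coded, CompressionCodecFactory returns an instance that has already had setConf() called on it. A short sketch (the class and file names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class CodecFromExtension {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    // ".deflate" maps to DefaultCodec; the factory configures the codec for us.
    CompressionCodec codec = factory.getCodec(new Path("part-00000.deflate"));
    System.out.println(codec == null ? "no codec" : codec.getClass().getName());
  }
}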