org.apache.hadoop.io.SequenceFile.CompressionType#RECORD源码实例Demo

下面列出了org.apache.hadoop.io.SequenceFile.CompressionType#RECORD 实例代码,或者点击链接到github查看源代码,也可以在右侧发表评论。

源代码1 项目: hadoop   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(FileSystem ignored, JobConf job) 
          throws IOException {
  // Delegate the standard output-path validation first.
  super.checkOutputSpecs(ignored, job);
  // This format writes raw binary key/value bytes, so per-record
  // compression cannot be honored; reject the configuration early.
  final boolean compressed = getCompressOutput(job);
  if (compressed
      && CompressionType.RECORD == getOutputCompressionType(job)) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression" );
  }
}
 
源代码2 项目: hadoop   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(JobContext job) throws IOException {
  // Run the inherited output-spec validation before our own check.
  super.checkOutputSpecs(job);
  // Record-level compression is incompatible with this binary format;
  // fail fast with a configuration error.
  final boolean recordCompressed = getCompressOutput(job)
      && CompressionType.RECORD == getOutputCompressionType(job);
  if (recordCompressed) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
      + "doesn't support Record Compression" );
  }
}
 
源代码3 项目: hadoop   文件: TestArrayFile.java
/** 
 * test on {@link ArrayFile.Reader} iteration methods
 * <pre> 
 * {@code next(), seek()} in and out of range.
 * </pre>
 */
public void testArrayFileIteration() {
  final int SIZE = 10;
  Configuration conf = new Configuration();    
  try {
    FileSystem fs = FileSystem.get(conf);
    ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE, 
        LongWritable.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull("testArrayFileIteration error !!!", writer);
    
    for (int i = 0; i < SIZE; i++) {
      writer.append(new LongWritable(i));
    }
    writer.close();
    
    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
    LongWritable nextWritable = new LongWritable(0);
    
    for (int i = 0; i < SIZE; i++) {
      nextWritable = (LongWritable) reader.next(nextWritable);
      // Fixed argument order: JUnit expects (expected, actual) so a
      // failure message reads correctly.
      assertEquals(i, nextWritable.get());
    }
      
    // seek() positions the reader so that the following next() returns
    // the entry after the sought key.
    assertTrue("testArrayFileIteration seek error !!!",
        reader.seek(new LongWritable(6)));
    nextWritable = (LongWritable) reader.next(nextWritable);
    assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
    assertTrue("testArrayFileIteration error !!!",
        nextWritable.equals(new LongWritable(7)));
    // Seeking past the last index must report failure.
    assertFalse("testArrayFileIteration error !!!",
        reader.seek(new LongWritable(SIZE + 5)));
    reader.close();
  } catch (Exception ex) {
    // Was "testArrayFileWriterConstruction" (copy/paste slip); also
    // include the exception so the cause is not silently swallowed.
    fail("testArrayFileIteration error !!! : " + ex);
  }
}
 
源代码4 项目: big-c   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(FileSystem ignored, JobConf job) 
          throws IOException {
  // Let the superclass validate the output directory first.
  super.checkOutputSpecs(ignored, job);
  // Per-record compression is meaningless for raw binary output;
  // surface a job-configuration error instead of producing bad files.
  if (getCompressOutput(job)
      && CompressionType.RECORD == getOutputCompressionType(job)) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression" );
  }
}
 
源代码5 项目: big-c   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(JobContext job) throws IOException {
  // Inherited validation (output path existence etc.) runs first.
  super.checkOutputSpecs(job);
  final CompressionType type = getOutputCompressionType(job);
  // RECORD compression is unsupported by this binary format.
  if (getCompressOutput(job) && type == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
      + "doesn't support Record Compression" );
  }
}
 
源代码6 项目: big-c   文件: TestArrayFile.java
/** 
 * test on {@link ArrayFile.Reader} iteration methods
 * <pre> 
 * {@code next(), seek()} in and out of range.
 * </pre>
 */
public void testArrayFileIteration() {
  final int SIZE = 10;
  Configuration conf = new Configuration();    
  try {
    FileSystem fs = FileSystem.get(conf);
    ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE, 
        LongWritable.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull("testArrayFileIteration error !!!", writer);
    
    for (int i = 0; i < SIZE; i++) {
      writer.append(new LongWritable(i));
    }
    writer.close();
    
    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
    LongWritable nextWritable = new LongWritable(0);
    
    for (int i = 0; i < SIZE; i++) {
      nextWritable = (LongWritable) reader.next(nextWritable);
      // Fixed argument order: JUnit expects (expected, actual) so a
      // failure message reads correctly.
      assertEquals(i, nextWritable.get());
    }
      
    // seek() positions the reader so that the following next() returns
    // the entry after the sought key.
    assertTrue("testArrayFileIteration seek error !!!",
        reader.seek(new LongWritable(6)));
    nextWritable = (LongWritable) reader.next(nextWritable);
    assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
    assertTrue("testArrayFileIteration error !!!",
        nextWritable.equals(new LongWritable(7)));
    // Seeking past the last index must report failure.
    assertFalse("testArrayFileIteration error !!!",
        reader.seek(new LongWritable(SIZE + 5)));
    reader.close();
  } catch (Exception ex) {
    // Was "testArrayFileWriterConstruction" (copy/paste slip); also
    // include the exception so the cause is not silently swallowed.
    fail("testArrayFileIteration error !!! : " + ex);
  }
}
 
源代码7 项目: RDFS   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(FileSystem ignored, JobConf job) 
          throws IOException {
  // Standard validation of the configured output location.
  super.checkOutputSpecs(ignored, job);
  // This format emits raw binary records; record-level compression is
  // not supported, so treat it as an invalid job configuration.
  final boolean badConfig = getCompressOutput(job)
      && getOutputCompressionType(job) == CompressionType.RECORD;
  if (badConfig) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression" );
  }
}
 
源代码8 项目: RDFS   文件: SequenceFileAsBinaryOutputFormat.java
@Override 
public void checkOutputSpecs(FileSystem ignored, JobConf job) 
          throws IOException {
  // Defer to the superclass for the generic output-spec checks.
  super.checkOutputSpecs(ignored, job);
  // Reject RECORD compression up front — the binary output format
  // cannot compress individual records.
  if (getCompressOutput(job)) {
    if (getOutputCompressionType(job) == CompressionType.RECORD) {
      throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
          + "doesn't support Record Compression" );
    }
  }
}
 
源代码9 项目: hadoop   文件: TestBloomMapFile.java
/**
 * test {@code BloomMapFile.Writer} constructors
 */
@SuppressWarnings("deprecation")
public void testBloomMapFileConstructors() {
  BloomMapFile.Writer writer = null;
  try {
    FileSystem ts = FileSystem.get(conf);
    String testFileName = TEST_FILE.toString();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, WritableComparator.get(Text.class), Text.class);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
  } catch (Exception ex) {
    fail("testBloomMapFileConstructors error !!!");
  } finally {
    IOUtils.cleanup(null, writer);
  }
}
 
源代码10 项目: hadoop   文件: TestMapFile.java
/**
 * test all available constructor for {@code MapFile.Writer}
 */
@Test
@SuppressWarnings("deprecation")
public void testDeprecatedConstructors() {
  String path = new Path(TEST_DIR, "writes.mapfile").toString();
  MapFile.Writer writer = null;
  MapFile.Reader reader = null;
  try {
    FileSystem fs = FileSystem.getLocal(conf);
    writer = new MapFile.Writer(conf, fs, path,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        SequenceFile.CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();

    reader = new MapFile.Reader(fs, path,
        WritableComparator.get(IntWritable.class), conf);
    assertNotNull(reader);
    assertNotNull("reader key is null !!!", reader.getKeyClass());
    assertNotNull("reader value in null", reader.getValueClass());
  } catch (IOException e) {
    fail(e.getMessage());
  } finally {
    IOUtils.cleanup(null, writer, reader);
  }
}
 
源代码11 项目: big-c   文件: TestBloomMapFile.java
/**
 * test {@code BloomMapFile.Writer} constructors
 */
@SuppressWarnings("deprecation")
public void testBloomMapFileConstructors() {
  BloomMapFile.Writer writer = null;
  try {
    FileSystem ts = FileSystem.get(conf);
    String testFileName = TEST_FILE.toString();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts,
        testFileName, WritableComparator.get(Text.class), Text.class);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
  } catch (Exception ex) {
    fail("testBloomMapFileConstructors error !!!");
  } finally {
    IOUtils.cleanup(null, writer);
  }
}
 
源代码12 项目: big-c   文件: TestMapFile.java
/**
 * test all available constructor for {@code MapFile.Writer}
 */
@Test
@SuppressWarnings("deprecation")
public void testDeprecatedConstructors() {
  String path = new Path(TEST_DIR, "writes.mapfile").toString();
  MapFile.Writer writer = null;
  MapFile.Reader reader = null;
  try {
    FileSystem fs = FileSystem.getLocal(conf);
    writer = new MapFile.Writer(conf, fs, path,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        SequenceFile.CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();

    reader = new MapFile.Reader(fs, path,
        WritableComparator.get(IntWritable.class), conf);
    assertNotNull(reader);
    assertNotNull("reader key is null !!!", reader.getKeyClass());
    assertNotNull("reader value in null", reader.getValueClass());
  } catch (IOException e) {
    fail(e.getMessage());
  } finally {
    IOUtils.cleanup(null, writer, reader);
  }
}