org.apache.hadoop.io.DataOutputBuffer#getData() Code Examples

Listed below are example usages of org.apache.hadoop.io.DataOutputBuffer#getData(); you can also follow the links to view the full source on GitHub.
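
Before the project examples, here is a minimal, self-contained sketch (illustrative only, not taken from any of the projects below; the class name DataOutputBufferDemo is made up) of the contract behind getData(): it returns the internal backing array, which is usually longer than the valid data, so it should always be paired with getLength().

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferDemo {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // getData() exposes the internal buffer; only the first getLength()
    // bytes are valid, the rest is padding.
    byte[] exact = Arrays.copyOf(out.getData(), out.getLength());

    // Round trip: bound reads by getLength(), never by getData().length.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    Text copy = new Text();
    copy.readFields(in);
    System.out.println(copy + " (" + exact.length + " valid bytes)");
  }
}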

Example 1  Project: Bats   File: LaunchContainerRunnable.java
public static ByteBuffer getTokens(UserGroupInformation ugi, Token<StramDelegationTokenIdentifier> delegationToken)
{
  try {
    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
    Credentials credentials = new Credentials();
    for (Token<? extends TokenIdentifier> token : tokens) {
      if (!token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
        credentials.addToken(token.getService(), token);
        LOG.debug("Passing container token {}", token);
      }
    }
    credentials.addToken(delegationToken.getService(), delegationToken);
    DataOutputBuffer dataOutput = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dataOutput);
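    // Note: getData() returns the whole backing array, which may be longer
    // than getLength(); the wrapped ByteBuffer below carries that padding along.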
    byte[] tokenBytes = dataOutput.getData();
    ByteBuffer cTokenBuf = ByteBuffer.wrap(tokenBytes);
    return cTokenBuf.duplicate();
  } catch (IOException e) {
    throw new RuntimeException("Error generating delegation token", e);
  }
}
 
Example 2  Project: hadoop   File: Chain.java
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                      E obj) throws IOException {
  Serializer<E> ser =
    serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
    serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
                                    getChainJobConf());
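  // Deserialize from only the valid prefix; the reused thread-local buffer
  // may hold stale bytes beyond getLength().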
  ByteArrayInputStream bais =
    new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
 
Example 3  Project: hadoop   File: TestDelegationTokenRemoteFetcher.java
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
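  // Caveat: getData().length is the capacity of the backing buffer, not the
  // number of valid bytes; out.getLength() would bound the response to the
  // bytes actually written.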
  int fileLength = out.getData().length;
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData());
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
 
Example 4  Project: hadoop   File: TestDFSPacket.java
@Test
public void testPacket() throws Exception {
  Random r = new Random(12345L);
  byte[] data =  new byte[chunkSize];
  r.nextBytes(data);
  byte[] checksum = new byte[checksumSize];
  r.nextBytes(checksum);

  DataOutputBuffer os =  new DataOutputBuffer(data.length * 2);

  byte[] packetBuf = new byte[data.length * 2];
  DFSPacket p = new DFSPacket(packetBuf, maxChunksPerPacket,
                              0, 0, checksumSize, false);
  p.setSyncBlock(true);
  p.writeData(data, 0, data.length);
  p.writeChecksum(checksum, 0, checksum.length);
  p.writeTo(os);

  //we have set syncBlock to true, so the header has the maximum length
  int headerLen = PacketHeader.PKT_MAX_HEADER_LEN;
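  // getData() hands back the whole packet buffer; the assertions below only
  // read the checksum and data regions that were actually written.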
  byte[] readBuf = os.getData();

  assertArrayRegionsEqual(readBuf, headerLen, checksum, 0, checksum.length);
  assertArrayRegionsEqual(readBuf, headerLen + checksum.length, data, 0, data.length);

}
 
Example 5  Project: hadoop   File: CryptoStreamsTestBase.java
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();
    
    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
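  // Keep the backing array and the valid byte count together; the array is
  // typically larger than dataLen.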
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
 
Example 6  Project: phoenix   File: TestClientKeyValueLocal.java
private void validate(KeyValue kv, byte[] row, byte[] family, byte[] qualifier, long ts,
    Type type, byte[] value) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  kv.write(out);
  out.close();
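  // KeyValue's Writable format carries its own length, so readFields below
  // stops at the encoded length and ignores any trailing buffer padding.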
  byte[] data = out.getData();
  // read it back in
  KeyValue read = new KeyValue();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, data.length);
  read.readFields(in);
  in.close();

  // validate that its the same
  assertTrue("Row didn't match!", Bytes.equals(row, read.getRow()));
  assertTrue("Family didn't match!", Bytes.equals(family, read.getFamily()));
  assertTrue("Qualifier didn't match!", Bytes.equals(qualifier, read.getQualifier()));
  assertTrue("Value didn't match!", Bytes.equals(value, read.getValue()));
  assertEquals("Timestamp didn't match", ts, read.getTimestamp());
  assertEquals("Type didn't match", type.getCode(), read.getType());
}
 
Example 7  Project: big-c   File: Chain.java
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                      E obj) throws IOException {
  Serializer<E> ser =
    serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
    serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
                                    getChainJobConf());
  ByteArrayInputStream bais =
    new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
 
Example 8  Project: hadoop-gpu   File: Chain.java
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                      E obj) throws IOException {
  Serializer<E> ser =
    serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
    serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
                                    getChainJobConf());
  ByteArrayInputStream bais =
    new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
 
Example 9  Project: big-c   File: CryptoStreamsTestBase.java
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();
    
    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
 
Example 10  Project: big-c   File: TestIndexedSort.java
public WritableSortable(int j) throws IOException {
  seed = r.nextLong();
  r.setSeed(seed);
  Text t = new Text();
  StringBuilder sb = new StringBuilder();
  indices = new int[j];
  offsets = new int[j];
  check = new String[j];
  DataOutputBuffer dob = new DataOutputBuffer();
  for (int i = 0; i < j; ++i) {
    indices[i] = i;
    offsets[i] = dob.getLength();
    genRandom(t, r.nextInt(15) + 1, sb);
    t.write(dob);
    check[i] = t.toString();
  }
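  // Capture the valid length (eob) along with the backing array; bytes.length
  // may exceed eob.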
  eob = dob.getLength();
  bytes = dob.getData();
  comparator = WritableComparator.get(Text.class);
}
 
Example 11  Project: attic-apex-core   File: LaunchContainerRunnable.java
public static ByteBuffer getTokens(UserGroupInformation ugi, Token<StramDelegationTokenIdentifier> delegationToken)
{
  try {
    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
    Credentials credentials = new Credentials();
    for (Token<? extends TokenIdentifier> token : tokens) {
      if (!token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
        credentials.addToken(token.getService(), token);
        LOG.debug("Passing container token {}", token);
      }
    }
    credentials.addToken(delegationToken.getService(), delegationToken);
    DataOutputBuffer dataOutput = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dataOutput);
    byte[] tokenBytes = dataOutput.getData();
    ByteBuffer cTokenBuf = ByteBuffer.wrap(tokenBytes);
    return cTokenBuf.duplicate();
  } catch (IOException e) {
    throw new RuntimeException("Error generating delegation token", e);
  }
}
 
Example 12  Project: incubator-retired-blur   File: QueryWritableTest.java
@Test
public void testTermQuery() throws IOException {
  TermQuery query = new TermQuery(new Term("field", "value"));
  QueryWritable queryWritable = new QueryWritable();
  queryWritable.setQuery(query);
  DataOutputBuffer out = new DataOutputBuffer();
  queryWritable.write(out);
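  // Correct pairing: capture both the backing array and the valid length, and
  // bound the DataInputBuffer by length rather than by data.length.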
  byte[] data = out.getData();
  int length = out.getLength();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, length);

  QueryWritable newQueryWritable = new QueryWritable();
  newQueryWritable.readFields(in);

  Query termQuery = newQueryWritable.getQuery();

  assertEquals(query, termQuery);

}
 
Example 13  Project: spliceengine   File: HFilesystemAdmin.java
private static byte[] toByteArray(Writable... writables) {
    final DataOutputBuffer out = new DataOutputBuffer();
    try {
        for(Writable w : writables) {
            w.write(out);
        }
        out.close();
    } catch (IOException e) {
        throw new RuntimeException("Fail to convert writables to a byte array",e);
    }
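    // getData() exposes the backing array; return it as-is only when its
    // capacity equals the valid length, otherwise trim a copy below.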
    byte[] bytes = out.getData();
    if (bytes.length == out.getLength()) {
        return bytes;
    }
    byte[] result = new byte[out.getLength()];
    System.arraycopy(bytes, 0, result, 0, out.getLength());
    return result;
}
 
Example 14  Project: RDFS   File: TestCorruptFileBlocks.java
/**
 * Serialize the cfb given, deserialize and return the result.
 */
static CorruptFileBlocks serializeAndDeserialize(CorruptFileBlocks cfb) 
  throws IOException {
  DataOutputBuffer buf = new DataOutputBuffer();
  cfb.write(buf);

  byte[] data = buf.getData();
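  // The input stream wraps the whole backing array; readFields consumes
  // exactly one CorruptFileBlocks record and leaves the trailing padding unread.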
  DataInputStream input = new DataInputStream(new ByteArrayInputStream(data));

  CorruptFileBlocks result = new CorruptFileBlocks();
  result.readFields(input);

  return result;
}
 
Example 15  Project: tez   File: TestEntityDescriptor.java
public void testSingularWrite(InputDescriptor entityDescriptor, InputDescriptor deserialized, UserPayload payload,
                              String confVal) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  entityDescriptor.write(out);
  out.close();
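  // Caveat: out.getData().length is the capacity of the backing buffer;
  // out.getLength() would size and copy only the bytes actually written.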
  ByteArrayOutputStream bos = new ByteArrayOutputStream(out.getData().length);
  bos.write(out.getData());

  Mockito.verify(entityDescriptor).writeSingular(eq(out), any(ByteBuffer.class));
  deserialized.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
  verifyResults(entityDescriptor, deserialized, payload, confVal);
}
 
Example 16  Project: incubator-retired-mrql   File: FlinkXMLParser.java
public String slice () {
     if (splitter.hasNext()) {
         DataOutputBuffer b = splitter.next();
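         // Build the String from only the valid region of the reused buffer.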
         return new String(b.getData(),0,b.getLength());
     } else return null;
}
 
Example 17  Project: incubator-retired-mrql   File: FlinkJsonParser.java
public String slice () {
    if (splitter.hasNext()) {
        DataOutputBuffer b = splitter.next();
        return new String(b.getData(),0,b.getLength());
    } else return null;
}
 
Example 18  Project: incubator-retired-mrql   File: JsonFormatParser.java
public String slice () {
    if (splitter.hasNext()) {
        DataOutputBuffer b = splitter.next();
        return new String(b.getData(),0,b.getLength());
    } else return null;
}
 
Example 19  Project: incubator-retired-mrql   File: XMLParser.java
public String slice () {
    if (splitter.hasNext()) {
        DataOutputBuffer b = splitter.next();
        return new String(b.getData(),0,b.getLength());
    } else return null;
}
 
Example 20  Project: Hadoop-BAM   File: TestVCFOutputFormat.java
@Test
public void testVariantContextReadWrite() throws IOException, InterruptedException
{
    // This is to check whether issue https://github.com/HadoopGenomics/Hadoop-BAM/issues/1 has been
    // resolved
    VariantContextBuilder vctx_builder = new VariantContextBuilder();

    ArrayList<Allele> alleles = new ArrayList<Allele>();
    alleles.add(Allele.create("C", false));
    alleles.add(Allele.create("G", true));
    vctx_builder.alleles(alleles);

    ArrayList<Genotype> genotypes = new ArrayList<Genotype>();
    GenotypeBuilder builder = new GenotypeBuilder();
    genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00001").GQ(48).DP(1).make());
    genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00002").GQ(42).DP(2).make());
    genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00003").GQ(39).DP(3).make());
    vctx_builder.genotypes(genotypes);

    HashSet<String> filters = new HashSet<String>();
    vctx_builder.filters(filters);

    HashMap<String, Object> attributes = new HashMap<String, Object>();
    attributes.put("NS", new Integer(4));
    vctx_builder.attributes(attributes);

    vctx_builder.loc("20", 2, 2);
    vctx_builder.log10PError(-8.0);

    VariantContext ctx = vctx_builder.make();
    VariantContextWithHeader ctxh = new VariantContextWithHeader(ctx, readHeader());
    writable.set(ctxh);

    DataOutputBuffer out = new DataOutputBuffer(1000);
    writable.write(out);
    
    byte[] data = out.getData();
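    // The stream sees the whole backing array; readFields consumes only the
    // serialized record and ignores the trailing padding.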
    ByteArrayInputStream bis = new ByteArrayInputStream(data);

    writable = new VariantContextWritable();
    writable.readFields(new DataInputStream(bis));

    VariantContext vc = writable.get();
    Assert.assertArrayEquals("comparing Alleles",ctx.getAlleles().toArray(),vc.getAlleles().toArray());
    Assert.assertEquals("comparing Log10PError",ctx.getLog10PError(),vc.getLog10PError(),0.01);
    Assert.assertArrayEquals("comparing Filters",ctx.getFilters().toArray(),vc.getFilters().toArray());
    Assert.assertEquals("comparing Attributes",ctx.getAttributes(),vc.getAttributes());

    // Now check the genotypes. Note: we need to make the header accessible before decoding the genotypes.
    GenotypesContext gc = vc.getGenotypes();
    assert(gc instanceof LazyVCFGenotypesContext);
    LazyVCFGenotypesContext.HeaderDataCache headerDataCache = new LazyVCFGenotypesContext.HeaderDataCache();
    headerDataCache.setHeader(readHeader());
    ((LazyVCFGenotypesContext) gc).getParser().setHeaderDataCache(headerDataCache);

    for (Genotype genotype : genotypes) {
        Assert.assertEquals("checking genotype name", genotype.getSampleName(), gc.get(genotypes.indexOf(genotype)).getSampleName());
        Assert.assertEquals("checking genotype quality", genotype.getGQ(), gc.get(genotypes.indexOf(genotype)).getGQ());
        Assert.assertEquals("checking genotype read depth", genotype.getDP(), gc.get(genotypes.indexOf(genotype)).getDP());
    }
}