Code examples for the class org.apache.hadoop.fs.FSInputChecker

The following examples show how the org.apache.hadoop.fs.FSInputChecker API is used in real projects; you can also click through to GitHub to view the full source files.

Example 1   Project: RDFS   File: BlockXCodingMerger.java

/**
 * reads in the partial crc chunk and computes checksum of pre-existing data
 * in partial chunk.
 */
private void computePartialChunkCrc(long blkoff, long ckoff,
		int bytesPerChecksum) throws IOException {

	// find offset of the beginning of partial chunk.
	//
	int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
	int checksumSize = checksum.getChecksumSize();
	blkoff = blkoff - sizePartialChunk;

	// create an input stream from the block file
	// and read in partial crc chunk into temporary buffer
	byte[] buf = new byte[sizePartialChunk];
	byte[] crcbuf = new byte[checksumSize];
	FSDataset.BlockInputStreams instr = null;
	try {
		instr = datanode.data.getTmpInputStreams(namespaceId, block,
				blkoff, ckoff);
		IOUtils.readFully(instr.dataIn, buf, 0, sizePartialChunk);

		// open the meta file and read in the crc value computed earlier
		IOUtils.readFully(instr.checksumIn, crcbuf, 0, crcbuf.length);
	} finally {
		IOUtils.closeStream(instr);
	}

	// compute crc of partial chunk from data read in the block file.
	partialCrc = new CRC32();
	partialCrc.update(buf, 0, sizePartialChunk);

	// paranoia! verify that the pre-computed crc matches what we
	// recalculated just now
	if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
		String msg = "Partial CRC " + partialCrc.getValue()
				+ " does not match value computed the last time"
				+ " the file was closed: "
				+ FSInputChecker.checksum2long(crcbuf);
		throw new IOException(msg);
	}
}
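
This example (and the two that follow) hinges on FSInputChecker.checksum2long, which folds the raw checksum bytes read from the block's meta file into a long, most significant byte first, so the result can be compared directly against CRC32.getValue(). A minimal standalone sketch of that conversion (illustrative only, not the Hadoop source itself):

// Illustrative equivalent of FSInputChecker.checksum2long:
// fold the checksum bytes into a long, big-endian.
static long checksumBytesToLong(byte[] checksum) {
    long crc = 0L;
    for (int i = 0; i < checksum.length; i++) {
        crc |= (0xffL & checksum[i]) << ((checksum.length - i - 1) * 8);
    }
    return crc;
}

For a 4-byte CRC32 checksum this leaves an unsigned 32-bit value in the low bits of the long, which is exactly the form CRC32.getValue() returns.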
 
Example 2   Project: RDFS   File: BlockReceiver.java

/**
 * reads in the partial crc chunk and computes checksum
 * of pre-existing data in partial chunk.
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // find offset of the beginning of partial chunk.
  //
  int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
  int checksumSize = checksum.getChecksumSize();
  blkoff = blkoff - sizePartialChunk;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            sizePartialChunk +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // create an input stream from the block file
  // and read in partial crc chunk into temporary buffer
  //
  byte[] buf = new byte[sizePartialChunk];
  byte[] crcbuf = new byte[checksumSize];
  FSDataset.BlockInputStreams instr = null;
  try { 
    instr = datanode.data.getTmpInputStreams(namespaceId, block, blkoff, ckoff);
    IOUtils.readFully(instr.dataIn, buf, 0, sizePartialChunk);

    // open the meta file and read in the crc value computed earlier
    IOUtils.readFully(instr.checksumIn, crcbuf, 0, crcbuf.length);
  } finally {
    IOUtils.closeStream(instr);
  }

  // compute crc of partial chunk from data read in the block file.
  partialCrc = new CRC32();
  partialCrc.update(buf, 0, sizePartialChunk);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // paranoia! verify that the pre-computed crc matches what we
  // recalculated just now
  if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the last time" +
                 " the file was closed: " +
                 FSInputChecker.checksum2long(crcbuf);
    throw new IOException(msg);
  }
  //LOG.debug("Partial CRC matches 0x" + 
  //            Long.toHexString(partialCrc.getValue()));
}
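
The first step of computePartialChunkCrc is pure offset arithmetic: blkoff % bytesPerChecksum gives the number of pre-existing bytes in the last, partially filled chunk, and subtracting that remainder rounds blkoff down to the chunk boundary. A small worked example, using the common default of 512 bytes per checksum (the concrete numbers are illustrative):

long blkoff = 1300;                 // append resumes at this block offset
int bytesPerChecksum = 512;         // common default chunk size
int sizePartialChunk = (int) (blkoff % bytesPerChecksum); // 276 bytes already in the chunk
long chunkStart = blkoff - sizePartialChunk;              // chunk begins at offset 1024
System.out.println(sizePartialChunk + " existing bytes, chunk starts at " + chunkStart);

Those 276 pre-existing bytes are then read back from disk so their CRC can be recomputed and checked against the value stored in the meta file.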
 
Example 3   Project: hadoop-gpu   File: BlockReceiver.java

/**
 * reads in the partial crc chunk and computes checksum
 * of pre-existing data in partial chunk.
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // find offset of the beginning of partial chunk.
  //
  int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
  int checksumSize = checksum.getChecksumSize();
  blkoff = blkoff - sizePartialChunk;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            sizePartialChunk +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // create an input stream from the block file
  // and read in partial crc chunk into temporary buffer
  //
  byte[] buf = new byte[sizePartialChunk];
  byte[] crcbuf = new byte[checksumSize];
  FSDataset.BlockInputStreams instr = null;
  try { 
    instr = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(instr.dataIn, buf, 0, sizePartialChunk);

    // open the meta file and read in the crc value computed earlier
    IOUtils.readFully(instr.checksumIn, crcbuf, 0, crcbuf.length);
  } finally {
    IOUtils.closeStream(instr);
  }

  // compute crc of partial chunk from data read in the block file.
  partialCrc = new CRC32();
  partialCrc.update(buf, 0, sizePartialChunk);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // paranoia! verify that the pre-computed crc matches what we
  // recalculated just now
  if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the last time" +
                 " the file was closed: " +
                 FSInputChecker.checksum2long(crcbuf);
    throw new IOException(msg);
  }
  //LOG.debug("Partial CRC matches 0x" + 
  //            Long.toHexString(partialCrc.getValue()));
}
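
All three variants implement the same check: re-read the partial chunk from disk, recompute its CRC32, and compare it against the checksum persisted in the meta file, failing the write if they disagree. Below is a self-contained, runnable sketch of that pattern; the data buffer and the stored checksum bytes are stubbed in here, and checksumToLong and storedCrcBytes are illustrative stand-ins rather than part of the Hadoop API:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class PartialCrcCheck {

    // Illustrative stand-in for FSInputChecker.checksum2long.
    static long checksumToLong(byte[] checksum) {
        long crc = 0L;
        for (int i = 0; i < checksum.length; i++) {
            crc |= (0xffL & checksum[i]) << ((checksum.length - i - 1) * 8);
        }
        return crc;
    }

    public static void main(String[] args) throws IOException {
        // Stand-in for the partial chunk bytes read back from the block file.
        byte[] partialData = "pre-existing chunk data".getBytes(StandardCharsets.UTF_8);

        // Recompute the CRC of the partial chunk, as the examples above do.
        CRC32 partialCrc = new CRC32();
        partialCrc.update(partialData, 0, partialData.length);

        // In the real code these four bytes come from the block's meta file;
        // here they are derived from the same data so the check passes.
        long expected = partialCrc.getValue();
        byte[] storedCrcBytes = new byte[] {
            (byte) (expected >>> 24), (byte) (expected >>> 16),
            (byte) (expected >>> 8),  (byte) expected
        };

        // Paranoia check, mirroring the examples: fail if the recomputed
        // CRC does not match the stored one.
        if (partialCrc.getValue() != checksumToLong(storedCrcBytes)) {
            throw new IOException("Partial CRC " + partialCrc.getValue()
                + " does not match stored value " + checksumToLong(storedCrcBytes));
        }
        System.out.println("Partial CRC matches 0x"
            + Long.toHexString(partialCrc.getValue()));
    }
}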
 