org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#atEnd() Source Code Examples

Listed below are example usages of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#atEnd(), drawn from several open-source projects.
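
Before the project examples, here is a minimal usage sketch (not taken from any of the projects below; the TFileAtEndDemo class and dumpTFile method are illustrative names only). Scanner#atEnd() reports whether the scanner's cursor has moved past the last entry of its scan range, so it is typically paired with advance() to walk every entry, or checked after a seek such as lowerBound() to detect that no matching entry exists.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.file.tfile.TFile;

public class TFileAtEndDemo {
  // Walks every key/value pair of a TFile, using atEnd() as the loop guard.
  static void dumpTFile(FileSystem fs, Path path, Configuration conf) throws IOException {
    FSDataInputStream in = fs.open(path);
    TFile.Reader reader = new TFile.Reader(in, fs.getFileStatus(path).getLen(), conf);
    TFile.Reader.Scanner scanner = reader.createScanner();
    try {
      // atEnd() becomes true once the cursor has passed the last entry of the scan range.
      while (!scanner.atEnd()) {
        TFile.Reader.Scanner.Entry entry = scanner.entry();
        byte[] key = new byte[entry.getKeyLength()];
        byte[] value = new byte[entry.getValueLength()];
        entry.getKey(key);
        entry.getValue(value);
        System.out.println(new String(key) + " -> " + new String(value));
        scanner.advance();
      }
    } finally {
      scanner.close();
      reader.close();
      in.close();
    }
  }
}

The project examples that follow use atEnd() in exactly those two roles: guarding a read of the record after an advance(), and detecting a seek miss after lowerBound().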

Example 1  Project: hadoop   File: TestTFileByteArrays.java
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // read the value of the record located at the sought key
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  // if a following record exists, read its value as well
  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
        + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
 
Example 2  Project: big-c   File: TestTFileByteArrays.java
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // read the value of the record located at the sought key
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  // if a following record exists, read its value as well
  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
        + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
 
Example 3  Project: RDFS   File: TestTFileByteArrays.java
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // read the value of the record located at the sought key
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  // if a following record exists, read its value as well
  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
        + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
 
Example 4  Project: hadoop-gpu   File: TestTFileByteArrays.java
private void readValueWithoutKey(int count, int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScanner(composeSortedKey(KEY, count, recordIndex)
          .getBytes(), null);

  // read the value of the record located at the sought key
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  // if a following record exists, read its value as well
  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
        + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
 
Example 5  Project: hadoop   File: TestTFileSeek.java
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    // seek to the first entry whose key is >= the sampled key
    scanner.lowerBound(key.get(), 0, key.getSize());
    // atEnd() is true when no such entry exists; such a seek counts as a miss
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 6  Project: hadoop   File: TestTFileByteArrays.java
private void readKeyWithoutValue(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
          recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example 7  Project: big-c   File: TestTFileSeek.java
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    // seek to the first entry whose key is >= the sampled key
    scanner.lowerBound(key.get(), 0, key.getSize());
    // atEnd() is true when no such entry exists; such a seek counts as a miss
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 8  Project: big-c   File: TestTFileByteArrays.java
private void readKeyWithoutValue(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
          recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example 9  Project: RDFS   File: TestTFileSeek.java
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    // seek to the first entry whose key is >= the sampled key
    scanner.lowerBound(key.get(), 0, key.getSize());
    // atEnd() is true when no such entry exists; such a seek counts as a miss
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 10  Project: RDFS   File: TestTFileByteArrays.java
private void readKeyWithoutValue(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
          recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example 11  Project: hadoop-gpu   File: TestTFileSeek.java
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    // seek to the first entry whose key is >= the sampled key
    scanner.lowerBound(key.get(), 0, key.getSize());
    // atEnd() is true when no such entry exists; such a seek counts as a miss
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 12  Project: hadoop-gpu   File: TestTFileByteArrays.java
private void readKeyWithoutValue(int count, int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScanner(composeSortedKey(KEY, count, recordIndex)
          .getBytes(), null);

  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        count, recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
          count, recordIndex + 1));
    }
  }
  finally {
    scanner.close();
    reader.close();
  }
}