Usage examples for the class org.apache.hadoop.hbase.util.AbstractHBaseTool

The following examples show how the org.apache.hadoop.hbase.util.AbstractHBaseTool API is used in practice. Each snippet names the project and file it was taken from, so the full source can be consulted on GitHub.
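Before the project examples, a minimal sketch of what a subclass of AbstractHBaseTool looks like may help, since every tool below builds on the same contract (addOptions/processOptions/doWork). The class name SimpleHBaseTool and its single option are illustrative assumptions, not code from the HBase project; note also that recent HBase versions use the shaded org.apache.hbase.thirdparty commons-cli packages instead of the plain ones imported here.

import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;

public class SimpleHBaseTool extends AbstractHBaseTool {
  private String tableName;

  @Override
  protected void addOptions() {
    // Registered options land in the inherited 'options' field and are
    // printed automatically when argument parsing fails.
    addOptWithArg("t", "table", "Name of the table to operate on");
  }

  @Override
  protected void processOptions(CommandLine cmd) {
    tableName = cmd.getOptionValue("t", "TestTable");
  }

  @Override
  protected int doWork() throws Exception {
    // The inherited 'conf' field is populated before doWork() is called.
    System.out.println("Operating on table " + tableName);
    return EXIT_SUCCESS;
  }

  public static void main(String[] args) {
    // doStaticMain() wraps ToolRunner.run() with a fresh HBaseConfiguration
    // and exits with the tool's return code, much as Example 5 does by hand.
    new SimpleHBaseTool().doStaticMain(args);
  }
}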

Example 1 (project: hbase, file: IntegrationTestWithCellVisibilityLoadAndVerify.java)
private Job doVerify(Configuration conf, TableDescriptor tableDescriptor, String... auths)
    throws IOException, InterruptedException, ClassNotFoundException {
  Path outputDir = getTestDir(TEST_NAME, "verify-output");
  Job job = Job.getInstance(conf);
  job.setJarByClass(this.getClass());
  job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
  setJobScannerConf(job);
  Scan scan = new Scan();
  scan.setAuthorizations(new Authorizations(auths));
  TableMapReduceUtil.initTableMapperJob(tableDescriptor.getTableName().getNameAsString(), scan,
      VerifyMapper.class, NullWritable.class, NullWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
  TableMapReduceUtil.setScannerCaching(job, scannerCaching);
  job.setNumReduceTasks(0);
  FileOutputFormat.setOutputPath(job, outputDir);
  assertTrue(job.waitForCompletion(true));
  return job;
}
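The auths varargs above control which cells the verify mappers may see. As a standalone illustration of the same client API, a scan restricted by visibility labels can be built as below; the helper class is an assumption for this sketch, not part of the test:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

final class VisibilityScanExample {
  static Scan scanWithAuths(String... labels) {
    // Cells whose visibility expression is not satisfied by these labels are
    // silently filtered out of the scan results.
    Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations(labels));
    return scan;
  }
}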
 
Example 2 (project: hbase, file: IntegrationTestLoadAndVerify.java)
protected Job doLoad(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
  Path outputDir = getTestDir(TEST_NAME, "load-output");
  LOG.info("Load output dir: " + outputDir);

  NMapInputFormat.setNumMapTasks(conf, conf.getInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT));
  conf.set(TABLE_NAME_KEY, tableDescriptor.getTableName().getNameAsString());

  Job job = Job.getInstance(conf);
  job.setJobName(TEST_NAME + " Load for " + tableDescriptor.getTableName());
  job.setJarByClass(this.getClass());
  setMapperClass(job);
  job.setInputFormatClass(NMapInputFormat.class);
  job.setNumReduceTasks(0);
  setJobScannerConf(job);
  FileOutputFormat.setOutputPath(job, outputDir);

  TableMapReduceUtil.addDependencyJars(job);

  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  TableMapReduceUtil.initCredentials(job);
  assertTrue(job.waitForCompletion(true));
  return job;
}
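Two dependency-shipping calls appear above: addDependencyJars(Job) ships HBase's own jars plus the job's configured formats and key/value classes, while addDependencyJarsForClasses(Configuration, Class...) ships the jars containing the explicitly listed classes. A self-contained sketch of the same setup follows; the helper name shipJobJars is an assumption:

import java.io.IOException;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.mapreduce.Job;

final class DependencyJarExample {
  static void shipJobJars(Job job) throws IOException {
    // Ship HBase jars and the jars of the job's configured classes.
    TableMapReduceUtil.addDependencyJars(job);
    // Also ship the jar containing AbstractHBaseTool so tool classes
    // referenced by the mappers resolve on the cluster side.
    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
        AbstractHBaseTool.class);
  }
}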
 
Example 3 (project: hbase, file: IntegrationTestLoadAndVerify.java)
protected void doVerify(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
  Path outputDir = getTestDir(TEST_NAME, "verify-output");
  LOG.info("Verify output dir: " + outputDir);

  Job job = Job.getInstance(conf);
  job.setJarByClass(this.getClass());
  job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
  setJobScannerConf(job);

  Scan scan = new Scan();

  TableMapReduceUtil.initTableMapperJob(
      tableDescriptor.getTableName().getNameAsString(), scan, VerifyMapper.class,
      BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
  TableMapReduceUtil.setScannerCaching(job, scannerCaching);

  job.setReducerClass(VerifyReducer.class);
  job.setNumReduceTasks(conf.getInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT));
  FileOutputFormat.setOutputPath(job, outputDir);
  assertTrue(job.waitForCompletion(true));

  long numOutputRecords = job.getCounters().findCounter(Counters.ROWS_WRITTEN).getValue();
  assertEquals(0, numOutputRecords);
}
 
Example 4 (project: hbase, file: PreUpgradeValidator.java)
@Override
public int run(String[] args) throws Exception {
  if (args.length == 0) {
    printUsage();
    return AbstractHBaseTool.EXIT_FAILURE;
  }

  Tool tool;

  switch (args[0]) {
    case VALIDATE_CP_NAME:
      tool = new CoprocessorValidator();
      break;
    case VALIDATE_DBE_NAME:
      tool = new DataBlockEncodingValidator();
      break;
    case VALIDATE_HFILE:
      tool = new HFileContentValidator();
      break;
    case "-h":
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
    default:
      System.err.println("Unknown command: " + args[0]);
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
  }

  tool.setConf(getConf());
  return tool.run(Arrays.copyOfRange(args, 1, args.length));
}
 
Example 5 (project: hbase, file: PreUpgradeValidator.java)
public static void main(String[] args) {
  int ret;

  Configuration conf = HBaseConfiguration.create();

  try {
    ret = ToolRunner.run(conf, new PreUpgradeValidator(), args);
  } catch (Exception e) {
    LOG.error("Error running command-line tool", e);
    ret = AbstractHBaseTool.EXIT_FAILURE;
  }

  System.exit(ret);
}
 
Example 6 (project: hbase, file: RowCounter.java)
@Override
protected void printUsage(final String usageStr, final String usageHeader,
    final String usageFooter) {
  HelpFormatter helpFormatter = new HelpFormatter();
  helpFormatter.setWidth(120);
  helpFormatter.setOptionComparator(new AbstractHBaseTool.OptionsOrderComparator());
  helpFormatter.setLongOptSeparator("=");
  helpFormatter.printHelp(usageStr, usageHeader, options, usageFooter);
}
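The override above swaps AbstractHBaseTool's default usage output for a customized HelpFormatter. The underlying commons-cli pattern can be reproduced standalone as in the sketch below; the option and syntax strings are illustrative, and recent HBase uses the shaded org.apache.hbase.thirdparty commons-cli packages:

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

final class UsagePrinterExample {
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("h", "help", false, "Print usage");
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);             // wider than the 74-column default
    formatter.setLongOptSeparator("=");  // render --opt=value, not --opt value
    formatter.printHelp("rowcounter <tablename>", "Options:", options, null);
  }
}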
 
Example 7 (project: hbase, file: IntegrationTestBigLinkedListWithVisibility.java)
private int doVerify(Path outputDir, int numReducers) throws IOException, InterruptedException,
    ClassNotFoundException {
  job = Job.getInstance(getConf());

  job.setJobName("Link Verifier");
  job.setNumReduceTasks(numReducers);
  job.setJarByClass(getClass());

  setJobScannerConf(job);

  Scan scan = new Scan();
  scan.addColumn(FAMILY_NAME, COLUMN_PREV);
  scan.setCaching(10000);
  scan.setCacheBlocks(false);
  String[] split = labels.split(COMMA);
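  // 'labels' holds visibility labels as consecutive pairs; each verifier
  // instance scans with the pair selected by its labelIndex.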

  scan.setAuthorizations(new Authorizations(split[this.labelIndex * 2],
      split[(this.labelIndex * 2) + 1]));

  TableMapReduceUtil.initTableMapperJob(tableName.getName(), scan, VerifyMapper.class,
      BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);

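  // A speculative duplicate map attempt would emit its references twice and
  // skew the verification counts, so map-side speculation is disabled.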
  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);

  job.setReducerClass(VerifyReducer.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  TextOutputFormat.setOutputPath(job, outputDir);
  boolean success = job.waitForCompletion(true);

  return success ? 0 : 1;
}
 
Example 8 (project: hbase, file: IntegrationTestBigLinkedList.java)
public int runGenerator(int numMappers, long numNodes, Path tmpOutput,
    Integer width, Integer wrapMultiplier, Integer numWalkers)
    throws Exception {
  LOG.info("Running Generator with numMappers=" + numMappers +", numNodes=" + numNodes);
  createSchema();
  job = Job.getInstance(getConf());

  job.setJobName("Link Generator");
  job.setNumReduceTasks(0);
  job.setJarByClass(getClass());

  FileInputFormat.setInputPaths(job, tmpOutput);
  job.setInputFormatClass(OneFilePerMapperSFIF.class);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(NullWritable.class);

  setJobConf(job, numMappers, numNodes, width, wrapMultiplier, numWalkers);

  setMapperForGenerator(job);

  job.setOutputFormatClass(NullOutputFormat.class);

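  // Each mapper must write its chain of linked rows exactly once; a
  // speculative duplicate attempt would corrupt the list, so disable it.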
  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
  TableMapReduceUtil.addDependencyJars(job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
                                                 AbstractHBaseTool.class);
  TableMapReduceUtil.initCredentials(job);

  boolean success = jobCompletion(job);

  return success ? 0 : 1;
}
 
Example 9 (project: hbase, file: IntegrationTestBigLinkedList.java)
public int run(Path outputDir, int numReducers) throws Exception {
  LOG.info("Running Verify with outputDir=" + outputDir +", numReducers=" + numReducers);

  job = Job.getInstance(getConf());

  job.setJobName("Link Verifier");
  job.setNumReduceTasks(numReducers);
  job.setJarByClass(getClass());

  setJobScannerConf(job);

  Scan scan = new Scan();
  scan.addColumn(FAMILY_NAME, COLUMN_PREV);
  scan.setCaching(10000);
  scan.setCacheBlocks(false);
  if (isMultiUnevenColumnFamilies(getConf())) {
    scan.addColumn(BIG_FAMILY_NAME, BIG_FAMILY_NAME);
    scan.addColumn(TINY_FAMILY_NAME, TINY_FAMILY_NAME);
  }

  TableMapReduceUtil.initTableMapperJob(getTableName(getConf()).getName(), scan,
      VerifyMapper.class, BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
                                                 AbstractHBaseTool.class);

  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);

  job.setReducerClass(VerifyReducer.class);
  job.setOutputFormatClass(SequenceFileAsBinaryOutputFormat.class);
  job.setOutputKeyClass(BytesWritable.class);
  job.setOutputValueClass(BytesWritable.class);
  FileOutputFormat.setOutputPath(job, outputDir);

  boolean success = job.waitForCompletion(true);

  if (success) {
    Counters counters = job.getCounters();
    if (null == counters) {
      LOG.warn("Counters were null, cannot verify Job completion."
          + " This is commonly a result of insufficient YARN configuration.");
      // We don't have access to the counters to know if we have "bad" counts
      return 0;
    }

    // If we find no unexpected values, the job didn't outright fail
    if (verifyUnexpectedValues(counters)) {
      // We didn't check referenced+unreferenced counts, leave that to visual inspection
      return 0;
    }
  }

  // We failed
  return 1;
}
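verifyUnexpectedValues is not shown in this listing. Under the assumption that it only checks failure counters, inspecting a finished job's counters looks roughly like the sketch below; the group and counter names are illustrative, not the real enum used by IntegrationTestBigLinkedList:

import org.apache.hadoop.mapreduce.Counters;

final class CounterCheckExample {
  // Returns true when no verification failures were counted.
  static boolean noUnexpectedValues(Counters counters) {
    long undefined = counters.findCounter("Verify", "UNDEFINED").getValue();
    long lostFamilies = counters.findCounter("Verify", "LOST_FAMILIES").getValue();
    // Zero in both counters means every reference pointed at an existing,
    // fully populated row.
    return undefined == 0 && lostFamilies == 0;
  }
}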
 