Source code examples for the class org.apache.hadoop.util.ProgramDriver

The examples below show how the org.apache.hadoop.util.ProgramDriver API is used in real projects; you can also follow the project links to view the full source on GitHub.
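All of the samples share the same basic pattern: create a ProgramDriver, register each runnable class under a command name with addClass, then dispatch on the first command-line argument. The following is a minimal sketch of that pattern; the MyTool class and the "mytool" command name are placeholders, not taken from any project listed here.

import org.apache.hadoop.util.ProgramDriver;

public class MyToolDriver {
  public static void main(String[] args) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
      // Register a tool under a command name with a one-line description.
      // MyTool is a placeholder for any class with a main(String[]) method.
      pgd.addClass("mytool", MyTool.class,
          "A placeholder map/reduce program registered under the name 'mytool'.");
      // run() looks up args[0], invokes the registered class's main() with the
      // remaining arguments, and returns an exit status.
      exitCode = pgd.run(args);
    } catch (Throwable e) {
      e.printStackTrace();
    }
    System.exit(exitCode);
  }
}

Some of the older examples below call pgd.driver(argv) instead of pgd.run(argv); driver() does not return an exit code, which is why those main() methods do not pass a driver result to System.exit.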

Example 1  Project: bigdata-tutorial  File: ExampleDriver.java
public static void main(String argv[]) {
	int exitCode = -1;
	ProgramDriver pgd = new ProgramDriver();
	try {
		pgd.addClass("wordcount", WordCount.class,
				"A map/reduce program that counts the words in the input files.");

		pgd.addClass("xflowstatic", XflowStatic.class,
				"A map/reduce program that static xflow from data files.");

		exitCode = pgd.run(argv);
	} catch (Throwable e) {
		e.printStackTrace();
	}

	System.exit(exitCode);
}
 
Example 2  Project: cloud-bigtable-examples  File: WordCountDriver.java
public static void main(String[] args) {
  ProgramDriver programDriver = new ProgramDriver();
  int exitCode = -1;
  try {
    programDriver.addClass("wordcount-hbase", WordCountHBase.class,
        "A map/reduce program that counts the words in the input files.");
    programDriver.addClass("export-table", Export.class,
        "A map/reduce program that exports a table to a file.");
    //programDriver.addClass("cellcounter", CellCounter.class, "Count them cells!");
    programDriver.driver(args);
    exitCode = programDriver.run(args);
  } catch (Throwable e) {
    e.printStackTrace();
  }
  System.exit(exitCode);
}
 
Example 3  Project: RDFS  File: AllTestDriver.java
/**
 * A description of the test program for running all the tests using jar file
 */
public static void main(String argv[]){
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("gentest", DFSGeneralTest.class, "A map/reduce benchmark that supports running multi-thread operations in multiple machines");
    pgd.addClass("locktest", DFSLockTest.class, "A benchmark that spawns many threads and each thread run many configurable read/write FileSystem operations to test FSNamesystem lock's concurrency.");
    pgd.addClass("dirtest", DFSDirTest.class, "A map/reduce benchmark that creates many jobs and each job spawns many threads and each thread create/delete many dirs.");
    pgd.addClass("dfstest", DFSIOTest.class, "A map/reduce benchmark that creates many jobs and each jobs can create many files to test i/o rate per task of hadoop cluster.");
    pgd.addClass("structure-gen", StructureGenerator.class, "Create a structure of files and directories as an input for data-gen");
    pgd.addClass("data-gen", DataGenerator.class, "Create files and directories on cluster as inputs for load-gen");
    pgd.addClass("load-gen", LoadGenerator.class, "A tool to test the behavior of NameNode with different client loads.");
    pgd.addClass("testnn", TestNNThroughputBenchmark.class, "Test the behavior of the namenode on localhost." +
        " Here namenode is real and others are simulated");
    pgd.driver(argv);
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 4  Project: hadoop-ozone  File: OzoneTestDriver.java
public OzoneTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("freon", Freon.class,
        "Populates ozone with data.");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 5  Project: hadoop  File: YarnTestDriver.java
public YarnTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass(TestZKRMStateStorePerf.class.getSimpleName(),
        TestZKRMStateStorePerf.class,
        "ZKRMStateStore i/o benchmark.");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 6  Project: hadoop  File: HdfsTestDriver.java
public HdfsTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("dfsthroughput", BenchmarkThroughput.class, 
        "measure hdfs throughput");
    pgd.addClass("minidfscluster", MiniDFSClusterManager.class, 
        "Run a single-process mini DFS cluster");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 7  Project: hadoop  File: CoreTestDriver.java
public CoreTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("testsetfile", TestSetFile.class, 
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testarrayfile", TestArrayFile.class, 
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
    pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 8  Project: pravega-samples  File: ExampleDriver.java
public static void main(String argv[]) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("wordcount", WordCount.class,
                "A map/reduce program that counts the words in pravega.");
        pgd.addClass("wordmean", WordMean.class,
                "A map/reduce program that counts the average length of the words in pravega.");
        pgd.addClass("wordmedian", WordMedian.class,
                "A map/reduce program that counts the median length of the words in pravega.");
        pgd.addClass("randomwriter", RandomWriter.class,
                "A map/reduce program that writes random data to pravega.");
        pgd.addClass("randomtextwriter", RandomTextWriter.class,
                "A map/reduce program that writes random textual data to pravega.");
        pgd.addClass("teragen", TeraGen.class,
                "A map/reduce program that generate the official GraySort input data set to pravega.");
        pgd.addClass("terasort", TeraSort.class,
                "A map/reduce program that sorts events from one pravega stream and write the sorted " +
                        "events into one or more streams in a globally sorted manner.");
        pgd.addClass("terastreamvalidate", TeraStreamValidate.class,
                "A map/reduce program that reads events from sorted streams and write into hdfs files " +
                        "for validation purpose");
        exitCode = pgd.run(argv);
    } catch (Throwable e) {
        e.printStackTrace();
    }

    System.exit(exitCode);
}
 
Example 9  Project: big-c  File: YarnTestDriver.java
public YarnTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass(TestZKRMStateStorePerf.class.getSimpleName(),
        TestZKRMStateStorePerf.class,
        "ZKRMStateStore i/o benchmark.");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 10  Project: big-c  File: HdfsTestDriver.java
public HdfsTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("dfsthroughput", BenchmarkThroughput.class, 
        "measure hdfs throughput");
    pgd.addClass("minidfscluster", MiniDFSClusterManager.class, 
        "Run a single-process mini DFS cluster");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 11  Project: big-c  File: CoreTestDriver.java
public CoreTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("testsetfile", TestSetFile.class, 
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testarrayfile", TestArrayFile.class, 
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
    pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 12  Project: incubator-retired-horn  File: ExampleDriver.java
public static void main(String args[]) {
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass(
        "MNISTConverter",
        MNISTConverter.class,
        "A utility program that converts MNIST training and label datasets "
        + "into HDFS sequence file.");
    pgd.addClass("MNISTEvaluator",
        MNISTEvaluator.class,
        "A utility program that evaluates trained model for the MNIST dataset");
    pgd.addClass(
        "MultiLayerPerceptron",
        MultiLayerPerceptron.class,
        "An example program that trains a multilayer perceptron model from HDFS sequence file.");
    pgd.addClass("ExclusiveOrConverter",
        ExclusiveOrConverter.class,
        "A utility program that converts ExclusiveOR training and label datasets ");
    pgd.addClass(
        "ExclusiveOrRecurrentMultiLayerPerceptron",
        ExclusiveOrRecurrentMultiLayerPerceptron.class,
        "An example program that trains a recurrent multilayer perceptron model with exclusive or"
        + " from HDFS sequence file.");
    pgd.addClass(
        "MnistRecurrentMultiLayerPerceptron",
        MnistRecurrentMultiLayerPerceptron.class,
        "An example program that trains a recurrent multilayer perceptron model with MNIST"
        + " from HDFS sequence file.");
    exitCode = pgd.run(args);
  } catch (Throwable e) {
    e.printStackTrace();
  }
  System.exit(exitCode);
}
 
Example 13  Project: hbase  File: Driver.java
public static void main(String[] args) throws Throwable {
  ProgramDriver pgd = new ProgramDriver();

  pgd.addClass(RowCounter.NAME, RowCounter.class,
    "Count rows in HBase table.");
  pgd.addClass(CellCounter.NAME, CellCounter.class,
    "Count cells in HBase table.");
  pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
  pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
  pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
  pgd.addClass(BulkLoadHFilesTool.NAME, BulkLoadHFilesTool.class,
               "Complete a bulk data load.");
  pgd.addClass(CopyTable.NAME, CopyTable.class,
      "Export a table from local cluster to peer cluster.");
  pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
      " data from tables in two different clusters. It" +
      " doesn't work for incrementColumnValues'd cells since" +
      " timestamp is changed after appending to WAL.");
  pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
  pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
      " the specific snapshot to a given FileSystem.");
  pgd.addClass(MobRefReporter.NAME, MobRefReporter.class, "Check the mob cells in a particular " +
      "table and cf and confirm that the files they point to are correct.");

  ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
    invoke(pgd, new Object[]{args});
}
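The reflective getMethod("driver", ...).invoke(...) call at the end of this Driver is just an indirect way of invoking the driver(String[]) method on the ProgramDriver instance; presumably it is written through reflection so the class compiles against Hadoop versions where driver() has a different declared signature. A direct call (for illustration only, not part of the hbase source) would be:

pgd.driver(args);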
 
Example 14  Project: hbase  File: TestDriver.java
@Test
public void testDriverMainMethod() throws Throwable {
  ProgramDriver programDriverMock = mock(ProgramDriver.class);
  Driver.setProgramDriver(programDriverMock);
  Driver.main(new String[]{});
  verify(programDriverMock).driver(Mockito.any());
}
 
Example 15  Project: sequenceiq-samples  File: TopKDriver.java
public static void main(String argv[]) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("topk", TopK.class, "topk");
        pgd.addClass("topkgen", TopKDataGen.class, "topkgen");
        exitCode = pgd.run(argv);
    } catch (Throwable e) {
        e.printStackTrace();
    }

    System.exit(exitCode);
}
 
Example 16  Project: RDFS  File: AllTestDriver.java
/**
 * A description of the test program for running all the tests using jar file
 */
public static void main(String argv[]){
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("threadedmapbench", ThreadedMapBenchmark.class, 
                 "A map/reduce benchmark that compares the performance " + 
                 "of maps with multiple spills over maps with 1 spill");
    pgd.addClass("mrbench", MRBench.class, "A map/reduce benchmark that can create many small jobs");
    pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
    pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
    pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
    pgd.addClass("testsequencefile", TestSequenceFile.class, "A test for flat files of binary key value pairs.");
    pgd.addClass("testsetfile", TestSetFile.class, "A test for flat files of binary key/value pairs.");
    pgd.addClass("testarrayfile", TestArrayFile.class, "A test for flat files of binary key/value pairs.");
    pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
    pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
    pgd.addClass("testsequencefileinputformat", TestSequenceFileInputFormat.class, "A test for sequence file input format.");
    pgd.addClass("testtextinputformat", TestTextInputFormat.class, "A test for text input format.");
    pgd.addClass("TestDFSIO", TestDFSIO.class, "Distributed i/o benchmark.");
    pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
    pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
    pgd.addClass("testmapredsort", SortValidator.class, 
                 "A map/reduce program that validates the map-reduce framework's sort.");
    pgd.addClass("testbigmapoutput", BigMapOutput.class, 
                 "A map/reduce program that works on a very big " + 
                 "non-splittable file and does identity map/reduce");
    pgd.addClass("loadgen", GenericMRLoadGenerator.class, "Generic map/reduce load generator");
    pgd.addClass("filebench", FileBench.class, "Benchmark SequenceFile(Input|Output)Format (block,record compressed and uncompressed), Text(Input|Output)Format (compressed and uncompressed)");
    pgd.addClass("dfsthroughput", BenchmarkThroughput.class, 
                 "measure hdfs throughput");
    pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
        "A program that tests the reliability of the MR framework by " +
        "injecting faults/failures");
    pgd.driver(argv);
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 17  Project: incubator-tez  File: TestDriver.java
public static void main(String argv[]){
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("FaultToleranceTestRunner", FaultToleranceTestRunner.class,
        "Run different DAGs for fault tolerance testing");
    exitCode = pgd.run(argv);
  }
  catch(Throwable e){
    e.printStackTrace();
  }

  System.exit(exitCode);
}
 
Example 18  Project: tez  File: ExampleDriver.java
public static void main(String argv[]){
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("wordcount", WordCount.class,
        "A native Tez wordcount program that counts the words in the input files.");
    pgd.addClass("orderedwordcount", OrderedWordCount.class,
        "Word Count with words sorted on frequency");
    pgd.addClass("simplesessionexample", SimpleSessionExample.class,
        "Example to run multiple dags in a session");
    pgd.addClass("hashjoin", HashJoinExample.class,
        "Identify all occurences of lines in file1 which also occur in file2 using hash join");
    pgd.addClass("sortmergejoin", SortMergeJoinExample.class,
        "Identify all occurences of lines in file1 which also occur in file2 using sort merge join");
    pgd.addClass("joindatagen", JoinDataGen.class,
        "Generate data to run the joinexample");
    pgd.addClass("joinvalidate", JoinValidate.class,
        "Validate data generated by joinexample and joindatagen");
    pgd.addClass("cartesianproduct", CartesianProduct.class,
        "Cartesian product of two datasets");
    exitCode = pgd.run(argv);
  } catch(Throwable e){
    e.printStackTrace();
  }

  System.exit(exitCode);
}
 
Example 19  Project: tez  File: TestDriver.java
public static void main(String argv[]){
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("FaultToleranceTestRunner", FaultToleranceTestRunner.class,
        "Run different DAGs for fault tolerance testing");
    exitCode = pgd.run(argv);
  }
  catch(Throwable e){
    e.printStackTrace();
  }

  System.exit(exitCode);
}
 
Example 20  Project: tez  File: AnalyzerDriver.java
public static void main(String argv[]){
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("CriticalPath", CriticalPathAnalyzer.class,
        "Find the critical path of a DAG");
    pgd.addClass("ContainerReuseAnalyzer", ContainerReuseAnalyzer.class,
        "Print container reuse details in a DAG");
    pgd.addClass("LocalityAnalyzer", LocalityAnalyzer.class,
        "Print locality details in a DAG");
    pgd.addClass("ShuffleTimeAnalyzer", ShuffleTimeAnalyzer.class,
        "Analyze the shuffle time details in a DAG");
    pgd.addClass("SkewAnalyzer", SkewAnalyzer.class,
        "Analyze the skew details in a DAG");
    pgd.addClass("SlowestVertexAnalyzer", SlowestVertexAnalyzer.class,
        "Print slowest vertex details in a DAG");
    pgd.addClass("SlowNodeAnalyzer", SlowNodeAnalyzer.class,
        "Print node details in a DAG");
    pgd.addClass("SlowTaskIdentifier", SlowTaskIdentifier.class,
        "Print slow task details in a DAG");
    pgd.addClass("SpillAnalyzer", SpillAnalyzerImpl.class,
        "Print spill details in a DAG");
    pgd.addClass("TaskAssignmentAnalyzer", TaskAssignmentAnalyzer.class,
        "Print task-to-node assignment details of a DAG");
    pgd.addClass("TaskAttemptResultStatisticsAnalyzer", TaskAttemptResultStatisticsAnalyzer.class,
        "Print vertex:node:status level details of task attempt results");
    pgd.addClass("TaskConcurrencyAnalyzer", TaskConcurrencyAnalyzer.class,
        "Print the task concurrency details in a DAG");
    pgd.addClass("VertexLevelCriticalPathAnalyzer", VertexLevelCriticalPathAnalyzer.class,
        "Find critical path at vertex level in a DAG");
    pgd.addClass("OneOnOneEdgeAnalyzer", OneOnOneEdgeAnalyzer.class,
        "Find out schedule misses in 1:1 edges in a DAG");
    exitCode = pgd.run(argv);
  } catch(Throwable e){
    e.printStackTrace();
  }

  System.exit(exitCode);
}
 
Example 21  Project: hadoop-gpu  File: AllTestDriver.java
/**
 * A description of the test program for running all the tests using jar file
 */
public static void main(String argv[]){
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("threadedmapbench", ThreadedMapBenchmark.class, 
                 "A map/reduce benchmark that compares the performance " + 
                 "of maps with multiple spills over maps with 1 spill");
    pgd.addClass("mrbench", MRBench.class, "A map/reduce benchmark that can create many small jobs");
    pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
    pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
    pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
    pgd.addClass("testsequencefile", TestSequenceFile.class, "A test for flat files of binary key value pairs.");
    pgd.addClass("testsetfile", TestSetFile.class, "A test for flat files of binary key/value pairs.");
    pgd.addClass("testarrayfile", TestArrayFile.class, "A test for flat files of binary key/value pairs.");
    pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
    pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
    pgd.addClass("testsequencefileinputformat", TestSequenceFileInputFormat.class, "A test for sequence file input format.");
    pgd.addClass("testtextinputformat", TestTextInputFormat.class, "A test for text input format.");
    pgd.addClass("TestDFSIO", TestDFSIO.class, "Distributed i/o benchmark.");
    pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
    pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
    pgd.addClass("testmapredsort", SortValidator.class, 
                 "A map/reduce program that validates the map-reduce framework's sort.");
    pgd.addClass("testbigmapoutput", BigMapOutput.class, 
                 "A map/reduce program that works on a very big " + 
                 "non-splittable file and does identity map/reduce");
    pgd.addClass("loadgen", GenericMRLoadGenerator.class, "Generic map/reduce load generator");
    pgd.addClass("filebench", FileBench.class, "Benchmark SequenceFile(Input|Output)Format (block,record compressed and uncompressed), Text(Input|Output)Format (compressed and uncompressed)");
    pgd.addClass("dfsthroughput", BenchmarkThroughput.class, 
                 "measure hdfs throughput");
    pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
        "A program that tests the reliability of the MR framework by " +
        "injecting faults/failures");
    pgd.driver(argv);
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 22  Project: hadoop-ozone  File: OzoneTestDriver.java
public OzoneTestDriver() {
  this(new ProgramDriver());
}
 
Example 23  Project: hadoop  File: YarnTestDriver.java
public YarnTestDriver() {
  this(new ProgramDriver());
}
 
Example 24  Project: hadoop  File: MapredTestDriver.java
public MapredTestDriver() {
  this(new ProgramDriver());
}
 
Example 25  Project: hadoop  File: MapredTestDriver.java
public MapredTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("testsequencefile", TestSequenceFile.class, 
    "A test for flat files of binary key value pairs.");
    pgd.addClass("threadedmapbench", ThreadedMapBenchmark.class, 
        "A map/reduce benchmark that compares the performance " + 
        "of maps with multiple spills over maps with 1 spill");
    pgd.addClass("mrbench", MRBench.class, 
        "A map/reduce benchmark that can create many small jobs");
    pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
    pgd.addClass("testsequencefileinputformat", 
        TestSequenceFileInputFormat.class, 
        "A test for sequence file input format.");
    pgd.addClass("testtextinputformat", TestTextInputFormat.class, 
        "A test for text input format.");
    pgd.addClass("testmapredsort", SortValidator.class, 
        "A map/reduce program that validates the " +
        "map-reduce framework's sort.");
    pgd.addClass("testbigmapoutput", BigMapOutput.class, 
        "A map/reduce program that works on a very big " +
        "non-splittable file and does identity map/reduce");
    pgd.addClass("loadgen", GenericMRLoadGenerator.class, 
        "Generic map/reduce load generator");
    pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
        "A program that tests the reliability of the MR framework by " +
        "injecting faults/failures");
    pgd.addClass("fail", FailJob.class, "a job that always fails");
    pgd.addClass("sleep", SleepJob.class, 
                 "A job that sleeps at each map and reduce task.");
    pgd.addClass("nnbench", NNBench.class, 
        "A benchmark that stresses the namenode.");
    pgd.addClass("testfilesystem", TestFileSystem.class, 
        "A test for FileSystem read/write.");
    pgd.addClass(TestDFSIO.class.getSimpleName(), TestDFSIO.class, 
        "Distributed i/o benchmark.");
    pgd.addClass("DFSCIOTest", DFSCIOTest.class, "" +
        "Distributed i/o benchmark of libhdfs.");
    pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, 
        "Distributed checkup of the file system consistency.");
    pgd.addClass("filebench", FileBench.class, 
        "Benchmark SequenceFile(Input|Output)Format " +
        "(block,record compressed and uncompressed), " +
        "Text(Input|Output)Format (compressed and uncompressed)");
    pgd.addClass(JHLogAnalyzer.class.getSimpleName(), JHLogAnalyzer.class, 
        "Job History Log analyzer.");
    pgd.addClass(SliveTest.class.getSimpleName(), SliveTest.class, 
        "HDFS Stress Test and Live Data Verification.");
    pgd.addClass("minicluster", MiniHadoopClusterManager.class,
    "Single process HDFS and MR cluster.");
    pgd.addClass("largesorter", LargeSorter.class,
        "Large-Sort tester");
    pgd.addClass("NNloadGenerator", LoadGenerator.class,
            "Generate load on Namenode using NN loadgenerator run WITHOUT MR");
    pgd.addClass("NNloadGeneratorMR", LoadGeneratorMR.class,
        "Generate load on Namenode using NN loadgenerator run as MR job");
    pgd.addClass("NNstructureGenerator", StructureGenerator.class,
        "Generate the structure to be used by NNdataGenerator");
    pgd.addClass("NNdataGenerator", DataGenerator.class,
        "Generate the data to be used by NNloadGenerator");
  } catch(Throwable e) {
    e.printStackTrace();
  }
}
 
Example 26  Project: hadoop  File: ExampleDriver.java
public static void main(String argv[]){
  int exitCode = -1;
  ProgramDriver pgd = new ProgramDriver();
  try {
    pgd.addClass("wordcount", WordCount.class, 
                 "A map/reduce program that counts the words in the input files.");
    pgd.addClass("wordmean", WordMean.class,
                 "A map/reduce program that counts the average length of the words in the input files.");
    pgd.addClass("wordmedian", WordMedian.class,
                 "A map/reduce program that counts the median length of the words in the input files.");
    pgd.addClass("wordstandarddeviation", WordStandardDeviation.class,
                 "A map/reduce program that counts the standard deviation of the length of the words in the input files.");
    pgd.addClass("aggregatewordcount", AggregateWordCount.class, 
                 "An Aggregate based map/reduce program that counts the words in the input files.");
    pgd.addClass("aggregatewordhist", AggregateWordHistogram.class, 
                 "An Aggregate based map/reduce program that computes the histogram of the words in the input files.");
    pgd.addClass("grep", Grep.class, 
                 "A map/reduce program that counts the matches of a regex in the input.");
    pgd.addClass("randomwriter", RandomWriter.class, 
                 "A map/reduce program that writes 10GB of random data per node.");
    pgd.addClass("randomtextwriter", RandomTextWriter.class, 
    "A map/reduce program that writes 10GB of random textual data per node.");
    pgd.addClass("sort", Sort.class, "A map/reduce program that sorts the data written by the random writer.");

    pgd.addClass("pi", QuasiMonteCarlo.class, QuasiMonteCarlo.DESCRIPTION);
    pgd.addClass("bbp", BaileyBorweinPlouffe.class, BaileyBorweinPlouffe.DESCRIPTION);
    pgd.addClass("distbbp", DistBbp.class, DistBbp.DESCRIPTION);

    pgd.addClass("pentomino", DistributedPentomino.class,
    "A map/reduce tile laying program to find solutions to pentomino problems.");
    pgd.addClass("secondarysort", SecondarySort.class,
                 "An example defining a secondary sort to the reduce.");
    pgd.addClass("sudoku", Sudoku.class, "A sudoku solver.");
    pgd.addClass("join", Join.class, "A job that effects a join over sorted, equally partitioned datasets");
    pgd.addClass("multifilewc", MultiFileWordCount.class, "A job that counts words from several files.");
    pgd.addClass("dbcount", DBCountPageView.class, "An example job that count the pageview counts from a database.");
    pgd.addClass("teragen", TeraGen.class, "Generate data for the terasort");
    pgd.addClass("terasort", TeraSort.class, "Run the terasort");
    pgd.addClass("teravalidate", TeraValidate.class, "Checking results of terasort");
    exitCode = pgd.run(argv);
  }
  catch(Throwable e){
    e.printStackTrace();
  }
  
  System.exit(exitCode);
}
 
Example 27  Project: hadoop  File: HdfsTestDriver.java
public HdfsTestDriver() {
  this(new ProgramDriver());
}
 
Example 28  Project: hadoop  File: CoreTestDriver.java
public CoreTestDriver() {
  this(new ProgramDriver());
}
 
Example 29  Project: big-c  File: YarnTestDriver.java
public YarnTestDriver() {
  this(new ProgramDriver());
}
 
Example 30  Project: big-c  File: MapredTestDriver.java
public MapredTestDriver() {
  this(new ProgramDriver());
}
 