Java Code Examples for org.apache.hadoop.util.ToolRunner#run()

The following examples show how to use org.apache.hadoop.util.ToolRunner#run(). They are drawn from a range of open-source projects; the source file and project for each example are noted above its code.
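Most of the examples share the same pattern: a class implements org.apache.hadoop.util.Tool (typically by extending Configured), and its main method delegates to ToolRunner.run(), which parses Hadoop's generic options (-D, -conf, -files, -libjars, ...) before invoking the tool's run() method with the remaining arguments. The sketch below illustrates that pattern; the MyTool class name and its body are illustrative placeholders, not code from any of the projects listed.

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // getConf() returns the Configuration that ToolRunner injected, already
    // updated with any generic options supplied on the command line.
    Configuration conf = getConf();
    System.out.println("remaining args: " + Arrays.toString(args));
    return 0; // the exit code that ToolRunner.run() returns to the caller
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner.run(conf, tool, args) strips the generic options from args
    // and passes the rest to MyTool#run().
    int exitCode = ToolRunner.run(new Configuration(), new MyTool(), args);
    System.exit(exitCode);
  }
}

The two-argument overload ToolRunner.run(tool, args), used in several examples below, behaves the same way but takes the configuration from the tool itself via tool.getConf().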
Example 1
Source File: TestBinaryTokenFile.java    From big-c with Apache License 2.0
/**
 * Run a distributed job and verify that the TokenCache is available
 * @throws IOException
 */
@Test
public void testBinaryTokenFile() throws IOException {
  Configuration conf = mrCluster.getConfig();
  
  // provide namenode URIs so the job can obtain delegation tokens for them
  final String nnUri = dfsCluster.getURI(0).toString();
  conf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri);
  
  // job arguments: one mapper, one reducer, minimal sleep times
  final String[] args = { 
      "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
      };
  int res = -1;
  try {
    res = ToolRunner.run(conf, new MySleepJob(), args);
  } catch (Exception e) {
    System.out.println("Job failed with " + e.getLocalizedMessage());
    e.printStackTrace(System.out);
    fail("Job failed");
  }
  assertEquals("dist job res is not 0:", 0, res);
}
 
Example 2
Source File: HBaseFsck.java    From hbase with Apache License 2.0
/**
 * Main program
 *
 * @param args command-line arguments
 * @throws Exception if the fsck run fails
 */
public static void main(String[] args) throws Exception {
  // create a fsck object
  Configuration conf = HBaseConfiguration.create();
  Path hbasedir = CommonFSUtils.getRootDir(conf);
  URI defaultFs = hbasedir.getFileSystem(conf).getUri();
  CommonFSUtils.setFsDefault(conf, new Path(defaultFs));
  int ret = ToolRunner.run(new HBaseFsckTool(conf), args);
  System.exit(ret);
}
 
Example 3
Source File: TestStreamedMerge.java    From hadoop-gpu with Apache License 2.0
void lsr() {
  try {
    System.out.println("lsr /");
    ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
  } catch (Exception e) {
    e.printStackTrace();
  }
}
 
Example 4
Source File: ElementOutputFormatTest.java    From AccumuloGraph with Apache License 2.0
public static int main(String[] args) throws Exception {
  return ToolRunner.run(CachedConfiguration.getInstance(), new MRTester(), args);
}
 
Example 5
Source File: NDCuboidJob.java    From kylin-on-parquet-v2 with Apache License 2.0
public static void main(String[] args) throws Exception {
    CuboidJob job = new NDCuboidJob();
    int exitCode = ToolRunner.run(job, args);
    System.exit(exitCode);
}
 
Example 6
Source File: LargeSorter.java    From big-c with Apache License 2.0
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new LargeSorter(), args);
  System.exit(res);
}
 
Example 7
Source File: TestJsonMapperReducer.java    From ojai with Apache License 2.0
public static void main(String[] args) throws Exception {
  int rc = ToolRunner.run(new TestJsonMapperReducer(), args);
  System.exit(rc);
}
 
Example 8
Source File: TotalOrderSortingStage.java    From hadoop-map-reduce-patterns with Apache License 2.0
public static void main(String[] args) throws Exception {
	int res = ToolRunner.run(new Configuration(),
			new TotalOrderSortingStage(), args);
	System.exit(res);
}
 
Example 9
Source File: InputSampler.java    From hadoop-gpu with Apache License 2.0
public static void main(String[] args) throws Exception {
  JobConf job = new JobConf(InputSampler.class);
  InputSampler<?,?> sampler = new InputSampler(job);
  int res = ToolRunner.run(sampler, args);
  System.exit(res);
}
 
Example 10
Source File: SortMergeJoinExample.java    From tez with Apache License 2.0
public static void main(String[] args) throws Exception {
  SortMergeJoinExample job = new SortMergeJoinExample();
  int status = ToolRunner.run(new Configuration(), job, args);
  System.exit(status);
}
 
Example 11
Source File: FactDistinctColumnsJob.java    From kylin with Apache License 2.0
public static void main(String[] args) throws Exception {
    FactDistinctColumnsJob job = new FactDistinctColumnsJob();
    int exitCode = ToolRunner.run(job, args);
    System.exit(exitCode);
}
 
Example 12
Source File: LindenJob.java    From linden with Apache License 2.0
public static void main(String[] args) throws Exception {
  System.out.println(Arrays.asList(args));
  int exitCode = ToolRunner.run(new LindenJob(), args);
  System.exit(exitCode);
}
 
Example 13
Source File: MultiFileWordCount.java    From hadoop-book with Apache License 2.0
public static void main(String[] args) throws Exception {
    int ret = ToolRunner.run(new MultiFileWordCount(), args);
    System.exit(ret);
}
 
Example 14
Source File: DFSHAAdmin.java    From hadoop with Apache License 2.0
public static void main(String[] argv) throws Exception {
  int res = ToolRunner.run(new DFSHAAdmin(), argv);
  System.exit(res);
}
 
Example 15
Source File: TestFullBackupSetRestoreSet.java    From hbase with Apache License 2.0
@Test
public void testFullRestoreSetToOtherTable() throws Exception {

  LOG.info("Test full restore set");

  // Create set
  try (BackupSystemTable table = new BackupSystemTable(TEST_UTIL.getConnection())) {
    String name = "name";
    table.addToBackupSet(name, new String[] { table1.getNameAsString() });
    List<TableName> names = table.describeBackupSet(name);

    assertNotNull(names);
    assertTrue(names.size() == 1);
    assertTrue(names.get(0).equals(table1));

    String[] args = new String[] { "create", "full", BACKUP_ROOT_DIR, "-s", name };
    // Run backup
    int ret = ToolRunner.run(conf1, new BackupDriver(), args);
    assertTrue(ret == 0);
    List<BackupInfo> backups = table.getBackupHistory();
    assertTrue(backups.size() == 1);
    String backupId = backups.get(0).getBackupId();
    assertTrue(checkSucceeded(backupId));

    LOG.info("backup complete");

    // Restore from set into other table
    args =
        new String[] { BACKUP_ROOT_DIR, backupId, "-s", name, "-m",
            table1_restore.getNameAsString(), "-o" };
    // Run restore
    ret = ToolRunner.run(conf1, new RestoreDriver(), args);
    assertTrue(ret == 0);
    Admin hba = TEST_UTIL.getAdmin();
    assertTrue(hba.tableExists(table1_restore));
    // Verify number of rows in both tables
    assertEquals(TEST_UTIL.countRows(table1), TEST_UTIL.countRows(table1_restore));
    TEST_UTIL.deleteTable(table1_restore);
    LOG.info("restore into other table is complete");
    hba.close();
  }
}
 
Example 16
Source File: BusyLegs.java    From gemfirexd-oss with Apache License 2.0
public static void main(String[] args) throws Exception {
  System.out.println("SampleApp.main() invoked with " + args);
  int rc = ToolRunner.run(new BusyLegs(), args);
  System.exit(rc);
}
 
Example 17
Source File: Grep.java    From hadoop with Apache License 2.0
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new Grep(), args);
  System.exit(res);
}
 
Example 18
Source File: SmallFilesRead.java    From hiped2 with Apache License 2.0
/**
 * Main entry point for the example.
 *
 * @param args arguments
 * @throws Exception when something goes wrong
 */
public static void main(final String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new SmallFilesRead(), args);
  System.exit(res);
}
 
Example 19
Source File: HBaseSpanViewerServer.java    From incubator-retired-htrace with Apache License 2.0
/**
 * Runs an embedded Jetty server that exposes the trace and span servlets over HTTP.
 *
 * @param args command-line arguments passed through to the Hadoop ToolRunner
 * @throws Exception propagated from the embedded Jetty server via ToolRunner
 */
public static void main(String[] args) throws Exception {
  ToolRunner.run(HBaseConfiguration.create(), new HBaseSpanViewerServer(), args);
}
 
Example 20
Source File: DistributedPentomino.java    From hadoop-book with Apache License 2.0
/**
 * Launch the solver on a 9x10 board with the one-sided pentominoes. This takes
 * about 2.5 hours on 20 nodes with 2 CPUs/node. Splits the job into 2000
 * maps and 1 reduce.
 */
public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new DistributedPentomino(), args);
    System.exit(res);
}