Java Code Examples for org.apache.hadoop.util.ToolRunner#printGenericCommandUsage()

The following examples show how to use org.apache.hadoop.util.ToolRunner#printGenericCommandUsage(). They are taken from open-source projects; each example lists its source file, originating project, and license.
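
In every example the pattern is the same: the tool prints its own usage line, then delegates to ToolRunner.printGenericCommandUsage(PrintStream) to append the generic Hadoop options (e.g. -conf, -D, -fs, -jt, -files, -libjars, -archives) understood by GenericOptionsParser, writing both to the same stream. A minimal, self-contained sketch of that pattern (MyTool and its usage string are hypothetical, not taken from any of the projects below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {

  private static int printUsage() {
    System.err.println("Usage: MyTool <in> <out>");
    // Append the generic options that ToolRunner/GenericOptionsParser handle
    ToolRunner.printGenericCommandUsage(System.err);
    return -1;
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length != 2) {
      return printUsage(); // wrong arity: show usage and fail
    }
    // ... set up and submit the actual job here ...
    return 0;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new MyTool(), args));
  }
}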
Example 1
Source File: DFSUtil.java    From hadoop with Apache License 2.0
/**
 * Parse the arguments for commands
 * 
 * @param args the argument to be parsed
 * @param helpDescription help information to be printed out
 * @param out Printer
 * @param printGenericCommandUsage whether to print the 
 *              generic command usage defined in ToolRunner
 * @return true when the argument matches help option, false if not
 */
public static boolean parseHelpArgument(String[] args,
    String helpDescription, PrintStream out, boolean printGenericCommandUsage) {
  if (args.length == 1) {
    try {
      CommandLineParser parser = new PosixParser();
      CommandLine cmdLine = parser.parse(helpOptions, args);
      if (cmdLine.hasOption(helpOpt.getOpt())
          || cmdLine.hasOption(helpOpt.getLongOpt())) {
        // should print out the help information
        out.println(helpDescription + "\n");
        if (printGenericCommandUsage) {
          ToolRunner.printGenericCommandUsage(out);
        }
        return true;
      }
    } catch (ParseException pe) {
      return false;
    }
  }
  return false;
}
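
A hypothetical call site for this helper, assuming USAGE holds the command's usage string (Example 8 below shows the real invocation in DFSck's main()):

if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
  // -h or --help was given: the help text and generic usage have already been printed
  System.exit(0);
}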
 
Example 2
Source File: TopTenUsersByReputation.java    From hadoop-map-reduce-patterns with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
	Configuration conf = new Configuration();
	String[] otherArgs = new GenericOptionsParser(conf, args)
			.getRemainingArgs();
	if (otherArgs.length != 2) {
		System.err.println("Usage: TopTenUsersByReputation <in> <out>");
		ToolRunner.printGenericCommandUsage(System.err);
		System.exit(2);
	}

	Job job = new Job(conf, "Top Ten Users by Reputation");
	job.setJarByClass(TopTenUsersByReputation.class);
	job.setMapperClass(TopTenMapper.class);
	job.setReducerClass(TopTenReducer.class);
	job.setNumReduceTasks(1);
	job.setOutputKeyClass(IntWritable.class);
	job.setOutputValueClass(TextArrayWritable.class);
	FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
	FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
	boolean success = job.waitForCompletion(true);

	return success ? 0 : 1;
}
 
Example 3
Source File: AvatarZKShell.java    From RDFS with Apache License 2.0
/**
 * Displays format of commands.
 */
private static void printUsage(String cmd) {
  String prefix = "Usage: java " + AvatarZKShell.class.getSimpleName();
  if ("-updateZK".equals(cmd)) {
    System.err.println(prefix + " [-service serviceName]" + " [-updateZK -{zero|one}]");
  } else if ("-clearZK".equals(cmd)) {
    System.err.println(prefix + " [-service serviceName]" + " [-clearZK]");
  } else {
    System.err.println(prefix + " [-service serviceName]");
    System.err.println("           [-updateZK -{zero|one} [-force]]");
    System.err.println("           [-clearZK]");
    System.err.println("           [-createZK -{zero|one}]");
    System.err.println("           [-getZK]");
    System.err.println();
    ToolRunner.printGenericCommandUsage(System.err);
  }
}
 
Example 4
Source File: DataGenerator.java    From hadoop-gpu with Apache License 2.0
/** Parse the command line arguments and initialize the data */
private int init(String[] args) {
  try { // initialize file system handle
    fs = FileSystem.get(getConf());
  } catch (IOException ioe) {
    System.err.println("Can not initialize the file system: " + 
        ioe.getLocalizedMessage());
    return -1;
  }

  for (int i = 0; i < args.length; i++) { // parse command line
    if (args[i].equals("-root")) {
      root = new Path(args[++i]);
    } else if (args[i].equals("-inDir")) {
      inDir = new File(args[++i]);
    } else {
      System.err.println(USAGE);
      ToolRunner.printGenericCommandUsage(System.err);
      System.exit(-1);
    }
  }
  return 0;
}
 
Example 5
Source File: Join.java    From hadoop-book with Apache License 2.0
static int printUsage() {
    System.out.println("join [-m <maps>] [-r <reduces>] "
            + "[-inFormat <input format class>] "
            + "[-outFormat <output format class>] "
            + "[-outKey <output key class>] "
            + "[-outValue <output value class>] "
            + "[-joinOp <inner|outer|override>] "
            + "[input]* <input> <output>");
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
}
 
Example 6
Source File: RMAdminCLI.java    From big-c with Apache License 2.0
/**
 * Displays format of commands.
 * @param cmd The command that is being executed.
 */
private static void printUsage(String cmd, boolean isHAEnabled) {
  StringBuilder usageBuilder = new StringBuilder();
  if (ADMIN_USAGE.containsKey(cmd) || USAGE.containsKey(cmd)) {
    buildIndividualUsageMsg(cmd, usageBuilder);
  } else {
    buildUsageMsg(usageBuilder, isHAEnabled);
  }
  System.err.println(usageBuilder);
  ToolRunner.printGenericCommandUsage(System.err);
}
 
Example 7
Source File: Sort.java    From hadoop-gpu with Apache License 2.0
static int printUsage() {
  System.out.println("sort [-m <maps>] [-r <reduces>] " +
                     "[-inFormat <input format class>] " +
                     "[-outFormat <output format class>] " + 
                     "[-outKey <output key class>] " +
                     "[-outValue <output value class>] " +
                     "[-totalOrder <pcnt> <num samples> <max splits>] " +
                     "<input> <output>");
  ToolRunner.printGenericCommandUsage(System.out);
  return -1;
}
 
Example 8
Source File: DFSck.java    From hadoop with Apache License 2.0
public static void main(String[] args) throws Exception {
  // -files option is also used by GenericOptionsParser
  // Make sure that is not the first argument for fsck
  int res = -1;
  if ((args.length == 0) || ("-files".equals(args[0]))) {
    printUsage(System.err);
    ToolRunner.printGenericCommandUsage(System.err);
  } else if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
    res = 0;
  } else {
    res = ToolRunner.run(new DFSck(new HdfsConfiguration()), args);
  }
  System.exit(res);
}
 
Example 9
Source File: SleepJob.java    From big-c with Apache License 2.0
public int run(String[] args) throws Exception {

    if(args.length < 1) {
      System.err.println("SleepJob [-m numMapper] [-r numReducer]" +
          " [-mt mapSleepTime (msec)] [-rt reduceSleepTime (msec)]" +
          " [-recordt recordSleepTime (msec)]");
      ToolRunner.printGenericCommandUsage(System.err);
      return -1;
    }

    int numMapper = 1, numReducer = 1;
    long mapSleepTime = 100, reduceSleepTime = 100, recSleepTime = 100;
    int mapSleepCount = 1, reduceSleepCount = 1;

    for(int i=0; i < args.length; i++ ) {
      if(args[i].equals("-m")) {
        numMapper = Integer.parseInt(args[++i]);
      }
      else if(args[i].equals("-r")) {
        numReducer = Integer.parseInt(args[++i]);
      }
      else if(args[i].equals("-mt")) {
        mapSleepTime = Long.parseLong(args[++i]);
      }
      else if(args[i].equals("-rt")) {
        reduceSleepTime = Long.parseLong(args[++i]);
      }
      else if (args[i].equals("-recordt")) {
        recSleepTime = Long.parseLong(args[++i]);
      }
    }
    
    // split each task's total *SleepTime into per-record sleep intervals of recSleepTime msec
    mapSleepCount = (int)Math.ceil(mapSleepTime / ((double)recSleepTime));
    reduceSleepCount = (int)Math.ceil(reduceSleepTime / ((double)recSleepTime));
    
    return run(numMapper, numReducer, mapSleepTime, mapSleepCount,
        reduceSleepTime, reduceSleepCount);
  }
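
For example, with mapSleepTime=1000 and recSleepTime=100 (both msec), each map task is assigned ceil(1000/100) = 10 records and sleeps about 100 msec per record, roughly 1000 msec in total.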
 
Example 10
Source File: FailJob.java    From hadoop with Apache License 2.0
public int run(String[] args) throws Exception {
  if(args.length < 1) {
    System.err.println("FailJob (-failMappers|-failReducers)");
    ToolRunner.printGenericCommandUsage(System.err);
    return 2;
  }
  boolean failMappers = false, failReducers = false;

  for (int i = 0; i < args.length; i++ ) {
    if (args[i].equals("-failMappers")) {
      failMappers = true;
    }
    else if(args[i].equals("-failReducers")) {
      failReducers = true;
    }
  }
  if (!(failMappers ^ failReducers)) {
    System.err.println("Exactly one of -failMappers or -failReducers must be specified.");
    return 3;
  }

  // Write a file with one line per mapper.
  final FileSystem fs = FileSystem.get(getConf());
  Path inputDir = new Path(FailJob.class.getSimpleName() + "_in");
  fs.mkdirs(inputDir);
  for (int i = 0; i < getConf().getInt("mapred.map.tasks", 1); ++i) {
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(
        fs.create(new Path(inputDir, Integer.toString(i)))));
    w.write(Integer.toString(i) + "\n");
    w.close();
  }

  Job job = createJob(failMappers, failReducers, inputDir);
  return job.waitForCompletion(true) ? 0 : 1;
}
 
Example 11
Source File: RPCLoadGen.java    From tez with Apache License 2.0
@Override
protected void printUsage() {
  System.err.println(
      "Usage: " + "RPCLoadGen <numTasks> <max_sleep_time_millis> <get_task_payload_size> [" +
          "<" + VIA_RPC + ">|" + VIA_HDFS_DIST_CACHE + "|" + VIA_HDFS_DIRECT_READ + "]");
  ToolRunner.printGenericCommandUsage(System.err);
}
 
Example 12
Source File: Join.java    From incubator-tez with Apache License 2.0
static int printUsage() {
  System.out.println("join [-r <reduces>] " +
                     "[-inFormat <input format class>] " +
                     "[-outFormat <output format class>] " + 
                     "[-outKey <output key class>] " +
                     "[-outValue <output value class>] " +
                     "[-joinOp <inner|outer|override>] " +
                     "[input]* <input> <output>");
  ToolRunner.printGenericCommandUsage(System.out);
  return 2;
}
 
Example 13
Source File: WasbFsck.java    From hadoop with Apache License 2.0
private static void printUsage() {
  System.out.println("Usage: WasbFsck [<path>] [-move | -delete]");
  System.out.println("\t<path>\tstart checking from this path");
  System.out.println("\t-move\tmove any files whose upload was interrupted"
      + " mid-stream to " + LOST_AND_FOUND_PATH);
  System.out
      .println("\t-delete\tdelete any files whose upload was interrupted"
          + " mid-stream");
  ToolRunner.printGenericCommandUsage(System.out);
}
 
Example 14
Source File: DFSck.java    From big-c with Apache License 2.0
public static void main(String[] args) throws Exception {
  // -files option is also used by GenericOptionsParser
  // Make sure that is not the first argument for fsck
  int res = -1;
  if ((args.length == 0) || ("-files".equals(args[0]))) {
    printUsage(System.err);
    ToolRunner.printGenericCommandUsage(System.err);
  } else if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
    res = 0;
  } else {
    res = ToolRunner.run(new DFSck(new HdfsConfiguration()), args);
  }
  System.exit(res);
}
 
Example 15
Source File: Grep.java    From RDFS with Apache License 2.0
public int run(String[] args) throws Exception {
  if (args.length < 3) {
    System.out.println("Grep <inDir> <outDir> <regex> [<group>]");
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  Path tempDir =
    new Path("grep-temp-"+
        Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));

  JobConf grepJob = new JobConf(getConf(), Grep.class);
  
  try {
    
    grepJob.setJobName("grep-search");

    FileInputFormat.setInputPaths(grepJob, args[0]);

    grepJob.setMapperClass(RegexMapper.class);
    grepJob.set("mapred.mapper.regex", args[2]);
    if (args.length == 4)
      grepJob.set("mapred.mapper.regex.group", args[3]);

    grepJob.setCombinerClass(LongSumReducer.class);
    grepJob.setReducerClass(LongSumReducer.class);

    FileOutputFormat.setOutputPath(grepJob, tempDir);
    grepJob.setOutputFormat(SequenceFileOutputFormat.class);
    grepJob.setOutputKeyClass(Text.class);
    grepJob.setOutputValueClass(LongWritable.class);

    JobClient.runJob(grepJob);

    JobConf sortJob = new JobConf(Grep.class);
    sortJob.setJobName("grep-sort");

    FileInputFormat.setInputPaths(sortJob, tempDir);
    sortJob.setInputFormat(SequenceFileInputFormat.class);

    sortJob.setMapperClass(InverseMapper.class);

    sortJob.setNumReduceTasks(1);                 // write a single file
    FileOutputFormat.setOutputPath(sortJob, new Path(args[1]));
    sortJob.setOutputKeyComparatorClass           // sort by decreasing freq
    (LongWritable.DecreasingComparator.class);

    JobClient.runJob(sortJob);
  }
  finally {
    FileSystem.get(grepJob).delete(tempDir, true);
  }
  return 0;
}
 
Example 16
Source File: CompositeUserJoin.java    From hadoop-map-reduce-patterns with Apache License 2.0
private void printUsage() {
	System.err.println("Usage: CompositeUserJoin <user_in> <comments_in> <out> <join_type>");
	ToolRunner.printGenericCommandUsage(System.err);
	System.exit(2);
}
 
Example 17
Source File: SleepJob.java    From RDFS with Apache License 2.0
public int run(String[] args) throws Exception {

    if(args.length < 1) {
      System.err.println("SleepJob [-m numMapper] [-r numReducer]" +
          " [-mt mapSleepTime (msec)] [-rt reduceSleepTime (msec)]" +
          " [-recordt recordSleepTime (msec)]" +
          " [-slowmaps slowMaps (int separated by ,)]" +
          " [-slowreduces slowReduces (int separated by ,)]" +
          " [-slowratio slowRatio]" +
          " [-counters numCountersToIncPerRecordPerTask]" +
          " [-nosetup]" +
          " [-speculation]" +
          " [-hosts hostsToRunMaps (for testing locality. host names" +
          " separated by ,)]" +
          " [-hostspersplit numHostsPerSplit (for testing locality. number" +
          " of random hosts per split)]");
      
      ToolRunner.printGenericCommandUsage(System.err);
      return -1;
    }

    int numMapper = 1, numReducer = 1;
    long mapSleepTime = 100, reduceSleepTime = 100, recSleepTime = 100;
    int mapSleepCount = 1, reduceSleepCount = 1;
    int hostsPerSplit = 0;
    List<String> slowMaps = Collections.emptyList();
    List<String> slowReduces = Collections.emptyList();
    int slowRatio = 10;
    boolean setup = true;
    boolean doSpeculation = false;
    List<String> hosts = new ArrayList<String>();
    int countersPerTask = 0;
    
    for(int i=0; i < args.length; i++ ) {
      if(args[i].equals("-m")) {
        numMapper = Integer.parseInt(args[++i]);
      }
      else if(args[i].equals("-r")) {
        numReducer = Integer.parseInt(args[++i]);
      }
      else if(args[i].equals("-mt")) {
        mapSleepTime = Long.parseLong(args[++i]);
      }
      else if(args[i].equals("-rt")) {
        reduceSleepTime = Long.parseLong(args[++i]);
      }
      else if (args[i].equals("-recordt")) {
        recSleepTime = Long.parseLong(args[++i]);
      }
      else if (args[i].equals("-slowmaps")) {
        doSpeculation = true;
        slowMaps = parseSlowTaskList(args[++i]);
      }
      else if (args[i].equals("-slowreduces")) {
        doSpeculation = true;
        slowReduces = parseSlowTaskList(args[++i]);
      }
      else if (args[i].equals("-slowratio")) {
        doSpeculation = true;
        slowRatio = Integer.parseInt(args[++i]);
      }
      else if (args[i].equals("-hosts")) {
        for (String host : args[++i].split(",")) {
          hosts.add(host);
        }
      }
      else if (args[i].equals("-speculation")) {
        doSpeculation = true;
      }
      else if (args[i].equals("-counters")) {
        // Number of counters to increment per record per task
        countersPerTask = Integer.parseInt(args[++i]);
      }
      else if (args[i].equals("-hostspersplit")) {
        hostsPerSplit = Integer.parseInt(args[++i]);
      } 
      else if (args[i].equals("-nosetup")) {
        setup = false;
      }
      else {
        System.err.println("Invalid option " + args[i]);
        System.exit(-1);
      }
    }
    
    // split each task's total *SleepTime into per-record sleep intervals of recSleepTime msec
    mapSleepCount = (int)Math.ceil(mapSleepTime / ((double)recSleepTime));
    reduceSleepCount = (int)Math.ceil(reduceSleepTime / ((double)recSleepTime));
    
    return run(numMapper, numReducer, mapSleepTime, mapSleepCount,
        reduceSleepTime, reduceSleepCount,
        doSpeculation, slowMaps, slowReduces, slowRatio, countersPerTask, 
        hosts, hostsPerSplit, setup);
  }
 
Example 18
Source File: DFSck.java    From hadoop with Apache License 2.0
/**
 * Print fsck usage information
 */
static void printUsage(PrintStream out) {
  out.println(USAGE + "\n");
  ToolRunner.printGenericCommandUsage(out);
}
 
Example 19
Source File: DFSck.java    From big-c with Apache License 2.0
/**
 * Print fsck usage information
 */
static void printUsage(PrintStream out) {
  out.println(USAGE + "\n");
  ToolRunner.printGenericCommandUsage(out);
}
 
Example 20
Source File: MigrationTool.java    From RDFS with Apache License 2.0
public int run(String[] args) throws Exception {
  
  if (args.length == 0) {
    System.err.println("Usage: MigrationTool <S3 file system URI>");
    System.err.println("\t<S3 file system URI>\tfilesystem to migrate");
    ToolRunner.printGenericCommandUsage(System.err);
    return -1;
  }
  
  URI uri = URI.create(args[0]);
  
  initialize(uri);
  
  FileSystemStore newStore = new Jets3tFileSystemStore();
  newStore.initialize(uri, getConf());
  
  if (get("%2F") != null) { // "%2F" is the URL-encoded "/" key written by unversioned stores
    System.err.println("Current version number is [unversioned].");
    System.err.println("Target version number is " +
        newStore.getVersion() + ".");
    Store oldStore = new UnversionedStore();
    migrate(oldStore, newStore);
    return 0;
  } else {
    S3Object root = get("/");
    if (root != null) {
      String version = (String) root.getMetadata("fs-version");
      if (version == null) {
        System.err.println("Can't detect version - exiting.");
      } else {
        String newVersion = newStore.getVersion();
        System.err.println("Current version number is " + version + ".");
        System.err.println("Target version number is " + newVersion + ".");
        if (version.equals(newVersion)) {
          System.err.println("No migration required.");
          return 0;
        }
        // use version number to create Store
        //Store oldStore = ... 
        //migrate(oldStore, newStore);
        System.err.println("Not currently implemented.");
        return 0;
      }
    }
    System.err.println("Can't detect version - exiting.");
    return 0;
  }
  
}