org.apache.hadoop.util.GenericOptionsParser Java Examples
The following examples show how to use
org.apache.hadoop.util.GenericOptionsParser.
The source project and license are noted above each example.
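Nearly all of the snippets below follow the same pattern: construct a GenericOptionsParser with a Configuration and the raw command-line arguments, let it consume Hadoop's generic options (such as -D key=value, -fs, -jt, -files, -libjars, -archives) into the Configuration as a side effect, and treat getRemainingArgs() as the application's own arguments. A minimal, self-contained sketch of that pattern (the class name ParserDemo is illustrative, not taken from any project below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class ParserDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The constructor parses and removes Hadoop's generic options,
    // applying them to conf as a side effect; whatever is left over
    // belongs to the application.
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();
    for (String arg : remaining) {
      System.out.println("application argument: " + arg);
    }
  }
}

Invoked as, say, "hadoop jar demo.jar ParserDemo -D mapreduce.job.reduces=2 /in /out" (jar name hypothetical), the parser applies mapreduce.job.reduces to conf and getRemainingArgs() returns only {"/in", "/out"}.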
Example #1
Source File: CsvBlurDriver.java From incubator-retired-blur with Apache License 2.0
public static void main(String... args) throws Exception {
  Configuration configuration = new Configuration();
  String[] otherArgs = new GenericOptionsParser(configuration, args).getRemainingArgs();
  AtomicReference<Callable<Void>> ref = new AtomicReference<Callable<Void>>();
  Job job = setupJob(configuration, new ControllerPool() {
    @Override
    public Iface getClient(String controllerConnectionStr) {
      return BlurClient.getClient(controllerConnectionStr);
    }
  }, ref, otherArgs);
  if (job == null) {
    System.exit(1);
  }
  boolean waitForCompletion = job.waitForCompletion(true);
  if (waitForCompletion) {
    Callable<Void> callable = ref.get();
    if (callable != null) {
      callable.call();
    }
  }
  System.exit(waitForCompletion ? 0 : 1);
}
Example #2
Source File: ApplicationHistoryServer.java From hadoop with Apache License 2.0
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args, LOG);
  ApplicationHistoryServer appHistoryServer = null;
  try {
    appHistoryServer = new ApplicationHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(appHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration();
    new GenericOptionsParser(conf, args);
    appHistoryServer.init(conf);
    appHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting ApplicationHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
  }
  return appHistoryServer;
}
Example #3
Source File: CellCounter.java From cloud-bigtable-examples with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
  String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  if (otherArgs.length < 2) {
    System.err.println("ERROR: Wrong number of parameters: " + args.length);
    System.err.println("Usage: CellCounter ");
    System.err.println(" <tablename> <outputDir> <reportSeparator> [^[regex pattern] or " +
        "[Prefix] for row filter]] --starttime=[starttime] --endtime=[endtime]");
    System.err.println(" Note: -D properties will be applied to the conf used. ");
    System.err.println(" Additionally, the following SCAN properties can be specified");
    System.err.println(" to get fine grained control on what is counted..");
    System.err.println(" -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=<familyName>");
    System.err.println(" <reportSeparator> parameter can be used to override the default report separator " +
        "string : used to separate the rowId/column family name and qualifier name.");
    System.err.println(" [^[regex pattern] or [Prefix] parameter can be used to limit the cell counter count " +
        "operation to a limited subset of rows from the table based on regex or prefix pattern.");
    return -1;
  }
  Job job = createSubmittableJob(getConf(), otherArgs);
  return (job.waitForCompletion(true) ? 0 : 1);
}
Example #4
Source File: ResourceManager.java From hadoop with Apache License 2.0
public static void main(String argv[]) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
    argv = hParser.getRemainingArgs();
    // If -format-state-store, then delete RMStateStore; else startup normally
    if (argv.length == 1 && argv[0].equals("-format-state-store")) {
      deleteRMStateStore(conf);
    } else {
      ResourceManager resourceManager = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(resourceManager),
          SHUTDOWN_HOOK_PRIORITY);
      resourceManager.init(conf);
      resourceManager.start();
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}
Example #5
Source File: WordCount.java From RDFS with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: wordcount <in> <out>");
    System.exit(2);
  }
  Job job = new Job(conf, "word count");
  job.setJarByClass(WordCount.class);
  job.setMapperClass(TokenizerMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  long t1 = System.currentTimeMillis();
  boolean re = job.waitForCompletion(true);
  long t2 = System.currentTimeMillis();
  System.out.println((float) (t2 - t1) / 1000);
  if (re)
    System.exit(0);
  else
    System.exit(1);
}
Example #6
Source File: MaxmindDbEnrichmentLoaderTest.java From metron with Apache License 2.0
@Test
public void testLoadGeoIpDatabase() throws Exception {
  File dbPlainTextFile = new File(remoteDir.getAbsolutePath() + "/MaxmindDbEnrichmentLoaderTest.mmdb");
  TestUtils.write(dbPlainTextFile, "hello world");
  File dbFile = new File(remoteDir.getAbsolutePath() + "/MaxmindDbEnrichmentLoaderTest.mmdb.gz");
  CompressionStrategies.GZIP.compress(dbPlainTextFile, dbFile);
  String[] argv = {
      "--geo_url", "file://" + dbFile.getAbsolutePath(),
      "--remote_dir", remoteDir.getAbsolutePath(),
      "--remote_asn_dir", remoteDir.getAbsolutePath(),
      "--tmp_dir", tmpDir.getAbsolutePath(),
      "--zk_quorum", "test:2181"
  };
  String[] otherArgs = new GenericOptionsParser(argv).getRemainingArgs();
  CommandLine cli = MaxmindDbEnrichmentLoader.GeoEnrichmentOptions.parse(new PosixParser(), otherArgs);
  MaxmindDbEnrichmentLoader loader = new MockMaxmindDbEnrichmentLoader();
  loader.loadGeoLiteDatabase(cli);
  Configuration config = new Configuration();
  FileSystem fs = FileSystem.get(config);
  assertTrue(fs.exists(new Path(remoteDir + "/" + dbFile.getName())));
}
Example #7
Source File: MapperInputSplitInfo.java From bigdata-tutorial with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: MapperInputSplitInfo <in> <out>");
    System.exit(2);
  }
  Job job = Job.getInstance(conf, MapperInputSplitInfo.class.getSimpleName());
  job.setJarByClass(MapperInputSplitInfo.class);
  job.setMapperClass(MyMapper.class);
  job.setCombinerClass(MyReducer.class);
  job.setReducerClass(MyReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #8
Source File: Submitter.java From hadoop with Apache License 2.0
void printUsage() {
  // The CLI package should do this for us, but I can't figure out how
  // to make it print something reasonable.
  System.out.println("bin/hadoop pipes");
  System.out.println(" [-input <path>] // Input directory");
  System.out.println(" [-output <path>] // Output directory");
  System.out.println(" [-jar <jar file>] // jar filename");
  System.out.println(" [-inputformat <class>] // InputFormat class");
  System.out.println(" [-map <class>] // Java Map class");
  System.out.println(" [-partitioner <class>] // Java Partitioner");
  System.out.println(" [-reduce <class>] // Java Reduce class");
  System.out.println(" [-writer <class>] // Java RecordWriter");
  System.out.println(" [-program <executable>] // executable URI");
  System.out.println(" [-reduces <num>] // number of reduces");
  System.out.println(" [-lazyOutput <true/false>] // createOutputLazily");
  System.out.println();
  GenericOptionsParser.printGenericCommandUsage(System.out);
}
Example #9
Source File: WordCount.java From knox with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: wordcount <in> <out>");
    System.exit(2);
  }
  Job job = Job.getInstance(conf, "Word Count");
  job.setJarByClass(WordCount.class);
  job.setMapperClass(TokenizerMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #10
Source File: WordCount.java From hadoop-book with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: wordcount <in> <out>");
    System.exit(2);
  }
  Job job = new Job(conf, "word count");
  job.setJarByClass(WordCount.class);
  job.setMapperClass(TokenizerMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #11
Source File: WordCount.java From wifi with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: wordcount <in> <out>");
    System.exit(2);
  }
  Job job = new Job(conf, "word count");
  job.setJarByClass(WordCount.class);
  job.setMapperClass(TokenizerMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  // Use the post-parse arguments rather than the raw argv, so generic
  // options such as -D are not mistaken for input/output paths.
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #12
Source File: LinkCountHDFS.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  if (args.length < 2) {
    System.err.println("Usage: LinkCountHDFS inputDir outputDir");
    System.exit(2);
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "link count hdfs");
  job.setJarByClass(LinkCountHDFS.class);
  job.setInputFormatClass(HDFSInputFormat.class);
  job.setMapperClass(RefMapper.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(IntWritable.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  HDFSInputFormat.setInputPaths(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #13
Source File: LinkCountInProperty.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  if (args.length < 1) {
    System.err.println("Usage: LinkCountInProperty configFile");
    System.exit(2);
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "link count in property");
  job.setJarByClass(LinkCountInProperty.class);
  job.setInputFormatClass(ValueInputFormat.class);
  job.setMapperClass(RefMapper.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(IntWritable.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputFormatClass(PropertyOutputFormat.class);
  job.setOutputKeyClass(DocumentURI.class);
  job.setOutputValueClass(MarkLogicNode.class);
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  conf.setClass(MarkLogicConstants.INPUT_VALUE_CLASS, Text.class,
      Writable.class);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #14
Source File: ContentLoader.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  if (args.length < 2) {
    System.err.println("Usage: ContentLoader configFile inputDir");
    System.exit(2);
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "content loader");
  job.setJarByClass(ContentLoader.class);
  job.setInputFormatClass(ContentInputFormat.class);
  job.setMapperClass(ContentMapper.class);
  job.setMapOutputKeyClass(DocumentURI.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(ContentOutputFormat.class);
  ContentInputFormat.setInputPaths(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #15
Source File: LinkCountInDoc.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  if (args.length < 1) {
    System.err.println("Usage: LinkCountInDoc configFile");
    System.exit(2);
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "link count in doc");
  job.setJarByClass(LinkCountInDoc.class);
  job.setInputFormatClass(NodeInputFormat.class);
  job.setMapperClass(RefMapper.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(IntWritable.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputFormatClass(NodeOutputFormat.class);
  job.setOutputKeyClass(NodePath.class);
  job.setOutputValueClass(MarkLogicNode.class);
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #16
Source File: WikiLoader.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  if (args.length < 2) {
    System.err.println("Usage: WikiLoader configFile inputDir");
    System.exit(2);
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "wiki loader");
  job.setJarByClass(WikiLoader.class);
  job.setInputFormatClass(WikiInputFormat.class);
  job.setMapperClass(ArticleMapper.class);
  job.setMapOutputKeyClass(DocumentURI.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(ContentOutputFormat.class);
  ContentInputFormat.setInputPaths(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #17
Source File: ZipContentLoader.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: ZipContentLoader configFile inputDir");
    System.exit(2);
  }
  Job job = Job.getInstance(conf, "zip content loader");
  job.setJarByClass(ZipContentLoader.class);
  job.setInputFormatClass(ZipContentInputFormat.class);
  job.setMapperClass(ZipContentMapper.class);
  job.setMapOutputKeyClass(DocumentURI.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(ContentOutputFormat.class);
  ZipContentInputFormat.setInputPaths(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #18
Source File: HelloWorld.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = Job.getInstance(conf, "hello world");
  job.setJarByClass(HelloWorld.class);

  // Map related configuration
  job.setInputFormatClass(DocumentInputFormat.class);
  job.setMapperClass(MyMapper.class);
  job.setMapOutputKeyClass(IntWritable.class);
  job.setMapOutputValueClass(Text.class);

  // Reduce related configuration
  job.setReducerClass(MyReducer.class);
  job.setOutputFormatClass(ContentOutputFormat.class);
  job.setOutputKeyClass(DocumentURI.class);
  job.setOutputValueClass(Text.class);

  conf = job.getConfiguration();
  conf.addResource("marklogic-hello-world.xml");
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #19
Source File: ElementValueMatchTest.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  // Both configFile and outputDir are required; otherArgs[1] is used below.
  if (otherArgs.length < 2) {
    System.err.println("Usage: ElementValueMatchTest configFile outputDir");
    System.exit(2);
  }
  Job job = Job.getInstance(conf);
  job.setJarByClass(ElementValueMatchTest.class);
  job.setInputFormatClass(ValueInputFormat.class);
  job.setMapperClass(ElementValueMatchMapper.class);
  job.setMapOutputKeyClass(LongWritable.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  conf.setClass(MarkLogicConstants.INPUT_VALUE_CLASS, Text.class,
      Writable.class);
  conf.setClass(MarkLogicConstants.INPUT_LEXICON_FUNCTION_CLASS,
      ElementValueMatchFunction.class, ElementValueMatch.class);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #20
Source File: PreRegionsCreation.java From hbase-secondary-index with GNU General Public License v3.0
public static void main(String[] args) throws IOException, ParseException {
  Configuration conf = HBaseConfiguration.create();
  String[] otherArgs = new GenericOptionsParser(conf, args)
      .getRemainingArgs();
  CommandLine cmd = parseArgs(otherArgs);
  String indexTable = cmd.getOptionValue("i");
  String columns = cmd.getOptionValue("c");
  String startKey = cmd.getOptionValue("s");
  String endKey = cmd.getOptionValue("e");
  PreRegionsCreation pc = new PreRegionsCreation();
  String[] cfs = columns.split(",");
  if (null != startKey && null != endKey)
    pc.create(indexTable, cfs, startKey, endKey);
  else
    pc.create(indexTable, cfs);
}
Example #21
Source File: WordsTest.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  // Both configFile and outputDir are required; otherArgs[1] is used below.
  if (otherArgs.length < 2) {
    System.err.println("Usage: WordsTest configFile outputDir");
    System.exit(2);
  }
  Job job = Job.getInstance(conf);
  job.setJarByClass(WordsTest.class);
  job.setInputFormatClass(ValueInputFormat.class);
  job.setMapperClass(WordsMapper.class);
  job.setMapOutputKeyClass(LongWritable.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  conf.setClass(MarkLogicConstants.INPUT_VALUE_CLASS, Text.class,
      Writable.class);
  conf.setClass(MarkLogicConstants.INPUT_LEXICON_FUNCTION_CLASS,
      Words.class, Words.class);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #22
Source File: FaultToleranceTestRunner.java From tez with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  String className = null;
  String confFilePath = null;
  if (otherArgs.length == 1) {
    className = otherArgs[0];
  } else if (otherArgs.length == 2) {
    className = otherArgs[0];
    confFilePath = otherArgs[1];
  } else {
    printUsage();
    System.exit(1);
  }
  FaultToleranceTestRunner job = new FaultToleranceTestRunner();
  if (job.run(conf, className, confFilePath)) {
    System.out.println("Succeeded.");
  } else {
    System.out.println("Failed.");
    System.exit(2);
  }
}
Example #23
Source File: WikipediaExtractor.java From hadoop-map-reduce-patterns with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: WikipediaExtractor <in> <out>");
    ToolRunner.printGenericCommandUsage(System.err);
    System.exit(2);
  }
  Job job = new Job(conf, "StackOverflow Answer to Wikipedia URL Reverse Index Creation");
  job.setJarByClass(WikipediaExtractor.class);
  job.setMapperClass(WikipediaUrlMapper.class);
  job.setReducerClass(WikipediaUrlReducer.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  boolean success = job.waitForCompletion(true);
  return success ? 0 : 1;
}
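Drivers that override run(String[]) like this one implement the Tool interface and are normally launched through ToolRunner, which itself runs GenericOptionsParser before calling run(), so the generic options are already applied to the configuration it hands over. A minimal sketch of such a launcher, assuming WikipediaExtractor implements Tool (the Driver class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class Driver {
  public static void main(String[] args) throws Exception {
    // ToolRunner parses the generic options, stores the resulting
    // Configuration on the Tool, and passes only the remaining
    // arguments to WikipediaExtractor.run().
    int exitCode = ToolRunner.run(new Configuration(),
        new WikipediaExtractor(), args);
    System.exit(exitCode);
  }
}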
Example #24
Source File: ValueCooccurrencesTest.java From marklogic-contentpump with Apache License 2.0
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length < 2) {
    System.err.println("Usage: ValueCooccurrencesTest configFile outputDir");
    System.exit(2);
  }
  Job job = Job.getInstance(conf);
  job.setJarByClass(ValueCooccurrencesTest.class);
  job.setInputFormatClass(ValueInputFormat.class);
  job.setMapperClass(ValueCooccurrencesMapper.class);
  job.setMapOutputKeyClass(LongWritable.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  conf = job.getConfiguration();
  conf.addResource(otherArgs[0]);
  conf.setClass(MarkLogicConstants.INPUT_VALUE_CLASS, Text.class,
      Writable.class);
  conf.setClass(MarkLogicConstants.INPUT_LEXICON_FUNCTION_CLASS,
      ValueCooccurrencesFunction.class, ValueCooccurrences.class);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
Example #25
Source File: ApplicationHistoryServer.java From big-c with Apache License 2.0
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
  Thread
      .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args, LOG);
  ApplicationHistoryServer appHistoryServer = null;
  try {
    appHistoryServer = new ApplicationHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(appHistoryServer),
        SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration();
    new GenericOptionsParser(conf, args);
    appHistoryServer.init(conf);
    appHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting ApplicationHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
  }
  return appHistoryServer;
}
Example #26
Source File: Submitter.java From big-c with Apache License 2.0
void printUsage() {
  // The CLI package should do this for us, but I can't figure out how
  // to make it print something reasonable.
  System.out.println("bin/hadoop pipes");
  System.out.println(" [-input <path>] // Input directory");
  System.out.println(" [-output <path>] // Output directory");
  System.out.println(" [-jar <jar file>] // jar filename");
  System.out.println(" [-inputformat <class>] // InputFormat class");
  System.out.println(" [-map <class>] // Java Map class");
  System.out.println(" [-partitioner <class>] // Java Partitioner");
  System.out.println(" [-reduce <class>] // Java Reduce class");
  System.out.println(" [-writer <class>] // Java RecordWriter");
  System.out.println(" [-program <executable>] // executable URI");
  System.out.println(" [-reduces <num>] // number of reduces");
  System.out.println(" [-lazyOutput <true/false>] // createOutputLazily");
  System.out.println();
  GenericOptionsParser.printGenericCommandUsage(System.out);
}
Example #27
Source File: MinMaxCount.java From hadoop-map-reduce-patterns with Apache License 2.0
@Override
public int run(String[] arg0) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, arg0).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: MinMaxCount <in> <out>");
    System.exit(2);
  }
  Job job = new Job(conf, "StackOverflow Comment MinMaxCount");
  job.setJarByClass(MinMaxCount.class);
  job.setMapperClass(MinMaxCountMapper.class);
  job.setCombinerClass(MinMaxCountReducer.class);
  job.setReducerClass(MinMaxCountReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(MinMaxCountTuple.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  boolean success = job.waitForCompletion(true);
  return success ? 0 : 1;
}
Example #28
Source File: Submitter.java From hadoop-gpu with Apache License 2.0
void printUsage() {
  // The CLI package should do this for us, but I can't figure out how
  // to make it print something reasonable.
  System.out.println("bin/hadoop pipes");
  System.out.println(" [-input <path>] // Input directory");
  System.out.println(" [-output <path>] // Output directory");
  System.out.println(" [-jar <jar file>] // jar filename");
  System.out.println(" [-inputformat <class>] // InputFormat class");
  System.out.println(" [-map <class>] // Java Map class");
  System.out.println(" [-partitioner <class>] // Java Partitioner");
  System.out.println(" [-reduce <class>] // Java Reduce class");
  System.out.println(" [-writer <class>] // Java RecordWriter");
  System.out.println(" [-program <executable>] // executable URI");
  System.out.println(" [-reduces <num>] // number of reduces");
  System.out.println();
  GenericOptionsParser.printGenericCommandUsage(System.out);
}
Example #29
Source File: DMLScript.java From systemds with Apache License 2.0
/**
 * @param args command-line arguments
 * @throws IOException if an IOException occurs in the hadoop GenericOptionsParser
 */
public static void main(String[] args) throws IOException {
  Configuration conf = new Configuration(ConfigurationManager.getCachedJobConf());
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  try {
    DMLScript.executeScript(conf, otherArgs);
  } catch (ParseException | DMLScriptException e) {
    // In case of DMLScriptException, simply print the error message.
    System.err.println(e.getMessage());
  }
}
Example #30
Source File: ReliabilityTest.java From big-c with Apache License 2.0
public int run(String[] args) throws Exception {
  Configuration conf = getConf();
  if ("local".equals(conf.get(JTConfig.JT_IPC_ADDRESS, "local"))) {
    displayUsage();
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length == 2) {
    if (otherArgs[0].equals("-scratchdir")) {
      dir = otherArgs[1];
    } else {
      displayUsage();
    }
  } else if (otherArgs.length == 0) {
    dir = System.getProperty("user.dir");
  } else {
    displayUsage();
  }
  // to protect against the case of jobs failing even when multiple attempts
  // fail, set some high values for the max attempts
  conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 10);
  conf.setInt(JobContext.REDUCE_MAX_ATTEMPTS, 10);
  runSleepJobTest(new JobClient(new JobConf(conf)), conf);
  runSortJobTests(new JobClient(new JobConf(conf)), conf);
  return 0;
}