org.apache.hadoop.hbase.util.AbstractHBaseTool Java Examples
The following examples show how to use org.apache.hadoop.hbase.util.AbstractHBaseTool.
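All of the examples below come from integration tests and command-line tools in the Apache HBase source tree. For context, AbstractHBaseTool is HBase's common base class for command-line tools: it implements Hadoop's Tool interface, handles option parsing, and delegates to three methods that subclasses implement, addOptions(), processOptions(CommandLine) and doWork(). The following is a minimal sketch of that subclass pattern, assuming a recent HBase 2.x API; the class name MyTool and the -t/--table option are illustrative placeholders, not taken from the examples on this page.

import org.apache.hadoop.hbase.util.AbstractHBaseTool;
// Older HBase releases use the unshaded import: org.apache.commons.cli.CommandLine.
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

// Illustrative subclass; "MyTool" and the "-t/--table" option are placeholders.
public class MyTool extends AbstractHBaseTool {
  private String tableName;

  @Override
  protected void addOptions() {
    // Declare options up front; the base class parses args before doWork() runs.
    addRequiredOptWithArg("t", "table", "Table to operate on");
  }

  @Override
  protected void processOptions(CommandLine cmd) {
    tableName = cmd.getOptionValue("t");
  }

  @Override
  protected int doWork() throws Exception {
    // The inherited 'conf' field is populated before doWork() is called.
    System.out.println("Would operate on table " + tableName);
    return EXIT_SUCCESS;
  }

  public static void main(String[] args) {
    // doStaticMain() wraps ToolRunner.run() and System.exit(),
    // the same pattern written out by hand in Example #5 below.
    new MyTool().doStaticMain(args);
  }
}

Note that most of the examples below do not subclass AbstractHBaseTool at all: Examples #1, #2, #3, #7, #8 and #9 merely pass AbstractHBaseTool.class to TableMapReduceUtil so that the jar containing it is shipped with a MapReduce job, while Examples #4 and #5 reuse its EXIT_FAILURE constant and Example #6 its OptionsOrderComparator.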
Example #1
Source File: IntegrationTestWithCellVisibilityLoadAndVerify.java From hbase with Apache License 2.0
private Job doVerify(Configuration conf, TableDescriptor tableDescriptor, String... auths)
    throws IOException, InterruptedException, ClassNotFoundException {
  Path outputDir = getTestDir(TEST_NAME, "verify-output");
  Job job = new Job(conf);
  job.setJarByClass(this.getClass());
  job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
  setJobScannerConf(job);
  Scan scan = new Scan();
  scan.setAuthorizations(new Authorizations(auths));
  TableMapReduceUtil.initTableMapperJob(tableDescriptor.getTableName().getNameAsString(), scan,
      VerifyMapper.class, NullWritable.class, NullWritable.class, job);
  // Ship the jar containing AbstractHBaseTool with the job so it is on the task classpath.
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
  TableMapReduceUtil.setScannerCaching(job, scannerCaching);
  job.setNumReduceTasks(0);
  FileOutputFormat.setOutputPath(job, outputDir);
  assertTrue(job.waitForCompletion(true));
  return job;
}
Example #2
Source File: IntegrationTestLoadAndVerify.java From hbase with Apache License 2.0
protected Job doLoad(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
  Path outputDir = getTestDir(TEST_NAME, "load-output");
  LOG.info("Load output dir: " + outputDir);
  NMapInputFormat.setNumMapTasks(conf, conf.getInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT));
  conf.set(TABLE_NAME_KEY, tableDescriptor.getTableName().getNameAsString());
  Job job = Job.getInstance(conf);
  job.setJobName(TEST_NAME + " Load for " + tableDescriptor.getTableName());
  job.setJarByClass(this.getClass());
  setMapperClass(job);
  job.setInputFormatClass(NMapInputFormat.class);
  job.setNumReduceTasks(0);
  setJobScannerConf(job);
  FileOutputFormat.setOutputPath(job, outputDir);
  TableMapReduceUtil.addDependencyJars(job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  // Obtain delegation tokens so the job can run against a secure cluster.
  TableMapReduceUtil.initCredentials(job);
  assertTrue(job.waitForCompletion(true));
  return job;
}
Example #3
Source File: IntegrationTestLoadAndVerify.java From hbase with Apache License 2.0
protected void doVerify(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
  Path outputDir = getTestDir(TEST_NAME, "verify-output");
  LOG.info("Verify output dir: " + outputDir);
  Job job = Job.getInstance(conf);
  job.setJarByClass(this.getClass());
  job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
  setJobScannerConf(job);
  Scan scan = new Scan();
  TableMapReduceUtil.initTableMapperJob(tableDescriptor.getTableName().getNameAsString(), scan,
      VerifyMapper.class, BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
  TableMapReduceUtil.setScannerCaching(job, scannerCaching);
  job.setReducerClass(VerifyReducer.class);
  job.setNumReduceTasks(conf.getInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT));
  FileOutputFormat.setOutputPath(job, outputDir);
  assertTrue(job.waitForCompletion(true));
  long numOutputRecords = job.getCounters().findCounter(Counters.ROWS_WRITTEN).getValue();
  assertEquals(0, numOutputRecords);
}
Example #4
Source File: PreUpgradeValidator.java From hbase with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
  if (args.length == 0) {
    printUsage();
    return AbstractHBaseTool.EXIT_FAILURE;
  }
  Tool tool;
  switch (args[0]) {
    case VALIDATE_CP_NAME:
      tool = new CoprocessorValidator();
      break;
    case VALIDATE_DBE_NAME:
      tool = new DataBlockEncodingValidator();
      break;
    case VALIDATE_HFILE:
      tool = new HFileContentValidator();
      break;
    case "-h":
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
    default:
      System.err.println("Unknown command: " + args[0]);
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
  }
  tool.setConf(getConf());
  return tool.run(Arrays.copyOfRange(args, 1, args.length));
}
Example #5
Source File: PreUpgradeValidator.java From hbase with Apache License 2.0
public static void main(String[] args) {
  int ret;
  Configuration conf = HBaseConfiguration.create();
  try {
    ret = ToolRunner.run(conf, new PreUpgradeValidator(), args);
  } catch (Exception e) {
    LOG.error("Error running command-line tool", e);
    ret = AbstractHBaseTool.EXIT_FAILURE;
  }
  System.exit(ret);
}
Example #6
Source File: RowCounter.java From hbase with Apache License 2.0
@Override
protected void printUsage(final String usageStr, final String usageHeader,
    final String usageFooter) {
  HelpFormatter helpFormatter = new HelpFormatter();
  helpFormatter.setWidth(120);
  helpFormatter.setOptionComparator(new AbstractHBaseTool.OptionsOrderComparator());
  helpFormatter.setLongOptSeparator("=");
  helpFormatter.printHelp(usageStr, usageHeader, options, usageFooter);
}
Example #7
Source File: IntegrationTestBigLinkedListWithVisibility.java From hbase with Apache License 2.0
private int doVerify(Path outputDir, int numReducers)
    throws IOException, InterruptedException, ClassNotFoundException {
  // 'job' is a field of the enclosing class, not a local variable.
  job = new Job(getConf());
  job.setJobName("Link Verifier");
  job.setNumReduceTasks(numReducers);
  job.setJarByClass(getClass());
  setJobScannerConf(job);
  Scan scan = new Scan();
  scan.addColumn(FAMILY_NAME, COLUMN_PREV);
  scan.setCaching(10000);
  scan.setCacheBlocks(false);
  String[] split = labels.split(COMMA);
  scan.setAuthorizations(new Authorizations(split[this.labelIndex * 2],
      split[(this.labelIndex * 2) + 1]));
  TableMapReduceUtil.initTableMapperJob(tableName.getName(), scan, VerifyMapper.class,
      BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
  job.setReducerClass(VerifyReducer.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  TextOutputFormat.setOutputPath(job, outputDir);
  boolean success = job.waitForCompletion(true);
  return success ? 0 : 1;
}
Example #8
Source File: IntegrationTestBigLinkedList.java From hbase with Apache License 2.0
public int runGenerator(int numMappers, long numNodes, Path tmpOutput, Integer width,
    Integer wrapMultiplier, Integer numWalkers) throws Exception {
  LOG.info("Running Generator with numMappers=" + numMappers + ", numNodes=" + numNodes);
  createSchema();
  job = Job.getInstance(getConf());
  job.setJobName("Link Generator");
  job.setNumReduceTasks(0);
  job.setJarByClass(getClass());
  FileInputFormat.setInputPaths(job, tmpOutput);
  job.setInputFormatClass(OneFilePerMapperSFIF.class);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(NullWritable.class);
  setJobConf(job, numMappers, numNodes, width, wrapMultiplier, numWalkers);
  setMapperForGenerator(job);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
  TableMapReduceUtil.addDependencyJars(job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  TableMapReduceUtil.initCredentials(job);
  boolean success = jobCompletion(job);
  return success ? 0 : 1;
}
Example #9
Source File: IntegrationTestBigLinkedList.java From hbase with Apache License 2.0
public int run(Path outputDir, int numReducers) throws Exception {
  LOG.info("Running Verify with outputDir=" + outputDir + ", numReducers=" + numReducers);
  job = Job.getInstance(getConf());
  job.setJobName("Link Verifier");
  job.setNumReduceTasks(numReducers);
  job.setJarByClass(getClass());
  setJobScannerConf(job);
  Scan scan = new Scan();
  scan.addColumn(FAMILY_NAME, COLUMN_PREV);
  scan.setCaching(10000);
  scan.setCacheBlocks(false);
  if (isMultiUnevenColumnFamilies(getConf())) {
    scan.addColumn(BIG_FAMILY_NAME, BIG_FAMILY_NAME);
    scan.addColumn(TINY_FAMILY_NAME, TINY_FAMILY_NAME);
  }
  TableMapReduceUtil.initTableMapperJob(getTableName(getConf()).getName(), scan,
      VerifyMapper.class, BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
  job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
  job.setReducerClass(VerifyReducer.class);
  job.setOutputFormatClass(SequenceFileAsBinaryOutputFormat.class);
  job.setOutputKeyClass(BytesWritable.class);
  job.setOutputValueClass(BytesWritable.class);
  TextOutputFormat.setOutputPath(job, outputDir);
  boolean success = job.waitForCompletion(true);
  if (success) {
    Counters counters = job.getCounters();
    if (null == counters) {
      LOG.warn("Counters were null, cannot verify Job completion."
          + " This is commonly a result of insufficient YARN configuration.");
      // We don't have access to the counters to know if we have "bad" counts
      return 0;
    }
    // If we find no unexpected values, the job didn't outright fail
    if (verifyUnexpectedValues(counters)) {
      // We didn't check referenced+unreferenced counts, leave that to visual inspection
      return 0;
    }
  }
  // We failed
  return 1;
}