Java Code Examples for org.apache.hadoop.mapreduce.Job#isComplete()

The following examples show how to use org.apache.hadoop.mapreduce.Job#isComplete(). They are extracted from open source projects; where available, the original project, source file, and license are noted above each example.
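The examples all follow the same basic pattern: submit the job, poll isComplete() in a sleep loop until the job has finished (whether it succeeded, failed, or was killed), then call isSuccessful() to tell those outcomes apart. Here is a minimal, self-contained sketch of that pattern; the class and method names are illustrative, not taken from any of the projects below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class PollJobExample {
  public static void runAndWait(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "poll-example"); // mapper, reducer, and I/O setup omitted
    job.submit();
    while (!job.isComplete()) { // true once the job has finished, successfully or not
      Thread.sleep(1000);       // poll at a modest interval; each check contacts the cluster
    }
    if (!job.isSuccessful()) {  // isComplete() only says "done"; isSuccessful() says "done and passed"
      throw new RuntimeException("job failed: " + job.getJobName());
    }
  }
}

If nothing needs to happen between polls, Job#waitForCompletion(boolean verbose) performs the same submit-and-poll loop internally; writing the loop out by hand, as the examples below do, is useful when you want to interleave progress reporting or cancellation checks between polls.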
Example 1
Source Project: hbase    File: MapReduceBackupCopyJob.java    License: Apache License 2.0
@Override
public void cancel(String jobId) throws IOException {
  JobID id = JobID.forName(jobId);
  Cluster cluster = new Cluster(this.getConf());
  try {
    Job job = cluster.getJob(id);
    if (job == null) {
      LOG.error("No job found for " + id);
      // TODO: should we throw an exception here instead of returning silently?
      return;
    }
    if (job.isComplete() || job.isRetired()) {
      return;
    }

    job.killJob();
    LOG.debug("Killed copy job " + id);
  } catch (InterruptedException e) {
    throw new IOException(e);
  }
}
 
Example 2
private void submitAndWait(Job job) throws ClassNotFoundException, IOException, InterruptedException {
  job.submit();
  MRCompactor.addRunningHadoopJob(this.dataset, job);
  LOG.info(String.format("MR job submitted for dataset %s, input %s, url: %s", this.dataset, getInputPaths(),
      job.getTrackingURL()));
  while (!job.isComplete()) {
    if (this.policy == Policy.ABORT_ASAP) {
      LOG.info(String.format(
          "MR job for dataset %s, input %s killed due to input data incompleteness." + " Will try again later",
          this.dataset, getInputPaths()));
      job.killJob();
      return;
    }
    Thread.sleep(MR_JOB_CHECK_COMPLETE_INTERVAL_MS);
  }
  if (!job.isSuccessful()) {
    throw new RuntimeException(String.format("MR job failed for topic %s, input %s, url: %s", this.dataset,
        getInputPaths(), job.getTrackingURL()));
  }
}
 
Example 3
Source Project: incubator-gobblin    File: MRCompactor.java    License: Apache License 2.0
@Override
public void cancel() throws IOException {
  try {
    for (Map.Entry<Dataset, Job> entry : MRCompactor.RUNNING_MR_JOBS.entrySet()) {
      Job hadoopJob = entry.getValue();
      if (!hadoopJob.isComplete()) {
        LOG.info(String.format("Killing hadoop job %s for dataset %s", hadoopJob.getJobID(), entry.getKey()));
        hadoopJob.killJob();
      }
    }
  } finally {
    try {
      ExecutorsUtils.shutdownExecutorService(this.jobExecutor, Optional.of(LOG), 0, TimeUnit.NANOSECONDS);
    } finally {
      if (this.verifier.isPresent()) {
        this.verifier.get().closeNow();
      }
    }
  }
}
 
Example 4
private void waitForJob(Job job) throws InterruptedException, IOException {
  while (!job.isComplete()) {
    LOG.debug("waiting for job {}", job.getJobName());
    sleep(100);
  }
  LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
Example 5
private void waitForJob(Job job) throws Exception {
  job.submit();
  while (!job.isComplete()) {
    sleep(100);
  }
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
Example 6
private void waitForJob(Job job) throws InterruptedException, IOException {
  while (!job.isComplete()) {
    System.out.println("waiting for job " + job.getJobName());
    sleep(100);
  }
  System.out.println("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
Example 7
public static void waitForJob(Job job) throws Exception {
  job.submit();
  while (!job.isComplete()) {
    LOG.debug("waiting for job {}", job.getJobName());
    sleep(100);
  }
  LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
Example 8
Source Project: parquet-mr    File: WriteUsingMR.java    License: Apache License 2.0
static void waitForJob(Job job) throws Exception {
  job.submit();
  while (!job.isComplete()) {
    LOG.debug("waiting for job {}", job.getJobName());
    sleep(50);
  }
  LOG.debug("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
Example 9
private void waitForJob(Job job) throws Exception {
  job.submit();
  while (!job.isComplete()) {
    LOG.debug("waiting for job {}", job.getJobName());
    sleep(100);
  }
  LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
 
 
Example 10
Source Project: phoenix    File: IndexRebuildTask.java    License: Apache License 2.0
@Override
public TaskRegionObserver.TaskResult checkCurrentResult(Task.TaskRecord taskRecord)
        throws Exception {

    String jobID = getJobID(taskRecord.getData());
    if (jobID != null) {
        Configuration conf = HBaseConfiguration.create(env.getConfiguration());
        Configuration configuration = HBaseConfiguration.addHbaseResources(conf);
        Cluster cluster = new Cluster(configuration);

        Job job = cluster.getJob(org.apache.hadoop.mapreduce.JobID.forName(jobID));
        if (job == null) {
            return new TaskRegionObserver.TaskResult(TaskRegionObserver.TaskResultCode.SKIPPED, "");
        }
        if (job.isComplete()) {
            if (job.isSuccessful()) {
                LOGGER.warn("IndexRebuildTask checkCurrentResult job is successful "
                        + taskRecord.getTableName());
                return new TaskRegionObserver.TaskResult(TaskRegionObserver.TaskResultCode.SUCCESS, "");
            } else {
                return new TaskRegionObserver.TaskResult(TaskRegionObserver.TaskResultCode.FAIL,
                        "Index is DISABLED");
            }
        }

    }
    return null;
}
 
Example 11
public void testBlurOutputFormatCleanupDuringJobKillTest() throws IOException, InterruptedException,
    ClassNotFoundException {
  Path input = getInDir();
  Path output = getOutDir();
  _fileSystem.delete(input, true);
  _fileSystem.delete(output, true);
  // 1500 * 50 = 75,000
  writeRecordsFile(new Path(input, "part1"), 1, 50, 1, 1500, "cf1");
  // 100 * 5000 = 500,000
  writeRecordsFile(new Path(input, "part2"), 1, 5000, 2000, 100, "cf1");

  Job job = Job.getInstance(_conf, "blur index");
  job.setJarByClass(BlurOutputFormatTest.class);
  job.setMapperClass(CsvBlurMapper.class);
  job.setInputFormatClass(TextInputFormat.class);

  FileInputFormat.addInputPath(job, input);
  CsvBlurMapper.addColumns(job, "cf1", "col");

  Path tablePath = new Path(new Path(_root, "table"), "test");

  TableDescriptor tableDescriptor = new TableDescriptor();
  tableDescriptor.setShardCount(2);
  tableDescriptor.setTableUri(tablePath.toString());
  tableDescriptor.setName("test");

  createShardDirectories(getOutDir(), 2);

  BlurOutputFormat.setupJob(job, tableDescriptor);
  BlurOutputFormat.setOutputPath(job, output);
  BlurOutputFormat.setIndexLocally(job, false);

  job.submit();
  boolean killCalled = false;
  while (!job.isComplete()) {
    Thread.sleep(1000);
    System.out.printf("Killed [" + killCalled + "] Map [%f] Reduce [%f]%n", job.mapProgress() * 100,
        job.reduceProgress() * 100);
    if (job.reduceProgress() > 0.7 && !killCalled) {
      job.killJob();
      killCalled = true;
    }
  }

  assertFalse(job.isSuccessful());

  for (int i = 0; i < tableDescriptor.getShardCount(); i++) {
    Path path = new Path(output, ShardUtil.getShardName(i));
    FileSystem fileSystem = path.getFileSystem(job.getConfiguration());
    FileStatus[] listStatus = fileSystem.listStatus(path);
    assertEquals(toString(listStatus), 0, listStatus.length);
  }
}