Java Code Examples for org.apache.hadoop.mapreduce.Cluster#getAllJobStatuses()

The following examples show how to use org.apache.hadoop.mapreduce.Cluster#getAllJobStatuses(). You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: KillJobByRegex.java    From datawave with Apache License 2.0 5 votes vote down vote up
/**
 * Entry point: kills every incomplete MapReduce job whose name matches the
 * supplied regular expression.
 *
 * <p>Usage: {@code KillJobByRegex jobNamePattern}. The pattern is compiled
 * into the class-level {@code NAME_PATTERN}; matching/killing is delegated to
 * {@code JobKiller} tasks submitted to {@code JOB_KILLER_SVC}.
 *
 * @param args command-line arguments; exactly one element, the job-name regex
 * @throws IOException if the cluster connection or job listing fails
 * @throws InterruptedException if fetching job statuses is interrupted
 */
public static void main(String[] args) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
    args = parser.getRemainingArgs();
    
    if (args.length != 1) {
        System.err.println("usage: KillJobByRegex jobNamePattern");
        System.exit(1);
    }
    
    NAME_PATTERN = Pattern.compile(args[0]);
    
    org.apache.hadoop.mapred.JobConf jobConf = new org.apache.hadoop.mapred.JobConf(conf);
    Cluster cluster = new Cluster(jobConf);
    
    // Submit a kill task for every job that is still running.
    for (JobStatus js : cluster.getAllJobStatuses()) {
        if (!js.isJobComplete()) {
            JOB_KILLER_SVC.execute(new JobKiller(cluster, js));
        }
    }
    
    JOB_KILLER_SVC.shutdown(); // no new tasks; does not throw InterruptedException
    try {
        JOB_KILLER_SVC.awaitTermination(1, TimeUnit.MINUTES); // allow in-flight kills to finish
    } catch (InterruptedException e) {
        JOB_KILLER_SVC.shutdownNow();
        // Restore the interrupt status so callers up the stack can see it
        // (the original code silently swallowed the interrupt).
        Thread.currentThread().interrupt();
    }
    
    System.out.println("Killed " + JOB_KILLED_COUNT.get() + " jobs");
    System.out.println("Failed to kill " + JOB_FAILED_COUNT.get() + " jobs");
    System.exit(0);
}
 
Example 2
Source File: CLI.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Prints a summary of every job on the cluster that has not yet completed.
 *
 * @param cluster the cluster whose job statuses are queried
 * @throws IOException if the job listing cannot be retrieved
 * @throws InterruptedException if the status query is interrupted
 */
private void listJobs(Cluster cluster) 
    throws IOException, InterruptedException {
  JobStatus[] allStatuses = cluster.getAllJobStatuses();
  List<JobStatus> incomplete = new ArrayList<JobStatus>();
  for (int i = 0; i < allStatuses.length; i++) {
    JobStatus status = allStatuses[i];
    // Keep only jobs that are still in flight.
    if (status.isJobComplete()) {
      continue;
    }
    incomplete.add(status);
  }
  displayJobList(incomplete.toArray(new JobStatus[incomplete.size()]));
}
 
Example 3
Source File: CLI.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Dumps a list of all jobs currently running on the cluster, i.e. those
 * whose status is not yet complete.
 *
 * @param cluster the cluster to query for job statuses
 * @throws IOException if fetching the job statuses fails
 * @throws InterruptedException if the query is interrupted
 */
private void listJobs(Cluster cluster) 
    throws IOException, InterruptedException {
  List<JobStatus> active = new ArrayList<JobStatus>();
  JobStatus[] statuses = cluster.getAllJobStatuses();
  for (JobStatus status : statuses) {
    boolean stillRunning = !status.isJobComplete();
    if (stillRunning) {
      active.add(status);
    }
  }
  JobStatus[] display = active.toArray(new JobStatus[0]);
  displayJobList(display);
}
 
Example 4
Source File: ClusterDriver.java    From incubator-retired-blur with Apache License 2.0 5 votes vote down vote up
/**
 * Kills every running MapReduce job whose submitted job configuration carries
 * a {@code BLUR_ENV} value equal to the given environment name.
 *
 * <p>For each incomplete job, the job's configuration file is read back from
 * the filesystem (when readable) and its {@code BLUR_ENV} property is compared
 * against {@code blurEnv}; matching jobs are killed.
 *
 * @param blurEnv the Blur environment identifier to match against
 * @param conf    Hadoop configuration used to connect to the cluster
 * @throws YarnException if the cluster query fails at the YARN layer
 * @throws IOException if reading a job file or killing a job fails
 * @throws InterruptedException if a cluster call is interrupted
 */
private void stopAllExistingMRJobs(String blurEnv, Configuration conf) throws YarnException, IOException,
    InterruptedException {
  Cluster cluster = new Cluster(conf);
  for (JobStatus status : cluster.getAllJobStatuses()) {
    // Finished jobs need no action.
    if (status.isJobComplete()) {
      continue;
    }
    JobID id = status.getJobID();
    Job runningJob = cluster.getJob(id);
    FileSystem fs = FileSystem.get(runningJob.getConfiguration());
    Path jobFilePath = new Path(status.getJobFile());
    Path qualified = jobFilePath.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    // Skip job files we cannot read; we can't determine their environment.
    if (!hasReadAccess(fs, qualified)) {
      continue;
    }
    // Load the submitted job configuration (empty config, no defaults).
    Configuration submittedConf = new Configuration(false);
    try (FSDataInputStream in = fs.open(qualified)) {
      submittedConf.addResource(copy(in));
    }
    String jobBlurEnv = submittedConf.get(BLUR_ENV);
    LOG.info("Checking job [{0}] has env [{1}] current env set to [{2}]", id, jobBlurEnv, blurEnv);
    if (blurEnv.equals(jobBlurEnv)) {
      LOG.info("Killing running job [{0}]", id);
      runningJob.killJob();
    }
  }
}