org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants Java Examples

The following examples show how to use org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: SimulatorJobStoryProducer.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Returns the next job from the trace that is suitable for the simulator.
 * Killed jobs are skipped (to facilitate debugging), as are jobs with no
 * maps or no logged maps.
 *
 * @return the next acceptable job, or {@code null} once the trace is exhausted
 * @throws IOException if reading from the underlying producer fails
 */
private JobStory getNextJobFiltered() throws IOException {
  for (ZombieJob candidate = producer.getNextJob();
       candidate != null;
       candidate = producer.getNextJob()) {
    boolean killed =
        candidate.getOutcome() == Pre21JobHistoryConstants.Values.KILLED;
    boolean mapless =
        candidate.getNumberMaps() == 0 || candidate.getNumLoggedMaps() == 0;
    if (!killed && !mapless) {
      return candidate;
    }
  }
  return null;
}
 
Example #2
Source File: DistributedCacheEmulator.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Create the list of unique distributed cache files needed for all the
 * simulated jobs and write the list to a special file.
 *
 * <p>Only successful jobs with a valid (non-negative) submission time
 * contribute files to the list.
 *
 * @param jsp job story producer for the trace
 * @return exit code
 * @throws IOException if reading the trace or writing the list fails
 */
private int buildDistCacheFilesList(JobStoryProducer jsp) throws IOException {
  // Scan every job in the trace, accumulating unique distributed cache files.
  for (JobStory story = jsp.getNextJob();
       story != null;
       story = jsp.getNextJob()) {
    boolean succeeded =
        story.getOutcome() == Pre21JobHistoryConstants.Values.SUCCESS;
    if (succeeded && story.getSubmissionTime() >= 0) {
      updateHDFSDistCacheFilesList(story);
    }
  }
  jsp.close();

  return writeDistCacheFilesList();
}
 
Example #3
Source File: DistributedCacheEmulator.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Create the list of unique distributed cache files needed for all the
 * simulated jobs and write the list to a special file.
 *
 * @param jsp job story producer for the trace
 * @return exit code
 * @throws IOException if reading the trace or writing the list fails
 */
private int buildDistCacheFilesList(JobStoryProducer jsp) throws IOException {
  // Walk the whole trace; jobs that failed or carry an invalid
  // (negative) submission time are ignored.
  JobStory story = jsp.getNextJob();
  while (story != null) {
    if (story.getOutcome() == Pre21JobHistoryConstants.Values.SUCCESS
        && story.getSubmissionTime() >= 0) {
      updateHDFSDistCacheFilesList(story);
    }
    story = jsp.getNextJob();
  }
  jsp.close();

  return writeDistCacheFilesList();
}
 
Example #4
Source File: MockSimulatorEngine.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Maps a job's final run state onto the corresponding pre-0.21
 * job-history outcome value.
 *
 * @param status final status of a completed job
 * @return {@code FAILED} or {@code SUCCESS} for the matching run state
 * @throws IllegalArgumentException if the run state is neither
 *         {@code FAILED} nor {@code SUCCEEDED}
 */
private Pre21JobHistoryConstants.Values convertState (JobStatus status) {
  final int runState = status.getRunState();
  if (runState == JobStatus.SUCCEEDED) {
    return Pre21JobHistoryConstants.Values.SUCCESS;
  }
  if (runState == JobStatus.FAILED) {
    return Pre21JobHistoryConstants.Values.FAILED;
  }
  throw new IllegalArgumentException("unknown status " + status);
}
 
Example #5
Source File: MockSimulatorEngine.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Checks the invariants of a job-completion event: the job must have been
 * submitted earlier, must not already be marked complete, and its reported
 * final status must match the outcome recorded at submission time.
 */
private void validateJobComplete(JobCompleteEvent completeEvent) {
  JobStatus finalStatus = completeEvent.getJobStatus();
  JobID jobId = finalStatus.getJobID();

  Assert.assertTrue("Job completed was not submitted:" + jobId,
      submittedJobs.contains(jobId));
  Assert.assertFalse("Job completed more than once:" + jobId,
      completedJobs.contains(jobId));
  completedJobs.add(jobId);

  // Compare the outcome recorded at submission against the reported status.
  Pre21JobHistoryConstants.Values expected = jobs.get(jobId).getOutcome();
  Pre21JobHistoryConstants.Values actual = convertState(finalStatus);
  Assert.assertEquals("Job completion final status mismatch", actual,
      expected);
}
 
Example #6
Source File: MockSimulatorEngine.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Checks the invariants of a job-submission event: the job must not have
 * been submitted before, and its trace outcome must be a terminal state
 * (SUCCESS or FAILED). Records the job for later completion validation.
 */
private void validateJobSubmission(JobSubmissionEvent submissionEvent) {
  JobID jobId = submissionEvent.getJob().getJobID();
  LOG.info("Job being submitted: " + jobId);
  Assert.assertFalse("Job " + jobId + " is already submitted",
      submittedJobs.contains(jobId));
  LOG.info("Adding to submitted Jobs " + jobId);
  submittedJobs.add(jobId);
  jobs.put(jobId, submissionEvent.getJob());
  Pre21JobHistoryConstants.Values outcome =
      submissionEvent.getJob().getOutcome();
  Assert.assertTrue("Job has final state neither SUCCESS nor FAILED",
      outcome == Pre21JobHistoryConstants.Values.SUCCESS
          || outcome == Pre21JobHistoryConstants.Values.FAILED);
}
 
Example #7
Source File: JobFactory.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the next usable job from the trace, skipping jobs that did not
 * succeed or that have an invalid (negative) submission time. The returned
 * job is wrapped so task lookups go through {@code MinTaskInfo}.
 *
 * @return the next acceptable job, or {@code null} when the trace ends
 * @throws IOException if the underlying producer fails
 */
private JobStory getNextJobFiltered() throws IOException {
  JobStory job = jobProducer.getNextJob();
  while (job != null
      && (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS
          || job.getSubmissionTime() < 0)) {
    job = jobProducer.getNextJob();
  }
  if (job == null) {
    return null;
  }
  return new FilterJobStory(job) {
      @Override
      public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
        return new MinTaskInfo(this.job.getTaskInfo(taskType, taskNumber));
      }
    };
}
 
Example #8
Source File: JobFactory.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the next job from the trace that Gridmix can run, skipping:
 * unsuccessful jobs, jobs with a missing (negative) submit-time, and
 * reduce-only jobs. Each skipped job is logged at debug level together
 * with the reasons it was rejected.
 *
 * @return the next acceptable job wrapped in a {@code FilterJobStory}
 *         whose task lookups go through {@code MinTaskInfo}, or
 *         {@code null} once the trace is exhausted
 * @throws IOException if reading the trace fails
 */
protected JobStory getNextJobFiltered() throws IOException {
  JobStory job;
  for (job = getNextJobFromTrace();
       job != null;
       job = getNextJobFromTrace()) {
    boolean unsuccessful =
        job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS;
    boolean missingSubmitTime = job.getSubmissionTime() < 0;
    boolean mapless = job.getNumberMaps() == 0;
    if (!unsuccessful && !missingSubmitTime && !mapless) {
      break;  // found a job Gridmix supports
    }
    if (LOG.isDebugEnabled()) {
      List<String> reason = new ArrayList<String>();
      if (unsuccessful) {
        reason.add("STATE (" + job.getOutcome().name() + ")");
      }
      if (missingSubmitTime) {
        reason.add("SUBMISSION-TIME (" + job.getSubmissionTime() + ")");
      }
      if (mapless) {
        reason.add("ZERO-MAPS-JOB");
      }

      // TODO This should never happen. Probably we missed something!
      if (reason.isEmpty()) {
        reason.add("N/A");
      }

      LOG.debug("Ignoring job " + job.getJobID() + " from the input trace."
                + " Reason: " + StringUtils.join(reason, ","));
    }
  }
  if (job == null) {
    return null;
  }
  return new FilterJobStory(job) {
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      TaskInfo info = this.job.getTaskInfo(taskType, taskNumber);
      // Substitute an all-zero TaskInfo when the trace has no data.
      return info == null
          ? new MinTaskInfo(new TaskInfo(0, 0, 0, 0, 0))
          : new MinTaskInfo(info);
    }
  };
}
 
Example #9
Source File: JobFactory.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the next job from the trace that Gridmix can run. The following
 * jobs are filtered out because Gridmix does not yet support them:
 * unsuccessful jobs, jobs with a missing submit-time, and reduce-only
 * jobs. Rejections are logged at debug level with the reasons.
 *
 * @return the next acceptable job wrapped in a {@code FilterJobStory},
 *         or {@code null} once the trace is exhausted
 * @throws IOException if reading the trace fails
 */
protected JobStory getNextJobFiltered() throws IOException {
  JobStory job = getNextJobFromTrace();
  while (job != null) {
    final boolean badState =
        job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS;
    final boolean badSubmitTime = job.getSubmissionTime() < 0;
    final boolean zeroMaps = job.getNumberMaps() == 0;
    if (!badState && !badSubmitTime && !zeroMaps) {
      break;  // this job is usable
    }
    if (LOG.isDebugEnabled()) {
      List<String> reason = new ArrayList<String>();
      if (badState) {
        reason.add("STATE (" + job.getOutcome().name() + ")");
      }
      if (badSubmitTime) {
        reason.add("SUBMISSION-TIME (" + job.getSubmissionTime() + ")");
      }
      if (zeroMaps) {
        reason.add("ZERO-MAPS-JOB");
      }

      // TODO This should never happen. Probably we missed something!
      if (reason.isEmpty()) {
        reason.add("N/A");
      }

      LOG.debug("Ignoring job " + job.getJobID() + " from the input trace."
                + " Reason: " + StringUtils.join(reason, ","));
    }
    job = getNextJobFromTrace();
  }
  if (job == null) {
    return null;
  }
  return new FilterJobStory(job) {
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      TaskInfo info = this.job.getTaskInfo(taskType, taskNumber);
      if (info == null) {
        // No data in the trace for this task; fall back to all zeroes.
        info = new MinTaskInfo(new TaskInfo(0, 0, 0, 0, 0));
      } else {
        info = new MinTaskInfo(info);
      }
      return info;
    }
  };
}
 
Example #10
Source File: SimulatorJobStory.java    From RDFS with Apache License 2.0 4 votes vote down vote up
/** Delegates to the wrapped job's recorded outcome. */
@Override
public Pre21JobHistoryConstants.Values getOutcome() {
  return job.getOutcome();
}
 
Example #11
Source File: TestSimulatorJobClient.java    From RDFS with Apache License 2.0 4 votes vote down vote up
/** Test stub: always reports a SUCCESS outcome. */
@Override
public Pre21JobHistoryConstants.Values getOutcome() {
  return Pre21JobHistoryConstants.Values.SUCCESS;
}
 
Example #12
Source File: FakeJobs.java    From RDFS with Apache License 2.0 4 votes vote down vote up
/** Fake job used in tests: always reports a SUCCESS outcome. */
@Override
public Pre21JobHistoryConstants.Values getOutcome() {
  return Pre21JobHistoryConstants.Values.SUCCESS;
}