Java Code Examples for org.apache.hadoop.conf.Configuration#IntegerRanges

The following examples show how to use org.apache.hadoop.conf.Configuration#IntegerRanges. These examples are extracted from open source projects; the source project, file, and license are noted with each example.
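Before looking at the examples, the sketch below shows the basic IntegerRanges API: the class parses a comma-separated range expression (single ids, inclusive ranges, and open-ended ranges such as "7-") and answers membership queries via isIncluded. This is a minimal, self-contained illustration; the range string "0-2,5,7-" is chosen only for demonstration.

import org.apache.hadoop.conf.Configuration;

public class IntegerRangesDemo {
  public static void main(String[] args) {
    // Parse a range expression: single ids and inclusive ranges, comma-separated.
    // A range with no upper bound ("7-") has no effective upper limit.
    Configuration.IntegerRanges ranges = new Configuration.IntegerRanges("0-2,5,7-");

    System.out.println(ranges.isIncluded(1));  // true: falls inside 0-2
    System.out.println(ranges.isIncluded(4));  // false: not listed
    System.out.println(ranges.isIncluded(9));  // true: inside the open-ended 7- range
  }
}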
Example 1
Source Project: hadoop    File: JobConf.java    License: Apache License 2.0
/**
 * Set the ranges of maps or reduces to profile. setProfileEnabled(true) 
 * must also be called.
 * @param newValue a set of integer ranges of the map ids
 */
public void setProfileTaskRange(boolean isMap, String newValue) {
  // parse the value to make sure it is legal
  new Configuration.IntegerRanges(newValue);
  set((isMap ? JobContext.NUM_MAP_PROFILES : JobContext.NUM_REDUCE_PROFILES), 
        newValue);
}
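
As the Javadoc above notes, setProfileEnabled(true) must be called as well, since setProfileTaskRange only validates and stores the range string. A minimal usage sketch (the range values are illustrative):

JobConf conf = new JobConf();
conf.setProfileEnabled(true);             // profiling is off unless enabled explicitly
conf.setProfileTaskRange(true, "0-2");    // profile map tasks 0 through 2
conf.setProfileTaskRange(false, "0");     // profile only the first reduce task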
 
Example 2
Source Project: big-c    File: JobConf.java    License: Apache License 2.0
/**
 * Set the ranges of maps or reduces to profile. setProfileEnabled(true) 
 * must also be called.
 * @param newValue a set of integer ranges of the map ids
 */
public void setProfileTaskRange(boolean isMap, String newValue) {
  // parse the value to make sure it is legal
  new Configuration.IntegerRanges(newValue);
  set((isMap ? JobContext.NUM_MAP_PROFILES : JobContext.NUM_REDUCE_PROFILES), 
        newValue);
}
 
Example 3
Source Project: RDFS    File: JobConf.java    License: Apache License 2.0
/**
 * Set the ranges of maps or reduces to profile. setProfileEnabled(true)
 * must also be called.
 * @param newValue a set of integer ranges of the map ids
 */
public void setProfileTaskRange(boolean isMap, String newValue) {
  // parse the value to make sure it is legal
  new Configuration.IntegerRanges(newValue);
  set((isMap ? "mapred.task.profile.maps" : "mapred.task.profile.reduces"),
      newValue);
}
 
Example 4
Source Project: hadoop-gpu    File: JobConf.java    License: Apache License 2.0
/**
 * Set the ranges of maps or reduces to profile. setProfileEnabled(true) 
 * must also be called.
 * @param newValue a set of integer ranges of the map ids
 */
public void setProfileTaskRange(boolean isMap, String newValue) {
  // parse the value to make sure it is legal
  new Configuration.IntegerRanges(newValue);
  set((isMap ? "mapred.task.profile.maps" : "mapred.task.profile.reduces"), 
      newValue);
}
 
Example 5
Source Project: datawave    File: CounterStatsDClient.java    License: Apache License 2.0
@Override
public Configuration.IntegerRanges getProfileTaskRange(boolean isMap) {
    return delegate.getProfileTaskRange(isMap);
}
 
Example 6
@Override
public Configuration.IntegerRanges getProfileTaskRange(boolean isMap) {
    return null;
}
 
Example 7
Source Project: Cubert    File: TestContext.java    License: Apache License 2.0
@Override
public Configuration.IntegerRanges getProfileTaskRange(boolean isMap)
{
    return null;
}
 
Example 8
Source Project: hbase    File: ReadOnlyConfiguration.java    License: Apache License 2.0
@Override
public Configuration.IntegerRanges getRange(String name, String defaultValue) {
  return conf.getRange(name, defaultValue);
}
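
Configuration.getRange is the usual way to read such a property back: it parses the named property into an IntegerRanges, falling back to the supplied default when the property is unset. A short sketch, using a made-up property name purely for illustration:

Configuration conf = new Configuration();
conf.set("example.task.ids", "1-3,8");    // hypothetical property name
Configuration.IntegerRanges ids = conf.getRange("example.task.ids", "0-");
boolean included = ids.isIncluded(2);     // true: 2 falls inside 1-3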
 
Example 9
Source Project: RDFS    File: JobClient.java    License: Apache License 2.0
/**
 * Monitor a job and print status in real-time as progress is made and tasks
 * fail.
 * @param conf the job's configuration
 * @param job the job to track
 * @return true if the job succeeded
 * @throws IOException if communication to the JobTracker fails
 */
public boolean monitorAndPrintJob(JobConf conf,
                                  RunningJob job
) throws IOException, InterruptedException {
  String lastReport = null;
  TaskStatusFilter filter;
  filter = getTaskOutputFilter(conf);
  JobID jobId = job.getID();
  LOG.info("Running job: " + jobId);
  int eventCounter = 0;
  boolean profiling = conf.getProfileEnabled();
  Configuration.IntegerRanges mapRanges = conf.getProfileTaskRange(true);
  Configuration.IntegerRanges reduceRanges = conf.getProfileTaskRange(false);

  while (!job.isComplete()) {
    Thread.sleep(MAX_JOBPROFILE_AGE);
    String report =
      (" map " + StringUtils.formatPercent(job.mapProgress(), 0)+
          " reduce " +
          StringUtils.formatPercent(job.reduceProgress(), 0));
    if (!report.equals(lastReport)) {
      LOG.info(report);
      lastReport = report;
    }

    TaskCompletionEvent[] events =
      job.getTaskCompletionEvents(eventCounter);
    eventCounter += events.length;
    for(TaskCompletionEvent event : events){
      TaskCompletionEvent.Status status = event.getTaskStatus();
      if (profiling &&
          (status == TaskCompletionEvent.Status.SUCCEEDED ||
              status == TaskCompletionEvent.Status.FAILED) &&
              (event.isMap ? mapRanges : reduceRanges).
              isIncluded(event.idWithinJob())) {
        downloadProfile(event);
      }
      switch(filter){
      case NONE:
        break;
      case SUCCEEDED:
        if (event.getTaskStatus() ==
          TaskCompletionEvent.Status.SUCCEEDED){
          LOG.info(event.toString());
          displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        }
        break;
      case FAILED:
        if (event.getTaskStatus() ==
          TaskCompletionEvent.Status.FAILED){
          LOG.info(event.toString());
          // Displaying the task diagnostic information
          TaskAttemptID taskId = event.getTaskAttemptId();
          String[] taskDiagnostics =
            jobSubmitClient.getTaskDiagnostics(taskId);
          if (taskDiagnostics != null) {
            for(String diagnostics : taskDiagnostics){
              System.err.println(diagnostics);
            }
          }
          // Displaying the task logs
          displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        }
        break;
      case KILLED:
        if (event.getTaskStatus() == TaskCompletionEvent.Status.KILLED){
          LOG.info(event.toString());
        }
        break;
      case ALL:
        LOG.info(event.toString());
        displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        break;
      }
    }
  }
  LOG.info("Job complete: " + jobId);
  Counters counters = job.getCounters();
  if (counters != null) {
    counters.log(LOG);
  }
  return job.isSuccessful();
}
 
Example 10
Source Project: hadoop-gpu    File: JobClient.java    License: Apache License 2.0
/**
 * Monitor a job and print status in real-time as progress is made and tasks 
 * fail.
 * @param conf the job's configuration
 * @param job the job to track
 * @return true if the job succeeded
 * @throws IOException if communication to the JobTracker fails
 */
public boolean monitorAndPrintJob(JobConf conf, 
                                  RunningJob job
) throws IOException, InterruptedException {
  String lastReport = null;
  TaskStatusFilter filter;
  filter = getTaskOutputFilter(conf);
  JobID jobId = job.getID();
  LOG.info("Running job: " + jobId);
  int eventCounter = 0;
  boolean profiling = conf.getProfileEnabled();
  Configuration.IntegerRanges mapRanges = conf.getProfileTaskRange(true);
  Configuration.IntegerRanges reduceRanges = conf.getProfileTaskRange(false);

  while (!job.isComplete()) {
    Thread.sleep(1000);
    String report = 
      (" map " + StringUtils.formatPercent(job.mapProgress(), 0)+
          " reduce " + 
          StringUtils.formatPercent(job.reduceProgress(), 0));
    if (!report.equals(lastReport)) {
      LOG.info(report);
      lastReport = report;
    }

    TaskCompletionEvent[] events = 
      job.getTaskCompletionEvents(eventCounter); 
    eventCounter += events.length;
    for(TaskCompletionEvent event : events){
      TaskCompletionEvent.Status status = event.getTaskStatus();
      if (profiling && 
          (status == TaskCompletionEvent.Status.SUCCEEDED ||
              status == TaskCompletionEvent.Status.FAILED) &&
              (event.isMap ? mapRanges : reduceRanges).
              isIncluded(event.idWithinJob())) {
        downloadProfile(event);
      }
      switch(filter){
      case NONE:
        break;
      case SUCCEEDED:
        if (event.getTaskStatus() == 
          TaskCompletionEvent.Status.SUCCEEDED){
          LOG.info(event.toString());
          displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        }
        break; 
      case FAILED:
        if (event.getTaskStatus() == 
          TaskCompletionEvent.Status.FAILED){
          LOG.info(event.toString());
          // Displaying the task diagnostic information
          TaskAttemptID taskId = event.getTaskAttemptId();
          String[] taskDiagnostics = 
            jobSubmitClient.getTaskDiagnostics(taskId); 
          if (taskDiagnostics != null) {
            for(String diagnostics : taskDiagnostics){
              System.err.println(diagnostics);
            }
          }
          // Displaying the task logs
          displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        }
        break; 
      case KILLED:
        if (event.getTaskStatus() == TaskCompletionEvent.Status.KILLED){
          LOG.info(event.toString());
        }
        break; 
      case ALL:
        LOG.info(event.toString());
        displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        break;
      }
    }
  }
  LOG.info("Job complete: " + jobId);
  job.getCounters().log(LOG);
  return job.isSuccessful();
}