Java Code Examples for org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values#toString()
The following examples show how to use org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values#toString(). Each example is drawn from a real open-source project; the source file, originating project, and license are noted above each listing.
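Before the project listings, here is a minimal, self-contained sketch of the call itself. It assumes only what the examples below already show: Values is an enum (with constants such as MAP, REDUCE, and SUCCESS) that is visible from your code, so toString() behaves as the default Enum#toString() and returns the constant's name. The class ValuesToStringDemo is a made-up name for illustration.

import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;

public class ValuesToStringDemo {
  public static void main(String[] args) {
    // Values is a plain enum, so toString() yields the constant's name.
    Values type = Values.MAP;
    // The examples below use exactly this pattern to build an error message.
    System.out.println("task type = " + type.toString()); // prints: task type = MAP
  }
}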
Example 1
Source File: ZombieJob.java (from hadoop, Apache License 2.0)
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  if (loggedTask == null) {
    return new TaskInfo(0, 0, 0, 0, 0);
  }
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  long inputBytes = -1;
  long inputRecords = -1;
  long outputBytes = -1;
  long outputRecords = -1;
  long heapMegabytes = -1;
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  Values type = loggedTask.getTaskType();
  if ((type != Values.MAP) && (type != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + type.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt attempt : attempts) {
    attempt = sanitizeLoggedTaskAttempt(attempt);
    // ignore bad attempts or unsuccessful attempts.
    if ((attempt == null) || (attempt.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (type == Values.MAP) {
      inputBytes = attempt.getHdfsBytesRead();
      inputRecords = attempt.getMapInputRecords();
      outputBytes =
          (job.getTotalReduces() > 0) ? attempt.getMapOutputBytes()
              : attempt.getHdfsBytesWritten();
      outputRecords = attempt.getMapOutputRecords();
      heapMegabytes =
          (job.getJobMapMB() > 0) ? job.getJobMapMB()
              : job.getHeapMegabytes();
    } else {
      inputBytes = attempt.getReduceShuffleBytes();
      inputRecords = attempt.getReduceInputRecords();
      outputBytes = attempt.getHdfsBytesWritten();
      outputRecords = attempt.getReduceOutputRecords();
      heapMegabytes =
          (job.getJobReduceMB() > 0) ? job.getJobReduceMB()
              : job.getHeapMegabytes();
    }
    // set the resource usage metrics
    metrics = attempt.getResourceUsageMetrics();
    break;
  }
  TaskInfo taskInfo =
      new TaskInfo(inputBytes, (int) inputRecords, outputBytes,
          (int) outputRecords, (int) heapMegabytes, metrics);
  return taskInfo;
}
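In this example, type.toString() appears only in the IllegalArgumentException message: when the logged task is neither a MAP nor a REDUCE, the enum constant's name is embedded in the error text alongside the task ID. The successful-attempt loop that follows never calls toString() itself; it compares the enum constants directly.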
Example 2
Source File: ZombieJob.java (from big-c, Apache License 2.0)
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  if (loggedTask == null) {
    return new TaskInfo(0, 0, 0, 0, 0);
  }
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  long inputBytes = -1;
  long inputRecords = -1;
  long outputBytes = -1;
  long outputRecords = -1;
  long heapMegabytes = -1;
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  Values type = loggedTask.getTaskType();
  if ((type != Values.MAP) && (type != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + type.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt attempt : attempts) {
    attempt = sanitizeLoggedTaskAttempt(attempt);
    // ignore bad attempts or unsuccessful attempts.
    if ((attempt == null) || (attempt.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (type == Values.MAP) {
      inputBytes = attempt.getHdfsBytesRead();
      inputRecords = attempt.getMapInputRecords();
      outputBytes =
          (job.getTotalReduces() > 0) ? attempt.getMapOutputBytes()
              : attempt.getHdfsBytesWritten();
      outputRecords = attempt.getMapOutputRecords();
      heapMegabytes =
          (job.getJobMapMB() > 0) ? job.getJobMapMB()
              : job.getHeapMegabytes();
    } else {
      inputBytes = attempt.getReduceShuffleBytes();
      inputRecords = attempt.getReduceInputRecords();
      outputBytes = attempt.getHdfsBytesWritten();
      outputRecords = attempt.getReduceOutputRecords();
      heapMegabytes =
          (job.getJobReduceMB() > 0) ? job.getJobReduceMB()
              : job.getHeapMegabytes();
    }
    // set the resource usage metrics
    metrics = attempt.getResourceUsageMetrics();
    break;
  }
  TaskInfo taskInfo =
      new TaskInfo(inputBytes, (int) inputRecords, outputBytes,
          (int) outputRecords, (int) heapMegabytes, metrics);
  return taskInfo;
}
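Example 2 is the same implementation as Example 1: the big-c project carries an identical copy of ZombieJob.java, including the same use of type.toString() in the exception message.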
Example 3
Source File: ZombieJob.java (from RDFS, Apache License 2.0)
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  long inputBytes = -1;
  long inputRecords = -1;
  long outputBytes = -1;
  long outputRecords = -1;
  long heapMegabytes = -1;
  Values type = loggedTask.getTaskType();
  if ((type != Values.MAP) && (type != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + type.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt attempt : attempts) {
    attempt = sanitizeLoggedTaskAttempt(attempt);
    // ignore bad attempts or unsuccessful attempts.
    if ((attempt == null) || (attempt.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (type == Values.MAP) {
      inputBytes = attempt.getHdfsBytesRead();
      inputRecords = attempt.getMapInputRecords();
      outputBytes =
          (job.getTotalReduces() > 0) ? attempt.getMapOutputBytes()
              : attempt.getHdfsBytesWritten();
      outputRecords = attempt.getMapOutputRecords();
      heapMegabytes =
          (job.getJobMapMB() > 0) ? job.getJobMapMB()
              : job.getHeapMegabytes();
    } else {
      inputBytes = attempt.getReduceShuffleBytes();
      inputRecords = attempt.getReduceInputRecords();
      outputBytes = attempt.getHdfsBytesWritten();
      outputRecords = attempt.getReduceOutputRecords();
      heapMegabytes =
          (job.getJobReduceMB() > 0) ? job.getJobReduceMB()
              : job.getHeapMegabytes();
    }
    break;
  }
  TaskInfo taskInfo =
      new TaskInfo(inputBytes, (int) inputRecords, outputBytes,
          (int) outputRecords, (int) heapMegabytes);
  return taskInfo;
}
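This older RDFS variant omits the null check on loggedTask and does not collect ResourceUsageMetrics, so it finishes with the five-argument TaskInfo constructor instead of the six-argument one; the type.toString() call in the exception message is unchanged from the other two examples.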