Java Code Examples for org.apache.hadoop.mapreduce.v2.util.MRApps#toTaskID()
The following examples show how to use
org.apache.hadoop.mapreduce.v2.util.MRApps#toTaskID().
You can vote up the examples you like or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
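Before the examples, here is a minimal, self-contained sketch of the call itself. It is illustrative only: the task-id string and the ToTaskIdDemo class name are placeholders rather than code from any of the projects below, and the sketch assumes the standard task_<cluster-timestamp>_<job-seq>_<m|r>_<task-seq> id format.

import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class ToTaskIdDemo {
  public static void main(String[] args) {
    // Illustrative task id string (made up for this sketch).
    String tid = "task_1234567890123_0001_m_000003";

    // Parse the string form into the YARN-layer TaskId record.
    TaskId taskId = MRApps.toTaskID(tid);

    // The parsed record exposes the enclosing job, the task type,
    // and the task sequence number.
    System.out.println("job id:    " + taskId.getJobId());
    System.out.println("task type: " + taskId.getTaskType()); // MAP or REDUCE
    System.out.println("task seq:  " + taskId.getId());
  }
}

A malformed id string is expected to fail parsing, which is why the web-service examples below check the result before handing it to Job#getTask().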
Example 1
Source File: TestAMWebApp.java From hadoop with Apache License 2.0 | 6 votes
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");

  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);

  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
                       appContext, params);
}
Example 2
Source File: HsWebServices.java From hadoop with Apache License 2.0 | 6 votes
@GET
@Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobTaskCounterInfo getSingleTaskCounters(
    @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
    @PathParam("taskid") String tid) {

  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  checkAccess(job, hsr);
  TaskId taskID = MRApps.toTaskID(tid);
  if (taskID == null) {
    throw new NotFoundException("taskid " + tid + " not found or invalid");
  }
  Task task = job.getTask(taskID);
  if (task == null) {
    throw new NotFoundException("task not found with id " + tid);
  }
  return new JobTaskCounterInfo(task);
}
Example 3
Source File: TestAMWebApp.java From big-c with Apache License 2.0 | 6 votes
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");

  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);

  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
                       appContext, params);
}
Example 4
Source File: HsWebServices.java From big-c with Apache License 2.0 | 6 votes
@GET
@Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobTaskCounterInfo getSingleTaskCounters(
    @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
    @PathParam("taskid") String tid) {

  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  checkAccess(job, hsr);
  TaskId taskID = MRApps.toTaskID(tid);
  if (taskID == null) {
    throw new NotFoundException("taskid " + tid + " not found or invalid");
  }
  Task task = job.getTask(taskID);
  if (task == null) {
    throw new NotFoundException("task not found with id " + tid);
  }
  return new JobTaskCounterInfo(task);
}
Example 5
Source File: AppController.java From hadoop with Apache License 2.0 | 5 votes
/**
 * Ensure that a TASK_ID was passed into the page.
 */
public void requireTask() {
  if ($(TASK_ID).isEmpty()) {
    badRequest("missing task ID");
    throw new RuntimeException("missing task ID");
  }

  TaskId taskID = MRApps.toTaskID($(TASK_ID));
  Job job = app.context.getJob(taskID.getJobId());
  app.setJob(job);
  if (app.getJob() == null) {
    notFound(MRApps.toString(taskID.getJobId()));
    throw new RuntimeException("Not Found: " + $(JOB_ID));
  } else {
    app.setTask(app.getJob().getTask(taskID));
    if (app.getTask() == null) {
      notFound($(TASK_ID));
      throw new RuntimeException("Not Found: " + $(TASK_ID));
    }
  }
  if (!checkAccess(job)) {
    accessDenied("User " + request().getRemoteUser() + " does not have " +
        " permission to view job " + $(JOB_ID));
    throw new RuntimeException("Access denied: User " +
        request().getRemoteUser() + " does not have permission to view job " +
        $(JOB_ID));
  }
}
Example 6
Source File: HsTaskPage.java From hadoop with Apache License 2.0 | 5 votes
/**
 * @return The end of the JS map that is the jquery datatable config for the
 * attempts table.
 */
private String attemptsTableInit() {
  TaskType type = null;
  String symbol = $(TASK_TYPE);
  if (!symbol.isEmpty()) {
    type = MRApps.taskType(symbol);
  } else {
    TaskId taskID = MRApps.toTaskID($(TASK_ID));
    type = taskID.getTaskType();
  }
  StringBuilder b = tableInit()
      .append(", 'aaData': attemptsTableData")
      .append(", bDeferRender: true")
      .append(", bProcessing: true")
      .append("\n,aoColumnDefs:[\n")
      //logs column should not filterable (it includes container ID which may pollute searches)
      .append("\n{'aTargets': [ 4 ]")
      .append(", 'bSearchable': false }")
      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
      .append(", 'mRender': parseHadoopAttemptID }")
      .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
      //Column numbers are different for maps and reduces
      .append(type == TaskType.REDUCE ? ", 7, 8" : "")
      .append(" ], 'mRender': renderHadoopDate }")
      .append("\n, {'sType':'numeric', 'aTargets': [")
      .append(type == TaskType.REDUCE ? "9, 10, 11, 12" : "7")
      .append(" ], 'mRender': renderHadoopElapsedTime }]")
      // Sort by id upon page load
      .append("\n, aaSorting: [[0, 'asc']]")
      .append("}");
  return b.toString();
}
Example 7
Source File: AppController.java From big-c with Apache License 2.0 | 5 votes
/**
 * Ensure that a TASK_ID was passed into the page.
 */
public void requireTask() {
  if ($(TASK_ID).isEmpty()) {
    badRequest("missing task ID");
    throw new RuntimeException("missing task ID");
  }

  TaskId taskID = MRApps.toTaskID($(TASK_ID));
  Job job = app.context.getJob(taskID.getJobId());
  app.setJob(job);
  if (app.getJob() == null) {
    notFound(MRApps.toString(taskID.getJobId()));
    throw new RuntimeException("Not Found: " + $(JOB_ID));
  } else {
    app.setTask(app.getJob().getTask(taskID));
    if (app.getTask() == null) {
      notFound($(TASK_ID));
      throw new RuntimeException("Not Found: " + $(TASK_ID));
    }
  }
  if (!checkAccess(job)) {
    accessDenied("User " + request().getRemoteUser() + " does not have " +
        " permission to view job " + $(JOB_ID));
    throw new RuntimeException("Access denied: User " +
        request().getRemoteUser() + " does not have permission to view job " +
        $(JOB_ID));
  }
}
Example 8
Source File: HsTaskPage.java From big-c with Apache License 2.0 | 5 votes
/**
 * @return The end of the JS map that is the jquery datatable config for the
 * attempts table.
 */
private String attemptsTableInit() {
  TaskType type = null;
  String symbol = $(TASK_TYPE);
  if (!symbol.isEmpty()) {
    type = MRApps.taskType(symbol);
  } else {
    TaskId taskID = MRApps.toTaskID($(TASK_ID));
    type = taskID.getTaskType();
  }
  StringBuilder b = tableInit()
      .append(", 'aaData': attemptsTableData")
      .append(", bDeferRender: true")
      .append(", bProcessing: true")
      .append("\n,aoColumnDefs:[\n")
      //logs column should not filterable (it includes container ID which may pollute searches)
      .append("\n{'aTargets': [ 4 ]")
      .append(", 'bSearchable': false }")
      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
      .append(", 'mRender': parseHadoopAttemptID }")
      .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
      //Column numbers are different for maps and reduces
      .append(type == TaskType.REDUCE ? ", 7, 8" : "")
      .append(" ], 'mRender': renderHadoopDate }")
      .append("\n, {'sType':'numeric', 'aTargets': [")
      .append(type == TaskType.REDUCE ? "9, 10, 11, 12" : "7")
      .append(" ], 'mRender': renderHadoopElapsedTime }]")
      // Sort by id upon page load
      .append("\n, aaSorting: [[0, 'asc']]")
      .append("}");
  return b.toString();
}