org.apache.hadoop.util.StringInterner Java Examples

The following examples show how to use org.apache.hadoop.util.StringInterner. Each example is taken from an open-source project; the source file, originating project, and license are noted in the header above each snippet.
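Before the project-specific examples, here is a minimal, self-contained sketch of the usage pattern they all share: passing a frequently repeated string (host names, status strings, job metadata read back from the wire) through StringInterner.weakIntern(...) so that equal values deserialized many times resolve to one canonical String instance held in a weak-reference cache. The demo class and sample values below are illustrative assumptions, not taken from any of the projects listed; only the StringInterner import and the weakIntern(String) call are the real Hadoop API.

import org.apache.hadoop.util.StringInterner;

public class StringInternerDemo {
  public static void main(String[] args) {
    // Two equal but distinct String objects, as would result from
    // deserializing the same host name out of many task reports.
    String host1 = new String("worker-node-17.example.com");
    String host2 = new String("worker-node-17.example.com");
    System.out.println(host1 == host2);          // false: different objects

    // weakIntern returns a canonical instance from a weak-reference cache,
    // so equal inputs share one object without being pinned in memory.
    String interned1 = StringInterner.weakIntern(host1);
    String interned2 = StringInterner.weakIntern(host2);
    System.out.println(interned1 == interned2);  // true: same shared instance
    System.out.println(interned1.equals(host1)); // true: value is unchanged
  }
}

This is why, in the snippets that follow, weakIntern appears almost exclusively inside readFields(DataInput) methods and event handlers: those paths re-create the same small set of strings once per task or attempt, and interning them keeps heap usage flat on large jobs.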
Example #1
Source File: TaskAttemptListenerImpl.java    From big-c with Apache License 2.0
@Override
public void reportDiagnosticInfo(TaskAttemptID taskAttemptID, String diagnosticInfo)
    throws IOException {
  diagnosticInfo = StringInterner.weakIntern(diagnosticInfo);
  LOG.info("Diagnostics report from " + taskAttemptID.toString() + ": "
      + diagnosticInfo);

  org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
      TypeConverter.toYarn(taskAttemptID);
  taskHeartbeatHandler.progressing(attemptID);

  // This is mainly used for cases where we want to propagate exception traces
  // of tasks that fail.

  // This call exists as a hadoop mapreduce legacy wherein all changes in
  // counters/progress/phase/output-size are reported through statusUpdate()
  // call but not diagnosticInformation.
  context.getEventHandler().handle(
      new TaskAttemptDiagnosticsUpdateEvent(attemptID, diagnosticInfo));
}
 
Example #2
Source File: TezYARNUtils.java    From incubator-tez with Apache License 2.0
public static String getFrameworkClasspath(Configuration conf) {
  Map<String, String> environment = new HashMap<String, String>();

  TezYARNUtils.addToEnvironment(environment,
      Environment.CLASSPATH.name(),
      Environment.PWD.$(),
      File.pathSeparator);

  TezYARNUtils.addToEnvironment(environment,
      Environment.CLASSPATH.name(),
      Environment.PWD.$() + File.separator + "*",
      File.pathSeparator);

  // Add YARN/COMMON/HDFS jars and conf locations to path
  for (String c : conf.getStrings(
      YarnConfiguration.YARN_APPLICATION_CLASSPATH,
      YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
    TezYARNUtils.addToEnvironment(environment, Environment.CLASSPATH.name(),
        c.trim(), File.pathSeparator);
  }
  return StringInterner.weakIntern(environment.get(Environment.CLASSPATH.name()));
}
 
Example #3
Source File: JobHistoryParser.java    From big-c with Apache License 2.0
private void handleReduceAttemptFinishedEvent(ReduceAttemptFinishedEvent event) {
  TaskInfo taskInfo = info.tasksMap.get(event.getTaskId());
  TaskAttemptInfo attemptInfo = 
    taskInfo.attemptsMap.get(event.getAttemptId());
  attemptInfo.finishTime = event.getFinishTime();
  attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
  attemptInfo.state = StringInterner.weakIntern(event.getState());
  attemptInfo.shuffleFinishTime = event.getShuffleFinishTime();
  attemptInfo.sortFinishTime = event.getSortFinishTime();
  attemptInfo.counters = event.getCounters();
  attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
  attemptInfo.port = event.getPort();
  attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
  info.completedTaskAttemptsMap.put(event.getAttemptId(), attemptInfo);
}
 
Example #4
Source File: TaskAttemptImpl.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt, 
    TaskAttemptEvent event) {
  // unregister it to TaskAttemptListener so that it stops listening
  // for it
  taskAttempt.taskAttemptListener.unregister(
      taskAttempt.attemptId, taskAttempt.jvmID);

  if (event instanceof TaskAttemptKillEvent) {
    taskAttempt.addDiagnosticInfo(
        ((TaskAttemptKillEvent) event).getMessage());
  }

  taskAttempt.reportedStatus.progress = 1.0f;
  taskAttempt.updateProgressSplits();
  //send the cleanup event to containerLauncher
  taskAttempt.eventHandler.handle(new ContainerLauncherEvent(
      taskAttempt.attemptId, 
      taskAttempt.container.getId(), StringInterner
          .weakIntern(taskAttempt.container.getNodeId().toString()),
      taskAttempt.container.getContainerToken(),
      ContainerLauncher.EventType.CONTAINER_REMOTE_CLEANUP));
}
 
Example #5
Source File: TaskAttemptListenerImpl.java    From hadoop with Apache License 2.0
@Override
public void reportDiagnosticInfo(TaskAttemptID taskAttemptID, String diagnosticInfo)
    throws IOException {
  diagnosticInfo = StringInterner.weakIntern(diagnosticInfo);
  LOG.info("Diagnostics report from " + taskAttemptID.toString() + ": "
      + diagnosticInfo);

  org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
      TypeConverter.toYarn(taskAttemptID);
  taskHeartbeatHandler.progressing(attemptID);

  // This is mainly used for cases where we want to propagate exception traces
  // of tasks that fail.

  // This call exists as a hadoop mapreduce legacy wherein all changes in
  // counters/progress/phase/output-size are reported through statusUpdate()
  // call but not diagnosticInformation.
  context.getEventHandler().handle(
      new TaskAttemptDiagnosticsUpdateEvent(attemptID, diagnosticInfo));
}
 
Example #6
Source File: ShuffleInputEventHandlerOrderedGrouped.java    From tez with Apache License 2.0
/**
 * Helper method to create InputAttemptIdentifier
 *
 * @param targetIndex
 * @param targetIndexCount
 * @param version
 * @param shufflePayload
 * @return CompositeInputAttemptIdentifier
 */
private CompositeInputAttemptIdentifier constructInputAttemptIdentifier(int targetIndex, int targetIndexCount, int version,
                                                                        DataMovementEventPayloadProto shufflePayload) {
  String pathComponent = (shufflePayload.hasPathComponent()) ? StringInterner.weakIntern(shufflePayload.getPathComponent()) : null;
  int spillEventId = shufflePayload.getSpillId();
  CompositeInputAttemptIdentifier srcAttemptIdentifier = null;
  if (shufflePayload.hasSpillId()) {
    boolean lastEvent = shufflePayload.getLastEvent();
    InputAttemptIdentifier.SPILL_INFO info = (lastEvent) ? InputAttemptIdentifier.SPILL_INFO
        .FINAL_UPDATE : InputAttemptIdentifier.SPILL_INFO.INCREMENTAL_UPDATE;
    srcAttemptIdentifier =
        new CompositeInputAttemptIdentifier(targetIndex, version, pathComponent, false, info, spillEventId, targetIndexCount);
  } else {
    srcAttemptIdentifier =
        new CompositeInputAttemptIdentifier(targetIndex, version, pathComponent, targetIndexCount);
  }
  return srcAttemptIdentifier;
}
 
Example #7
Source File: TaskAttemptImpl.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
private void sendLaunchedEvents() {
  JobCounterUpdateEvent jce = new JobCounterUpdateEvent(attemptId.getTaskId()
      .getJobId());
  jce.addCounterUpdate(attemptId.getTaskId().getTaskType() == TaskType.MAP ?
      JobCounter.TOTAL_LAUNCHED_MAPS : JobCounter.TOTAL_LAUNCHED_REDUCES, 1);
  eventHandler.handle(jce);

  LOG.info("TaskAttempt: [" + attemptId
      + "] using containerId: [" + container.getId() + " on NM: ["
      + StringInterner.weakIntern(container.getNodeId().toString()) + "]");
  TaskAttemptStartedEvent tase =
    new TaskAttemptStartedEvent(TypeConverter.fromYarn(attemptId),
        TypeConverter.fromYarn(attemptId.getTaskId().getTaskType()),
        launchTime, trackerName, httpPort, shufflePort, container.getId(),
        locality.toString(), avataar.toString());
  eventHandler.handle(
      new JobHistoryEvent(attemptId.getTaskId().getJobId(), tase));
}
 
Example #8
Source File: TaskStatus.java    From hadoop with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  this.taskid.readFields(in);
  setProgress(in.readFloat());
  this.numSlots = in.readInt();
  this.runState = WritableUtils.readEnum(in, State.class);
  setDiagnosticInfo(StringInterner.weakIntern(Text.readString(in)));
  setStateString(StringInterner.weakIntern(Text.readString(in)));
  this.phase = WritableUtils.readEnum(in, Phase.class); 
  this.startTime = in.readLong(); 
  this.finishTime = in.readLong(); 
  counters = new Counters();
  this.includeAllCounters = in.readBoolean();
  this.outputSize = in.readLong();
  counters.readFields(in);
  nextRecordRange.readFields(in);
}
 
Example #9
Source File: TaskSpec.java    From tez with Apache License 2.0
public TaskSpec(TezTaskAttemptID taskAttemptID,
    String dagName, String vertexName,
    int vertexParallelism,
    ProcessorDescriptor processorDescriptor,
    List<InputSpec> inputSpecList, List<OutputSpec> outputSpecList,
    @Nullable List<GroupInputSpec> groupInputSpecList, Configuration taskConf) {
  Objects.requireNonNull(taskAttemptID, "taskAttemptID is null");
  Objects.requireNonNull(dagName, "dagName is null");
  Objects.requireNonNull(vertexName, "vertexName is null");
  Objects.requireNonNull(processorDescriptor, "processorDescriptor is null");
  Objects.requireNonNull(inputSpecList, "inputSpecList is null");
  Objects.requireNonNull(outputSpecList, "outputSpecList is null");
  this.taskAttemptId = taskAttemptID;
  this.dagName = StringInterner.weakIntern(dagName);
  this.vertexName = StringInterner.weakIntern(vertexName);
  this.processorDescriptor = processorDescriptor;
  this.inputSpecList = inputSpecList;
  this.outputSpecList = outputSpecList;
  this.groupInputSpecList = groupInputSpecList;
  this.vertexParallelism = vertexParallelism;
  this.taskConf = taskConf;
}
 
Example #10
Source File: TaskReport.java    From hadoop with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  this.taskid.readFields(in);
  this.progress = in.readFloat();
  this.state = StringInterner.weakIntern(Text.readString(in));
  this.startTime = in.readLong(); 
  this.finishTime = in.readLong();
  
  diagnostics = WritableUtils.readStringArray(in);
  counters = new Counters();
  counters.readFields(in);
  currentStatus = WritableUtils.readEnum(in, TIPStatus.class);
  if (currentStatus == TIPStatus.RUNNING) {
    int num = WritableUtils.readVInt(in);    
    for (int i = 0; i < num; i++) {
      TaskAttemptID t = new TaskAttemptID();
      t.readFields(in);
      runningAttempts.add(t);
    }
  } else if (currentStatus == TIPStatus.COMPLETE) {
    successfulAttempt.readFields(in);
  }
}
 
Example #11
Source File: QueueInfo.java    From big-c with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  queueName = StringInterner.weakIntern(Text.readString(in));
  queueState = WritableUtils.readEnum(in, QueueState.class);
  schedulingInfo = StringInterner.weakIntern(Text.readString(in));
  int length = in.readInt();
  stats = new JobStatus[length];
  for (int i = 0; i < length; i++) {
    stats[i] = new JobStatus();
    stats[i].readFields(in);
  }
  int count = in.readInt();
  children.clear();
  for (int i = 0; i < count; i++) {
    QueueInfo childQueueInfo = new QueueInfo();
    childQueueInfo.readFields(in);
    children.add(childQueueInfo);
  }
}
 
Example #12
Source File: JobHistoryParser.java    From hadoop with Apache License 2.0
private void handleReduceAttemptFinishedEvent(ReduceAttemptFinishedEvent event) {
  TaskInfo taskInfo = info.tasksMap.get(event.getTaskId());
  TaskAttemptInfo attemptInfo = 
    taskInfo.attemptsMap.get(event.getAttemptId());
  attemptInfo.finishTime = event.getFinishTime();
  attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
  attemptInfo.state = StringInterner.weakIntern(event.getState());
  attemptInfo.shuffleFinishTime = event.getShuffleFinishTime();
  attemptInfo.sortFinishTime = event.getSortFinishTime();
  attemptInfo.counters = event.getCounters();
  attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
  attemptInfo.port = event.getPort();
  attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
  info.completedTaskAttemptsMap.put(event.getAttemptId(), attemptInfo);
}
 
Example #13
Source File: TaskImpl.java    From big-c with Apache License 2.0
private void handleTaskAttemptCompletion(TaskAttemptId attemptId,
    TaskAttemptCompletionEventStatus status) {
  TaskAttempt attempt = attempts.get(attemptId);
  //raise the completion event only if the container is assigned
  // to nextAttemptNumber
  if (attempt.getNodeHttpAddress() != null) {
    TaskAttemptCompletionEvent tce = recordFactory
        .newRecordInstance(TaskAttemptCompletionEvent.class);
    tce.setEventId(-1);
    String scheme = (encryptedShuffle) ? "https://" : "http://";
    tce.setMapOutputServerAddress(StringInterner.weakIntern(scheme
       + attempt.getNodeHttpAddress().split(":")[0] + ":"
       + attempt.getShufflePort()));
    tce.setStatus(status);
    tce.setAttemptId(attempt.getID());
    int runTime = 0;
    if (attempt.getFinishTime() != 0 && attempt.getLaunchTime() !=0)
      runTime = (int)(attempt.getFinishTime() - attempt.getLaunchTime());
    tce.setAttemptRunTime(runTime);
    
    //raise the event to job so that it adds the completion event to its
    //data structures
    eventHandler.handle(new JobTaskAttemptCompletedEvent(tce));
  }
}
 
Example #14
Source File: TaskAttemptImpl.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt, 
    TaskAttemptEvent event) {
  // unregister it to TaskAttemptListener so that it stops listening
  // for it
  taskAttempt.taskAttemptListener.unregister(
      taskAttempt.attemptId, taskAttempt.jvmID);

  if (event instanceof TaskAttemptKillEvent) {
    taskAttempt.addDiagnosticInfo(
        ((TaskAttemptKillEvent) event).getMessage());
  }

  taskAttempt.reportedStatus.progress = 1.0f;
  taskAttempt.updateProgressSplits();
  //send the cleanup event to containerLauncher
  taskAttempt.eventHandler.handle(new ContainerLauncherEvent(
      taskAttempt.attemptId, 
      taskAttempt.container.getId(), StringInterner
          .weakIntern(taskAttempt.container.getNodeId().toString()),
      taskAttempt.container.getContainerToken(),
      ContainerLauncher.EventType.CONTAINER_REMOTE_CLEANUP));
}
 
Example #15
Source File: TaskAttemptImpl.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
private void sendLaunchedEvents() {
  JobCounterUpdateEvent jce = new JobCounterUpdateEvent(attemptId.getTaskId()
      .getJobId());
  jce.addCounterUpdate(attemptId.getTaskId().getTaskType() == TaskType.MAP ?
      JobCounter.TOTAL_LAUNCHED_MAPS : JobCounter.TOTAL_LAUNCHED_REDUCES, 1);
  eventHandler.handle(jce);

  LOG.info("TaskAttempt: [" + attemptId
      + "] using containerId: [" + container.getId() + " on NM: ["
      + StringInterner.weakIntern(container.getNodeId().toString()) + "]");
  TaskAttemptStartedEvent tase =
    new TaskAttemptStartedEvent(TypeConverter.fromYarn(attemptId),
        TypeConverter.fromYarn(attemptId.getTaskId().getTaskType()),
        launchTime, trackerName, httpPort, shufflePort, container.getId(),
        locality.toString(), avataar.toString());
  eventHandler.handle(
      new JobHistoryEvent(attemptId.getTaskId().getJobId(), tase));
}
 
Example #16
Source File: AbstractCounterGroup.java    From tez with Apache License 2.0
@Override
public synchronized void readFields(DataInput in) throws IOException {
  displayName = StringInterner.weakIntern(Text.readString(in));
  counters.clear();
  int size = WritableUtils.readVInt(in);
  for (int i = 0; i < size; i++) {
    T counter = newCounter();
    counter.readFields(in);
    counters.put(counter.getName(), counter);
    limits.incrCounters();
  }
}
 
Example #17
Source File: InputSpec.java    From incubator-tez with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  sourceVertexName = StringInterner.weakIntern(in.readUTF());
  physicalEdgeCount = in.readInt();
  inputDescriptor = new InputDescriptor();
  inputDescriptor.readFields(in);
}
 
Example #18
Source File: JobHistoryParser.java    From big-c with Apache License 2.0
private void handleMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
  TaskInfo taskInfo = info.tasksMap.get(event.getTaskId());
  TaskAttemptInfo attemptInfo = 
    taskInfo.attemptsMap.get(event.getAttemptId());
  attemptInfo.finishTime = event.getFinishTime();
  attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
  attemptInfo.state = StringInterner.weakIntern(event.getState());
  attemptInfo.mapFinishTime = event.getMapFinishTime();
  attemptInfo.counters = event.getCounters();
  attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
  attemptInfo.port = event.getPort();
  attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
  info.completedTaskAttemptsMap.put(event.getAttemptId(), attemptInfo);
}
 
Example #19
Source File: JobHistoryParser.java    From big-c with Apache License 2.0
private void handleJobFailedEvent(JobUnsuccessfulCompletionEvent event) {
  info.finishTime = event.getFinishTime();
  info.finishedMaps = event.getFinishedMaps();
  info.finishedReduces = event.getFinishedReduces();
  info.jobStatus = StringInterner.weakIntern(event.getStatus());
  info.errorInfo = StringInterner.weakIntern(event.getDiagnostics());
}
 
Example #20
Source File: Utils.java    From flink with Apache License 2.0
/**
 * Copied method from org.apache.hadoop.yarn.util.Apps.
 * It was broken by YARN-1824 (2.4.0) and fixed for 2.4.1
 * by https://issues.apache.org/jira/browse/YARN-1931
 */
public static void addToEnvironment(Map<String, String> environment,
		String variable, String value) {
	String val = environment.get(variable);
	if (val == null) {
		val = value;
	} else {
		val = val + File.pathSeparator + value;
	}
	environment.put(StringInterner.weakIntern(variable),
			StringInterner.weakIntern(val));
}
 
Example #21
Source File: TestDefaultSorter.java    From tez with Apache License 2.0
@Test
@Ignore
/**
 * Disabling this, as this would need 2047 MB io.sort.mb for testing.
 * Provide > 2GB to JVM when running this test to avoid OOM in string generation.
 *
 * Set DefaultSorter.MAX_IO_SORT_MB = 2047 for running this.
 */
public void testSortLimitsWithLargeRecords() throws IOException {
  OutputContext context = createTezOutputContext();

  doReturn(2800 * 1024 * 1024l).when(context).getTotalMemoryAvailableToTask();

  //Setting IO_SORT_MB to 2047 MB
  conf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB, 2047);
  context.requestInitialMemory(
      ExternalSorter.getInitialMemoryRequirement(conf,
          context.getTotalMemoryAvailableToTask()), new MemoryUpdateCallbackHandler());

  DefaultSorter sorter = new DefaultSorter(context, conf, 2, 2047 << 20);

  int i = 0;
  /**
   * If io.sort.mb is not capped to 1800, this would end up throwing
   * "java.lang.ArrayIndexOutOfBoundsException" after many spills.
   * Intentionally made it as infinite loop.
   */
  while (true) {
    Text key = new Text(i + "");
    //Generate random size between 1 MB to 100 MB.
    int valSize = ThreadLocalRandom.current().nextInt(1 * 1024 * 1024, 100 * 1024 * 1024);
    String val = StringInterner.weakIntern(StringUtils.repeat("v", valSize));
    sorter.write(key, new Text(val));
    i = (i + 1) % 10;
  }
}
 
Example #22
Source File: TaggedInputSplit.java    From big-c with Apache License 2.0
private Class<?> readClass(DataInput in) throws IOException {
  String className = StringInterner.weakIntern(Text.readString(in));
  try {
    return conf.getClassByName(className);
  } catch (ClassNotFoundException e) {
    throw new RuntimeException("readObject can't find class", e);
  }
}
 
Example #23
Source File: Task.java    From big-c with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  jobFile = StringInterner.weakIntern(Text.readString(in));
  taskId = TaskAttemptID.read(in);
  partition = in.readInt();
  numSlotsRequired = in.readInt();
  taskStatus.readFields(in);
  skipRanges.readFields(in);
  currentRecIndexIterator = skipRanges.skipRangeIterator();
  currentRecStartIndex = currentRecIndexIterator.next();
  skipping = in.readBoolean();
  jobCleanup = in.readBoolean();
  if (jobCleanup) {
    jobRunStateForCleanup = 
      WritableUtils.readEnum(in, JobStatus.State.class);
  }
  jobSetup = in.readBoolean();
  writeSkipRecs = in.readBoolean();
  taskCleanup = in.readBoolean();
  if (taskCleanup) {
    setPhase(TaskStatus.Phase.CLEANUP);
  }
  user = StringInterner.weakIntern(Text.readString(in));
  int len = in.readInt();
  encryptedSpillKey = new byte[len];
  extraData.readFields(in);
  in.readFully(encryptedSpillKey);
}
 
Example #24
Source File: JobProfile.java    From big-c with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  jobid.readFields(in);
  this.jobFile = StringInterner.weakIntern(Text.readString(in));
  this.url = StringInterner.weakIntern(Text.readString(in));
  this.user = StringInterner.weakIntern(Text.readString(in));
  this.name = StringInterner.weakIntern(Text.readString(in));
  this.queueName = StringInterner.weakIntern(Text.readString(in));
}
 
Example #25
Source File: EventMetaData.java    From incubator-tez with Apache License 2.0
public EventMetaData(EventProducerConsumerType generator,
    String taskVertexName, @Nullable String edgeVertexName,
    @Nullable TezTaskAttemptID taskAttemptID) {
  checkNotNull(generator, "generator is null");
  checkNotNull(taskVertexName, "taskVertexName is null");
  this.producerConsumerType = generator;
  this.taskVertexName = StringInterner.weakIntern(taskVertexName);
  this.edgeVertexName = StringInterner.weakIntern(edgeVertexName);
  this.taskAttemptID = taskAttemptID;
}
 
Example #26
Source File: EventMetaData.java    From tez with Apache License 2.0
public EventMetaData(EventProducerConsumerType generator,
    String taskVertexName, @Nullable String edgeVertexName,
    @Nullable TezTaskAttemptID taskAttemptID) {
  Objects.requireNonNull(generator, "generator is null");
  Objects.requireNonNull(taskVertexName, "taskVertexName is null");
  this.producerConsumerType = generator;
  this.taskVertexName = StringInterner.weakIntern(taskVertexName);
  this.edgeVertexName = StringInterner.weakIntern(edgeVertexName);
  this.taskAttemptID = taskAttemptID;
}
 
Example #27
Source File: TaskSpec.java    From incubator-tez with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  taskAttemptId = TezTaskAttemptID.readTezTaskAttemptID(in);
  dagName = StringInterner.weakIntern(in.readUTF());
  vertexName = StringInterner.weakIntern(in.readUTF());
  // TODO TEZ-305 convert this to PB
  processorDescriptor = new ProcessorDescriptor();
  processorDescriptor.readFields(in);
  int numInputSpecs = in.readInt();
  inputSpecList = new ArrayList<InputSpec>(numInputSpecs);
  for (int i = 0; i < numInputSpecs; i++) {
    InputSpec inputSpec = new InputSpec();
    inputSpec.readFields(in);
    inputSpecList.add(inputSpec);
  }
  int numOutputSpecs = in.readInt();
  outputSpecList = new ArrayList<OutputSpec>(numOutputSpecs);
  for (int i = 0; i < numOutputSpecs; i++) {
    OutputSpec outputSpec = new OutputSpec();
    outputSpec.readFields(in);
    outputSpecList.add(outputSpec);
  }
  boolean hasGroupInputs = in.readBoolean();
  if (hasGroupInputs) {
    int numGroups = in.readInt();
    groupInputSpecList = Lists.newArrayListWithCapacity(numGroups);
    for (int i=0; i<numGroups; ++i) {
      GroupInputSpec group = new GroupInputSpec();
      group.readFields(in);
      groupInputSpecList.add(group);
    }
  }
}
 
Example #28
Source File: GroupInputSpec.java    From incubator-tez with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  groupName = StringInterner.weakIntern(Text.readString(in));
  int numMembers = in.readInt();
  groupVertices = Lists.newArrayListWithCapacity(numMembers);
  for (int i=0; i<numMembers; ++i) {
    groupVertices.add(StringInterner.weakIntern(Text.readString(in)));
  }
  mergedInputDescriptor = new InputDescriptor();
  mergedInputDescriptor.readFields(in);
}
 
Example #29
Source File: TezYARNUtils.java    From incubator-tez with Apache License 2.0
public static void addToEnvironment(
    Map<String, String> environment,
    String variable, String value, String classPathSeparator) {
  String val = environment.get(variable);
  if (val == null) {
    val = value;
  } else {
    val = val + classPathSeparator + value;
  }
  environment.put(StringInterner.weakIntern(variable), 
      StringInterner.weakIntern(val));
}
 
Example #30
Source File: JobStatus.java    From big-c with Apache License 2.0
public synchronized void readFields(DataInput in) throws IOException {
  this.jobid = new JobID();
  this.jobid.readFields(in);
  this.setupProgress = in.readFloat();
  this.mapProgress = in.readFloat();
  this.reduceProgress = in.readFloat();
  this.cleanupProgress = in.readFloat();
  this.runState = WritableUtils.readEnum(in, State.class);
  this.startTime = in.readLong();
  this.user = StringInterner.weakIntern(Text.readString(in));
  this.priority = WritableUtils.readEnum(in, JobPriority.class);
  this.schedulingInfo = StringInterner.weakIntern(Text.readString(in));
  this.finishTime = in.readLong();
  this.isRetired = in.readBoolean();
  this.historyFile = StringInterner.weakIntern(Text.readString(in));
  this.jobName = StringInterner.weakIntern(Text.readString(in));
  this.trackingUrl = StringInterner.weakIntern(Text.readString(in));
  this.jobFile = StringInterner.weakIntern(Text.readString(in));
  this.isUber = in.readBoolean();

  // De-serialize the job's ACLs
  int numACLs = in.readInt();
  for (int i = 0; i < numACLs; i++) {
    JobACL aclType = WritableUtils.readEnum(in, JobACL.class);
    AccessControlList acl = new AccessControlList(" ");
    acl.readFields(in);
    this.jobACLs.put(aclType, acl);
  }
}