Java Code Examples for org.apache.hadoop.io.DataInputByteBuffer

The following examples show how to use org.apache.hadoop.io.DataInputByteBuffer. They are extracted from open source projects; each example lists its source project, source file, and license.
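Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: serialize data with a DataOutputBuffer, wrap the valid bytes in a java.nio.ByteBuffer, then reset() a DataInputByteBuffer over that buffer and read the data back through the DataInput interface. The class name and values below are hypothetical and are not taken from any of the listed projects.

import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class DataInputByteBufferDemo {
  public static void main(String[] args) throws Exception {
    // Serialize some sample data (hypothetical values).
    DataOutputBuffer out = new DataOutputBuffer();
    out.writeInt(13562);          // e.g. a shuffle port
    out.writeUTF("hello");

    // Wrap only the valid portion of the output buffer.
    ByteBuffer buffer = ByteBuffer.wrap(out.getData(), 0, out.getLength());

    // Point a DataInputByteBuffer at the ByteBuffer and read it back.
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(buffer);
    int port = in.readInt();
    String message = in.readUTF();
    in.close();

    System.out.println(port + " " + message);
  }
}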
Example 1
Source Project: hadoop   Source File: ContainerManagerImpl.java    License: Apache License 2.0
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
 
Example 2
Source Project: big-c   Source File: ContainerManagerImpl.java    License: Apache License 2.0
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
 
Example 3
Source Project: hadoop   Source File: AMLauncher.java    License: Apache License 2.0
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
 
Example 4
Source Project: hadoop   Source File: RMAppImpl.java    License: Apache License 2.0
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Example 5
Source Project: hadoop   Source File: RMAppManager.java    License: Apache License 2.0
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Example 6
Source Project: hadoop   Source File: TestAMAuthorization.java    License: Apache License 2.0
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
 
Example 7
Source Project: hadoop   Source File: YarnClientImpl.java    License: Apache License 2.0
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
 
Example 8
Source Project: hadoop   Source File: ShuffleHandler.java    License: Apache License 2.0
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
 
Example 9
Source Project: hadoop   Source File: ShuffleHandler.java    License: Apache License 2.0
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
 
Example 10
Source Project: twill   Source File: YarnUtils.java    License: Apache License 2.0
/**
 * Decodes {@link Credentials} from the given buffer.
 * If the buffer is null or empty, it returns an empty Credentials.
 */
public static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
  Credentials credentials = new Credentials();
  if (buffer != null && buffer.hasRemaining()) {
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(buffer);
    credentials.readTokenStorageStream(in);
  }
  return credentials;
}
 
Example 11
Source Project: big-c   Source File: AMLauncher.java    License: Apache License 2.0
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
 
Example 12
Source Project: big-c   Source File: RMAppImpl.java    License: Apache License 2.0
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Example 13
Source Project: big-c   Source File: RMAppManager.java    License: Apache License 2.0
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Example 14
Source Project: big-c   Source File: TestAMAuthorization.java    License: Apache License 2.0
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
 
Example 15
Source Project: big-c   Source File: YarnClientImpl.java    License: Apache License 2.0
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
 
Example 16
Source Project: big-c   Source File: ShuffleHandler.java    License: Apache License 2.0
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
 
Example 17
Source Project: big-c   Source File: ShuffleHandler.java    License: Apache License 2.0
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
 
Example 18
Source Project: incubator-tajo   Source File: TajoPullServerService.java    License: Apache License 2.0
/**
 * A helper function to deserialize the metadata returned by PullServerAuxService.
 * @param meta the metadata returned by the PullServerAuxService
 * @return the port the PullServer Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  return in.readInt();
}
 
Example 19
Source Project: incubator-tajo   Source File: PullServerAuxService.java    License: Apache License 2.0
/**
 * A helper function to deserialize the metadata returned by PullServerAuxService.
 * @param meta the metadata returned by the PullServerAuxService
 * @return the port the PullServer Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  return in.readInt();
}
 
Example 20
Source Project: geowave   Source File: NNMapReduceTest.java    License: Apache License 2.0
@Test
public void testWritable() throws IOException {

  final PartitionDataWritable writable1 = new PartitionDataWritable();
  final PartitionDataWritable writable2 = new PartitionDataWritable();

  writable1.setPartitionData(
      new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), true));
  writable2.setPartitionData(
      new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), false));

  assertTrue(writable1.compareTo(writable2) == 0);
  writable2.setPartitionData(
      new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), false));
  assertTrue(writable1.compareTo(writable2) < 0);
  writable2.setPartitionData(
      new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), true));
  assertTrue(writable1.compareTo(writable2) < 0);

  final DataOutputByteBuffer output = new DataOutputByteBuffer();
  writable1.write(output);
  output.flush();
  final DataInputByteBuffer input = new DataInputByteBuffer();
  input.reset(output.getData());

  writable2.readFields(input);
  assertTrue(writable1.compareTo(writable2) == 0);
}
 
Example 21
Source Project: incubator-tez   Source File: ShuffleUtils.java    License: Apache License 2.0
public static SecretKey getJobTokenSecretFromTokenBytes(ByteBuffer meta)
    throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  SecretKey sk = JobTokenSecretManager.createSecretKey(jt.getPassword());
  return sk;
}
 
Example 22
Source Project: incubator-tez   Source File: ShuffleUtils.java    License: Apache License 2.0
public static int deserializeShuffleProviderMetaData(ByteBuffer meta)
    throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  try {
    in.reset(meta);
    int port = in.readInt();
    return port;
  } finally {
    in.close();
  }
}
 
Example 23
Source Project: incubator-tez   Source File: DagTypeConverters.java    License: Apache License 2.0
public static Credentials convertByteStringToCredentials(ByteString byteString) {
  if (byteString == null) {
    return null;
  }
  DataInputByteBuffer dib = new DataInputByteBuffer();
  dib.reset(byteString.asReadOnlyByteBuffer());
  Credentials credentials = new Credentials();
  try {
    credentials.readTokenStorageStream(dib);
    return credentials;
  } catch (IOException e) {
    throw new TezUncheckedException("Failed to deserialize Credentials", e);
  }
}
 
Example 24
Source Project: tez   Source File: ShuffleUtils.java    License: Apache License 2.0
public static SecretKey getJobTokenSecretFromTokenBytes(ByteBuffer meta)
    throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  SecretKey sk = JobTokenSecretManager.createSecretKey(jt.getPassword());
  return sk;
}
 
Example 25
Source Project: tez   Source File: TezRuntimeUtils.java    License: Apache License 2.0
public static int deserializeShuffleProviderMetaData(ByteBuffer meta)
    throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  try {
    in.reset(meta);
    int port = in.readInt();
    return port;
  } finally {
    in.close();
  }
}
 
Example 26
Source Project: tez   Source File: TestEdge.java    License: Apache License 2.0 5 votes vote down vote up
public static EdgeManagerConfig fromUserPayload(UserPayload payload)
    throws IOException {
  EdgeManagerConfig emConf = new EdgeManagerConfig();
  DataInputByteBuffer in  = new DataInputByteBuffer();
  in.reset(payload.getPayload());
  emConf.readFields(in);
  return emConf;
}
 
Example 27
Source Project: tez   Source File: DagTypeConverters.java    License: Apache License 2.0
public static Credentials convertByteStringToCredentials(ByteString byteString) {
  if (byteString == null) {
    return null;
  }
  DataInputByteBuffer dib = new DataInputByteBuffer();
  dib.reset(byteString.asReadOnlyByteBuffer());
  Credentials credentials = new Credentials();
  try {
    credentials.readTokenStorageStream(dib);
    return credentials;
  } catch (IOException e) {
    throw new TezUncheckedException("Failed to deserialize Credentials", e);
  }
}
 
Example 28
Source Project: tez   Source File: TestTezClientUtils.java    License: Apache License 2.0
@Test(timeout = 5000)
public void testSessionTokenInAmClc() throws IOException, YarnException {

  TezConfiguration tezConf = new TezConfiguration();
  tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, STAGING_DIR.getAbsolutePath());

  ApplicationId appId = ApplicationId.newInstance(1000, 1);
  DAG dag = DAG.create("testdag");
  dag.addVertex(Vertex.create("testVertex", ProcessorDescriptor.create("processorClassname"), 1)
      .setTaskLaunchCmdOpts("initialLaunchOpts"));

  Credentials credentials = new Credentials();
  JobTokenSecretManager jobTokenSecretManager = new JobTokenSecretManager();
  TezClientUtils.createSessionToken(appId.toString(), jobTokenSecretManager, credentials);
  Token<JobTokenIdentifier> jobToken = TokenCache.getSessionToken(credentials);
  assertNotNull(jobToken);

  AMConfiguration amConf =
      new AMConfiguration(tezConf, new HashMap<String, LocalResource>(), credentials);
  ApplicationSubmissionContext appSubmissionContext =
      TezClientUtils.createApplicationSubmissionContext(appId, dag, "amName", amConf,
          new HashMap<String, LocalResource>(), credentials, false, new TezApiVersionInfo(),
          null, null);

  ContainerLaunchContext amClc = appSubmissionContext.getAMContainerSpec();
  Map<String, ByteBuffer> amServiceData = amClc.getServiceData();
  assertNotNull(amServiceData);
  assertEquals(1, amServiceData.size());

  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(amServiceData.values().iterator().next());
  Token<JobTokenIdentifier> jtSent = new Token<JobTokenIdentifier>();
  jtSent.readFields(dibb);

  assertTrue(Arrays.equals(jobToken.getIdentifier(), jtSent.getIdentifier()));
}
 
Example 29
Source Project: tez   Source File: ShuffleHandler.java    License: Apache License 2.0
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
 
Example 30
Source Project: tez   Source File: ShuffleHandler.java    License: Apache License 2.0
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}