Java Code Examples for org.apache.hadoop.security.Credentials#readTokenStorageFile()

The following examples show how to use org.apache.hadoop.security.Credentials#readTokenStorageFile(). Each example is taken from an open-source project; the originating source file and license are noted above each snippet.
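For orientation, here is a minimal sketch of the basic call pattern before the project-specific examples. It is not taken from any of the projects below; the class name, the command-line argument for the token file path, and the default Configuration are all placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

// Illustrative sketch only; not part of any project listed on this page.
public class ReadTokenFileSketch {
  public static void main(String[] args) throws IOException {
    // Token storage file written by a Hadoop tool or a YARN launcher;
    // the file:/// prefix forces the local filesystem, as several examples below do.
    Path tokenFile = new Path("file:///" + args[0]);
    Credentials creds = Credentials.readTokenStorageFile(tokenFile, new Configuration());

    // Enumerate what was deserialized from the file.
    System.out.println("tokens=" + creds.numberOfTokens()
        + ", secretKeys=" + creds.numberOfSecretKeys());
    for (Token<?> token : creds.getAllTokens()) {
      System.out.println(token.getKind() + " -> " + token.getService());
    }
  }
}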
Example 1
Source File: TokenCache.java    From tez with Apache License 2.0
/**
 * Merge tokens from a configured binary file into provided Credentials object
 * @param creds Credentials object to add new tokens to
 * @param conf Configuration used when reading the token file
 * @param tokenFilePath Location of tokens' binary file
 */
@InterfaceAudience.Private
public static void mergeBinaryTokens(Credentials creds,
    Configuration conf, String tokenFilePath)
    throws IOException {
  if (tokenFilePath == null || tokenFilePath.isEmpty()) {
    throw new RuntimeException("Invalid file path provided"
        + ", tokenFilePath=" + tokenFilePath);
  }
  LOG.info("Merging additional tokens from binary file"
      + ", binaryFileName=" + tokenFilePath);
  Credentials binary = Credentials.readTokenStorageFile(
      new Path("file:///" +  tokenFilePath), conf);

  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(binary);
}
 
Example 2
Source File: TokenCache.java    From hadoop with Apache License 2.0
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename != null) {
    Credentials binary;
    try {
      binary = Credentials.readTokenStorageFile(
          FileSystem.getLocal(conf).makeQualified(
              new Path(binaryTokenFilename)),
          conf);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    // supplement existing tokens with the tokens in the binary file
    creds.mergeAll(binary);
  }
}
 
Example 3
Source File: TokenCache.java    From hadoop with Apache License 2.0
/**
 * Load job tokens from a file.
 * @deprecated Use {@link Credentials#readTokenStorageFile} instead;
 * this method is included for compatibility with Hadoop 1.
 * @param jobTokenFile path to the local job token file
 * @param conf job configuration
 * @throws IOException if the token file cannot be read
 */
@InterfaceAudience.Private
@Deprecated
public static Credentials loadTokens(String jobTokenFile, JobConf conf)
throws IOException {
  Path localJobTokenFile = new Path("file:///" + jobTokenFile);

  Credentials ts = Credentials.readTokenStorageFile(localJobTokenFile, conf);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Task: Loaded jobTokenFile from: "
        + localJobTokenFile.toUri().getPath()
        + "; num of sec keys = " + ts.numberOfSecretKeys()
        + "; number of tokens = " + ts.numberOfTokens());
  }
  return ts;
}
 
Example 4
Source File: TokenCache.java    From incubator-tez with Apache License 2.0
/**
 * Merge tokens from a configured binary file into provided Credentials object
 * @param creds Credentials object to add new tokens to
 * @param conf Configuration used when reading the token file
 * @param tokenFilePath Location of tokens' binary file
 */
@InterfaceAudience.Private
public static void mergeBinaryTokens(Credentials creds,
    Configuration conf, String tokenFilePath)
    throws IOException {
  if (tokenFilePath == null || tokenFilePath.isEmpty()) {
    throw new RuntimeException("Invalid file path provided"
        + ", tokenFilePath=" + tokenFilePath);
  }
  LOG.info("Merging additional tokens from binary file"
      + ", binaryFileName=" + tokenFilePath);
  Credentials binary = Credentials.readTokenStorageFile(
      new Path("file:///" +  tokenFilePath), conf);

  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(binary);
}
 
Example 5
Source File: TestDelegationTokenRemoteFetcher.java    From hadoop with Apache License 2.0
/**
 * Call fetch token using http server 
 */
@Test
public void expectedTokenIsRetrievedFromHttp() throws Exception {
  bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
  DelegationTokenFetcher.main(new String[] { "-webservice=" + serviceUrl,
      tokenFile });
  Path p = new Path(fileSys.getWorkingDirectory(), tokenFile);
  Credentials creds = Credentials.readTokenStorageFile(p, conf);
  Iterator<Token<?>> itr = creds.getAllTokens().iterator();
  assertTrue("token not exist error", itr.hasNext());
  Token<?> fetchedToken = itr.next();
  Assert.assertArrayEquals("token wrong identifier error",
      testToken.getIdentifier(), fetchedToken.getIdentifier());
  Assert.assertArrayEquals("token wrong password error",
      testToken.getPassword(), fetchedToken.getPassword());
  if (assertionError != null)
    throw assertionError;
}
 
Example 6
Source File: TokenCache.java    From big-c with Apache License 2.0
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename != null) {
    Credentials binary;
    try {
      binary = Credentials.readTokenStorageFile(
          FileSystem.getLocal(conf).makeQualified(
              new Path(binaryTokenFilename)),
          conf);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    // supplement existing tokens with the tokens in the binary file
    creds.mergeAll(binary);
  }
}
 
Example 7
Source File: TokenCache.java    From big-c with Apache License 2.0
/**
 * Load job tokens from a file.
 * @deprecated Use {@link Credentials#readTokenStorageFile} instead;
 * this method is included for compatibility with Hadoop 1.
 * @param jobTokenFile path to the local job token file
 * @param conf job configuration
 * @throws IOException if the token file cannot be read
 */
@InterfaceAudience.Private
@Deprecated
public static Credentials loadTokens(String jobTokenFile, JobConf conf)
throws IOException {
  Path localJobTokenFile = new Path("file:///" + jobTokenFile);

  Credentials ts = Credentials.readTokenStorageFile(localJobTokenFile, conf);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Task: Loaded jobTokenFile from: "
        + localJobTokenFile.toUri().getPath()
        + "; num of sec keys = " + ts.numberOfSecretKeys()
        + "; number of tokens = " + ts.numberOfTokens());
  }
  return ts;
}
 
Example 8
Source File: TestDelegationTokenRemoteFetcher.java    From big-c with Apache License 2.0
/**
 * Call fetch token using http server 
 */
@Test
public void expectedTokenIsRetrievedFromHttp() throws Exception {
  bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
  DelegationTokenFetcher.main(new String[] { "-webservice=" + serviceUrl,
      tokenFile });
  Path p = new Path(fileSys.getWorkingDirectory(), tokenFile);
  Credentials creds = Credentials.readTokenStorageFile(p, conf);
  Iterator<Token<?>> itr = creds.getAllTokens().iterator();
  assertTrue("token not exist error", itr.hasNext());
  Token<?> fetchedToken = itr.next();
  Assert.assertArrayEquals("token wrong identifier error",
      testToken.getIdentifier(), fetchedToken.getIdentifier());
  Assert.assertArrayEquals("token wrong password error",
      testToken.getPassword(), fetchedToken.getPassword());
  if (assertionError != null)
    throw assertionError;
}
 
Example 9
Source File: YarnTestBase.java    From flink with Apache License 2.0
public static boolean verifyTokenKindInContainerCredentials(final Collection<String> tokens, final String containerId)
	throws IOException {
	File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
	if (!cwd.exists() || !cwd.isDirectory()) {
		return false;
	}

	File containerTokens = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.equals(containerId + ".tokens");
		}
	});

	if (containerTokens != null) {
		LOG.info("Verifying tokens in {}", containerTokens.getAbsolutePath());

		Credentials tmCredentials = Credentials.readTokenStorageFile(containerTokens, new Configuration());

		Collection<Token<? extends TokenIdentifier>> userTokens = tmCredentials.getAllTokens();
		Set<String> tokenKinds = new HashSet<>(4);
		for (Token<? extends TokenIdentifier> token : userTokens) {
			tokenKinds.add(token.getKind().toString());
		}

		return tokenKinds.containsAll(tokens);
	} else {
		LOG.warn("Unable to find credential file for container {}", containerId);
		return false;
	}
}
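Note that the Flink test above passes a java.io.File to readTokenStorageFile; Credentials also offers the Path overload used in the earlier examples, and both parse the same on-disk format. A minimal sketch of the File variant follows, with the file name left as a placeholder.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;

// Illustrative sketch only; the file name is a placeholder.
public class ReadContainerTokensSketch {
  public static void main(String[] args) throws IOException {
    // The File overload reads directly from the local filesystem, which is
    // convenient for container-local files such as "<containerId>.tokens".
    File tokenFile = new File(args[0]);
    Credentials creds = Credentials.readTokenStorageFile(tokenFile, new Configuration());
    System.out.println("tokens read: " + creds.numberOfTokens());
  }
}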
 
Example 10
Source File: HadoopSecurityManager_H_1_0.java    From azkaban-plugins with Apache License 2.0
@Override
public void cancelTokens(File tokenFile, String userToProxy, Logger logger)
    throws HadoopSecurityManagerException {
  // nntoken
  Credentials cred = null;
  try {
    cred =
        Credentials.readTokenStorageFile(new Path(tokenFile.toURI()),
            new Configuration());
    for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
      logger.info("Got token: " + t.toString());
      logger.info("Token kind: " + t.getKind());
      logger.info("Token id: " + new String(t.getIdentifier()));
      logger.info("Token service: " + t.getService());
      if (t.getKind().equals(new Text("HIVE_DELEGATION_TOKEN"))) {
        logger.info("Cancelling hive token " + new String(t.getIdentifier()));
        cancelHiveToken(t, userToProxy);
      } else if (t.getKind().equals(new Text("MAPREDUCE_DELEGATION_TOKEN"))) {
        logger.info("Cancelling mr job tracker token "
            + new String(t.getIdentifier()));
        cancelMRJobTrackerToken(t, userToProxy);
      } else if (t.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
        logger.info("Cancelling namenode token "
            + new String(t.getIdentifier()));
        cancelNameNodeToken(t, userToProxy);
      } else {
        logger.info("unknown token type " + t.getKind());
      }
    }
  } catch (Exception e) {
    e.printStackTrace();
  }

}
 
Example 11
Source File: GobblinYarnAppLauncher.java    From incubator-gobblin with Apache License 2.0
private void setupSecurityTokens(ContainerLaunchContext containerLaunchContext) throws IOException {
  Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();

  // Pass on the credentials from the hadoop token file if present.
  // The value in the token file takes precedence.
  if (System.getenv(HADOOP_TOKEN_FILE_LOCATION) != null) {
    Credentials tokenFileCredentials = Credentials.readTokenStorageFile(new File(System.getenv(HADOOP_TOKEN_FILE_LOCATION)),
        new Configuration());
    credentials.addAll(tokenFileCredentials);
  }

  String tokenRenewer = this.yarnConfiguration.get(YarnConfiguration.RM_PRINCIPAL);
  if (tokenRenewer == null || tokenRenewer.length() == 0) {
    throw new IOException("Failed to get master Kerberos principal for the RM to use as renewer");
  }

  // For now, only getting tokens for the default file-system.
  Token<?>[] tokens = this.fs.addDelegationTokens(tokenRenewer, credentials);
  if (tokens != null) {
    for (Token<?> token : tokens) {
      LOGGER.info("Got delegation token for " + this.fs.getUri() + "; " + token);
    }
  }

  Closer closer = Closer.create();
  try {
    DataOutputBuffer dataOutputBuffer = closer.register(new DataOutputBuffer());
    credentials.writeTokenStorageToStream(dataOutputBuffer);
    ByteBuffer fsTokens = ByteBuffer.wrap(dataOutputBuffer.getData(), 0, dataOutputBuffer.getLength());
    containerLaunchContext.setTokens(fsTokens);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
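The Gobblin launcher above serializes credentials to a ByteBuffer with writeTokenStorageToStream because the destination is a ContainerLaunchContext. When the destination is a file, the write-side counterpart of readTokenStorageFile is writeTokenStorageFile; the following is a minimal sketch of that, with the output path as a placeholder and no connection to the Gobblin code above.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

// Illustrative sketch only; path and configuration are placeholders.
public class WriteTokenFileSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();

    // Collect whatever tokens the current user already holds ...
    Credentials creds = UserGroupInformation.getCurrentUser().getCredentials();

    // ... and persist them in the binary format that
    // Credentials.readTokenStorageFile() parses in the examples on this page.
    Path tokenFile = new Path("file:///" + args[0]);
    creds.writeTokenStorageFile(tokenFile, conf);
  }
}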
 
Example 12
Source File: YarnTestBase.java    From flink with Apache License 2.0
public static boolean verifyTokenKindInContainerCredentials(final Collection<String> tokens, final String containerId)
	throws IOException {
	File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
	if (!cwd.exists() || !cwd.isDirectory()) {
		return false;
	}

	File containerTokens = TestUtils.findFile(cwd.getAbsolutePath(), new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.equals(containerId + ".tokens");
		}
	});

	if (containerTokens != null) {
		LOG.info("Verifying tokens in {}", containerTokens.getAbsolutePath());

		Credentials tmCredentials = Credentials.readTokenStorageFile(containerTokens, new Configuration());

		Collection<Token<? extends TokenIdentifier>> userTokens = tmCredentials.getAllTokens();
		Set<String> tokenKinds = new HashSet<>(4);
		for (Token<? extends TokenIdentifier> token : userTokens) {
			tokenKinds.add(token.getKind().toString());
		}

		return tokenKinds.containsAll(tokens);
	} else {
		LOG.warn("Unable to find credential file for container {}", containerId);
		return false;
	}
}
 
Example 13
Source File: YarnTestBase.java    From Flink-CEPplus with Apache License 2.0
public static boolean verifyTokenKindInContainerCredentials(final Collection<String> tokens, final String containerId)
	throws IOException {
	File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
	if (!cwd.exists() || !cwd.isDirectory()) {
		return false;
	}

	File containerTokens = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.equals(containerId + ".tokens");
		}
	});

	if (containerTokens != null) {
		LOG.info("Verifying tokens in {}", containerTokens.getAbsolutePath());

		Credentials tmCredentials = Credentials.readTokenStorageFile(containerTokens, new Configuration());

		Collection<Token<? extends TokenIdentifier>> userTokens = tmCredentials.getAllTokens();
		Set<String> tokenKinds = new HashSet<>(4);
		for (Token<? extends TokenIdentifier> token : userTokens) {
			tokenKinds.add(token.getKind().toString());
		}

		return tokenKinds.containsAll(tokens);
	} else {
		LOG.warn("Unable to find credential file for container {}", containerId);
		return false;
	}
}
 
Example 14
Source File: YarnContainerSecurityManager.java    From incubator-gobblin with Apache License 2.0
/**
 * Read the {@link Token}s stored in the token file.
 */
@VisibleForTesting
Credentials readCredentials(Path tokenFilePath) throws IOException {
  LOGGER.info("Reading updated credentials from token file: " + tokenFilePath);
  return Credentials.readTokenStorageFile(tokenFilePath, this.fs.getConf());
}
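Credentials read this way are usually merged into the running process's security context so that later filesystem and RPC calls pick up the refreshed tokens. The following is a sketch of that follow-up step, assuming the caller wants to update the current user; it is not part of the Gobblin class above.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

// Illustrative sketch only; not part of YarnContainerSecurityManager.
public class ApplyUpdatedCredentialsSketch {
  static void refresh(Path tokenFilePath, Configuration conf) throws IOException {
    // Re-read the token file, for example after a renewal service rewrites it ...
    Credentials updated = Credentials.readTokenStorageFile(tokenFilePath, conf);

    // ... and add the new tokens to the current user's credential cache.
    UserGroupInformation.getCurrentUser().addCredentials(updated);
  }
}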
 
Example 15
Source File: HadoopSecurityManager_H_2_0.java    From azkaban-plugins with Apache License 2.0
@Override
public void cancelTokens(File tokenFile, String userToProxy, Logger logger)
    throws HadoopSecurityManagerException {
  // nntoken
  Credentials cred = null;
  try {
    cred =
        Credentials.readTokenStorageFile(new Path(tokenFile.toURI()),
            new Configuration());
    for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {

      logger.info("Got token: " + t.toString());
      logger.info("Token kind: " + t.getKind());
      logger.info("Token id: " + new String(t.getIdentifier()));
      logger.info("Token service: " + t.getService());

      if (t.getKind().equals(new Text("HIVE_DELEGATION_TOKEN"))) {
        logger.info("Cancelling hive token " + new String(t.getIdentifier()));
        cancelHiveToken(t, userToProxy);
      } else if (t.getKind().equals(new Text("RM_DELEGATION_TOKEN"))) {
        logger.info("Cancelling mr job tracker token "
            + new String(t.getIdentifier()));
        // cancelMRJobTrackerToken(t, userToProxy);
      } else if (t.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
        logger.info("Cancelling namenode token "
            + new String(t.getIdentifier()));
        // cancelNameNodeToken(t, userToProxy);
      } else if (t.getKind().equals(new Text("MR_DELEGATION_TOKEN"))) {
        logger.info("Cancelling jobhistoryserver mr token "
            + new String(t.getIdentifier()));
        // cancelJhsToken(t, userToProxy);
      } else {
        logger.info("unknown token type " + t.getKind());
      }
    }
  } catch (Exception e) {
    throw new HadoopSecurityManagerException("Failed to cancel tokens "
        + e.getMessage() + e.getCause(), e);
  }

}
 
Example 16
Source File: DelegationTokenFetcher.java    From big-c with Apache License 2.0
private static Collection<Token<?>> readTokens(Path file, Configuration conf)
    throws IOException {
  Credentials creds = Credentials.readTokenStorageFile(file, conf);
  return creds.getAllTokens();
}
 
Example 17
Source File: DelegationTokenFetcher.java    From hadoop with Apache License 2.0
private static Collection<Token<?>> readTokens(Path file, Configuration conf)
    throws IOException {
  Credentials creds = Credentials.readTokenStorageFile(file, conf);
  return creds.getAllTokens();
}