Java Code Examples for org.apache.hadoop.security.Credentials#write()

The following examples show how to use org.apache.hadoop.security.Credentials#write(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TestDelegationTokenRemoteFetcher.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Serves the serialized {@link Credentials} holding the single delegation
 * token back to the client as an HTTP 200 response, then closes the channel.
 *
 * @param channel    Netty channel to write the response to
 * @param token      the delegation token delivered by the fetcher; must equal
 *                   the token this test issued
 * @param serviceUrl service name under which the token is registered
 * @throws IOException if serializing the credentials fails
 */
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  // DataOutputBuffer.getData() returns the backing array, which may be larger
  // than the bytes actually written; only the first getLength() bytes are
  // valid. Using getData().length would send trailing garbage and overstate
  // Content-Length.
  int fileLength = out.getLength();
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData(), 0, fileLength);
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  // Close the connection once the response has been flushed.
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
 
Example 2
Source File: TestDelegationTokenRemoteFetcher.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Serves the serialized {@link Credentials} holding the single delegation
 * token back to the client as an HTTP 200 response, then closes the channel.
 *
 * @param channel    Netty channel to write the response to
 * @param token      the delegation token delivered by the fetcher; must equal
 *                   the token this test issued
 * @param serviceUrl service name under which the token is registered
 * @throws IOException if serializing the credentials fails
 */
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  // DataOutputBuffer.getData() returns the backing array, which may be larger
  // than the bytes actually written; only the first getLength() bytes are
  // valid. Using getData().length would send trailing garbage and overstate
  // Content-Length.
  int fileLength = out.getLength();
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData(), 0, fileLength);
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  // Close the connection once the response has been flushed.
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
 
Example 3
Source File: TestHftpDelegationToken.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Test whether HftpFileSystem maintain wire-compatibility for 0.20.203 when
 * obtaining delegation token. See HDFS-5440 for more details.
 */
@Test
public void testTokenCompatibilityFor203() throws IOException,
    URISyntaxException, AuthenticationException {
  Configuration conf = new Configuration();
  HftpFileSystem fs = new HftpFileSystem();

  // Build a token of the legacy HDFS delegation kind (empty identifier and
  // password are sufficient for this wire-format test).
  Token<?> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
      DelegationTokenIdentifier.HDFS_DELEGATION_KIND, new Text(
          "127.0.0.1:8020"));
  Credentials cred = new Credentials();
  cred.addToken(HftpFileSystem.TOKEN_KIND, token);
  // Serialize the credentials into the byte stream the mocked server will
  // reply with, mimicking what a 0.20.203 endpoint sends over the wire.
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  cred.write(new DataOutputStream(os));

  // Stub an HTTP connection that returns the serialized credentials with 200 OK.
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(os.toByteArray())).when(conn)
      .getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  // Initialize the filesystem first, then swap in the stubbed factory so all
  // subsequent connections hit the mock instead of the network.
  final URI uri = new URI("hftp://127.0.0.1:8020");
  fs.initialize(uri, conf);
  fs.connectionFactory = factory;

  UserGroupInformation ugi = UserGroupInformation.createUserForTesting("foo",
      new String[] { "bar" });

  TokenAspect<HftpFileSystem> tokenAspect = new TokenAspect<HftpFileSystem>(
      fs, SecurityUtil.buildTokenService(uri), HftpFileSystem.TOKEN_KIND);

  // Fetch and install the delegation token through the aspect under test.
  tokenAspect.initDelegationToken(ugi);
  tokenAspect.ensureTokenInitialized();

  // The renew token must carry the HFTP kind the client registered it under.
  Assert.assertSame(HftpFileSystem.TOKEN_KIND, fs.getRenewToken().getKind());

  // The stored delegation token must be a distinct copy of the remote token,
  // but of the same kind.
  Token<?> tok = (Token<?>) Whitebox.getInternalState(fs, "delegationToken");
  Assert.assertNotSame("Not making a copy of the remote token", token, tok);
  Assert.assertEquals(token.getKind(), tok.getKind());
}
 
Example 4
Source File: TestHftpDelegationToken.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Test whether HftpFileSystem maintain wire-compatibility for 0.20.203 when
 * obtaining delegation token. See HDFS-5440 for more details.
 */
@Test
public void testTokenCompatibilityFor203() throws IOException,
    URISyntaxException, AuthenticationException {
  Configuration conf = new Configuration();
  HftpFileSystem fs = new HftpFileSystem();

  // A token of the legacy HDFS delegation kind, as an 0.20.203 server issues.
  Token<?> remoteToken = new Token<TokenIdentifier>(new byte[0], new byte[0],
      DelegationTokenIdentifier.HDFS_DELEGATION_KIND, new Text(
          "127.0.0.1:8020"));
  Credentials credentials = new Credentials();
  credentials.addToken(HftpFileSystem.TOKEN_KIND, remoteToken);

  // Serialize the credentials exactly as the remote endpoint would.
  ByteArrayOutputStream serialized = new ByteArrayOutputStream();
  credentials.write(new DataOutputStream(serialized));

  // Stub a connection that replies 200 OK with the serialized credentials.
  HttpURLConnection connection = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(serialized.toByteArray())).when(connection)
      .getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(connection).getResponseCode();

  URLConnectionFactory stubFactory = mock(URLConnectionFactory.class);
  doReturn(connection).when(stubFactory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  // Initialize first, then install the stub so network access goes to the mock.
  final URI uri = new URI("hftp://127.0.0.1:8020");
  fs.initialize(uri, conf);
  fs.connectionFactory = stubFactory;

  UserGroupInformation ugi = UserGroupInformation.createUserForTesting("foo",
      new String[] { "bar" });

  TokenAspect<HftpFileSystem> aspect = new TokenAspect<HftpFileSystem>(
      fs, SecurityUtil.buildTokenService(uri), HftpFileSystem.TOKEN_KIND);

  aspect.initDelegationToken(ugi);
  aspect.ensureTokenInitialized();

  // The renew token keeps the HFTP kind it was registered under...
  Assert.assertSame(HftpFileSystem.TOKEN_KIND, fs.getRenewToken().getKind());

  // ...and the stored delegation token is a distinct copy of the remote one.
  Token<?> storedToken =
      (Token<?>) Whitebox.getInternalState(fs, "delegationToken");
  Assert.assertNotSame("Not making a copy of the remote token", remoteToken,
      storedToken);
  Assert.assertEquals(remoteToken.getKind(), storedToken.getKind());
}
 
Example 5
Source File: AutoHDFS.java    From jstorm with Apache License 2.0 4 votes vote down vote up
/**
 * Acquires HDFS delegation tokens on behalf of the topology submitter and
 * returns them as a serialized byte array.
 *
 * <p>Requires Kerberos security to be enabled; logs in with this service's
 * principal, proxies as the submitter, and asks the namenode for delegation
 * tokens renewable by {@code hdfsPrincipal}.
 *
 * @param conf topology configuration; read for the submitter principal and an
 *             optional HDFS URI override
 * @return Java-serialized bytes of the populated {@link Credentials}
 * @throws RuntimeException if security is disabled or token acquisition fails
 */
@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map conf) {
    try {
        if(UserGroupInformation.isSecurityEnabled()) {
            final Configuration configuration = new Configuration();

            login(configuration);

            final String topologySubmitterUser = (String) conf.get(Config.TOPOLOGY_SUBMITTER_PRINCIPAL);

            // Prefer an explicit HDFS URI from the topology conf; otherwise
            // fall back to the cluster default.
            final URI nameNodeURI = conf.containsKey(TOPOLOGY_HDFS_URI) ? new URI(conf.get(TOPOLOGY_HDFS_URI).toString())
                    : FileSystem.getDefaultUri(configuration);

            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

            final UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser, ugi);

            // Fetch delegation tokens while impersonating the submitter.
            Credentials creds = (Credentials) proxyUser.doAs(new PrivilegedAction<Object>() {
                @Override
                public Object run() {
                    try {
                        FileSystem fileSystem = FileSystem.get(nameNodeURI, configuration);
                        Credentials credential= proxyUser.getCredentials();

                        fileSystem.addDelegationTokens(hdfsPrincipal, credential);
                        LOG.info("Delegation tokens acquired for user {}", topologySubmitterUser);
                        return credential;
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            });

            ByteArrayOutputStream bao = new ByteArrayOutputStream();
            // NOTE(review): ObjectOutputStream wraps the Writable bytes in Java
            // serialization framing; the matching reader presumably uses
            // ObjectInputStream — kept as-is for wire compatibility (a plain
            // DataOutputStream would produce an incompatible format).
            // try-with-resources guarantees the stream is closed (and flushed)
            // even if creds.write() throws.
            try (ObjectOutputStream out = new ObjectOutputStream(bao)) {
                creds.write(out);
                out.flush();
            }

            return bao.toByteArray();
        } else {
            throw new RuntimeException("Security is not enabled for HDFS");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens." , ex);
    }
}