Java Code Examples for org.apache.hadoop.security.SecurityUtil#setAuthenticationMethod()

The following examples show how to use org.apache.hadoop.security.SecurityUtil#setAuthenticationMethod(). Each example is taken from an open source project; the source file, project, and license are noted above it.
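A pattern that recurs in every example: setAuthenticationMethod() writes the hadoop.security.authentication key into a Configuration, and the change only affects logins once that configuration is handed to UserGroupInformation.setConfiguration(). A minimal sketch of that round trip (the class name and printouts are illustrative, not taken from any project below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class AuthMethodSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Sets hadoop.security.authentication=kerberos on this Configuration only.
        SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);

        // UGI caches its own configuration; push the updated one so logins see it.
        UserGroupInformation.setConfiguration(conf);

        // Read the value back and confirm security is now considered enabled.
        System.out.println(SecurityUtil.getAuthenticationMethod(conf)); // KERBEROS
        System.out.println(UserGroupInformation.isSecurityEnabled());   // true
    }
}
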
Example 1
Source File: AbstractHadoopUserKerberosTest.java    From elasticsearch-hadoop with Apache License 2.0
@Test
public void getKerberosPrincipal() throws IOException {
    // Configure logins
    Configuration configuration = new Configuration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
    UserGroupInformation.setConfiguration(configuration);

    // Login as Client and Execute Test
    UserGroupInformation client = UserGroupInformation.loginUserFromKeytabAndReturnUGI(KerberosSuite.PRINCIPAL_CLIENT,
            KEYTAB_FILE.getAbsolutePath());

    User user = new HadoopUser(client, new TestSettings());

    assertThat(user.getKerberosPrincipal(), is(not(nullValue())));
    assertThat(user.getKerberosPrincipal().getName(), is(equalTo(KerberosSuite.PRINCIPAL_CLIENT + "@" + KerberosSuite.DEFAULT_REALM)));
}
 
Example 2
Source File: AbstractSpnegoNegotiatorTest.java    From elasticsearch-hadoop with Apache License 2.0
@Test(expected = EsHadoopIllegalStateException.class)
public void testPreemptNegotiatorWithChallengeFails() throws IOException, InterruptedException {
    // Configure logins
    Configuration configuration = new Configuration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
    UserGroupInformation.setConfiguration(configuration);

    // Login as Client and Create negotiator
    UserGroupInformation client = UserGroupInformation.loginUserFromKeytabAndReturnUGI(KerberosSuite.PRINCIPAL_CLIENT, KEYTAB_FILE.getAbsolutePath());
    final SpnegoNegotiator spnegoNegotiator = client.doAs(new PrivilegedExceptionAction<SpnegoNegotiator>() {
        @Override
        public SpnegoNegotiator run() throws Exception {
            return new SpnegoNegotiator(KerberosSuite.PRINCIPAL_CLIENT, KerberosSuite.PRINCIPAL_SERVER);
        }
    });

    byte[] token = new byte[]{1,2,3,4,5};
    spnegoNegotiator.setTokenData(Base64.encodeBase64String(token));
    fail("Negotiator should break if it is given a challenge before it initiates the negotiations");
}
 
Example 3
Source File: TestWithSecureMiniDFSCluster.java    From streamx with Apache License 2.0
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "true"); // see https://issues.apache.org/jira/browse/HDFS-7431
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example 4
Source File: AbstractSpnegoNegotiatorTest.java    From elasticsearch-hadoop with Apache License 2.0
@Test(expected = UndeclaredThrowableException.class)
public void testWrongServicePrincipal() throws IOException, InterruptedException {
    // Configure logins
    Configuration configuration = new Configuration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
    UserGroupInformation.setConfiguration(configuration);

    // Login as Client and Create negotiator
    UserGroupInformation client = UserGroupInformation.loginUserFromKeytabAndReturnUGI(KerberosSuite.PRINCIPAL_CLIENT, KEYTAB_FILE.getAbsolutePath());
    final SpnegoNegotiator spnegoNegotiator = client.doAs(new PrivilegedExceptionAction<SpnegoNegotiator>() {
        @Override
        public SpnegoNegotiator run() throws Exception {
            return new SpnegoNegotiator(KerberosSuite.PRINCIPAL_CLIENT, "omgWrongServerName");
        }
    });

    client.doAs(new PrivilegedExceptionAction<String>() {
        @Override
        public String run() throws Exception {
            return spnegoNegotiator.send();
        }
    });
    fail("Should not be able to find non existent server credentials");
}
 
Example 5
Source File: AtlasTopicCreator.java    From incubator-atlas with Apache License 2.0 (the same method appears unchanged in atlas)
@VisibleForTesting
protected boolean handleSecurity(Configuration atlasProperties) {
    if (AuthenticationUtil.isKerberosAuthenticationEnabled(atlasProperties)) {
        String kafkaPrincipal = atlasProperties.getString("atlas.notification.kafka.service.principal");
        String kafkaKeyTab = atlasProperties.getString("atlas.notification.kafka.keytab.location");
        org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
        SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, hadoopConf);
        try {
            String serverPrincipal = SecurityUtil.getServerPrincipal(kafkaPrincipal, (String) null);
            UserGroupInformation.setConfiguration(hadoopConf);
            UserGroupInformation.loginUserFromKeytab(serverPrincipal, kafkaKeyTab);
        } catch (IOException e) {
            LOG.warn("Could not login as {} from keytab file {}", kafkaPrincipal, kafkaKeyTab, e);
            return false;
        }
    }
    return true;
}
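
The getServerPrincipal() call above illustrates a companion SecurityUtil helper: it expands the _HOST token in a principal pattern, and passing null for the hostname substitutes the local machine's canonical name. A minimal sketch, using a hypothetical Kafka-style principal pattern rather than anything from the Atlas code:

import org.apache.hadoop.security.SecurityUtil;

public class ServerPrincipalSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical principal pattern; only the _HOST token is special.
        String pattern = "kafka/_HOST@EXAMPLE.COM";

        // null (or "0.0.0.0") means "use this host's fully-qualified name".
        String resolved = SecurityUtil.getServerPrincipal(pattern, (String) null);

        // Prints e.g. "kafka/broker01.example.com@EXAMPLE.COM".
        System.out.println(resolved);

        // A principal without the _HOST token is returned unchanged.
        System.out.println(SecurityUtil.getServerPrincipal(
                "kafka/fixed.example.com@EXAMPLE.COM", (String) null));
    }
}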
 
Example 6
Source File: TestWebHdfsTokens.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") // for any(Token.class)
@Test
public void testLazyTokenFetchForWebhdfs() throws Exception {
  MiniDFSCluster cluster = null;
  WebHdfsFileSystem fs = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    UserGroupInformation.setConfiguration(clusterConf);
    
    uri = DFSUtil.createUri(
        "webhdfs", cluster.getNameNode().getHttpAddress());
    validateLazyTokenFetch(clusterConf);
  } finally {
    IOUtils.cleanup(null, fs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Example 7
Source File: TestWebHdfsTokens.java    From big-c with Apache License 2.0
@BeforeClass
public static void setUp() {
  conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);    
  UserGroupInformation.setLoginUser(
      UserGroupInformation.createUserForTesting(
          "LoginUser", new String[]{"supergroup"}));
}
 
Example 8
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0
private static void populateSecureConfigurations() {

		String dataTransferProtection = "authentication";

		SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
		conf.set(DFS_NAMENODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_DATANODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SecureTestEnvironment.getHadoopServicePrincipal());

		conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);

		conf.set("dfs.data.transfer.protection", dataTransferProtection);

		conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());

		conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "false");

		conf.setInt("dfs.datanode.socket.write.timeout", 0);

		/*
		 * We are setting the port number to a privileged port - see HDFS-9213.
		 * Binding to it requires root privileges; if the java process is not
		 * running as root, use the command below (Ubuntu) to grant it the
		 * capability needed for bind() to work:
		 * setcap 'cap_net_bind_service=+ep' /path/to/java
		 */
		conf.set(DFS_DATANODE_ADDRESS_KEY, "localhost:1002");
		conf.set(DFS_DATANODE_HOST_NAME_KEY, "localhost");
		conf.set(DFS_DATANODE_HTTP_ADDRESS_KEY, "localhost:1003");
	}
 
Example 9
Source File: TestWebHdfsUrl.java    From hadoop with Apache License 2.0 (the same test appears unchanged in big-c)
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);
  
  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);    
}
 
Example 10
Source File: TestWebHdfsUrl.java    From hadoop with Apache License 2.0
@Test(timeout=60000)
public void testSecureProxyAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send real+effective
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send real+effective
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);
  
  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);
  
  // send real+effective
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString()
      },
      cancelTokenUrl);
  
  // send real+effective
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);    
}
 
Example 11
Source File: AbstractSpnegoAuthSchemeTest.java    From elasticsearch-hadoop with Apache License 2.0
@Test
public void testAuthWithReverseLookupServicePrincipal() throws Exception {
    // Configure logins
    Configuration configuration = new Configuration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
    UserGroupInformation.setConfiguration(configuration);

    // Login as Client and Execute Test
    UserGroupInformation client = UserGroupInformation.loginUserFromKeytabAndReturnUGI(KerberosSuite.PRINCIPAL_CLIENT, KEYTAB_FILE.getAbsolutePath());

    client.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            HttpParams params = new HttpClientParams();

            // Order auth schemes
            EsHadoopAuthPolicies.registerAuthSchemes();
            List<String> authPreferences = new ArrayList<String>();
            authPreferences.add(EsHadoopAuthPolicies.NEGOTIATE);
            params.setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, authPreferences);

            AuthChallengeProcessor authChallengeProcessor = new AuthChallengeProcessor(params);

            Map<String, String> dnsMappings = new HashMap<String, String>();
            dnsMappings.put("es.build.elastic.co", "127.0.0.1");

            TestMethod method = new TestMethod();
            method.setHeaders(new Header[]{new Header("WWW-Authenticate", "Negotiate")});
            method.setURI(new org.apache.commons.httpclient.URI("http", null, "127.0.0.1", 9200));

            Credentials credentials = new SpnegoCredentials(HadoopUserProvider.create(new TestSettings()), "HTTP/es.build.elastic.co@" + KerberosSuite.DEFAULT_REALM);

            // Parse Challenge
            Map challenges = AuthChallengeParser.parseChallenges(method.getResponseHeaders("WWW-Authenticate"));
            assertThat(challenges.isEmpty(), not(true));
            assertThat(challenges.containsKey("negotiate"), is(true));
            assertThat(challenges.get("negotiate"), is("Negotiate"));
            AuthScheme scheme = authChallengeProcessor.processChallenge(method.getHostAuthState(), challenges);

            assertNotNull(scheme);
            assertThat(scheme, instanceOf(SpnegoAuthScheme.class));
            method.getHostAuthState().setAuthAttempted(true);

            // Execute Auth
            Header[] authHeaders = method.getRequestHeaders("Authorization");
            for (Header authHeader : authHeaders) {
                if (authHeader.isAutogenerated()) {
                    method.removeRequestHeader(authHeader);
                }
            }
            AuthState authState = method.getHostAuthState();
            AuthScheme authScheme = authState.getAuthScheme();
            assertNotNull(authScheme);
            assertThat(authScheme.isConnectionBased(), is(not(true)));

            // Replace scheme with test harness scheme
            authScheme = new TestScheme(dnsMappings);
            String authString = authScheme.authenticate(credentials, method);

            assertNotNull(authString);
            assertThat(authString, startsWith("Negotiate "));
            method.addRequestHeader(new Header("Authorization", authString, true));

            return null;
        }
    });
}
 
Example 12
Source File: TestWebHdfsTokens.java    From hadoop with Apache License 2.0 (the same test appears unchanged in big-c)
@SuppressWarnings("unchecked") // for any(Token.class)
@Test
public void testLazyTokenFetchForSWebhdfs() throws Exception {
  MiniDFSCluster cluster = null;
  SWebHdfsFileSystem fs = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    String BASEDIR = System.getProperty("test.build.dir",
        "target/test-dir") + "/" + TestWebHdfsTokens.class.getSimpleName();
    String keystoresDir;
    String sslConfDir;

    clusterConf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
    clusterConf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
 
    File base = new File(BASEDIR);
    FileUtil.fullyDelete(base);
    base.mkdirs();
    keystoresDir = new File(BASEDIR).getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false);
 
    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
    cluster.waitActive();
    InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
    String nnAddr = NetUtils.getHostPortString(addr);
    clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    UserGroupInformation.setConfiguration(clusterConf);
    
    uri = DFSUtil.createUri(
      "swebhdfs", cluster.getNameNode().getHttpsAddress());
    validateLazyTokenFetch(clusterConf);
  } finally {
    IOUtils.cleanup(null, fs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Example 13
Source File: TestWebHdfsTokens.java    From hadoop with Apache License 2.0
@Test
public void testSetTokenServiceAndKind() throws Exception {
  MiniDFSCluster cluster = null;

  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
            .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem
            (clusterConf, "webhdfs");
    Whitebox.setInternalState(fs, "canRefreshDelegationToken", true);

    URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn)
              throws IOException {
        return conn;
      }
    }) {
      @Override
      public URLConnection openConnection(URL url) throws IOException {
        return super.openConnection(new URL(url + "&service=foo&kind=bar"));
      }
    };
    Whitebox.setInternalState(fs, "connectionFactory", factory);
    Token<?> token1 = fs.getDelegationToken();
    Assert.assertEquals(new Text("bar"), token1.getKind());

    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token2 =
        fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
            op, null, new RenewerParam(null)) {
          @Override
          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json)
              throws IOException {
            return JsonUtil.toDelegationToken(json);
          }
        }.run();

    Assert.assertEquals(new Text("bar"), token2.getKind());
    Assert.assertEquals(new Text("foo"), token2.getService());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Example 14
Source File: TestSecureNNWithQJM.java    From hadoop with Apache License 2.0
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
Example 15
Source File: TestRMAppTransitions.java    From big-c with Apache License 2.0
@Before
public void setUp() throws Exception {
  conf = new YarnConfiguration();
  AuthenticationMethod authMethod = AuthenticationMethod.SIMPLE;
  if (isSecurityEnabled) {
    authMethod = AuthenticationMethod.KERBEROS;
  }
  SecurityUtil.setAuthenticationMethod(authMethod, conf);
  UserGroupInformation.setConfiguration(conf);

  rmDispatcher = new DrainDispatcher();
  ContainerAllocationExpirer containerAllocationExpirer = 
      mock(ContainerAllocationExpirer.class);
  AMLivelinessMonitor amLivelinessMonitor = mock(AMLivelinessMonitor.class);
  AMLivelinessMonitor amFinishingMonitor = mock(AMLivelinessMonitor.class);
  store = mock(RMStateStore.class);
  writer = mock(RMApplicationHistoryWriter.class);
  DelegationTokenRenewer renewer = mock(DelegationTokenRenewer.class);
  RMContext realRMContext = 
      new RMContextImpl(rmDispatcher,
        containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor,
        renewer, new AMRMTokenSecretManager(conf, this.rmContext),
        new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(),
        writer);
  ((RMContextImpl)realRMContext).setStateStore(store);
  publisher = mock(SystemMetricsPublisher.class);
  ((RMContextImpl)realRMContext).setSystemMetricsPublisher(publisher);

  this.rmContext = spy(realRMContext);

  ResourceScheduler resourceScheduler = mock(ResourceScheduler.class);
  doReturn(null).when(resourceScheduler)
            .getAppResourceUsageReport((ApplicationAttemptId)Matchers.any());
  doReturn(resourceScheduler).when(rmContext).getScheduler();

  rmDispatcher.register(RMAppAttemptEventType.class,
      new TestApplicationAttemptEventDispatcher(this.rmContext));

  rmDispatcher.register(RMAppEventType.class,
      new TestApplicationEventDispatcher(rmContext));
  
  rmDispatcher.register(RMAppManagerEventType.class,
      new TestApplicationManagerEventDispatcher());
  
  schedulerDispatcher = new TestSchedulerEventDispatcher();
  rmDispatcher.register(SchedulerEventType.class,
      schedulerDispatcher);
  
  rmDispatcher.init(conf);
  rmDispatcher.start();
}
 
Example 16
Source File: TestRMAppAttemptTransitions.java    From big-c with Apache License 2.0
@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
  AuthenticationMethod authMethod = AuthenticationMethod.SIMPLE;
  if (isSecurityEnabled) {
    authMethod = AuthenticationMethod.KERBEROS;
  }
  SecurityUtil.setAuthenticationMethod(authMethod, conf);
  UserGroupInformation.setConfiguration(conf);
  InlineDispatcher rmDispatcher = new InlineDispatcher();

  ContainerAllocationExpirer containerAllocationExpirer =
      mock(ContainerAllocationExpirer.class);
  amLivelinessMonitor = mock(AMLivelinessMonitor.class);
  amFinishingMonitor = mock(AMLivelinessMonitor.class);
  writer = mock(RMApplicationHistoryWriter.class);
  MasterKeyData masterKeyData = amRMTokenManager.createNewMasterKey();
  when(amRMTokenManager.getMasterKey()).thenReturn(masterKeyData);
  rmContext =
      new RMContextImpl(rmDispatcher,
        containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor,
        null, amRMTokenManager,
        new RMContainerTokenSecretManager(conf),
        nmTokenManager,
        clientToAMTokenManager,
        writer);
  
  store = mock(RMStateStore.class);
  ((RMContextImpl) rmContext).setStateStore(store);
  publisher = mock(SystemMetricsPublisher.class);
  ((RMContextImpl) rmContext).setSystemMetricsPublisher(publisher);
  
  scheduler = mock(YarnScheduler.class);
  masterService = mock(ApplicationMasterService.class);
  applicationMasterLauncher = mock(ApplicationMasterLauncher.class);
  
  rmDispatcher.register(RMAppAttemptEventType.class,
      new TestApplicationAttemptEventDispatcher());

  rmDispatcher.register(RMAppEventType.class,
      new TestApplicationEventDispatcher());
  
  rmDispatcher.register(SchedulerEventType.class, 
      new TestSchedulerEventDispatcher());
  
  rmDispatcher.register(AMLauncherEventType.class, 
      new TestAMLauncherEventDispatcher());

  rmnodeEventHandler = mock(RMNodeImpl.class);
  rmDispatcher.register(RMNodeEventType.class, rmnodeEventHandler);

  rmDispatcher.init(conf);
  rmDispatcher.start();
  

  ApplicationId applicationId = MockApps.newAppID(appId++);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(applicationId, 0);

  resourceScheduler = mock(ResourceScheduler.class);

  ApplicationResourceUsageReport appResUsgRpt =
      mock(ApplicationResourceUsageReport.class);
  when(appResUsgRpt.getMemorySeconds()).thenReturn(0L);
  when(appResUsgRpt.getVcoreSeconds()).thenReturn(0L);
  when(resourceScheduler
      .getAppResourceUsageReport((ApplicationAttemptId)Matchers.any()))
   .thenReturn(appResUsgRpt);
  spyRMContext = spy(rmContext);
  Mockito.doReturn(resourceScheduler).when(spyRMContext).getScheduler();


  final String user = MockApps.newUserName();
  final String queue = MockApps.newQueue();
  submissionContext = mock(ApplicationSubmissionContext.class);
  when(submissionContext.getQueue()).thenReturn(queue);
  Resource resource = BuilderUtils.newResource(1536, 1);
  ContainerLaunchContext amContainerSpec =
      BuilderUtils.newContainerLaunchContext(null, null,
          null, null, null, null);
  when(submissionContext.getAMContainerSpec()).thenReturn(amContainerSpec);
  when(submissionContext.getResource()).thenReturn(resource);

  unmanagedAM = false;
  
  application = mock(RMAppImpl.class);
  applicationAttempt =
      new RMAppAttemptImpl(applicationAttemptId, spyRMContext, scheduler,
          masterService, submissionContext, new Configuration(), false,
          BuilderUtils.newResourceRequest(
              RMAppAttemptImpl.AM_CONTAINER_PRIORITY, ResourceRequest.ANY,
              submissionContext.getResource(), 1));

  when(application.getCurrentAppAttempt()).thenReturn(applicationAttempt);
  when(application.getApplicationId()).thenReturn(applicationId);
  spyRMContext.getRMApps().put(application.getApplicationId(), application);

  testAppAttemptNewState();
}
 
Example 17
Source File: HdfsRepository.java    From crate with Apache License 2.0
private UserGroupInformation login(Configuration hadoopConfiguration, Settings repositorySettings) {
    // Validate the authentication method:
    AuthenticationMethod authMethod = SecurityUtil.getAuthenticationMethod(hadoopConfiguration);
    if (authMethod.equals(AuthenticationMethod.SIMPLE) == false
        && authMethod.equals(AuthenticationMethod.KERBEROS) == false) {
        throw new RuntimeException("Unsupported authorization mode [" + authMethod + "]");
    }

    // Check if the user added a principal to use, and that there is a keytab file provided
    String kerberosPrincipal = repositorySettings.get(CONF_SECURITY_PRINCIPAL);

    // Check to see if the authentication method is compatible
    if (kerberosPrincipal != null && authMethod.equals(AuthenticationMethod.SIMPLE)) {
        LOGGER.warn("Hadoop authentication method is set to [SIMPLE], but a Kerberos principal is " +
            "specified. Continuing with [KERBEROS] authentication.");
        SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, hadoopConfiguration);
    } else if (kerberosPrincipal == null && authMethod.equals(AuthenticationMethod.KERBEROS)) {
        throw new RuntimeException("HDFS Repository does not support [KERBEROS] authentication without " +
            "a valid Kerberos principal and keytab. Please specify a principal in the repository settings with [" +
            CONF_SECURITY_PRINCIPAL + "].");
    }

    // Now we can initialize the UGI with the configuration.
    UserGroupInformation.setConfiguration(hadoopConfiguration);

    // Debugging
    LOGGER.debug("Hadoop security enabled: [{}]", UserGroupInformation.isSecurityEnabled());
    LOGGER.debug("Using Hadoop authentication method: [{}]", SecurityUtil.getAuthenticationMethod(hadoopConfiguration));

    // UserGroupInformation (UGI) instance is just a Hadoop specific wrapper around a Java Subject
    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            String principal = preparePrincipal(kerberosPrincipal);
            String keytab = HdfsSecurityContext.locateKeytabFile(environment).toString();
            LOGGER.debug("Using kerberos principal [{}] and keytab located at [{}]", principal, keytab);
            return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
        }
        return UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
        throw new UncheckedIOException("Could not retrieve the current user information", e);
    }
}