org.apache.hadoop.http.HttpConfig Java Examples

The following examples show how to use org.apache.hadoop.http.HttpConfig. Each example is drawn from an open-source project; the source file and license are noted above the code.
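Every snippet below revolves around the HttpConfig.Policy enum, whose three values are HTTP_ONLY, HTTPS_ONLY, and HTTP_AND_HTTPS. As orientation before the examples, here is a minimal sketch (not taken from any of the projects below) of setting, parsing, and testing a policy; it uses only calls that appear in the examples that follow:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;

public class HttpPolicySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // name() yields the canonical string form, e.g. "HTTP_AND_HTTPS".
    conf.set("dfs.http.policy", HttpConfig.Policy.HTTP_AND_HTTPS.name());

    // fromString(...) parses a configured value; it returns null for
    // unrecognized strings rather than throwing (see Example #12).
    HttpConfig.Policy policy =
        HttpConfig.Policy.fromString(conf.get("dfs.http.policy"));

    if (policy != null) {
      // The two predicates tell a server which connectors to open.
      System.out.println("HTTP enabled:  " + policy.isHttpEnabled());
      System.out.println("HTTPS enabled: " + policy.isHttpsEnabled());
    }
  }
}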
Example #1
Source File: TestNfs3HttpServer.java    From big-c with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
      HttpConfig.Policy.HTTP_AND_HTTPS.name());
  conf.set(NfsConfigKeys.NFS_HTTP_ADDRESS_KEY, "localhost:0");
  conf.set(NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY, "localhost:0");
  // Use an ephemeral port in case tests are running in parallel
  conf.setInt(NfsConfigKeys.DFS_NFS_SERVER_PORT_KEY, 0);
  conf.setInt(NfsConfigKeys.DFS_NFS_MOUNTD_PORT_KEY, 0);
  
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNfs3HttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
}
 
Example #2
Source File: Nfs3HttpServer.java    From big-c with Apache License 2.0
void start() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.get(
      NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY,
      NfsConfigKeys.NFS_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "nfs3",
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY,
      NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY);

  this.httpServer = builder.build();
  this.httpServer.start();
  
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    infoPort = httpServer.getConnectorAddress(connIdx++).getPort();
  }

  if (policy.isHttpsEnabled()) {
    infoSecurePort = httpServer.getConnectorAddress(connIdx).getPort();
  }
}
 
Example #3
Source File: DataNode.java    From big-c with Apache License 2.0
/**
 * Checks that the DataNode has a secure configuration when security is enabled.
 * There are 2 possible configurations that are considered secure:
 * 1. The server has bound to privileged ports for RPC and HTTP via
 *   SecureDataNodeStarter.
 * 2. The configuration enables SASL on DataTransferProtocol and HTTPS (no
 *   plain HTTP) for the HTTP server.  The SASL handshake guarantees
 *   authentication of the RPC server before a client transmits a secret, such
 *   as a block access token.  Similarly, SSL guarantees authentication of the
 *   HTTP server before a client transmits a secret, such as a delegation
 *   token.
 * It is not possible to run with both privileged ports and SASL on
 * DataTransferProtocol.  For backwards-compatibility, the connection logic
 * must check if the target port is a privileged port, and if so, skip the
 * SASL handshake.
 *
 * @param dnConf DNConf to check
 * @param conf Configuration to check
 * @param resources SecureResources obtained for DataNode
 * @throws RuntimeException if security is enabled but the configuration is insecure
 */
private static void checkSecureConfig(DNConf dnConf, Configuration conf,
    SecureResources resources) throws RuntimeException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
  if (resources != null && saslPropsResolver == null) {
    return;
  }
  if (dnConf.getIgnoreSecurePortsForTesting()) {
    return;
  }
  if (saslPropsResolver != null &&
      DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY &&
      resources == null) {
    return;
  }
  throw new RuntimeException("Cannot start secure DataNode without " +
    "configuring either privileged resources or SASL RPC data transfer " +
    "protection and SSL for HTTP.  Using privileged resources in " +
    "combination with SASL RPC data transfer protection is not supported.");
}
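For reference, the smallest configuration that takes the SASL-plus-HTTPS_ONLY path through checkSecureConfig above looks roughly like this sketch; it mirrors the createSecureConfig() helpers in Examples #5 and #6, and the class name is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;

public final class SecureDataNodeConfSketch {
  public static Configuration create() {
    Configuration conf = new Configuration();
    // SASL on DataTransferProtocol gives DNConf a non-null SaslPropertiesResolver.
    conf.set("dfs.data.transfer.protection", "authentication");
    // HTTPS only, so no secret is ever transmitted over plain HTTP.
    conf.set("dfs.http.policy", HttpConfig.Policy.HTTPS_ONLY.name());
    return conf;
  }
}

With this pair set and no privileged resources, checkSecureConfig() returns normally; weakening the policy to HTTP_AND_HTTPS triggers the RuntimeException, which is exactly what Examples #14 and #15 assert.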
 
Example #4
Source File: TestHttpsFileSystem.java    From big-c with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHttpsFileSystem.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
  OutputStream os = cluster.getFileSystem().create(new Path("/test"));
  os.write(23);
  os.close();
  InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
  nnAddr = NetUtils.getHostPortString(addr);
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
}
 
Example #5
Source File: TestWithSecureMiniDFSCluster.java    From streamx with Apache License 2.0
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY,
           "true");//https://issues.apache.org/jira/browse/HDFS-7431
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example #6
Source File: SaslDataTransferTestCase.java    From big-c with Apache License 2.0
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example #7
Source File: Nfs3HttpServer.java    From hadoop with Apache License 2.0
void start() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.get(
      NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY,
      NfsConfigKeys.NFS_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "nfs3",
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY,
      NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY);

  this.httpServer = builder.build();
  this.httpServer.start();
  
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    infoPort = httpServer.getConnectorAddress(connIdx++).getPort();
  }

  if (policy.isHttpsEnabled()) {
    infoSecurePort = httpServer.getConnectorAddress(connIdx).getPort();
  }
}
 
Example #8
Source File: TestNfs3HttpServer.java    From hadoop with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
      HttpConfig.Policy.HTTP_AND_HTTPS.name());
  conf.set(NfsConfigKeys.NFS_HTTP_ADDRESS_KEY, "localhost:0");
  conf.set(NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY, "localhost:0");
  // Use an ephemeral port in case tests are running in parallel
  conf.setInt(NfsConfigKeys.DFS_NFS_SERVER_PORT_KEY, 0);
  conf.setInt(NfsConfigKeys.DFS_NFS_MOUNTD_PORT_KEY, 0);
  
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNfs3HttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
}
 
Example #9
Source File: SaslDataTransferTestCase.java    From hadoop with Apache License 2.0
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example #10
Source File: TestHttpsFileSystem.java    From hadoop with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHttpsFileSystem.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
  OutputStream os = cluster.getFileSystem().create(new Path("/test"));
  os.write(23);
  os.close();
  InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
  nnAddr = NetUtils.getHostPortString(addr);
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
}
 
Example #11
Source File: DataNode.java    From hadoop with Apache License 2.0
/**
 * Checks that the DataNode has a secure configuration when security is enabled.
 * There are 2 possible configurations that are considered secure:
 * 1. The server has bound to privileged ports for RPC and HTTP via
 *   SecureDataNodeStarter.
 * 2. The configuration enables SASL on DataTransferProtocol and HTTPS (no
 *   plain HTTP) for the HTTP server.  The SASL handshake guarantees
 *   authentication of the RPC server before a client transmits a secret, such
 *   as a block access token.  Similarly, SSL guarantees authentication of the
 *   HTTP server before a client transmits a secret, such as a delegation
 *   token.
 * It is not possible to run with both privileged ports and SASL on
 * DataTransferProtocol.  For backwards-compatibility, the connection logic
 * must check if the target port is a privileged port, and if so, skip the
 * SASL handshake.
 *
 * @param dnConf DNConf to check
 * @param conf Configuration to check
 * @param resources SecureResources obtained for DataNode
 * @throws RuntimeException if security is enabled but the configuration is insecure
 */
private static void checkSecureConfig(DNConf dnConf, Configuration conf,
    SecureResources resources) throws RuntimeException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
  if (resources != null && saslPropsResolver == null) {
    return;
  }
  if (dnConf.getIgnoreSecurePortsForTesting()) {
    return;
  }
  if (saslPropsResolver != null &&
      DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY &&
      resources == null) {
    return;
  }
  throw new RuntimeException("Cannot start secure DataNode without " +
    "configuring either privileged resources or SASL RPC data transfer " +
    "protection and SSL for HTTP.  Using privileged resources in " +
    "combination with SASL RPC data transfer protection is not supported.");
}
 
Example #12
Source File: DFSUtil.java    From big-c with Apache License 2.0
/**
 * Get http policy. Http Policy is chosen as follows:
 * <ol>
 * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
 * https endpoints are started on configured https ports</li>
 * <li>This configuration is overridden by dfs.https.enable configuration, if
 * it is set to true. In that case, both http and https endpoints are started.</li>
 * <li>All the above configurations are overridden by dfs.http.policy
 * configuration. With this configuration you can set http-only, https-only
 * and http-and-https endpoints.</li>
 * </ol>
 * See hdfs-default.xml documentation for more details on each of the above
 * configuration settings.
 */
public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
  String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  if (policyStr == null) {
    boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
        DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);

    boolean hadoopSsl = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);

    if (hadoopSsl) {
      LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }
    if (https) {
      LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }

    return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
        : HttpConfig.Policy.HTTP_ONLY;
  }

  HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
  if (policy == null) {
    throw new HadoopIllegalArgumentException("Unregonized value '"
        + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  }

  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
  return policy;
}
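A quick sketch of the fallback behavior implemented above: with none of the keys set the method returns HTTP_ONLY, and either deprecated boolean key upgrades the result to HTTP_AND_HTTPS (logging a deprecation warning):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.http.HttpConfig;

public class HttpPolicyFallbackSketch {
  public static void main(String[] args) {
    // No policy keys set: falls through to HTTP_ONLY.
    Configuration conf = new Configuration();
    System.out.println(DFSUtil.getHttpPolicy(conf)); // HTTP_ONLY

    // The deprecated dfs.https.enable=true maps to HTTP_AND_HTTPS.
    conf.setBoolean("dfs.https.enable", true);
    System.out.println(DFSUtil.getHttpPolicy(conf)); // HTTP_AND_HTTPS
  }
}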
 
Example #13
Source File: TestNameNodeRespectsBindHostKeys.java    From hadoop with Apache License 2.0
private static void setupSsl() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  assertTrue(base.mkdirs());
  final String keystoresDir = new File(BASEDIR).getAbsolutePath();
  final String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeRespectsBindHostKeys.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
}
 
Example #14
Source File: TestSaslDataTransfer.java    From big-c with Apache License 2.0
@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
  HdfsConfiguration clusterConf = createSecureConfig("authentication");
  clusterConf.set(DFS_HTTP_POLICY_KEY,
    HttpConfig.Policy.HTTP_AND_HTTPS.name());
  exception.expect(RuntimeException.class);
  exception.expectMessage("Cannot start secure DataNode");
  startCluster(clusterConf);
}
 
Example #15
Source File: TestSaslDataTransfer.java    From hadoop with Apache License 2.0
@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
  HdfsConfiguration clusterConf = createSecureConfig("authentication");
  clusterConf.set(DFS_HTTP_POLICY_KEY,
    HttpConfig.Policy.HTTP_AND_HTTPS.name());
  exception.expect(RuntimeException.class);
  exception.expectMessage("Cannot start secure DataNode");
  startCluster(clusterConf);
}
 
Example #16
Source File: DFSUtil.java    From hadoop with Apache License 2.0
/**
 * Get http policy. Http Policy is chosen as follows:
 * <ol>
 * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
 * https endpoints are started on configured https ports</li>
 * <li>This configuration is overridden by dfs.https.enable configuration, if
 * it is set to true. In that case, both http and https endpoints are started.</li>
 * <li>All the above configurations are overridden by dfs.http.policy
 * configuration. With this configuration you can set http-only, https-only
 * and http-and-https endpoints.</li>
 * </ol>
 * See hdfs-default.xml documentation for more details on each of the above
 * configuration settings.
 */
public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
  String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  if (policyStr == null) {
    boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
        DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);

    boolean hadoopSsl = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);

    if (hadoopSsl) {
      LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }
    if (https) {
      LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }

    return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
        : HttpConfig.Policy.HTTP_ONLY;
  }

  HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
  if (policy == null) {
    throw new HadoopIllegalArgumentException("Unregonized value '"
        + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  }

  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
  return policy;
}
 
Example #17
Source File: ConfigUtils.java    From attic-apex-core with Apache License 2.0
public static boolean isSSLEnabled(Configuration conf)
{
  if (HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(
      conf.get(YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT))) {
    return true;
  }
  return false;
}
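A hypothetical call site for the helper above, e.g. choosing a scheme for a YARN web URL (assumes ConfigUtils from this example is on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class SchemeSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, "HTTPS_ONLY");
    // Prints "https://" because the configured policy is HTTPS_ONLY.
    System.out.println(ConfigUtils.isSSLEnabled(conf) ? "https://" : "http://");
  }
}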
 
Example #18
Source File: TestOzoneManagerHttpServer.java    From hadoop-ozone with Apache License 2.0
@Parameters public static Collection<Object[]> policy() {
  Object[][] params = new Object[][] {
      {HttpConfig.Policy.HTTP_ONLY},
      {HttpConfig.Policy.HTTPS_ONLY},
      {HttpConfig.Policy.HTTP_AND_HTTPS} };
  return Arrays.asList(params);
}
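These parameters feed JUnit 4's Parameterized runner. A minimal, hypothetical skeleton of how the surrounding test class consumes each HttpConfig.Policy value (the real TestOzoneManagerHttpServer starts a server per policy; this sketch only shows the wiring):

import java.util.Arrays;
import java.util.Collection;
import org.apache.hadoop.http.HttpConfig;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class PolicyParameterizedSketch {

  @Parameters public static Collection<Object[]> policy() {
    return Arrays.asList(new Object[][] {
        {HttpConfig.Policy.HTTP_ONLY},
        {HttpConfig.Policy.HTTPS_ONLY},
        {HttpConfig.Policy.HTTP_AND_HTTPS}});
  }

  private final HttpConfig.Policy policy;

  public PolicyParameterizedSketch(HttpConfig.Policy policy) {
    this.policy = policy;
  }

  @Test
  public void runsOncePerPolicy() {
    // A real test would start an HTTP server under `policy` and probe its
    // endpoints according to isHttpEnabled()/isHttpsEnabled().
    System.out.println("Running under policy " + policy.name());
  }
}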
 
Example #19
Source File: TestSecureRESTServer.java    From hbase with Apache License 2.0
private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
  // Set principal+keytab configuration for HDFS
  conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
  conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
  conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
      SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  // Enable token access for HDFS blocks
  conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  // Only use HTTPS (required because we aren't using "secure" ports)
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  // Bind on localhost for spnego to have a chance at working
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  // Generate SSL certs
  File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
  keystoresDir.mkdirs();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureRESTServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);

  // Magic flag to tell hdfs to not fail on using ports above 1024
  conf.setBoolean("ignore.secure.ports.for.testing", true);
}
 
Example #20
Source File: HBaseKerberosUtils.java    From hbase with Apache License 2.0
/**
 * Set up SSL configuration for HDFS NameNode and DataNode.
 * @param utility a HBaseTestingUtility object.
 * @param clazz the caller test class.
 * @throws Exception if unable to set up SSL configuration
 */
public static void setSSLConfiguration(HBaseCommonTestingUtility utility, Class<?> clazz)
  throws Exception {
  Configuration conf = utility.getConfiguration();
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File keystoresDir = new File(utility.getDataTestDir("keystore").toUri().getPath());
  keystoresDir.mkdirs();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(clazz);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
}
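A hypothetical call site: a secured HBase test would invoke the helper from its @BeforeClass before any mini-cluster daemon binds its ports (the test class name is illustrative; HBaseTestingUtility extends HBaseCommonTestingUtility, so it satisfies the signature):

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.junit.BeforeClass;

public class MySecureHBaseTest {
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setUp() throws Exception {
    // Switch the HDFS web UIs to HTTPS_ONLY and generate test keystores
    // before the mini DFS cluster starts.
    HBaseKerberosUtils.setSSLConfiguration(UTIL, MySecureHBaseTest.class);
    UTIL.startMiniDFSCluster(1);
  }
}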
 
Example #21
Source File: ConfigUtils.java    From Bats with Apache License 2.0
public static boolean isSSLEnabled(Configuration conf)
{
  if (HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(
      conf.get(YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT))) {
    return true;
  }
  return false;
}
 
Example #22
Source File: TestNameNodeRespectsBindHostKeys.java    From big-c with Apache License 2.0
private static void setupSsl() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  assertTrue(base.mkdirs());
  final String keystoresDir = new File(BASEDIR).getAbsolutePath();
  final String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeRespectsBindHostKeys.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
}
 
Example #23
Source File: TestStorageContainerManagerHttpServer.java    From hadoop-ozone with Apache License 2.0
@Parameters public static Collection<Object[]> policy() {
  Object[][] params = new Object[][] {
      {HttpConfig.Policy.HTTP_ONLY},
      {HttpConfig.Policy.HTTPS_ONLY},
      {HttpConfig.Policy.HTTP_AND_HTTPS} };
  return Arrays.asList(params);
}
 
Example #24
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0
private static void populateSecureConfigurations() {

		String dataTransferProtection = "authentication";

		SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
		conf.set(DFS_NAMENODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_DATANODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SecureTestEnvironment.getHadoopServicePrincipal());

		conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);

		conf.set("dfs.data.transfer.protection", dataTransferProtection);

		conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());

		conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "false");

		conf.setInt("dfs.datanode.socket.write.timeout", 0);

		/*
		 * We are setting the port number to a privileged port - see HDFS-9213
		 * This requires the user to have root privilege to bind to the port
		 * Use below command (ubuntu) to set privilege to java process for the
		 * bind() to work if the java process is not running as root.
		 * setcap 'cap_net_bind_service=+ep' /path/to/java
		 */
		conf.set(DFS_DATANODE_ADDRESS_KEY, "localhost:1002");
		conf.set(DFS_DATANODE_HOST_NAME_KEY, "localhost");
		conf.set(DFS_DATANODE_HTTP_ADDRESS_KEY, "localhost:1003");
	}
 
Example #25
Source File: SecureKnoxShellTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  final Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  userName = UserGroupInformation
      .createUserForTesting("guest", new String[] { "users" }).getUserName();
  final File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  final String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance,
      "HTTP/" + krbInstance);

  hdfsPrincipal =
      userName + "/" + krbInstance + "@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  krb5conf = kdc.getKrb5conf().getAbsolutePath();
}
 
Example #26
Source File: ATSImportTool.java    From tez with Apache License 2.0
static boolean hasHttpsPolicy(Configuration conf) {
  YarnConfiguration yarnConf = new YarnConfiguration(conf);
  return (HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(yarnConf
      .get(YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
}
 
Example #27
Source File: SecondaryNameNode.java    From big-c with Apache License 2.0
/**
 * Initialize SecondaryNameNode.
 */
private void initialize(final Configuration conf,
    CommandLineOpts commandLineOpts) throws IOException {
  final InetSocketAddress infoSocAddr = getHttpAddress(conf);
  final String infoBindAddress = infoSocAddr.getHostName();
  UserGroupInformation.setConfiguration(conf);
  if (UserGroupInformation.isSecurityEnabled()) {
    SecurityUtil.login(conf,
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_PRINCIPAL_KEY, infoBindAddress);
  }
  // initiate Java VM metrics
  DefaultMetricsSystem.initialize("SecondaryNameNode");
  JvmMetrics.create("SecondaryNameNode",
      conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
      DefaultMetricsSystem.instance());

  // Create connection to the namenode.
  shouldRun = true;
  nameNodeAddr = NameNode.getServiceAddress(conf, true);

  this.conf = conf;
  this.namenode = NameNodeProxies.createNonHAProxy(conf, nameNodeAddr, 
      NamenodeProtocol.class, UserGroupInformation.getCurrentUser(),
      true).getProxy();

  // initialize checkpoint directories
  fsName = getInfoServer();
  checkpointDirs = FSImage.getCheckpointDirs(conf,
                                "/tmp/hadoop/dfs/namesecondary");
  checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, 
                                "/tmp/hadoop/dfs/namesecondary");    
  checkpointImage = new CheckpointStorage(conf, checkpointDirs, checkpointEditsDirs);
  checkpointImage.recoverCreate(commandLineOpts.shouldFormat());
  checkpointImage.deleteTempEdits();
  
  namesystem = new FSNamesystem(conf, checkpointImage, true);

  // Disable quota checks
  namesystem.dir.disableQuotaChecks();

  // Initialize other scheduling parameters from the configuration
  checkpointConf = new CheckpointConf(conf);

  final InetSocketAddress httpAddr = infoSocAddr;

  final String httpsAddrString = conf.getTrimmed(
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "secondary",
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);

  nameNodeStatusBeanName = MBeans.register("SecondaryNameNode",
          "SecondaryNameNodeInfo", this);

  infoServer = builder.build();

  infoServer.setAttribute("secondary.name.node", this);
  infoServer.setAttribute("name.system.image", checkpointImage);
  infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  infoServer.addInternalServlet("imagetransfer", ImageServlet.PATH_SPEC,
      ImageServlet.class, true);
  infoServer.start();

  LOG.info("Web server init done");

  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    InetSocketAddress httpAddress = infoServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }

  if (policy.isHttpsEnabled()) {
    InetSocketAddress httpsAddress = infoServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }

  legacyOivImageDir = conf.get(
      DFSConfigKeys.DFS_NAMENODE_LEGACY_OIV_IMAGE_DIR_KEY);

  LOG.info("Checkpoint Period   :" + checkpointConf.getPeriod() + " secs "
      + "(" + checkpointConf.getPeriod() / 60 + " min)");
  LOG.info("Log Size Trigger    :" + checkpointConf.getTxnCount() + " txns");
}
 
Example #28
Source File: SecureDataNodeStarter.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Acquire privileged resources (i.e., the privileged ports) for the data
 * node. The privileged resources consist of the port of the RPC server and
 * the port of HTTP (not HTTPS) server.
 */
@VisibleForTesting
public static SecureResources getSecureResources(Configuration conf)
    throws Exception {
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  boolean isSecure = UserGroupInformation.isSecurityEnabled();

  // Obtain secure port for data streaming to datanode
  InetSocketAddress streamingAddr  = DataNode.getStreamingAddr(conf);
  int socketWriteTimeout = conf.getInt(
      DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
      HdfsServerConstants.WRITE_TIMEOUT);

  ServerSocket ss = (socketWriteTimeout > 0) ? 
      ServerSocketChannel.open().socket() : new ServerSocket();
  ss.bind(streamingAddr, 0);

  // Check that we got the port we need
  if (ss.getLocalPort() != streamingAddr.getPort()) {
    throw new RuntimeException(
        "Unable to bind on specified streaming port in secure "
            + "context. Needed " + streamingAddr.getPort() + ", got "
            + ss.getLocalPort());
  }

  if (!SecurityUtil.isPrivilegedPort(ss.getLocalPort()) && isSecure) {
    throw new RuntimeException(
      "Cannot start secure datanode with unprivileged RPC ports");
  }

  System.err.println("Opened streaming server at " + streamingAddr);

  // Bind a port for the web server. The code intends to bind HTTP server to
  // privileged port only, as the client can authenticate the server using
  // certificates if they are communicating through SSL.
  final ServerSocketChannel httpChannel;
  if (policy.isHttpEnabled()) {
    httpChannel = ServerSocketChannel.open();
    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
    httpChannel.socket().bind(infoSocAddr);
    InetSocketAddress localAddr = (InetSocketAddress) httpChannel.socket()
      .getLocalSocketAddress();

    if (localAddr.getPort() != infoSocAddr.getPort()) {
      throw new RuntimeException("Unable to bind on specified info port in secure " +
          "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
    }
    System.err.println("Successfully obtained privileged resources (streaming port = "
        + ss + " ) (http listener port = " + localAddr.getPort() +")");

    if (localAddr.getPort() > 1023 && isSecure) {
      throw new RuntimeException(
          "Cannot start secure datanode with unprivileged HTTP ports");
    }
    System.err.println("Opened info server at " + infoSocAddr);
  } else {
    httpChannel = null;
  }

  return new SecureResources(ss, httpChannel);
}
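For context, a rough, hypothetical sketch of how these resources reach the DataNode; in production, jsvc runs SecureDataNodeStarter as a privileged daemon and hands the resources to DataNode.secureMain, but the flow reduces to roughly this (assumes the process can bind ports below 1024):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter;
import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;

public class SecureStartSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Bind the privileged streaming and HTTP ports while still privileged.
    SecureResources resources =
        SecureDataNodeStarter.getSecureResources(conf);
    // Hand the pre-bound sockets to the DataNode.
    DataNode.createDataNode(args, conf, resources);
  }
}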
 
Example #29
Source File: TestWebHdfsTokens.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked") // for any(Token.class)
@Test
public void testLazyTokenFetchForSWebhdfs() throws Exception {
  MiniDFSCluster cluster = null;
  SWebHdfsFileSystem fs = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(
        DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    String BASEDIR = System.getProperty("test.build.dir",
        "target/test-dir") + "/" + TestWebHdfsTokens.class.getSimpleName();
    String keystoresDir;
    String sslConfDir;
   
    clusterConf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
    clusterConf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
 
    File base = new File(BASEDIR);
    FileUtil.fullyDelete(base);
    base.mkdirs();
    keystoresDir = new File(BASEDIR).getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false);
 
    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
    cluster.waitActive();
    InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
    String nnAddr = NetUtils.getHostPortString(addr);
    clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    UserGroupInformation.setConfiguration(clusterConf);
    
    uri = DFSUtil.createUri(
      "swebhdfs", cluster.getNameNode().getHttpsAddress());
    validateLazyTokenFetch(clusterConf);
  } finally {
    IOUtils.cleanup(null, fs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Example #30
Source File: SecureClusterTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  configuration = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
  UserGroupInformation.setConfiguration(configuration);
  assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
  userName = UserGroupInformation.createUserForTesting("guest", new String[]{"users"}).getUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  setupKnox(keytab, hdfsPrincipal);
}