Java Code Examples for org.apache.hadoop.http.HttpConfig

The following examples show how to use org.apache.hadoop.http.HttpConfig. The examples are extracted from open source projects; each one lists its source project, source file, and license.
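All of the examples below follow the same basic pattern: read a policy string from a Configuration, convert it with HttpConfig.Policy.fromString(), and branch on the result. Here is a minimal sketch of that pattern (the helper name is illustrative; dfs.http.policy is the HDFS key, DFSConfigKeys.DFS_HTTP_POLICY_KEY, used throughout the examples):

// A minimal sketch, assuming an HDFS-style setup as in the examples below.
static boolean shouldUseHttps(org.apache.hadoop.conf.Configuration conf) {
  // fromString() returns null for unrecognized values (see the DFSUtil
  // example below), so guard before use.
  HttpConfig.Policy policy =
      HttpConfig.Policy.fromString(conf.get("dfs.http.policy", "HTTP_ONLY"));
  return policy != null && policy.isHttpsEnabled();
}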
Example 1
Source Project: hadoop   Source File: Nfs3HttpServer.java    License: Apache License 2.0
void start() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.get(
      NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY,
      NfsConfigKeys.NFS_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "nfs3",
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY,
      NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY);

  this.httpServer = builder.build();
  this.httpServer.start();
  
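  // The HTTP connector (when enabled) is registered before the HTTPS one,
  // so advance the connector index past it before reading the HTTPS port.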
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    infoPort = httpServer.getConnectorAddress(connIdx++).getPort();
  }

  if (policy.isHttpsEnabled()) {
    infoSecurePort = httpServer.getConnectorAddress(connIdx).getPort();
  }
}
 
Example 2
Source Project: hadoop   Source File: TestNfs3HttpServer.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
      HttpConfig.Policy.HTTP_AND_HTTPS.name());
  conf.set(NfsConfigKeys.NFS_HTTP_ADDRESS_KEY, "localhost:0");
  conf.set(NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY, "localhost:0");
  // Use an ephemeral port in case tests are running in parallel
  conf.setInt(NfsConfigKeys.DFS_NFS_SERVER_PORT_KEY, 0);
  conf.setInt(NfsConfigKeys.DFS_NFS_MOUNTD_PORT_KEY, 0);
  
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNfs3HttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
}
 
Example 3
Source Project: hadoop   Source File: DataNode.java    License: Apache License 2.0
/**
 * Checks if the DataNode has a secure configuration if security is enabled.
 * There are 2 possible configurations that are considered secure:
 * 1. The server has bound to privileged ports for RPC and HTTP via
 *   SecureDataNodeStarter.
 * 2. The configuration enables SASL on DataTransferProtocol and HTTPS (no
 *   plain HTTP) for the HTTP server.  The SASL handshake guarantees
 *   authentication of the RPC server before a client transmits a secret, such
 *   as a block access token.  Similarly, SSL guarantees authentication of the
 *   HTTP server before a client transmits a secret, such as a delegation
 *   token.
 * It is not possible to run with both privileged ports and SASL on
 * DataTransferProtocol.  For backwards-compatibility, the connection logic
 * must check if the target port is a privileged port, and if so, skip the
 * SASL handshake.
 *
 * @param dnConf DNConf to check
 * @param conf Configuration to check
 * @param resources SecureResources obtained for the DataNode
 * @throws RuntimeException if security enabled, but configuration is insecure
 */
private static void checkSecureConfig(DNConf dnConf, Configuration conf,
    SecureResources resources) throws RuntimeException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
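  // Secure case 1 from the javadoc: privileged resources were obtained via
  // SecureDataNodeStarter and SASL is not in use.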
  if (resources != null && saslPropsResolver == null) {
    return;
  }
  if (dnConf.getIgnoreSecurePortsForTesting()) {
    return;
  }
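  // Secure case 2 from the javadoc: SASL on DataTransferProtocol with an
  // HTTPS-only web policy and no privileged resources.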
  if (saslPropsResolver != null &&
      DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY &&
      resources == null) {
    return;
  }
  throw new RuntimeException("Cannot start secure DataNode without " +
    "configuring either privileged resources or SASL RPC data transfer " +
    "protection and SSL for HTTP.  Using privileged resources in " +
    "combination with SASL RPC data transfer protection is not supported.");
}
 
Example 4
Source Project: hadoop   Source File: TestHttpsFileSystem.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHttpsFileSystem.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
  OutputStream os = cluster.getFileSystem().create(new Path("/test"));
  os.write(23);
  os.close();
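  // The HTTPS address was configured as "localhost:0" above, so read back
  // the actual bound address and write it into the conf for test clients.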
  InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
  nnAddr = NetUtils.getHostPortString(addr);
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
}
 
Example 5
Source Project: hadoop   Source File: SaslDataTransferTestCase.java    License: Apache License 2.0
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example 6
Source Project: streamx   Source File: TestWithSecureMiniDFSCluster.java    License: Apache License 2.0
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  // See https://issues.apache.org/jira/browse/HDFS-7431
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "true");
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example 7
Source Project: big-c   Source File: Nfs3HttpServer.java    License: Apache License 2.0
void start() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.get(
      NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY,
      NfsConfigKeys.NFS_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "nfs3",
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY,
      NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY);

  this.httpServer = builder.build();
  this.httpServer.start();
  
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    infoPort = httpServer.getConnectorAddress(connIdx++).getPort();
  }

  if (policy.isHttpsEnabled()) {
    infoSecurePort = httpServer.getConnectorAddress(connIdx).getPort();
  }
}
 
Example 8
Source Project: big-c   Source File: TestNfs3HttpServer.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
      HttpConfig.Policy.HTTP_AND_HTTPS.name());
  conf.set(NfsConfigKeys.NFS_HTTP_ADDRESS_KEY, "localhost:0");
  conf.set(NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY, "localhost:0");
  // Use an ephemeral port in case tests are running in parallel
  conf.setInt(NfsConfigKeys.DFS_NFS_SERVER_PORT_KEY, 0);
  conf.setInt(NfsConfigKeys.DFS_NFS_MOUNTD_PORT_KEY, 0);
  
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNfs3HttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
}
 
Example 9
Source Project: big-c   Source File: DataNode.java    License: Apache License 2.0
/**
 * Checks if the DataNode has a secure configuration if security is enabled.
 * There are 2 possible configurations that are considered secure:
 * 1. The server has bound to privileged ports for RPC and HTTP via
 *   SecureDataNodeStarter.
 * 2. The configuration enables SASL on DataTransferProtocol and HTTPS (no
 *   plain HTTP) for the HTTP server.  The SASL handshake guarantees
 *   authentication of the RPC server before a client transmits a secret, such
 *   as a block access token.  Similarly, SSL guarantees authentication of the
 *   HTTP server before a client transmits a secret, such as a delegation
 *   token.
 * It is not possible to run with both privileged ports and SASL on
 * DataTransferProtocol.  For backwards-compatibility, the connection logic
 * must check if the target port is a privileged port, and if so, skip the
 * SASL handshake.
 *
 * @param dnConf DNConf to check
 * @param conf Configuration to check
 * @param resources SecureResources obtained for the DataNode
 * @throws RuntimeException if security enabled, but configuration is insecure
 */
private static void checkSecureConfig(DNConf dnConf, Configuration conf,
    SecureResources resources) throws RuntimeException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
  if (resources != null && saslPropsResolver == null) {
    return;
  }
  if (dnConf.getIgnoreSecurePortsForTesting()) {
    return;
  }
  if (saslPropsResolver != null &&
      DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY &&
      resources == null) {
    return;
  }
  throw new RuntimeException("Cannot start secure DataNode without " +
    "configuring either privileged resources or SASL RPC data transfer " +
    "protection and SSL for HTTP.  Using privileged resources in " +
    "combination with SASL RPC data transfer protection is not supported.");
}
 
Example 10
Source Project: big-c   Source File: TestHttpsFileSystem.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHttpsFileSystem.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  cluster.waitActive();
  OutputStream os = cluster.getFileSystem().create(new Path("/test"));
  os.write(23);
  os.close();
  InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
  nnAddr = NetUtils.getHostPortString(addr);
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
}
 
Example 11
Source Project: big-c   Source File: SaslDataTransferTestCase.java    License: Apache License 2.0
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Example 12
Source Project: hadoop-ozone   Source File: TestOzoneManagerHttpServer.java    License: Apache License 2.0
@Parameters public static Collection<Object[]> policy() {
  Object[][] params = new Object[][] {
      {HttpConfig.Policy.HTTP_ONLY},
      {HttpConfig.Policy.HTTPS_ONLY},
      {HttpConfig.Policy.HTTP_AND_HTTPS} };
  return Arrays.asList(params);
}
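With JUnit 4's Parameterized runner, each Object[] above is passed to the test class constructor. A hypothetical constructor consuming the parameter might look like this (field and parameter names are illustrative):

private final HttpConfig.Policy policy;

public TestOzoneManagerHttpServer(HttpConfig.Policy policy) {
  this.policy = policy;
}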
 
Example 13
Source Project: Bats   Source File: ConfigUtils.java    License: Apache License 2.0
public static boolean isSSLEnabled(Configuration conf)
{
  if (HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(
      conf.get(YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT))) {
    return true;
  }
  return false;
}
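Callers typically use this check to pick a URL scheme, much like the MRWebAppUtil getters later in this list; a hypothetical sketch:

// Hypothetical helper: choose a scheme for building web URLs.
static String webAppScheme(Configuration conf) {
  return ConfigUtils.isSSLEnabled(conf) ? "https://" : "http://";
}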
 
Example 14
Source Project: Flink-CEPplus   Source File: RollingSinkSecuredITCase.java    License: Apache License 2.0
private static void populateSecureConfigurations() {
  String dataTransferProtection = "authentication";

  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
  conf.set(DFS_DATANODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SecureTestEnvironment.getHadoopServicePrincipal());

  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set("dfs.data.transfer.protection", dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "false");
  conf.setInt("dfs.datanode.socket.write.timeout", 0);

  /*
   * We are setting the DataNode ports to privileged ports - see HDFS-9213.
   * This requires the user to have root privilege to bind to them. Use the
   * command below (Ubuntu) to grant the bind() capability to the java
   * process if it is not running as root:
   * setcap 'cap_net_bind_service=+ep' /path/to/java
   */
  conf.set(DFS_DATANODE_ADDRESS_KEY, "localhost:1002");
  conf.set(DFS_DATANODE_HOST_NAME_KEY, "localhost");
  conf.set(DFS_DATANODE_HTTP_ADDRESS_KEY, "localhost:1003");
}
 
Example 15
Source Project: hadoop   Source File: DFSUtil.java    License: Apache License 2.0
/**
 * Get http policy. Http Policy is chosen as follows:
 * <ol>
 * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
 * https endpoints are started on configured https ports</li>
 * <li>This configuration is overridden by dfs.https.enable configuration, if
 * it is set to true. In that case, both http and https endpoints are started.</li>
 * <li>All the above configurations are overridden by dfs.http.policy
 * configuration. With this configuration you can set http-only, https-only
 * and http-and-https endpoints.</li>
 * </ol>
 * See hdfs-default.xml documentation for more details on each of the above
 * configuration settings.
 */
public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
  String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  if (policyStr == null) {
    boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
        DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);

    boolean hadoopSsl = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);

    if (hadoopSsl) {
      LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }
    if (https) {
      LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }

    return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
        : HttpConfig.Policy.HTTP_ONLY;
  }

  HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
  if (policy == null) {
    throw new HadoopIllegalArgumentException("Unrecognized value '"
        + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  }

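  // Write the resolved policy back so later reads of dfs.http.policy see a
  // normalized value, even when it was derived from the deprecated keys.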
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
  return policy;
}
 
Example 16
Source Project: hadoop   Source File: TestSaslDataTransfer.java    License: Apache License 2.0
@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
  HdfsConfiguration clusterConf = createSecureConfig("authentication");
  clusterConf.set(DFS_HTTP_POLICY_KEY,
    HttpConfig.Policy.HTTP_AND_HTTPS.name());
  exception.expect(RuntimeException.class);
  exception.expectMessage("Cannot start secure DataNode");
  startCluster(clusterConf);
}
 
Example 17
Source Project: hadoop   Source File: TestNameNodeRespectsBindHostKeys.java    License: Apache License 2.0
private static void setupSsl() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  assertTrue(base.mkdirs());
  final String keystoresDir = new File(BASEDIR).getAbsolutePath();
  final String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeRespectsBindHostKeys.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
}
 
Example 18
Source Project: big-c   Source File: DFSUtil.java    License: Apache License 2.0
/**
 * Get http policy. Http Policy is chosen as follows:
 * <ol>
 * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
 * https endpoints are started on configured https ports</li>
 * <li>This configuration is overridden by dfs.https.enable configuration, if
 * it is set to true. In that case, both http and https endpoints are started.</li>
 * <li>All the above configurations are overridden by dfs.http.policy
 * configuration. With this configuration you can set http-only, https-only
 * and http-and-https endpoints.</li>
 * </ol>
 * See hdfs-default.xml documentation for more details on each of the above
 * configuration settings.
 */
public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
  String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  if (policyStr == null) {
    boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
        DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);

    boolean hadoopSsl = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
        CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);

    if (hadoopSsl) {
      LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }
    if (https) {
      LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
          + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
          + ".");
    }

    return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
        : HttpConfig.Policy.HTTP_ONLY;
  }

  HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
  if (policy == null) {
    throw new HadoopIllegalArgumentException("Unrecognized value '"
        + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
  }

  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
  return policy;
}
 
Example 19
Source Project: big-c   Source File: TestSaslDataTransfer.java    License: Apache License 2.0
@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
  HdfsConfiguration clusterConf = createSecureConfig("authentication");
  clusterConf.set(DFS_HTTP_POLICY_KEY,
    HttpConfig.Policy.HTTP_AND_HTTPS.name());
  exception.expect(RuntimeException.class);
  exception.expectMessage("Cannot start secure DataNode");
  startCluster(clusterConf);
}
 
Example 20
Source Project: big-c   Source File: TestNameNodeRespectsBindHostKeys.java    License: Apache License 2.0
private static void setupSsl() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  assertTrue(base.mkdirs());
  final String keystoresDir = new File(BASEDIR).getAbsolutePath();
  final String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeRespectsBindHostKeys.class);

  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
}
 
Example 21
Source Project: attic-apex-core   Source File: ConfigUtils.java    License: Apache License 2.0
public static boolean isSSLEnabled(Configuration conf)
{
  if (HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(
      conf.get(YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT))) {
    return true;
  }
  return false;
}
 
Example 22
Source Project: hbase   Source File: TestSecureRESTServer.java    License: Apache License 2.0
private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
  // Set principal+keytab configuration for HDFS
  conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
  conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
  conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
      SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
  // Enable token access for HDFS blocks
  conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  // Only use HTTPS (required because we aren't using "secure" ports)
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  // Bind on localhost for spnego to have a chance at working
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  // Generate SSL certs
  File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
  keystoresDir.mkdirs();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureRESTServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);

  // Magic flag to tell hdfs to not fail on using ports above 1024
  conf.setBoolean("ignore.secure.ports.for.testing", true);
}
 
Example 23
Source Project: hbase   Source File: HBaseKerberosUtils.java    License: Apache License 2.0
/**
 * Set up SSL configuration for HDFS NameNode and DataNode.
 * @param utility a HBaseTestingUtility object.
 * @param clazz the caller test class.
 * @throws Exception if unable to set up SSL configuration
 */
public static void setSSLConfiguration(HBaseCommonTestingUtility utility, Class<?> clazz)
  throws Exception {
  Configuration conf = utility.getConfiguration();
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

  File keystoresDir = new File(utility.getDataTestDir("keystore").toUri().getPath());
  keystoresDir.mkdirs();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(clazz);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
}
 
Example 24
Source Project: XLearning   Source File: XLearningWebAppUtil.java    License: Apache License 2.0
public static String getYARNWebappScheme() {
  return httpPolicyInYarn == HttpConfig.Policy.HTTPS_ONLY ? "https://"
      : "http://";
}
 
Example 25
Source Project: XLearning   Source File: XLearningWebAppUtil.java    License: Apache License 2.0
public static String getJHSWebappScheme() {
  return httpPolicyInJHS == HttpConfig.Policy.HTTPS_ONLY ? "https://"
      : "http://";
}
 
Example 26
Source Project: hadoop   Source File: YarnConfiguration.java    License: Apache License 2.0
public static boolean useHttps(Configuration conf) {
  return HttpConfig.Policy.HTTPS_ONLY == HttpConfig.Policy.fromString(conf
      .get(YARN_HTTP_POLICY_KEY,
          YARN_HTTP_POLICY_DEFAULT));
}
 
Example 27
Source Project: hadoop   Source File: MiniMRYarnCluster.java    License: Apache License 2.0
@Override
public synchronized void serviceStart() throws Exception {
  try {
    if (!getConfig().getBoolean(
        JHAdminConfig.MR_HISTORY_MINICLUSTER_FIXED_PORTS,
        JHAdminConfig.DEFAULT_MR_HISTORY_MINICLUSTER_FIXED_PORTS)) {
      String hostname = MiniYARNCluster.getHostname();
      // pick free random ports.
      getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
        hostname + ":0");
      MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(), hostname
          + ":0");
      getConfig().set(JHAdminConfig.JHS_ADMIN_ADDRESS,
        hostname + ":0");
    }
    historyServer = new JobHistoryServer();
    historyServer.init(getConfig());
    new Thread() {
      public void run() {
        historyServer.start();
      }
    }.start();
    while (historyServer.getServiceState() == STATE.INITED) {
      LOG.info("Waiting for HistoryServer to start...");
      Thread.sleep(1500);
    }
    // TODO: add a timeout, and possibly a STATE.STOPPED check
    if (historyServer.getServiceState() != STATE.STARTED) {
      throw new IOException("HistoryServer failed to start");
    }
    super.serviceStart();
  } catch (Throwable t) {
    throw new YarnRuntimeException(t);
  }
  //need to do this because historyServer.init creates a new Configuration
  getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
                  historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
  MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(),
      MRWebAppUtil.getJHSWebappURLWithoutScheme(historyServer.getConfig()));

  LOG.info("MiniMRYARN ResourceManager address: " +
           getConfig().get(YarnConfiguration.RM_ADDRESS));
  LOG.info("MiniMRYARN ResourceManager web address: " +
           WebAppUtils.getRMWebAppURLWithoutScheme(getConfig()));
  LOG.info("MiniMRYARN HistoryServer address: " +
           getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
  LOG.info("MiniMRYARN HistoryServer web address: "
      + getResolvedMRHistoryWebAppURLWithoutScheme(getConfig(),
          MRWebAppUtil.getJHSHttpPolicy() == HttpConfig.Policy.HTTPS_ONLY));
}
 
Example 28
Source Project: hadoop   Source File: MRWebAppUtil.java    License: Apache License 2.0
public static String getYARNWebappScheme() {
  return httpPolicyInYarn == HttpConfig.Policy.HTTPS_ONLY ? "https://"
      : "http://";
}
 
Example 29
Source Project: hadoop   Source File: MRWebAppUtil.java    License: Apache License 2.0
public static String getJHSWebappScheme() {
  return httpPolicyInJHS == HttpConfig.Policy.HTTPS_ONLY ? "https://"
      : "http://";
}