Java Code Examples for org.apache.hadoop.minikdc.MiniKdc#getRealm()

The following examples show how to use org.apache.hadoop.minikdc.MiniKdc#getRealm(). getRealm() returns the Kerberos realm served by the embedded test KDC (EXAMPLE.COM by default, configurable through the properties returned by MiniKdc.createConf()); tests append it to a short principal name to form the fully qualified principal, e.g. user/host@REALM. Each example is taken from an open-source project, noted with its source file and license.
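For orientation, here is a minimal, self-contained sketch of the lifecycle all the examples below share: create a configuration with MiniKdc.createConf(), start the KDC, register a principal with a keytab, and append getRealm() to build the fully qualified principal name. The work directory and principal names are illustrative assumptions.

import java.io.File;
import java.util.Properties;

import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcRealmSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative work directory; MiniKdc writes its krb5.conf and KDC state here.
    File workDir = new File("target/minikdc-work");
    workDir.mkdirs();

    Properties conf = MiniKdc.createConf(); // defaults include realm EXAMPLE.COM
    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start();
    try {
      // Register a principal and export its key to a keytab.
      File keytab = new File(workDir, "alice.keytab");
      kdc.createPrincipal(keytab, "alice/localhost");

      // getRealm() supplies the realm component of the full principal name.
      String principal = "alice/localhost@" + kdc.getRealm();
      System.out.println("principal = " + principal); // e.g. alice/localhost@EXAMPLE.COM
    } finally {
      kdc.stop();
    }
  }
}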
Example 1
Source File: SaslDataTransferTestCase.java    From hadoop with Apache License 2.0
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
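A setup like this needs a matching teardown so the KDC listener and temporary files do not leak between test classes. A minimal companion sketch, assuming the same static kdc and baseDir fields:

@AfterClass
public static void shutdownKdc() {
  if (kdc != null) {
    kdc.stop(); // shuts down the embedded KDC
  }
  FileUtil.fullyDelete(baseDir); // removes keytabs and KDC working files
}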
 
Example 2
Source File: SecureTestEnvironment.java    From Flink-CEPplus with Apache License 2.0
public static void prepare(TemporaryFolder tempFolder) {

		try {
			File baseDirForSecureRun = tempFolder.newFolder();
			LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

			String hostName = "localhost";
			Properties kdcConf = MiniKdc.createConf();
			if (LOG.isDebugEnabled()) {
				kdcConf.setProperty(MiniKdc.DEBUG, "true");
			}
			kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
			kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
			kdc.start();
			LOG.info("Started Mini KDC");

			File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
			testKeytab = keytabFile.getAbsolutePath();
			testZkServerPrincipal = "zookeeper/127.0.0.1";
			testZkClientPrincipal = "zk-client/127.0.0.1";
			testKafkaServerPrincipal = "kafka/" + hostName;
			hadoopServicePrincipal = "hadoop/" + hostName;
			testPrincipal = "client/" + hostName;

			kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
					hadoopServicePrincipal,
					testZkClientPrincipal,
					testKafkaServerPrincipal);

			testPrincipal = testPrincipal + "@" + kdc.getRealm();
			testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
			testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
			testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
			hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

			LOG.info("-------------------------------------------------------------------");
			LOG.info("Test Principal: {}", testPrincipal);
			LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
			LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
			LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
			LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
			LOG.info("Test Keytab: {}", testKeytab);
			LOG.info("-------------------------------------------------------------------");

			//The security context is established so that non-Hadoop applications that require
			//JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific applications,
			//the context can be reinitialized with a Hadoop configuration by calling
			//ctx.setHadoopConfiguration() so that the UGI implementation works properly.
			//See the YARN test case module for reference.
			Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
			flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
			flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
			SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
			TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

			populateJavaPropertyVariables();

		} catch (Exception e) {
			throw new RuntimeException("Exception occurred while preparing secure environment.", e);
		}

	}
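A test class drives this helper from its JUnit lifecycle hooks. A sketch of the calling side, assuming JUnit 4 and a companion SecureTestEnvironment.cleanup() that stops the MiniKdc (the test class name here is hypothetical):

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.rules.TemporaryFolder;

public class MySecureConnectorITCase {

	@ClassRule
	public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();

	@BeforeClass
	public static void startSecureEnvironment() {
		SecureTestEnvironment.prepare(TEMP_FOLDER);
	}

	@AfterClass
	public static void stopSecureEnvironment() {
		// assumed companion that stops the KDC and clears the test principals
		SecureTestEnvironment.cleanup();
	}
}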
 
Example 3
Source File: TestSecureNNWithQJM.java    From hadoop with Apache License 2.0
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
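init() only prepares baseConf; the test body then boots a journal quorum and a NameNode against it. A rough sketch of that next step, assuming the MiniJournalCluster and MiniDFSCluster helpers from the hadoop-hdfs test jar (the journal id "myjournal" is illustrative):

// Inside a test method that declares throws Exception:
MiniJournalCluster journalCluster = new MiniJournalCluster.Builder(baseConf)
  .numJournalNodes(3)
  .build();
// Point the NameNode's edits directory at the quorum journal.
baseConf.set(DFS_NAMENODE_EDITS_DIR_KEY,
  journalCluster.getQuorumJournalURI("myjournal").toString());
MiniDFSCluster cluster = new MiniDFSCluster.Builder(baseConf)
  .numDataNodes(1)
  .build();
cluster.waitActive();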
 
Example 4
Source File: JAASKerberosTest.java    From herddb with Apache License 2.0
@Before
public void startMiniKdc() throws Exception {

    conf = MiniKdc.createConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost";
    String principalServerNoRealm = "herddb/" + localhostName;
    String principalServer = "herddb/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "herddbclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(workDir.getRoot(), "herddbclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(workDir.getRoot(), "herddbserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    File jaas_file = new File(workDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
                + "HerdDBServer {\n"
                + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
                + "  useKeyTab=true\n"
                + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\n"
                + "  storeKey=true\n"
                + "  useTicketCache=false\n"
                + "  principal=\"" + principalServer + "\";\n"
                + "};\n"
                + "\n"
                + "\n"
                + "\n"
                + "HerdDBClient {\n"
                + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
                + "  useKeyTab=true\n"
                + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\n"
                + "  storeKey=true\n"
                + "  useTicketCache=false\n"
                + "  principal=\"" + principalClient + "\";\n"
                + "};\n"
        );

    }

    File krb5file = new File(workDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
                + " default_realm = " + kdc.getRealm() + "\n"
                // disable UDP as Kerby will listen only on TCP by default
                + " udp_preference_limit=1\n"
                + "\n"
                + "[realms]\n"
                + " " + kdc.getRealm() + "  = {\n"
                + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
                + " }"
        );

    }

    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());

}
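With the jaas.conf and krb5.conf written and the system properties set, a JAAS login can be performed against the named entries. A minimal, hypothetical sketch (not part of the original test) using the HerdDBClient entry; the keytab supplies the credentials, so no callback handler is needed:

private void loginAsClient() throws Exception {
    javax.security.auth.login.LoginContext login =
            new javax.security.auth.login.LoginContext("HerdDBClient");
    login.login(); // obtains a ticket from the MiniKdc using the keytab
    System.out.println("principals: " + login.getSubject().getPrincipals());
    login.logout();
}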
 
Example 5
Source File: JAASKerberosTest.java    From blazingcache with Apache License 2.0
@Before
public void startMiniKdc() throws Exception {

    createMiniKdcConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost";
    String principalServerNoRealm = "blazingcache/" + localhostName;
    String principalServer = "blazingcache/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "blazingcacheclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(kerberosWorkDir.getRoot(), "blazingcacheclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(kerberosWorkDir.getRoot(), "blazingcacheserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
            + "BlazingCacheServer {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "\n"
            + "\n"
            + "BlazingCacheClient {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n"
        );

    }

    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            // disable UDP as Kerby will listen only on TCP by default
            + " udp_preference_limit=1\n"
            + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + "  = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }"
        );

    }

    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    System.setProperty("sun.security.krb5.debug", "true");
    javax.security.auth.login.Configuration.getConfiguration().refresh();

}
 
Example 6
Source File: SecureTestEnvironment.java    From flink with Apache License 2.0
public static void prepare(TemporaryFolder tempFolder) {

		try {
			File baseDirForSecureRun = tempFolder.newFolder();
			LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

			String hostName = "localhost";
			Properties kdcConf = MiniKdc.createConf();
			if (LOG.isDebugEnabled()) {
				kdcConf.setProperty(MiniKdc.DEBUG, "true");
			}
			kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
			kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
			kdc.start();
			LOG.info("Started Mini KDC");

			File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
			testKeytab = keytabFile.getAbsolutePath();
			testZkServerPrincipal = "zookeeper/" + hostName;
			testZkClientPrincipal = "zk-client/" + hostName;
			testKafkaServerPrincipal = "kafka/" + hostName;
			hadoopServicePrincipal = "hadoop/" + hostName;
			testPrincipal = "client/" + hostName;

			kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
					hadoopServicePrincipal,
					testZkClientPrincipal,
					testKafkaServerPrincipal);

			testPrincipal = testPrincipal + "@" + kdc.getRealm();
			testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
			testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
			testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
			hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

			LOG.info("-------------------------------------------------------------------");
			LOG.info("Test Principal: {}", testPrincipal);
			LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
			LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
			LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
			LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
			LOG.info("Test Keytab: {}", testKeytab);
			LOG.info("-------------------------------------------------------------------");

			//The security context is established so that non-Hadoop applications that require
			//JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific applications,
			//the context can be reinitialized with a Hadoop configuration by calling
			//ctx.setHadoopConfiguration() so that the UGI implementation works properly.
			//See the YARN test case module for reference.
			Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
			flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
			flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
			SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
			TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

			populateJavaPropertyVariables();

		} catch (Exception e) {
			throw new RuntimeException("Exception occurred while preparing secure environment.", e);
		}

	}
 
Example 7
Source File: SecureClusterTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  configuration = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
  UserGroupInformation.setConfiguration(configuration);
  assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
  userName = UserGroupInformation.createUserForTesting("guest", new String[]{"users"}).getUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  setupKnox(keytab, hdfsPrincipal);
}
 
Example 8
Source File: SecureKnoxShellTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  final Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  userName = UserGroupInformation
      .createUserForTesting("guest", new String[] { "users" }).getUserName();
  final File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  final String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance,
      "HTTP/" + krbInstance);

  hdfsPrincipal =
      userName + "/" + krbInstance + "@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  krb5conf = kdc.getKrb5conf().getAbsolutePath();
}