Java Code Examples for org.apache.hadoop.minikdc.MiniKdc#start()

The following examples show how to use org.apache.hadoop.minikdc.MiniKdc#start(). Each snippet is taken from an open-source project; the source file, originating project, and license are noted above each example.
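Before the project-specific examples, here is a minimal sketch of the typical MiniKdc lifecycle in a JUnit 4 test, assembled from the same API calls used below (MiniKdc.createConf(), the MiniKdc(Properties, File) constructor, start(), createPrincipal(), and stop()). The class name ExampleMiniKdcTest, the working directory, and the principal names are illustrative placeholders, not taken from any particular project.

import java.io.File;
import java.util.Properties;

import org.apache.hadoop.minikdc.MiniKdc;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class ExampleMiniKdcTest {

  private static MiniKdc kdc;
  private static File workDir;
  private static File keytab;

  @BeforeClass
  public static void startMiniKdc() throws Exception {
    // Working directory for the KDC; MiniKdc writes its krb5.conf here.
    workDir = new File("target", ExampleMiniKdcTest.class.getSimpleName());
    workDir.mkdirs();

    // Default KDC configuration; properties such as MiniKdc.DEBUG can be
    // adjusted on this object before the KDC is constructed.
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, workDir);
    kdc.start();

    // Export the principals the test will authenticate as into a keytab.
    keytab = new File(workDir, "test.keytab");
    kdc.createPrincipal(keytab, "client", "HTTP/localhost");
  }

  @AfterClass
  public static void stopMiniKdc() {
    if (kdc != null) {
      kdc.stop();
    }
  }
}

Most of the examples below follow this pattern, differing mainly in which principals they create and in how they wire the resulting keytab and realm into Hadoop, ZooKeeper, or JAAS configuration.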
Example 1
Source File: TestKMS.java    From big-c with Apache License 2.0
@BeforeClass
public static void setUpMiniKdc() throws Exception {
  File kdcDir = getTestDir();
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, kdcDir);
  kdc.start();
  keytab = new File(kdcDir, "keytab");
  List<String> principals = new ArrayList<String>();
  principals.add("HTTP/localhost");
  principals.add("client");
  principals.add("hdfs");
  principals.add("otheradmin");
  principals.add("client/host");
  principals.add("client1");
  for (KMSACLs.Type type : KMSACLs.Type.values()) {
    principals.add(type.toString());
  }
  principals.add("CREATE_MATERIAL");
  principals.add("ROLLOVER_MATERIAL");
  kdc.createPrincipal(keytab,
      principals.toArray(new String[principals.size()]));
}
 
Example 2
Source File: SaslDataTransferTestCase.java    From hadoop with Apache License 2.0
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
Example 3
Source File: KdcLocalCluster.java    From hadoop-mini-clusters with Apache License 2.0
@Override
public void start() throws Exception {

    LOG.info("KDC: Starting MiniKdc");
    configure();
    miniKdc = new MiniKdc(conf, new File(baseDir));
    miniKdc.start();

    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("guest");
    UserGroupInformation.setLoginUser(ugi);
    String username = UserGroupInformation.getLoginUser().getShortUserName();

    List<String> temp = new ArrayList<>(principals);
    temp.add(username);
    this.principals = Collections.unmodifiableList(temp);

    principals.forEach(p -> {
        try {
            File keytab = new File(baseDir, p + ".keytab");
            LOG.info("KDC: Creating keytab for {} in {}", p, keytab);
            miniKdc.createPrincipal(keytab, p, getKrbPrincipal(p), getKrbPrincipalWithRealm(p));
        } catch (Exception e) {
            throw Throwables.propagate(e);
        }
    });
    refreshDefaultRealm();
    prepareSecureConfiguration(username);
}
 
Example 4
Source File: BaseSecurityTest.java    From incubator-atlas with Apache License 2.0
protected File startKDC() throws Exception {
    File target = Files.createTempDirectory("sectest").toFile();
    File kdcWorkDir = new File(target, "kdc");
    Properties kdcConf = MiniKdc.createConf();
    kdcConf.setProperty(MiniKdc.DEBUG, "true");
    kdc = new MiniKdc(kdcConf, kdcWorkDir);
    kdc.start();

    Assert.assertNotNull(kdc.getRealm());
    return kdcWorkDir;
}
 
Example 5
Source File: AbstractSecureRegistryTest.java    From big-c with Apache License 2.0
/**
 * Sets up the KDC and a set of principals in the JAAS file
 *
 * @throws Exception
 */
public static void setupKDCAndPrincipals() throws Exception {
  // set up the KDC
  File target = new File(System.getProperty("test.dir", "target"));
  kdcWorkDir = new File(target, "kdc");
  kdcWorkDir.mkdirs();
  if (!kdcWorkDir.mkdirs()) {
    assertTrue(kdcWorkDir.isDirectory());
  }
  kdcConf = MiniKdc.createConf();
  kdcConf.setProperty(MiniKdc.DEBUG, "true");
  kdc = new MiniKdc(kdcConf, kdcWorkDir);
  kdc.start();

  keytab_zk = createKeytab(ZOOKEEPER, "zookeeper.keytab");
  keytab_alice = createKeytab(ALICE, "alice.keytab");
  keytab_bob = createKeytab(BOB, "bob.keytab");
  zkServerPrincipal = Shell.WINDOWS ? ZOOKEEPER_1270001 : ZOOKEEPER_LOCALHOST;

  StringBuilder jaas = new StringBuilder(1024);
  jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_CLIENT_CONTEXT,
      ZOOKEEPER, keytab_zk));
  jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_SERVER_CONTEXT,
      zkServerPrincipal, keytab_zk));
  jaas.append(registrySecurity.createJAASEntry(ALICE_CLIENT_CONTEXT,
      ALICE_LOCALHOST , keytab_alice));
  jaas.append(registrySecurity.createJAASEntry(BOB_CLIENT_CONTEXT,
      BOB_LOCALHOST, keytab_bob));

  jaasFile = new File(kdcWorkDir, "jaas.txt");
  FileUtils.write(jaasFile, jaas.toString());
  LOG.info("\n"+ jaas);
  RegistrySecurity.bindJVMtoJAASFile(jaasFile);
}
 
Example 6
Source File: TestRMWebServicesDelegationTokens.java    From big-c with Apache License 2.0
@BeforeClass
public static void setupKDC() throws Exception {
  testRootDir = new File("target",
    TestRMWebServicesDelegationTokens.class.getName() + "-root");
  testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
  testMiniKDC.start();
  testMiniKDC.createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost",
    "client", "client2", "client3");
}
 
Example 7
Source File: CoreClientOverOneWaySSLKerb5Test.java    From activemq-artemis with Apache License 2.0
@Override
@Before
public void setUp() throws Exception {
   super.setUp();
   kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
   kdc.start();
}
 
Example 8
Source File: TestSecureNNWithQJM.java    From hadoop with Apache License 2.0
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
Example 9
Source File: SecureKnoxShellTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  final Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  userName = UserGroupInformation
      .createUserForTesting("guest", new String[] { "users" }).getUserName();
  final File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  final String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance,
      "HTTP/" + krbInstance);

  hdfsPrincipal =
      userName + "/" + krbInstance + "@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  krb5conf = kdc.getKrb5conf().getAbsolutePath();
}
 
Example 10
Source File: SentryMiniKdcTestcase.java    From incubator-sentry with Apache License 2.0
public static void startMiniKdc(Properties confOverlay) throws Exception {
  createTestDir();
  createMiniKdcConf(confOverlay);
  kdc = new MiniKdc(conf, workDir);
  kdc.start();
}
 
Example 11
Source File: JAASKerberosTest.java    From blazingcache with Apache License 2.0
@Before
public void startMiniKdc() throws Exception {

    createMiniKdcConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost";
    String principalServerNoRealm = "blazingcache/" + localhostName;
    String principalServer = "blazingcache/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "blazingcacheclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(kerberosWorkDir.getRoot(), "blazingcacheclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(kerberosWorkDir.getRoot(), "blazingcacheserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
            + "BlazingCacheServer {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "\n"
            + "\n"
            + "BlazingCacheClient {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n"
        );

    }

    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            // disable UDP as Kerby will listen only on TCP by default
            + " udp_preference_limit=1\n"
            + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + "  = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }"
        );

    }

    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    System.setProperty("sun.security.krb5.debug", "true");
    javax.security.auth.login.Configuration.getConfiguration().refresh();

}
 
Example 12
Source File: SecureTestEnvironment.java    From flink with Apache License 2.0
public static void prepare(TemporaryFolder tempFolder) {

		try {
			File baseDirForSecureRun = tempFolder.newFolder();
			LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

			String hostName = "localhost";
			Properties kdcConf = MiniKdc.createConf();
			if (LOG.isDebugEnabled()) {
				kdcConf.setProperty(MiniKdc.DEBUG, "true");
			}
			kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
			kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
			kdc.start();
			LOG.info("Started Mini KDC");

			File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
			testKeytab = keytabFile.getAbsolutePath();
			testZkServerPrincipal = "zookeeper/" + hostName;
			testZkClientPrincipal = "zk-client/" + hostName;
			testKafkaServerPrincipal = "kafka/" + hostName;
			hadoopServicePrincipal = "hadoop/" + hostName;
			testPrincipal = "client/" + hostName;

			kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
					hadoopServicePrincipal,
					testZkClientPrincipal,
					testKafkaServerPrincipal);

			testPrincipal = testPrincipal + "@" + kdc.getRealm();
			testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
			testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
			testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
			hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

			LOG.info("-------------------------------------------------------------------");
			LOG.info("Test Principal: {}", testPrincipal);
			LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
			LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
			LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
			LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
			LOG.info("Test Keytab: {}", testKeytab);
			LOG.info("-------------------------------------------------------------------");

			//Security Context is established to allow non hadoop applications that requires JAAS
			//based SASL/Kerberos authentication to work. However, for Hadoop specific applications
			//the context can be reinitialized with Hadoop configuration by calling
			//ctx.setHadoopConfiguration() for the UGI implementation to work properly.
			//See Yarn test case module for reference
			Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
			flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
			flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
			SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
			TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

			populateJavaPropertyVariables();

		} catch (Exception e) {
			throw new RuntimeException("Exception occured while preparing secure environment.", e);
		}

	}
 
Example 13
Source File: SecureUserConnectionsIT.java    From phoenix with Apache License 2.0
@BeforeClass
public static synchronized void setupKdc() throws Exception {
    ensureIsEmptyDirectory(KDC_DIR);
    ensureIsEmptyDirectory(KEYTAB_DIR);
    // Create and start the KDC. MiniKDC appears to have a race condition in how it does
    // port allocation (with apache-ds). See PHOENIX-3287.
    boolean started = false;
    for (int i = 0; !started && i < KDC_START_ATTEMPTS; i++) {
        Properties kdcConf = MiniKdc.createConf();
        kdcConf.put(MiniKdc.DEBUG, true);
        KDC = new MiniKdc(kdcConf, KDC_DIR);
        try {
            KDC.start();
            started = true;
        } catch (Exception e) {
            LOGGER.warn("PHOENIX-3287: Failed to start KDC, retrying..", e);
        }
    }
    assertTrue("The embedded KDC failed to start successfully after " + KDC_START_ATTEMPTS
            + " attempts.", started);

    createUsers(NUM_USERS);
    createServiceUsers(NUM_USERS);

    final Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
    conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
    UserGroupInformation.setConfiguration(conf);

    // Clear the cached singletons so we can inject our own.
    InstanceResolver.clearSingletons();
    // Make sure the ConnectionInfo doesn't try to pull a default Configuration
    InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {
        @Override
        public Configuration getConfiguration() {
            return conf;
        }
        @Override
        public Configuration getConfiguration(Configuration confToClone) {
            Configuration copy = new Configuration(conf);
            copy.addResource(confToClone);
            return copy;
        }
    });
    updateDefaultRealm();
}
 
Example 14
Source File: SecureTestEnvironment.java    From Flink-CEPplus with Apache License 2.0
public static void prepare(TemporaryFolder tempFolder) {

		try {
			File baseDirForSecureRun = tempFolder.newFolder();
			LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

			String hostName = "localhost";
			Properties kdcConf = MiniKdc.createConf();
			if (LOG.isDebugEnabled()) {
				kdcConf.setProperty(MiniKdc.DEBUG, "true");
			}
			kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
			kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
			kdc.start();
			LOG.info("Started Mini KDC");

			File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
			testKeytab = keytabFile.getAbsolutePath();
			testZkServerPrincipal = "zookeeper/127.0.0.1";
			testZkClientPrincipal = "zk-client/127.0.0.1";
			testKafkaServerPrincipal = "kafka/" + hostName;
			hadoopServicePrincipal = "hadoop/" + hostName;
			testPrincipal = "client/" + hostName;

			kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
					hadoopServicePrincipal,
					testZkClientPrincipal,
					testKafkaServerPrincipal);

			testPrincipal = testPrincipal + "@" + kdc.getRealm();
			testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
			testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
			testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
			hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

			LOG.info("-------------------------------------------------------------------");
			LOG.info("Test Principal: {}", testPrincipal);
			LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
			LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
			LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
			LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
			LOG.info("Test Keytab: {}", testKeytab);
			LOG.info("-------------------------------------------------------------------");

			//Security Context is established to allow non hadoop applications that requires JAAS
			//based SASL/Kerberos authentication to work. However, for Hadoop specific applications
			//the context can be reinitialized with Hadoop configuration by calling
			//ctx.setHadoopConfiguration() for the UGI implementation to work properly.
			//See Yarn test case module for reference
			Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
			flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
			flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
			SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
			TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

			populateJavaPropertyVariables();

		} catch (Exception e) {
			throw new RuntimeException("Exception occured while preparing secure environment.", e);
		}

	}
 
Example 15
Source File: TestDelegationToken.java    From hadoop-ozone with Apache License 2.0
private void startMiniKdc() throws Exception {
  Properties securityProperties = MiniKdc.createConf();
  miniKdc = new MiniKdc(securityProperties, workDir);
  miniKdc.start();
}
 
Example 16
Source File: SecureClusterTest.java    From knox with Apache License 2.0
private static void initKdc() throws Exception {
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  configuration = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
  UserGroupInformation.setConfiguration(configuration);
  assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
  userName = UserGroupInformation.createUserForTesting("guest", new String[]{"users"}).getUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
  configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
  configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
  configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
  configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
  configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
  configuration.setBoolean("dfs.permissions", true);

  String keystoresDir = baseDir.getAbsolutePath();
  File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
  File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
  configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslClientConfFile.getName());
  configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      sslServerConfFile.getName());

  setupKnox(keytab, hdfsPrincipal);
}