org.apache.flink.test.util.SecureTestEnvironment Java Examples

The following examples show how to use org.apache.flink.test.util.SecureTestEnvironment. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test(timeout = 60000) // timeout after a minute.
public void testDetachedModeSecureWithPreInstallKeytab() throws Exception {
	runTest(() -> {
		// Build the client-side security properties from the secure test environment.
		final Map<String, String> securityProperties = new HashMap<>();
		final String keytab = SecureTestEnvironment.getTestKeytab();
		if (keytab != null) {
			// client login keytab
			securityProperties.put(SecurityOptions.KERBEROS_LOGIN_KEYTAB.key(), keytab);
			// pre-install Yarn local keytab, since both reuse the same temporary folder "tmp"
			securityProperties.put(YarnConfigOptions.LOCALIZED_KEYTAB_PATH.key(), keytab);
			// unset keytab localization
			securityProperties.put(YarnConfigOptions.SHIP_LOCAL_KEYTAB.key(), "false");
		}
		final String principal = SecureTestEnvironment.getHadoopServicePrincipal();
		if (principal != null) {
			securityProperties.put(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL.key(), principal);
		}
		runDetachedModeTest(securityProperties);
		verifyResultContainsKerberosKeytab();
	});
}
 
Example #2
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a Flink configuration for a secure HA (ZooKeeper) cluster backed by HDFS,
 * creating the checkpoint/recovery directories on DFS first.
 */
private static Configuration startSecureFlinkClusterWithRecoveryModeEnabled() {
	try {
		LOG.info("Starting Flink and ZK in secure mode");

		// Pre-create the DFS directories used for checkpoints and HA recovery state.
		dfs.mkdirs(new Path("/flink/checkpoints"));
		dfs.mkdirs(new Path("/flink/recovery"));

		final String checkpointsUri = hdfsURI + "/flink/checkpoints";
		final String recoveryUri = hdfsURI + "/flink/recovery";

		final Configuration result = new Configuration();
		result.setBoolean(ConfigConstants.LOCAL_START_WEBSERVER, false);
		result.setInteger(ConfigConstants.LOCAL_NUMBER_JOB_MANAGER, 3);
		result.setString(HighAvailabilityOptions.HA_MODE, "zookeeper");
		result.setString(CheckpointingOptions.STATE_BACKEND, "filesystem");
		result.setString(HighAvailabilityOptions.HA_ZOOKEEPER_CHECKPOINTS_PATH, checkpointsUri);
		result.setString(HighAvailabilityOptions.HA_STORAGE_PATH, recoveryUri);
		result.setString("state.backend.fs.checkpointdir", checkpointsUri);

		// Layer the Kerberos test credentials on top of the cluster configuration.
		SecureTestEnvironment.populateFlinkSecureConfigurations(result);

		return result;
	} catch (Exception e) {
		throw new RuntimeException(e);
	}
}
 
Example #3
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test(timeout = 60000) // timeout after a minute.
@Override
public void testDetachedMode() throws Exception {
	runTest(() -> {
		// Collect Kerberos client credentials (if the secure environment provides them).
		final Map<String, String> securityProperties = new HashMap<>();
		final String keytab = SecureTestEnvironment.getTestKeytab();
		if (keytab != null) {
			securityProperties.put(SecurityOptions.KERBEROS_LOGIN_KEYTAB.key(), keytab);
		}
		final String principal = SecureTestEnvironment.getHadoopServicePrincipal();
		if (principal != null) {
			securityProperties.put(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL.key(), principal);
		}
		runDetachedModeTest(securityProperties);
		verifyResultContainsKerberosKeytab();
	});
}
 
Example #4
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@AfterClass
public static void teardown() throws Exception {
	LOG.info("tearing down secure cluster environment");

	// Shut down the mini HDFS cluster first (may be null if setup failed early).
	if (hdfsCluster != null) {
		hdfsCluster.shutdown();
	}

	// Stop the Flink mini cluster and drop the reference so a re-run starts fresh.
	if (miniClusterResource != null) {
		miniClusterResource.after();
		miniClusterResource = null;
	}

	// Finally tear down the KDC / keytab state created by SecureTestEnvironment.prepare().
	SecureTestEnvironment.cleanup();
}
 
Example #5
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Configures the shared Hadoop {@code conf} for Kerberos-secured HDFS:
 * sets the NameNode/DataNode principals and keytabs from the secure test
 * environment, enables block access tokens, and pins DataNode addresses
 * to privileged localhost ports (see HDFS-9213 note below).
 */
private static void populateSecureConfigurations() {

		String dataTransferProtection = "authentication";

		SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
		conf.set(DFS_NAMENODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_DATANODE_USER_NAME_KEY, SecureTestEnvironment.getHadoopServicePrincipal());
		conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, SecureTestEnvironment.getTestKeytab());
		conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SecureTestEnvironment.getHadoopServicePrincipal());

		conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);

		// "authentication" only — no integrity/privacy, keeps the test lightweight.
		conf.set("dfs.data.transfer.protection", dataTransferProtection);

		// Plain HTTP (no HTTPS) and no wire encryption for the test cluster.
		conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());

		conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "false");

		// 0 disables the DataNode socket write timeout.
		conf.setInt("dfs.datanode.socket.write.timeout", 0);

		/*
		 * We are setting the port number to privileged port - see HDFS-9213
		 * This requires the user to have root privilege to bind to the port
		 * Use below command (ubuntu) to set privilege to java process for the
		 * bind() to work if the java process is not running as root.
		 * setcap 'cap_net_bind_service=+ep' /path/to/java
		 */
		conf.set(DFS_DATANODE_ADDRESS_KEY, "localhost:1002");
		conf.set(DFS_DATANODE_HOST_NAME_KEY, "localhost");
		conf.set(DFS_DATANODE_HTTP_ADDRESS_KEY, "localhost:1003");
	}
 
Example #6
Source File: Kafka09SecuredRunITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void prepare() throws Exception {
	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Starting Kafka09SecuredRunITCase ");
	LOG.info("-------------------------------------------------------------------------");

	// Spin up the Kerberos test environment, then push its credentials
	// into the Flink configuration before starting the secured clusters.
	SecureTestEnvironment.prepare(tempFolder);
	SecureTestEnvironment.populateFlinkSecureConfigurations(getFlinkConfiguration());

	startClusters(true, false);
}
 
Example #7
Source File: Kafka09SecuredRunITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@BeforeClass
// NOTE(review): the sibling flink-repo variant declares `throws Exception`;
// this one only declares ClassNotFoundException — confirm startClusters()
// declares no other checked exceptions in this codebase.
public static void prepare() throws ClassNotFoundException {
	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Starting Kafka09SecuredRunITCase ");
	LOG.info("-------------------------------------------------------------------------");

	// Spin up the Kerberos test environment, then push its credentials
	// into the Flink configuration before starting the secured clusters.
	SecureTestEnvironment.prepare(tempFolder);
	SecureTestEnvironment.populateFlinkSecureConfigurations(getFlinkConfiguration());

	startClusters(true, false);
}
 
Example #8
Source File: Kafka010SecuredRunITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void prepare() throws Exception {
	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Starting Kafka010SecuredRunITCase ");
	LOG.info("-------------------------------------------------------------------------");

	// Spin up the Kerberos test environment, then push its credentials
	// into the Flink configuration before starting the secured clusters.
	SecureTestEnvironment.prepare(tempFolder);
	SecureTestEnvironment.populateFlinkSecureConfigurations(getFlinkConfiguration());

	startClusters(true, false);
}
 
Example #9
Source File: Kafka09SecuredRunITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void shutDownServices() throws Exception {
	// Stop Kafka/Flink clusters before tearing down the Kerberos environment.
	shutdownClusters();
	SecureTestEnvironment.cleanup();
}
 
Example #10
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void teardownSecureCluster() {
	LOG.info("tearing down secure cluster environment");
	// Releases the KDC / keytab state created by SecureTestEnvironment.prepare().
	SecureTestEnvironment.cleanup();
}
 
Example #11
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Prepares a secured mini-YARN cluster for the FIFO session tests: configures
 * the FIFO scheduler and container sizes, starts the Kerberos test environment,
 * wires the test keytab/principal into both the YARN and Flink configurations,
 * installs the (customized) security context, and starts YARN in secure mode.
 */
@BeforeClass
public static void setup() {

	LOG.info("starting secure cluster environment for testing");

	// Mini-YARN cluster: FIFO scheduler, small containers, distinct cluster name.
	YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
	YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
	YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
	YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");

	// Starts the Kerberos test environment (keytab + principals).
	SecureTestEnvironment.prepare(tmp);

	populateYarnSecureConfigurations(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
			SecureTestEnvironment.getTestKeytab());

	Configuration flinkConfig = new Configuration();
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
		SecureTestEnvironment.getTestKeytab());
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
		SecureTestEnvironment.getHadoopServicePrincipal());

	// Setting customized security module class.
	TestHadoopModuleFactory.hadoopConfiguration = YARN_CONFIGURATION;
	flinkConfig.set(SecurityOptions.SECURITY_MODULE_FACTORY_CLASSES,
		Collections.singletonList("org.apache.flink.yarn.util.TestHadoopModuleFactory"));
	flinkConfig.set(SecurityOptions.SECURITY_CONTEXT_FACTORY_CLASSES,
		Collections.singletonList("org.apache.flink.yarn.util.TestHadoopSecurityContextFactory"));

	SecurityConfiguration securityConfig =
		new SecurityConfiguration(flinkConfig);

	try {
		TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

		// This is needed to ensure that SecurityUtils are run within a ugi.doAs section
		// Since we already logged in here in @BeforeClass, even a no-op security context will still work.
		Assert.assertTrue("HadoopSecurityContext must be installed",
			SecurityUtils.getInstalledContext() instanceof HadoopSecurityContext);
		// Lambda replaces the anonymous Callable<Object>; still returns null.
		SecurityUtils.getInstalledContext().runSecured(() -> {
			startYARNSecureMode(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
					SecureTestEnvironment.getTestKeytab());
			return null;
		});

	} catch (Exception e) {
		// Fix: the original message ended with "Reason: {}" — an SLF4J-style placeholder
		// that RuntimeException(String, Throwable) never substitutes. The cause 'e'
		// already carries the reason.
		throw new RuntimeException("Exception occurred while setting up secure test context.", e);
	}

}
 
Example #12
Source File: Kafka010SecuredRunITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void shutDownServices() throws Exception {
	// Stop Kafka/Flink clusters before tearing down the Kerberos environment.
	shutdownClusters();
	SecureTestEnvironment.cleanup();
}
 
Example #13
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void teardownSecureCluster() {
	LOG.info("tearing down secure cluster environment");
	// Releases the KDC / keytab state created by SecureTestEnvironment.prepare().
	SecureTestEnvironment.cleanup();
}
 
Example #14
Source File: YARNSessionFIFOSecuredITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Prepares a secured mini-YARN cluster for the FIFO session tests: configures
 * the FIFO scheduler and container sizes, starts the Kerberos test environment,
 * wires the test keytab/principal into both the YARN and Flink configurations,
 * installs the security context (with a Hadoop module bound to the test YARN
 * configuration), and starts YARN in secure mode.
 */
@BeforeClass
public static void setup() {

	LOG.info("starting secure cluster environment for testing");

	// Mini-YARN cluster: FIFO scheduler, small containers, distinct cluster name.
	YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
	YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
	YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
	YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");

	// Starts the Kerberos test environment (keytab + principals).
	SecureTestEnvironment.prepare(tmp);

	populateYarnSecureConfigurations(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
			SecureTestEnvironment.getTestKeytab());

	Configuration flinkConfig = new Configuration();
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
			SecureTestEnvironment.getTestKeytab());
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
			SecureTestEnvironment.getHadoopServicePrincipal());

	SecurityConfiguration securityConfig =
		new SecurityConfiguration(
			flinkConfig,
			Collections.singletonList(securityConfig1 -> {
				// manually override the Hadoop Configuration
				return new HadoopModule(securityConfig1, YARN_CONFIGURATION);
			}));

	try {
		TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

		// Lambda replaces the anonymous Callable<Object>; still returns null.
		SecurityUtils.getInstalledContext().runSecured(() -> {
			startYARNSecureMode(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
					SecureTestEnvironment.getTestKeytab());
			return null;
		});

	} catch (Exception e) {
		// Fix: the original message ended with "Reason: {}" — an SLF4J-style placeholder
		// that RuntimeException(String, Throwable) never substitutes. The cause 'e'
		// already carries the reason.
		throw new RuntimeException("Exception occurred while setting up secure test context.", e);
	}

}
 
Example #15
Source File: YARNSessionFIFOSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void teardownSecureCluster() throws Exception {
	LOG.info("tearing down secure cluster environment");
	// Releases the KDC / keytab state created by SecureTestEnvironment.prepare().
	SecureTestEnvironment.cleanup();
}
 
Example #16
Source File: YARNSessionFIFOSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Prepares a secured mini-YARN cluster for the FIFO session tests: configures
 * the FIFO scheduler and container sizes, starts the Kerberos test environment,
 * wires the test keytab/principal into both the YARN and Flink configurations,
 * installs the security context (with a Hadoop module bound to the test YARN
 * configuration), and starts YARN in secure mode.
 */
@BeforeClass
public static void setup() {

	LOG.info("starting secure cluster environment for testing");

	// Mini-YARN cluster: FIFO scheduler, small containers, distinct cluster name.
	YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
	YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
	YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
	YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");

	// Starts the Kerberos test environment (keytab + principals).
	SecureTestEnvironment.prepare(tmp);

	populateYarnSecureConfigurations(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
			SecureTestEnvironment.getTestKeytab());

	Configuration flinkConfig = new Configuration();
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
			SecureTestEnvironment.getTestKeytab());
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
			SecureTestEnvironment.getHadoopServicePrincipal());

	SecurityConfiguration securityConfig =
		new SecurityConfiguration(
			flinkConfig,
			Collections.singletonList(securityConfig1 -> {
				// manually override the Hadoop Configuration
				return new HadoopModule(securityConfig1, YARN_CONFIGURATION);
			}));

	try {
		TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

		// Lambda replaces the anonymous Callable<Object>; still returns null.
		SecurityUtils.getInstalledContext().runSecured(() -> {
			startYARNSecureMode(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
					SecureTestEnvironment.getTestKeytab());
			return null;
		});

	} catch (Exception e) {
		// Fix: the original message ended with "Reason: {}" — an SLF4J-style placeholder
		// that RuntimeException(String, Throwable) never substitutes. The cause 'e'
		// already carries the reason.
		throw new RuntimeException("Exception occurred while setting up secure test context.", e);
	}

}
 
Example #17
Source File: RollingSinkSecuredITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a fully secured test stack: Kerberos environment, secured mini HDFS
 * cluster (with its generated hdfs-site.xml exported via HADOOP_CONF_DIR),
 * and a secure HA-enabled Flink mini cluster on top of it.
 */
@BeforeClass
public static void setup() throws Exception {

	skipIfHadoopVersionIsNotAppropriate();

	LOG.info("starting secure cluster environment for testing");

	dataDir = tempFolder.newFolder();

	conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dataDir.getAbsolutePath());

	// Starts the Kerberos test environment (keytab + principals).
	SecureTestEnvironment.prepare(tempFolder);

	populateSecureConfigurations();

	Configuration flinkConfig = new Configuration();
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
			SecureTestEnvironment.getTestKeytab());
	flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
			SecureTestEnvironment.getHadoopServicePrincipal());

	SecurityConfiguration ctx =
		new SecurityConfiguration(
			flinkConfig,
			Collections.singletonList(securityConfig -> new HadoopModule(securityConfig, conf)));
	try {
		TestingSecurityContext.install(ctx, SecureTestEnvironment.getClientSecurityConfigurationMap());
	} catch (Exception e) {
		// Fix: the original message ended with "Reason: {}" — an SLF4J-style placeholder
		// that RuntimeException(String, Throwable) never substitutes. The cause 'e'
		// already carries the reason.
		throw new RuntimeException("Exception occurred while setting up secure test context.", e);
	}

	File hdfsSiteXML = new File(dataDir.getAbsolutePath() + "/hdfs-site.xml");

	// Fix: try-with-resources guarantees the writer is closed even if writeXml throws;
	// the original leaked the FileWriter on failure. close() implies a final flush.
	try (FileWriter writer = new FileWriter(hdfsSiteXML)) {
		conf.writeXml(writer);
		writer.flush();
	}

	// Point HADOOP_CONF_DIR at the generated hdfs-site.xml so HDFS clients pick up
	// the secure configuration.
	Map<String, String> map = new HashMap<>(System.getenv());
	map.put("HADOOP_CONF_DIR", hdfsSiteXML.getParentFile().getAbsolutePath());
	TestBaseUtils.setEnv(map);

	MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
	builder.checkDataNodeAddrConfig(true);
	builder.checkDataNodeHostConfig(true);
	hdfsCluster = builder.build();

	dfs = hdfsCluster.getFileSystem();

	hdfsURI = "hdfs://"
			+ NetUtils.hostAndPortToUrlString(hdfsCluster.getURI().getHost(), hdfsCluster.getNameNodePort())
			+ "/";

	Configuration configuration = startSecureFlinkClusterWithRecoveryModeEnabled();

	miniClusterResource = new MiniClusterResource(
		new MiniClusterResourceConfiguration.Builder()
			.setConfiguration(configuration)
			.setNumberTaskManagers(1)
			.setNumberSlotsPerTaskManager(4)
			.build());

	miniClusterResource.before();
}
 
Example #18
Source File: Kafka09SecuredRunITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
@AfterClass
public static void shutDownServices() throws Exception {
	shutdownClusters();
	SecureTestEnvironment.cleanup();
}