Java Code Examples for org.apache.hadoop.hive.conf.HiveConf#setBoolVar()

The following examples show how to use org.apache.hadoop.hive.conf.HiveConf#setBoolVar() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: FlinkStandaloneHiveServerContext.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Tunes the MR execution engine on the given conf so that map-reduce work
 * stays inside this JVM and test runs are fast.
 *
 * @param conf the Hive configuration to modify
 */
private void configureMrExecutionEngine(HiveConf conf) {

	/*
	 * Switch off all optimizers; otherwise the map-reduce work cannot be
	 * contained within this JVM.
	 */
	conf.setBoolVar(HIVE_INFER_BUCKET_SORT, false);
	conf.setBoolVar(HIVEMETADATAONLYQUERIES, false);
	conf.setBoolVar(HIVEOPTINDEXFILTER, false);
	conf.setBoolVar(HIVECONVERTJOIN, false);
	conf.setBoolVar(HIVESKEWJOIN, false);

	// HadoopJobExecHelper sleeps 1000 ms between counter polls by default;
	// 1 ms speeds the tests up noticeably.
	// NOTE(review): the original mixed the `conf` parameter with a
	// `hiveConf` field for the last two settings; unified on `conf`.
	conf.setLongVar(HiveConf.ConfVars.HIVECOUNTERSPULLINTERVAL, 1L);

	conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true);
}
 
Example 2
Source File: StandaloneHiveServerContext.java    From HiveRunner with Apache License 2.0 6 votes vote down vote up
/**
 * Configures an embedded, in-memory Derby-backed metastore on the given conf.
 *
 * @param conf the Hive configuration to modify
 * @throws RuntimeException if the embedded Derby driver is not on the classpath
 */
protected void configureMetaStore(HiveConf conf) {
    configureDerbyLog();

    // The metastore runs against an in-memory Derby database; verify the
    // embedded driver is actually loadable before handing its name to Hive.
    String jdbcDriver = org.apache.derby.jdbc.EmbeddedDriver.class.getName();
    try {
        Class.forName(jdbcDriver);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    }

    // Unique in-memory DB per context so concurrent test runs don't collide.
    metaStorageUrl = "jdbc:derby:memory:" + UUID.randomUUID().toString();
    // NOTE(review): the original mixed the `conf` parameter with a `hiveConf`
    // field for the string properties; unified on `conf`.
    conf.set("datanucleus.schema.autoCreateAll", "true");
    conf.set("hive.metastore.schema.verification", "false");

    conf.set("datanucleus.connectiondrivername", jdbcDriver);
    conf.set("javax.jdo.option.ConnectionDriverName", jdbcDriver);

    // No pooling needed. This will save us a lot of threads
    conf.set("datanucleus.connectionPoolingType", "None");

    conf.setBoolVar(METASTORE_VALIDATE_CONSTRAINTS, true);
    conf.setBoolVar(METASTORE_VALIDATE_COLUMNS, true);
    conf.setBoolVar(METASTORE_VALIDATE_TABLES, true);
}
 
Example 3
Source File: StandaloneHiveServerContext.java    From HiveRunner with Apache License 2.0 6 votes vote down vote up
/**
 * Tunes the MR execution engine on the given conf so that map-reduce work
 * stays inside this JVM and test runs are fast.
 *
 * @param conf the Hive configuration to modify
 */
protected void configureMrExecutionEngine(HiveConf conf) {

    /*
     * Switch off all optimizers; otherwise the map-reduce work cannot be
     * contained within this JVM.
     */
    conf.setBoolVar(HIVE_INFER_BUCKET_SORT, false);
    conf.setBoolVar(HIVEMETADATAONLYQUERIES, false);
    conf.setBoolVar(HIVEOPTINDEXFILTER, false);
    conf.setBoolVar(HIVECONVERTJOIN, false);
    conf.setBoolVar(HIVESKEWJOIN, false);

    // HadoopJobExecHelper sleeps 1000 ms between counter polls by default;
    // 1 ms speeds the tests up noticeably.
    // NOTE(review): the original mixed the `conf` parameter with a
    // `hiveConf` field for the last two settings; unified on `conf`.
    conf.setLongVar(HiveConf.ConfVars.HIVECOUNTERSPULLINTERVAL, 1L);

    conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true);
}
 
Example 4
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a fresh session-scoped configuration pointing at the test metastore.
 *
 * @return a HiveConf wired to the local test metastore and local filesystem
 */
private HiveConf newHiveConf() {
  final HiveConf configuration = new HiveConf(SessionState.class);

  MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, "thrift://localhost:" + port);
  configuration.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  // Metastore needs to be set, and WITH the deprecated key :(
  // Otherwise, will default to /user/hive/warehouse when trying to create a new database
  // (database location is now sent by the client to the server...)
  configuration.setVar(ConfVars.METASTOREWAREHOUSE, whDir);
  configuration.set("mapred.job.tracker", "local");
  configuration.setVar(ConfVars.SCRATCHDIR, getTempDir("scratch_dir"));
  configuration.setVar(ConfVars.LOCALSCRATCHDIR, getTempDir("local_scratch_dir"));
  configuration.setVar(ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
  configuration.setBoolVar(ConfVars.HIVE_CBO_ENABLED, false);

  return configuration;
}
 
Example 5
Source File: FlinkStandaloneHiveServerContext.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Configures an embedded Derby-backed metastore and its Thrift URI on the
 * given conf.
 *
 * @param conf the Hive configuration to modify
 * @throws RuntimeException if the embedded Derby driver is not on the classpath
 */
private void configureMetaStore(HiveConf conf) {

	// The metastore uses an embedded Derby database; verify the driver is
	// actually loadable before handing its name to Hive.
	String jdbcDriver = org.apache.derby.jdbc.EmbeddedDriver.class.getName();
	try {
		Class.forName(jdbcDriver);
	} catch (ClassNotFoundException e) {
		throw new RuntimeException(e);
	}

	// Set the Hive Metastore DB driver
	// NOTE(review): the original mixed the `conf` parameter with a `hiveConf`
	// field for the string properties; unified on `conf`.
	conf.set("datanucleus.schema.autoCreateAll", "true");
	conf.set("hive.metastore.schema.verification", "false");
	conf.set("hive.metastore.uris", toHmsURI());
	// No pooling needed. This will save us a lot of threads
	conf.set("datanucleus.connectionPoolingType", "None");

	conf.setBoolVar(METASTORE_VALIDATE_CONSTRAINTS, true);
	conf.setBoolVar(METASTORE_VALIDATE_COLUMNS, true);
	conf.setBoolVar(METASTORE_VALIDATE_TABLES, true);

	// disable authorization to avoid NPE
	conf.set(HIVE_AUTHORIZATION_MANAGER.varname,
			"org.apache.hive.hcatalog.storagehandler.DummyHCatAuthProvider");
}
 
Example 6
Source File: TestHiveStoragePluginCleanup.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/** Creates and starts a mock Hive storage plugin with impersonation enabled. */
@Before
public void createStartedHiveStoragePlugin() {
  // doAs enabled so the plugin exercises its per-user code paths.
  final HiveConf conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_ENABLE_DOAS, true);

  final SabotContext sabotContext = mock(SabotContext.class);
  when(sabotContext.getDremioConfig()).thenReturn(DremioConfig.create());
  when(sabotContext.isCoordinator()).thenReturn(true);

  plugin = new MockHiveStoragePlugin(conf, sabotContext, "foo");
  plugin.start();
  // Starting the plugin is expected to leave exactly one client registered.
  assertEquals(1, plugin.getClientCount());
}
 
Example 7
Source File: TestHiveStoragePlugin.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/** With doAs disabled the plugin must always report the system user. */
@Test
public void impersonationDisabledShouldReturnSystemUser() {
  final SabotContext context = mock(SabotContext.class);

  final HiveConf conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_ENABLE_DOAS, false);

  final HiveStoragePlugin plugin = createHiveStoragePlugin(conf, context);

  assertEquals(SystemUser.SYSTEM_USERNAME, plugin.getUsername(TEST_USER_NAME));
}
 
Example 8
Source File: TestHiveStoragePlugin.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/** With doAs enabled the plugin should act on behalf of the querying user. */
@Test
public void impersonationEnabledShouldReturnUser() {
  final SabotContext context = mock(SabotContext.class);

  final HiveConf conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_ENABLE_DOAS, true);

  final HiveStoragePlugin plugin = new HiveStoragePlugin(conf, context, "foo");

  assertEquals(TEST_USER_NAME, plugin.getUsername(TEST_USER_NAME));
}
 
Example 9
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/**
 * Boots an embedded, Derby-backed Hive metastore for generating test data.
 *
 * @param dbDir directory for the Derby database files
 * @param whDir directory used as the Hive warehouse root
 * @throws Exception if the metastore fails to start
 */
private HiveTestDataGenerator(final String dbDir, final String whDir) throws Exception {
  this.dbDir = dbDir;
  this.whDir = whDir;

  final HiveConf conf = new HiveConf();

  // Let the metastore create its schema on first use instead of verifying
  // an existing one.
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, false);
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL, true);
  HiveConf.setVar(conf, HiveConf.ConfVars.METASTOREWAREHOUSE, whDir);
  HiveConf.setVar(conf, HiveConf.ConfVars.METASTORECONNECTURLKEY, String.format("jdbc:derby:;databaseName=%s;create=true", dbDir));
  // Starts the metastore and records the port clients should connect to.
  port = MetaStoreUtils.startMetaStore(conf);

  config = Maps.newHashMap();
  config.put(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
}
 
Example 10
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a fresh session-scoped configuration pointing at the test metastore.
 *
 * @return a HiveConf wired to the local test metastore and local filesystem
 */
private HiveConf newHiveConf() {
  final HiveConf configuration = new HiveConf(SessionState.class);

  configuration.setVar(ConfVars.METASTOREURIS, "thrift://localhost:" + port);
  configuration.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  configuration.setVar(ConfVars.METASTOREWAREHOUSE, whDir);
  configuration.set("mapred.job.tracker", "local");
  configuration.setVar(ConfVars.SCRATCHDIR, getTempDir("scratch_dir"));
  configuration.setVar(ConfVars.LOCALSCRATCHDIR, getTempDir("local_scratch_dir"));
  configuration.setVar(ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
  configuration.setBoolVar(ConfVars.HIVE_CBO_ENABLED, false);

  return configuration;
}
 
Example 11
Source File: TestHive3StoragePlugin.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/** With doAs disabled the plugin must always report the system user. */
@Test
public void impersonationDisabledShouldReturnSystemUser() {
  final SabotContext context = mock(SabotContext.class);

  final HiveConf conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_ENABLE_DOAS, false);

  final Hive3StoragePlugin plugin = createHiveStoragePlugin(conf, context);

  assertEquals(SystemUser.SYSTEM_USERNAME, plugin.getUsername(TEST_USER_NAME));
}
 
Example 12
Source File: TestHive3StoragePlugin.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/** With doAs enabled the plugin should act on behalf of the querying user. */
@Test
public void impersonationEnabledShouldReturnUser() {
  final SabotContext context = mock(SabotContext.class);

  final HiveConf conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_ENABLE_DOAS, true);

  final Hive3StoragePlugin plugin = new Hive3StoragePlugin(conf, context, "foo");

  assertEquals(TEST_USER_NAME, plugin.getUsername(TEST_USER_NAME));
}
 
Example 13
Source File: FlinkStandaloneHiveServerContext.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Points every Hive directory setting at a dedicated folder beneath the
 * test's temporary base directory.
 *
 * @param basedir temporary folder that roots all test artifacts
 * @param conf    the Hive configuration to modify
 */
private void configureFileSystem(TemporaryFolder basedir, HiveConf conf) {

		createAndSetFolderProperty(METASTOREWAREHOUSE, "warehouse", conf, basedir);
		createAndSetFolderProperty(SCRATCHDIR, "scratchdir", conf, basedir);
		createAndSetFolderProperty(LOCALSCRATCHDIR, "localscratchdir", conf, basedir);
		createAndSetFolderProperty(HIVEHISTORYFILELOC, "tmp", conf, basedir);

		// Let warehouse sub-directories inherit permissions from their parent.
		conf.setBoolVar(HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);

		createAndSetFolderProperty("hadoop.tmp.dir", "hadooptmp", conf, basedir);
		createAndSetFolderProperty("test.log.dir", "logs", conf, basedir);
	}
 
Example 14
Source File: EmbeddedMetastoreService.java    From beam with Apache License 2.0 5 votes vote down vote up
/**
 * Spins up an in-process Hive metastore, Driver, and SessionState rooted
 * under {@code baseDirPath}. The base directory is scheduled for deletion
 * when the JVM exits.
 *
 * @param baseDirPath root directory for all metastore/warehouse files
 * @throws IOException if the base directory cannot be scheduled for deletion
 */
public EmbeddedMetastoreService(String baseDirPath) throws IOException {
  FileUtils.forceDeleteOnExit(new File(baseDirPath));

  String hiveDirPath = makePathASafeFileName(baseDirPath + "/hive");
  // Timestamped per-run data dir to avoid collisions between runs.
  String testDataDirPath =
      makePathASafeFileName(
          hiveDirPath
              + "/data/"
              + EmbeddedMetastoreService.class.getCanonicalName()
              + System.currentTimeMillis());
  String testWarehouseDirPath = makePathASafeFileName(testDataDirPath + "/warehouse");

  hiveConf = new HiveConf(getClass());
  // No pre/post execution hooks for this embedded instance.
  hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
  hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, testWarehouseDirPath);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
  hiveConf.setVar(
      HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd."
          + "SQLStdHiveAuthorizerFactory");
  hiveConf.set("test.tmp.dir", hiveDirPath);

  // Redirect Derby's error stream away from the console.
  System.setProperty("derby.stream.error.file", "/dev/null");
  driver = new Driver(hiveConf);
  sessionState = SessionState.start(new SessionState(hiveConf));
}
 
Example 15
Source File: HiveAuthzBindingSessionHook.java    From incubator-sentry with Apache License 2.0 5 votes vote down vote up
/**
 * The session hook for sentry authorization that sets the required session
 * level configuration:
 * <ol>
 *   <li>Sets up the sentry hooks — semantic, exec and filter hooks.</li>
 *   <li>Sets additional config properties required for auth
 *       (HIVE_EXTENDED_ENTITY_CAPTURE = true, SCRATCHDIRPERMISSION = 700).</li>
 *   <li>Adds sensitive config parameters to the config restrict list so
 *       that they can't be overridden by users.</li>
 * </ol>
 */
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Add sentry hooks to the session configuration
  HiveConf sessionConf = sessionHookContext.getSessionConf();

  appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
      SEMANTIC_HOOK);
  // Restrict the command whitelist to what the authz config allows.
  HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
  String commandWhitelist =
      authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
          HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
  sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
  sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
  // Capture transform entities so the compiler records transform URIs.
  sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);

  // set user name
  sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
          "org.apache.sentry.binding.hive.HiveAuthzBindingSessionHook$SentryHiveAuthorizerFactory");

  // Set MR ACLs to session user
  appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB,
      sessionHookContext.getSessionUser());
  appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB,
      sessionHookContext.getSessionUser());

  // setup restrict list
  sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);
}
 
Example 16
Source File: HiveAuthzBindingSessionHookV2.java    From incubator-sentry with Apache License 2.0 5 votes vote down vote up
/**
 * The session hook for sentry authorization (V2) that sets the required
 * session level configuration:
 * <ol>
 *   <li>Sets up the sentry hooks — semantic, exec and filter hooks.</li>
 *   <li>Sets additional config properties required for auth
 *       (HIVE_EXTENDED_ENTITY_CAPTURE = true, SCRATCHDIRPERMISSION = 700).</li>
 *   <li>Adds sensitive config parameters to the config restrict list so
 *       that they can't be overridden by users.</li>
 * </ol>
 */
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Add sentry hooks to the session configuration
  HiveConf sessionConf = sessionHookContext.getSessionConf();

  appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);
  // enable sentry authorization V2
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
  // doAs is off: queries run as the server user, sentry handles authz.
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
  sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
      "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");

  // grant all privileges for table to its owner
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");

  // Enable compiler to capture transform URI referred in the query
  sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);

  // set security command list
  HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
  String commandWhitelist =
      authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
          HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
  sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);

  // set additional configuration properties required for auth
  sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);

  // setup restrict list
  sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);

  // set user name
  sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());

  // Set MR ACLs to session user
  appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
  appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
}
 
Example 17
Source File: StandaloneHiveServerContext.java    From HiveRunner with Apache License 2.0 5 votes vote down vote up
/**
 * Applies miscellaneous Hive settings that make an embedded test instance
 * quieter and self-contained.
 *
 * @param hiveConf the Hive configuration to modify
 */
protected void configureMiscHiveSettings(HiveConf hiveConf) {
    // No automatic statistics gathering in tests.
    hiveConf.setBoolVar(HIVESTATSAUTOGATHER, false);

    // Turn off dependency on the calcite library (cost-based optimizer).
    hiveConf.setBoolVar(HIVE_CBO_ENABLED, false);

    // Disable to get rid of clean up exception when stopping the Session.
    hiveConf.setBoolVar(HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false);

    // Deliberately invalid sentinel: no real hadoop binary is used here.
    hiveConf.setVar(HADOOPBIN, "NO_BIN!");
}
 
Example 18
Source File: StandaloneHiveServerContext.java    From HiveRunner with Apache License 2.0 5 votes vote down vote up
/**
 * Points every Hive directory setting at a dedicated folder beneath the
 * test's temporary base directory, and prepares a shared Tez install dir.
 *
 * @param basedir temporary directory that roots all test artifacts
 * @param conf    the Hive configuration to modify
 * @throws IOException if a folder cannot be created
 */
protected void configureFileSystem(Path basedir, HiveConf conf) throws IOException {
    conf.setVar(METASTORECONNECTURLKEY, metaStorageUrl + ";create=true");

    createAndSetFolderProperty(METASTOREWAREHOUSE, "warehouse", conf, basedir);
    createAndSetFolderProperty(LOCALSCRATCHDIR, "localscratchdir", conf, basedir);
    createAndSetFolderProperty(HIVEHISTORYFILELOC, "tmp", conf, basedir);

    // Let warehouse sub-directories inherit permissions from their parent.
    conf.setBoolVar(HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);

    createAndSetFolderProperty("hadoop.tmp.dir", "hadooptmp", conf, basedir);
    createAndSetFolderProperty("test.log.dir", "logs", conf, basedir);

    /*
     * Tez-specific configuration below.
     *
     * Tez will upload a hive-exec.jar to this location. It looks like it will
     * do this only once per test suite, so it makes sense to keep this in a
     * central location rather than in the tmp dir of each test.
     */
    File installationDir = newFolder(basedir, "tez_installation_dir").toFile();

    conf.setVar(HiveConf.ConfVars.HIVE_JAR_DIRECTORY, installationDir.getAbsolutePath());
    conf.setVar(HiveConf.ConfVars.HIVE_USER_INSTALL_DIR, installationDir.getAbsolutePath());
}
 
Example 19
Source File: WaggleDanceIntegrationTest.java    From waggle-dance with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a metastore client connected to Waggle Dance's federated Thrift
 * endpoint rather than to a backing metastore directly.
 *
 * @return a client talking to the Waggle Dance Thrift URI
 * @throws MetaException if the client cannot be created
 */
private HiveMetaStoreClient getWaggleDanceClient() throws MetaException {
  HiveConf conf = new HiveConf();
  conf.setVar(ConfVars.METASTOREURIS, getWaggleDanceThriftUri());
  // Send the caller's user/group info with each request (set_ugi call).
  conf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true);
  return new HiveMetaStoreClient(conf);
}
 
Example 20
Source File: TestCobolHiveTableCreation.java    From Cobol-to-Hive with Apache License 2.0 4 votes vote down vote up
/**
 * Boots a local Zookeeper, metastore, and HiveServer2 for the Cobol serde
 * integration tests, and verifies the JDBC driver is available.
 *
 * @throws Exception if any local service fails to start or the JDBC driver
 *                   is missing from the classpath
 */
@Override
public void setUp() throws Exception {
    super.setUp();
    // Only errors are interesting from the embedded clusters.
    LogManager.getRootLogger().setLevel(Level.ERROR);

    hiveConf = new HiveConf();
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, false);
    String cwd = System.getProperty("user.dir");
    System.out.println("Current working directory : " + cwd);
    // Make the freshly built project jar available to Hive as an aux jar.
    hiveConf.setAuxJars("target/Cobol-to-Hive-1.1.0.jar");

    zookeeperLocalCluster = new ZookeeperLocalCluster.Builder()
            .setPort(12345)
            .setTempDir("embedded_zookeeper")
            .setZookeeperConnectionString("localhost:12345")
            .setMaxClientCnxns(60)
            .setElectionPort(20001)
            .setQuorumPort(20002)
            .setDeleteDataDirectoryOnClose(false)
            .setServerId(1)
            .setTickTime(2000)
            .build();
    zookeeperLocalCluster.start();

    hiveLocalMetaStore = new HiveLocalMetaStore.Builder()
            .setHiveMetastoreHostname("localhost")
            .setHiveMetastorePort(12347)
            .setHiveMetastoreDerbyDbDir("metastore_db")
            .setHiveScratchDir("hive_scratch_dir")
            .setHiveWarehouseDir("warehouse_dir")
            .setHiveConf(hiveConf)
            .build();
    hiveLocalMetaStore.start();

    hiveLocalServer2 = new HiveLocalServer2.Builder()
            .setHiveServer2Hostname("localhost")
            .setHiveServer2Port(12348)
            .setHiveMetastoreHostname("localhost")
            .setHiveMetastorePort(12347)
            .setHiveMetastoreDerbyDbDir("metastore_db")
            .setHiveScratchDir("hive_scratch_dir")
            .setHiveWarehouseDir("warehouse_dir")
            .setHiveConf(hiveConf)
            .setZookeeperConnectionString("localhost:12345")
            .build();
    hiveLocalServer2.start();

    // A missing JDBC driver should fail this test through the declared
    // `throws Exception`, not print a stack trace and System.exit(1) —
    // exiting here would kill the entire test JVM.
    Class.forName(driverName);
}