org.apache.hadoop.hive.conf.HiveConf.ConfVars Java Examples
The following examples show how to use
org.apache.hadoop.hive.conf.HiveConf.ConfVars.
They are drawn from open-source projects; the project, source file, and license are noted above each example.
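As background for the examples: ConfVars is an enum whose constants each expose the underlying property key through a public varname field, and HiveConf pairs it with typed accessors (getVar/setVar, getBoolVar/setBoolVar) alongside the plain string-keyed Configuration methods. Both styles appear throughout the examples below. A minimal sketch of the two (the URI, warehouse path, and class name are placeholders, not values from any of the projects):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class ConfVarsBasics { // hypothetical example class
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();

    // Typed access: the enum constant identifies the property and its type.
    conf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:9083"); // placeholder URI
    conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

    // String-keyed access: ConfVars.varname is the raw key (e.g. "hive.metastore.uris"),
    // usable with the inherited Configuration set/get methods.
    conf.set(ConfVars.METASTOREWAREHOUSE.varname, "/tmp/warehouse"); // placeholder path

    System.out.println(conf.getVar(ConfVars.METASTOREURIS));
    System.out.println(conf.get(ConfVars.METASTOREWAREHOUSE.varname));
  }
}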
Example #1
Source File: TunnelingMetaStoreClientFactoryTest.java From waggle-dance with Apache License 2.0

@Before
public void init() {
  metastoreTunnel = new MetastoreTunnel();
  metastoreTunnel.setRoute(TUNNEL_ROUTE);
  metastoreTunnel.setPrivateKeys(TUNNEL_PRIVATE_KEY);
  metastoreTunnel.setKnownHosts(TUNNEL_KNOWN_HOSTS);
  metastoreTunnel.setLocalhost(TUNNEL_LOCALHOST);

  when(localHiveConfFactory.newInstance(any(String.class), any(Integer.class), any(HiveConf.class)))
      .thenReturn(localHiveConf);
  when(tunnelableFactorySupplier.get(metastoreTunnel)).thenReturn(tunnelableFactory);
  when(localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)).thenReturn(METASTORE_URI);
  when(hiveMetaStoreClientSupplierFactory.newInstance(localHiveConf, NAME, RECONNECTION_RETRIES, CONNECTION_TIMEOUT))
      .thenReturn(hiveMetaStoreClientSupplier);

  tunnelingMetaStoreClientFactory = new TunnelingMetaStoreClientFactory(tunnelableFactorySupplier,
      localHiveConfFactory, hiveMetaStoreClientSupplierFactory);
}
Example #2
Source File: BaseTestHiveImpersonation.java From dremio-oss with Apache License 2.0

protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();

  // Configure metastore persistence db location on local filesystem
  final String dbUrl = String.format("jdbc:derby:;databaseName=%s;create=true", getTempDir("metastore_db"));
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, dbUrl);

  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  hiveConf.set(ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "false");
  hiveConf.set(ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
  hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");

  // Set MiniDFS conf in HiveConf
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));

  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
Example #3
Source File: CommonBeans.java From circus-train with Apache License 2.0

private HiveConf newHiveConf(TunnelMetastoreCatalog hiveCatalog, Configuration baseConf) {
  List<String> siteXml = hiveCatalog.getSiteXml();
  if (CollectionUtils.isEmpty(siteXml)) {
    LOG.info("No Hadoop site XML is defined for catalog {}.", hiveCatalog.getName());
  }
  Map<String, String> properties = new HashMap<>();
  for (Entry<String, String> entry : baseConf) {
    properties.put(entry.getKey(), entry.getValue());
  }
  if (hiveCatalog.getHiveMetastoreUris() != null) {
    properties.put(ConfVars.METASTOREURIS.varname, hiveCatalog.getHiveMetastoreUris());
  }
  putConfigurationProperties(hiveCatalog.getConfigurationProperties(), properties);
  HiveConf hiveConf = new HiveConfFactory(siteXml, properties).newInstance();
  return hiveConf;
}
Example #4
Source File: Hive3StoragePlugin.java From dremio-oss with Apache License 2.0

public Hive3StoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();

  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true, in which case all metastore calls are
  //   impersonated as the authenticated user.
  this.metastoreImpersonationEnabled =
      hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
Example #5
Source File: RangerHiveAuthorizerBase.java From ranger with Apache License 2.0

@Override
public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
  LOG.debug("RangerHiveAuthorizerBase.applyAuthorizationConfigPolicy()");

  // from SQLStdHiveAccessController.applyAuthorizationConfigPolicy()
  if (mSessionContext != null && mSessionContext.getClientType() == CLIENT_TYPE.HIVESERVER2) {
    // Configure PREEXECHOOKS with DisallowTransformHook to disallow transform queries
    String hooks = hiveConf.getVar(ConfVars.PREEXECHOOKS).trim();
    if (hooks.isEmpty()) {
      hooks = DisallowTransformHook.class.getName();
    } else {
      hooks = hooks + "," + DisallowTransformHook.class.getName();
    }
    hiveConf.setVar(ConfVars.PREEXECHOOKS, hooks);

    SettableConfigUpdater.setHiveConfWhiteList(hiveConf);
  }
}
Example #6
Source File: TestSentryHiveAuthorizationTaskFactory.java From incubator-sentry with Apache License 2.0

@Before
public void setup() throws Exception {
  conf = new HiveConf();
  baseDir = Files.createTempDir();
  baseDir.setWritable(true, false);
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
  SessionState.start(conf);
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
      SentryHiveAuthorizationTaskFactoryImpl.class.getName());

  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  partition = new Partition(table);
  context = new Context(conf);
  parseDriver = new ParseDriver();
  analyzer = new DDLSemanticAnalyzer(conf, db);
  SessionState.start(conf);

  Mockito.when(db.getTable(TABLE, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
      .thenReturn(partition);

  HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
  auth.setConf(conf);
  currentUser = auth.getUserName();
}
Example #7
Source File: HiveAuthzBinding.java From incubator-sentry with Apache License 2.0

private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
    throws InvalidConfigurationException {
  boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
      authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
  LOG.debug("Testing mode is " + isTestingMode);
  if (!isTestingMode) {
    boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
    if (!sasl) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
    }
  } else {
    boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
    if (!setUgi) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non-secure mode");
    }
  }
}
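Note how two ConfVars drive the two branches here: METASTORE_USE_THRIFT_SASL guards the secure (non-testing) path and METASTORE_EXECUTE_SET_UGI the testing path. The setugi branch of this validation is exercised end to end by the testHiveMetaStoreUGIConfig test in Example #19 below.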
Example #8
Source File: HiveAuthzBindingHook.java From incubator-sentry with Apache License 2.0

@VisibleForTesting
protected static AccessURI parseURI(String uri, boolean isLocal) throws SemanticException {
  try {
    HiveConf conf = SessionState.get().getConf();
    String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
    Path warehousePath = new Path(warehouseDir);
    if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
      FileSystem fs = FileSystem.get(conf);
      warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
    }
    return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
  } catch (Exception e) {
    throw new SemanticException("Error parsing URI " + uri + ": " + e.getMessage(), e);
  }
}
Example #9
Source File: HiveStoragePlugin.java From dremio-oss with Apache License 2.0

public HiveStoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();

  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true, in which case all metastore calls are
  //   impersonated as the authenticated user.
  this.metastoreImpersonationEnabled =
      hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
Example #10
Source File: CloseableThriftHiveMetastoreIfaceClientFactory.java From waggle-dance with Apache License 2.0

public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) {
  String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris());
  String name = metaStore.getName().toLowerCase(Locale.ROOT);

  // Connection timeout should not be less than 1:
  // a timeout of zero is interpreted as an infinite timeout, so this is avoided.
  int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency());

  if (metaStore.getConnectionType() == TUNNELED) {
    return tunnelingMetaStoreClientFactory
        .newInstance(uris, metaStore.getMetastoreTunnel(), name,
            DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout);
  }
  Map<String, String> properties = new HashMap<>();
  properties.put(ConfVars.METASTOREURIS.varname, uris);
  HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties);
  return defaultMetaStoreClientFactory
      .newInstance(confFactory.newInstance(), "waggledance-" + name,
          DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout);
}
Example #11
Source File: MetaStoreProxyServer.java From waggle-dance with Apache License 2.0

private TServerSocket createServerSocket(boolean useSSL, int port) throws IOException, TTransportException {
  TServerSocket serverSocket = null;
  // enable SSL support for HMS
  List<String> sslVersionBlacklist = new ArrayList<>(
      Arrays.asList(hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")));
  if (!useSSL) {
    serverSocket = HiveAuthUtils.getServerSocket(null, port);
  } else {
    String keyStorePath = hiveConf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
    if (keyStorePath.isEmpty()) {
      throw new IllegalArgumentException(
          ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname + " not configured for SSL connection");
    }
    String keyStorePassword = ShimLoader
        .getHadoopShims()
        .getPassword(hiveConf, HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
    serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath, keyStorePassword,
        sslVersionBlacklist);
  }
  return serverSocket;
}
Example #12
Source File: AWSCatalogMetastoreClient.java From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0

@Override
public boolean isCompatibleWith(HiveConf conf) {
  if (currentMetaVars == null) {
    return false; // recreate
  }
  boolean compatible = true;
  for (ConfVars oneVar : HiveConf.metaVars) {
    // Since metaVars are all of different types, use string for comparison
    String oldVar = currentMetaVars.get(oneVar.varname);
    String newVar = conf.get(oneVar.varname, "");
    if (oldVar == null ||
        (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : !oldVar.equalsIgnoreCase(newVar))) {
      logger.info("Metastore configuration " + oneVar.varname +
          " changed from " + oldVar + " to " + newVar);
      compatible = false;
    }
  }
  return compatible;
}
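For context: HiveConf.metaVars is the fixed array of metastore-related variables, and this method mirrors the compatibility check in Hive's own HiveMetaStoreClient. A snapshot of the values taken at connect time (currentMetaVars) is compared as plain strings against the incoming conf, and any drift forces the client to be recreated rather than reused.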
Example #13
Source File: TestURI.java From incubator-sentry with Apache License 2.0

@Test
public void testWarehouseDirWithoutPrefix() throws SemanticException {
  conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:8020");
  conf.set(ConfVars.METASTOREWAREHOUSE.varname, "/path/to/warehouse");
  Assert.assertEquals("hdfs://localhost:8020/some/path",
      HiveAuthzBindingHook.parseURI("/some/path").getName());
}
Example #14
Source File: TestURI.java From incubator-sentry with Apache License 2.0

@BeforeClass
public static void setupTestURI() {
  conf = new HiveConf();
  baseDir = Files.createTempDir();
  baseDir.setWritable(true, false);
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
  SessionState.start(conf);
}
Example #15
Source File: TestMetastoreEndToEnd.java From incubator-sentry with Apache License 2.0

/**
 * Verify data load into new partition using INSERT .. PARTITION statement
 */
@Test
public void testPartionInsert() throws Exception {
  String partVal1 = "part1", partVal2 = "part2";

  policyFile.addRolesToGroup(USERGROUP1, uri_role).addPermissionsToRole(
      uri_role, "server=server1->uri=file://" + dataFile.getPath());
  writePolicyFile(policyFile);

  execHiveSQL("CREATE TABLE " + dbName + "." + tabName1
      + " (id int) PARTITIONED BY (part_col string)", USER1_1);
  execHiveSQL("CREATE TABLE " + dbName + "." + tabName2 + " (id int)", USER1_1);
  execHiveSQL("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE "
      + dbName + "." + tabName2, USER1_1);

  // verify that user with DB all can add partition using INSERT .. PARTITION
  execHiveSQL("INSERT OVERWRITE TABLE " + dbName + "." + tabName1
      + " PARTITION (part_col='" + partVal1 + "') SELECT * FROM "
      + dbName + "." + tabName2, USER1_1);
  verifyPartitionExists(dbName, tabName1, partVal1);

  // verify that user with Table all can add partition using INSERT
  execHiveSQL("INSERT OVERWRITE TABLE " + dbName + "." + tabName1
      + " PARTITION (part_col='" + partVal2 + "') SELECT * FROM "
      + dbName + "." + tabName2, USER2_1);
  verifyPartitionExists(dbName, tabName1, partVal2);

  // verify that user with Table all can add dynamic partition using INSERT
  Map<String, String> dynamicInsertProperties = Maps.newHashMap();
  dynamicInsertProperties.put(ConfVars.DYNAMICPARTITIONING.varname, "true");
  dynamicInsertProperties.put(ConfVars.DYNAMICPARTITIONINGMODE.varname, "nonstrict");

  execHiveSQL("CREATE TABLE " + dbName + "." + tabName3
      + " (id int) PARTITIONED BY (part_col string)", USER1_1);
  execHiveSQLwithOverlay("INSERT OVERWRITE TABLE " + dbName + "." + tabName3
      + " partition (part_col) SELECT id, part_col FROM "
      + dbName + "." + tabName1, USER1_1, dynamicInsertProperties);
}
Example #16
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * HiveServer2 not using strong authentication; make sure InvalidConfigurationException is thrown.
 * @throws Exception
 */
@Test(expected = InvalidConfigurationException.class)
public void testHiveServer2AuthRestriction() throws Exception {
  // prepare the hive and auth configs
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, "none");
  authzConf.set(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "false");
  testAuth = new HiveAuthzBinding(hiveConf, authzConf);
}
Example #17
Source File: HiveAuthzBindingSessionHookV2.java From incubator-sentry with Apache License 2.0

/**
 * The session hook for sentry authorization that sets the required session level configuration:
 * 1. Set up the sentry hooks: semantic, exec and filter hooks
 * 2. Set additional config properties required for auth:
 *    set HIVE_EXTENDED_ENITITY_CAPTURE = true, set SCRATCHDIRPERMISSION = 700
 * 3. Add sensitive config parameters to the config restrict list so that they can't be
 *    overridden by users
 */
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Add sentry hooks to the session configuration
  HiveConf sessionConf = sessionHookContext.getSessionConf();
  appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);

  // enable sentry authorization V2
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
  sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
      "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");

  // grant all privileges for table to its owner
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");

  // Enable compiler to capture transform URI referred in the query
  sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);

  // set security command list
  HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
  String commandWhitelist = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
      HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
  sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);

  // set additional configuration properties required for auth
  sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);

  // setup restrict list
  sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);

  // set user name
  sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());

  // Set MR ACLs to session user
  appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
  appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
}
Example #18
Source File: DefaultSentryAccessController.java From incubator-sentry with Apache License 2.0

@Override
public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
  // Apply the rest of the configuration only to HiveServer2
  if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
      || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
    throw new HiveAuthzPluginException("Sentry only supports HiveServer2");
  }
}
Example #19
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * hive.metastore.execute.setugi != true; make sure InvalidConfigurationException is thrown.
 * @throws Exception
 */
@Test(expected = InvalidConfigurationException.class)
public void testHiveMetaStoreUGIConfig() throws Exception {
  // prepare the hive and auth configs
  hiveConf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true);
  hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, false);
  authzConf.set(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true");
  testAuth = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore, hiveConf, authzConf);
}
Example #20
Source File: AWSCatalogMetastoreClient.java From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0

@Override
public String getMetaConf(String key) throws MetaException, TException {
  ConfVars metaConfVar = HiveConf.getMetaConf(key);
  if (metaConfVar == null) {
    throw new MetaException("Invalid configuration key " + key);
  }
  return conf.get(key, metaConfVar.getDefaultValue());
}
Example #21
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * Turn on impersonation, explicitly allow it via configuration, and make sure that
 * authorization succeeds.
 * @throws Exception
 */
@Test
public void testImpersonationAllowed() throws Exception {
  // prepare the hive and auth configs
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, true);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, "Kerberos");
  authzConf.set(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "false");
  authzConf.set(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar(), "true");
  testAuth = new HiveAuthzBinding(hiveConf, authzConf);

  // the following check should pass, even with impersonation
  inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, CUSTOMER_DB, PURCHASES_TAB));
  testAuth.authorize(HiveOperation.QUERY, queryPrivileges, ADMIN_SUBJECT,
      inputTabHierarcyList, outputTabHierarcyList);
}
Example #22
Source File: HiveAuthzBindingHook.java From incubator-sentry with Apache License 2.0

private boolean filterWriteEntity(WriteEntity writeEntity) throws AuthorizationException {
  // skip URI validation for session scratch file URIs
  if (writeEntity.isTempURI()) {
    return true;
  }
  try {
    if (writeEntity.getTyp().equals(Type.DFS_DIR) || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
      HiveConf conf = SessionState.get().getConf();
      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
      URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
      URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
          writeEntity.getLocation().getPath()));
      LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
      if (PathUtils.impliesURI(scratchURI, requestURI)) {
        return true;
      }
      URI localScratchURI = new URI(PathUtils.parseLocalURI(
          conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
      URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
      LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
      if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
        return true;
      }
    }
  } catch (Exception e) {
    throw new AuthorizationException("Failed to extract uri details", e);
  }
  return false;
}
Example #23
Source File: ITSqlStdBasedAuthorization.java From dremio-oss with Apache License 2.0

private static void setSqlStdBasedAuthorizationInHiveConf() {
  hiveConf.set(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true");
  hiveConf.set(HIVE_AUTHENTICATOR_MANAGER.varname, SessionStateConfigUserAuthenticator.class.getName());
  hiveConf.set(HIVE_AUTHORIZATION_MANAGER.varname, SQLStdConfOnlyAuthorizerFactory.class.getName());
  hiveConf.set(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false");
  hiveConf.set(ConfVars.METASTORE_EXECUTE_SET_UGI.varname, "false");
  hiveConf.set(ConfVars.USERS_IN_ADMIN_ROLE.varname, processUser);
}
Example #24
Source File: HiveTestDataGenerator.java From dremio-oss with Apache License 2.0

private HiveConf newHiveConf() {
  HiveConf conf = new HiveConf(SessionState.class);

  HiveConf.setVar(conf, ConfVars.METASTOREURIS, "thrift://localhost:" + port);
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  HiveConf.setVar(conf, ConfVars.METASTOREWAREHOUSE, whDir);
  conf.set("mapred.job.tracker", "local");
  HiveConf.setVar(conf, ConfVars.SCRATCHDIR, getTempDir("scratch_dir"));
  HiveConf.setVar(conf, ConfVars.LOCALSCRATCHDIR, getTempDir("local_scratch_dir"));
  HiveConf.setVar(conf, ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
  HiveConf.setBoolVar(conf, ConfVars.HIVE_CBO_ENABLED, false);

  return conf;
}
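A conf built this way is typically handed straight to a metastore client. A minimal sketch of that next step, assuming a metastore is already listening on the configured thrift URI; the class name and port below are illustrative placeholders, not part of dremio-oss:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class MetastoreSmokeTest { // hypothetical example class
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // Assumes a metastore (e.g. one started via MetaStoreUtils.startMetaStore,
    // as in the next example) is listening here; 9083 is only a placeholder port.
    HiveConf.setVar(conf, ConfVars.METASTOREURIS, "thrift://localhost:9083");

    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    System.out.println(client.getAllDatabases()); // e.g. [default]
    client.close();
  }
}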
Example #25
Source File: HiveTestDataGenerator.java From dremio-oss with Apache License 2.0

private HiveTestDataGenerator(final String dbDir, final String whDir) throws Exception {
  this.dbDir = dbDir;
  this.whDir = whDir;

  final HiveConf conf = new HiveConf();
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, false);
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL, true);
  HiveConf.setVar(conf, HiveConf.ConfVars.METASTOREWAREHOUSE, whDir);
  HiveConf.setVar(conf, HiveConf.ConfVars.METASTORECONNECTURLKEY,
      String.format("jdbc:derby:;databaseName=%s;create=true", dbDir));
  port = MetaStoreUtils.startMetaStore(conf);

  config = Maps.newHashMap();
  config.put(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
}
Example #26
Source File: ThriftMetastoreClientManagerTest.java From waggle-dance with Apache License 2.0

@Before
public void init() {
  hiveConf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:123");
  client = new ThriftMetastoreClientManager(hiveConf,
      hiveCompatibleThriftHiveMetastoreIfaceFactory, connectionTimeout);
  ReflectionTestUtils.setField(client, "transport", transport);
  ReflectionTestUtils.setField(client, "isConnected", true);
}
Example #27
Source File: CloseableThriftHiveMetastoreIfaceClientFactoryTest.java From waggle-dance with Apache License 2.0

@Test
public void defaultFactory() {
  ArgumentCaptor<HiveConf> hiveConfCaptor = ArgumentCaptor.forClass(HiveConf.class);

  factory.newInstance(newFederatedInstance("fed1", THRIFT_URI));

  verify(defaultMetaStoreClientFactory).newInstance(hiveConfCaptor.capture(),
      eq("waggledance-fed1"), eq(3), eq(2000));
  verifyZeroInteractions(tunnelingMetaStoreClientFactory);
  HiveConf hiveConf = hiveConfCaptor.getValue();
  assertThat(hiveConf.getVar(ConfVars.METASTOREURIS), is(THRIFT_URI));
}
Example #28
Source File: ThriftMetastoreClientManagerIntegrationTest.java From waggle-dance with Apache License 2.0

@Test
public void openWithDummyConnectionThrowsRuntimeWithOriginalExceptionInMessage() {
  hiveConf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:123");
  manager = new ThriftMetastoreClientManager(hiveConf,
      hiveCompatibleThriftHiveMetastoreIfaceFactory, connectionTimeout);
  try {
    manager.open();
    fail("Expected a RuntimeException: nothing is listening on the dummy URI");
  } catch (RuntimeException e) {
    assertThat(e.getMessage(), containsString("java.net.ConnectException: Connection refused"));
  }
}