Java Code Examples for org.apache.hadoop.hive.conf.HiveConf#get()

The following examples show how to use org.apache.hadoop.hive.conf.HiveConf#get(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
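Before the project examples, here is a minimal, self-contained sketch of the basic HiveConf#get() access patterns: a plain lookup (which may return null), a lookup with a fallback default, and a lookup by ConfVars variable name. It is not taken from any of the projects below, and "my.custom.property" is a made-up key used purely for illustration.

import org.apache.hadoop.hive.conf.HiveConf;

public class HiveConfGetSketch {
  public static void main(String[] args) {
    // HiveConf extends Hadoop's Configuration, so get() behaves the same way.
    HiveConf conf = new HiveConf();

    // Plain lookup of an arbitrary key: returns null if it is not set anywhere.
    String custom = conf.get("my.custom.property");

    // Lookup with a fallback default, as several examples below do for "hadoop.tmp.dir".
    String tmpDir = conf.get("hadoop.tmp.dir", "/tmp");

    // Built-in Hive settings are usually addressed via HiveConf.ConfVars.<NAME>.varname.
    String warehouseDir = conf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname);

    System.out.println(custom + " " + tmpDir + " " + warehouseDir);
  }
}
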
Example 1
Source File: SessionCredentialsProviderFactory.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
@Override
public AWSCredentialsProvider buildAWSCredentialsProvider(HiveConf hiveConf) {

  checkArgument(hiveConf != null, "hiveConf cannot be null.");
  
  String accessKey = hiveConf.get(AWS_ACCESS_KEY_CONF_VAR);
  String secretKey = hiveConf.get(AWS_SECRET_KEY_CONF_VAR);
  String sessionToken = hiveConf.get(AWS_SESSION_TOKEN_CONF_VAR);
  
  checkArgument(accessKey != null, AWS_ACCESS_KEY_CONF_VAR + " must be set.");
  checkArgument(secretKey != null, AWS_SECRET_KEY_CONF_VAR + " must be set.");
  checkArgument(sessionToken != null, AWS_SESSION_TOKEN_CONF_VAR + " must be set.");
  
  AWSSessionCredentials credentials = new BasicSessionCredentials(accessKey, secretKey, sessionToken);
  
  return new StaticCredentialsProvider(credentials);
}
 
Example 2
Source File: AWSCatalogMetastoreClient.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
@Override
public boolean isCompatibleWith(HiveConf conf) {
    if (currentMetaVars == null) {
        return false; // recreate
    }
    boolean compatible = true;
    for (ConfVars oneVar : HiveConf.metaVars) {
        // Since metaVars are all of different types, use string for comparison
        String oldVar = currentMetaVars.get(oneVar.varname);
        String newVar = conf.get(oneVar.varname, "");
        if (oldVar == null ||
              (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : !oldVar.equalsIgnoreCase(newVar))) {
            logger.info("Mestastore configuration " + oneVar.varname +
                  " changed from " + oldVar + " to " + newVar);
            compatible = false;
        }
    }
    return compatible;
}
 
Example 3
Source File: BaseTestHiveImpersonation.java    From dremio-oss with Apache License 2.0
protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();

  // Configure metastore persistence db location on local filesystem
  final String dbUrl = String.format("jdbc:derby:;databaseName=%s;create=true",  getTempDir("metastore_db"));
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, dbUrl);

  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  hiveConf.set(ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "false");
  hiveConf.set(ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
  hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");

  // Set MiniDFS conf in HiveConf
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));

  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
 
Example 4
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0
public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
  boolean deprecatedConfigFile = false;
  HiveAuthzConf newAuthzConf = null;
  String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
  if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
    deprecatedConfigFile = true;
  }

  if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
        + " value '" + hiveAuthzConf + "' is invalid.");
  }
  try {
    newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
  } catch (MalformedURLException e) {
    if (deprecatedConfigFile) {
      throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL
          + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
    } else {
      throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
          + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
    }
  }
  return newAuthzConf;
}
 
Example 5
Source File: HiveLanguageParser.java    From circus-train with Apache License 2.0
public HiveLanguageParser(HiveConf hiveConfiguration) {
  hiveConf = new HiveConf(hiveConfiguration);
  if (hiveConf.get(HDFS_SESSION_PATH_KEY) == null) {
    hiveConf.set(HDFS_SESSION_PATH_KEY, hdfsTemporaryDirectory(hiveConf));
  }
  if (hiveConf.get(LOCAL_SESSION_PATH_KEY) == null) {
    hiveConf.set(LOCAL_SESSION_PATH_KEY, localTemporaryDirectory());
  }
}
 
Example 6
Source File: HiveAuthzBinding.java    From incubator-sentry with Apache License 2.0
private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
    throws InvalidConfigurationException{
  boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
      authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
  LOG.debug("Testing mode is " + isTestingMode);
  if(!isTestingMode) {
    String authMethod = Strings.nullToEmpty(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
    if("none".equalsIgnoreCase(authMethod)) {
      throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION +
          " can't be none in non-testing mode");
    }
    boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
    boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
        authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());

    if(impersonation && !allowImpersonation) {
      LOG.error("Role based authorization does not work with HiveServer2 impersonation");
      throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS +
          " can't be set to true in non-testing mode");
    }
  }
  String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
  if("077".equalsIgnoreCase(defaultUmask)) {
    LOG.error("HiveServer2 required a default umask of 077");
    throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY +
        " should be 077 in non-testing mode");
  }
}
 
Example 7
Source File: HiveAuthzConf.java    From incubator-sentry with Apache License 2.0
/**
 * Extract the authz config file path from the given Hive conf and load the authz config
 * @param hiveConf Hive configuration that points at the Sentry authz config file
 * @return the loaded HiveAuthzConf
 * @throws IllegalArgumentException if the config file location is missing, empty or malformed
 */
public static HiveAuthzConf getAuthzConf(HiveConf hiveConf)
  throws IllegalArgumentException {
  boolean deprecatedConfigFile = false;

  String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
  if (hiveAuthzConf == null
      || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
    deprecatedConfigFile = true;
  }

  if (hiveAuthzConf == null
      || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    throw new IllegalArgumentException("Configuration key "
        + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
        + "' is invalid.");
  }

  try {
    return new HiveAuthzConf(new URL(hiveAuthzConf));
  } catch (MalformedURLException e) {
    if (deprecatedConfigFile) {
      throw new IllegalArgumentException("Configuration key "
          + HiveAuthzConf.HIVE_ACCESS_CONF_URL
          + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
    } else {
      throw new IllegalArgumentException("Configuration key "
          + HiveAuthzConf.HIVE_SENTRY_CONF_URL
          + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
    }
  }
}
 
Example 8
Source File: MetaStoreUtil.java    From kite with Apache License 2.0
public static MetaStoreUtil get(Configuration conf) {
  HiveConf hiveConf = new HiveConf(conf, HiveConf.class);
  // Add the passed configuration back into the HiveConf to work around
  // a Hive bug that resets to defaults
  HiveUtils.addResource(hiveConf, conf);

  if (isEmpty(hiveConf, Loader.HIVE_METASTORE_URI_PROP)) {
    if (allowLocalMetaStore(hiveConf)) {
      return new MetaStoreUtil(hiveConf);
    } else {
      LOG.warn("Aborting use of local MetaStore. " +
              "Allow local MetaStore by setting {}=true in HiveConf",
          ALLOW_LOCAL_METASTORE);
      throw new IllegalArgumentException(
          "Missing Hive MetaStore connection URI");
    }
  }

  // get the URI and cache instances to a non-local metastore
  String uris = hiveConf.get(Loader.HIVE_METASTORE_URI_PROP);
  MetaStoreUtil util;
  synchronized (INSTANCES) {
    util = INSTANCES.get(uris);
    if (util == null) {
      util = new MetaStoreUtil(hiveConf);
      INSTANCES.put(uris, util);
    }
  }

  return util;
}
 
Example 9
Source File: AWSGlueClientFactory.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
private static String getProperty(String propertyName, HiveConf conf) {
  return Strings.isNullOrEmpty(System.getProperty(propertyName)) ?
      conf.get(propertyName) : System.getProperty(propertyName);
}
 
Example 10
Source File: HiveLanguageParser.java    From circus-train with Apache License 2.0
private static String hdfsTemporaryDirectory(HiveConf hiveConf) {
  return hiveConf.get("hadoop.tmp.dir", "/tmp");
}
 
Example 11
Source File: HiveConfFactory.java    From dremio-oss with Apache License 2.0
/**
 * Fills in a HiveConf instance with any user provided configuration parameters
 *
 * @param hiveConf - the conf to fill in
 * @param config - the user provided parameters
 */
protected static void addUserProperties(HiveConf hiveConf, BaseHiveStoragePluginConfig<?,?> config) {
  // Used to capture properties set by user
  final Set<String> userPropertyNames = new HashSet<>();
  if(config.propertyList != null) {
    for(Property prop : config.propertyList) {
      userPropertyNames.add(prop.name);
      setConf(hiveConf, prop.name, prop.value);
      if(logger.isTraceEnabled()){
        logger.trace("HiveConfig Override {}={}", prop.name, prop.value);
      }
    }
  }

  // Check if zero-copy has been set by user
  boolean zeroCopySetByUser = userPropertyNames.contains(OrcConf.USE_ZEROCOPY.getAttribute())
    || userPropertyNames.contains(HiveConf.ConfVars.HIVE_ORC_ZEROCOPY.varname);
  // Configure zero-copy for ORC reader
  if (!zeroCopySetByUser) {
    if (VM.isWindowsHost() || VM.isMacOSHost()) {
      logger.debug("MacOS or Windows host detected. Not automatically enabling ORC zero-copy feature");
    } else {
      String fs = hiveConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
      // Equivalent to a case-insensitive startsWith...
      if (fs.regionMatches(true, 0, "maprfs", 0, 6)) {
        // DX-12672: do not enable ORC zero-copy on MapRFS
        logger.debug("MapRFS detected. Not automatically enabling ORC zero-copy feature");
      } else {
        logger.debug("Linux host detected. Enabling ORC zero-copy feature");
        setConf(hiveConf, HiveConf.ConfVars.HIVE_ORC_ZEROCOPY, true);
      }
    }
  } else {
    boolean useZeroCopy = OrcConf.USE_ZEROCOPY.getBoolean(hiveConf);
    if (useZeroCopy) {
      logger.warn("ORC zero-copy feature has been manually enabled. This is not recommended.");
    } else {
      logger.error("ORC zero-copy feature has been manually disabled. This is not recommended and might cause memory issues");
    }
  }

  // Check if ORC Footer cache has been configured by user
  boolean orcStripCacheSetByUser = userPropertyNames.contains(HiveConf.ConfVars.HIVE_ORC_CACHE_STRIPE_DETAILS_SIZE.varname);
  if (orcStripCacheSetByUser) {
    logger.error("ORC stripe details cache has been manually configured. This is not recommended and might cause memory issues");
  } else {
    logger.debug("Disabling ORC stripe details cache.");
    setConf(hiveConf, HiveConf.ConfVars.HIVE_ORC_CACHE_STRIPE_DETAILS_SIZE, 0);
  }

  // Check if fs.s3.impl has been set by user
  boolean fsS3ImplSetByUser = userPropertyNames.contains(FS_S3_IMPL);
  if (fsS3ImplSetByUser) {
    logger.warn(FS_S3_IMPL + " manually set. This is not recommended.");
  } else {
    logger.debug("Setting " + FS_S3_IMPL + " to " + FS_S3_IMPL_DEFAULT);
    setConf(hiveConf, FS_S3_IMPL, FS_S3_IMPL_DEFAULT);
  }

  ADL_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
  WASB_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
  ABFS_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
}
 
Example 12
Source File: HiveConfFactory.java    From dremio-oss with Apache License 2.0
/**
 * Fills in a HiveConf instance with any user provided configuration parameters
 *
 * @param hiveConf - the conf to fill in
 * @param config - the user provided parameters
 */
protected static void addUserProperties(HiveConf hiveConf, BaseHiveStoragePluginConfig<?,?> config) {
  // Used to capture properties set by user
  final Set<String> userPropertyNames = new HashSet<>();
  if(config.propertyList != null) {
    for(Property prop : config.propertyList) {
      userPropertyNames.add(prop.name);
      setConf(hiveConf, prop.name, prop.value);
      if(logger.isTraceEnabled()){
        logger.trace("HiveConfig Override {}={}", prop.name, prop.value);
      }
    }
  }

  // Check if zero-copy has been set by user
  boolean zeroCopySetByUser = userPropertyNames.contains(OrcConf.USE_ZEROCOPY.getAttribute());
  // Configure zero-copy for ORC reader
  if (!zeroCopySetByUser) {
    if (VM.isWindowsHost() || VM.isMacOSHost()) {
      logger.debug("MacOS or Windows host detected. Not automatically enabling ORC zero-copy feature");
    } else {
      String fs = hiveConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
      // Equivalent to a case-insensitive startsWith...
      if (fs.regionMatches(true, 0, "maprfs", 0, 6)) {
        // DX-12672: do not enable ORC zero-copy on MapRFS
        logger.debug("MapRFS detected. Not automatically enabling ORC zero-copy feature");
      } else {
        logger.debug("Linux host detected. Enabling ORC zero-copy feature");
        hiveConf.set(OrcConf.USE_ZEROCOPY.getAttribute(), "true");
      }
    }
  } else {
    boolean useZeroCopy = OrcConf.USE_ZEROCOPY.getBoolean(hiveConf);
    if (useZeroCopy) {
      logger.warn("ORC zero-copy feature has been manually enabled. This is not recommended.");
    } else {
      logger.error("ORC zero-copy feature has been manually disabled. This is not recommended and might cause memory issues");
    }
  }

  // Check if fs.s3.impl has been set by user
  boolean fsS3ImplSetByUser = userPropertyNames.contains(FS_S3_IMPL);
  if (fsS3ImplSetByUser) {
    logger.warn(FS_S3_IMPL + " manually set. This is not recommended.");
  } else {
    logger.debug("Setting " + FS_S3_IMPL + " to " + FS_S3_IMPL_DEFAULT);
    setConf(hiveConf, FS_S3_IMPL, FS_S3_IMPL_DEFAULT);
  }

  ADL_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
  WASB_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
  ABFS_PROPS.entrySet().asList().forEach(entry->setConf(hiveConf, entry.getKey(), entry.getValue()));
}
 
Example 13
Source File: MetaStoreUtil.java    From kite with Apache License 2.0
private static boolean isEmpty(HiveConf conf, String prop) {
  String value = conf.get(prop);
  return (value == null || value.isEmpty());
}