Java Code Examples for org.apache.hadoop.hive.conf.HiveConf.ConfVars

The following examples show how to use org.apache.hadoop.hive.conf.HiveConf.ConfVars. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
@Override
public boolean isCompatibleWith(HiveConf conf) {
    // Returns true only if every metastore-relevant variable captured at client
    // creation time still matches the supplied conf; false tells the caller to
    // recreate the client.
    if (currentMetaVars == null) {
        return false; // recreate
    }
    boolean compatible = true;
    for (ConfVars oneVar : HiveConf.metaVars) {
        // Since metaVars are all of different types, use string for comparison
        String oldVar = currentMetaVars.get(oneVar.varname);
        String newVar = conf.get(oneVar.varname, "");
        if (oldVar == null ||
              (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : !oldVar.equalsIgnoreCase(newVar))) {
            // Bug fix: log message previously read "Mestastore" (typo).
            logger.info("Metastore configuration " + oneVar.varname +
                  " changed from " + oldVar + " to " + newVar);
            compatible = false;
        }
    }
    return compatible;
}
 
Example 2
Source Project: circus-train   Source File: CommonBeans.java    License: Apache License 2.0 6 votes vote down vote up
private HiveConf newHiveConf(TunnelMetastoreCatalog hiveCatalog, Configuration baseConf) {
  // Builds a HiveConf for the catalog: base Hadoop properties first, then the
  // metastore URIs (when configured), then catalog-specific overrides.
  List<String> siteXml = hiveCatalog.getSiteXml();
  if (CollectionUtils.isEmpty(siteXml)) {
    LOG.info("No Hadoop site XML is defined for catalog {}.", hiveCatalog.getName());
  }
  Map<String, String> properties = new HashMap<>();
  for (Entry<String, String> property : baseConf) {
    properties.put(property.getKey(), property.getValue());
  }
  String metastoreUris = hiveCatalog.getHiveMetastoreUris();
  if (metastoreUris != null) {
    properties.put(ConfVars.METASTOREURIS.varname, metastoreUris);
  }
  putConfigurationProperties(hiveCatalog.getConfigurationProperties(), properties);
  return new HiveConfFactory(siteXml, properties).newInstance();
}
 
Example 3
public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) {
  // Creates a metastore client, routing through an SSH tunnel when the
  // metastore's connection type is TUNNELED.
  String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris());
  String name = metaStore.getName().toLowerCase(Locale.ROOT);

  // A timeout of zero is interpreted as an infinite timeout, so clamp the
  // latency-adjusted value to a minimum of 1.
  int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency());

  if (metaStore.getConnectionType() == TUNNELED) {
    return tunnelingMetaStoreClientFactory.newInstance(uris, metaStore.getMetastoreTunnel(), name,
        DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout);
  }
  Map<String, String> properties = new HashMap<>();
  properties.put(ConfVars.METASTOREURIS.varname, uris);
  HiveConf hiveConf = new HiveConfFactory(Collections.emptyList(), properties).newInstance();
  return defaultMetaStoreClientFactory.newInstance(hiveConf, "waggledance-" + name,
      DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout);
}
 
Example 4
Source Project: waggle-dance   Source File: MetaStoreProxyServer.java    License: Apache License 2.0 6 votes vote down vote up
private TServerSocket createServerSocket(boolean useSSL, int port) throws IOException, TTransportException {
  // Creates the Thrift server socket for HMS, SSL-secured when requested.
  TServerSocket serverSocket = null;
  // enable SSL support for HMS
  List<String> sslVersionBlacklist = new ArrayList<>(Arrays.asList(hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")));
  if (!useSSL) {
    serverSocket = HiveAuthUtils.getServerSocket(null, port);
  } else {
    String keyStorePath = hiveConf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
    if (keyStorePath.isEmpty()) {
      // Bug fix: the message previously named the keystore *password* property,
      // but the missing value checked here is the keystore *path*.
      throw new IllegalArgumentException(
          ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname + " Not configured for SSL connection");
    }
    // Resolve the keystore password via the Hadoop shims so it can come from a
    // credential provider rather than plain config.
    String keyStorePassword = ShimLoader
        .getHadoopShims()
        .getPassword(hiveConf, HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
    serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath, keyStorePassword, sslVersionBlacklist);
  }
  return serverSocket;
}
 
Example 5
@Before
public void init() {
  // Wires up mock collaborators so TunnelingMetaStoreClientFactory can be
  // exercised without opening real SSH connections.
  metastoreTunnel = new MetastoreTunnel();
  metastoreTunnel.setRoute(TUNNEL_ROUTE);
  metastoreTunnel.setPrivateKeys(TUNNEL_PRIVATE_KEY);
  metastoreTunnel.setKnownHosts(TUNNEL_KNOWN_HOSTS);
  metastoreTunnel.setLocalhost(TUNNEL_LOCALHOST);

  // Any (host, port, conf) combination yields the canned local HiveConf stub.
  when(localHiveConfFactory.newInstance(any(String.class), any(Integer.class), any(HiveConf.class)))
      .thenReturn(localHiveConf);
  when(tunnelableFactorySupplier.get(metastoreTunnel)).thenReturn(tunnelableFactory);
  when(localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)).thenReturn(METASTORE_URI);
  when(hiveMetaStoreClientSupplierFactory.newInstance(localHiveConf, NAME, RECONNECTION_RETRIES, CONNECTION_TIMEOUT))
      .thenReturn(hiveMetaStoreClientSupplier);

  tunnelingMetaStoreClientFactory = new TunnelingMetaStoreClientFactory(tunnelableFactorySupplier,
      localHiveConfFactory, hiveMetaStoreClientSupplierFactory);
}
 
Example 6
Source Project: dremio-oss   Source File: HiveStoragePlugin.java    License: Apache License 2.0 6 votes vote down vote up
// Captures plugin context/config and derives the two impersonation flags from
// the supplied HiveConf.
public HiveStoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();

  // Storage impersonation follows HiveServer2's doAs setting.
  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true in which case all metastore calls are impersonated as
  //     the authenticated user.
  this.metastoreImpersonationEnabled =
    hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
 
Example 7
Source Project: dremio-oss   Source File: BaseTestHiveImpersonation.java    License: Apache License 2.0 6 votes vote down vote up
// Builds a HiveConf backed by a local Derby metastore and temp scratch dirs,
// creates the warehouse directory on MiniDFS, and resolves test data files.
protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();

  // Configure metastore persistence db location on local filesystem
  final String dbUrl = String.format("jdbc:derby:;databaseName=%s;create=true",  getTempDir("metastore_db"));
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, dbUrl);

  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  // Skip schema verification and auto-create metastore tables for the test db.
  hiveConf.set(ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "false");
  hiveConf.set(ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
  hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");

  // Set MiniDFS conf in HiveConf
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));

  // World-writable warehouse dir so impersonated users can create tables.
  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
 
Example 8
Source Project: dremio-oss   Source File: Hive3StoragePlugin.java    License: Apache License 2.0 6 votes vote down vote up
// Captures plugin context/config and derives the two impersonation flags from
// the supplied HiveConf.
public Hive3StoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();

  // Storage impersonation follows HiveServer2's doAs setting.
  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true in which case all metastore calls are impersonated as
  //     the authenticated user.
  this.metastoreImpersonationEnabled =
    hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI) ||
      hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
 
Example 9
Source Project: dremio-oss   Source File: BaseTestHiveImpersonation.java    License: Apache License 2.0 6 votes vote down vote up
// Builds a HiveConf backed by a local Derby metastore and temp scratch dirs,
// creates the warehouse directory on MiniDFS, and resolves test data files.
protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();

  // Configure metastore persistence db location on local filesystem
  final String dbUrl = String.format("jdbc:derby:;databaseName=%s;create=true",  getTempDir("metastore_db"));
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, dbUrl);

  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  // Skip schema verification and auto-create metastore tables for the test db.
  hiveConf.set(ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "false");
  hiveConf.set(ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
  hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");

  // Set MiniDFS conf in HiveConf
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));

  // World-writable warehouse dir so impersonated users can create tables.
  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
 
Example 10
Source Project: ranger   Source File: RangerHiveAuthorizerBase.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
	LOG.debug("RangerHiveAuthorizerBase.applyAuthorizationConfigPolicy()");

	// from SQLStdHiveAccessController.applyAuthorizationConfigPolicy()
	if (mSessionContext == null || mSessionContext.getClientType() != CLIENT_TYPE.HIVESERVER2) {
		return;
	}

	// Register DisallowTransformHook as a pre-execution hook so that TRANSFORM
	// queries are rejected, appending to any hooks already configured.
	String disallowHook = DisallowTransformHook.class.getName();
	String existingHooks = hiveConf.getVar(ConfVars.PREEXECHOOKS).trim();
	String hooks = existingHooks.isEmpty() ? disallowHook : existingHooks + "," + disallowHook;

	hiveConf.setVar(ConfVars.PREEXECHOOKS, hooks);

	SettableConfigUpdater.setHiveConfWhiteList(hiveConf);
}
 
Example 11
@Before
public void setup() throws Exception {
  // Builds a HiveConf pointing at a writable temp scratch dir, registers the
  // Sentry authorization task factory, and mocks the Hive metastore layer.
  conf = new HiveConf();
  baseDir = Files.createTempDir();
  baseDir.setWritable(true, false);
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
  SessionState.start(conf);
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
      SentryHiveAuthorizationTaskFactoryImpl.class.getName());

  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  partition = new Partition(table);
  context = new Context(conf);
  parseDriver = new ParseDriver();
  analyzer = new DDLSemanticAnalyzer(conf, db);
  // NOTE(review): SessionState.start(conf) is invoked a second time here —
  // looks redundant with the call above; confirm whether it is intentional
  // (e.g. to pick up HIVE_AUTHORIZATION_TASK_FACTORY set after the first start).
  SessionState.start(conf);
  Mockito.when(db.getTable(TABLE, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
  .thenReturn(partition);

  // Resolve the current OS user so tests can assert against it.
  HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
  auth.setConf(conf);
  currentUser = auth.getUserName();

}
 
Example 12
Source Project: incubator-sentry   Source File: HiveAuthzBinding.java    License: Apache License 2.0 6 votes vote down vote up
private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
    throws InvalidConfigurationException{
  // Outside testing mode the metastore must use SASL; in testing mode it must
  // at least propagate the client UGI.
  boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
      authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
  LOG.debug("Testing mode is " + isTestingMode);
  if (isTestingMode) {
    boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
    if (!setUgi) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non secure mode");
    }
  } else {
    boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
    if (!sasl) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
    }
  }
}
 
Example 13
Source Project: incubator-sentry   Source File: HiveAuthzBindingHook.java    License: Apache License 2.0 6 votes vote down vote up
@VisibleForTesting
protected static AccessURI parseURI(String uri, boolean isLocal)
    throws SemanticException {
  // Parses the given URI relative to the Hive warehouse directory, qualifying
  // a scheme-less warehouse path against the default filesystem first.
  try {
    HiveConf conf = SessionState.get().getConf();
    String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
    Path warehousePath = new Path(warehouseDir);
    if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
      // No scheme/authority: qualify using the default FileSystem.
      warehouseDir = FileSystem.get(conf).makeQualified(warehousePath).toUri().toString();
    }
    return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
  } catch (Exception cause) {
    throw new SemanticException("Error parsing URI " + uri + ": " +
      cause.getMessage(), cause);
  }
}
 
Example 14
Source Project: data-highway   Source File: HiveConfFactory.java    License: Apache License 2.0 5 votes vote down vote up
// Builds a HiveConf that talks only to the given metastore URIs, without
// picking up any *-site.xml files from the classpath.
// NOTE: HiveConf's hive-site location is process-global static state, hence
// the save/null/restore dance and the synchronization on HiveConf.class.
public HiveConf newInstance(String hiveMetaStoreUris) {
  synchronized (HiveConf.class) {
    // The following prevents HiveConf from loading the default hadoop
    // *-site.xml and hive-site.xml if they're on the classpath.
    URL hiveSiteLocation = HiveConf.getHiveSiteLocation();
    HiveConf.setHiveSiteLocation(null);
    HiveConf conf = new HiveConf(new Configuration(false), getClass());
    HiveConf.setHiveSiteLocation(hiveSiteLocation);
    conf.setVar(ConfVars.METASTOREURIS, hiveMetaStoreUris);
    return conf;
  }
}
 
Example 15
@Override
public String getMetaConf(String key) throws MetaException, TException {
    // Reject keys that are not recognised meta-conf variables.
    ConfVars metaVar = HiveConf.getMetaConf(key);
    if (metaVar == null) {
        throw new MetaException("Invalid configuration key " + key);
    }
    // Fall back to the variable's built-in default when unset.
    return conf.get(key, metaVar.getDefaultValue());
}
 
Example 16
@Override
public void setMetaConf(String key, String value) throws MetaException, TException {
    // Only recognised meta-conf variables may be set.
    ConfVars metaVar = HiveConf.getMetaConf(key);
    if (metaVar == null) {
        throw new MetaException("Invalid configuration key " + key);
    }
    // validate() returns a failure description, or null when the value is acceptable.
    String failure = metaVar.validate(value);
    if (failure != null) {
        throw new MetaException("Invalid configuration value " + value + " for key " + key + " by " + failure);
    }
    conf.set(key, value);
}
 
Example 17
@Override
public void validatePartitionNameCharacters(List<String> part_vals) throws TException, MetaException {
    // Validates partition values against the configured whitelist pattern
    // (no pattern configured means no restriction).
    try {
        String whitelistRegex = conf.getVar(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN);
        Pattern whitelistPattern = Strings.isNullOrEmpty(whitelistRegex)
              ? null
              : Pattern.compile(whitelistRegex);
        MetaStoreUtils.validatePartitionNameCharacters(part_vals, whitelistPattern);
    } catch (MetaException e) {
        // Already the expected type; propagate unchanged.
        throw e;
    } catch (Exception e) {
        // Wrap anything else, keeping only the message.
        throw new MetaException(e.getMessage());
    }
}
 
Example 18
@Test
public void testDefaultNamespaceCreation() throws Exception {
  // With createDefaults(true) and no existing default database in Glue, the
  // client builder should create one and probe its warehouse path.
  doThrow(new EntityNotFoundException("")).when(glueClient).getDatabase(any(GetDatabaseRequest.class));

  // No admin users configured.
  // NOTE(review): getVar is invoked on the conf instance with conf also passed
  // as the first argument — presumably the static HiveConf.getVar(Configuration,
  // ConfVars, String) overload; confirm against the HiveConf API.
  when(conf.getVar(conf, ConfVars.USERS_IN_ADMIN_ROLE, "")).thenReturn("");
  metastoreClient = new AWSCatalogMetastoreClient.Builder().withClientFactory(clientFactory)
      .withMetastoreFactory(metastoreFactory).withWarehouse(wh).createDefaults(true).withHiveConf(conf).build();

  verify(glueClient, times(1)).createDatabase(any(CreateDatabaseRequest.class));
  verify(wh, times(1)).getDefaultDatabasePath(DEFAULT_DATABASE_NAME);
  verify(wh, times(1)).isDir(defaultWhPath);
}
 
Example 19
@Override
public String getMetaConf(String key) throws MetaException, TException {
  // Reject keys that are not recognised meta-conf variables.
  ConfVars metaVar = HiveConf.getMetaConf(key);
  if (metaVar == null) {
    throw new MetaException("Invalid configuration key " + key);
  }
  // Fall back to the variable's built-in default when unset.
  return conf.get(key, metaVar.getDefaultValue());
}
 
Example 20
Source Project: incubator-sentry   Source File: TestHiveAuthzBindings.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Turn off authentication and verify exception is raised in non-testing mode
 * @throws Exception
 */
@Test(expected=InvalidConfigurationException.class)
public void testNoAuthenticationRestriction() throws Exception {
  // prepare the hive and auth configs (fixed typo: "perpare")
  hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, "None");
  authzConf.set(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "false");
  // Constructing the binding performs the validation that should throw.
  testAuth = new HiveAuthzBinding(hiveConf, authzConf);
}
 
Example 21
@Override
public void setMetaConf(String key, String value) throws MetaException, TException {
  // Only recognised meta-conf variables may be set.
  ConfVars metaVar = HiveConf.getMetaConf(key);
  if (metaVar == null) {
    throw new MetaException("Invalid configuration key " + key);
  }
  // validate() returns a failure description, or null when the value is acceptable.
  String failure = metaVar.validate(value);
  if (failure != null) {
    throw new MetaException("Invalid configuration value " + value + " for key " + key + " by " + failure);
  }
  conf.set(key, value);
}
 
Example 22
@Override
public void validatePartitionNameCharacters(List<String> part_vals) throws TException, MetaException {
  // Validates partition values against the configured whitelist pattern
  // (no pattern configured means no restriction).
  try {
    String whitelistRegex = conf.getVar(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN);
    Pattern whitelistPattern = Strings.isNullOrEmpty(whitelistRegex)
          ? null
          : Pattern.compile(whitelistRegex);
    MetaStoreUtils.validatePartitionNameCharacters(part_vals, whitelistPattern);
  } catch (MetaException e) {
    // Already the expected type; propagate unchanged.
    throw e;
  } catch (Exception e) {
    // Wrap anything else, keeping only the message.
    throw new MetaException(e.getMessage());
  }
}
 
Example 23
Source Project: circus-train   Source File: ReplicaTableFactory.java    License: Apache License 2.0 5 votes vote down vote up
// Convenience constructor: extracts the source metastore URIs from the source
// HiveConf and delegates to the main constructor.
ReplicaTableFactory(
    HiveConf sourceHiveConf,
    TableTransformation tableTransformation,
    PartitionTransformation partitionTransformation,
    ColumnStatisticsTransformation columnStatisticsTransformation) {
  this(sourceHiveConf.getVar(ConfVars.METASTOREURIS), tableTransformation, partitionTransformation,
      columnStatisticsTransformation);
}
 
Example 24
Source Project: circus-train   Source File: CircusTrainTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void singleYmlFile() throws Exception {
  // End-to-end run: writes a replication config YAML pointing source and
  // replica at the embedded metastore, runs CircusTrain, and verifies the
  // replica table exists after a clean (exit code 0) run.
  exit.expectSystemExitWithStatus(0);
  File ymlFile = temp.newFile("test-application.yml");
  List<String> lines = ImmutableList
      .<String>builder()
      .add("extension-packages: " + TestCopierFactory.class.getPackage().getName())
      .add("source-catalog:")
      .add("  name: source")
      .add("  configuration-properties:")
      .add("    " + ConfVars.METASTOREURIS.varname + ": " + hive.getThriftConnectionUri())
      .add("replica-catalog:")
      .add("  name: replica")
      .add("  hive-metastore-uris: " + hive.getThriftConnectionUri())
      .add("table-replications:")
      .add("  -")
      .add("    source-table:")
      .add("      database-name: " + DATABASE)
      .add("      table-name: source_" + TABLE)
      .add("    replica-table:")
      .add("      table-name: replica_" + TABLE)
      .add("      table-location: " + temp.newFolder("replica"))
      .build();
  Files.asCharSink(ymlFile, UTF_8).writeLines(lines);

  // Assertion runs after System.exit is intercepted by the exit rule.
  exit.checkAssertionAfterwards(new Assertion() {
    @Override
    public void checkAssertion() throws Exception {
      assertTrue(hive.client().tableExists(DATABASE, "replica_" + TABLE));
    }
  });

  CircusTrain.main(new String[] { "--config=" + ymlFile.getAbsolutePath() });
}
 
Example 25
Source Project: circus-train   Source File: CircusTrainTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void singleYmlFileWithMultipleExtensions() throws Exception {
  // End-to-end run with two extension packages configured; verifies the test
  // extension received its config value ("foo") after a clean exit.
  TestLocomotiveListener.testBean = null;
  exit.expectSystemExitWithStatus(0);
  File ymlFile = temp.newFile("test-application.yml");
  List<String> lines = ImmutableList
      .<String>builder()
      .add("source-catalog:")
      .add("  name: source")
      .add("  configuration-properties:")
      .add("    " + ConfVars.METASTOREURIS.varname + ": " + hive.getThriftConnectionUri())
      .add("replica-catalog:")
      .add("  name: replica")
      .add("  hive-metastore-uris: " + hive.getThriftConnectionUri())
      .add("table-replications:")
      .add("  -")
      .add("    source-table:")
      .add("      database-name: " + DATABASE)
      .add("      table-name: source_" + TABLE)
      .add("    replica-table:")
      .add("      table-name: replica_" + TABLE)
      .add("      table-location: " + temp.newFolder("replica"))
      .add("extension-packages: com.hotels.test.extension, " + TestCopierFactory.class.getPackage().getName())
      .add("testExtensionConfig: foo")
      .build();
  Files.asCharSink(ymlFile, UTF_8).writeLines(lines);

  // Assertion runs after System.exit is intercepted by the exit rule.
  exit.checkAssertionAfterwards(new Assertion() {
    @Override
    public void checkAssertion() throws Exception {
      assertThat(TestLocomotiveListener.testBean.getValue(), is("foo"));
    }
  });
  CircusTrain.main(new String[] { "--config=" + ymlFile.getAbsolutePath() });
}
 
Example 26
Source Project: circus-train   Source File: ReplicaTest.java    License: Apache License 2.0 5 votes vote down vote up
@Before
public void prepare() throws Exception {
  // Builds the fixture for Replica tests: a HiveConf pointing at the replica
  // metastore URIs, a source table with column statistics, and mocks for the
  // metastore client and replica location manager.
  when(metaStoreClientSupplier.get()).thenReturn(mockMetaStoreClient);
  when(replicaCatalog.getName()).thenReturn(NAME);

  hiveConf = new HiveConf();
  hiveConf.setVar(ConfVars.METASTOREURIS, REPLICA_META_STORE_URIS);
  replica = newReplica(tableReplication);
  tableLocation = temporaryFolder.newFolder("table_location").toURI().toString();

  sourceTable = newTable();
  existingPartition = newPartition("one", "two");

  // Two columns with long stats, bundled into a table-level ColumnStatistics.
  ColumnStatisticsObj columnStatisticsObj1 = new ColumnStatisticsObj(COLUMN_A, "string",
      new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(0, 1)));
  ColumnStatisticsObj columnStatisticsObj2 = new ColumnStatisticsObj(COLUMN_B, "string",
      new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1, 2)));
  columnStatisticsObjs = Arrays.asList(columnStatisticsObj1, columnStatisticsObj2);
  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, DB_NAME, TABLE_NAME);
  columnStatistics = new ColumnStatistics(statsDesc, columnStatisticsObjs);

  tableAndStatistics = new TableAndStatistics(sourceTable, columnStatistics);

  // The "existing" replica table starts as a copy of the source table.
  existingReplicaTable = new Table(sourceTable);

  when(mockReplicaLocationManager.getTableLocation()).thenReturn(new Path(tableLocation));
  when(mockReplicaLocationManager.getPartitionBaseLocation()).thenReturn(new Path(tableLocation));

  when(mockMetaStoreClient.getTable(DB_NAME, TABLE_NAME)).thenReturn(existingReplicaTable);
}
 
Example 27
@Override
public void beforeTest() throws Throwable {
  // Override with values given in the hiveConf: copies the four metastore
  // DB connection settings (URL, driver, user, password) into the core
  // fixture before delegating to the parent's setup.
  core.setHiveVar(ConfVars.METASTORECONNECTURLKEY, hiveConf.getVar(ConfVars.METASTORECONNECTURLKEY));
  core.setHiveVar(ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf.getVar(ConfVars.METASTORE_CONNECTION_DRIVER));
  core.setHiveVar(ConfVars.METASTORE_CONNECTION_USER_NAME, hiveConf.getVar(ConfVars.METASTORE_CONNECTION_USER_NAME));
  core.setHiveVar(ConfVars.METASTOREPWD, hiveConf.getVar(ConfVars.METASTOREPWD));
  super.beforeTest();
}
 
Example 28
public CloseableThriftHiveMetastoreIface newInstance(
    String uris,
    MetastoreTunnel metastoreTunnel,
    String name,
    int reconnectionRetries,
    int connectionTimeout) {
  // Builds a metastore client whose Thrift connection is proxied through an
  // SSH tunnel described by metastoreTunnel.
  String uri = uris;
  String[] urisSplit = uris.split(",");
  if (urisSplit.length > 1) {
    // Only a single endpoint can be tunnelled; use the first one.
    uri = urisSplit[0];
    LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri);
  }
  String localHost = metastoreTunnel.getLocalhost();
  int localPort = getLocalPort();

  // Local HiveConf points at the tunnel's local end rather than the remote URI.
  Map<String, String> confProperties = new HashMap<>();
  confProperties.put(ConfVars.METASTOREURIS.varname, uri);
  HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), confProperties);
  HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance());

  LOG
      .info("Metastore URI {} is being proxied through {}", uri,
          localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS));

  TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier
      .get(metastoreTunnel);
  HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory
      .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout);

  // Wrap the supplier so the tunnel is established around client calls.
  URI metaStoreUri = URI.create(uri);
  return (CloseableThriftHiveMetastoreIface) tunnelableFactory
      .wrap(supplier, METHOD_CHECKER, localHost, localPort, metaStoreUri.getHost(), metaStoreUri.getPort());
}
 
Example 29
@Test
public void localHiveConfigUsesCorrectParameters() {
  // Verifies that the local HiveConf built for the tunnel uses the tunnel's
  // configured localhost and carries the metastore URI property.
  tunnelingMetaStoreClientFactory.newInstance(METASTORE_URI, metastoreTunnel, NAME, RECONNECTION_RETRIES,
      CONNECTION_TIMEOUT);
  ArgumentCaptor<String> localHostCaptor = ArgumentCaptor.forClass(String.class);
  ArgumentCaptor<HiveConf> hiveConfCaptor = ArgumentCaptor.forClass(HiveConf.class);
  // we get random assigned free port for local port
  verify(localHiveConfFactory).newInstance(localHostCaptor.capture(), anyInt(), hiveConfCaptor.capture());
  assertThat(localHostCaptor.getValue(), is(TUNNEL_LOCALHOST));
  HiveConf hiveConf = hiveConfCaptor.getValue();
  assertThat(hiveConf.get(ConfVars.METASTOREURIS.varname), is(METASTORE_URI));
}
 
Example 30
@Test
public void openWithDummyConnectionThrowsRuntimeWithOriginalExceptionInMessage() {
  // Point the client at a port nothing listens on; open() must fail with a
  // RuntimeException whose message carries the underlying ConnectException.
  hiveConf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:123");
  manager = new ThriftMetastoreClientManager(hiveConf, hiveCompatibleThriftHiveMetastoreIfaceFactory,
      connectionTimeout);

  try {
    manager.open();
    // Bug fix: the test previously passed silently when open() did NOT throw.
    // AssertionError is not a RuntimeException, so it escapes the catch below.
    throw new AssertionError("Expected open() to throw a RuntimeException");
  } catch (RuntimeException e) {
    assertThat(e.getMessage(), containsString("java.net.ConnectException: Connection refused"));
  }
}