org.apache.hive.jdbc.HiveDriver Java Examples
The following examples show how to use
org.apache.hive.jdbc.HiveDriver.
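Before the project-specific examples, here is a minimal, self-contained sketch of loading the driver and running a query through java.sql.DriverManager. The HiveServer2 URL, user, and query below are placeholder values for illustration only, not taken from any of the examples.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveDriverQuickStart {
    public static void main(String[] args) throws Exception {
        // Load and register the driver; recent driver versions also self-register
        // through the JDBC ServiceLoader mechanism, so this call may be redundant.
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Placeholder HiveServer2 endpoint and credentials.
        String url = "jdbc:hive2://localhost:10000/default";
        try (Connection connection = DriverManager.getConnection(url, "hive", "");
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("SHOW TABLES")) {
            while (resultSet.next()) {
                System.out.println(resultSet.getString(1));
            }
        }
    }
}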
Example #1
Source File: HerdMetastoreConfig.java From herd-mdl with Apache License 2.0
@Bean(name = "hiveJdbcTemplate")
public JdbcTemplate hiveJdbcTemplate() {
    SimpleDriverDataSource dataSource = new SimpleDriverDataSource(
            new HiveDriver(), HIVE_URL, HIVE_USER, HIVE_PASSWORD );
    return new JdbcTemplate( dataSource );
}
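One possible way to consume the bean above, shown as a sketch of a fragment inside another Spring component; the injection point and the some_table table name are assumptions for illustration, not part of the herd-mdl source.

@Autowired
@Qualifier("hiveJdbcTemplate")
private JdbcTemplate hiveJdbcTemplate;

public long countRows() {
    // Table name is hypothetical; substitute a table that exists in your Hive database.
    return hiveJdbcTemplate.queryForObject("SELECT COUNT(*) FROM some_table", Long.class);
}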
Example #2
Source File: DriverProxyInvocationChainTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testGetProxyNotNull() throws Exception {
  assertTrue( DriverProxyInvocationChain.isInitialized() );
  if ( Boolean.parseBoolean( System.getProperty( "org.pentaho.hadoop.shims.check.hive1", "true" ) ) ) {
    // Create Hive driver
    Driver hiveDriver = new HiveDriver();
    // Create proxy to driver
    Driver driver = DriverProxyInvocationChain.getProxy( Driver.class, hiveDriver );
    assertNotNull( driver );
  }
}
Example #3
Source File: DriverProxyInvocationChainTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testDatabaseSelected() throws SQLException {
  Driver driverMock = mock( HiveDriver.class );
  Driver driverProxy = DriverProxyInvocationChain.getProxy( Driver.class, driverMock );

  Connection connectionMock = mock( Connection.class );
  doReturn( connectionMock ).when( driverMock ).connect( anyString(), (Properties) isNull() );

  Statement statementMock = mock( Statement.class );
  doReturn( statementMock ).when( connectionMock ).createStatement();

  driverProxy.connect( "jdbc:hive://host:port/dbName", null );

  verify( statementMock ).execute( "use dbName" );
}
Example #4
Source File: DriverProxyInvocationChainTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testGetTablesWithSchema() throws SQLException {
  Class hive2;
  try {
    hive2 = Class.forName( "org.apache.hive.jdbc.HiveDatabaseMetaData" );
  } catch ( ClassNotFoundException e ) {
    return;
  }
  if ( hive2 != null ) {
    Driver driverMock = mock( HiveDriver.class );
    Driver driverProxy = DriverProxyInvocationChain.getProxy( Driver.class, driverMock );

    Connection connectionMock = mock( Connection.class );
    doReturn( connectionMock ).when( driverMock ).connect( anyString(), (Properties) isNull() );

    Statement statementMock = mock( Statement.class );
    doReturn( statementMock ).when( connectionMock ).createStatement();

    ResultSet resultSet = mock( ResultSet.class );
    doReturn( resultSet ).when( statementMock ).executeQuery( anyString() );

    DatabaseMetaData databaseMetaDataMock = (DatabaseMetaData) mock( hive2 );
    doReturn( databaseMetaDataMock ).when( connectionMock ).getMetaData();

    String schema = "someSchema";
    doThrow( new SQLException( "Method is not supported" ) ).when( databaseMetaDataMock )
        .getTables( null, schema, null, null );

    Connection conn = driverProxy.connect( "jdbc:hive://host:port/dbName", null );
    conn.getMetaData().getTables( null, schema, null, null );

    verify( statementMock ).execute( "use dbName" );
    verify( statementMock ).executeQuery( "show tables in " + schema );
  }
}
Example #5
Source File: HiveConnectionPool.java From localization_nifi with Apache License 2.0
/**
 * Configures connection pool by creating an instance of the
 * {@link BasicDataSource} based on configuration provided with
 * {@link ConfigurationContext}.
 * <p>
 * This operation makes no guarantees that the actual connection could be
 * made since the underlying system may still go off-line during normal
 * operation of the connection pool.
 *
 * @param context the configuration context
 * @throws InitializationException if unable to create a database connection
 */
@OnEnabled
public void onConfigured(final ConfigurationContext context) throws InitializationException {
    connectionUrl = context.getProperty(DATABASE_URL).getValue();

    ComponentLog log = getLogger();

    final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).getValue();
    final Configuration hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);
    final String validationQuery = context.getProperty(VALIDATION_QUERY).evaluateAttributeExpressions().getValue();

    // add any dynamic properties to the Hive configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hiveConfig.set(descriptor.getName(), entry.getValue());
        }
    }

    final String drv = HiveDriver.class.getName();
    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).getValue();
        final String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).getValue();

        log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[]{principal, keyTab});
        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, principal, keyTab, TICKET_RENEWAL_PERIOD, log);
        } catch (AuthenticationFailedException ae) {
            log.error(ae.getMessage(), ae);
        }
        getLogger().info("Successfully logged in as principal {} with keytab {}", new Object[]{principal, keyTab});
    }

    final String user = context.getProperty(DB_USER).getValue();
    final String passw = context.getProperty(DB_PASSWORD).getValue();
    final Long maxWaitMillis = context.getProperty(MAX_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final Integer maxTotal = context.getProperty(MAX_TOTAL_CONNECTIONS).asInteger();

    dataSource = new BasicDataSource();
    dataSource.setDriverClassName(drv);

    final String dburl = context.getProperty(DATABASE_URL).getValue();

    dataSource.setMaxWait(maxWaitMillis);
    dataSource.setMaxActive(maxTotal);

    if (validationQuery != null && !validationQuery.isEmpty()) {
        dataSource.setValidationQuery(validationQuery);
        dataSource.setTestOnBorrow(true);
    }

    dataSource.setUrl(dburl);
    dataSource.setUsername(user);
    dataSource.setPassword(passw);
}
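The same pooling setup can be sketched outside of NiFi with commons-dbcp directly. The endpoint and credentials below are placeholders, and the setMaxWait/setMaxActive setters assume the commons-dbcp 1.x API used in the example above (commons-dbcp2 renames them to setMaxWaitMillis/setMaxTotal).

import java.sql.Connection;

import org.apache.commons.dbcp.BasicDataSource;
import org.apache.hive.jdbc.HiveDriver;

public class HivePoolSketch {
    public static void main(String[] args) throws Exception {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(HiveDriver.class.getName());
        dataSource.setUrl("jdbc:hive2://localhost:10000/default"); // placeholder endpoint
        dataSource.setUsername("hive");                            // placeholder credentials
        dataSource.setPassword("");
        dataSource.setMaxWait(10000);  // milliseconds to wait for a free connection (DBCP 1.x)
        dataSource.setMaxActive(8);    // maximum concurrent connections (DBCP 1.x)

        try (Connection connection = dataSource.getConnection()) {
            System.out.println("Connected: " + !connection.isClosed());
        } finally {
            dataSource.close();
        }
    }
}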
Example #6
Source File: HiveServer2JUnitRule.java From beeju with Apache License 2.0
/**
 * @return the name of the Hive JDBC driver class used to access the database.
 */
@Override
public String driverClassName() {
  return HiveDriver.class.getName();
}
Example #7
Source File: HiveServer2JUnitExtension.java From beeju with Apache License 2.0
/**
 * @return the name of the Hive JDBC driver class used to access the database.
 */
@Override
public String driverClassName() {
  return HiveDriver.class.getName();
}
Example #8
Source File: BlurSerDeTest.java From incubator-retired-blur with Apache License 2.0
private int runLoad(boolean disableMrUpdate)
    throws IOException, InterruptedException, ClassNotFoundException, SQLException {
  Configuration configuration = miniCluster.getMRConfiguration();
  writeSiteFiles(configuration);
  HiveConf hiveConf = new HiveConf(configuration, getClass());
  hiveConf.set("hive.server2.thrift.port", "0");
  HiveServer2 hiveServer2 = new HiveServer2();
  hiveServer2.init(hiveConf);
  hiveServer2.start();
  int port = waitForStartupAndGetPort(hiveServer2);

  Class.forName(HiveDriver.class.getName());
  String userName = UserGroupInformation.getCurrentUser().getShortUserName();
  Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:" + port, userName, "");

  UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
  run(connection, "set blur.user.name=" + currentUser.getUserName());
  run(connection, "set blur.mr.update.disabled=" + disableMrUpdate);
  run(connection, "set hive.metastore.warehouse.dir=" + WAREHOUSE.toURI().toString());

  run(connection, "create database if not exists testdb");
  run(connection, "use testdb");

  run(connection, "CREATE TABLE if not exists testtable ROW FORMAT SERDE 'org.apache.blur.hive.BlurSerDe' "
      + "WITH SERDEPROPERTIES ( 'blur.zookeeper.connection'='" + miniCluster.getZkConnectionString() + "', "
      + "'blur.table'='" + TEST + "', 'blur.family'='" + FAM + "' ) "
      + "STORED BY 'org.apache.blur.hive.BlurHiveStorageHandler'");
  run(connection, "desc testtable");

  String createLoadTable = buildCreateLoadTable(connection);
  run(connection, createLoadTable);
  File dbDir = new File(WAREHOUSE, "testdb.db");
  File tableDir = new File(dbDir, "loadtable");
  int totalRecords = 100;
  generateData(tableDir, totalRecords);

  run(connection, "select * from loadtable");
  run(connection, "set " + BlurSerDe.BLUR_BLOCKING_APPLY + "=true");
  run(connection, "insert into table testtable select * from loadtable");
  connection.close();

  hiveServer2.stop();
  return totalRecords;
}