org.apache.hadoop.hive.ql.Driver Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.Driver. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: SentryConfigTool.java    From incubator-sentry with Apache License 2.0 7 votes vote down vote up
/**
 * Compiles the given query against a local Hive driver to verify the current
 * user has the privileges required to run it. Prints the missing permissions
 * when the failure is Sentry-authorization related.
 *
 * @param queryStr HiveQL statement to compile (not executed)
 * @throws SemanticException if compilation fails for any reason
 */
public void verifyLocalQuery(String queryStr) throws Exception {
  // setup Hive driver
  SessionState session = new SessionState(getHiveConf());
  SessionState.start(session);
  Driver driver = new Driver(session.getConf(), getUser());

  try {
    // compile the query
    CommandProcessorResponse compilerStatus = driver
        .compileAndRespond(queryStr);
    if (compilerStatus.getResponseCode() != 0) {
      String errMsg = compilerStatus.getErrorMessage();
      // Only authorization failures carry the Sentry privilege marker.
      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
        printMissingPerms(getHiveConf().get(
            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
      }
      throw new SemanticException("Compilation error: "
          + compilerStatus.getErrorMessage());
    }
  } finally {
    // BUGFIX: previously the driver was closed only on the success path,
    // leaking it whenever compilation failed.
    driver.close();
  }
  System.out
      .println("User " + getUser() + " has privileges to run the query");
}
 
Example #2
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Stages the bundled "casetestdata.parquet" fixture in a temp directory and
 * creates a parquet table over it whose column names (and struct sub-field
 * names) mix upper, lower, and mixed case.
 */
private void createTableMixedCaseColumnsWithComplexTypes(Driver hiveDriver, String table) throws IOException {
  // Temp staging directory and file are removed on JVM exit.
  final File testDir = new File(BaseTestQuery.getTempDir("casetest"));
  testDir.mkdirs();
  final File dataFile = new File(testDir, "casetestdata.parquet");
  dataFile.deleteOnExit();
  testDir.deleteOnExit();
  final URL resource = Resources.getResource("casetestdata.parquet");
  Files.write(Paths.get(dataFile.toURI()), Resources.toByteArray(resource));

  // Table location points at the directory containing the staged file.
  final String ddl = "create table " + table +
    " (upcase_col string, " +
    "lwcase_col string, " +
    "mixedcase_col string, " +
    "upcase_struct_col struct<upcase_sub1:string, lowcase_sub2:string, mixedcase_sub3:string>, " +
    "lwcase_struct_col struct<upcase_sub1:string, lowcase_sub2:string, mixedcase_sub3:string>) " +
    "stored as parquet location 'file://" + dataFile.getParent() + "'";
  executeQuery(hiveDriver, ddl);
}
 
Example #3
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a text-format table in which every field (except rownum) is an
 * array type, then loads it from a generated delimited data file.
 */
private void createListTypesTextTable(final Driver hiveDriver, final String table) throws Exception {
  final String dataFile = generateListTypesDataFile();
  final String ddl =
    "CREATE TABLE IF NOT EXISTS " + table + " (" +
      " rownum int," +
      " tinyint_field array<tinyint>, " +
      " smallint_field array<smallint>, " +
      " int_field array<int>, " +
      " bigint_field array<bigint>, " +
      " float_field array<float>, " +
      " double_field array<double>, " +
      " timestamp_field array<timestamp>, " +
      " date_field array<date>, " +
      " string_field array<string>, " +
      " boolean_field array<boolean>, " +
      " binary_field array<binary> " +
      ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','";
  executeQuery(hiveDriver, ddl);
  // Load data into table 'readtest'
  executeQuery(hiveDriver,
    "LOAD DATA LOCAL INPATH '" + dataFile + "' INTO TABLE default." + table);
}
 
Example #4
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates {@code table} partitioned by decimal(20,2) with a partition value
 * too large for smaller precisions, plus an external table over the same
 * location declared with a narrower decimal(15,3) partition column, to
 * exercise decimal partition-value overflow handling.
 */
private void createPartitionDecimalOverflow(final Driver hiveDriver, final String table) throws Exception {
  String createParqetTableCmd = "CREATE TABLE " + table + " (col1 int) partitioned by (col2 decimal(20, 2)) STORED AS PARQUET";
  // Partition value 123456789101214161.12 needs the full 20-digit precision.
  String insertData = "INSERT INTO TABLE " + table + " PARTITION(col2=123456789101214161.12) VALUES(202)";

  // External table redeclares the partition column at decimal(15,3) over the
  // managed table's files.
  String createParquetExtTable = "CREATE EXTERNAL TABLE " + table + "_ext" + " (col1 int) partitioned by (col2 decimal(15,3))" +
    " STORED AS PARQUET LOCATION 'file://" + this.getWhDir() + "/" + table + "'";
  String insertData1 = "INSERT INTO TABLE " + table + "_ext" + " PARTITION(col2=123456789.120) VALUES(2202)";
  // insertData2 and insertData3 target the same partition value with
  // different rows, producing multiple files in one partition.
  String insertData2 = "INSERT INTO TABLE " + table + "_ext" + " PARTITION(col2=123456789101.123) VALUES(234)";
  String insertData3 = "INSERT INTO TABLE " + table + "_ext" + " PARTITION(col2=123456789101.123) VALUES(154)";
  String insertData4 = "INSERT INTO TABLE " + table + "_ext" + " PARTITION(col2=123456789102.123) VALUES(184)";
  String insertData5 = "INSERT INTO TABLE " + table + "_ext" + " PARTITION(col2=15.300) VALUES(153)";
  // Register the manually-created partitions with the metastore.
  String partitionRepair = "msck repair table " + table + "_ext";

  executeQuery(hiveDriver, createParqetTableCmd);
  executeQuery(hiveDriver, insertData);
  executeQuery(hiveDriver, createParquetExtTable);
  executeQuery(hiveDriver, insertData1);
  executeQuery(hiveDriver, insertData2);
  executeQuery(hiveDriver, insertData3);
  executeQuery(hiveDriver, insertData4);
  // NOTE(review): insertData4 is executed twice here — possibly intentional
  // (duplicate rows/files in the 123456789102.123 partition), but it reads
  // like a copy/paste slip for insertData5; confirm against the test's
  // expected row counts before changing.
  executeQuery(hiveDriver, insertData4);
  executeQuery(hiveDriver, insertData5);
  executeQuery(hiveDriver, partitionRepair);
}
 
Example #5
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a parquet table with decimal(8,5) columns, inserts rows, then
 * narrows the first column to decimal(5,2) via ALTER TABLE so the file schema
 * no longer matches the table schema (filter push-down test fixture).
 */
private void createParquetDecimalSchemaChangeFilterTestTable(final Driver hiveDriver, final String table) {
  final String createDdl = "CREATE TABLE " + table + " (schema_mismatch_col decimal(8,5), value_in_parquet decimal(8,5)) STORED AS " +
    "PARQUET";
  executeQuery(hiveDriver, createDdl);

  // Each row stores the same value in both columns.
  for (String value : new String[] {"123.12341", "123.13341", "123.12000", "123.12100", "543.21568"}) {
    executeQuery(hiveDriver, "INSERT INTO " + table + " VALUES (" + value + ", " + value + ")");
  }

  // Narrow the declared precision/scale after the data was written.
  executeQuery(hiveDriver, "ALTER TABLE " + table + " CHANGE schema_mismatch_col schema_mismatch_col decimal(5,2)");
}
 
Example #6
Source File: ITSqlStdBasedAuthorization.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Boots a Hive session against the shared test configuration and provisions
 * the database, tables, role, and SELECT grants used by the SQL-standard
 * authorization tests.
 */
private static void generateHiveTestData() throws Exception {
  final SessionState sessionState = new SessionState(hiveConf);
  SessionState.start(sessionState);
  final Driver hiveDriver = new Driver(hiveConf);

  // Database plus two student tables and one voter table.
  executeQuery(hiveDriver, "CREATE DATABASE " + db_general);
  createTbl(hiveDriver, db_general, g_student_user0, studentDef, studentData);
  createTbl(hiveDriver, db_general, g_voter_role0, voterDef, voterData);
  createTbl(hiveDriver, db_general, g_student_user2, studentDef, studentData);

  // Create a role and grant it to two users.
  executeQuery(hiveDriver, "SET ROLE admin");
  executeQuery(hiveDriver, "CREATE ROLE " + test_role0);
  executeQuery(hiveDriver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
  executeQuery(hiveDriver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);

  // Per-table SELECT grants: two direct user grants, one via the role.
  executeQuery(hiveDriver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
  executeQuery(hiveDriver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
  executeQuery(hiveDriver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
}
 
Example #7
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a text table with a single nested struct column covering the
 * numeric types plus string, and loads it from a generated delimited file.
 */
private void createStructTypesTextTable(final Driver hiveDriver, final String table) throws Exception {
  final String dataFile = generateStructTypesDataFile();
  final String ddl =
    "CREATE TABLE IF NOT EXISTS " + table + " (" +
      " rownum int," +
      " struct_field struct<" +
      " tinyint_field: tinyint, " +
      " smallint_field: smallint, " +
      " int_field: int, " +
      " bigint_field: bigint, " +
      " float_field: float, " +
      " double_field: double, " +
      " string_field: string> " +
      ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','";
  executeQuery(hiveDriver, ddl);
  executeQuery(hiveDriver,
    "LOAD DATA LOCAL INPATH '" + dataFile + "' INTO TABLE default." + table);
}
 
Example #8
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates text and ORC tables holding a single timestamp column, populates
 * them from a generated data file, then creates an external ORC table over
 * the same files that reads the column back as string.
 */
private void createTimestampToStringTable(Driver hiveDriver, String table) throws Exception {
  String dataFilePath = generateTimestampsDataFile();
  String createText = "CREATE TABLE IF NOT EXISTS " + table + " (col1 timestamp)";
  String createOrc = "CREATE TABLE IF NOT EXISTS " + table + "_orc" + " (col1 timestamp) STORED AS ORC";
  executeQuery(hiveDriver, createText);
  executeQuery(hiveDriver, createOrc);

  // Load the text table, then copy its rows into the ORC table.
  executeQuery(hiveDriver, String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE default." + table, dataFilePath));
  executeQuery(hiveDriver, "INSERT OVERWRITE TABLE " + table + "_orc" + " SELECT * FROM " + table);

  // External table over the ORC files, declaring col1 as string.
  String externalName = table + "_orc_ext";
  executeQuery(hiveDriver, "CREATE EXTERNAL TABLE IF NOT EXISTS " + externalName +
    " (col1 string)" + "STORED AS ORC LOCATION 'file://" + this.getWhDir() + "/" + table + "_orc" + "'");
}
 
Example #9
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a decimal(8,5) parquet table, inserts five rows, then shrinks the
 * first column's declared type to decimal(5,2) so reads must reconcile the
 * file schema against the table schema.
 */
private void createParquetDecimalSchemaChangeFilterTestTable(final Driver hiveDriver, final String table) {
  // All statements, in execution order.
  String[] statements = new String[7];
  statements[0] = "CREATE TABLE " + table + " (schema_mismatch_col decimal(8,5), value_in_parquet decimal(8,5)) STORED AS " +
      "PARQUET";
  statements[1] = "INSERT INTO " + table + " VALUES (123.12341, 123.12341)";
  statements[2] = "INSERT INTO " + table + " VALUES (123.13341, 123.13341)";
  statements[3] = "INSERT INTO " + table + " VALUES (123.12000, 123.12000)";
  statements[4] = "INSERT INTO " + table + " VALUES (123.12100, 123.12100)";
  statements[5] = "INSERT INTO " + table + " VALUES (543.21568, 543.21568)";
  statements[6] = "ALTER TABLE " + table + " CHANGE schema_mismatch_col schema_mismatch_col decimal(5,2)";
  for (int i = 0; i < statements.length; i++) {
    executeQuery(hiveDriver, statements[i]);
  }
}
 
Example #10
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Stages the "very_complex_float.parquet" fixture and creates two external
 * tables over it: one declaring the deeply nested leaf fields as float, and a
 * second declaring the same structure with double leaves, to exercise
 * float-to-double up-promotion on read.
 */
private void createVeryComplexHiveTableFloatToDouble(final Driver hiveDriver) throws Exception {
  final File verycomplexparquetDir = new File(BaseTestQuery.getTempDir("verycomplexparquetfloat"));
  verycomplexparquetDir.mkdirs();
  final URL verycomplexparqueurl = Resources.getResource("very_complex_float.parquet");
  // NOTE(review): Guava's Resources.getResource throws IllegalArgumentException
  // rather than returning null, so this branch is likely unreachable — kept as
  // defensive documentation of the failure mode.
  if (verycomplexparqueurl == null) {
    throw new IOException(String.format("Unable to find path %s.", "very_complex_float.parquet"));
  }

  // Staged copy is cleaned up on JVM exit.
  final File verycomplexparquefile = new File(verycomplexparquetDir, "very_complex_float.parquet");
  verycomplexparquefile.deleteOnExit();
  verycomplexparquetDir.deleteOnExit();
  Files.write(Paths.get(verycomplexparquefile.toURI()), Resources.toByteArray(verycomplexparqueurl));

  // col1: triple-nested arrays of structs whose leaves are float.
  final String verycomplexparquetable = "create external table very_complex_parquet_float(col1 " +
    "array<array<array<struct<f1:array<array<array<float>>>,f2:struct<sub_f1:array<array<array<float>>>,sub_f2:array<array<array<struct<sub_sub_f1:float,sub_sub_f2:string>>>>>>>>>, " +
    " col2 int) " +
    "stored as parquet location '" + verycomplexparquefile.getParent() + "'";
  executeQuery(hiveDriver, verycomplexparquetable);

  // Same structure and location, but every float leaf declared as double.
  final String verycomplexparquetable_uppromote_double = "create external table very_complex_parquet_float_to_double(col1 " +
    "array<array<array<struct<f1:array<array<array<double>>>,f2:struct<sub_f1:array<array<array<double>>>,sub_f2:array<array<array<struct<sub_sub_f1:double,sub_sub_f2:string>>>>>>>>>, " +
    " col2 int) " +
    "stored as parquet location '" + verycomplexparquefile.getParent() + "'";
  executeQuery(hiveDriver, verycomplexparquetable_uppromote_double);
}
 
Example #11
Source File: AbstractMetastoreTestWithStaticConfiguration.java    From incubator-sentry with Apache License 2.0 6 votes vote down vote up
/**
 * Executes a single HiveQL statement as the given user, with the supplied
 * configuration overrides applied on top of a fresh HiveConf.
 *
 * @param sqlStmt statement to run
 * @param userName remote user to impersonate via UGI
 * @param overLay HiveConf key/value overrides
 * @throws IOException (wrapped by doAs) if the driver reports a non-zero
 *         response code
 */
public void execHiveSQLwithOverlay(final String sqlStmt,
    final String userName, Map<String, String> overLay) throws Exception {
  final HiveConf hiveConf = new HiveConf();
  for (Map.Entry<String, String> entry : overLay.entrySet()) {
    hiveConf.set(entry.getKey(), entry.getValue());
  }
  UserGroupInformation clientUgi = UserGroupInformation
      .createRemoteUser(userName);
  clientUgi.doAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Void run() throws Exception {
      Driver driver = new Driver(hiveConf, userName);
      SessionState.start(new CliSessionState(hiveConf));
      try {
        CommandProcessorResponse cpr = driver.run(sqlStmt);
        if (cpr.getResponseCode() != 0) {
          throw new IOException("Failed to execute \"" + sqlStmt
              + "\". Driver returned " + cpr.getResponseCode() + " Error: "
              + cpr.getErrorMessage());
        }
      } finally {
        // BUGFIX: previously driver/session were closed only on success,
        // leaking both whenever the statement failed.
        driver.close();
        SessionState.get().close();
      }
      return null;
    }
  });
}
 
Example #12
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Stages the "complex_null_test.parquet" fixture and exposes it through an
 * external table whose 'city' column is an array of structs, used for
 * null-handling tests over complex types.
 */
private void createParquetComplexNullTestTable(final Driver hiveDriver)  throws Exception {
  final File stagingDir = new File(BaseTestQuery.getTempDir("parquetcomplexnulltest"));
  stagingDir.mkdirs();
  final URL fixtureUrl = Resources.getResource("complex_null_test.parquet");
  if (fixtureUrl == null) {
    throw new IOException(String.format("Unable to find path %s.", "complex_null_test.parquet"));
  }

  // Copy the classpath resource into the temp dir; both are removed on exit.
  final File fixtureFile = new File(stagingDir, "complex_null_test.parquet");
  fixtureFile.deleteOnExit();
  stagingDir.deleteOnExit();
  Files.write(Paths.get(fixtureFile.toURI()), Resources.toByteArray(fixtureUrl));

  final String ddl = "create external table complex_types_null_test_ext(" +
    "id int," +
    "emp_name string," +
    "city array<struct<f1:string>>)" +
    "stored as parquet location '" + fixtureFile.getParent() + "'";
  executeQuery(hiveDriver, ddl);
}
 
Example #13
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a parquet table with string columns interleaved with ints, inserts
 * one row, then creates an external table over the same files that redeclares
 * two of the string columns as varchar.
 */
private void createParquetVarcharWithMoreTypesTable(final Driver hiveDriver, final String table) throws Exception {
  final String createBase = "CREATE TABLE " + table +
    " (A int, Country string, B int, Capital string, C int, Lang string)" +
    " STORED AS PARQUET";

  final String insertRow = "INSERT INTO TABLE " + table + " SELECT" +
    " 1, 'United Kingdom', 2, 'London', 3, 'English'";

  // Country/Lang become varchar(50)/varchar(3) in the external view.
  final String createExternal = "CREATE EXTERNAL TABLE " + table + "_ext" +
    " (A int, Country varchar(50), B int, Capital string, C int, Lang varchar(3))" +
    " STORED AS PARQUET LOCATION 'file://" + this.getWhDir() + "/" + table + "'";

  executeQuery(hiveDriver, createBase);
  executeQuery(hiveDriver, insertRow);
  executeQuery(hiveDriver, createExternal);
}
 
Example #14
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a text table exercising arrays, structs, struct/array nesting, and
 * maps, then loads it from a generated delimited data file.
 */
private void createComplexTypesTextTable(final Driver hiveDriver, final String table) throws Exception {
  final String dataFile = generateComplexTypesDataFile();
  final String ddl =
    "CREATE TABLE IF NOT EXISTS " + table + " (" +
      " rownum int," +
      " list_field array<int>, " +
      " struct_field struct<name:string, age:int>, " +
      " struct_list_field struct<type:string, value:array<string>>, " +
      " list_struct_field array<struct<name:string, age:int>>, " +
      " map_field map<string, int>, " +
      " map_struct_field map<string, struct<type:string>> " +
      ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','" +
      " MAP KEYS TERMINATED BY ':'";
  executeQuery(hiveDriver, ddl);
  executeQuery(hiveDriver,
    "LOAD DATA LOCAL INPATH '" + dataFile + "' INTO TABLE default." + table);
}
 
Example #15
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a parquet table mixing string columns with struct/array/map
 * columns, inserts one row, then creates an external table over the same
 * files that redeclares the string columns as varchar(3).
 */
private void createParquetVarcharWithComplexTypeTable(final Driver hiveDriver, final String table) throws Exception {
  final String baseDdl = "CREATE TABLE " + table +
    " (Country string, A struct<B:int, C:string>, D array<int>, E int, F map<int, string>, Capital string)" +
    " STORED AS PARQUET";

  final String rowDml = "INSERT INTO TABLE " + table + " SELECT" +
    " 'United Kingdom', named_struct('B', 3, 'C', 'test3'),array(1,2,3), 1, map(1, 'value1', 2, 'value2'), 'London'";

  // varchar(3) truncates both 'United Kingdom' and 'London' on read.
  final String externalDdl = "CREATE EXTERNAL TABLE " + table + "_ext" +
    " (Country varchar(3), A struct<B:int, C:string>, D array<int>, E int, F map<int, string>, Capital varchar(3))" +
    " STORED AS PARQUET LOCATION 'file://" + this.getWhDir() + "/" + table + "'";

  executeQuery(hiveDriver, baseDdl);
  executeQuery(hiveDriver, rowDml);
  executeQuery(hiveDriver, externalDdl);
}
 
Example #16
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates {@code table+format} in the given storage format with array-typed
 * columns and copies all rows from the already-populated text table
 * {@code table} into it.
 */
private void createListTypesTable(final Driver hiveDriver, final String format, final String table) throws Exception {
  final String ddl =
    "CREATE TABLE " + table + format + "(" +
      " rownum int," +
      " tinyint_field array<tinyint>, " +
      " smallint_field array<smallint>, " +
      " int_field array<int>, " +
      " bigint_field array<bigint>, " +
      " float_field array<float>, " +
      " double_field array<double>, " +
      " timestamp_field array<timestamp>, " +
      " date_field array<date>, " +
      " string_field array<string>, " +
      " boolean_field array<boolean>, " +
      " binary_field array<binary> " +
      ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ',' STORED AS " + format;
  executeQuery(hiveDriver, ddl);
  executeQuery(hiveDriver, "INSERT OVERWRITE TABLE " + table + format + " SELECT * FROM " + table);
}
 
Example #17
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Text table with one nested struct column (numeric types plus string),
 * populated from a generated tab/comma-delimited data file.
 */
private void createStructTypesTextTable(final Driver hiveDriver, final String table) throws Exception {
  final String structDataPath = generateStructTypesDataFile();
  final String createStmt = "CREATE TABLE IF NOT EXISTS " + table + " (" +
      " rownum int," +
      " struct_field struct<" +
      " tinyint_field: tinyint, " +
      " smallint_field: smallint, " +
      " int_field: int, " +
      " bigint_field: bigint, " +
      " float_field: float, " +
      " double_field: double, " +
      " string_field: string> " +
      ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','";
  final String loadStmt = String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE default." + table, structDataPath);
  executeQuery(hiveDriver, createStmt);
  executeQuery(hiveDriver, loadStmt);
}
 
Example #18
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Creates text and ORC tables holding a single double column, populates them
 * from a generated data file, then creates an external ORC table over the
 * same files that reads the column back as string.
 */
private void createDoubleToStringTable(Driver hiveDriver, String table) throws Exception {
  String doubleDataFile = generateDoubleDataFile();
  executeQuery(hiveDriver, "CREATE TABLE IF NOT EXISTS " + table + " (col1 double)");
  executeQuery(hiveDriver, "CREATE TABLE IF NOT EXISTS " + table + "_orc" + " (col1 double) STORED AS ORC");

  // Load the text table, then copy its rows into the ORC table.
  executeQuery(hiveDriver, String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE default." + table, doubleDataFile));
  executeQuery(hiveDriver, "INSERT OVERWRITE TABLE " + table + "_orc" + " SELECT * FROM " + table);

  // External table over the ORC files, declaring col1 as string.
  String externalTableName = table + "_orc_ext";
  executeQuery(hiveDriver, "CREATE EXTERNAL TABLE IF NOT EXISTS " + externalTableName +
    " (col1 string)" + "STORED AS ORC LOCATION 'file://" + this.getWhDir() + "/" + table + "_orc" + "'");
}
 
Example #19
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Stages the "complex_types_case_test.parquet" fixture and creates an
 * external table over it with nested list/struct columns, used to test
 * case-insensitive resolution of complex-type field names.
 */
private void createParquetComplexCaseInsensitivityTestTable(final Driver hiveDriver)  throws Exception {
  final File stagingDir = new File(BaseTestQuery.getTempDir("parquetcomplexcasetest"));
  stagingDir.mkdirs();
  final URL fixtureUrl = Resources.getResource("complex_types_case_test.parquet");
  if (fixtureUrl == null) {
    throw new IOException(String.format("Unable to find path %s.", "complex_types_case_test.parquet"));
  }

  // Copy the classpath resource into the temp dir; both are removed on exit.
  final File fixtureFile = new File(stagingDir, "complex_types_case_test.parquet");
  fixtureFile.deleteOnExit();
  stagingDir.deleteOnExit();
  Files.write(Paths.get(fixtureFile.toURI()), Resources.toByteArray(fixtureUrl));

  final String ddl = "create external table complex_types_case_test_ext(" +
    "listcol array<int>, " +
    "structcol struct<" +
    "f1:int," +
    "list_field:array<int>," +
    "struct_field:struct<sub_f1:int>>)" +
    "stored as parquet location '" + fixtureFile.getParent() + "'";
  executeQuery(hiveDriver, ddl);
}
 
Example #20
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Writes decimal(6,2) data to a parquet table, then narrows the column to
 * decimal(4,2) and inserts more rows, so files on disk carry a higher
 * precision than the table schema declares (same scale).
 */
private void createDecimalParquetTableWithSameScaleHighFilePrecision(Driver hiveDriver, String table) throws Exception {
  executeQuery(hiveDriver, "CREATE TABLE " + table + " (decimal_col decimal(6,2), name varchar(20)) stored as parquet");
  // Rows written at the original decimal(6,2) precision.
  insertValuesIntoTable(hiveDriver, table, Lists.newArrayList(
    "1234.56", "-1234.56", "12.34", "-12.34"
  ));
  executeQuery(hiveDriver, "ALTER TABLE " + table + " change column decimal_col decimal_col decimal(4,2)");
  // Rows written after the precision was narrowed.
  insertValuesIntoTable(hiveDriver, table, Lists.newArrayList(
    "50.12", "-50.12"
  ));
}
 
Example #21
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a double-typed text table and an ORC copy of it, then adds an
 * external ORC table over the same directory that declares the column as
 * string (double-to-string coercion fixture).
 */
private void createDoubleToStringTable(Driver hiveDriver, String table) throws Exception {
  String generatedDataPath = generateDoubleDataFile();
  String txtDdl = "CREATE TABLE IF NOT EXISTS " + table + " (col1 double)";
  String orcDdl = "CREATE TABLE IF NOT EXISTS " + table + "_orc" + " (col1 double) STORED AS ORC";
  String loadStmt = String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE default." + table, generatedDataPath);
  String copyStmt = "INSERT OVERWRITE TABLE " + table + "_orc" + " SELECT * FROM " + table;
  String extName = table + "_orc_ext";
  String extDdl = "CREATE EXTERNAL TABLE IF NOT EXISTS " + extName +
    " (col1 string)" + "STORED AS ORC LOCATION 'file://" + this.getWhDir() + "/" + table + "_orc" + "'";

  executeQuery(hiveDriver, txtDdl);
  executeQuery(hiveDriver, orcDdl);
  executeQuery(hiveDriver, loadStmt);
  executeQuery(hiveDriver, copyStmt);
  executeQuery(hiveDriver, extDdl);
}
 
Example #22
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Inserts all values in a single statement by chaining SELECTs with UNION ALL
 * (each value appears twice per row), so the insert produces one file rather
 * than one file per row.
 */
private void insertValuesIntoTextTable(Driver hiveDriver, String table, List<String> valuesToInsert) {
  // Equivalent shape:
  //   insert into table1
  //   select 'v1', 'v1'
  //   union all
  //   select 'v2', 'v2'
  StringBuilder sql = new StringBuilder("insert into ").append(table).append("\n");
  for (int i = 0; i < valuesToInsert.size(); i++) {
    if (i > 0) {
      sql.append("union all\n");
    }
    String value = valuesToInsert.get(i);
    sql.append("select '").append(value).append("', '").append(value).append("'\n");
  }
  sql.append("\n");
  executeQuery(hiveDriver, sql.toString());
}
 
Example #23
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Writes decimal(37,4) data to a parquet table, then alters the column to
 * decimal(37,2) and inserts more rows, so existing files carry a higher
 * scale than the table schema declares.
 */
private void createDecimalParquetTableWithHighFileScale(Driver hiveDriver, String table) throws Exception {
  executeQuery(hiveDriver, "CREATE TABLE " + table + " (decimal_col decimal(37, 4), name varchar(20)) stored as parquet");
  // Rows written at the original decimal(37,4) scale.
  insertValuesIntoTable(hiveDriver, table, Lists.newArrayList(
    "100.1289", "100.1234", "99.1289", "99.1212", "10.1234", "1.1234", "0.1234",
    "-100.1289", "-100.1234", "-99.1289", "-99.1212", "-10.1234", "-1.1234"
  ));
  executeQuery(hiveDriver, "ALTER TABLE " + table + " change column decimal_col decimal_col decimal(37, 2)");
  // Rows written after the scale was reduced.
  insertValuesIntoTable(hiveDriver, table, Lists.newArrayList(
    "50.12", "-50.12"
  ));
}
 
Example #24
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Fixture where parquet files hold decimal(6,2) values but the table column
 * is later narrowed to decimal(4,2): file precision exceeds table precision
 * at the same scale.
 */
private void createDecimalParquetTableWithSameScaleHighFilePrecision(Driver hiveDriver, String table) throws Exception {
  String ddl = "CREATE TABLE " + table + " (decimal_col decimal(6,2), name varchar(20)) stored as parquet";
  String alterDdl = "ALTER TABLE " + table + " change column decimal_col decimal_col decimal(4,2)";
  List<String> preAlterValues = Lists.newArrayList(
    "1234.56", "-1234.56", "12.34", "-12.34"
  );
  List<String> postAlterValues = Lists.newArrayList(
    "50.12", "-50.12"
  );

  executeQuery(hiveDriver, ddl);
  insertValuesIntoTable(hiveDriver, table, preAlterValues);
  executeQuery(hiveDriver, alterDdl);
  insertValuesIntoTable(hiveDriver, table, postAlterValues);
}
 
Example #25
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Emits one INSERT statement covering all values, chained with UNION ALL so a
 * single file is produced instead of one per row; each value is selected into
 * both columns.
 */
private void insertValuesIntoTextTable(Driver hiveDriver, String table, List<String> valuesToInsert) {
  // Resulting statement shape:
  //   insert into table1
  //   select 'v1', 'v1'
  //   union all
  //   select 'v2', 'v2'
  StringBuilder statement = new StringBuilder();
  statement.append("insert into ").append(table).append("\n");
  boolean first = true;
  for (String value : valuesToInsert) {
    if (!first) {
      statement.append("union all\n");
    }
    first = false;
    statement.append("select '").append(value).append("', '").append(value).append("'\n");
  }
  statement.append("\n");
  executeQuery(hiveDriver, statement.toString());
}
 
Example #26
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Seeds a text table with one row whose double value (-0.18) cannot be
 * represented as a bigint partition value, then copies it into a partitioned
 * ORC table to provoke a partition-value format error on read.
 */
private void createTableForPartitionValueFormatException(Driver hiveDriver, String table) throws IOException {
  executeQuery(hiveDriver, "create table " + table + "_text" + " (col1 int, col2 double)");
  executeQuery(hiveDriver, "insert into " + table + "_text"  + " values(1, -0.18)");

  executeQuery(hiveDriver, "create table " + table + "_orc" + " (col1 int)" + " partitioned by (col2 bigint) stored as orc");
  try {
    executeQuery(hiveDriver, "insert into " + table + "_orc"  + " partition(col2) select * from " + table + "_text");
  } catch (Exception ignored) {
    // Hive 3 throws IndexOutOfBoundsException for this insert even though the
    // data is written; deliberately swallow it.
  }
}
 
Example #27
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Fixture where parquet files hold decimal(37,4) values but the table column
 * is later altered to decimal(37,2): file scale exceeds table scale.
 */
private void createDecimalParquetTableWithHighFileScale(Driver hiveDriver, String table) throws Exception {
  String ddl = "CREATE TABLE " + table + " (decimal_col decimal(37, 4), name varchar(20)) stored as parquet";
  String alterDdl = "ALTER TABLE " + table + " change column decimal_col decimal_col decimal(37, 2)";
  List<String> preAlterValues = Lists.newArrayList(
    "100.1289", "100.1234", "99.1289", "99.1212", "10.1234", "1.1234", "0.1234",
    "-100.1289", "-100.1234", "-99.1289", "-99.1212", "-10.1234", "-1.1234"
  );
  List<String> postAlterValues = Lists.newArrayList(
    "50.12", "-50.12"
  );

  executeQuery(hiveDriver, ddl);
  insertValuesIntoTable(hiveDriver, table, preAlterValues);
  executeQuery(hiveDriver, alterDdl);
  insertValuesIntoTable(hiveDriver, table, postAlterValues);
}
 
Example #28
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Copies the bundled "casetestdata.parquet" fixture into a temp directory and
 * creates a table over it whose column names mix upper/lower/mixed case,
 * including the sub-fields of two struct columns.
 */
private void createTableMixedCaseColumnsWithComplexTypes(Driver hiveDriver, String table) throws IOException {
  final File stagingDir = new File(BaseTestQuery.getTempDir("casetest"));
  stagingDir.mkdirs();
  final File stagedParquet = new File(stagingDir, "casetestdata.parquet");
  stagedParquet.deleteOnExit();
  stagingDir.deleteOnExit();
  Files.write(Paths.get(stagedParquet.toURI()), Resources.toByteArray(Resources.getResource("casetestdata.parquet")));

  // Both struct columns share the same mixed-case field layout.
  final String structType = "struct<upcase_sub1:string, lowcase_sub2:string, mixedcase_sub3:string>";
  executeQuery(hiveDriver, "create table " + table +
    " (upcase_col string, " +
    "lwcase_col string, " +
    "mixedcase_col string, " +
    "upcase_struct_col " + structType + ", " +
    "lwcase_struct_col " + structType + ") " +
    "stored as parquet location 'file://" + stagedParquet.getParent() + "'");
}
 
Example #29
Source File: HiveTestDataGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Stages the "struct_extra_test.parquet" fixture and creates an external
 * table whose declared struct carries one more sub-field (f_f_2) than the
 * file contains, to test reading structs with extra declared fields.
 */
private void createParquetStructExtraField(final Driver hiveDriver) throws Exception {
  final File stagingDir = new File(BaseTestQuery.getTempDir("struct_extra_test"));
  stagingDir.mkdirs();
  final URL fixtureUrl = Resources.getResource("struct_extra_test.parquet");
  if (fixtureUrl == null) {
    throw new IOException(String.format("Unable to find path %s.", "struct_extra_test.parquet"));
  }

  // parquet file has following columns with one valid row
  // col1 int, col2 array<struct<f1:int, f2:struct<f_f_1>>> with value 1, [{f1:1, f2:{2}}]
  final File fixtureFile = new File(stagingDir, "struct_extra_test.parquet");
  fixtureFile.deleteOnExit();
  stagingDir.deleteOnExit();
  Files.write(Paths.get(fixtureFile.toURI()), Resources.toByteArray(fixtureUrl));

  final String ddl = "create external table struct_extra_test_ext(" +
    "col1 int, col2 array<struct<f1:int, f2:struct<f_f_1:int, f_f_2:int>>>)" +
    "stored as parquet location '" + fixtureFile.getParent() + "'";
  executeQuery(hiveDriver, ddl);
}
 
Example #30
Source File: HiveTestUtilities.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Execute the give <i>query</i> on given <i>hiveDriver</i> instance.
 * @param hiveDriver
 * @param query
 */
/**
 * Execute the give <i>query</i> on given <i>hiveDriver</i> instance.
 * Throws a RuntimeException (carrying the driver's error message and, if any,
 * the underlying exception as cause) when execution fails.
 * @param hiveDriver
 * @param query
 */
public static void executeQuery(Driver hiveDriver, String query) {
  CommandProcessorResponse response = null;
  Exception cause = null;
  try {
    response = hiveDriver.run(query);
  } catch(Exception ex) {
    cause = ex;
  }

  // BUGFIX: also guard against a null response — the original message code
  // handled null but the condition dereferenced it, risking an NPE instead of
  // the intended diagnostic exception.
  if (cause != null || response == null || response.getResponseCode() != 0) {
    throw new RuntimeException(String.format("Failed to execute command '%s', errorMsg = '%s'",
        query, (response != null ? response.getErrorMessage() : "")), cause);
  }
}