Java Code Examples for org.apache.hadoop.hive.metastore.api.Table#setDbName()

The following examples show how to use org.apache.hadoop.hive.metastore.api.Table#setDbName(). Each example is taken from an open source project; the source file and project are noted above each snippet.
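Every example follows the same basic pattern: construct a Table, call setDbName() with the owning database, and populate the remaining fields (table name, storage descriptor, parameters, and so on) before handing the object to a metastore client or returning it. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the class, database, and table names are placeholders rather than code from any of the projects below.

import java.util.ArrayList;
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableSetDbNameSketch {

  public static Table minimalTable() {
    Table table = new Table();
    table.setDbName("example_db");        // database the table belongs to
    table.setTableName("example_table");

    // Most of the examples below also attach a StorageDescriptor before the
    // table is registered with the metastore.
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(new ArrayList<FieldSchema>());
    sd.setSerdeInfo(new SerDeInfo());
    sd.setParameters(new HashMap<String, String>());
    table.setSd(sd);

    return table;
  }
}
 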
Example 1
Source File: FederatedHMSHandlerTest.java    From waggle-dance with Apache License 2.0
@Test
public void get_table_objects_by_name_req() throws TException {
  Table table0 = new Table();
  table0.setDbName(DB_P);
  table0.setTableName("table0");
  Table table1 = new Table();
  table1.setDbName(DB_P);
  table1.setTableName("table1");
  GetTablesRequest request = new GetTablesRequest(DB_P);
  request.setTblNames(Arrays.asList(table0.getTableName(), table1.getTableName()));
  GetTablesResult response = new GetTablesResult(Arrays.asList(table0, table1));
  when(primaryClient.get_table_objects_by_name_req(request)).thenReturn(response);
  when(primaryMapping.transformInboundGetTablesRequest(request)).thenReturn(request);
  when(primaryMapping.transformOutboundGetTablesResult(response)).thenReturn(response);
  GetTablesResult result = handler.get_table_objects_by_name_req(request);
  assertThat(result.getTables().size(), is(2));
  assertThat(result.getTables().get(0).getDbName(), is(DB_P));
  assertThat(result.getTables().get(0).getTableName(), is("table0"));
  assertThat(result.getTables().get(1).getDbName(), is(DB_P));
  assertThat(result.getTables().get(1).getTableName(), is("table1"));
}
 
Example 2
Source File: AvroHiveTableStrategy.java    From data-highway with Apache License 2.0
@Override
public Table newHiveTable(
    String databaseName,
    String tableName,
    String partitionColumnName,
    String location,
    Schema schema,
    int version) {

  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);

  table.setTableType(TableType.EXTERNAL_TABLE.toString());
  table.putToParameters("EXTERNAL", "TRUE");
  addRoadAnnotations(table);

  URI schemaUri = uriResolver.resolve(schema, table.getTableName(), version);
  table.putToParameters(AVRO_SCHEMA_URL, schemaUri.toString());
  table.putToParameters(AVRO_SCHEMA_VERSION, Integer.toString(version));
  table.setPartitionKeys(Arrays.asList(new FieldSchema(partitionColumnName, "string", null)));

  table.setSd(AvroStorageDescriptorFactory.create(location));

  return table;
}
 
Example 3
Source File: AbstractMetastoreTestWithStaticConfiguration.java    From incubator-sentry with Apache License 2.0
public Table makeMetastoreTableObject(HiveMetaStoreClient client,
    String dbName, String tabName, List<FieldSchema> cols) throws Exception {
  Table tbl = new Table();
  tbl.setDbName(dbName);
  tbl.setTableName(tabName);
  StorageDescriptor sd = new StorageDescriptor();
  tbl.setSd(sd);
  tbl.setParameters(new HashMap<String, String>());
  sd.setCols(cols);
  sd.setCompressed(false);
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setName(tbl.getTableName());
  sd.getSerdeInfo().setParameters(new HashMap<String, String>());
  sd.getSerdeInfo().getParameters()
      .put(serdeConstants.SERIALIZATION_FORMAT, "1");
  sd.setSortCols(new ArrayList<Order>());
  return tbl;
}
 
Example 4
Source File: DatabaseMappingImplTest.java    From waggle-dance with Apache License 2.0
@Test
public void transformOutboundGetTableResultWithView() throws Exception {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  table.setViewExpandedText(VIEW_EXPANDED_TEXT);
  table.setViewOriginalText(VIEW_ORIGINAL_TEXT);
  GetTableResult result = new GetTableResult();
  result.setTable(table);
  GetTableResult transformedResult = databaseMapping.transformOutboundGetTableResult(result);
  assertThat(transformedResult, is(sameInstance(result)));
  assertThat(transformedResult.getTable(), is(sameInstance(result.getTable())));
  assertThat(transformedResult.getTable().getDbName(), is(OUT_DB_NAME));
  assertThat(transformedResult.getTable().getTableName(), is(TABLE_NAME));
  assertThat(transformedResult.getTable().getViewExpandedText(), is(VIEW_EXPANDED_TEXT_TRANSFORMED));
  assertThat(transformedResult.getTable().getViewOriginalText(), is(VIEW_ORIGINAL_TEXT_TRANSFORMED));
}
 
Example 5
Source File: HiveServer2CoreTest.java    From beeju with Apache License 2.0
private Table createUnpartitionedTable(String databaseName, String tableName, HiveServer2Core server)
    throws Exception {
  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);
  table.setSd(new StorageDescriptor());
  table.getSd().setCols(Arrays.asList(new FieldSchema("id", "int", null), new FieldSchema("name", "string", null)));
  table.getSd().setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
  table.getSd().setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  table.getSd().setSerdeInfo(new SerDeInfo());
  table.getSd().getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
  HiveMetaStoreClient client = server.getCore().newClient();
  client.createTable(table);
  client.close();
  return table;
}
 
Example 6
Source File: HiveUtils.java    From kite with Apache License 2.0
static Table createEmptyTable(String namespace, String name) {
  Table table = new Table();
  table.setDbName(namespace);
  table.setTableName(name);
  table.setPartitionKeys(new ArrayList<FieldSchema>());
  table.setParameters(new HashMap<String, String>());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setSerdeInfo(new SerDeInfo());
  sd.setNumBuckets(-1);
  sd.setBucketCols(new ArrayList<String>());
  sd.setCols(new ArrayList<FieldSchema>());
  sd.setParameters(new HashMap<String, String>());
  sd.setSortCols(new ArrayList<Order>());
  sd.getSerdeInfo().setParameters(new HashMap<String, String>());
  SkewedInfo skewInfo = new SkewedInfo();
  skewInfo.setSkewedColNames(new ArrayList<String>());
  skewInfo.setSkewedColValues(new ArrayList<List<String>>());
  skewInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
  sd.setSkewedInfo(skewInfo);
  table.setSd(sd);

  return table;
}
 
Example 7
Source File: TestUtils.java    From waggle-dance with Apache License 2.0
static Table createPartitionedTable(HiveMetaStoreClient metaStoreClient, String database, String table, File location)
  throws Exception {

  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  hiveTable.setPartitionKeys(PARTITION_COLUMNS);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toURI().toString());
  sd.setParameters(new HashMap<>());
  sd.setSerdeInfo(new SerDeInfo());

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  return hiveTable;
}
 
Example 8
Source File: ReplicaTest.java    From circus-train with Apache License 2.0
private Table newTable() {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  table.setTableType(TableType.EXTERNAL_TABLE.name());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation(tableLocation);
  table.setSd(sd);

  HashMap<String, String> parameters = new HashMap<>();
  parameters.put(StatsSetupConst.ROW_COUNT, "1");
  table.setParameters(parameters);

  table.setPartitionKeys(PARTITIONS);
  return table;
}
 
Example 9
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Table newTable(String database, String tableName) {
  Table table = new Table();
  table.setDbName(database);
  table.setTableName(tableName);
  table.setTableType(TABLE_TYPE);
  table.setOwner(OWNER);
  table.setCreateTime(CREATE_TIME);
  table.setRetention(RETENTION);

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  table.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(COLS);
  storageDescriptor.setInputFormat(INPUT_FORMAT);
  storageDescriptor.setOutputFormat(OUTPUT_FORMAT);
  storageDescriptor.setSerdeInfo(new SerDeInfo(SERDE_INFO_NAME, SERIALIZATION_LIB, new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation(DATABASE + "/" + tableName + "/");
  table.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  table.setParameters(parameters);

  return table;
}
 
Example 10
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Table createUnpartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    URI location)
  throws TException {
  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(TextInputFormat.class.getName());
  sd.setOutputFormat(TextOutputFormat.class.getName());
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.OpenCSVSerde");

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, database, table);
  ColumnStatisticsData statsData = new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1L, 2L));
  ColumnStatisticsObj cso1 = new ColumnStatisticsObj("id", "bigint", statsData);
  List<ColumnStatisticsObj> statsObj = Collections.singletonList(cso1);
  metaStoreClient.updateTableColumnStatistics(new ColumnStatistics(statsDesc, statsObj));

  return hiveTable;
}
 
Example 11
Source File: DatabaseMappingImplTest.java    From waggle-dance with Apache License 2.0
@Test
public void transformOutboundTable() throws Exception {
  Table table = new Table();
  table.setDbName(DB_NAME);
  Table result = databaseMapping.transformOutboundTable(table);
  assertThat(result, is(sameInstance(table)));
  assertThat(result.getDbName(), is(OUT_DB_NAME));
  assertThat(result.getViewExpandedText(), nullValue());
  assertThat(result.getViewOriginalText(), nullValue());
}
 
Example 12
Source File: DatabaseMappingImplTest.java    From waggle-dance with Apache License 2.0
@Test
public void transformOutboundGetTableResult() throws Exception {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  GetTableResult result = new GetTableResult();
  result.setTable(table);
  GetTableResult transformedResult = databaseMapping.transformOutboundGetTableResult(result);
  assertThat(transformedResult, is(sameInstance(result)));
  assertThat(transformedResult.getTable(), is(sameInstance(result.getTable())));
  assertThat(transformedResult.getTable().getDbName(), is(OUT_DB_NAME));
  assertThat(transformedResult.getTable().getTableName(), is(TABLE_NAME));
  assertFalse(transformedResult.getTable().isSetViewExpandedText());
  assertFalse(transformedResult.getTable().isSetViewOriginalText());
}
 
Example 13
Source File: TableTransformationTest.java    From circus-train with Apache License 2.0
@Before
public void init() {
  table = new Table();
  table.setDbName("database");
  table.setTableName("table");
  table.setTableType("type");

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  table.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(Arrays.asList(new FieldSchema("a", "int", null)));
  storageDescriptor.setInputFormat("input_format");
  storageDescriptor.setOutputFormat("output_format");
  storageDescriptor.setSerdeInfo(new SerDeInfo("serde", "lib", new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation("database/table/");
  table.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  table.setParameters(parameters);
}
 
Example 14
Source File: HiveMetaStoreServiceJdbcImpl.java    From griffin with Apache License 2.0
@Override
@Cacheable(unless = "#result==null")
public Table getTable(String dbName, String tableName) {
    Table result = new Table();
    result.setDbName(dbName);
    result.setTableName(tableName);

    // Reconstruct the table metadata by running SHOW CREATE TABLE over JDBC and parsing its output.
    String sql = SHOW_CREATE_TABLE + dbName + "." + tableName;
    Statement stmt = null;
    ResultSet rs = null;
    StringBuilder sb = new StringBuilder();

    try {
        Class.forName(hiveClassName);
        if (conn == null) {
            conn = DriverManager.getConnection(hiveUrl);
        }
        LOGGER.info("got connection");

        stmt = conn.createStatement();
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            String s = rs.getString(1);
            sb.append(s);
        }
        String location = getLocation(sb.toString());
        List<FieldSchema> cols = getColums(sb.toString());
        StorageDescriptor sd = new StorageDescriptor();
        sd.setLocation(location);
        sd.setCols(cols);
        result.setSd(sd);
    } catch (Exception e) {
        LOGGER.error("Query Hive Table metadata has error. {}", e.getMessage());
    } finally {
        closeConnection(stmt, rs);
    }
    return result;
}
 
Example 15
Source File: FederatedHMSHandlerTest.java    From waggle-dance with Apache License 2.0
@Test
public void get_table_req() throws TException {
  Table table = new Table();
  table.setDbName(DB_P);
  table.setTableName("table");
  GetTableRequest request = new GetTableRequest(table.getDbName(), table.getTableName());
  GetTableResult response = new GetTableResult(table);
  when(primaryClient.get_table_req(request)).thenReturn(response);
  when(primaryMapping.transformInboundGetTableRequest(request)).thenReturn(request);
  when(primaryMapping.transformOutboundGetTableResult(response)).thenReturn(response);
  GetTableResult result = handler.get_table_req(request);
  assertThat(result.getTable().getDbName(), is(DB_P));
  assertThat(result.getTable().getTableName(), is("table"));
}
 
Example 16
Source File: HiveMetaStoreUtils.java    From incubator-gobblin with Apache License 2.0
/**
 * Convert a {@link HiveTable} into a {@link Table}.
 */
public static Table getTable(HiveTable hiveTable) {
  State props = hiveTable.getProps();
  Table table = new Table();
  table.setDbName(hiveTable.getDbName());
  table.setTableName(hiveTable.getTableName());
  table.setParameters(getParameters(props));
  if (hiveTable.getCreateTime().isPresent()) {
    table.setCreateTime(Ints.checkedCast(hiveTable.getCreateTime().get()));
  }
  if (hiveTable.getLastAccessTime().isPresent()) {
    table.setLastAccessTime(Ints.checkedCast(hiveTable.getLastAccessTime().get()));
  }
  if (hiveTable.getOwner().isPresent()) {
    table.setOwner(hiveTable.getOwner().get());
  }
  if (hiveTable.getRetention().isPresent()) {
    table.setRetention(Ints.checkedCast(hiveTable.getRetention().get()));
  }
  if (hiveTable.getTableType().isPresent()) {
    table.setTableType(hiveTable.getTableType().get());
  } else {
    table.setTableType(DEFAULT_TABLE_TYPE.toString());
  }
  if (table.getTableType().equals(TableType.EXTERNAL_TABLE.toString())) {
    table.getParameters().put(EXTERNAL, Boolean.TRUE.toString().toUpperCase());
  }
  table.setPartitionKeys(getFieldSchemas(hiveTable.getPartitionKeys()));
  table.setSd(getStorageDescriptor(hiveTable));
  return table;
}
 
Example 17
Source File: ComparisonToolIntegrationTest.java    From circus-train with Apache License 2.0
private void createSourceTable() throws Exception {
  File partitionEurope = new File(sourceTableUri, "local_date=2000-01-01");
  File partitionUk = new File(partitionEurope, "local_hour=0");
  File dataFileUk = new File(partitionUk, PART_00000);
  FileUtils.writeStringToFile(dataFileUk, "1\tadam\tlondon\n2\tsusan\tglasgow\n");

  File partitionAsia = new File(sourceTableUri, "local_date=2000-01-02");
  File partitionChina = new File(partitionAsia, "local_hour=0");
  File dataFileChina = new File(partitionChina, PART_00000);
  String data = "1\tchun\tbeijing\n2\tshanghai\tmilan\n";
  FileUtils.writeStringToFile(dataFileChina, data);

  HiveMetaStoreClient sourceClient = catalog.client();

  Table source = new Table();
  source.setDbName(DATABASE);
  source.setTableName(SOURCE_TABLE);
  source.setTableType(TableType.EXTERNAL_TABLE.name());
  Map<String, String> parameters = new HashMap<>();
  parameters.put("comment", "comment source");
  source.setParameters(parameters);

  List<FieldSchema> partitionColumns = Arrays.asList(new FieldSchema("local_date", "string", ""),
      new FieldSchema("local_hour", "string", ""));
  source.setPartitionKeys(partitionColumns);

  List<FieldSchema> dataColumns = Arrays.asList(new FieldSchema("id", "bigint", ""),
      new FieldSchema("name", "string", ""), new FieldSchema("city", "string", ""));

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(dataColumns);
  sd.setLocation(sourceTableUri.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  source.setSd(sd);

  sourceClient.createTable(source);
  LOG.info(">>>> Partitions added: {}",
      +sourceClient
          .add_partitions(Arrays.asList(newPartition(SOURCE_TABLE, sd, Arrays.asList("2000-01-01", "0"), partitionUk),
              newPartition(SOURCE_TABLE, sd, Arrays.asList("2000-01-02", "0"), partitionChina))));
}
 
Example 18
Source File: HiveConvertersImpl.java    From metacat with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public Table metacatToHiveTable(final TableDto dto) {
    final Table table = new Table();
    final QualifiedName name = dto.getName();
    if (name != null) {
        table.setTableName(name.getTableName());
        table.setDbName(name.getDatabaseName());
    }

    final StorageDto storageDto = dto.getSerde();
    if (storageDto != null) {
        table.setOwner(storageDto.getOwner());
    }

    final AuditDto auditDto = dto.getAudit();
    if (auditDto != null && auditDto.getCreatedDate() != null) {
        table.setCreateTime(dateToEpochSeconds(auditDto.getCreatedDate()));
    }

    Map<String, String> params = new HashMap<>();
    if (dto.getMetadata() != null) {
        params = dto.getMetadata();
    }
    table.setParameters(params);
    updateTableTypeAndViewInfo(dto, table);

    table.setSd(fromStorageDto(storageDto, table.getTableName()));

    final List<FieldDto> fields = dto.getFields();
    if (fields == null) {
        table.setPartitionKeys(Collections.emptyList());
        table.getSd().setCols(Collections.emptyList());
    } else {
        final List<FieldSchema> nonPartitionFields = Lists.newArrayListWithCapacity(fields.size());
        final List<FieldSchema> partitionFields = Lists.newArrayListWithCapacity(fields.size());
        // Split the DTO fields into partition keys and non-partition columns.
        for (FieldDto fieldDto : fields) {
            final FieldSchema f = metacatToHiveField(fieldDto);

            if (fieldDto.isPartition_key()) {
                partitionFields.add(f);
            } else {
                nonPartitionFields.add(f);
            }
        }
        table.setPartitionKeys(partitionFields);
        table.getSd().setCols(nonPartitionFields);
    }
    return table;
}
 
Example 19
Source File: DatabaseMappingImpl.java    From waggle-dance with Apache License 2.0
@Override
public Table transformInboundTable(Table table) {
  table.setDbName(metaStoreMapping.transformInboundDatabaseName(table.getDbName()));
  return table;
}
 
Example 20
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Table createPartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    URI location,
    List<FieldSchema> columns,
    List<FieldSchema> partitionKeys,
    String serializationLib,
    String inputFormatClassName,
    String outputFormatClassName)
    throws Exception {

  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  hiveTable.setPartitionKeys(partitionKeys);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(columns);
  sd.setLocation(location.toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(inputFormatClassName);
  sd.setOutputFormat(outputFormatClassName);
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setSerializationLib(serializationLib);

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, database, table);
  ColumnStatisticsData statsData = new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1L, 2L));
  ColumnStatisticsObj cso1 = new ColumnStatisticsObj("id", "bigint", statsData);
  List<ColumnStatisticsObj> statsObj = Collections.singletonList(cso1);
  metaStoreClient.updateTableColumnStatistics(new ColumnStatistics(statsDesc, statsObj));

  return hiveTable;
}