Java Code Examples for org.apache.hadoop.hive.metastore.api.StorageDescriptor#setSerdeInfo()

The following examples show how to use org.apache.hadoop.hive.metastore.api.StorageDescriptor#setSerdeInfo(). They are drawn from several open-source projects; the source file and project are noted above each example.
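Most of the examples below share the same shape: build a StorageDescriptor, attach a SerDeInfo (optionally naming a serialization library), and set the descriptor on a Table or Partition before handing it to the metastore client. The sketch below distills that pattern; it is a minimal illustration, not taken from any one project, and the database name, table name, and choice of LazySimpleSerDe are illustrative assumptions.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class SetSerdeInfoSketch {

  static Table minimalTable() {
    Table table = new Table();
    table.setDbName("example_db");       // illustrative
    table.setTableName("example_table"); // illustrative

    StorageDescriptor sd = new StorageDescriptor();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("id", "bigint", null));
    sd.setCols(cols);

    // The call every example on this page demonstrates: give the descriptor
    // a SerDeInfo (here with an explicit serialization library) before use.
    SerDeInfo serdeInfo = new SerDeInfo();
    serdeInfo.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    serdeInfo.setParameters(new HashMap<String, String>());
    sd.setSerdeInfo(serdeInfo);

    table.setSd(sd);
    return table;
  }
}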
Example 1
Source File: TestUtils.java    From waggle-dance with Apache License 2.0
static Table createUnpartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    File location)
  throws TException {
  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toURI().toString());
  sd.setParameters(new HashMap<>());
  sd.setSerdeInfo(new SerDeInfo());

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  return hiveTable;
}
 
Example 2
Source File: HiveUtils.java    From kite with Apache License 2.0
static Table createEmptyTable(String namespace, String name) {
  Table table = new Table();
  table.setDbName(namespace);
  table.setTableName(name);
  table.setPartitionKeys(new ArrayList<FieldSchema>());
  table.setParameters(new HashMap<String, String>());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setSerdeInfo(new SerDeInfo());
  sd.setNumBuckets(-1);
  sd.setBucketCols(new ArrayList<String>());
  sd.setCols(new ArrayList<FieldSchema>());
  sd.setParameters(new HashMap<String, String>());
  sd.setSortCols(new ArrayList<Order>());
  sd.getSerdeInfo().setParameters(new HashMap<String, String>());
  SkewedInfo skewInfo = new SkewedInfo();
  skewInfo.setSkewedColNames(new ArrayList<String>());
  skewInfo.setSkewedColValues(new ArrayList<List<String>>());
  skewInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
  sd.setSkewedInfo(skewInfo);
  table.setSd(sd);

  return table;
}
 
Example 3
Source File: TestUtils.java    From circus-train with Apache License 2.0
private static Table createView(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String view,
    String table,
    List<FieldSchema> partitionCols)
  throws TException {
  Table hiveView = new Table();
  hiveView.setDbName(database);
  hiveView.setTableName(view);
  hiveView.setTableType(TableType.VIRTUAL_VIEW.name());
  hiveView.setViewOriginalText(hql(database, table));
  hiveView.setViewExpandedText(expandHql(database, table, DATA_COLUMNS, partitionCols));
  hiveView.setPartitionKeys(partitionCols);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());
  hiveView.setSd(sd);

  metaStoreClient.createTable(hiveView);

  return hiveView;
}
 
Example 4
Source File: TestUtils.java    From waggle-dance with Apache License 2.0
static Table createPartitionedTable(HiveMetaStoreClient metaStoreClient, String database, String table, File location)
  throws Exception {

  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  hiveTable.setPartitionKeys(PARTITION_COLUMNS);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toURI().toString());
  sd.setParameters(new HashMap<>());
  sd.setSerdeInfo(new SerDeInfo());

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  return hiveTable;
}
 
Example 5
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Table createUnpartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    URI location)
  throws TException {
  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(TextInputFormat.class.getName());
  sd.setOutputFormat(TextOutputFormat.class.getName());
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.OpenCSVSerde");

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, database, table);
  ColumnStatisticsData statsData = new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1L, 2L));
  ColumnStatisticsObj cso1 = new ColumnStatisticsObj("id", "bigint", statsData);
  List<ColumnStatisticsObj> statsObj = Collections.singletonList(cso1);
  metaStoreClient.updateTableColumnStatistics(new ColumnStatistics(statsDesc, statsObj));

  return hiveTable;
}
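The LongColumnStatsData(1L, 2L) constructor used above populates the two required thrift fields, numNulls and numDVs. Once written, the statistics can be read back through the same client; a small sketch, assuming the table created above exists:

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.thrift.TException;

// Sketch only: reads back the "id" column statistics written in the example.
static List<ColumnStatisticsObj> readIdStats(HiveMetaStoreClient client, String database, String table)
    throws TException {
  return client.getTableColumnStatistics(database, table, Collections.singletonList("id"));
}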
 
Example 6
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Partition newPartition() {
  Partition partition = new Partition();
  StorageDescriptor sd = new StorageDescriptor();
  SerDeInfo info = new SerDeInfo();
  info.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(info);
  partition.setSd(sd);
  partition.setParameters(new HashMap<String, String>());
  return partition;
}
 
Example 7
Source File: AvroSchemaManagerTest.java    From incubator-gobblin with Apache License 2.0
private Partition getTestPartition(Table table) throws HiveException {
  Partition partition = new Partition(table, ImmutableMap.of("partition_key", "1"), null);
  StorageDescriptor sd = new StorageDescriptor();
  sd.setSerdeInfo(new SerDeInfo("avro", AvroSerDe.class.getName(), null));
  sd.setCols(Lists.newArrayList(new FieldSchema("foo", "int", null)));
  partition.getTPartition().setSd(sd);
  return partition;
}
 
Example 8
Source File: PartitionTransformationTest.java    From circus-train with Apache License 2.0
@Before
public void init() {
  partition = new Partition();
  partition.setDbName("database");
  partition.setTableName("table");
  partition.setValues(ImmutableList.of("part"));

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  partition.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(Arrays.asList(new FieldSchema("a", "int", null)));
  storageDescriptor.setInputFormat("input_format");
  storageDescriptor.setOutputFormat("output_format");
  storageDescriptor.setSerdeInfo(new SerDeInfo("serde", "lib", new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation("database/table/part/");
  partition.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  partition.setParameters(parameters);
}
 
Example 9
Source File: HiveMetaStoreUtils.java    From incubator-gobblin with Apache License 2.0
private static StorageDescriptor getStorageDescriptor(HiveRegistrationUnit unit) {
  State props = unit.getStorageProps();
  StorageDescriptor sd = new StorageDescriptor();
  sd.setParameters(getParameters(props));
  //Treat AVRO and other formats differently. Details can be found in GOBBLIN-877
  if (unit.isRegisterSchema() ||
      (unit.getInputFormat().isPresent() && !unit.getInputFormat().get().equals(AvroContainerInputFormat.class.getName()))) {
    sd.setCols(getFieldSchemas(unit));
  }
  if (unit.getLocation().isPresent()) {
    sd.setLocation(unit.getLocation().get());
  }
  if (unit.getInputFormat().isPresent()) {
    sd.setInputFormat(unit.getInputFormat().get());
  }
  if (unit.getOutputFormat().isPresent()) {
    sd.setOutputFormat(unit.getOutputFormat().get());
  }
  if (unit.getIsCompressed().isPresent()) {
    sd.setCompressed(unit.getIsCompressed().get());
  }
  if (unit.getNumBuckets().isPresent()) {
    sd.setNumBuckets(unit.getNumBuckets().get());
  }
  if (unit.getBucketColumns().isPresent()) {
    sd.setBucketCols(unit.getBucketColumns().get());
  }
  if (unit.getIsStoredAsSubDirs().isPresent()) {
    sd.setStoredAsSubDirectories(unit.getIsStoredAsSubDirs().get());
  }
  sd.setSerdeInfo(getSerDeInfo(unit));
  return sd;
}
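The getSerDeInfo(unit) helper invoked on the last line is outside this excerpt. As an illustration only, a helper in that role usually just wraps a serde class name and its properties in a SerDeInfo; the parameters below are hypothetical stand-ins, not Gobblin's actual HiveRegistrationUnit accessors:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.SerDeInfo;

// Hypothetical sketch of a getSerDeInfo-style helper; serdeClassName and
// serdeProps stand in for whatever the registration unit actually carries.
static SerDeInfo newSerDeInfo(String serdeClassName, Map<String, String> serdeProps) {
  SerDeInfo serDeInfo = new SerDeInfo();
  serDeInfo.setSerializationLib(serdeClassName);
  serDeInfo.setParameters(serdeProps != null ? serdeProps : new HashMap<String, String>());
  return serDeInfo;
}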
 
Example 10
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Partition newPartition(String database, String tableName, String partitionValue) {
  Partition partition = new Partition();
  partition.setDbName(database);
  partition.setTableName(tableName);
  partition.setCreateTime(CREATE_TIME);
  partition.setValues(ImmutableList.of(partitionValue));

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  partition.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(COLS);
  storageDescriptor.setInputFormat(INPUT_FORMAT);
  storageDescriptor.setOutputFormat(OUTPUT_FORMAT);
  storageDescriptor.setSerdeInfo(new SerDeInfo(SERDE_INFO_NAME, SERIALIZATION_LIB, new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation(DATABASE + "/" + tableName + "/" + partitionValue + "/");
  partition.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  partition.setParameters(parameters);

  return partition;
}
 
Example 11
Source File: TestUtils.java    From circus-train with Apache License 2.0
public static Table newTable(String database, String tableName) {
  Table table = new Table();
  table.setDbName(database);
  table.setTableName(tableName);
  table.setTableType(TABLE_TYPE);
  table.setOwner(OWNER);
  table.setCreateTime(CREATE_TIME);
  table.setRetention(RETENTION);

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  table.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(COLS);
  storageDescriptor.setInputFormat(INPUT_FORMAT);
  storageDescriptor.setOutputFormat(OUTPUT_FORMAT);
  storageDescriptor.setSerdeInfo(new SerDeInfo(SERDE_INFO_NAME, SERIALIZATION_LIB, new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation(DATABASE + "/" + tableName + "/");
  table.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  table.setParameters(parameters);

  return table;
}
 
Example 12
Source File: HiveEntityFactory.java    From circus-train with Apache License 2.0
public static StorageDescriptor newStorageDescriptor(File location, String... columns) {
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = new ArrayList<>(columns.length);
  for (String name : columns) {
    cols.add(newFieldSchema(name));
  }
  sd.setCols(cols);
  sd.setSerdeInfo(new SerDeInfo());
  sd.setLocation(location.toURI().toString());
  return sd;
}
 
Example 13
Source File: CircusTrainTest.java    From circus-train with Apache License 2.0
@Before
public void before() throws TException, IOException {
  Table table = new Table();
  table.setDbName(DATABASE);
  table.setTableName("source_" + TABLE);
  table.setTableType(TableType.EXTERNAL_TABLE.name());
  table.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Arrays.asList(new FieldSchema("col1", "string", null)));
  sd.setSerdeInfo(new SerDeInfo());
  table.setSd(sd);

  hive.client().createTable(table);
}
 
Example 14
Source File: FilterToolIntegrationTest.java    From circus-train with Apache License 2.0
private void createTable(File sourceTableUri) throws Exception {
  File partitionEurope = new File(sourceTableUri, "local_date=2000-01-01");
  File partitionUk = new File(partitionEurope, "local_hour=0");
  File dataFileUk = new File(partitionUk, PART_00000);
  FileUtils.writeStringToFile(dataFileUk, "1\tadam\tlondon\n2\tsusan\tglasgow\n");

  File partitionAsia = new File(sourceTableUri, "local_date=2000-01-02");
  File partitionChina = new File(partitionAsia, "local_hour=0");
  File dataFileChina = new File(partitionChina, PART_00000);
  String data = "1\tchun\tbeijing\n2\tshanghai\tmilan\n";
  FileUtils.writeStringToFile(dataFileChina, data);

  HiveMetaStoreClient sourceClient = sourceCatalog.client();

  Table source = new Table();
  source.setDbName(DATABASE);
  source.setTableName(TABLE);
  source.setTableType(TableType.EXTERNAL_TABLE.name());
  source.setParameters(new HashMap<String, String>());

  List<FieldSchema> partitionColumns = Arrays.asList(new FieldSchema("local_date", "string", ""),
      new FieldSchema("local_hour", "string", ""));
  source.setPartitionKeys(partitionColumns);

  List<FieldSchema> dataColumns = Arrays.asList(new FieldSchema("id", "bigint", ""),
      new FieldSchema("name", "string", ""), new FieldSchema("city", "tinyint", ""));

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(dataColumns);
  sd.setLocation(sourceTableUri.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  source.setSd(sd);

  sourceClient.createTable(source);
  LOG.info(">>>> Partitions added: {}",
      sourceClient.add_partitions(Arrays.asList(
          newPartition(sd, Arrays.asList("2000-01-01", "0"), partitionUk),
          newPartition(sd, Arrays.asList("2000-01-02", "0"), partitionChina))));
}
 
Example 15
Source File: HiveConnectorPartitionService.java    From metacat with Apache License 2.0
private void copyTableSdToPartitionSd(final List<Partition> hivePartitions, final Table table) {
    //
    // Update the partition info based on that of the table.
    //
    for (Partition partition : hivePartitions) {
        final StorageDescriptor sd = partition.getSd();
        final StorageDescriptor tableSdCopy = table.getSd().deepCopy();
        if (tableSdCopy.getSerdeInfo() == null) {
            final SerDeInfo serDeInfo = new SerDeInfo(null, null, new HashMap<>());
            tableSdCopy.setSerdeInfo(serDeInfo);
        }

        tableSdCopy.setLocation(sd.getLocation());
        if (!Strings.isNullOrEmpty(sd.getInputFormat())) {
            tableSdCopy.setInputFormat(sd.getInputFormat());
        }
        if (!Strings.isNullOrEmpty(sd.getOutputFormat())) {
            tableSdCopy.setOutputFormat(sd.getOutputFormat());
        }
        if (sd.getParameters() != null && !sd.getParameters().isEmpty()) {
            tableSdCopy.setParameters(sd.getParameters());
        }
        if (sd.getSerdeInfo() != null) {
            if (!Strings.isNullOrEmpty(sd.getSerdeInfo().getName())) {
                tableSdCopy.getSerdeInfo().setName(sd.getSerdeInfo().getName());
            }
            if (!Strings.isNullOrEmpty(sd.getSerdeInfo().getSerializationLib())) {
                tableSdCopy.getSerdeInfo().setSerializationLib(sd.getSerdeInfo().getSerializationLib());
            }
            if (sd.getSerdeInfo().getParameters() != null && !sd.getSerdeInfo().getParameters().isEmpty()) {
                tableSdCopy.getSerdeInfo().setParameters(sd.getSerdeInfo().getParameters());
            }
        }
        partition.setSd(tableSdCopy);
    }
}
 
Example 16
Source File: AvroStorageDescriptorFactory.java    From data-highway with Apache License 2.0
public static StorageDescriptor create(String location) {
  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setInputFormat(AVRO_INPUT_FORMAT);
  storageDescriptor.setOutputFormat(AVRO_OUTPUT_FORMAT);
  storageDescriptor.setLocation(location);
  storageDescriptor.setCols(emptyList());

  SerDeInfo serdeInfo = new SerDeInfo();
  serdeInfo.setSerializationLib(AVRO_SERDE);
  storageDescriptor.setSerdeInfo(serdeInfo);

  return storageDescriptor;
}
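The AVRO_INPUT_FORMAT, AVRO_OUTPUT_FORMAT, and AVRO_SERDE constants are defined outside this excerpt. Assuming they follow the usual Hive Avro setup, they would resolve to the standard container formats and serde:

// Assumed values; the actual constant definitions are not part of the excerpt.
private static final String AVRO_INPUT_FORMAT =
    "org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat";
private static final String AVRO_OUTPUT_FORMAT =
    "org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat";
private static final String AVRO_SERDE =
    "org.apache.hadoop.hive.serde2.avro.AvroSerDe";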
 
Example 17
Source File: MetaStoreRestApiTest.java    From submarine with Apache License 2.0
@Before
public void createDatabase() {
  Database database = new Database();
  database.setName("testdb");
  database.setDescription("testdb");
  database.setLocationUri("hdfs://mycluster/user/hive/warehouse/testdb.db");
  Map<String, String> map = new HashMap<>();
  map.put("key", "value");
  database.setParameters(map);
  database.setOwnerName("root");
  database.setOwnerType(PrincipalType.USER);

  Gson gson = new Gson();
  String databaseJson = gson.toJson(database);

  metaStoreApi.createDatabase(databaseJson);
  Response databaseCountResponse = metaStoreApi.getDatabaseCount();
  assertEquals(databaseCountResponse.getStatus(), Response.Status.OK.getStatusCode());
  assertTrue(((String) databaseCountResponse.getEntity()).contains("\"result\":1"));

  Table table = new Table();
  table.setTableName("testtable");
  table.setDbName("testdb");
  table.setOwner("root");
  // Divide to seconds before narrowing to int; casting the raw millisecond
  // value first (as the original code did) overflows the int.
  table.setCreateTime((int) (new Date().getTime() / 1000));
  table.setLastAccessTime((int) (new Date().getTime() / 1000));
  table.setRetention(0);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> fieldSchemas = new ArrayList<>();
  FieldSchema fieldSchema = new FieldSchema();
  fieldSchema.setName("a");
  fieldSchema.setType("int");
  fieldSchema.setComment("a");
  fieldSchemas.add(fieldSchema);
  sd.setCols(fieldSchemas);
  sd.setLocation("hdfs://mycluster/user/hive/warehouse/testdb.db/testtable");
  sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
  sd.setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  sd.setCompressed(false);
  sd.setNumBuckets(-1);
  SerDeInfo serdeInfo = new SerDeInfo();
  serdeInfo.setName("test");
  serdeInfo.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
  Map<String, String> parametersMap = new HashMap<>();
  parametersMap.put("serialization.format", "|");
  parametersMap.put("field.delim", "|");
  serdeInfo.setParameters(parametersMap);
  sd.setSerdeInfo(serdeInfo);
  table.setSd(sd);
  List<FieldSchema> partitionKeys = new ArrayList<>();
  table.setPartitionKeys(partitionKeys);
  Map<String, String> parameters = new HashMap<>();
  table.setParameters(parameters);
  String viewOriginalText = "";
  table.setViewOriginalText(viewOriginalText);
  String viewExpandedText = "";
  table.setViewExpandedText(viewExpandedText);
  String tableType = "MANAGED_TABLE";
  table.setTableType(tableType);

  String tableJson = gson.toJson(table);
  metaStoreApi.createTable(tableJson);

  Response tableResponse = metaStoreApi.getTable("testdb", "testtable");
  assertEquals(tableResponse.getStatus(), Response.Status.OK.getStatusCode());
  assertTrue(((String) tableResponse.getEntity()).contains("\"tableName\":\"testtable\""));
  Response tableCountResponse = metaStoreApi.getTableCount();
  assertEquals(tableCountResponse.getStatus(), Response.Status.OK.getStatusCode());
  assertTrue(((String) tableCountResponse.getEntity()).contains("\"result\":1"));
}
 
Example 18
Source File: HiveDifferencesIntegrationTest.java    From circus-train with Apache License 2.0
private void createTable(
    String databaseName,
    String tableName,
    File tableLocation,
    String sourceTable,
    String sourceLocation,
    boolean addChecksum)
  throws Exception {
  File partition0 = createPartitionData("part=0", tableLocation, Arrays.asList("1\tadam", "2\tsusan"));
  File partition1 = createPartitionData("part=1", tableLocation, Arrays.asList("3\tchun", "4\tkim"));

  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);
  table.setTableType(TableType.EXTERNAL_TABLE.name());
  table.setParameters(new HashMap<String, String>());
  if (sourceTable != null) {
    table.getParameters().put(CircusTrainTableParameter.SOURCE_TABLE.parameterName(), sourceTable);
  }
  if (sourceLocation != null) {
    table.getParameters().put(CircusTrainTableParameter.SOURCE_LOCATION.parameterName(), sourceLocation);
  }

  List<FieldSchema> partitionColumns = Arrays.asList(PARTITION_COL);
  table.setPartitionKeys(partitionColumns);

  List<FieldSchema> dataColumns = Arrays.asList(FOO_COL, BAR_COL);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(dataColumns);
  sd.setLocation(tableLocation.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  table.setSd(sd);

  HiveMetaStoreClient client = catalog.client();
  client.createTable(table);
  LOG.info(">>>> Partitions added: {}",
      client.add_partitions(Arrays.asList(
          newPartition(databaseName, tableName, sd, Arrays.asList("0"), partition0, sourceTable,
              sourceLocation + "part=0", addChecksum),
          newPartition(databaseName, tableName, sd, Arrays.asList("1"), partition1, sourceTable,
              sourceLocation + "part=1", addChecksum))));
}
 
Example 19
Source File: ComparisonToolIntegrationTest.java    From circus-train with Apache License 2.0
private void createReplicaTable() throws Exception {
  File partitionEurope = new File(replicaTableUri, "local_date=2000-01-01");
  File partitionUk = new File(partitionEurope, "local_hour=0");
  File dataFileUk = new File(partitionUk, PART_00000);
  FileUtils.writeStringToFile(dataFileUk, "1\tadam\tlondon\tuk\n2\tsusan\tglasgow\tuk\n");

  File partitionAsia = new File(replicaTableUri, "local_date=2000-01-02");
  File partitionChina = new File(partitionAsia, "local_hour=0");
  File dataFileChina = new File(partitionChina, PART_00000);
  String data = "1\tchun\tbeijing\tchina\n2\tshanghai\tmilan\titaly\n";
  FileUtils.writeStringToFile(dataFileChina, data);

  HiveMetaStoreClient replicaClient = catalog.client();

  Table replica = new Table();
  replica.setDbName(DATABASE);
  replica.setTableName(REPLICA_TABLE);
  replica.setTableType(TableType.EXTERNAL_TABLE.name());
  Map<String, String> parameters = new HashMap<>();
  parameters.put("comment", "comment replica");
  replica.setParameters(parameters);
  List<FieldSchema> partitionColumns = Arrays.asList(new FieldSchema("local_date", "string", ""),
      new FieldSchema("local_hour", "string", ""));
  replica.setPartitionKeys(partitionColumns);

  List<FieldSchema> dataColumns = Arrays.asList(new FieldSchema("id", "bigint", ""),
      new FieldSchema("name", "string", ""), new FieldSchema("city", "string", ""),
      new FieldSchema("country", "string", ""));

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(dataColumns);
  sd.setLocation(replicaTableUri.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  replica.setSd(sd);

  replicaClient.createTable(replica);
  LOG.info(">>>> Partitions added: {}",
      replicaClient.add_partitions(Arrays.asList(
          newPartition(REPLICA_TABLE, sd, Arrays.asList("2000-01-01", "0"), partitionUk),
          newPartition(REPLICA_TABLE, sd, Arrays.asList("2000-01-02", "0"), partitionChina))));
}
 
Example 20
Source File: SubmarineMetaStoreTest.java    From submarine with Apache License 2.0
@Before
public void createDatabase() throws InvalidObjectException, MetaException {
  listTables();

  Database database = new Database();
  database.setName("testdb");
  database.setDescription("testdb");
  database.setLocationUri("hdfs://mycluster/user/hive/warehouse/testdb.db");
  Map<String, String> map = new HashMap<>();
  map.put("key", "value");
  database.setParameters(map);
  database.setOwnerName("root");
  database.setOwnerType(PrincipalType.USER);
  submarineMetaStore.createDatabase(database);
  assertEquals(1, submarineMetaStore.getDatabaseCount());

  Table table = new Table();
  table.setTableName("testtable");
  table.setDbName("testdb");
  table.setOwner("root");
  // Divide to seconds before narrowing to int; casting the raw millisecond
  // value first (as the original code did) overflows the int.
  table.setCreateTime((int) (new Date().getTime() / 1000));
  table.setLastAccessTime((int) (new Date().getTime() / 1000));
  table.setRetention(0);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> fieldSchemas = new ArrayList<>();
  FieldSchema fieldSchema = new FieldSchema();
  fieldSchema.setName("a");
  fieldSchema.setType("int");
  fieldSchema.setComment("a");
  fieldSchemas.add(fieldSchema);
  sd.setCols(fieldSchemas);
  sd.setLocation("hdfs://mycluster/user/hive/warehouse/testdb.db/testtable");
  sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
  sd.setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  sd.setCompressed(false);
  sd.setNumBuckets(-1);
  SerDeInfo serdeInfo = new SerDeInfo();
  serdeInfo.setName("test");
  serdeInfo.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
  Map<String, String> parametersMap = new HashMap<>();
  parametersMap.put("serialization.format", "|");
  parametersMap.put("field.delim", "|");
  serdeInfo.setParameters(parametersMap);
  sd.setSerdeInfo(serdeInfo);
  table.setSd(sd);
  List<FieldSchema> partitionKeys = new ArrayList<>();
  table.setPartitionKeys(partitionKeys);
  Map<String, String> parameters = new HashMap<>();
  table.setParameters(parameters);
  String viewOriginalText = "";
  table.setViewOriginalText(viewOriginalText);
  String viewExpandedText = "";
  table.setViewExpandedText(viewExpandedText);
  String tableType = "MANAGED_TABLE";
  table.setTableType(tableType);
  submarineMetaStore.createTable(table);

  Table tableTest = submarineMetaStore.getTable("testdb", "testtable");
  assertEquals("testtable", tableTest.getTableName());
  int tableCount = submarineMetaStore.getTableCount();
  assertEquals(1, tableCount);
}