org.apache.hadoop.hive.metastore.api.Order Java Examples

The following examples show how to use org.apache.hadoop.hive.metastore.api.Order. You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the link above each example. Related API usage is listed in the sidebar.
Example #1
Source File: HiveUtils.java    From kite with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a bare-bones metastore {@code Table} for the given database and table name.
 * Every collection-valued field (columns, partition keys, parameters, sort/bucket
 * columns, skew metadata) is initialized to an empty container so callers can
 * populate the table without null checks.
 */
static Table createEmptyTable(String namespace, String name) {
  // Storage descriptor with empty column/bucket/sort metadata.
  StorageDescriptor sd = new StorageDescriptor();
  sd.setNumBuckets(-1); // -1: bucketing not configured
  sd.setCols(new ArrayList<FieldSchema>());
  sd.setBucketCols(new ArrayList<String>());
  sd.setSortCols(new ArrayList<Order>());
  sd.setParameters(new HashMap<String, String>());

  SerDeInfo serde = new SerDeInfo();
  serde.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(serde);

  // Skew metadata must be present but empty.
  SkewedInfo skewed = new SkewedInfo();
  skewed.setSkewedColNames(new ArrayList<String>());
  skewed.setSkewedColValues(new ArrayList<List<String>>());
  skewed.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
  sd.setSkewedInfo(skewed);

  Table table = new Table();
  table.setDbName(namespace);
  table.setTableName(name);
  table.setPartitionKeys(new ArrayList<FieldSchema>());
  table.setParameters(new HashMap<String, String>());
  table.setSd(sd);
  return table;
}
 
Example #2
Source File: AbstractMetastoreTestWithStaticConfiguration.java    From incubator-sentry with Apache License 2.0 6 votes vote down vote up
/**
 * Creates an in-memory metastore {@code Table} object named {@code tabName} in
 * database {@code dbName} with the supplied columns. The table is not registered
 * with the metastore here; the {@code client} parameter is unused in this method.
 */
public Table makeMetastoreTableObject(HiveMetaStoreClient client,
    String dbName, String tabName, List<FieldSchema> cols) throws Exception {
  Table tbl = new Table();
  tbl.setDbName(dbName);
  tbl.setTableName(tabName);
  tbl.setParameters(new HashMap<String, String>());

  // Serde named after the table, with serialization format "1".
  SerDeInfo serde = new SerDeInfo();
  serde.setName(tabName);
  HashMap<String, String> serdeParams = new HashMap<String, String>();
  serdeParams.put(serdeConstants.SERIALIZATION_FORMAT, "1");
  serde.setParameters(serdeParams);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(cols);
  sd.setCompressed(false);
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(serde);
  sd.setSortCols(new ArrayList<Order>());

  tbl.setSd(sd);
  return tbl;
}
 
Example #3
Source File: PartitionAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies SHORT_CIRCUIT mode: although many partition properties differ below,
 * the comparator reports exactly one diff — the first property compared
 * ({@code partition.parameters}) — and stops there.
 */
@Test
public void allShortCircuit() {
  // Mutate a broad spread of partition + storage-descriptor properties on "left".
  left.getPartition().getParameters().put("com.company.key", "value");
  left.getPartition().setValues(ImmutableList.of("p1", "p2"));
  List<PrivilegeGrantInfo> privilege = ImmutableList.of(new PrivilegeGrantInfo());
  left.getPartition().setPrivileges(new PrincipalPrivilegeSet(ImmutableMap.of("write", privilege), null, null));
  left.getPartition().getSd().setLocation("left");
  left.getPartition().getSd().setInputFormat("LeftInputFormat");
  left.getPartition().getSd().setOutputFormat("LeftOutputFormat");
  left.getPartition().getSd().getParameters().put("com.company.key", "value");
  left.getPartition().getSd().getSerdeInfo().setName("left serde info");
  left.getPartition().getSd().getSkewedInfo().setSkewedColNames(ImmutableList.of("left skewed col"));
  left.getPartition().getSd().setCols(ImmutableList.of(new FieldSchema("left", "type", "comment")));
  left.getPartition().getSd().setSortCols(ImmutableList.of(new Order()));
  left.getPartition().getSd().setBucketCols(ImmutableList.of("bucket"));
  left.getPartition().getSd().setNumBuckets(9000);

  List<Diff<Object, Object>> diffs = newPartitionAndMetadataComparator(SHORT_CIRCUIT).compare(left, right);

  // Short-circuit: only the first detected difference is returned.
  assertThat(diffs, is(notNullValue()));
  assertThat(diffs.size(), is(1));
  assertThat(diffs.get(0), is(newPropertyDiff(PartitionAndMetadata.class, "partition.parameters",
      left.getPartition().getParameters(), right.getPartition().getParameters())));
}
 
Example #4
Source File: CatalogToHiveConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a list of Glue catalog orders into Hive metastore {@code Order}s.
 * A {@code null} input yields an empty — never {@code null} — list.
 */
public static List<Order> convertOrderList(List<com.amazonaws.services.glue.model.Order> catalogOrderList) {
  List<Order> hiveOrderList = new ArrayList<>();
  if (catalogOrderList != null) {
    for (com.amazonaws.services.glue.model.Order catalogOrder : catalogOrderList) {
      hiveOrderList.add(convertOrder(catalogOrder));
    }
  }
  return hiveOrderList;
}
 
Example #5
Source File: HiveSchemaEvolutionTest.java    From incubator-gobblin with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a destination Hive {@code Table} whose schema is the flattened/evolved
 * form of the source, deliberately OMITTING one source column
 * ({@code parentFieldRecord__nestedFieldInt}) to simulate an un-evolved schema,
 * and adding one extra column not present in the source.
 *
 * @param tableName   table name for the metastore object
 * @param dbName      database name for the metastore object
 * @param location    storage location for the table's storage descriptor
 * @param withComment when true, each column carries its "from flatten_source ..." comment
 * @return the table wrapped in {@code Optional}
 */
private Optional<Table> createEvolvedDestinationTable(String tableName, String dbName, String location,
    boolean withComment) {
  List<FieldSchema> cols = new ArrayList<>();
  // Existing columns that match avroToOrcSchemaEvolutionTest/source_schema_evolution_enabled.ddl
  cols.add(new FieldSchema("parentFieldRecord__nestedFieldRecord__superNestedFieldString", "string",
      withComment ? "from flatten_source parentFieldRecord.nestedFieldRecord.superNestedFieldString" : ""));
  cols.add(new FieldSchema("parentFieldRecord__nestedFieldRecord__superNestedFieldInt", "int",
      withComment ? "from flatten_source parentFieldRecord.nestedFieldRecord.superNestedFieldInt" : ""));
  cols.add(new FieldSchema("parentFieldRecord__nestedFieldString", "string",
      withComment ? "from flatten_source parentFieldRecord.nestedFieldString" : ""));
  // The following column is skipped (simulating un-evolved schema):
  // Column name   : parentFieldRecord__nestedFieldInt
  // Column type   : int
  // Column comment: from flatten_source parentFieldRecord.nestedFieldInt
  cols.add(new FieldSchema("parentFieldInt", "int",
      withComment ? "from flatten_source parentFieldInt" : ""));
  // Extra schema
  cols.add(new FieldSchema("parentFieldRecord__nestedFieldString2", "string",
      withComment ? "from flatten_source parentFieldRecord.nestedFieldString2" : ""));

  String inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
  String outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
  // Positional Thrift constructors: args follow the generated StorageDescriptor/Table
  // signatures (compressed=false, numBuckets=0, empty sort cols, owner "ketl_dev", etc.).
  StorageDescriptor storageDescriptor = new StorageDescriptor(cols, location, inputFormat, outputFormat, false, 0,
      new SerDeInfo(), null, Lists.<Order>newArrayList(), null);
  Table table = new Table(tableName, dbName, "ketl_dev", 0, 0, 0, storageDescriptor,
      Lists.<FieldSchema>newArrayList(), Maps.<String,String>newHashMap(), "", "", "");

  return Optional.of(table);
}
 
Example #6
Source File: TableAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies SHORT_CIRCUIT mode for tables: although many table properties differ
 * below, the comparator reports exactly one diff — the first property compared
 * ({@code table.parameters}) — and stops there.
 */
@Test
public void allShortCircuit() {
  // Mutate a broad spread of table + storage-descriptor properties on "left".
  left.getTable().getParameters().put("com.company.key", "value");
  left.getTable().setPartitionKeys(ImmutableList.of(new FieldSchema("p", "string", "p comment")));
  left.getTable().setOwner("left owner");
  List<PrivilegeGrantInfo> privilege = ImmutableList.of(new PrivilegeGrantInfo());
  left.getTable().setPrivileges(new PrincipalPrivilegeSet(ImmutableMap.of("write", privilege), null, null));
  left.getTable().setRetention(2);
  left.getTable().setTableType("internal");
  left.getTable().getSd().setLocation("left");
  left.getTable().getSd().setInputFormat("LeftInputFormat");
  left.getTable().getSd().setOutputFormat("LeftOutputFormat");
  left.getTable().getSd().getParameters().put("com.company.key", "value");
  left.getTable().getSd().getSerdeInfo().setName("left serde info");
  left.getTable().getSd().getSkewedInfo().setSkewedColNames(ImmutableList.of("left skewed col"));
  left.getTable().getSd().setCols(ImmutableList.of(new FieldSchema("left", "type", "comment")));
  left.getTable().getSd().setSortCols(ImmutableList.of(new Order()));
  left.getTable().getSd().setBucketCols(ImmutableList.of("bucket"));
  left.getTable().getSd().setNumBuckets(9000);

  List<Diff<Object, Object>> diffs = newTableAndMetadataComparator(SHORT_CIRCUIT).compare(left, right);

  // Short-circuit: only the first detected difference is returned.
  assertThat(diffs, is(notNullValue()));
  assertThat(diffs.size(), is(1));
  assertThat(diffs.get(0), is(newPropertyDiff(TableAndMetadata.class, "table.parameters",
      left.getTable().getParameters(), right.getTable().getParameters())));
}
 
Example #7
Source File: TableAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 5 votes vote down vote up
/** A sort-column difference on the table's storage descriptor yields exactly one diff in FULL_COMPARISON mode. */
@Test
public void sdSortColsFullComparison() {
  left.getTable().getSd().setSortCols(ImmutableList.of(new Order()));

  List<Diff<Object, Object>> observed = newTableAndMetadataComparator(FULL_COMPARISON).compare(left, right);

  assertThat(observed, is(notNullValue()));
  assertThat(observed.size(), is(1));
  assertThat(observed.get(0), is(newPropertyDiff(TableAndMetadata.class, "table.sd.sortCols",
      left.getTable().getSd().getSortCols(), right.getTable().getSd().getSortCols())));
}
 
Example #8
Source File: TableAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 5 votes vote down vote up
/** A sort-column difference on the table's storage descriptor yields exactly one diff in SHORT_CIRCUIT mode. */
@Test
public void sdSortColsShortCircuit() {
  left.getTable().getSd().setSortCols(ImmutableList.of(new Order()));

  List<Diff<Object, Object>> observed = newTableAndMetadataComparator(SHORT_CIRCUIT).compare(left, right);

  assertThat(observed, is(notNullValue()));
  assertThat(observed.size(), is(1));
  assertThat(observed.get(0), is(newPropertyDiff(TableAndMetadata.class, "table.sd.sortCols",
      left.getTable().getSd().getSortCols(), right.getTable().getSd().getSortCols())));
}
 
Example #9
Source File: PartitionAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 5 votes vote down vote up
/** A sort-column difference on the partition's storage descriptor yields exactly one diff in FULL_COMPARISON mode. */
@Test
public void sdSortColsFullComparison() {
  left.getPartition().getSd().setSortCols(ImmutableList.of(new Order()));

  List<Diff<Object, Object>> observed = newPartitionAndMetadataComparator(FULL_COMPARISON).compare(left, right);

  assertThat(observed, is(notNullValue()));
  assertThat(observed.size(), is(1));
  assertThat(observed.get(0), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.sortCols",
      left.getPartition().getSd().getSortCols(), right.getPartition().getSd().getSortCols())));
}
 
Example #10
Source File: PartitionAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 5 votes vote down vote up
/** A sort-column difference on the partition's storage descriptor yields exactly one diff in SHORT_CIRCUIT mode. */
@Test
public void sdSortColsShortCircuit() {
  left.getPartition().getSd().setSortCols(ImmutableList.of(new Order()));

  List<Diff<Object, Object>> observed = newPartitionAndMetadataComparator(SHORT_CIRCUIT).compare(left, right);

  assertThat(observed, is(notNullValue()));
  assertThat(observed.size(), is(1));
  assertThat(observed.get(0), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.sortCols",
      left.getPartition().getSd().getSortCols(), right.getPartition().getSd().getSortCols())));
}
 
Example #11
Source File: ThriftMetastoreUtil.java    From presto with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a Thrift {@code StorageDescriptor} from Presto's storage/column model.
 * The serde is named after the table; location is normalized so an empty string
 * becomes null; bucketing fields are populated only when a bucket property exists.
 */
private static StorageDescriptor makeStorageDescriptor(String tableName, List<Column> columns, Storage storage)
{
    SerDeInfo serdeInfo = new SerDeInfo();
    serdeInfo.setName(tableName);
    serdeInfo.setSerializationLib(storage.getStorageFormat().getSerDeNullable());
    serdeInfo.setParameters(storage.getSerdeParameters());

    StorageDescriptor sd = new StorageDescriptor();
    sd.setLocation(emptyToNull(storage.getLocation()));
    sd.setCols(columns.stream()
            .map(ThriftMetastoreUtil::toMetastoreApiFieldSchema)
            .collect(toImmutableList()));
    sd.setSerdeInfo(serdeInfo);
    sd.setInputFormat(storage.getStorageFormat().getInputFormatNullable());
    sd.setOutputFormat(storage.getStorageFormat().getOutputFormatNullable());
    // NOTE(review): this toggles only the Thrift "isSet" flag for skewedInfo; no
    // SkewedInfo payload is attached — confirm this is the intended marker usage.
    sd.setSkewedInfoIsSet(storage.isSkewed());
    sd.setParameters(ImmutableMap.of());

    Optional<HiveBucketProperty> bucketProperty = storage.getBucketProperty();
    if (bucketProperty.isPresent()) {
        sd.setNumBuckets(bucketProperty.get().getBucketCount());
        sd.setBucketCols(bucketProperty.get().getBucketedBy());
        // Sort columns are set only when the bucket property declares them.
        if (!bucketProperty.get().getSortedBy().isEmpty()) {
            sd.setSortCols(bucketProperty.get().getSortedBy().stream()
                    .map(column -> new Order(column.getColumnName(), column.getOrder().getHiveOrder()))
                    .collect(toImmutableList()));
        }
    }

    return sd;
}
 
Example #12
Source File: HiveToCatalogConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a list of Hive metastore {@code Order}s into Glue catalog orders.
 * Returns {@code null} when the input list is {@code null}.
 */
public static List<com.amazonaws.services.glue.model.Order> convertOrderList(List<Order> hiveOrderList) {
  if (hiveOrderList == null) {
    return null;
  }
  List<com.amazonaws.services.glue.model.Order> catalogOrders = new ArrayList<>(hiveOrderList.size());
  for (Order hiveOrder : hiveOrderList) {
    catalogOrders.add(convertOrder(hiveOrder));
  }
  return catalogOrders;
}
 
Example #13
Source File: HiveToCatalogConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/** Converts a single Hive metastore {@code Order} (column + sort direction) into its Glue catalog equivalent. */
public static com.amazonaws.services.glue.model.Order convertOrder(Order hiveOrder) {
  com.amazonaws.services.glue.model.Order catalogOrder = new com.amazonaws.services.glue.model.Order();
  catalogOrder.setColumn(hiveOrder.getCol());
  catalogOrder.setSortOrder(hiveOrder.getOrder());
  return catalogOrder;
}
 
Example #14
Source File: CatalogToHiveConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/** Converts a single Glue catalog order into its Hive metastore {@code Order} equivalent. */
public static Order convertOrder(com.amazonaws.services.glue.model.Order catalogOrder) {
  Order result = new Order();
  result.setCol(catalogOrder.getColumn());
  result.setOrder(catalogOrder.getSortOrder());
  return result;
}
 
Example #15
Source File: PartitionAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies FULL_COMPARISON mode for partitions: every changed property is
 * reported, producing ten diffs in the comparator's fixed property order
 * (parameters, inputFormat, outputFormat, sd.parameters, serdeInfo, skewedInfo,
 * cols, sortCols, bucketCols, numBuckets). Note partition values, privileges and
 * location are changed below but do NOT appear among the reported diffs.
 */
@Test
public void allFullComparison() {
  // Mutate a broad spread of partition + storage-descriptor properties on "left".
  left.getPartition().getParameters().put("com.company.key", "value");
  left.getPartition().setValues(ImmutableList.of("p1", "p2"));
  List<PrivilegeGrantInfo> privilege = ImmutableList.of(new PrivilegeGrantInfo());
  left.getPartition().setPrivileges(new PrincipalPrivilegeSet(ImmutableMap.of("write", privilege), null, null));
  left.getPartition().getSd().setLocation("left");
  left.getPartition().getSd().setInputFormat("LeftInputFormat");
  left.getPartition().getSd().setOutputFormat("LeftOutputFormat");
  left.getPartition().getSd().getParameters().put("com.company.key", "value");
  left.getPartition().getSd().getSerdeInfo().setName("left serde info");
  left.getPartition().getSd().getSkewedInfo().setSkewedColNames(ImmutableList.of("left skewed col"));
  left.getPartition().getSd().setCols(ImmutableList.of(new FieldSchema("left", "type", "comment")));
  left.getPartition().getSd().setSortCols(ImmutableList.of(new Order()));
  left.getPartition().getSd().setBucketCols(ImmutableList.of("bucket"));
  left.getPartition().getSd().setNumBuckets(9000);

  List<Diff<Object, Object>> diffs = newPartitionAndMetadataComparator(FULL_COMPARISON).compare(left, right);

  assertThat(diffs, is(notNullValue()));
  assertThat(diffs.size(), is(10));
  assertThat(diffs.get(0), is(newPropertyDiff(PartitionAndMetadata.class, "partition.parameters",
      left.getPartition().getParameters(), right.getPartition().getParameters())));
  assertThat(diffs.get(1), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.inputFormat",
      left.getPartition().getSd().getInputFormat(), right.getPartition().getSd().getInputFormat())));
  assertThat(diffs.get(2), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.outputFormat",
      left.getPartition().getSd().getOutputFormat(), right.getPartition().getSd().getOutputFormat())));
  assertThat(diffs.get(3), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.parameters",
      left.getPartition().getSd().getParameters(), right.getPartition().getSd().getParameters())));
  assertThat(diffs.get(4), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.serdeInfo",
      left.getPartition().getSd().getSerdeInfo(), right.getPartition().getSd().getSerdeInfo())));
  assertThat(diffs.get(5), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.skewedInfo",
      left.getPartition().getSd().getSkewedInfo(), right.getPartition().getSd().getSkewedInfo())));
  // Collection size mismatch produces a raw message diff rather than a property diff.
  assertThat(diffs.get(6),
      is(newDiff(
          "Collection partition.sd.cols of class com.google.common.collect.SingletonImmutableList has different size: left.size()=1 and right.size()=2",
          left.getPartition().getSd().getCols(), right.getPartition().getSd().getCols())));
  assertThat(diffs.get(7), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.sortCols",
      left.getPartition().getSd().getSortCols(), right.getPartition().getSd().getSortCols())));
  assertThat(diffs.get(8), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.bucketCols",
      left.getPartition().getSd().getBucketCols(), right.getPartition().getSd().getBucketCols())));
  assertThat(diffs.get(9), is(newPropertyDiff(PartitionAndMetadata.class, "partition.sd.numBuckets",
      left.getPartition().getSd().getNumBuckets(), right.getPartition().getSd().getNumBuckets())));
}
 
Example #16
Source File: HiveMetaStoreBridge.java    From atlas with Apache License 2.0 4 votes vote down vote up
/**
 * Maps a Hive {@code StorageDescriptor} to an Atlas {@code HIVE_STORAGEDESC}
 * entity, copying location/format/compression/bucketing attributes and, when
 * present, nested serde info and sort columns as Atlas structs.
 *
 * @param storageDesc        the Hive storage descriptor to convert
 * @param tableQualifiedName qualified name of the owning table (unused in this body — TODO confirm)
 * @param sdQualifiedName    qualified name assigned to the resulting entity
 * @param tableId            Atlas object id of the owning table, linked via relationship attribute
 * @return the populated Atlas entity
 * @throws AtlasHookException declared by the signature; no direct throw in this body
 */
private AtlasEntity toStorageDescEntity(StorageDescriptor storageDesc, String tableQualifiedName, String sdQualifiedName, AtlasObjectId tableId ) throws AtlasHookException {
    AtlasEntity ret = new AtlasEntity(HiveDataTypes.HIVE_STORAGEDESC.getName());

    ret.setRelationshipAttribute(ATTRIBUTE_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(tableId, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC));
    ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, sdQualifiedName);
    ret.setAttribute(ATTRIBUTE_PARAMETERS, storageDesc.getParameters());
    // Location is rewritten to carry the HDFS nameservice id, if one applies.
    ret.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(storageDesc.getLocation()));
    ret.setAttribute(ATTRIBUTE_INPUT_FORMAT, storageDesc.getInputFormat());
    ret.setAttribute(ATTRIBUTE_OUTPUT_FORMAT, storageDesc.getOutputFormat());
    ret.setAttribute(ATTRIBUTE_COMPRESSED, storageDesc.isCompressed());
    ret.setAttribute(ATTRIBUTE_NUM_BUCKETS, storageDesc.getNumBuckets());
    ret.setAttribute(ATTRIBUTE_STORED_AS_SUB_DIRECTORIES, storageDesc.isStoredAsSubDirectories());

    // Bucket columns are only recorded when non-empty.
    // NOTE(review): getBucketCols() is assumed non-null here — confirm upstream guarantees.
    if (storageDesc.getBucketCols().size() > 0) {
        ret.setAttribute(ATTRIBUTE_BUCKET_COLS, storageDesc.getBucketCols());
    }

    if (storageDesc.getSerdeInfo() != null) {
        SerDeInfo serdeInfo = storageDesc.getSerdeInfo();

        LOG.debug("serdeInfo = {}", serdeInfo);
        // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

        AtlasStruct serdeInfoStruct = new AtlasStruct(HiveDataTypes.HIVE_SERDE.getName());

        serdeInfoStruct.setAttribute(ATTRIBUTE_NAME, serdeInfo.getName());
        serdeInfoStruct.setAttribute(ATTRIBUTE_SERIALIZATION_LIB, serdeInfo.getSerializationLib());
        serdeInfoStruct.setAttribute(ATTRIBUTE_PARAMETERS, serdeInfo.getParameters());

        ret.setAttribute(ATTRIBUTE_SERDE_INFO, serdeInfoStruct);
    }

    // Each Hive sort column becomes a HIVE_ORDER struct with "col" and "order" attributes.
    if (CollectionUtils.isNotEmpty(storageDesc.getSortCols())) {
        List<AtlasStruct> sortColsStruct = new ArrayList<>();

        for (Order sortcol : storageDesc.getSortCols()) {
            String hiveOrderName = HiveDataTypes.HIVE_ORDER.getName();
            AtlasStruct colStruct = new AtlasStruct(hiveOrderName);
            colStruct.setAttribute("col", sortcol.getCol());
            colStruct.setAttribute("order", sortcol.getOrder());

            sortColsStruct.add(colStruct);
        }

        ret.setAttribute(ATTRIBUTE_SORT_COLS, sortColsStruct);
    }

    return ret;
}
 
Example #17
Source File: TableAndMetadataComparatorTest.java    From circus-train with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies FULL_COMPARISON mode for tables: every changed property is reported,
 * producing twelve diffs in the comparator's fixed property order (parameters,
 * partitionKeys, retention, inputFormat, outputFormat, sd.parameters, serdeInfo,
 * skewedInfo, cols, sortCols, bucketCols, numBuckets). Note owner, privileges,
 * tableType and location are changed below but do NOT appear among the diffs.
 */
@Test
public void allFullComparison() {
  // Mutate a broad spread of table + storage-descriptor properties on "left".
  left.getTable().getParameters().put("com.company.key", "value");
  left.getTable().setPartitionKeys(ImmutableList.of(new FieldSchema("p", "string", "p comment")));
  left.getTable().setOwner("left owner");
  List<PrivilegeGrantInfo> privilege = ImmutableList.of(new PrivilegeGrantInfo());
  left.getTable().setPrivileges(new PrincipalPrivilegeSet(ImmutableMap.of("write", privilege), null, null));
  left.getTable().setRetention(2);
  left.getTable().setTableType("internal");
  left.getTable().getSd().setLocation("left");
  left.getTable().getSd().setInputFormat("LeftInputFormat");
  left.getTable().getSd().setOutputFormat("LeftOutputFormat");
  left.getTable().getSd().getParameters().put("com.company.key", "value");
  left.getTable().getSd().getSerdeInfo().setName("left serde info");
  left.getTable().getSd().getSkewedInfo().setSkewedColNames(ImmutableList.of("left skewed col"));
  left.getTable().getSd().setCols(ImmutableList.of(new FieldSchema("left", "type", "comment")));
  left.getTable().getSd().setSortCols(ImmutableList.of(new Order()));
  left.getTable().getSd().setBucketCols(ImmutableList.of("bucket"));
  left.getTable().getSd().setNumBuckets(9000);

  List<Diff<Object, Object>> diffs = newTableAndMetadataComparator(FULL_COMPARISON).compare(left, right);

  assertThat(diffs, is(notNullValue()));
  assertThat(diffs.size(), is(12));
  assertThat(diffs.get(0), is(newPropertyDiff(TableAndMetadata.class, "table.parameters",
      left.getTable().getParameters(), right.getTable().getParameters())));
  assertThat(diffs.get(1), is(newPropertyDiff(TableAndMetadata.class, "table.partitionKeys",
      left.getTable().getPartitionKeys(), right.getTable().getPartitionKeys())));
  assertThat(diffs.get(2), is(newPropertyDiff(TableAndMetadata.class, "table.retention",
      left.getTable().getRetention(), right.getTable().getRetention())));
  assertThat(diffs.get(3), is(newPropertyDiff(TableAndMetadata.class, "table.sd.inputFormat",
      left.getTable().getSd().getInputFormat(), right.getTable().getSd().getInputFormat())));
  assertThat(diffs.get(4), is(newPropertyDiff(TableAndMetadata.class, "table.sd.outputFormat",
      left.getTable().getSd().getOutputFormat(), right.getTable().getSd().getOutputFormat())));
  assertThat(diffs.get(5), is(newPropertyDiff(TableAndMetadata.class, "table.sd.parameters",
      left.getTable().getSd().getParameters(), right.getTable().getSd().getParameters())));
  assertThat(diffs.get(6), is(newPropertyDiff(TableAndMetadata.class, "table.sd.serdeInfo",
      left.getTable().getSd().getSerdeInfo(), right.getTable().getSd().getSerdeInfo())));
  assertThat(diffs.get(7), is(newPropertyDiff(TableAndMetadata.class, "table.sd.skewedInfo",
      left.getTable().getSd().getSkewedInfo(), right.getTable().getSd().getSkewedInfo())));
  // Collection size mismatch produces a raw message diff rather than a property diff.
  assertThat(diffs.get(8),
      is(newDiff(
          "Collection table.sd.cols of class com.google.common.collect.SingletonImmutableList has different size: left.size()=1 and right.size()=2",
          left.getTable().getSd().getCols(), right.getTable().getSd().getCols())));
  assertThat(diffs.get(9), is(newPropertyDiff(TableAndMetadata.class, "table.sd.sortCols",
      left.getTable().getSd().getSortCols(), right.getTable().getSd().getSortCols())));
  assertThat(diffs.get(10), is(newPropertyDiff(TableAndMetadata.class, "table.sd.bucketCols",
      left.getTable().getSd().getBucketCols(), right.getTable().getSd().getBucketCols())));
  assertThat(diffs.get(11), is(newPropertyDiff(TableAndMetadata.class, "table.sd.numBuckets",
      left.getTable().getSd().getNumBuckets(), right.getTable().getSd().getNumBuckets())));
}
 
Example #18
Source File: HiveMetaStoreBridge.java    From incubator-atlas with Apache License 2.0 4 votes vote down vote up
/**
 * Maps a Hive {@code StorageDescriptor} to an Atlas {@code HIVE_STORAGEDESC}
 * {@code Referenceable}, copying serde info, bucketing, sort columns,
 * location/format/compression attributes, and linking back to the owning table.
 *
 * @param storageDesc        the Hive storage descriptor to convert
 * @param tableQualifiedName qualified name of the owning table (unused in this body — TODO confirm)
 * @param sdQualifiedName    qualified name assigned to the resulting referenceable
 * @param tableId            id of the owning table, stored under the TABLE attribute
 * @return the populated referenceable
 * @throws AtlasHookException declared by the signature; no direct throw in this body
 */
public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
    String sdQualifiedName, Id tableId) throws AtlasHookException {
    LOG.debug("Filling storage descriptor information for {}", storageDesc);

    Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
    sdReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, sdQualifiedName);

    // NOTE(review): serdeInfo is dereferenced below without a null check — an
    // SD with no serde would NPE here; confirm upstream guarantees.
    SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
    LOG.debug("serdeInfo = {}", serdeInfo);
    // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

    String serdeInfoName = HiveDataTypes.HIVE_SERDE.getName();
    Struct serdeInfoStruct = new Struct(serdeInfoName);

    serdeInfoStruct.set(AtlasClient.NAME, serdeInfo.getName());
    serdeInfoStruct.set("serializationLib", serdeInfo.getSerializationLib());
    serdeInfoStruct.set(PARAMETERS, serdeInfo.getParameters());

    sdReferenceable.set("serdeInfo", serdeInfoStruct);
    sdReferenceable.set(STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
    sdReferenceable
            .set(STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());

    // Each Hive sort column becomes a HIVE_ORDER struct with "col" and "order" attributes.
    List<Struct> sortColsStruct = new ArrayList<>();
    for (Order sortcol : storageDesc.getSortCols()) {
        String hiveOrderName = HiveDataTypes.HIVE_ORDER.getName();
        Struct colStruct = new Struct(hiveOrderName);
        colStruct.set("col", sortcol.getCol());
        colStruct.set("order", sortcol.getOrder());

        sortColsStruct.add(colStruct);
    }
    if (sortColsStruct.size() > 0) {
        sdReferenceable.set("sortCols", sortColsStruct);
    }

    sdReferenceable.set(LOCATION, storageDesc.getLocation());
    sdReferenceable.set("inputFormat", storageDesc.getInputFormat());
    sdReferenceable.set("outputFormat", storageDesc.getOutputFormat());
    sdReferenceable.set("compressed", storageDesc.isCompressed());

    // Bucket columns are only recorded when non-empty.
    if (storageDesc.getBucketCols().size() > 0) {
        sdReferenceable.set("bucketCols", storageDesc.getBucketCols());
    }

    sdReferenceable.set(PARAMETERS, storageDesc.getParameters());
    sdReferenceable.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
    sdReferenceable.set(TABLE, tableId);

    return sdReferenceable;
}