Java Code Examples for org.apache.hadoop.hive.metastore.api.Partition#setCreateTime()

The following examples show how to use org.apache.hadoop.hive.metastore.api.Partition#setCreateTime(). They are drawn from open source projects; the originating project and source file are noted above each example.
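As these examples show, setCreateTime() takes the partition's creation time as Unix epoch seconds stored in an int. The snippet below is a minimal sketch of that pattern; the database name, table name, and partition value are made up for illustration and are not taken from any of the projects above.

import java.util.Collections;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

public class SetCreateTimeSketch {
  public static Partition newPartitionWithCreateTime() {
    Partition partition = new Partition();
    partition.setDbName("example_db");                             // hypothetical database name
    partition.setTableName("example_table");                       // hypothetical table name
    partition.setValues(Collections.singletonList("2021-01-01"));  // hypothetical partition value
    partition.setSd(new StorageDescriptor());
    // Hive stores the create time as seconds since the epoch in an int field.
    partition.setCreateTime((int) (System.currentTimeMillis() / 1000));
    return partition;
  }
}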
Example 1
Source File: LocalHiveMetastoreTestUtils.java    From incubator-gobblin with Apache License 2.0
public Partition addTestPartition(Table tbl, List<String> values, int createTime) throws Exception {
  StorageDescriptor partitionSd = new StorageDescriptor();
  // Use the table's location as the base for the partition location when it is set; otherwise fall back to a /tmp path.
  if (StringUtils.isNotBlank(tbl.getSd().getLocation())) {
    partitionSd.setLocation(tbl.getSd().getLocation() + values);
  } else {
    partitionSd.setLocation("/tmp/" + tbl.getTableName() + "/part1");
  }

  partitionSd.setSerdeInfo(
      new SerDeInfo("name", "serializationLib", ImmutableMap.of(HiveAvroSerDeManager.SCHEMA_URL, "/tmp/dummy")));
  partitionSd.setCols(tbl.getPartitionKeys());
  Partition partition =
      new Partition(values, tbl.getDbName(), tbl.getTableName(), 1, 1, partitionSd, new HashMap<String, String>());
  partition.setCreateTime(createTime);
  return this.getLocalMetastoreClient().add_partition(partition);
}
 
Example 2
Source File: HiveMetaStoreUtils.java    From incubator-gobblin with Apache License 2.0
/**
 * Convert a {@link HivePartition} into a {@link Partition}.
 */
public static Partition getPartition(HivePartition hivePartition) {
  State props = hivePartition.getProps();
  Partition partition = new Partition();
  partition.setDbName(hivePartition.getDbName());
  partition.setTableName(hivePartition.getTableName());
  partition.setValues(hivePartition.getValues());
  partition.setParameters(getParameters(props));
  if (hivePartition.getCreateTime().isPresent()) {
    partition.setCreateTime(Ints.checkedCast(hivePartition.getCreateTime().get()));
  } else if (props.contains(HiveConstants.CREATE_TIME)) {
    partition.setCreateTime(props.getPropAsInt(HiveConstants.CREATE_TIME));
  }
  if (props.contains(HiveConstants.LAST_ACCESS_TIME)) {
    partition.setLastAccessTime(props.getPropAsInt(HiveConstants.LAST_ACCESS_TIME));
  }
  partition.setSd(getStorageDescriptor(hivePartition));
  return partition;
}
 
Example 3
Source File: HiveTableUtil.java    From flink with Apache License 2.0
/**
 * Creates a Hive partition instance.
 */
public static Partition createHivePartition(String dbName, String tableName, List<String> values,
		StorageDescriptor sd, Map<String, String> parameters) {
	Partition partition = new Partition();
	partition.setDbName(dbName);
	partition.setTableName(tableName);
	partition.setValues(values);
	partition.setParameters(parameters);
	partition.setSd(sd);
	// Hive stores create and last-access times as seconds since the epoch, in int fields.
	int currentTime = (int) (System.currentTimeMillis() / 1000);
	partition.setCreateTime(currentTime);
	partition.setLastAccessTime(currentTime);
	return partition;
}
 
Example 4
Source File: TestUtils.java    From circus-train with Apache License 2.0
/**
 * Creates a Partition populated with test fixture values; constants such as CREATE_TIME, COLS and SERDE_INFO_NAME are defined elsewhere in the test code.
 */
public static Partition newPartition(String database, String tableName, String partitionValue) {
  Partition partition = new Partition();
  partition.setDbName(database);
  partition.setTableName(tableName);
  partition.setCreateTime(CREATE_TIME);
  partition.setValues(ImmutableList.of(partitionValue));

  Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
  userPrivileges.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
  privileges.setUserPrivileges(userPrivileges);
  partition.setPrivileges(privileges);

  StorageDescriptor storageDescriptor = new StorageDescriptor();
  storageDescriptor.setCols(COLS);
  storageDescriptor.setInputFormat(INPUT_FORMAT);
  storageDescriptor.setOutputFormat(OUTPUT_FORMAT);
  storageDescriptor.setSerdeInfo(new SerDeInfo(SERDE_INFO_NAME, SERIALIZATION_LIB, new HashMap<String, String>()));
  storageDescriptor.setSkewedInfo(new SkewedInfo());
  storageDescriptor.setParameters(new HashMap<String, String>());
  storageDescriptor.setLocation(DATABASE + "/" + tableName + "/" + partitionValue + "/");
  partition.setSd(storageDescriptor);

  Map<String, String> parameters = new HashMap<>();
  parameters.put("com.company.parameter", "abc");
  partition.setParameters(parameters);

  return partition;
}
 
Example 5
Source File: LocalHiveMetastoreTestUtils.java    From incubator-gobblin with Apache License 2.0
public org.apache.hadoop.hive.ql.metadata.Partition createDummyPartition(long createTime) {
  org.apache.hadoop.hive.ql.metadata.Partition partition = new org.apache.hadoop.hive.ql.metadata.Partition();
  Partition tPartition = new Partition();
  // The thrift Partition stores createTime in epoch seconds, so convert the millisecond input.
  tPartition.setCreateTime((int) TimeUnit.SECONDS.convert(createTime, TimeUnit.MILLISECONDS));
  partition.setTPartition(tPartition);

  return partition;
}
 
Example 6
Source File: HiveMetaStoreBasedRegister.java    From incubator-gobblin with Apache License 2.0
/**
 * Sets create time if not already set.
 */
private Partition getPartitionWithCreateTime(Partition partition, int createTime) {
  if (partition.isSetCreateTime() && partition.getCreateTime() > 0) {
    return partition;
  }
  Partition actualPartition = partition.deepCopy();
  actualPartition.setCreateTime(createTime);
  return actualPartition;
}
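getPartitionWithCreateTime is a private helper in HiveMetaStoreBasedRegister, so the call site below is purely illustrative; it only shows the intent of defaulting a missing create time to "now", expressed in epoch seconds, before registering the partition.

// Illustrative only: default the create time to the current epoch seconds when it is missing.
int nowSeconds = (int) (System.currentTimeMillis() / 1000);
Partition partitionToRegister = getPartitionWithCreateTime(partition, nowSeconds);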
 
Example 7
Source File: CatalogToHiveConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
public static Partition convertPartition(com.amazonaws.services.glue.model.Partition src) {
  Partition tgt = new Partition();
  Date createTime = src.getCreationTime();
  if (createTime != null) {
    tgt.setCreateTime((int) (createTime.getTime() / 1000));
    tgt.setCreateTimeIsSet(true);
  } else {
    tgt.setCreateTimeIsSet(false);
  }
  String dbName = src.getDatabaseName();
  if (dbName != null) {
    tgt.setDbName(dbName);
    tgt.setDbNameIsSet(true);
  } else {
    tgt.setDbNameIsSet(false);
  }
  Date lastAccessTime = src.getLastAccessTime();
  if (lastAccessTime != null) {
    tgt.setLastAccessTime((int) (lastAccessTime.getTime() / 1000));
    tgt.setLastAccessTimeIsSet(true);
  } else {
    tgt.setLastAccessTimeIsSet(false);
  }
  Map<String, String> params = src.getParameters();

  // A null parameter map causes Hive to throw an NPE,
  // so ensure we do not return a Partition object with a null parameter map.
  if (params == null) {
    params = Maps.newHashMap();
  }

  tgt.setParameters(params);
  tgt.setParametersIsSet(true);

  String tableName = src.getTableName();
  if (tableName != null) {
    tgt.setTableName(tableName);
    tgt.setTableNameIsSet(true);
  } else {
    tgt.setTableNameIsSet(false);
  }

  List<String> values = src.getValues();
  if (values != null) {
    tgt.setValues(values);
    tgt.setValuesIsSet(true);
  } else {
    tgt.setValuesIsSet(false);
  }

  com.amazonaws.services.glue.model.StorageDescriptor sd = src.getStorageDescriptor();
  if (sd != null) {
    StorageDescriptor hiveSd = convertStorageDescriptor(sd);
    tgt.setSd(hiveSd);
    tgt.setSdIsSet(true);
  } else {
    tgt.setSdIsSet(false);
  }

  return tgt;
}
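The setXxxIsSet(...) calls above come from the Thrift-generated Partition class and control whether an optional field is treated as present. A small illustrative check of that behavior, not part of the original converter:

Partition p = new Partition();
System.out.println(p.isSetCreateTime());  // false: no create time has been assigned yet
p.setCreateTime((int) (System.currentTimeMillis() / 1000));
System.out.println(p.isSetCreateTime());  // true: setCreateTime also marks the field as set
p.setCreateTimeIsSet(false);              // explicitly mark the field as unset again
System.out.println(p.isSetCreateTime());  // false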
 
Example 8
Source File: HiveConvertersImpl.java    From metacat with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public Partition metacatToHivePartition(final PartitionDto partitionDto, @Nullable final TableDto tableDto) {
    final Partition result = new Partition();

    final QualifiedName name = partitionDto.getName();
    List<String> values = Lists.newArrayListWithCapacity(16);
    String databaseName = null;
    String tableName = null;
    if (name != null) {
        if (name.getPartitionName() != null) {
            //
            // Unescape the partition name to get the right partition values.
            // Partition names are always escaped, whereas the partition values are not.
            //
            values = getPartValsFromName(tableDto, name.getPartitionName());
        }

        if (name.getDatabaseName() != null) {
            databaseName = name.getDatabaseName();
        }

        if (name.getTableName() != null) {
            tableName = name.getTableName();
        }
    }
    result.setValues(values);
    result.setDbName(databaseName);
    result.setTableName(tableName);

    Map<String, String> metadata = partitionDto.getMetadata();
    if (metadata == null) {
        metadata = Maps.newHashMap();
    }
    result.setParameters(metadata);

    result.setSd(fromStorageDto(partitionDto.getSerde(), tableName));
    final StorageDescriptor sd = result.getSd();
    if (tableDto != null) {
        if (sd.getSerdeInfo() != null && tableDto.getSerde() != null && Strings.isNullOrEmpty(
            sd.getSerdeInfo().getSerializationLib())) {
            sd.getSerdeInfo().setSerializationLib(tableDto.getSerde().getSerializationLib());
        }

        final List<FieldDto> fields = tableDto.getFields();
        if (fields == null) {
            sd.setCols(Collections.emptyList());
        } else {
            sd.setCols(fields.stream()
                .filter(field -> !field.isPartition_key())
                .map(this::metacatToHiveField)
                .collect(Collectors.toList()));
        }
    }

    final AuditDto auditDto = partitionDto.getAudit();
    if (auditDto != null) {
        if (auditDto.getCreatedDate() != null) {
            result.setCreateTime(dateToEpochSeconds(auditDto.getCreatedDate()));
        }
        if (auditDto.getLastModifiedDate() != null) {
            result.setLastAccessTime(dateToEpochSeconds(auditDto.getLastModifiedDate()));
        }
    }

    return result;
}