Java Code Examples for org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy

The following examples show how to use org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy. They are extracted from open source projects; the source project, file, and license are noted above each example where available.
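Before the examples, here is a minimal, hypothetical sketch (not taken from any of the projects below) of the core pattern most of them rely on: wrapping Thrift PartitionSpec objects with PartitionSpecProxy.Factory.get(...) and walking the result with a PartitionSpecProxy.PartitionIterator. The class and method names in the sketch are placeholders, and the throws clause is kept broad since Factory.get declares checked exceptions in some Hive versions.

import java.util.List;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PartitionSpec;
import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;

public class PartitionSpecProxySketch {

    // Wraps a list of Thrift PartitionSpec objects in a PartitionSpecProxy and
    // iterates every partition it contains, printing each partition's values.
    static int printAndCountPartitions(final List<PartitionSpec> specs) throws Exception {
        final PartitionSpecProxy proxy = PartitionSpecProxy.Factory.get(specs);
        final PartitionSpecProxy.PartitionIterator iterator = proxy.getPartitionIterator();
        int count = 0;
        while (iterator.hasNext()) {
            final Partition partition = iterator.next();
            System.out.println(partition.getValues());
            count++;
        }
        return count;
    }
}
 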
Example 1
Source Project: metacat    Source File: CatalogThriftHiveMetastore.java    License: Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public int add_partitions_pspec(final List<PartitionSpec> newParts) throws TException {
    if (newParts == null || newParts.isEmpty()) {
        return 0;
    }
    final String dbName = newParts.get(0).getDbName();
    final String tableName = newParts.get(0).getTableName();
    return requestWrapper("add_partition", new Object[]{dbName, tableName}, () -> {
        final PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(newParts);
        final PartitionSpecProxy.PartitionIterator partitionIterator = partitionSpecProxy.getPartitionIterator();
        final List<Partition> partitions = addPartitionsCore(dbName, tableName,
            Lists.newArrayList(partitionIterator), false);
        return partitions.size();
    });
}
 
Example 2
public PartitionSpecProxy listPartitionSpecs(
    String dbName,
    String tblName,
    int max
) throws TException {
  throw new UnsupportedOperationException("listPartitionSpecs is not supported.");
}
 
Example 3
public PartitionSpecProxy listPartitionSpecsByFilter(
    String dbName,
    String tblName,
    String filter,
    int max
) throws TException {
  throw new UnsupportedOperationException("listPartitionSpecsByFilter is not supported");
}
 
Example 4
@Override
public int add_partitions_pspec(
    PartitionSpecProxy pSpec
) throws InvalidObjectException, org.apache.hadoop.hive.metastore.api.AlreadyExistsException,
    MetaException, TException {
  return glueMetastoreClientDelegate.addPartitionsSpecProxy(pSpec);
}
 
Example 5
Source Project: metacat    Source File: MetacatHMSHandler.java    License: Apache License 2.0
@SuppressFBWarnings
private void initializeAddedPartition(
        final Table tbl, final PartitionSpecProxy.PartitionIterator part,
        final boolean madeDir) throws MetaException {
    // set create time
    final long time = System.currentTimeMillis() / 1000;
    part.setCreateTime((int) time);
    if (part.getParameters() == null || part.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) {
        part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time));
    }

    // Inherit table properties into partition properties.
    final Map<String, String> tblParams = tbl.getParameters();
    final String inheritProps = getHiveConf().getVar(HiveConf.ConfVars.METASTORE_PART_INHERIT_TBL_PROPS).trim();
    // Default value is empty string in which case no properties will be inherited.
    // * implies all properties need to be inherited
    Set<String> inheritKeys = new HashSet<String>(Arrays.asList(inheritProps.split(",")));
    if (inheritKeys.contains("*")) {
        inheritKeys = tblParams.keySet();
    }

    for (String key : inheritKeys) {
        final String paramVal = tblParams.get(key);
        if (null != paramVal) { // add the property only if it exists in table properties
            part.putToParameters(key, paramVal);
        }
    }
}
 
Example 6
Source Project: flink    Source File: HiveCatalog.java    License: Apache License 2.0
@Override
public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> expressions)
		throws TableNotExistException, TableNotPartitionedException, CatalogException {
	Table hiveTable = getHiveTable(tablePath);
	ensurePartitionedTable(tablePath, hiveTable);
	List<String> partColNames = getFieldNames(hiveTable.getPartitionKeys());
	Optional<String> filter = HiveTableUtil.makePartitionFilter(
			getNonPartitionFields(hiveConf, hiveTable).size(), partColNames, expressions, hiveShim);
	if (!filter.isPresent()) {
		throw new UnsupportedOperationException(
				"HiveCatalog is unable to handle the partition filter expressions: " + expressions);
	}
	try {
		PartitionSpecProxy partitionSpec = client.listPartitionSpecsByFilter(
				tablePath.getDatabaseName(), tablePath.getObjectName(), filter.get(), (short) -1);
		List<CatalogPartitionSpec> res = new ArrayList<>(partitionSpec.size());
		PartitionSpecProxy.PartitionIterator partitions = partitionSpec.getPartitionIterator();
		while (partitions.hasNext()) {
			Partition partition = partitions.next();
			Map<String, String> spec = new HashMap<>();
			for (int i = 0; i < partColNames.size(); i++) {
				spec.put(partColNames.get(i), partition.getValues().get(i));
			}
			res.add(new CatalogPartitionSpec(spec));
		}
		return res;
	} catch (TException e) {
		throw new UnsupportedOperationException(
				"Failed to list partition by filter from HMS, filter expressions: " + expressions, e);
	}
}
 
Example 7
public int addPartitionsSpecProxy(PartitionSpecProxy pSpec) throws TException {
  throw new UnsupportedOperationException("addPartitionsSpecProxy is unsupported");
}
 
Example 8
@Override
public PartitionSpecProxy listPartitionSpecs(String dbName, String tblName, int max) throws TException {
  return glueMetastoreClientDelegate.listPartitionSpecs(dbName, tblName, max);
}
 
Example 9
@Override
public PartitionSpecProxy listPartitionSpecsByFilter(String dbName, String tblName, String filter, int max)
    throws MetaException, NoSuchObjectException, TException {
  return glueMetastoreClientDelegate.listPartitionSpecsByFilter(dbName, tblName, filter, max);
}
 
Example 10
Source Project: metacat    Source File: MetacatHMSHandler.java    License: Apache License 2.0
private void initializeAddedPartition(
        final Table tbl, final Partition part, final boolean madeDir) throws MetaException {
    initializeAddedPartition(tbl, new PartitionSpecProxy.SimplePartitionWrapperIterator(part), madeDir);
}
 
Example 11
Source Project: flink    Source File: HiveMetastoreClientWrapper.java    License: Apache License 2.0
public PartitionSpecProxy listPartitionSpecsByFilter(String dbName, String tblName, String filter, short max) throws TException {
	return client.listPartitionSpecsByFilter(dbName, tblName, filter, max);
}