Java Code Examples for org.apache.hadoop.hive.metastore.Warehouse#makeSpecFromName()

The following examples show how to use org.apache.hadoop.hive.metastore.Warehouse#makeSpecFromName(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: PartitionUtil.java    From metacat with Apache License 2.0 6 votes vote down vote up
/**
 * Retrieves the partition values from the partition name. Also validates that every
 * partition key defined on the table is present in the parsed partition spec.
 *
 * @param tableQName  table name
 * @param table       table
 * @param partName    partition name
 * @return list of partition values, ordered by the table's partition keys
 */
public static List<String> getPartValuesFromPartName(final QualifiedName tableQName, final Table table,
    final String partName) {
    if (Strings.isNullOrEmpty(partName)) {
        throw new InvalidMetaException(tableQName, partName, null);
    }
    // Parse "k1=v1/k2=v2/..." into an ordered key -> value map.
    final LinkedHashMap<String, String> spec = new LinkedHashMap<>();
    Warehouse.makeSpecFromName(spec, new Path(partName));
    final List<String> partValues = new ArrayList<>(table.getPartitionKeys().size());
    for (final FieldSchema partitionKey : table.getPartitionKeys()) {
        final String value = spec.get(partitionKey.getName());
        if (value == null) {
            // A partition key declared on the table is missing from the name: reject it.
            throw new InvalidMetaException(tableQName, partName, null);
        }
        partValues.add(value);
    }
    return partValues;
}
 
Example 2
Source File: GlueMetastoreClientDelegate.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a partition name of the form {@code k1=v1/k2=v2/...} into its ordered
 * list of partition values.
 *
 * <p>Taken from HiveMetaStore#partition_name_to_vals.
 *
 * @param name partition name; must not be null
 * @return the partition values in spec order; empty list for an empty name
 * @throws TException if the name cannot be parsed into a partition spec
 */
public List<String> partitionNameToVals(String name) throws TException {
  checkNotNull(name, "name cannot be null");
  if (name.isEmpty()) {
    return Lists.newArrayList();
  }
  // makeSpecFromName preserves key order, so the values come out in spec order.
  return Lists.newArrayList(Warehouse.makeSpecFromName(name).values());
}
 
Example 3
Source File: AWSCatalogMetastoreClient.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a partition name of the form {@code k1=v1/k2=v2/...} into a
 * key -> value partition spec map.
 *
 * @param name partition name
 * @return partition spec; an empty map for an empty name
 * @throws MetaException if the name cannot be parsed
 */
@Override
public Map<String, String> partitionNameToSpec(String name) throws MetaException, TException {
    // Lifted from HiveMetaStore
    if (name.isEmpty()) {
        return new HashMap<>();
    }
    return Warehouse.makeSpecFromName(name);
}
 
Example 4
Source File: AWSCatalogMetastoreClient.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a partition name of the form {@code k1=v1/k2=v2/...} into a
 * key -> value partition spec map.
 *
 * @param name partition name
 * @return partition spec; an empty map for an empty name
 * @throws MetaException if the name cannot be parsed
 */
@Override
public Map<String, String> partitionNameToSpec(String name) throws MetaException, TException {
  // Lifted from HiveMetaStore
  if (name.isEmpty()) {
    return new HashMap<>();
  }
  return Warehouse.makeSpecFromName(name);
}
 
Example 5
Source File: HiveTableOutputFormat.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Finalizes the job: commits staged output and, for partitioned tables, loads the
 * resulting partitions through the Hive metastore client. The staging directory is
 * always deleted afterwards, even on failure.
 *
 * @param parallelism number of parallel subtasks (not referenced in this method)
 * @throws IOException if obtaining the file system or committing/moving files fails
 */
@Override
public void finalizeGlobal(int parallelism) throws IOException {
	StorageDescriptor jobSD = hiveTablePartition.getStorageDescriptor();
	Path stagingDir = new Path(jobSD.getLocation());
	FileSystem fs = stagingDir.getFileSystem(jobConf);
	try (HiveMetastoreClientWrapper client = HiveMetastoreClientFactory.create(new HiveConf(jobConf, HiveConf.class), hiveVersion)) {
		Table table = client.getTable(tablePath.getDatabaseName(), tablePath.getObjectName());
		if (!isDynamicPartition) {
			// Static-partition or non-partitioned output: all data sits directly under stagingDir.
			commitJob(stagingDir.toString());
		}
		if (isPartitioned) {
			if (isDynamicPartition) {
				// Dynamic partitions: each generated partition is a subdirectory of stagingDir at a
				// depth equal to the number of dynamic partition columns (total partition columns
				// minus the statically specified ones).
				FileStatus[] generatedParts = HiveStatsUtils.getFileStatusRecurse(stagingDir,
					partitionColumns.size() - hiveTablePartition.getPartitionSpec().size(), fs);
				for (FileStatus part : generatedParts) {
					// Commit each partition directory, then recover its full spec
					// (column -> value) from the "k1=v1/k2=v2/..." path segments.
					commitJob(part.getPath().toString());
					LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<>();
					Warehouse.makeSpecFromName(fullPartSpec, part.getPath());
					loadPartition(part.getPath(), table, fullPartSpec, client);
				}
			} else {
				// Static partition: the spec is fully known up front from hiveTablePartition.
				LinkedHashMap<String, String> partSpec = new LinkedHashMap<>();
				for (String partCol : hiveTablePartition.getPartitionSpec().keySet()) {
					partSpec.put(partCol, hiveTablePartition.getPartitionSpec().get(partCol).toString());
				}
				loadPartition(stagingDir, table, partSpec, client);
			}
		} else {
			// Non-partitioned table: move committed files straight into the table location.
			moveFiles(stagingDir, new Path(table.getSd().getLocation()));
		}
	} catch (TException e) {
		// Surface metastore failures as a catalog-level exception, preserving the cause.
		throw new CatalogException("Failed to query Hive metaStore", e);
	} finally {
		// Clean up staged data unconditionally.
		fs.delete(stagingDir, true);
	}
}
 
Example 6
Source File: HiveConvertersImpl.java    From metacat with Apache License 2.0 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public List<String> getPartValsFromName(@Nullable final TableDto tableDto, final String partName) {
    // Parse the partition name into an ordered key -> value spec.
    final LinkedHashMap<String, String> spec;
    try {
        spec = Warehouse.makeSpecFromName(partName);
    } catch (MetaException e) {
        throw new IllegalArgumentException("Invalid partition name", e);
    }
    final List<String> partitionKeys = tableDto == null ? null : tableDto.getPartition_keys();
    if (partitionKeys == null) {
        // No table metadata supplied: return the values in spec order without validation.
        return Lists.newArrayList(spec.values());
    }
    // Validate that each of the table's partition keys appears in the parsed spec,
    // returning the values in the order of the table's partition keys.
    final List<String> partVals = Lists.newArrayListWithCapacity(partitionKeys.size());
    for (final String partitionKey : partitionKeys) {
        final String value = spec.get(partitionKey);
        if (value == null) {
            throw new IllegalArgumentException("Invalid partition name - missing " + partitionKey);
        }
        partVals.add(value);
    }
    return partVals;
}
 
Example 7
Source File: PartitionUtil.java    From metacat with Apache License 2.0 2 votes vote down vote up
/**
 * Escape partition name.
 *
 * @param partName    partition name
 * @return Escaped partition name
 */
public static String escapePartitionName(final String partName) {
    // Round-trip through the parsed spec: makeSpecFromName decodes the path
    // segments, and makePartName re-encodes them with proper escaping.
    final LinkedHashMap<String, String> spec = new LinkedHashMap<>();
    Warehouse.makeSpecFromName(spec, new Path(partName));
    final List<String> keys = new ArrayList<>(spec.keySet());
    final List<String> values = new ArrayList<>(spec.values());
    return FileUtils.makePartName(keys, values);
}