Java Code Examples for org.apache.hadoop.hive.metastore.api.FieldSchema#getType()

The following examples show how to use org.apache.hadoop.hive.metastore.api.FieldSchema#getType(). They are taken from open source projects; the source file and originating project are noted above each example.
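Before the examples, here is a minimal, self-contained sketch of the method itself. FieldSchema is a Thrift-generated struct holding a column's name, type, and comment; getType() returns the Hive type as a plain string, including any modifiers (for example decimal(10,2)). The column values below are made up for illustration.

import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class FieldSchemaTypeDemo {
    public static void main(String[] args) {
        // FieldSchema(name, type, comment) -- the Thrift-generated all-args constructor
        FieldSchema column = new FieldSchema("price", "decimal(10,2)", "unit price");

        // getType() returns the raw Hive type string, modifiers included
        System.out.println(column.getName() + " : " + column.getType());
        // prints: price : decimal(10,2)
    }
}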
Example 1
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
		Map<String, CatalogColumnStatisticsDataBase> colStats,
		StorageDescriptor sd,
		ColumnStatisticsDesc desc) {
	List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

	for (FieldSchema field : sd.getCols()) {
		String hiveColName = field.getName();
		String hiveColType = field.getType();
		CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
		if (null != flinkColStat) {
			ColumnStatisticsData statsData =
					getColumnStatisticsData(HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)), flinkColStat);
			ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
			colStatsList.add(columnStatisticsObj);
		}
	}

	return new ColumnStatistics(desc, colStatsList);
}
 
Example 2
Source File: HiveClientWrapper.java    From pxf with Apache License 2.0
private String serializePartitionKeys(HiveTablePartition partData) {
    if (partData.partition == null) {
        /* this is a simple hive table - there are no partitions */
        return HiveDataFragmenter.HIVE_NO_PART_TBL;
    }

    StringBuilder partitionKeys = new StringBuilder();
    String prefix = "";
    ListIterator<String> valsIter = partData.partition.getValues().listIterator();
    ListIterator<FieldSchema> keysIter = partData.partitionKeys.listIterator();
    while (valsIter.hasNext() && keysIter.hasNext()) {
        FieldSchema key = keysIter.next();
        String name = key.getName();
        String type = key.getType();
        String val = valsIter.next();
        String oneLevel = prefix + name + HiveDataFragmenter.HIVE_1_PART_DELIM + type
                + HiveDataFragmenter.HIVE_1_PART_DELIM + val;
        partitionKeys.append(oneLevel);
        prefix = HiveDataFragmenter.HIVE_PARTITIONS_DELIM;
    }

    return partitionKeys.toString();
}
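For illustration: a table partitioned by (year string = '2020', region string = 'us') would serialize to one segment per partition key, with each key's name, type, and value joined by HIVE_1_PART_DELIM and the segments joined by HIVE_PARTITIONS_DELIM. The concrete delimiter characters are defined in HiveDataFragmenter and are not shown in this snippet.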
 
Example 3
Source File: HiveMetaStoreBridge.java    From atlas with Apache License 2.0
private String getCreateTableString(Table table, String location){
    String            colString = "";
    List<FieldSchema> colList   = table.getAllCols();

    if (colList != null) {
        for (FieldSchema col : colList) {
            colString += col.getName() + " " + col.getType() + ",";
        }

        if (colList.size() > 0) {
            colString = colString.substring(0, colString.length() - 1);
            colString = "(" + colString + ")";
        }
    }

    String query = "create external table " + table.getTableName() +  colString + " location '" + location + "'";

    return query;
}
 
Example 4
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
		Map<String, CatalogColumnStatisticsDataBase> colStats,
		StorageDescriptor sd,
		ColumnStatisticsDesc desc,
		String hiveVersion) {
	List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

	for (FieldSchema field : sd.getCols()) {
		String hiveColName = field.getName();
		String hiveColType = field.getType();
		CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
		if (null != flinkColStat) {
			ColumnStatisticsData statsData = getColumnStatisticsData(
					HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)),
					flinkColStat,
					hiveVersion);
			ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
			colStatsList.add(columnStatisticsObj);
		}
	}

	return new ColumnStatistics(desc, colStatsList);
}
 
Example 5
Source File: HiveUtilities.java    From pxf with Apache License 2.0
/**
 * Checks if hive type is supported, and if so return its matching GPDB
 * type. Unsupported types will result in an exception. <br>
 * The supported mappings are:
 * <ul>
 * <li>{@code tinyint -> int2}</li>
 * <li>{@code smallint -> int2}</li>
 * <li>{@code int -> int4}</li>
 * <li>{@code bigint -> int8}</li>
 * <li>{@code boolean -> bool}</li>
 * <li>{@code float -> float4}</li>
 * <li>{@code double -> float8}</li>
 * <li>{@code string -> text}</li>
 * <li>{@code binary -> bytea}</li>
 * <li>{@code timestamp -> timestamp}</li>
 * <li>{@code date -> date}</li>
 * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
 * <li>{@code varchar(size) -> varchar(size)}</li>
 * <li>{@code char(size) -> bpchar(size)}</li>
 * <li>{@code array<dataType> -> text}</li>
 * <li>{@code map<keyDataType, valueDataType> -> text}</li>
 * <li>{@code struct<field1:dataType,...,fieldN:dataType> -> text}</li>
 * <li>{@code uniontype<...> -> text}</li>
 * </ul>
 *
 * @param hiveColumn hive column schema
 * @return field with mapped GPDB type and modifiers
 * @throws UnsupportedTypeException if the column type is not supported
 * @see EnumHiveToGpdbType
 */
public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
    String fieldName = hiveColumn.getName();
    String hiveType = hiveColumn.getType(); // Type name and modifiers if any
    String hiveTypeName; // Type name
    String[] modifiers = null; // Modifiers
    EnumHiveToGpdbType hiveToGpdbType = EnumHiveToGpdbType.getHiveToGpdbType(hiveType);
    EnumGpdbType gpdbType = hiveToGpdbType.getGpdbType();

    if (hiveToGpdbType.getSplitExpression() != null) {
        String[] tokens = hiveType.split(hiveToGpdbType.getSplitExpression());
        hiveTypeName = tokens[0];
        if (gpdbType.getModifiersNum() > 0) {
            modifiers = Arrays.copyOfRange(tokens, 1, tokens.length);
            if (modifiers.length != gpdbType.getModifiersNum()) {
                throw new UnsupportedTypeException(
                        "GPDB does not support type " + hiveType
                                + " (Field " + fieldName + "), "
                                + "expected number of modifiers: "
                                + gpdbType.getModifiersNum()
                                + ", actual number of modifiers: "
                                + modifiers.length);
            }
            if (!verifyIntegerModifiers(modifiers)) {
                throw new UnsupportedTypeException("GPDB does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
            }
        }
    } else {
        hiveTypeName = hiveType;
    }

    return new Metadata.Field(fieldName, gpdbType, hiveToGpdbType.isComplexType(), hiveTypeName, modifiers);
}
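A hedged usage sketch of mapHiveType, showing how a type string with modifiers is handled. The FieldSchema value is made up, and the behavior described in the comments follows the code above; no accessors on the returned Metadata.Field are assumed.

// made-up column for illustration
FieldSchema priceColumn = new FieldSchema("price", "decimal(10,2)", null);

// "decimal(10,2)" is split on the type's split expression into the type
// name "decimal" and the integer modifiers {"10", "2"}, which map to the
// GPDB numeric type with the same precision and scale
Metadata.Field mapped = HiveUtilities.mapHiveType(priceColumn);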
 
Example 6
Source File: SchemaUtils.java    From beam with Apache License 2.0
private static Schema.Field toBeamField(FieldSchema field) {
  String name = field.getName();
  HCatFieldSchema hCatFieldSchema;

  try {
    hCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(field);
  } catch (HCatException e) {
    // Converting checked Exception to unchecked Exception.
    throw new UnsupportedOperationException(
        "Error while converting FieldSchema to HCatFieldSchema", e);
  }

  switch (hCatFieldSchema.getCategory()) {
    case PRIMITIVE:
      {
        if (!HCAT_TO_BEAM_TYPES_MAP.containsKey(hCatFieldSchema.getType())) {
          throw new UnsupportedOperationException(
              "The Primitive HCat type '"
                  + field.getType()
                  + "' of field '"
                  + name
                  + "' cannot be converted to Beam FieldType");
        }

        FieldType fieldType = HCAT_TO_BEAM_TYPES_MAP.get(hCatFieldSchema.getType());
        return Schema.Field.of(name, fieldType).withNullable(true);
      }
      // TODO: Add Support for Complex Types i.e. ARRAY, MAP, STRUCT
    default:
      throw new UnsupportedOperationException(
          "The category '" + hCatFieldSchema.getCategory() + "' is not supported.");
  }
}
 
Example 7
Source File: HiveCatalogUtil.java    From tajo with Apache License 2.0
public static void validateSchema(Table tblSchema) {
  for (FieldSchema fieldSchema : tblSchema.getCols()) {
    String fieldType = fieldSchema.getType();
    if (fieldType.equalsIgnoreCase("ARRAY") || fieldType.equalsIgnoreCase("STRUCT")
      || fieldType.equalsIgnoreCase("MAP")) {
      throw new TajoRuntimeException(new UnsupportedException("data type '" + fieldType.toUpperCase() + "'"));
    }
  }
}
 
Example 8
Source File: TestSchemaConversion.java    From kite with Apache License 2.0
@Override
public String apply(@Nullable FieldSchema input) {
  if (input != null) {
    return input.getType();
  } else {
    return null;
  }
}
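In context, this apply method appears to be the body of a Guava Function<FieldSchema, String> used by the kite tests to project a column list to its type strings. A hedged sketch of how such a function is typically applied (the variable names here are made up):

// transform every column of a Hive metastore Table into its type string
List<String> columnTypes = Lists.transform(table.getSd().getCols(), getTypeFunction);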