Java Code Examples for org.apache.hadoop.hive.serde2.typeinfo.TypeInfo#getTypeName()

The following examples show how to use org.apache.hadoop.hive.serde2.typeinfo.TypeInfo#getTypeName(). They are taken from open source projects; the source file, project, and license are noted above each example.
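getTypeName() returns the canonical Hive type string for a TypeInfo, such as "int", "decimal(10,2)", or "map<string,int>". Before the project examples, here is a minimal standalone sketch of that behavior (the class name TypeNameDemo is illustrative and not part of any project below):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeNameDemo {
  public static void main(String[] args) {
    // Primitive types print their plain Hive names.
    TypeInfo stringType = TypeInfoFactory.stringTypeInfo;
    System.out.println(stringType.getTypeName());    // string

    // Parameterized primitives include their parameters in the name.
    TypeInfo decimalType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
    System.out.println(decimalType.getTypeName());   // decimal(10,2)

    // Complex types render their full nested signature.
    TypeInfo mapType = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    System.out.println(mapType.getTypeName());       // map<string,int>
  }
}
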
Example 1
Source File: HiveDynamoDBTypeFactory.java    From emr-dynamodb-connector with Apache License 2.0
public static HiveDynamoDBType getTypeObjectFromHiveType(TypeInfo typeInfo) {
  // Exact match against the simple (scalar) Hive-to-DynamoDB type map.
  if (SIMPLE_HIVE_DYNAMODB_TYPES_MAP.containsKey(typeInfo)) {
    return SIMPLE_HIVE_DYNAMODB_TYPES_MAP.get(typeInfo);
  }
  // Otherwise look for a complex type handler that supports this Hive type.
  for (HiveDynamoDBType type : COMPLEX_HIVE_DYNAMODB_TYPES_SET) {
    if (type.supportsHiveType(typeInfo)) {
      return type;
    }
  }
  throw new IllegalArgumentException("Unsupported Hive type: " + typeInfo.getTypeName());
}
 
Example 2
Source File: IndexROutputFormat.java    From indexr with Apache License 2.0
private static SegmentSchema convertToIndexRSchema(List<String> columnNames,
                                                   List<TypeInfo> columnTypes,
                                                   Set<String> indexColumns) throws IOException {
    List<ColumnSchema> schemas = new ArrayList<ColumnSchema>();
    for (int i = 0; i < columnNames.size(); i++) {
        String currentColumn = columnNames.get(i);
        TypeInfo currentType = columnTypes.get(i);
        SQLType convertedType = null;

        if (currentType.equals(TypeInfoFactory.intTypeInfo)) {
            convertedType = SQLType.INT;
        } else if (currentType.equals(TypeInfoFactory.longTypeInfo)) {
            convertedType = SQLType.BIGINT;
        } else if (currentType.equals(TypeInfoFactory.floatTypeInfo)) {
            convertedType = SQLType.FLOAT;
        } else if (currentType.equals(TypeInfoFactory.doubleTypeInfo)) {
            convertedType = SQLType.DOUBLE;
        } else if (currentType.equals(TypeInfoFactory.stringTypeInfo)) {
            convertedType = SQLType.VARCHAR;
        } else if (currentType.equals(TypeInfoFactory.dateTypeInfo)) {
            convertedType = SQLType.DATE;
        } else if (currentType.equals(TypeInfoFactory.timestampTypeInfo)) {
            convertedType = SQLType.DATETIME;
        } else {
            throw new IOException("can't recognize this type [" + currentType.getTypeName() + "]");
        }

        boolean isIndexed = indexColumns.contains(currentColumn.toLowerCase());
        schemas.add(new ColumnSchema(currentColumn, convertedType, isIndexed));
    }
    return new SegmentSchema(schemas);
}
 
Example 3
Source File: HiveType.java    From presto with Apache License 2.0
private HiveType(TypeInfo typeInfo)
{
    requireNonNull(typeInfo, "typeInfo is null");
    this.hiveTypeName = new HiveTypeName(typeInfo.getTypeName());
    this.typeInfo = typeInfo;
}
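The type name stored here is Hive's canonical rendering, so it can be parsed back into an equal TypeInfo. A small round-trip sketch (not part of the Presto source; it only assumes TypeInfoUtils from hive-serde is on the classpath):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeNameRoundTrip {
  public static void main(String[] args) {
    // Parse a nested Hive type, take its canonical name, and parse it back.
    TypeInfo original =
        TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,tags:array<string>>");
    String name = original.getTypeName();               // struct<id:int,tags:array<string>>
    TypeInfo roundTripped = TypeInfoUtils.getTypeInfoFromTypeString(name);
    System.out.println(original.equals(roundTripped));  // true
  }
}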
 
Example 4
Source File: CassandraColumnSerDe.java    From Hive-Cassandra with Apache License 2.0
/**
 * Initialize the cassandra serialization and deserialization parameters from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraColumnFamily = getCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
  for (String columnName : cassandraColumnNames) {
    cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
  }

  iKey = cassandraColumnNames.indexOf(AbstractColumnSerDe.CASSANDRA_KEY_COLUMN);

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

  validatorType = parseOrCreateValidatorType(tbl);

  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has " +
        serdeParams.getColumnNames().size() +
        " elements while cassandra.columns.mapping has " +
        cassandraColumnNames.size() + " elements" +
        " (counting the key if implicit)");
  }

  // we can only make sure that "StandardColumn:" is mapped to MAP<String,?>
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      if ((typeInfo.getCategory() != Category.MAP) ||
          !Constants.STRING_TYPE_NAME.equals(
              ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {

        throw new SerDeException(
            serdeName + ": Cassandra column family '"
                + cassandraColName
                + "' should be mapped to map<string,?> but is mapped to "
                + typeInfo.getTypeName());
      }
    }
  }
}
 
Example 5
Source File: HiveSchemaConverter.java    From kite with Apache License 2.0
@VisibleForTesting
static Schema convert(LinkedList<String> path, String name,
                      TypeInfo type, Collection<String[]> required) {
  switch (type.getCategory()) {
    case PRIMITIVE:
      if (type.getClass() == charClass || type.getClass() == varcharClass) {
        // this is required because type name includes length
        return Schema.create(Schema.Type.STRING);
      }

      String typeInfoName = type.getTypeName();
      Preconditions.checkArgument(TYPEINFO_TO_TYPE.containsKey(typeInfoName),
          "Cannot convert unsupported type: %s", typeInfoName);
      return Schema.create(TYPEINFO_TO_TYPE.get(typeInfoName));

    case LIST:
      return Schema.createArray(optional(convert(path, name,
          ((ListTypeInfo) type).getListElementTypeInfo(), required)));

    case MAP:
      MapTypeInfo mapType = (MapTypeInfo) type;
      Preconditions.checkArgument(
          "string".equals(mapType.getMapKeyTypeInfo().toString()),
          "Non-String map key type: %s", mapType.getMapKeyTypeInfo());

      return Schema.createMap(optional(convert(path, name,
          mapType.getMapValueTypeInfo(), required)));

    case STRUCT:
      return convert(path, name, (StructTypeInfo) type, required);

    case UNION:
      List<TypeInfo> unionTypes = ((UnionTypeInfo) type)
          .getAllUnionObjectTypeInfos();

      // add NULL so all union types are optional
      List<Schema> types = Lists.newArrayList(NULL);
      for (int i = 0; i < unionTypes.size(); i += 1) {
        // types within unions cannot be required
        types.add(convert(
            path, name + "_" + i, unionTypes.get(i), NO_REQUIRED_FIELDS));
      }

      return Schema.createUnion(types);

    default:
      throw new IllegalArgumentException(
          "Unknown TypeInfo category: " + type.getCategory());
  }
}