Java Code Examples for org.apache.hive.hcatalog.data.schema.HCatFieldSchema#Type

The following examples show how to use org.apache.hive.hcatalog.data.schema.HCatFieldSchema#Type. You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: SqoopHCatImportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a CLOB reference into the Hive string-family value expected by
 * the target HCatalog column (STRING, VARCHAR or CHAR).
 *
 * @param val the incoming value; must be a {@link ClobRef}
 * @param hfs the schema of the target HCatalog field
 * @return the converted value, or {@code null} when the target field type
 *         is not one of the string-family types
 */
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  ClobRef clob = (ClobRef) val;
  // An external CLOB is represented by its reference string; an inline one
  // carries its data directly.
  String text = clob.isExternal() ? clob.toString() : clob.getData();

  switch (hfs.getType()) {
    case STRING:
      return text;
    case VARCHAR:
      VarcharTypeInfo varcharInfo = (VarcharTypeInfo) hfs.getTypeInfo();
      return new HiveVarchar(text, varcharInfo.getLength());
    case CHAR:
      CharTypeInfo charInfo = (CharTypeInfo) hfs.getTypeInfo();
      return new HiveChar(text, charInfo.getLength());
    default:
      return null;
  }
}
 
Example 2
Source File: SqoopHCatExportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 5 votes vote down vote up
/**
 * Fills the shared {@code sqoopRecord} template with the values of the
 * given HCatalog record, converting each field to the Java/SQL type the
 * corresponding Sqoop column expects (or to plain strings in ODPS mode).
 *
 * @param hcr the HCatalog record to convert
 * @return the shared {@code sqoopRecord} with all fields set
 * @throws IOException if a field value cannot be converted
 */
public SqoopRecord convertToSqoopRecord(HCatRecord hcr)
  throws IOException {
  Text lookupKey = new Text();
  for (Map.Entry<String, Object> entry : sqoopRecord.getFieldMap().entrySet()) {
    String columnName = entry.getKey();
    // HCatalog field names are the lower-cased Sqoop column names.
    String hCatFieldName = columnName.toLowerCase();
    lookupKey.set(hCatFieldName);
    Object hCatVal = hcr.get(hCatFieldName, hCatFullTableSchema);

    if (isOdps) {
      // ODPS mode transfers every value as its string representation.
      sqoopRecord.setField(columnName, hCatVal == null ? null : hCatVal.toString());
      continue;
    }

    String javaColType = colTypesJava.get(lookupKey).toString();
    int sqlType = ((IntWritable) colTypesSql.get(lookupKey)).get();
    HCatFieldSchema field = hCatFullTableSchema.get(hCatFieldName);
    Object sqlVal =
        convertToSqoop(hCatVal, field.getType(), javaColType, field.getTypeString());
    if (debugHCatExportMapper) {
      LOG.debug("hCatVal " + hCatVal + " of type "
          + (hCatVal == null ? null : hCatVal.getClass().getName()) + ",sqlVal " + sqlVal
          + " of type " + (sqlVal == null ? null : sqlVal.getClass().getName()) + ",java type "
          + javaColType + ", sql type = " + SqoopHCatUtilities.sqlTypeString(sqlType));
    }
    sqoopRecord.setField(columnName, sqlVal);
  }
  return sqoopRecord;
}
 
Example 3
Source File: SqoopHCatImportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a value to the Hive object expected by a string-family or
 * DECIMAL HCatalog column.
 *
 * <p>For STRING/VARCHAR/CHAR targets the Hive delimiter replacement is
 * applied first (when enabled); for DECIMAL the value is parsed with
 * DECIMAL128 precision.
 *
 * @param val the source value; its {@code toString()} form is converted
 * @param hfs the schema of the target HCatalog field
 * @return the converted value, or {@code null} if the target type is not
 *         handled here
 */
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      return new HiveVarchar(str, vti.getLength());
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      // BUG FIX: previously built the HiveChar from val.toString(), which
      // silently discarded the Hive delimiter replacement applied above.
      // Use the processed string, consistent with STRING and VARCHAR.
      return new HiveChar(str, cti.getLength());
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    return HiveDecimal.create(bd);
  }
  return null;
}
 
Example 4
Source File: SqoopHCatImportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a Boolean value to the representation required by the target
 * HCatalog column: the boolean itself, a 1/0 numeric value, or a
 * string-family value built from {@code toString()}.
 *
 * @param val the incoming value; must be a {@link Boolean}
 * @param hfs the schema of the target HCatalog field
 * @return the converted value, or {@code null} if the target type is not
 *         handled here
 */
private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
  Boolean b = (Boolean) val;
  // Numeric targets map true -> 1 and false -> 0.
  byte bit = (byte) (b ? 1 : 0);
  switch (hfs.getType()) {
    case BOOLEAN:
      return b;
    case TINYINT:
      return bit;
    case SMALLINT:
      return (short) bit;
    case INT:
      return (int) bit;
    case BIGINT:
      return (long) bit;
    case FLOAT:
      return (float) bit;
    case DOUBLE:
      return (double) bit;
    case STRING:
      return val.toString();
    case VARCHAR:
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      return new HiveVarchar(val.toString(), vti.getLength());
    case CHAR:
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      return new HiveChar(val.toString(), cti.getLength());
    default:
      return null;
  }
}
 
Example 5
Source File: SqoopHCatExportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 4 votes vote down vote up
/**
 * Converts an HCatalog field value into the Java object required by the
 * corresponding Sqoop record column.
 *
 * @param val            the HCatalog value, may be {@code null}
 * @param fieldType      the HCatalog type of the source field
 * @param javaColType    the Java type name of the target Sqoop column
 * @param hCatTypeString the HCatalog type string, used for error reporting
 * @return the converted value, or {@code null} (after logging an error)
 *         when no conversion applies
 * @throws IOException if the HCatalog field type cannot be exported at all
 *                     (ARRAY, MAP, STRUCT or unknown)
 */
private Object convertToSqoop(Object val,
  HCatFieldSchema.Type fieldType, String javaColType,
  String hCatTypeString) throws IOException {

  if (val == null) {
    return null;
  }

  Object converted = null;
  switch (fieldType) {
    case INT:
    case TINYINT:
    case SMALLINT:
    case FLOAT:
    case DOUBLE:
      converted = convertNumberTypes(val, javaColType);
      break;
    case BOOLEAN:
      converted = convertBooleanTypes(val, javaColType);
      break;
    case BIGINT:
      // A BIGINT may carry epoch milliseconds for date/time target columns.
      if (javaColType.equals(DATE_TYPE)) {
        converted = new Date((Long) val);
      } else if (javaColType.equals(TIME_TYPE)) {
        converted = new Time((Long) val);
      } else if (javaColType.equals(TIMESTAMP_TYPE)) {
        converted = new Timestamp((Long) val);
      } else {
        converted = convertNumberTypes(val, javaColType);
      }
      break;
    case DATE: {
      Date date = (Date) val;
      if (javaColType.equals(DATE_TYPE)) {
        converted = date;
      } else if (javaColType.equals(TIME_TYPE)) {
        converted = new Time(date.getTime());
      } else if (javaColType.equals(TIMESTAMP_TYPE)) {
        converted = new Timestamp(date.getTime());
      }
      break;
    }
    case TIMESTAMP: {
      Timestamp ts = (Timestamp) val;
      if (javaColType.equals(DATE_TYPE)) {
        converted = new Date(ts.getTime());
      } else if (javaColType.equals(TIME_TYPE)) {
        converted = new Time(ts.getTime());
      } else if (javaColType.equals(TIMESTAMP_TYPE)) {
        converted = ts;
      }
      break;
    }
    case STRING:
    case VARCHAR:
    case CHAR:
      converted = convertStringTypes(val, javaColType);
      break;
    case BINARY:
      converted = convertBinaryTypes(val, javaColType);
      break;
    case DECIMAL:
      converted = convertDecimalTypes(val, javaColType);
      break;
    default:
      // ARRAY, MAP, STRUCT and anything unrecognized cannot be exported.
      throw new IOException("Cannot convert HCatalog type "
        + fieldType);
  }

  if (converted != null) {
    return converted;
  }
  LOG.error("Cannot convert HCatalog object of "
    + " type " + hCatTypeString + " to java object type "
    + javaColType);
  return null;
}
 
Example 6
Source File: SqoopHCatImportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a Sqoop-side field value to the value stored in the target
 * HCatalog field, dispatching on the runtime type of {@code val}.
 *
 * @param val the Sqoop field value, may be {@code null}
 * @param hfs the schema of the target HCatalog field
 * @return the HCatalog-compatible value, or {@code null} when the value
 *         could not be converted (an error is logged in that case)
 * @throws UnsupportedOperationException if {@code val} has a type this
 *         helper does not recognize
 */
private Object toHCat(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (val == null) {
    return null;
  }

  Object retVal = null;

  if (val instanceof Number) {
    retVal = convertNumberTypes(val, hfs);
  } else if (val instanceof Boolean) {
    retVal = convertBooleanTypes(val, hfs);
  } else if (val instanceof String) {
    retVal = convertStringTypes(val, hfs);
  } else if (val instanceof java.util.Date) {
    retVal = converDateTypes(val, hfs);
  } else if (val instanceof BytesWritable) {
    if (hfsType == HCatFieldSchema.Type.BINARY) {
      BytesWritable bw = (BytesWritable) val;
      // NOTE(review): BytesWritable.getBytes() returns the backing array,
      // which may be longer than getLength() — confirm padding is acceptable.
      retVal = bw.getBytes();
    }
  } else if (val instanceof BlobRef) {
    if (hfsType == HCatFieldSchema.Type.BINARY) {
      BlobRef br = (BlobRef) val;
      // An external BLOB is stored by its reference string; an inline one
      // carries its raw bytes.
      byte[] bytes = br.isExternal() ? br.toString().getBytes() : br
        .getData();
      retVal = bytes;
    }
  } else {
    // Typo fix in the user-facing message: "suported" -> "supported".
    throw new UnsupportedOperationException("Objects of type "
      + val.getClass().getName() + " are not supported");
  }
  if (retVal == null) {
    LOG.error("Unable to convert [" + val
      + "]  of type " + val.getClass().getName()
      + " to HCatalog type " + hfs.getTypeString());
  }
  return retVal;
}
 
Example 7
Source File: HCatalogTestUtils.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the HCatalog field type for this column.
 *
 * @return the {@link HCatFieldSchema.Type} of this column
 */
HCatFieldSchema.Type getHCatType();