Java Code Examples for org.apache.spark.sql.types.DataTypes#DateType

The following examples show how to use org.apache.spark.sql.types.DataTypes#DateType. Each example is drawn from an open-source project; the source file, originating project, and license are noted above each snippet.
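Before the project examples, here is a minimal self-contained sketch of the pattern they all share: declaring a DateType column in a schema and populating it with a java.sql.Date. The field name and value below are illustrative only and do not come from any of the projects that follow.

import java.sql.Date;
import java.util.Collections;

import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

// Illustrative schema with a single nullable DateType column.
StructField dateField = DataTypes.createStructField("birthday", DataTypes.DateType, true);
StructType schema = DataTypes.createStructType(Collections.singletonList(dateField));

// DateType columns are represented as java.sql.Date values in rows.
Row row = RowFactory.create(Date.valueOf("2017-01-01"));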
Example 1
Source File: LocalWithSparkSessionTest.java    From kylin-on-parquet-v2 with Apache License 2.0
private static DataType convertType(org.apache.kylin.metadata.datatype.DataType type) {
    if (type.isTimeFamily())
        return DataTypes.TimestampType;

    if (type.isDateTimeFamily())
        return DataTypes.DateType;

    if (type.isIntegerFamily())
        return DataTypes.LongType;

    if (type.isNumberFamily())
        return DataTypes.createDecimalType(19, 4);

    if (type.isStringFamily())
        return DataTypes.StringType;

    if (type.isBoolean())
        return DataTypes.BooleanType;

    throw new IllegalArgumentException("KAP data type: " + type + " cannot be converted to a Spark type.");
}
 
Example 2
Source File: TypeCastStep.java    From bpmn.ai with BSD 3-Clause "New" or "Revised" License
private DataType mapDataType(List<StructField> datasetFields, String column, String typeConfig) {

    DataType currentDatatype = getCurrentDataType(datasetFields, column);

    // when typeConfig is null (no config for this column), return the current DataType
    if (typeConfig == null) {
        return currentDatatype;
    }

    switch (typeConfig) {
        case "integer":
            return DataTypes.IntegerType;
        case "long":
            return DataTypes.LongType;
        case "double":
            return DataTypes.DoubleType;
        case "boolean":
            return DataTypes.BooleanType;
        case "date":
            return DataTypes.DateType;
        case "timestamp":
            return DataTypes.TimestampType;
        default:
            return DataTypes.StringType;
    }
}
 
Example 3
Source File: TestRowUtils.java    From envelope with Apache License 2.0
@Test
public void testToRowValueDate() {
  DataType field = DataTypes.DateType;

  DateTime dateObj = DateTime.parse("2017-01-01T00:00:00"); // Pass-thru the TZ
  Date sqlDate = new Date(dateObj.getMillis());

  assertEquals("Invalid Long", sqlDate, RowUtils.toRowValue(dateObj.getMillis(), field));
  assertEquals("Invalid String", sqlDate, RowUtils.toRowValue("2017-001", field)); // ISO Date format
  assertEquals("Invalid Date", sqlDate, RowUtils.toRowValue(dateObj.toDate(), field));
  assertEquals("Invalid DateTime", sqlDate, RowUtils.toRowValue(dateObj, field));

  thrown.expect(RuntimeException.class);
  thrown.expectMessage(CoreMatchers.containsString("Invalid or unrecognized input format"));
  RowUtils.toRowValue(123, field);
}
 
Example 4
Source File: IndexRUtil.java    From indexr with Apache License 2.0
public static List<StructField> indexrSchemaToSparkSchema(SegmentSchema schema) {
    List<StructField> fields = new ArrayList<>();
    for (ColumnSchema cs : schema.getColumns()) {
        DataType dataType;
        switch (cs.getSqlType()) {
            case INT:
                dataType = DataTypes.IntegerType;
                break;
            case BIGINT:
                dataType = DataTypes.LongType;
                break;
            case FLOAT:
                dataType = DataTypes.FloatType;
                break;
            case DOUBLE:
                dataType = DataTypes.DoubleType;
                break;
            case VARCHAR:
                dataType = DataTypes.StringType;
                break;
            case DATE:
                dataType = DataTypes.DateType;
                break;
            case DATETIME:
                dataType = DataTypes.TimestampType;
                break;
            default:
                throw new IllegalStateException("Unsupported type: " + cs.getSqlType());
        }
        fields.add(new StructField(cs.getName(), dataType, scala.Boolean.box(false), Metadata.empty()));
    }
    return fields;
}
 
Example 5
Source File: FlightDataSourceReader.java    From flight-spark-source with Apache License 2.0
private DataType sparkFromArrow(FieldType fieldType) {
  switch (fieldType.getType().getTypeID()) {
    case Null:
      return DataTypes.NullType;
    case Struct:
      throw new UnsupportedOperationException("have not implemented Struct type yet");
    case List:
      throw new UnsupportedOperationException("have not implemented List type yet");
    case FixedSizeList:
      throw new UnsupportedOperationException("have not implemented FixedSizeList type yet");
    case Union:
      throw new UnsupportedOperationException("have not implemented Union type yet");
    case Int:
      ArrowType.Int intType = (ArrowType.Int) fieldType.getType();
      int bitWidth = intType.getBitWidth();
      if (bitWidth == 8) {
        return DataTypes.ByteType;
      } else if (bitWidth == 16) {
        return DataTypes.ShortType;
      } else if (bitWidth == 32) {
        return DataTypes.IntegerType;
      } else if (bitWidth == 64) {
        return DataTypes.LongType;
      }
      throw new UnsupportedOperationException("unknown int type with bitwidth " + bitWidth);
    case FloatingPoint:
      ArrowType.FloatingPoint floatType = (ArrowType.FloatingPoint) fieldType.getType();
      FloatingPointPrecision precision = floatType.getPrecision();
      switch (precision) {
        case HALF:
        case SINGLE:
          return DataTypes.FloatType;
        case DOUBLE:
          return DataTypes.DoubleType;
      }
    case Utf8:
      return DataTypes.StringType;
    case Binary:
    case FixedSizeBinary:
      return DataTypes.BinaryType;
    case Bool:
      return DataTypes.BooleanType;
    case Decimal:
      throw new UnsupportedOperationException("have not implemented Decimal type yet");
    case Date:
      return DataTypes.DateType;
    case Time:
      return DataTypes.TimestampType; //note i don't know what this will do!
    case Timestamp:
      return DataTypes.TimestampType;
    case Interval:
      return DataTypes.CalendarIntervalType;
    case NONE:
      return DataTypes.NullType;
  }
  throw new IllegalStateException("Unexpected value: " + fieldType);
}
 
Example 6
Source File: SQLHepler.java    From sylph with Apache License 2.0
static DataType getSparkType(Type type)
{
    if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == Map.class) {
        Type[] arguments = ((ParameterizedType) type).getActualTypeArguments();

        return DataTypes.createMapType(getSparkType(arguments[0]), getSparkType(arguments[1]));
    }
    else if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == List.class) {
        DataType dataType = getSparkType(((ParameterizedType) type).getActualTypeArguments()[0]);

        return DataTypes.createArrayType(dataType);
    }
    else {
        if (type == String.class) {
            return DataTypes.StringType;
        }
        else if (type == int.class || type == Integer.class) {
            return DataTypes.IntegerType;
        }
        else if (type == long.class || type == Long.class) {
            return DataTypes.LongType;
        }
        else if (type == boolean.class || type == Boolean.class) {
            return DataTypes.BooleanType;
        }
        else if (type == double.class || type == Double.class) {
            return DataTypes.DoubleType;
        }
        else if (type == float.class || type == Float.class) {
            return DataTypes.FloatType;
        }
        else if (type == byte.class || type == Byte.class) {
            return DataTypes.ByteType;
        }
        else if (type == Timestamp.class) {
            return DataTypes.TimestampType;
        }
        else if (type == Date.class) {
            return DataTypes.DateType;
        }
        else if (type == byte[].class || type == Byte[].class) {
            return DataTypes.BinaryType;
        }
        else {
            throw new IllegalArgumentException("this TYPE " + type + " have't support!");
        }
    }
}
 
Example 7
Source File: SparkSturctTypeUtil.java    From waterdrop with Apache License 2.0
private static DataType getType(String type) {
    DataType dataType = DataTypes.NullType;
    switch (type.toLowerCase()) {
        case "string":
            dataType = DataTypes.StringType;
            break;
        case "integer":
            dataType = DataTypes.IntegerType;
            break;
        case "long":
            dataType = DataTypes.LongType;
            break;
        case "double":
            dataType = DataTypes.DoubleType;
            break;
        case "float":
            dataType = DataTypes.FloatType;
            break;
        case "short":
            dataType = DataTypes.ShortType;
            break;
        case "date":
            dataType = DataTypes.DateType;
            break;
        case "timestamp":
            dataType = DataTypes.TimestampType;
            break;
        case "boolean":
            dataType = DataTypes.BooleanType;
            break;
        case "binary":
            dataType = DataTypes.BinaryType;
            break;
        case "byte":
            dataType = DataTypes.ByteType;
            break;
        default:
            throw new ConfigRuntimeException("Unknown data type: " + type);
    }
    return dataType;
}
 
Example 8
Source File: AvroUtils.java    From envelope with Apache License 2.0
/**
 * Convert Avro Types into their associated DataType.
 *
 * @param schemaType Avro Schema.Type
 * @return DataType representation
 */
public static DataType dataTypeFor(Schema schemaType) {
  LOG.trace("Converting Schema[{}] to DataType", schemaType);

  // Unwrap "optional" unions to the base type
  boolean isOptional = isNullable(schemaType);

  if (isOptional) {
    // if only 2 items in the union, then "unwrap," otherwise, it's a full union and should be rendered as such
    if (schemaType.getTypes().size() == 2) {
      LOG.trace("Unwrapping simple 'optional' union for {}", schemaType);
      for (Schema s : schemaType.getTypes()) {
        if (s.getType().equals(NULL)) {
          continue;
        }
        // Unwrap
        schemaType = s;
        break;
      }
    }
  }

  // Convert supported LogicalTypes
  if (null != schemaType.getLogicalType()) {
    LogicalType logicalType = schemaType.getLogicalType();
    switch (logicalType.getName()) {
      case "date" :
        return DataTypes.DateType;
      case "timestamp-millis" :
        return DataTypes.TimestampType;
      case "decimal" :
        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
        return DataTypes.createDecimalType(decimal.getPrecision(), decimal.getScale());
      default:
        // Pass-thru
        LOG.warn("Unsupported LogicalType[{}], continuing with underlying base type", logicalType.getName());
    }
  }

  switch (schemaType.getType()) {
    case RECORD:
      // StructType
      List<StructField> structFieldList = Lists.newArrayListWithCapacity(schemaType.getFields().size());
      for (Field f : schemaType.getFields()) {
        structFieldList.add(DataTypes.createStructField(f.name(), dataTypeFor(f.schema()), isNullable(f.schema())));
      }
      return DataTypes.createStructType(structFieldList);
    case ARRAY:
      Schema elementType = schemaType.getElementType();
      return DataTypes.createArrayType(dataTypeFor(elementType), isNullable(elementType));
    case MAP:
      Schema valueType = schemaType.getValueType();
      return DataTypes.createMapType(DataTypes.StringType, dataTypeFor(valueType), isNullable(valueType));
    case UNION:
      // StructType of members
      List<StructField> unionFieldList = Lists.newArrayListWithCapacity(schemaType.getTypes().size());
      int m = 0;
      for (Schema u : schemaType.getTypes()) {
        unionFieldList.add(DataTypes.createStructField("member" + m++, dataTypeFor(u), isNullable(u)));
      }
      return DataTypes.createStructType(unionFieldList);
    case FIXED:
    case BYTES:
      return DataTypes.BinaryType;
    case ENUM:
    case STRING:
      return DataTypes.StringType;
    case INT:
      return DataTypes.IntegerType;
    case LONG:
      return DataTypes.LongType;
    case FLOAT:
      return DataTypes.FloatType;
    case DOUBLE:
      return DataTypes.DoubleType;
    case BOOLEAN:
      return DataTypes.BooleanType;
    case NULL:
      return DataTypes.NullType;
    default:
      throw new RuntimeException(String.format("Unrecognized or unsupported Avro Type conversion: %s", schemaType));
  }
}
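
A hedged usage sketch for the converter above: Avro's "date" logical type annotates an int base type, and dataTypeFor is expected to resolve it to Spark's DateType. The schema construction below is an assumed caller, not code from the Envelope project.

import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

// Build an Avro schema carrying the "date" logical type on an int base type.
Schema avroDate = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));

// Expected to hit the "date" branch above and return DataTypes.DateType.
DataType sparkType = AvroUtils.dataTypeFor(avroDate);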
 
Example 9
Source File: ConfigurationDataTypes.java    From envelope with Apache License 2.0
public static DataType getSparkDataType(String typeString) {
  DataType type;

  String prec_scale_regex_groups = "\\s*(decimal)\\s*\\(\\s*(\\d+)\\s*,\\s*(\\d+)\\s*\\)\\s*";
  Pattern prec_scale_regex_pattern = Pattern.compile(prec_scale_regex_groups);
  Matcher prec_scale_regex_matcher = prec_scale_regex_pattern.matcher(typeString);

  if (prec_scale_regex_matcher.matches()) {
    int precision = Integer.parseInt(prec_scale_regex_matcher.group(2)); 
    int scale = Integer.parseInt(prec_scale_regex_matcher.group(3)); 
    type = DataTypes.createDecimalType(precision, scale);
  }
  else {
    switch (typeString) {
      case DECIMAL:
        type = DataTypes.createDecimalType();
        break;
      case STRING:
        type = DataTypes.StringType;
        break;
      case FLOAT:
        type = DataTypes.FloatType;
        break;
      case DOUBLE:
        type = DataTypes.DoubleType;
        break;
      case BYTE:
        type = DataTypes.ByteType;
        break;
      case SHORT:
        type = DataTypes.ShortType;
        break;
      case INT:
        type = DataTypes.IntegerType;
        break;
      case LONG:
        type = DataTypes.LongType;
        break;
      case BOOLEAN:
        type = DataTypes.BooleanType;
        break;
      case BINARY:
        type = DataTypes.BinaryType;
        break;
      case DATE:
        type = DataTypes.DateType;
        break;
      case TIMESTAMP:
        type = DataTypes.TimestampType;
        break;
      default:
        throw new RuntimeException("Unsupported or unrecognized field type: " + typeString);
    } 
  }

  return type;
}