Java Code Examples for org.apache.spark.sql.types.DataTypes#ShortType
The following examples show how to use org.apache.spark.sql.types.DataTypes#ShortType.
Each example lists the project, source file, and license it was taken from, so you can trace it back to the original codebase.
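Before the project-specific examples, here is a minimal standalone sketch of the constant itself: DataTypes.ShortType is Spark SQL's singleton type for 16-bit signed integers (Java short), most often used when building a schema programmatically. The column name below is purely illustrative.

import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public class ShortTypeSketch {
    public static void main(String[] args) {
        // DataTypes.ShortType is the Spark SQL type for 16-bit signed integers.
        DataType shortType = DataTypes.ShortType;

        // Hypothetical schema: a single nullable short column named "age_in_years".
        StructType schema = DataTypes.createStructType(new StructField[]{
            DataTypes.createStructField("age_in_years", shortType, true)
        });

        // Prints the schema tree, showing the column typed as short.
        System.out.println(schema.treeString());
    }
}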
Example 1
Source File: ShortColumnBlockTest.java From spliceengine with GNU Affero General Public License v3.0
@Test
public void setPartitionValueTest() {
    ShortColumnBlock shortColumnBlock = new ShortColumnBlock(null, DataTypes.ShortType);
    shortColumnBlock.setPartitionValue("5", 1000);
    short value = 5;
    for (int i = 0; i < 1000; i++) {
        Assert.assertEquals(value, shortColumnBlock.getTestObject(i));
    }
}
Example 2
Source File: FlightDataSourceReader.java From flight-spark-source with Apache License 2.0
private DataType sparkFromArrow(FieldType fieldType) {
    switch (fieldType.getType().getTypeID()) {
        case Null:
            return DataTypes.NullType;
        case Struct:
            throw new UnsupportedOperationException("have not implemented Struct type yet");
        case List:
            throw new UnsupportedOperationException("have not implemented List type yet");
        case FixedSizeList:
            throw new UnsupportedOperationException("have not implemented FixedSizeList type yet");
        case Union:
            throw new UnsupportedOperationException("have not implemented Union type yet");
        case Int:
            ArrowType.Int intType = (ArrowType.Int) fieldType.getType();
            int bitWidth = intType.getBitWidth();
            if (bitWidth == 8) {
                return DataTypes.ByteType;
            } else if (bitWidth == 16) {
                return DataTypes.ShortType;
            } else if (bitWidth == 32) {
                return DataTypes.IntegerType;
            } else if (bitWidth == 64) {
                return DataTypes.LongType;
            }
            throw new UnsupportedOperationException("unknown int type with bitwidth " + bitWidth);
        case FloatingPoint:
            ArrowType.FloatingPoint floatType = (ArrowType.FloatingPoint) fieldType.getType();
            FloatingPointPrecision precision = floatType.getPrecision();
            switch (precision) {
                case HALF:
                case SINGLE:
                    return DataTypes.FloatType;
                case DOUBLE:
                    return DataTypes.DoubleType;
            }
        case Utf8:
            return DataTypes.StringType;
        case Binary:
        case FixedSizeBinary:
            return DataTypes.BinaryType;
        case Bool:
            return DataTypes.BooleanType;
        case Decimal:
            throw new UnsupportedOperationException("have not implemented Decimal type yet");
        case Date:
            return DataTypes.DateType;
        case Time:
            return DataTypes.TimestampType; //note i don't know what this will do!
        case Timestamp:
            return DataTypes.TimestampType;
        case Interval:
            return DataTypes.CalendarIntervalType;
        case NONE:
            return DataTypes.NullType;
    }
    throw new IllegalStateException("Unexpected value: " + fieldType);
}
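To see where ShortType enters this mapping: signed 16-bit Arrow integers correspond to Spark's ShortType. A small standalone sketch, assuming Arrow's pojo classes (org.apache.arrow.vector.types.pojo) are on the classpath; it reimplements just the Int branch rather than calling the private method above.

import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

public class ArrowShortMappingSketch {
    public static void main(String[] args) {
        // A nullable, signed 16-bit Arrow integer field type.
        FieldType arrowShort = FieldType.nullable(new ArrowType.Int(16, true));

        int bitWidth = ((ArrowType.Int) arrowShort.getType()).getBitWidth();
        // Same rule as the Int branch above: 16-bit Arrow ints become Spark ShortType.
        DataType sparkType = (bitWidth == 16) ? DataTypes.ShortType : DataTypes.IntegerType;

        System.out.println(sparkType); // ShortType
    }
}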
Example 3
Source File: SparkSturctTypeUtil.java From waterdrop with Apache License 2.0
private static DataType getType(String type) {
    DataType dataType = DataTypes.NullType;
    switch (type.toLowerCase()) {
        case "string":
            dataType = DataTypes.StringType;
            break;
        case "integer":
            dataType = DataTypes.IntegerType;
            break;
        case "long":
            dataType = DataTypes.LongType;
            break;
        case "double":
            dataType = DataTypes.DoubleType;
            break;
        case "float":
            dataType = DataTypes.FloatType;
            break;
        case "short":
            dataType = DataTypes.ShortType;
            break;
        case "date":
            dataType = DataTypes.DateType;
            break;
        case "timestamp":
            dataType = DataTypes.TimestampType;
            break;
        case "boolean":
            dataType = DataTypes.BooleanType;
            break;
        case "binary":
            dataType = DataTypes.BinaryType;
            break;
        case "byte":
            dataType = DataTypes.ByteType;
            break;
        default:
            throw new ConfigRuntimeException("Throw data type exception, unknown type: " + type);
    }
    return dataType;
}
Example 4
Source File: KuduOutput.java From envelope with Apache License 2.0
private StructType schemaFor(KuduTable table) {
    List<StructField> fields = Lists.newArrayList();
    for (ColumnSchema columnSchema : table.getSchema().getColumns()) {
        DataType fieldType;
        switch (columnSchema.getType()) {
            case DOUBLE:
                fieldType = DataTypes.DoubleType;
                break;
            case FLOAT:
                fieldType = DataTypes.FloatType;
                break;
            case INT8:
                fieldType = DataTypes.ByteType;
                break;
            case INT16:
                fieldType = DataTypes.ShortType;
                break;
            case INT32:
                fieldType = DataTypes.IntegerType;
                break;
            case INT64:
                fieldType = DataTypes.LongType;
                break;
            case STRING:
                fieldType = DataTypes.StringType;
                break;
            case BOOL:
                fieldType = DataTypes.BooleanType;
                break;
            case BINARY:
                fieldType = DataTypes.BinaryType;
                break;
            case UNIXTIME_MICROS:
                fieldType = DataTypes.TimestampType;
                break;
            case DECIMAL:
                int precision = columnSchema.getTypeAttributes().getPrecision();
                int scale = columnSchema.getTypeAttributes().getScale();
                fieldType = DataTypes.createDecimalType(precision, scale);
                break;
            default:
                throw new RuntimeException("Unsupported Kudu column type: " + columnSchema.getType());
        }
        fields.add(DataTypes.createStructField(columnSchema.getName(), fieldType, true));
    }
    return DataTypes.createStructType(fields);
}
Example 5
Source File: ConfigurationDataTypes.java From envelope with Apache License 2.0
public static DataType getSparkDataType(String typeString) {
    DataType type;
    String prec_scale_regex_groups = "\\s*(decimal)\\s*\\(\\s*(\\d+)\\s*,\\s*(\\d+)\\s*\\)\\s*";
    Pattern prec_scale_regex_pattern = Pattern.compile(prec_scale_regex_groups);
    Matcher prec_scale_regex_matcher = prec_scale_regex_pattern.matcher(typeString);

    if (prec_scale_regex_matcher.matches()) {
        int precision = Integer.parseInt(prec_scale_regex_matcher.group(2));
        int scale = Integer.parseInt(prec_scale_regex_matcher.group(3));
        type = DataTypes.createDecimalType(precision, scale);
    } else {
        switch (typeString) {
            case DECIMAL:
                type = DataTypes.createDecimalType();
                break;
            case STRING:
                type = DataTypes.StringType;
                break;
            case FLOAT:
                type = DataTypes.FloatType;
                break;
            case DOUBLE:
                type = DataTypes.DoubleType;
                break;
            case BYTE:
                type = DataTypes.ByteType;
                break;
            case SHORT:
                type = DataTypes.ShortType;
                break;
            case INT:
                type = DataTypes.IntegerType;
                break;
            case LONG:
                type = DataTypes.LongType;
                break;
            case BOOLEAN:
                type = DataTypes.BooleanType;
                break;
            case BINARY:
                type = DataTypes.BinaryType;
                break;
            case DATE:
                type = DataTypes.DateType;
                break;
            case TIMESTAMP:
                type = DataTypes.TimestampType;
                break;
            default:
                throw new RuntimeException("Unsupported or unrecognized field type: " + typeString);
        }
    }
    return type;
}
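Because getSparkDataType is public and static, it can be exercised directly. A minimal usage sketch, assuming the class's SHORT constant resolves to the literal string "short" (the constant values are not shown in this example):

// Hypothetical usage of Example 5's parser; assumes SHORT == "short" in ConfigurationDataTypes.
DataType shortType = ConfigurationDataTypes.getSparkDataType("short");           // DataTypes.ShortType
DataType decimalType = ConfigurationDataTypes.getSparkDataType("decimal(10,2)"); // DecimalType(10,2)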