org.apache.flink.table.utils.TypeStringUtils Java Examples
The following examples show how to use
org.apache.flink.table.utils.TypeStringUtils.
Each example notes the source file, the project it was taken from, and its license.
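All of the examples below revolve around two static methods: TypeStringUtils.writeTypeInfo, which serializes a TypeInformation into a type string, and TypeStringUtils.readTypeInfo, which parses such a string back. The following minimal round-trip sketch is not taken from any of the listed projects; the class name and row fields are made up, and the exact strings produced (for example VARCHAR for Flink's string type, see Example #11) can vary between Flink versions.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.utils.TypeStringUtils;

public class TypeStringRoundTrip {

    public static void main(String[] args) {
        // Serialize a row type into its string representation.
        String typeString = TypeStringUtils.writeTypeInfo(
            Types.ROW_NAMED(new String[]{"name", "count"}, Types.STRING, Types.LONG));
        System.out.println(typeString);

        // Parse the string back into an equivalent TypeInformation.
        TypeInformation<?> restored = TypeStringUtils.readTypeInfo(typeString);
        System.out.println(restored);
    }
}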
Example #1
Source File: DescriptorProperties.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);

    final String[] fieldNames = schema.getFieldNames();
    final TypeInformation<?>[] fieldTypes = schema.getFieldTypes();

    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(Arrays.asList(fieldNames[i], TypeStringUtils.writeTypeInfo(fieldTypes[i])));
    }

    putIndexedFixedProperties(
        key,
        Arrays.asList(TABLE_SCHEMA_NAME, TABLE_SCHEMA_TYPE),
        values);
}
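A hedged usage sketch for the method above; the key and field names are made up. putTableSchema stores each column as an indexed pair of name and type properties, with the type value produced by TypeStringUtils.writeTypeInfo.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class PutTableSchemaExample {

    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
            .field("user", Types.STRING)
            .field("clicks", Types.LONG)
            .build();

        DescriptorProperties properties = new DescriptorProperties();
        // Stores indexed properties such as "schema.0.name" / "schema.0.type",
        // where the type value is a TypeStringUtils type string.
        properties.putTableSchema("schema", schema);
    }
}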
Example #2
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 6 votes |
/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);

    final String[] fieldNames = schema.getFieldNames();
    final TypeInformation<?>[] fieldTypes = schema.getFieldTypes();

    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(Arrays.asList(fieldNames[i], TypeStringUtils.writeTypeInfo(fieldTypes[i])));
    }

    putIndexedFixedProperties(
        key,
        Arrays.asList(TABLE_SCHEMA_NAME, TABLE_SCHEMA_TYPE),
        values);
}
Example #3
Source File: LogicalTypeParser.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private LogicalType parseLegacyType() {
    nextToken(TokenType.BEGIN_PARAMETER);
    nextToken(TokenType.LITERAL_STRING);
    final String rootString = tokenAsString();
    nextToken(TokenType.LIST_SEPARATOR);
    nextToken(TokenType.LITERAL_STRING);
    final String typeInfoString = tokenAsString();
    nextToken(TokenType.END_PARAMETER);

    try {
        final LogicalTypeRoot root = LogicalTypeRoot.valueOf(rootString);
        final TypeInformation typeInfo = TypeStringUtils.readTypeInfo(typeInfoString);
        return new LegacyTypeInformationType<>(root, typeInfo);
    } catch (Throwable t) {
        throw parsingError(
            "Unable to restore the Legacy type of '" + typeInfoString + "' with " +
                "type root '" + rootString + "'.", t);
    }
}
Example #4
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 5 votes |
/**
 * Validates a type property.
 */
public void validateType(String key, boolean isOptional, boolean requireRow) {
    validateOptional(
        key,
        isOptional,
        (value) -> {
            // we don't validate the string but let the parser do the work for us
            // it throws a validation exception
            final TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(value);
            if (requireRow && !(typeInfo instanceof RowTypeInfo)) {
                throw new ValidationException(
                    "Row type information expected for key '" + key + "' but was: " + value);
            }
        });
}
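A hedged sketch of how this validator might be called; the property key and the type string are made up, and the type string follows the legacy TypeStringUtils syntax used throughout this page.

import org.apache.flink.table.descriptors.DescriptorProperties;

public class ValidateTypeExample {

    public static void main(String[] args) {
        DescriptorProperties properties = new DescriptorProperties();
        properties.putString("format.schema", "ROW<id BIGINT, name VARCHAR>");

        // Passes only if the value parses via TypeStringUtils.readTypeInfo
        // and (because requireRow is true) yields a RowTypeInfo.
        properties.validateType("format.schema", false, true);
    }
}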
Example #5
Source File: DescriptorProperties.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Returns a table schema under the given key if it exists.
 */
public Optional<TableSchema> getOptionalTableSchema(String key) {
    // filter for number of fields
    final int fieldCount = properties.keySet().stream()
        .filter((k) -> k.startsWith(key) && k.endsWith('.' + TABLE_SCHEMA_NAME))
        .mapToInt((k) -> 1)
        .sum();

    if (fieldCount == 0) {
        return Optional.empty();
    }

    // validate fields and build schema
    final TableSchema.Builder schemaBuilder = TableSchema.builder();
    for (int i = 0; i < fieldCount; i++) {
        final String nameKey = key + '.' + i + '.' + TABLE_SCHEMA_NAME;
        final String typeKey = key + '.' + i + '.' + TABLE_SCHEMA_TYPE;

        final String name = optionalGet(nameKey).orElseThrow(exceptionSupplier(nameKey));

        final TypeInformation<?> type = optionalGet(typeKey)
            .map(TypeStringUtils::readTypeInfo)
            .orElseThrow(exceptionSupplier(typeKey));

        schemaBuilder.field(name, type);
    }
    return Optional.of(schemaBuilder.build());
}
Example #6
Source File: DescriptorProperties.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Validates a type property.
 */
public void validateType(String key, boolean isOptional, boolean requireRow) {
    validateOptional(
        key,
        isOptional,
        (value) -> {
            // we don't validate the string but let the parser do the work for us
            // it throws a validation exception
            final TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(value);
            if (requireRow && !(typeInfo instanceof RowTypeInfo)) {
                throw new ValidationException(
                    "Row type information expected for key '" + key + "' but was: " + value);
            }
        });
}
Example #7
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 5 votes |
/**
 * Validates a data type property.
 */
public void validateDataType(String key, String fallbackKey, boolean isOptional) {
    if (properties.containsKey(key)) {
        validateOptional(
            key,
            isOptional,
            // we don't validate the string but let the parser do the work for us
            // it throws a validation exception
            v -> {
                LogicalType t = LogicalTypeParser.parse(v);
                if (t.getTypeRoot() == LogicalTypeRoot.UNRESOLVED) {
                    throw new ValidationException("Could not parse type string '" + v + "'.");
                }
            });
    } else if (fallbackKey != null && properties.containsKey(fallbackKey)) {
        validateOptional(
            fallbackKey,
            isOptional,
            // we don't validate the string but let the parser do the work for us
            // it throws a validation exception
            TypeStringUtils::readTypeInfo);
    } else {
        if (!isOptional) {
            throw new ValidationException("Could not find required property '" + key + "'.");
        }
    }
}
Example #8
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 5 votes |
/**
 * Returns a table schema under the given key if it exists.
 */
public Optional<TableSchema> getOptionalTableSchema(String key) {
    // filter for number of fields
    final int fieldCount = properties.keySet().stream()
        .filter((k) -> k.startsWith(key) && k.endsWith('.' + TABLE_SCHEMA_NAME))
        .mapToInt((k) -> 1)
        .sum();

    if (fieldCount == 0) {
        return Optional.empty();
    }

    // validate fields and build schema
    final TableSchema.Builder schemaBuilder = TableSchema.builder();
    for (int i = 0; i < fieldCount; i++) {
        final String nameKey = key + '.' + i + '.' + TABLE_SCHEMA_NAME;
        final String typeKey = key + '.' + i + '.' + TABLE_SCHEMA_TYPE;

        final String name = optionalGet(nameKey).orElseThrow(exceptionSupplier(nameKey));

        final TypeInformation<?> type = optionalGet(typeKey)
            .map(TypeStringUtils::readTypeInfo)
            .orElseThrow(exceptionSupplier(typeKey));

        schemaBuilder.field(name, type);
    }
    return Optional.of(schemaBuilder.build());
}
Example #9
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 5 votes |
/**
 * Validates a type property.
 */
public void validateType(String key, boolean isOptional, boolean requireRow) {
    validateOptional(
        key,
        isOptional,
        (value) -> {
            // we don't validate the string but let the parser do the work for us
            // it throws a validation exception
            final TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(value);
            if (requireRow && !(typeInfo instanceof RowTypeInfo)) {
                throw new ValidationException(
                    "Row type information expected for key '" + key + "' but was: " + value);
            }
        });
}
Example #10
Source File: FlinkTypeConverter.java From Alink with Apache License 2.0 | 5 votes |
/**
 * Convert TypeInformation array to string format array.
 *
 * @param types TypeInformation array
 * @return string representation of the types.
 */
public static String[] getTypeString(TypeInformation<?>[] types) {
    String[] sqlTypes = new String[types.length];
    for (int i = 0; i < types.length; i++) {
        sqlTypes[i] = TypeStringUtils.writeTypeInfo(types[i]);
    }
    return sqlTypes;
}
Example #11
Source File: FeatureBorder.java From Alink with Apache License 2.0 | 5 votes |
public static String getTypeString(TypeInformation<?> type) {
    String typeStr = TypeStringUtils.writeTypeInfo(type);
    if ("VARCHAR".equals(typeStr)) {
        typeStr = "STRING";
    }
    return typeStr;
}
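The renaming above exists because TypeStringUtils writes Flink's string type with the SQL-style name VARCHAR. A small hedged sketch of that behaviour; the printed values are expectations, not guarantees across Flink versions.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.utils.TypeStringUtils;

public class StringTypeAliasExample {

    public static void main(String[] args) {
        // Types.STRING is expected to serialize as "VARCHAR" ...
        System.out.println(TypeStringUtils.writeTypeInfo(Types.STRING));

        // ... and "VARCHAR" is expected to parse back to the plain string TypeInformation.
        TypeInformation<?> parsed = TypeStringUtils.readTypeInfo("VARCHAR");
        System.out.println(parsed.equals(Types.STRING));
    }
}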
Example #12
Source File: LegacyTypeInformationType.java From flink with Apache License 2.0 | 5 votes |
@Override
public String asSerializableString() {
    return withNullability(
        FORMAT,
        getTypeRoot(),
        EncodingUtils.escapeSingleQuotes(TypeStringUtils.writeTypeInfo(typeInfo)));
}
Example #13
Source File: DescriptorProperties.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * Returns the type information under the given key if it exists.
 */
public Optional<TypeInformation<?>> getOptionalType(String key) {
    return optionalGet(key).map(TypeStringUtils::readTypeInfo);
}
Example #14
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 4 votes |
/**
 * Returns the type information under the given key if it exists.
 */
public Optional<TypeInformation<?>> getOptionalType(String key) {
    return optionalGet(key).map(TypeStringUtils::readTypeInfo);
}
Example #15
Source File: LiteralValue.java From flink with Apache License 2.0 | 4 votes |
/**
 * Type information of the literal value. E.g. Types.BOOLEAN.
 *
 * @param typeInfo type information describing the value
 */
public LiteralValue of(TypeInformation<?> typeInfo) {
    Preconditions.checkNotNull(typeInfo, "Type information must not be null.");
    this.typeInfo = TypeStringUtils.writeTypeInfo(typeInfo);
    return this;
}
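A hedged usage sketch for the descriptor method above; the literal itself is made up, and the value(...) call assumes the overloads provided by the same LiteralValue descriptor class.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.LiteralValue;

public class LiteralValueExample {

    public static void main(String[] args) {
        // of(...) records the type as a TypeStringUtils string; value(...) sets the literal.
        LiteralValue literal = new LiteralValue()
            .of(Types.BOOLEAN)
            .value(true);
    }
}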
Example #16
Source File: FeatureBorder.java From Alink with Apache License 2.0 | 4 votes |
public static TypeInformation<?> getFlinkType(String typeStr) {
    if ("STRING".equals(typeStr)) {
        typeStr = "VARCHAR";
    }
    return TypeStringUtils.readTypeInfo(typeStr);
}
Example #17
Source File: Csv.java From flink with Apache License 2.0 | 4 votes |
/**
 * Sets the format schema with field names and the types. Required if schema is not derived.
 *
 * @param schemaType type information that describes the schema
 */
public Csv schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    internalProperties.putString(FORMAT_SCHEMA, TypeStringUtils.writeTypeInfo(schemaType));
    return this;
}
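A hedged usage sketch for the method above; the column names are made up.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.Csv;

public class CsvSchemaExample {

    public static void main(String[] args) {
        // The row type is serialized with TypeStringUtils.writeTypeInfo and stored
        // under the format schema property.
        Csv format = new Csv()
            .schema(Types.ROW_NAMED(
                new String[]{"id", "name"},
                Types.LONG, Types.STRING));
    }
}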
Example #18
Source File: DescriptorProperties.java From flink with Apache License 2.0 | 4 votes |
/**
 * Returns the type information under the given key if it exists.
 */
public Optional<TypeInformation<?>> getOptionalType(String key) {
    return optionalGet(key).map(TypeStringUtils::readTypeInfo);
}
Example #19
Source File: LiteralValue.java From flink with Apache License 2.0 | 4 votes |
/**
 * Type information of the literal value. E.g. Types.BOOLEAN.
 *
 * @param typeInfo type information describing the value
 */
public LiteralValue of(TypeInformation<?> typeInfo) {
    Preconditions.checkNotNull(typeInfo, "Type information must not be null.");
    this.typeInfo = TypeStringUtils.writeTypeInfo(typeInfo);
    return this;
}
Example #20
Source File: Csv.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * Sets the format schema with field names and the types. Required if schema is not derived.
 *
 * @param schemaType type information that describes the schema
 */
public Csv schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    internalProperties.putString(FORMAT_SCHEMA, TypeStringUtils.writeTypeInfo(schemaType));
    return this;
}
Example #21
Source File: OldCsv.java From flink with Apache License 2.0 | 3 votes |
/**
 * Adds a format field with the field name and the type string. Required.
 * This method can be called multiple times. The call order of this method defines
 * also the order of the fields in the format.
 *
 * <p>NOTE: the fieldType string should follow the type string defined in {@link LogicalTypeParser}.
 * This method also keeps compatible with old type string defined in {@link TypeStringUtils} but
 * will be dropped in future versions as it uses the old type system.
 *
 * @param fieldName the field name
 * @param fieldType the type string of the field
 * @deprecated {@link OldCsv} supports derive schema from table schema by default,
 *             it is no longer necessary to explicitly declare the format schema.
 *             This method will be removed in the future.
 */
@Deprecated
public OldCsv field(String fieldName, String fieldType) {
    if (isLegacyTypeString(fieldType)) {
        // fallback to legacy parser
        TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(fieldType);
        return field(fieldName, TypeConversions.fromLegacyInfoToDataType(typeInfo));
    } else {
        return addField(fieldName, fieldType);
    }
}
Example #22
Source File: Schema.java From flink with Apache License 2.0 | 3 votes |
/**
 * Adds a field with the field name and the type string. Required.
 * This method can be called multiple times. The call order of this method defines
 * also the order of the fields in a row.
 *
 * <p>NOTE: the fieldType string should follow the type string defined in {@link LogicalTypeParser}.
 * This method also keeps compatible with old type string defined in {@link TypeStringUtils}
 * but will be dropped in future versions as it uses the old type system.
 *
 * @param fieldName the field name
 * @param fieldType the type string of the field
 */
public Schema field(String fieldName, String fieldType) {
    if (isLegacyTypeString(fieldType)) {
        // fallback to legacy parser
        TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(fieldType);
        return field(fieldName, TypeConversions.fromLegacyInfoToDataType(typeInfo));
    } else {
        return addField(fieldName, fieldType);
    }
}
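A hedged usage sketch for the method above; the field names are made up. Which parser handles a given string is decided by isLegacyTypeString, so new-style and old-style type strings can be mixed in one schema declaration.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.table.utils.TypeStringUtils;

public class SchemaFieldExample {

    public static void main(String[] args) {
        Schema schema = new Schema()
            // "BIGINT" is valid in both the new and the legacy type-string syntax.
            .field("id", "BIGINT")
            // A string produced by the old type system; field() can fall back to
            // TypeStringUtils.readTypeInfo for such strings.
            .field("payload", TypeStringUtils.writeTypeInfo(
                Types.ROW_NAMED(new String[]{"a", "b"}, Types.INT, Types.STRING)));
    }
}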
Example #23
Source File: Csv.java From flink with Apache License 2.0 | 3 votes |
/**
 * Sets the format schema with field names and the types. Required if schema is not derived.
 *
 * @param schemaType type information that describes the schema
 * @deprecated {@link Csv} supports derive schema from table schema by default,
 *             it is no longer necessary to explicitly declare the format schema.
 *             This method will be removed in the future.
 */
@Deprecated
public Csv schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    internalProperties.putString(FORMAT_SCHEMA, TypeStringUtils.writeTypeInfo(schemaType));
    return this;
}
Example #24
Source File: Json.java From flink with Apache License 2.0 | 3 votes |
/**
 * Sets the schema using type information.
 *
 * <p>JSON objects are represented as ROW types.
 *
 * <p>The schema might be nested.
 *
 * @param schemaType type information that describes the schema
 * @deprecated {@link Json} supports derive schema from table schema by default,
 *             it is no longer necessary to explicitly declare the format schema.
 *             This method will be removed in the future.
 */
@Deprecated
public Json schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    this.schema = TypeStringUtils.writeTypeInfo(schemaType);
    this.jsonSchema = null;
    this.deriveSchema = null;
    return this;
}
Example #25
Source File: Json.java From flink with Apache License 2.0 | 3 votes |
/**
 * Sets the schema using type information.
 *
 * <p>JSON objects are represented as ROW types.
 *
 * <p>The schema might be nested.
 *
 * @param schemaType type information that describes the schema
 */
public Json schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    this.schema = TypeStringUtils.writeTypeInfo(schemaType);
    this.jsonSchema = null;
    this.deriveSchema = null;
    return this;
}
Example #26
Source File: Json.java From Flink-CEPplus with Apache License 2.0 | 3 votes |
/**
 * Sets the schema using type information.
 *
 * <p>JSON objects are represented as ROW types.
 *
 * <p>The schema might be nested.
 *
 * @param schemaType type information that describes the schema
 */
public Json schema(TypeInformation<Row> schemaType) {
    Preconditions.checkNotNull(schemaType);
    this.schema = TypeStringUtils.writeTypeInfo(schemaType);
    this.jsonSchema = null;
    this.deriveSchema = null;
    return this;
}
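A hedged usage sketch for the Json descriptor shown above; the field layout is made up and only illustrates that nested ROW types are allowed.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.Json;

public class JsonSchemaExample {

    public static void main(String[] args) {
        // JSON objects map to ROW types; nested objects map to nested ROWs.
        Json format = new Json()
            .schema(Types.ROW_NAMED(
                new String[]{"user", "event"},
                Types.STRING,
                Types.ROW_NAMED(new String[]{"type", "timestamp"}, Types.STRING, Types.LONG)));
    }
}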
Example #27
Source File: FlinkTypeConverter.java From Alink with Apache License 2.0 | 2 votes |
/**
 * Convert string representation to flink TypeInformation.
 *
 * @param typeSQL string representation of type
 * @return flink TypeInformation
 */
public static TypeInformation<?> getFlinkType(String typeSQL) {
    return TypeStringUtils.readTypeInfo(typeSQL);
}
Example #28
Source File: FlinkTypeConverter.java From Alink with Apache License 2.0 | 2 votes |
/**
 * Convert TypeInformation to string representation.
 *
 * @param type TypeInformation
 * @return string representation of the type.
 */
public static String getTypeString(TypeInformation<?> type) {
    return TypeStringUtils.writeTypeInfo(type);
}
Example #29
Source File: TypeUtils.java From alchemy with Apache License 2.0 | 2 votes |
public static TypeInformation readTypeInfo(String typeString) {
    return TypeStringUtils.readTypeInfo(typeString);
}
Example #30
Source File: OldCsv.java From flink with Apache License 2.0 | 2 votes |
/**
 * Adds a format field with the field name and the type information. Required.
 * This method can be called multiple times. The call order of this method defines
 * also the order of the fields in the format.
 *
 * @param fieldName the field name
 * @param fieldType the type information of the field
 */
public OldCsv field(String fieldName, TypeInformation<?> fieldType) {
    field(fieldName, TypeStringUtils.writeTypeInfo(fieldType));
    return this;
}