Java Code Examples for org.apache.flink.api.common.typeinfo.TypeInformation#equals()
The following examples show how to use
org.apache.flink.api.common.typeinfo.TypeInformation#equals().
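Before the project examples, a minimal, self-contained sketch of the method itself: TypeInformation#equals() compares the logical type that two TypeInformation instances describe, not object identity, so independently obtained instances for the same Java type compare equal. Class and variable names below are illustrative.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;

public class TypeInfoEqualsDemo {
    public static void main(String[] args) {
        // Two independently created descriptors for the same Java type
        TypeInformation<String> fromFactory = Types.STRING;
        TypeInformation<String> fromClass = TypeInformation.of(String.class);

        // equals() compares the described type, not object identity
        System.out.println(fromFactory.equals(fromClass)); // true
        System.out.println(fromFactory.equals(Types.INT)); // false
    }
}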
Example 1
Source File: SemanticPropUtil.java From flink with Apache License 2.0
private static boolean areFieldsCompatible(String sourceField, TypeInformation<?> inType, String targetField,
        TypeInformation<?> outType, boolean throwException) {
    try {
        // get source type information
        TypeInformation<?> sourceType = getExpressionTypeInformation(sourceField, inType);
        // get target type information
        TypeInformation<?> targetType = getExpressionTypeInformation(targetField, outType);
        return sourceType.equals(targetType);
    } catch (InvalidFieldReferenceException e) {
        if (throwException) {
            throw e;
        } else {
            return false;
        }
    }
}
Example 2
Source File: SingleOutputStreamOperator.java From flink with Apache License 2.0
/**
 * Gets the {@link DataStream} that contains the elements that are emitted from an operation
 * into the side output with the given {@link OutputTag}.
 *
 * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object)
 */
public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) {
    if (wasSplitApplied) {
        throw new UnsupportedOperationException("getSideOutput() and split() may not be called on the same DataStream. " +
            "As a work-around, please add a no-op map function before the split() call.");
    }

    sideOutputTag = clean(requireNonNull(sideOutputTag));

    // make a defensive copy
    sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo());

    TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag);
    if (type != null && !type.equals(sideOutputTag.getTypeInfo())) {
        throw new UnsupportedOperationException("A side output with a matching id was " +
            "already requested with a different type. This is not allowed, side output " +
            "ids need to be unique.");
    }

    requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo());

    SideOutputTransformation<X> sideOutputTransformation = new SideOutputTransformation<>(this.getTransformation(), sideOutputTag);
    return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation);
}
Example 3
Source File: PlannerTypeInferenceUtilImpl.java From flink with Apache License 2.0
private DataType castIfNeeded(
        ResolvedExpression child,
        PlannerExpression plannerChild,
        TypeInformation<?> expectedType) {
    TypeInformation<?> actualType = plannerChild.resultType();
    if (actualType.equals(expectedType)) {
        return child.getOutputDataType();
    } else if (TypeCoercion.canSafelyCast(
            fromTypeInfoToLogicalType(actualType), fromTypeInfoToLogicalType(expectedType))) {
        return fromLegacyInfoToDataType(expectedType);
    } else {
        throw new ValidationException(String.format("Incompatible type of argument: %s Expected: %s",
            child,
            expectedType));
    }
}
Example 4
Source File: LiteralValueValidator.java From flink with Apache License 2.0
/**
 * Gets the value according to the type and value strings.
 *
 * @param keyPrefix the prefix of the literal type key
 * @param properties the descriptor properties
 * @return the derived value
 */
public static Object getValue(String keyPrefix, DescriptorProperties properties) {
    String typeKey = keyPrefix + TYPE;
    // explicit type
    if (properties.containsKey(typeKey)) {
        String valueKey = keyPrefix + VALUE;
        TypeInformation<?> typeInfo = properties.getType(typeKey);
        if (typeInfo.equals(Types.BIG_DEC)) {
            return properties.getBigDecimal(valueKey);
        } else if (typeInfo.equals(Types.BOOLEAN)) {
            return properties.getBoolean(valueKey);
        } else if (typeInfo.equals(Types.BYTE)) {
            return properties.getByte(valueKey);
        } else if (typeInfo.equals(Types.DOUBLE)) {
            return properties.getDouble(valueKey);
        } else if (typeInfo.equals(Types.FLOAT)) {
            return properties.getFloat(valueKey);
        } else if (typeInfo.equals(Types.INT)) {
            return properties.getInt(valueKey);
        } else if (typeInfo.equals(Types.LONG)) {
            return properties.getLong(valueKey);
        } else if (typeInfo.equals(Types.SHORT)) {
            return properties.getShort(valueKey);
        } else if (typeInfo.equals(Types.STRING)) {
            return properties.getString(valueKey);
        } else {
            throw new TableException("Unsupported type '" + typeInfo.getTypeClass() + "'.");
        }
    }
    // implicit type
    else {
        return deriveTypeStringFromValueString(
            properties.getString(keyPrefix.substring(0, keyPrefix.length() - 1)));
    }
}
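For context, a hedged usage sketch of the method above. The descriptor keys "literal.type" and "literal.value" and the prefix are illustrative assumptions, not taken from the Flink sources; the sketch only shows how getValue() dispatches on TypeInformation#equals().

import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.LiteralValueValidator;

public class LiteralValueDemo {
    public static void main(String[] args) {
        // Hypothetical keys; "literal.type" / "literal.value" are illustrative
        DescriptorProperties properties = new DescriptorProperties();
        properties.putString("literal.type", "INT");
        properties.putString("literal.value", "42");

        // With an explicit type, getValue() compares the parsed TypeInformation
        // against Types.INT via equals() and returns an Integer here
        Object value = LiteralValueValidator.getValue("literal.", properties);
        System.out.println(value); // 42
    }
}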
Example 5
Source File: ParquetInputFormat.java From flink with Apache License 2.0
/**
 * Generates and returns the read schema based on the projected fields for a given file.
 *
 * @param fileSchema The schema of the given file.
 * @param filePath The path of the given file.
 * @return The read schema based on the given file's schema and the projected fields.
 */
private MessageType getReadSchema(MessageType fileSchema, Path filePath) {
    RowTypeInfo fileTypeInfo = (RowTypeInfo) ParquetSchemaConverter.fromParquetType(fileSchema);
    List<Type> types = new ArrayList<>();
    for (int i = 0; i < fieldNames.length; ++i) {
        String readFieldName = fieldNames[i];
        TypeInformation<?> readFieldType = fieldTypes[i];
        if (fileTypeInfo.getFieldIndex(readFieldName) < 0) {
            if (!skipWrongSchemaFileSplit) {
                throw new IllegalArgumentException("Field " + readFieldName + " cannot be found in schema of "
                    + " Parquet file: " + filePath + ".");
            } else {
                this.skipThisSplit = true;
                return fileSchema;
            }
        }

        if (!readFieldType.equals(fileTypeInfo.getTypeAt(readFieldName))) {
            if (!skipWrongSchemaFileSplit) {
                throw new IllegalArgumentException("Expecting type " + readFieldType + " for field " + readFieldName
                    + " but found type " + fileTypeInfo.getTypeAt(readFieldName) + " in Parquet file: " + filePath + ".");
            } else {
                this.skipThisSplit = true;
                return fileSchema;
            }
        }
        types.add(fileSchema.getType(readFieldName));
    }

    return new MessageType(fileSchema.getName(), types);
}
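The per-field check above relies on TypeInformation#equals() for the individual field types of a RowTypeInfo. A minimal sketch of that comparison; the row shape is invented, and default field names are f0, f1, and so on.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

public class RowTypeEqualsDemo {
    public static void main(String[] args) {
        // A two-field row type, standing in for the converted Parquet schema
        RowTypeInfo fileTypeInfo = new RowTypeInfo(Types.INT, Types.STRING);

        // getTypeAt() resolves a field by name; equals() compares the descriptors
        System.out.println(fileTypeInfo.getTypeAt("f1").equals(Types.STRING)); // true
        System.out.println(fileTypeInfo.getTypeAt("f0").equals(Types.LONG));   // false
    }
}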
Example 6
Source File: FieldInfoUtils.java From flink with Apache License 2.0
private static void checkRowtimeType(TypeInformation<?> type) {
    if (!(type.equals(Types.LONG()) || type instanceof SqlTimeTypeInfo)) {
        throw new ValidationException(
            "The rowtime attribute can only replace a field with a valid time type, " +
                "such as Timestamp or Long. But was: " + type);
    }
}
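Both branches of the check can be exercised directly. A small sketch using the core typeinfo classes; note the example above uses the flink-table Types variant, hence the LONG() method call, while the sketch uses the Java API constants.

import org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;

public class RowtimeTypeCheckDemo {
    public static void main(String[] args) {
        TypeInformation<?> longType = Types.LONG;
        TypeInformation<?> timestampType = Types.SQL_TIMESTAMP;

        // A Long field is accepted via the equals() branch...
        System.out.println(longType.equals(Types.LONG)); // true
        // ...while SQL time types are accepted via the instanceof branch
        System.out.println(timestampType instanceof SqlTimeTypeInfo); // true
    }
}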
Example 7
Source File: PlannerTypeInferenceUtilImpl.java From flink with Apache License 2.0
private DataType castIfNeeded(
        ResolvedExpression child,
        PlannerExpression plannerChild,
        TypeInformation<?> expectedType) {
    TypeInformation<?> actualType = plannerChild.resultType();
    if (actualType.equals(expectedType)) {
        return child.getOutputDataType();
    } else if (TypeCoercion.canSafelyCast(actualType, expectedType)) {
        return fromLegacyInfoToDataType(expectedType);
    } else {
        throw new ValidationException(String.format("Incompatible type of argument: %s Expected: %s",
            child,
            expectedType));
    }
}
Example 8
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0
/**
 * Collects the batch result depending on the {@link #planner} context.
 */
private List<Row> collectBatchResult(Table table) throws Exception {
    TableImpl tableImpl = (TableImpl) table;
    if (OLD_PLANNER.equals(planner)) {
        BatchTableEnvironment batchTableEnv = (BatchTableEnvironment) tableImpl.getTableEnvironment();
        DataSet<Row> resultSet = batchTableEnv.toDataSet(table, Row.class);
        return resultSet.collect();
    } else {
        TableImpl t = (TableImpl) table;
        TableSchema schema = t.getSchema();
        List<TypeInformation> types = new ArrayList<>();
        for (TypeInformation typeInfo : t.getSchema().getFieldTypes()) {
            // convert LOCAL_DATE_TIME to legacy TIMESTAMP to make the output consistent with flink batch planner
            if (typeInfo.equals(Types.LOCAL_DATE_TIME)) {
                types.add(Types.SQL_TIMESTAMP);
            } else if (typeInfo.equals(Types.LOCAL_DATE)) {
                types.add(Types.SQL_DATE);
            } else if (typeInfo.equals(Types.LOCAL_TIME)) {
                types.add(Types.SQL_TIME);
            } else {
                types.add(typeInfo);
            }
        }
        CollectRowTableSink sink = new CollectRowTableSink();
        CollectTableSink<Row> configuredSink = (CollectTableSink<Row>) sink.configure(
            schema.getFieldNames(), types.toArray(new TypeInformation[0]));
        return JavaScalaConversionUtil.toJava(
            BatchTableEnvUtil.collect(
                t.getTableEnvironment(), table, configuredSink, Option.apply("JOB")));
    }
}
Example 9
Source File: BaseLinearModelTrainBatchOp.java From Alink with Apache License 2.0
/**
 * Get feature types.
 *
 * @param in train data.
 * @param featureColNames feature column names.
 * @return feature types.
 */
private String[] getFeatureTypes(BatchOperator in, String[] featureColNames) {
    if (featureColNames != null) {
        String[] featureColTypes = new String[featureColNames.length];
        for (int i = 0; i < featureColNames.length; ++i) {
            int idx = TableUtil.findColIndexWithAssertAndHint(in.getColNames(), featureColNames[i]);
            TypeInformation type = in.getSchema().getFieldTypes()[idx];
            if (type.equals(Types.DOUBLE)) {
                featureColTypes[i] = "double";
            } else if (type.equals(Types.FLOAT)) {
                featureColTypes[i] = "float";
            } else if (type.equals(Types.LONG)) {
                featureColTypes[i] = "long";
            } else if (type.equals(Types.INT)) {
                featureColTypes[i] = "int";
            } else if (type.equals(Types.SHORT)) {
                featureColTypes[i] = "short";
            } else if (type.equals(Types.BOOLEAN)) {
                featureColTypes[i] = "bool";
            } else {
                throw new RuntimeException(
                    "linear algorithm only support numerical data type. type is : " + type);
            }
        }
        return featureColTypes;
    }
    return null;
}
Example 10
Source File: JoinedStreams.java From Flink-CEPplus with Apache License 2.0
/**
 * Specifies a {@link KeySelector} for elements from the second input with explicit type information for the key type.
 *
 * @param keySelector The KeySelector to be used for extracting the second input's key for partitioning.
 * @param keyType The type information describing the key type.
 */
public EqualTo equalTo(KeySelector<T2, KEY> keySelector, TypeInformation<KEY> keyType) {
    requireNonNull(keySelector);
    requireNonNull(keyType);

    if (!keyType.equals(this.keyType)) {
        throw new IllegalArgumentException("The keys for the two inputs are not equal: " +
            "first key = " + this.keyType + " , second key = " + keyType);
    }

    return new EqualTo(input2.clean(keySelector));
}
Example 11
Source File: JoinedStreams.java From flink with Apache License 2.0
/**
 * Specifies a {@link KeySelector} for elements from the second input with explicit type information for the key type.
 *
 * @param keySelector The KeySelector to be used for extracting the second input's key for partitioning.
 * @param keyType The type information describing the key type.
 */
public EqualTo equalTo(KeySelector<T2, KEY> keySelector, TypeInformation<KEY> keyType) {
    requireNonNull(keySelector);
    requireNonNull(keyType);

    if (!keyType.equals(this.keyType)) {
        throw new IllegalArgumentException("The keys for the two inputs are not equal: " +
            "first key = " + this.keyType + " , second key = " + keyType);
    }

    return new EqualTo(input2.clean(keySelector));
}
Example 12
Source File: Keys.java From flink with Apache License 2.0
@Override
public <E> void validateCustomPartitioner(Partitioner<E> partitioner, TypeInformation<E> typeInfo) {

    if (keyFields.size() != 1) {
        throw new InvalidProgramException("Custom partitioners can only be used with keys that have one key field.");
    }

    if (typeInfo == null) {
        // try to extract key type from partitioner
        try {
            typeInfo = TypeExtractor.getPartitionerTypes(partitioner);
        } catch (Throwable t) {
            // best effort check, so we ignore exceptions
        }
    }

    if (typeInfo != null && !(typeInfo instanceof GenericTypeInfo)) {
        // only check type compatibility if type is known and not a generic type
        TypeInformation<?> keyType = keyFields.get(0).getType();
        if (!keyType.equals(typeInfo)) {
            throw new InvalidProgramException("The partitioner is incompatible with the key type. "
                + "Partitioner type: " + typeInfo + " , key type: " + keyType);
        }
    }
}
Example 13
Source File: LiteralValueValidator.java From flink with Apache License 2.0
@Override
protected void validateWithPrefix(String keyPrefix, DescriptorProperties properties) {
    String typeKey = keyPrefix + TYPE;
    properties.validateType(typeKey, true, false);

    // explicit type
    if (properties.containsKey(typeKey)) {
        String valueKey = keyPrefix + VALUE;
        TypeInformation<?> typeInfo = properties.getType(typeKey);
        if (typeInfo.equals(Types.BIG_DEC)) {
            properties.validateBigDecimal(valueKey, false);
        } else if (typeInfo.equals(Types.BOOLEAN)) {
            properties.validateBoolean(valueKey, false);
        } else if (typeInfo.equals(Types.BYTE)) {
            properties.validateByte(valueKey, false);
        } else if (typeInfo.equals(Types.DOUBLE)) {
            properties.validateDouble(valueKey, false);
        } else if (typeInfo.equals(Types.FLOAT)) {
            properties.validateFloat(valueKey, false);
        } else if (typeInfo.equals(Types.INT)) {
            properties.validateInt(valueKey, false);
        } else if (typeInfo.equals(Types.LONG)) {
            properties.validateLong(valueKey, false);
        } else if (typeInfo.equals(Types.SHORT)) {
            properties.validateShort(valueKey, false);
        } else if (typeInfo.equals(Types.STRING)) {
            properties.validateString(valueKey, false);
        } else {
            throw new TableException("Unsupported type '" + typeInfo + "'.");
        }
    }
    // implicit type
    else {
        // do not allow values in top-level
        if (keyPrefix.equals(HierarchyDescriptorValidator.EMPTY_PREFIX)) {
            throw new ValidationException(
                "Literal values with implicit type must not exist in the top level of a hierarchy.");
        }
        properties.validateString(keyPrefix.substring(0, keyPrefix.length() - 1), false);
    }
}
Example 14
Source File: CsvRowDeserializationSchema.java From flink with Apache License 2.0
private static RuntimeConverter createRuntimeConverter(TypeInformation<?> info, boolean ignoreParseErrors) {
    if (info.equals(Types.VOID)) {
        return (node) -> null;
    } else if (info.equals(Types.STRING)) {
        return JsonNode::asText;
    } else if (info.equals(Types.BOOLEAN)) {
        return (node) -> Boolean.valueOf(node.asText().trim());
    } else if (info.equals(Types.BYTE)) {
        return (node) -> Byte.valueOf(node.asText().trim());
    } else if (info.equals(Types.SHORT)) {
        return (node) -> Short.valueOf(node.asText().trim());
    } else if (info.equals(Types.INT)) {
        return (node) -> Integer.valueOf(node.asText().trim());
    } else if (info.equals(Types.LONG)) {
        return (node) -> Long.valueOf(node.asText().trim());
    } else if (info.equals(Types.FLOAT)) {
        return (node) -> Float.valueOf(node.asText().trim());
    } else if (info.equals(Types.DOUBLE)) {
        return (node) -> Double.valueOf(node.asText().trim());
    } else if (info.equals(Types.BIG_DEC)) {
        return (node) -> new BigDecimal(node.asText().trim());
    } else if (info.equals(Types.BIG_INT)) {
        return (node) -> new BigInteger(node.asText().trim());
    } else if (info.equals(Types.SQL_DATE)) {
        return (node) -> Date.valueOf(node.asText());
    } else if (info.equals(Types.SQL_TIME)) {
        return (node) -> Time.valueOf(node.asText());
    } else if (info.equals(Types.SQL_TIMESTAMP)) {
        return (node) -> Timestamp.valueOf(node.asText());
    } else if (info.equals(Types.LOCAL_DATE)) {
        return (node) -> Date.valueOf(node.asText()).toLocalDate();
    } else if (info.equals(Types.LOCAL_TIME)) {
        return (node) -> Time.valueOf(node.asText()).toLocalTime();
    } else if (info.equals(Types.LOCAL_DATE_TIME)) {
        return (node) -> Timestamp.valueOf(node.asText()).toLocalDateTime();
    } else if (info instanceof RowTypeInfo) {
        final RowTypeInfo rowTypeInfo = (RowTypeInfo) info;
        return createRowRuntimeConverter(rowTypeInfo, ignoreParseErrors, false);
    } else if (info instanceof BasicArrayTypeInfo) {
        return createObjectArrayRuntimeConverter(
            ((BasicArrayTypeInfo<?, ?>) info).getComponentInfo(),
            ignoreParseErrors);
    } else if (info instanceof ObjectArrayTypeInfo) {
        return createObjectArrayRuntimeConverter(
            ((ObjectArrayTypeInfo<?, ?>) info).getComponentInfo(),
            ignoreParseErrors);
    } else if (info instanceof PrimitiveArrayTypeInfo &&
            ((PrimitiveArrayTypeInfo) info).getComponentType() == Types.BYTE) {
        return createByteArrayRuntimeConverter(ignoreParseErrors);
    } else {
        throw new RuntimeException("Unsupported type information '" + info + "'.");
    }
}
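A hedged end-to-end sketch of how the converter factory above gets exercised: the row type handed to the schema's builder determines which equals() branch each field takes. The builder API is flink-csv's public entry point; the sample values are illustrative.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.formats.csv.CsvRowDeserializationSchema;
import org.apache.flink.types.Row;

public class CsvConverterDemo {
    public static void main(String[] args) throws Exception {
        // Types.INT and Types.STRING select the Integer and String converters above
        TypeInformation<Row> rowType = Types.ROW(Types.INT, Types.STRING);
        CsvRowDeserializationSchema schema =
            new CsvRowDeserializationSchema.Builder(rowType).build();

        Row row = schema.deserialize("42,hello".getBytes());
        System.out.println(row); // 42,hello
    }
}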
Example 15
Source File: SemanticPropUtil.java From flink with Apache License 2.0
private static void parseNonForwardedFields(SemanticProperties sp, String[] nonForwardedStr,
        TypeInformation<?> inType, TypeInformation<?> outType, int input, boolean skipIncompatibleTypes) {

    if (nonForwardedStr == null) {
        return;
    }

    FieldSet excludedFields = new FieldSet();
    for (String s : nonForwardedStr) {

        // remove white characters
        s = s.replaceAll("\\s", "");

        if (s.equals("")) {
            continue;
        }

        if (!inType.equals(outType)) {
            if (skipIncompatibleTypes) {
                continue;
            } else {
                throw new InvalidSemanticAnnotationException("Non-forwarded fields annotation only allowed for identical input and output types.");
            }
        }

        Matcher matcher = PATTERN_LIST.matcher(s);
        if (!matcher.matches()) {
            throw new InvalidSemanticAnnotationException("Invalid format of non-forwarded fields annotation \"" + s + "\".");
        }

        // process individual fields
        matcher = PATTERN_FIELD.matcher(s);
        while (matcher.find()) {
            String fieldStr = matcher.group();

            try {
                // get and add all flat field positions
                List<FlatFieldDescriptor> inFFDs = getFlatFields(fieldStr, inType);
                for (FlatFieldDescriptor ffd : inFFDs) {
                    excludedFields = excludedFields.addField(ffd.getPosition());
                }
            } catch (InvalidFieldReferenceException ifre) {
                throw new InvalidSemanticAnnotationException("Invalid field reference in non-forwarded fields annotation \"" + fieldStr + "\".", ifre);
            }
        }
    }

    for (int i = 0; i < inType.getTotalFields(); i++) {
        if (!excludedFields.contains(i)) {
            if (sp instanceof SingleInputSemanticProperties) {
                ((SingleInputSemanticProperties) sp).addForwardedField(i, i);
            } else if (sp instanceof DualInputSemanticProperties) {
                ((DualInputSemanticProperties) sp).addForwardedField(input, i, i);
            }
        }
    }
}