Java Code Examples for org.apache.flink.api.java.typeutils.RowTypeInfo#getFieldTypes()

The following examples show how to use org.apache.flink.api.java.typeutils.RowTypeInfo#getFieldTypes() . Each example is taken from an open-source project; the source file and license are noted above the code.
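Before the project examples, here is a minimal sketch of the API itself (the field names and types are illustrative, not taken from any project below): getFieldNames() and getFieldTypes() return parallel arrays describing the row's fields in positional order.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

// A two-field row type: (id: INT, name: STRING).
RowTypeInfo rowType = new RowTypeInfo(
	new TypeInformation<?>[] { Types.INT, Types.STRING },
	new String[] { "id", "name" });

// Parallel arrays: fieldTypes[i] describes the field named fieldNames[i].
TypeInformation<?>[] fieldTypes = rowType.getFieldTypes();
String[] fieldNames = rowType.getFieldNames();
for (int i = 0; i < rowType.getArity(); i++) {
	System.out.println(fieldNames[i] + ": " + fieldTypes[i]);
}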
Example 1
Source File: JsonRowDeserializationSchema.java    From Flink-CEPplus with Apache License 2.0
private Row convertRow(JsonNode node, RowTypeInfo info) {
	final String[] names = info.getFieldNames();
	final TypeInformation<?>[] types = info.getFieldTypes();

	final Row row = new Row(names.length);
	for (int i = 0; i < names.length; i++) {
		final String name = names[i];
		final JsonNode subNode = node.get(name);
		if (subNode == null) {
			if (failOnMissingField) {
				throw new IllegalStateException(
					"Could not find field with name '" + name + "'.");
			} else {
				row.setField(i, null);
			}
		} else {
			row.setField(i, convert(subNode, types[i]));
		}
	}

	return row;
}
 
Example 2
Source File: HBaseTableFactory.java    From flink with Apache License 2.0
private HBaseTableSchema validateTableSchema(TableSchema schema) {
	HBaseTableSchema hbaseSchema = new HBaseTableSchema();
	String[] fieldNames = schema.getFieldNames();
	TypeInformation[] fieldTypes = schema.getFieldTypes();
	for (int i = 0; i < fieldNames.length; i++) {
		String name = fieldNames[i];
		TypeInformation<?> type = fieldTypes[i];
		if (type instanceof RowTypeInfo) {
			RowTypeInfo familyType = (RowTypeInfo) type;
			String[] qualifierNames = familyType.getFieldNames();
			TypeInformation[] qualifierTypes = familyType.getFieldTypes();
			for (int j = 0; j < familyType.getArity(); j++) {
				hbaseSchema.addColumn(name, qualifierNames[j], qualifierTypes[j].getTypeClass());
			}
		} else {
			hbaseSchema.setRowKey(name, type.getTypeClass());
		}
	}
	return hbaseSchema;
}
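For context, the schema shape this validator expects (an atomic field for the row key, plus one row-typed field per HBase column family) could be declared as follows. This is a hypothetical schema using the Flink 1.8-era TypeInformation-based TableSchema builder, not code from the connector itself.

TableSchema schema = TableSchema.builder()
	// Atomic field: treated as the row key by validateTableSchema().
	.field("rowkey", Types.STRING)
	// Row-typed field: treated as column family "f1" with qualifiers "q1" and "q2".
	.field("f1", Types.ROW_NAMED(
		new String[] { "q1", "q2" }, Types.STRING, Types.INT))
	.build();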
 
Example 3
Source File: ParquetTableSource.java    From flink with Apache License 2.0
private ParquetTableSource(String path, MessageType parquetSchema, Configuration configuration,
								boolean recursiveEnumeration, @Nullable int[] selectedFields, @Nullable FilterPredicate predicate) {
	Preconditions.checkNotNull(path, "Path must not be null.");
	Preconditions.checkNotNull(parquetSchema, "ParquetSchema must not be null.");
	Preconditions.checkNotNull(configuration, "Configuration must not be null.");
	this.path = path;
	this.parquetSchema = parquetSchema;
	this.parquetConfig = configuration;
	this.selectedFields = selectedFields;
	this.predicate = predicate;
	this.recursiveEnumeration = recursiveEnumeration;

	if (predicate != null) {
		this.isFilterPushedDown = true;
	}
	// determine the type information from the Parquet schema
	RowTypeInfo typeInfoFromSchema = (RowTypeInfo) ParquetSchemaConverter.fromParquetType(parquetSchema);

	// set return type info
	if (selectedFields == null) {
		this.typeInfo = typeInfoFromSchema;
	} else {
		this.typeInfo = RowTypeInfo.projectFields(typeInfoFromSchema, selectedFields);
	}

	// create a TableSchema that corresponds to the Parquet schema
	this.tableSchema = new TableSchema(
		typeInfoFromSchema.getFieldNames(),
		typeInfoFromSchema.getFieldTypes()
	);
}
 
Example 4
Source File: CsvRowSerializationSchema.java    From flink with Apache License 2.0
private static RuntimeConverter createRowRuntimeConverter(RowTypeInfo rowTypeInfo, boolean isTopLevel) {
	final TypeInformation[] fieldTypes = rowTypeInfo.getFieldTypes();
	final String[] fieldNames = rowTypeInfo.getFieldNames();

	final RuntimeConverter[] fieldConverters = createFieldRuntimeConverters(fieldTypes);

	return assembleRowRuntimeConverter(isTopLevel, fieldNames, fieldConverters);
}
 
Example 5
Source File: ParquetInputFormat.java    From flink with Apache License 2.0
/**
 * Reads Parquet files with the given Parquet file schema.
 *
 * @param path The path of the file to read.
 * @param messageType The schema of the Parquet file.
 */
protected ParquetInputFormat(Path path, MessageType messageType) {
	super(path);
	this.expectedFileSchema = checkNotNull(messageType, "messageType");
	RowTypeInfo rowTypeInfo = (RowTypeInfo) ParquetSchemaConverter.fromParquetType(expectedFileSchema);
	this.fieldTypes = rowTypeInfo.getFieldTypes();
	this.fieldNames = rowTypeInfo.getFieldNames();
	// read whole parquet file as one file split
	this.unsplittable = true;
}
 
Example 6
Source File: CsvRowSchemaConverter.java    From flink with Apache License 2.0
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
	final Builder builder = new CsvSchema.Builder();
	final String[] fields = rowType.getFieldNames();
	final TypeInformation<?>[] types = rowType.getFieldTypes();
	for (int i = 0; i < rowType.getArity(); i++) {
		builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
	}
	return builder.build();
}
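A possible call site for the converter above; the row type here is illustrative:

RowTypeInfo rowType = new RowTypeInfo(
	new TypeInformation<?>[] { Types.LONG, Types.STRING },
	new String[] { "user_id", "user_name" });
// Produces a Jackson CsvSchema with one typed column per row field.
CsvSchema csvSchema = CsvRowSchemaConverter.convert(rowType);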
 
Example 7
Source File: HBaseTableFactory.java    From flink with Apache License 2.0
private HBaseTableSchema validateTableSchema(TableSchema schema) {
	HBaseTableSchema hbaseSchema = new HBaseTableSchema();
	String[] fieldNames = schema.getFieldNames();
	TypeInformation[] fieldTypes = schema.getFieldTypes();
	for (int i = 0; i < fieldNames.length; i++) {
		String name = fieldNames[i];
		TypeInformation<?> type = fieldTypes[i];
		if (type instanceof RowTypeInfo) {
			RowTypeInfo familyType = (RowTypeInfo) type;
			String[] qualifierNames = familyType.getFieldNames();
			TypeInformation[] qualifierTypes = familyType.getFieldTypes();
			for (int j = 0; j < familyType.getArity(); j++) {
				// HBase connector doesn't support LocalDateTime
				// use Timestamp as conversion class for now.
				Class clazz = qualifierTypes[j].getTypeClass();
				if (LocalDateTime.class.equals(clazz)) {
					clazz = Timestamp.class;
				} else if (LocalDate.class.equals(clazz)) {
					clazz = Date.class;
				} else if (LocalTime.class.equals(clazz)) {
					clazz = Time.class;
				}
				hbaseSchema.addColumn(name, qualifierNames[j], clazz);
			}
		} else {
			hbaseSchema.setRowKey(name, type.getTypeClass());
		}
	}
	return hbaseSchema;
}
 
Example 8
Source File: AvroRowDeserializationSchema.java    From flink with Apache License 2.0
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
	final List<Schema.Field> fields = schema.getFields();
	final TypeInformation<?>[] fieldInfo = typeInfo.getFieldTypes();
	final int length = fields.size();
	final Row row = new Row(length);
	for (int i = 0; i < length; i++) {
		final Schema.Field field = fields.get(i);
		row.setField(i, convertAvroType(field.schema(), fieldInfo[i], record.get(i)));
	}
	return row;
}
 
Example 9
Source File: OrcTableSource.java    From Flink-CEPplus with Apache License 2.0
private OrcTableSource(String path, TypeDescription orcSchema, Configuration orcConfig,
						int batchSize, boolean recursiveEnumeration,
						int[] selectedFields, Predicate[] predicates) {

	Preconditions.checkNotNull(path, "Path must not be null.");
	Preconditions.checkNotNull(orcSchema, "OrcSchema must not be null.");
	Preconditions.checkNotNull(orcConfig, "Configuration must not be null.");
	Preconditions.checkArgument(batchSize > 0, "Batch size must be larger than zero.");
	this.path = path;
	this.orcSchema = orcSchema;
	this.orcConfig = orcConfig;
	this.batchSize = batchSize;
	this.recursiveEnumeration = recursiveEnumeration;
	this.selectedFields = selectedFields;
	this.predicates = predicates;

	// determine the type information from the ORC schema
	RowTypeInfo typeInfoFromSchema = (RowTypeInfo) OrcBatchReader.schemaToTypeInfo(this.orcSchema);

	// set return type info
	if (selectedFields == null) {
		this.typeInfo = typeInfoFromSchema;
	} else {
		this.typeInfo = RowTypeInfo.projectFields(typeInfoFromSchema, selectedFields);
	}

	// create a TableSchema that corresponds to the ORC schema
	this.tableSchema = new TableSchema(
		typeInfoFromSchema.getFieldNames(),
		typeInfoFromSchema.getFieldTypes()
	);
}
 
Example 10
Source File: OrcTableSource.java    From flink with Apache License 2.0
private OrcTableSource(String path, TypeDescription orcSchema, Configuration orcConfig,
						int batchSize, boolean recursiveEnumeration,
						int[] selectedFields, Predicate[] predicates) {

	Preconditions.checkNotNull(path, "Path must not be null.");
	Preconditions.checkNotNull(orcSchema, "OrcSchema must not be null.");
	Preconditions.checkNotNull(orcConfig, "Configuration must not be null.");
	Preconditions.checkArgument(batchSize > 0, "Batch size must be larger than zero.");
	this.path = path;
	this.orcSchema = orcSchema;
	this.orcConfig = orcConfig;
	this.batchSize = batchSize;
	this.recursiveEnumeration = recursiveEnumeration;
	this.selectedFields = selectedFields;
	this.predicates = predicates;

	// determine the type information from the ORC schema
	RowTypeInfo typeInfoFromSchema = (RowTypeInfo) OrcBatchReader.schemaToTypeInfo(this.orcSchema);

	// set return type info
	if (selectedFields == null) {
		this.typeInfo = typeInfoFromSchema;
	} else {
		this.typeInfo = RowTypeInfo.projectFields(typeInfoFromSchema, selectedFields);
	}

	// create a TableSchema that corresponds to the ORC schema
	this.tableSchema = new TableSchema(
		typeInfoFromSchema.getFieldNames(),
		typeInfoFromSchema.getFieldTypes()
	);
}
 
Example 11
Source File: AvroRowDeserializationSchema.java    From Flink-CEPplus with Apache License 2.0
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
	final List<Schema.Field> fields = schema.getFields();
	final TypeInformation<?>[] fieldInfo = typeInfo.getFieldTypes();
	final int length = fields.size();
	final Row row = new Row(length);
	for (int i = 0; i < length; i++) {
		final Schema.Field field = fields.get(i);
		row.setField(i, convertAvroType(field.schema(), fieldInfo[i], record.get(i)));
	}
	return row;
}
 
Example 12
Source File: CsvRowSerializationSchema.java    From Flink-CEPplus with Apache License 2.0
private static RuntimeConverter createRowRuntimeConverter(RowTypeInfo rowTypeInfo, boolean isTopLevel) {
	final TypeInformation[] fieldTypes = rowTypeInfo.getFieldTypes();
	final String[] fieldNames = rowTypeInfo.getFieldNames();

	final RuntimeConverter[] fieldConverters = createFieldRuntimeConverters(fieldTypes);

	return assembleRowRuntimeConverter(isTopLevel, fieldNames, fieldConverters);
}
 
Example 13
Source File: CsvRowDeserializationSchema.java    From Flink-CEPplus with Apache License 2.0
private static RuntimeConverter createRowRuntimeConverter(
		RowTypeInfo rowTypeInfo,
		boolean ignoreParseErrors,
		boolean isTopLevel) {
	final TypeInformation<?>[] fieldTypes = rowTypeInfo.getFieldTypes();
	final String[] fieldNames = rowTypeInfo.getFieldNames();

	final RuntimeConverter[] fieldConverters =
		createFieldRuntimeConverters(ignoreParseErrors, fieldTypes);

	return assembleRowRuntimeConverter(ignoreParseErrors, isTopLevel, fieldNames, fieldConverters);
}
 
Example 14
Source File: CsvRowSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
	final Builder builder = new CsvSchema.Builder();
	final String[] fields = rowType.getFieldNames();
	final TypeInformation<?>[] types = rowType.getFieldTypes();
	for (int i = 0; i < rowType.getArity(); i++) {
		builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
	}
	return builder.build();
}
 
Example 15
Source File: MockTableSource.java    From AthenaX with Apache License 2.0
MockTableSource(List<Row> data, RowTypeInfo type) {
  this.data = data;
  this.type = type;
  this.schema = new TableSchema(type.getFieldNames(), type.getFieldTypes());
}
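A usage sketch for the constructor above; the row data and field layout are made up for illustration:

RowTypeInfo type = new RowTypeInfo(
	new TypeInformation<?>[] { Types.INT, Types.STRING },
	new String[] { "id", "name" });
// Build one test row matching the declared (INT, STRING) layout.
Row row = new Row(2);
row.setField(0, 1);
row.setField(1, "foo");
MockTableSource source = new MockTableSource(Collections.singletonList(row), type);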