Java Code Examples for org.apache.flink.api.java.typeutils.TupleTypeInfo#getArity()

The following examples show how to use org.apache.flink.api.java.typeutils.TupleTypeInfo#getArity(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMinFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMinFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			throw new IndexOutOfBoundsException(
					"MinReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for min-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MinReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 2
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMaxFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMaxFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			// Fixed copy-paste bug: the message previously named "MinReduceFunction".
			throw new IndexOutOfBoundsException(
					"MaxReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for max-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MaxReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 3
Source Project: Flink-CEPplus   File: CsvReader.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 */
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 * @throws IllegalArgumentException if {@code targetType} is not a subclass of {@link Tuple}.
 */
public <T extends Tuple> DataSource<T> tupleType(Class<T> targetType) {
	Preconditions.checkNotNull(targetType, "The target type class must not be null.");
	if (!Tuple.class.isAssignableFrom(targetType)) {
		throw new IllegalArgumentException("The target type must be a subclass of " + Tuple.class.getName());
	}

	// The cast is safe: Tuple subclasses always yield a TupleTypeInfo from the extractor.
	@SuppressWarnings("unchecked")
	TupleTypeInfo<T> typeInfo = (TupleTypeInfo<T>) TypeExtractor.createTypeInfo(targetType);
	CsvInputFormat<T> inputFormat = new TupleCsvInputFormat<>(path, this.lineDelimiter, this.fieldDelimiter, typeInfo, this.includedMask);

	// NOTE(review): removed a Class<?>[] of per-field classes that was built here but never used.
	configureInputFormat(inputFormat);
	return new DataSource<>(executionContext, inputFormat, typeInfo, Utils.getCallLocationName());
}
 
Example 4
Source Project: flink   File: SelectByMinFunction.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMinFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMinFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			throw new IndexOutOfBoundsException(
					"MinReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for min-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MinReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 5
Source Project: flink   File: SelectByMaxFunction.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMaxFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMaxFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			// Fixed copy-paste bug: the message previously named "MinReduceFunction".
			throw new IndexOutOfBoundsException(
					"MaxReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for max-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MaxReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 6
Source Project: flink   File: CsvReader.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 */
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 * @throws IllegalArgumentException if {@code targetType} is not a subclass of {@link Tuple}.
 */
public <T extends Tuple> DataSource<T> tupleType(Class<T> targetType) {
	Preconditions.checkNotNull(targetType, "The target type class must not be null.");
	if (!Tuple.class.isAssignableFrom(targetType)) {
		throw new IllegalArgumentException("The target type must be a subclass of " + Tuple.class.getName());
	}

	// The cast is safe: Tuple subclasses always yield a TupleTypeInfo from the extractor.
	@SuppressWarnings("unchecked")
	TupleTypeInfo<T> typeInfo = (TupleTypeInfo<T>) TypeExtractor.createTypeInfo(targetType);
	CsvInputFormat<T> inputFormat = new TupleCsvInputFormat<>(path, this.lineDelimiter, this.fieldDelimiter, typeInfo, this.includedMask);

	// NOTE(review): removed a Class<?>[] of per-field classes that was built here but never used.
	configureInputFormat(inputFormat);
	return new DataSource<>(executionContext, inputFormat, typeInfo, Utils.getCallLocationName());
}
 
Example 7
Source Project: flink   File: SelectByMinFunction.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMinFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMinFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			throw new IndexOutOfBoundsException(
					"MinReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for min-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MinReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 8
Source Project: flink   File: SelectByMaxFunction.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Constructor which is overwriting the default constructor.
 * @param type Types of tuple whether to check if given fields are key types.
 * @param fields Array of integers which are used as key for comparison. The order of indexes
 * is regarded in the reduce function. First index has highest priority and last index has
 * least priority.
 */
/**
 * Creates a {@code SelectByMaxFunction} that compares tuples on the given field positions.
 *
 * @param type Type information of the tuple; used to validate that each given field position
 *        exists and refers to a comparable (key) type.
 * @param fields Field positions used as comparison keys, in descending priority: the first
 *        index has the highest priority and the last index the least.
 * @throws IndexOutOfBoundsException if a field position is outside the tuple's arity.
 * @throws IllegalArgumentException if a field's type is not a key (Comparable) type.
 */
public SelectByMaxFunction(TupleTypeInfo<T> type, int... fields) {
	this.fields = fields;

	// Validate every requested key position up front so misuse fails fast at construction.
	for (int field : fields) {
		// The position must lie within the tuple's arity.
		if (field < 0 || field >= type.getArity()) {
			// Fixed copy-paste bug: the message previously named "MinReduceFunction".
			throw new IndexOutOfBoundsException(
					"MaxReduceFunction field position " + field + " is out of range.");
		}

		// Only key (Comparable) types can be used for max-selection.
		if (!type.getTypeAt(field).isKeyType()) {
			throw new IllegalArgumentException(
					"MaxReduceFunction supports only key(Comparable) types.");
		}
	}
}
 
Example 9
Source Project: flink   File: CsvReader.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 */
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 * @throws IllegalArgumentException if {@code targetType} is not a subclass of {@link Tuple}.
 */
public <T extends Tuple> DataSource<T> tupleType(Class<T> targetType) {
	Preconditions.checkNotNull(targetType, "The target type class must not be null.");
	if (!Tuple.class.isAssignableFrom(targetType)) {
		throw new IllegalArgumentException("The target type must be a subclass of " + Tuple.class.getName());
	}

	// The cast is safe: Tuple subclasses always yield a TupleTypeInfo from the extractor.
	@SuppressWarnings("unchecked")
	TupleTypeInfo<T> typeInfo = (TupleTypeInfo<T>) TypeExtractor.createTypeInfo(targetType);
	CsvInputFormat<T> inputFormat = new TupleCsvInputFormat<>(path, this.lineDelimiter, this.fieldDelimiter, typeInfo, this.includedMask);

	// NOTE(review): removed a Class<?>[] of per-field classes that was built here but never used.
	configureInputFormat(inputFormat);
	return new DataSource<>(executionContext, inputFormat, typeInfo, Utils.getCallLocationName());
}