org.apache.flink.api.java.tuple.Tuple Java Examples

The following examples show how to use org.apache.flink.api.java.tuple.Tuple, drawn from the open-source projects named above each example.
Example #1
Source File: CassandraSink.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN>  input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
	TypeInformation<IN> typeInfo = input.getType();
	if (typeInfo instanceof TupleTypeInfo) {
		DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
		return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(tupleInput, tupleInput.getType(), tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof RowTypeInfo) {
		DataStream<Row> rowInput = (DataStream<Row>) input;
		return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(rowInput, rowInput.getType(), rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof PojoTypeInfo) {
		return new CassandraPojoSinkBuilder<>(input, input.getType(), input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof CaseClassTypeInfo) {
		DataStream<Product> productInput = (DataStream<Product>) input;
		return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(productInput, productInput.getType(), productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
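A minimal sketch of how the returned builder is typically chained for a tuple stream; the host, keyspace, and query below are illustrative, while setQuery(), setHost(), and build() are part of the CassandraSinkBuilder API:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Integer>> stream = env.fromElements(Tuple2.of("key", 1));

// addSink() sees the TupleTypeInfo and hands back a CassandraTupleSinkBuilder.
CassandraSink.addSink(stream)
	.setQuery("INSERT INTO example.counts (id, counter) VALUES (?, ?);")
	.setHost("127.0.0.1")
	.build();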
 
Example #2
Source File: DataSetUtils.java    From flink with Apache License 2.0
/**
 * Summarize a DataSet of Tuples by collecting single pass statistics for all columns.
 *
 * <p>Example usage:
 * <pre>
 * {@code
 * DataSet<Tuple3<Double, String, Boolean>> input = // [...]
 * Tuple3<NumericColumnSummary, StringColumnSummary, BooleanColumnSummary> summary = DataSetUtils.summarize(input);
 *
 * summary.f0.getStandardDeviation()
 * summary.f1.getMaxLength()
 * }
 * </pre>
 * @return the summary as a Tuple of the same width as the input rows
 */
public static <R extends Tuple, T extends Tuple> R summarize(DataSet<T> input) throws Exception {
	if (!input.getType().isTupleType()) {
		throw new IllegalArgumentException("summarize() is only implemented for DataSet's of Tuples");
	}
	final TupleTypeInfoBase<?> inType = (TupleTypeInfoBase<?>) input.getType();
	DataSet<TupleSummaryAggregator<R>> result = input.mapPartition(new MapPartitionFunction<T, TupleSummaryAggregator<R>>() {
		@Override
		public void mapPartition(Iterable<T> values, Collector<TupleSummaryAggregator<R>> out) throws Exception {
			TupleSummaryAggregator<R> aggregator = SummaryAggregatorFactory.create(inType);
			for (Tuple value : values) {
				aggregator.aggregate(value);
			}
			out.collect(aggregator);
		}
	}).reduce(new ReduceFunction<TupleSummaryAggregator<R>>() {
		@Override
		public TupleSummaryAggregator<R> reduce(TupleSummaryAggregator<R> agg1, TupleSummaryAggregator<R> agg2) throws Exception {
			agg1.combine(agg2);
			return agg1;
		}
	});
	return result.collect().get(0).result();
}
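For reference, a runnable sketch of calling summarize() on a small DataSet; the column types and values are illustrative:

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSet<Tuple2<Double, String>> input = env.fromElements(
	Tuple2.of(1.0, "a"),
	Tuple2.of(3.0, "bbb"));

// The result tuple has the same width as the input rows: one summary per column.
Tuple2<NumericColumnSummary<Double>, StringColumnSummary> summary = DataSetUtils.summarize(input);

System.out.println(summary.f0.getMean());      // 2.0
System.out.println(summary.f1.getMaxLength()); // 3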
 
Example #3
Source File: JoinOperator.java    From Flink-CEPplus with Apache License 2.0
public void join(T1 in1, T2 in2, Collector<R> out) {
	for (int i = 0; i < fields.length; i++) {
		if (isFromFirst[i]) {
			if (fields[i] >= 0 && in1 != null) {
				outTuple.setField(((Tuple) in1).getField(fields[i]), i);
			} else {
				outTuple.setField(in1, i);
			}
		} else {
			if (fields[i] >= 0 && in2 != null) {
				outTuple.setField(((Tuple) in2).getField(fields[i]), i);
			} else {
				outTuple.setField(in2, i);
			}
		}
	}
	out.collect(outTuple);
}
 
Example #4
Source File: DeeplyEqualsChecker.java    From flink with Apache License 2.0
private boolean deepEqualsTuple(Tuple tuple1, Tuple tuple2) {
	if (tuple1.getArity() != tuple2.getArity()) {
		return false;
	}

	for (int i = 0; i < tuple1.getArity(); i++) {
		Object o1 = tuple1.getField(i);
		Object o2 = tuple2.getField(i);

		if (!deepEquals(o1, o2)) {
			return false;
		}
	}

	return true;
}
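A tiny illustration of why this helper exists: Tuple's own equals() delegates to the fields' equals(), so fields holding arrays compare by reference rather than by content:

Tuple2<Integer, int[]> a = Tuple2.of(1, new int[]{1, 2});
Tuple2<Integer, int[]> b = Tuple2.of(1, new int[]{1, 2});

// int[] inherits Object.equals() (reference equality), so this prints false
// even though both tuples hold equal contents.
System.out.println(a.equals(b));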
 
Example #5
Source File: TripDurationToAverageTripDuration.java    From amazon-kinesis-analytics-taxi-consumer with Apache License 2.0
@Override
public void apply(Tuple tuple, TimeWindow timeWindow, Iterable<TripDuration> iterable, Collector<AverageTripDuration> collector) {
  if (Iterables.size(iterable) > 1) {
    String location = Iterables.get(iterable, 0).pickupGeoHash;
    String airportCode = Iterables.get(iterable, 0).airportCode;

    long sumDuration = StreamSupport
        .stream(iterable.spliterator(), false)
        .mapToLong(trip -> trip.tripDuration)
        .sum();

    double avgDuration = (double) sumDuration / Iterables.size(iterable);

    collector.collect(new AverageTripDuration(location, airportCode, sumDuration, avgDuration, timeWindow.getEnd()));
  }
}
 
Example #6
Source File: DataStream.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes a DataStream to the file specified by the path parameter.
 *
 * <p>For every field of an element of the DataStream the result of {@link Object#toString()}
 * is written. This method can only be used on data streams of tuples.
 *
 * @param path
 *            the path pointing to the location the text file is written to
 * @param writeMode
 *            Controls the behavior for existing files. Options are
 *            NO_OVERWRITE and OVERWRITE.
 * @param rowDelimiter
 *            the delimiter for two rows
 * @param fieldDelimiter
 *            the delimiter for two fields
 *
 * @return the closed DataStream
 */
@SuppressWarnings("unchecked")
@PublicEvolving
public <X extends Tuple> DataStreamSink<T> writeAsCsv(
		String path,
		WriteMode writeMode,
		String rowDelimiter,
		String fieldDelimiter) {
	Preconditions.checkArgument(
		getType().isTupleType(),
		"The writeAsCsv() method can only be used on data streams of tuples.");

	CsvOutputFormat<X> of = new CsvOutputFormat<>(
		new Path(path),
		rowDelimiter,
		fieldDelimiter);

	if (writeMode != null) {
		of.setWriteMode(writeMode);
	}

	return writeUsingOutputFormat((OutputFormat<T>) of);
}
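A short usage sketch for this overload; the path and delimiters are illustrative, and the element type must be a Tuple subclass as the precondition above enforces:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Integer>> counts = env.fromElements(
	Tuple2.of("a", 1),
	Tuple2.of("b", 2));

// One line per tuple, fields separated by ';', existing files overwritten.
counts.writeAsCsv("file:///tmp/counts.csv", WriteMode.OVERWRITE, "\n", ";");

env.execute("write csv");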
 
Example #7
Source File: TupleSummaryAggregator.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public R result() {
	try {
		Class tupleClass = Tuple.getTupleClass(columnAggregators.length);
		R tuple = (R) tupleClass.newInstance();
		for (int i = 0; i < columnAggregators.length; i++) {
			tuple.setField(columnAggregators[i].result(), i);
		}
		return tuple;
	}
	catch (InstantiationException | IllegalAccessException e) {
		throw new RuntimeException("Unexpected error instantiating Tuple class for aggregation results", e);

	}
}
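The reflective instantiation above relies on the public Tuple API: Tuple.getTupleClass(n) maps an arity to the concrete TupleN class, and setField() populates it by position. A minimal sketch:

public static Tuple buildDynamically() throws InstantiationException, IllegalAccessException {
	// Pick the concrete tuple class for arity 3 and fill it field by field.
	Class<? extends Tuple> tupleClass = Tuple.getTupleClass(3);
	Tuple tuple = tupleClass.newInstance();
	tuple.setField("name", 0);
	tuple.setField(42, 1);
	tuple.setField(true, 2);
	return tuple; // prints as (name,42,true)
}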
 
Example #8
Source File: KafkaShuffleTestBase.java    From flink with Apache License 2.0
static KeyedStream<Tuple3<Integer, Long, Integer>, Tuple> createKafkaShuffle(
		StreamExecutionEnvironment env,
		String topic,
		int numElementsPerProducer,
		int producerParallelism,
		TimeCharacteristic timeCharacteristic,
		int numberOfPartitions) {
	return createKafkaShuffle(
		env,
		topic,
		numElementsPerProducer,
		producerParallelism,
		timeCharacteristic,
		numberOfPartitions,
		false);
}
 
Example #9
Source File: WordCloudApply.java    From wingcloud with Apache License 2.0
@Override
public void apply(Tuple tuple, TimeWindow window, Iterable<Tuple2<Long, String>> input, Collector<String> out) throws Exception {
    String shoptype = tuple.getField(0).toString();

    Iterator<Tuple2<Long, String>> it = input.iterator();

    // counter
    Long count = 0L;
    while (it.hasNext()) {
        it.next();
        count++;
    }
    System.err.println(Thread.currentThread().getId() + " [word cloud] window fired, record count: " + count);
    // assemble the result
    Tuple2<String, Long> res = new Tuple2<>(shoptype, count);
    String resstring = "{\"shoptype\":\"" + shoptype + "\",\"count\":\"" + count + "\"}";
    System.err.println("[word cloud] window data: " + res);
    out.collect(resstring);
}
 
Example #10
Source File: ProjectOperator.java    From flink with Apache License 2.0
public Projection(DataSet<T> ds, int[] fieldIndexes) {

	if (!(ds.getType() instanceof TupleTypeInfo)) {
		throw new UnsupportedOperationException("project() can only be applied to DataSets of Tuples.");
	}

	if (fieldIndexes.length == 0) {
		throw new IllegalArgumentException("project() needs to select at least one (1) field.");
	} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
		throw new IllegalArgumentException(
			"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
	}

	int maxFieldIndex = ds.getType().getArity();
	for (int fieldIndex : fieldIndexes) {
		Preconditions.checkElementIndex(fieldIndex, maxFieldIndex);
	}

	this.ds = ds;
	this.fieldIndexes = fieldIndexes;
}
 
Example #11
Source File: KeySelectorUtil.java    From flink with Apache License 2.0
public static <X> ArrayKeySelector<X> getSelectorForArray(int[] positions, TypeInformation<X> typeInfo) {
	if (positions == null || positions.length == 0 || positions.length > Tuple.MAX_ARITY) {
		throw new IllegalArgumentException("Array keys must have between 1 and " + Tuple.MAX_ARITY + " fields.");
	}

	TypeInformation<?> componentType;

	if (typeInfo instanceof BasicArrayTypeInfo) {
		BasicArrayTypeInfo<X, ?>  arrayInfo = (BasicArrayTypeInfo<X, ?>) typeInfo;
		componentType = arrayInfo.getComponentInfo();
	}
	else if (typeInfo instanceof PrimitiveArrayTypeInfo) {
		PrimitiveArrayTypeInfo<X> arrayType = (PrimitiveArrayTypeInfo<X>) typeInfo;
		componentType = arrayType.getComponentType();
	}
	else {
		throw new IllegalArgumentException("This method only supports arrays of primitives and boxed primitives.");
	}

	TypeInformation<?>[] primitiveInfos = new TypeInformation<?>[positions.length];
	Arrays.fill(primitiveInfos, componentType);

	return new ArrayKeySelector<>(positions, new TupleTypeInfo<>(primitiveInfos));
}
 
Example #12
Source File: DriverTestData.java    From flink with Apache License 2.0
public static final void compareTupleArrays(Object[] expected, Object[] found) {
	if (expected.length != found.length) {
		Assert.assertEquals("Length of result is wrong", expected.length, found.length);
	}
	
	for (int i = 0; i < expected.length; i++) {
		Tuple v1 = (Tuple) expected[i];
		Tuple v2 = (Tuple) found[i];
		
		for (int k = 0; k < v1.getArity(); k++) {
			Object o1 = v1.getField(k);
			Object o2 = v2.getField(k);
			Assert.assertEquals(o1, o2);
		}
	}
}
 
Example #13
Source File: StreamProjection.java    From Flink-CEPplus with Apache License 2.0
protected StreamProjection(DataStream<IN> dataStream, int[] fieldIndexes) {
	if (!dataStream.getType().isTupleType()) {
		throw new RuntimeException("Only Tuple DataStreams can be projected");
	}
	if (fieldIndexes.length == 0) {
		throw new IllegalArgumentException("project() needs to select at least one (1) field.");
	} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
		throw new IllegalArgumentException(
				"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
	}

	int maxFieldIndex = (dataStream.getType()).getArity();
	for (int i = 0; i < fieldIndexes.length; i++) {
		Preconditions.checkElementIndex(fieldIndexes[i], maxFieldIndex);
	}

	this.dataStream = dataStream;
	this.fieldIndexes = fieldIndexes;
}
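StreamProjection is normally reached through DataStream.project(); a brief sketch with illustrative field indexes and types:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple3<Integer, String, Double>> in = env.fromElements(Tuple3.of(1, "a", 2.0));

// Select fields 2 and 0, in that order; the result is a stream of Tuple2<Double, Integer>.
DataStream<Tuple2<Double, Integer>> projected = in.project(2, 0);
projected.print();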
 
Example #14
Source File: SessionWindowITCase.java    From Flink-CEPplus with Apache License 2.0
private void runTest(
		SourceFunction<SessionEvent<Integer, TestEventPayload>> dataSource,
		WindowFunction<SessionEvent<Integer, TestEventPayload>,
				String, Tuple, TimeWindow> windowFunction) throws Exception {

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
	WindowedStream<SessionEvent<Integer, TestEventPayload>, Tuple, TimeWindow> windowedStream =
			env.addSource(dataSource).keyBy("sessionKey")
			.window(EventTimeSessionWindows.withGap(Time.milliseconds(MAX_SESSION_EVENT_GAP_MS)));

	if (ALLOWED_LATENESS_MS != Long.MAX_VALUE) {
		windowedStream = windowedStream.allowedLateness(Time.milliseconds(ALLOWED_LATENESS_MS));
	}

	if (PURGE_WINDOW_ON_FIRE) {
		windowedStream = windowedStream.trigger(PurgingTrigger.of(EventTimeTrigger.create()));
	}

	windowedStream.apply(windowFunction).print();
	JobExecutionResult result = env.execute();

	// Check that overall event counts match our expectations. Remember that late events within the
	// allowed lateness will each trigger a window!
	Assert.assertEquals(
		(LATE_EVENTS_PER_SESSION + 1) * NUMBER_OF_SESSIONS * EVENTS_PER_SESSION,
		(long) result.getAccumulatorResult(SESSION_COUNTER_ON_TIME_KEY));
	Assert.assertEquals(
		NUMBER_OF_SESSIONS * (LATE_EVENTS_PER_SESSION * (LATE_EVENTS_PER_SESSION + 1) / 2),
		(long) result.getAccumulatorResult(SESSION_COUNTER_LATE_KEY));
}
 
Example #15
Source File: UdfStreamOperatorCheckpointingITCase.java    From flink with Apache License 2.0
/**
 * Assembles a stream of a grouping field and some long data. Applies reduce functions
 * on this stream.
 */
@Override
public void testProgram(StreamExecutionEnvironment env) {

	// base stream
	KeyedStream<Tuple2<Integer, Long>, Tuple> stream = env.addSource(new StatefulMultipleSequence())
			.keyBy(0);

	stream
			// testing built-in aggregate
			.min(1)
			// failure generation
			.map(new OnceFailingIdentityMapFunction(NUM_INPUT))
			.keyBy(0)
			.addSink(new MinEvictingQueueSink());

	stream
			// testing UDF reducer
			.reduce(new ReduceFunction<Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> reduce(
						Tuple2<Integer, Long> value1, Tuple2<Integer, Long> value2) throws Exception {
					return Tuple2.of(value1.f0, value1.f1 + value2.f1);
				}
			})
			.keyBy(0)
			.addSink(new SumEvictingQueueSink());

	stream
			// testing UDF folder
			.fold(Tuple2.of(0, 0L), new FoldFunction<Tuple2<Integer, Long>, Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> fold(
						Tuple2<Integer, Long> accumulator, Tuple2<Integer, Long> value) throws Exception {
					return Tuple2.of(value.f0, accumulator.f1 + value.f1);
				}
			})
			.keyBy(0)
			.addSink(new FoldEvictingQueueSink());
}
 
Example #16
Source File: FieldsFromTuple.java    From Flink-CEPplus with Apache License 2.0
@Override
public double[] extract(Tuple in) {
	double[] out = new double[indexes.length];
	for (int i = 0; i < indexes.length; i++) {
		out[i] = (Double) in.getField(indexes[i]);
	}
	return out;
}
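Usage sketch: the extractor is constructed with the positions of the double-valued fields to pull out (the indexes and tuple below are illustrative):

// Fields 0 and 2 hold doubles; field 1 is skipped.
FieldsFromTuple extractor = new FieldsFromTuple(0, 2);
double[] values = extractor.extract(Tuple3.of(1.0, "label", 3.5));
// values == {1.0, 3.5}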
 
Example #17
Source File: SemanticPropertiesProjectionTest.java    From flink with Apache License 2.0
@Test
public void testProjectionSemProps2() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple4<Integer, Tuple3<String, Integer, Long>, Tuple2<Long, Long>, String>> tupleDs = env.fromCollection(emptyNestedTupleData, nestedTupleTypeInfo);

	tupleDs.project(2, 3, 1, 2).output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	PlanProjectOperator<?, ?> projectOperator = ((PlanProjectOperator<?, ?>) sink.getInput());

	SingleInputSemanticProperties props = projectOperator.getSemanticProperties();

	assertNotNull(props.getForwardingTargetFields(0, 0));
	assertEquals(1, props.getForwardingTargetFields(0, 1).size());
	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(2, props.getForwardingTargetFields(0, 4).size());
	assertEquals(2, props.getForwardingTargetFields(0, 5).size());
	assertEquals(1, props.getForwardingTargetFields(0, 6).size());
	assertEquals(0, props.getForwardingTargetFields(0, 0).size());

	assertTrue(props.getForwardingTargetFields(0, 4).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(1));
	assertTrue(props.getForwardingTargetFields(0, 6).contains(2));
	assertTrue(props.getForwardingTargetFields(0, 1).contains(3));
	assertTrue(props.getForwardingTargetFields(0, 2).contains(4));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(5));
	assertTrue(props.getForwardingTargetFields(0, 4).contains(6));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(7));
}
 
Example #18
Source File: CSV.java    From flink with Apache License 2.0
@Override
public void write(String executionName, PrintStream out, DataSet<T> data) throws Exception {
	if (Tuple.class.isAssignableFrom(data.getType().getTypeClass())) {
		data
			.writeAsCsv(filename.getValue(), lineDelimiter.getValue(), fieldDelimiter.getValue())
				.name("CSV: " + filename.getValue());
	} else {
		// line and field delimiters are ineffective when writing custom POJOs result types
		data
			.writeAsText(filename.getValue())
				.name("CSV: " + filename.getValue());
	}

	data.getExecutionEnvironment().execute();
}
 
Example #19
Source File: LargeRecordHandler.java    From flink with Apache License 2.0
@Override
public T next() throws IOException {
	Tuple value = tupleInput.next(this.value);
	if (value != null) {
		this.value = value;
		long pointer = value.<Long>getField(pointerPos);

		recordsInputs.seek(pointer);
		return serializer.deserialize(recordsInputs);
	} else {
		return null;
	}
}
 
Example #20
Source File: JoinOperatorTest.java    From flink with Apache License 2.0
@Test
public void testSemanticPropsWithKeySelector3() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> tupleDs1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> tupleDs2 = env.fromCollection(emptyTupleData, tupleTypeInfo);

	JoinOperator<?, ?, ? extends Tuple> joinOp = tupleDs1.join(tupleDs2)
			.where(new DummyTestKeySelector()).equalTo(new DummyTestKeySelector())
			.projectFirst(2)
			.projectSecond(0, 0, 3)
			.projectFirst(0, 4)
			.projectSecond(2);

	SemanticProperties semProps = joinOp.getSemanticProperties();

	assertTrue(semProps.getForwardingTargetFields(0, 0).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(0, 1).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(0, 2).size() == 1);
	assertTrue(semProps.getForwardingTargetFields(0, 2).contains(4));
	assertTrue(semProps.getForwardingTargetFields(0, 3).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(0, 4).size() == 1);
	assertTrue(semProps.getForwardingTargetFields(0, 4).contains(0));
	assertTrue(semProps.getForwardingTargetFields(0, 5).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(0, 6).size() == 1);
	assertTrue(semProps.getForwardingTargetFields(0, 6).contains(5));

	assertTrue(semProps.getForwardingTargetFields(1, 0).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(1, 1).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(1, 2).size() == 2);
	assertTrue(semProps.getForwardingTargetFields(1, 2).contains(1));
	assertTrue(semProps.getForwardingTargetFields(1, 2).contains(2));
	assertTrue(semProps.getForwardingTargetFields(1, 3).size() == 0);
	assertTrue(semProps.getForwardingTargetFields(1, 4).size() == 1);
	assertTrue(semProps.getForwardingTargetFields(1, 4).contains(6));
	assertTrue(semProps.getForwardingTargetFields(1, 5).size() == 1);
	assertTrue(semProps.getForwardingTargetFields(1, 5).contains(3));
	assertTrue(semProps.getForwardingTargetFields(1, 6).size() == 0);

}
 
Example #21
Source File: SummaryAggregatorFactory.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
public static <R extends Tuple> TupleSummaryAggregator<R> create(TupleTypeInfoBase<?> inType) {
	Aggregator[] columnAggregators = new Aggregator[inType.getArity()];
	for (int field = 0; field < inType.getArity(); field++) {
		Class clazz = inType.getTypeAt(field).getTypeClass();
		columnAggregators[field] = SummaryAggregatorFactory.create(clazz);
	}
	return new TupleSummaryAggregator<>(columnAggregators);
}
 
Example #22
Source File: TupleSummaryAggregator.java    From Flink-CEPplus with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public void combine(Aggregator<Tuple, R> other) {
	TupleSummaryAggregator tupleSummaryAggregator = (TupleSummaryAggregator) other;
	for (int i = 0; i < columnAggregators.length; i++) {
		columnAggregators[i].combine(tupleSummaryAggregator.columnAggregators[i]);
	}
}
 
Example #23
Source File: SiddhiTupleFactory.java    From bahir-flink with Apache License 2.0
@SuppressWarnings("unchecked")
public static <T extends Tuple> T setTupleValue(Tuple tuple, Object[] row) {
    if (row.length != tuple.getArity()) {
        throw new IllegalArgumentException("Row length" + row.length + " is not equal with tuple's arity: " + tuple.getArity());
    }
    for (int i = 0; i < row.length; i++) {
        tuple.setField(row[i], i);
    }
    return (T) tuple;
}
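Usage sketch: the target tuple's arity must match the row length, or the method throws (the values below are illustrative):

Object[] row = {"order-1", 42, true};
Tuple3<String, Integer, Boolean> tuple =
	SiddhiTupleFactory.setTupleValue(new Tuple3<>(), row);
// tuple == ("order-1", 42, true)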
 
Example #24
Source File: LargeRecordHandler.java    From flink with Apache License 2.0
public FetchingIterator(TypeSerializer<T> serializer, MutableObjectIterator<Tuple> tupleInput,
		SeekableFileChannelInputView recordsInputs, TypeSerializer<Tuple> tupleSerializer, int pointerPos) {
	this.serializer = serializer;
	this.tupleInput = tupleInput;
	this.recordsInputs = recordsInputs;
	this.pointerPos = pointerPos;
	
	this.value = tupleSerializer.createInstance();
}
 
Example #25
Source File: FieldFromTupleTest.java    From flink with Apache License 2.0
@Test
public void testSingleFieldExtraction() throws InstantiationException, IllegalAccessException {
	// extract single fields
	for (int i = 0; i < Tuple.MAX_ARITY; i++) {
		Tuple current = (Tuple) CLASSES[i].newInstance();
		for (int j = 0; j < i; j++) {
			current.setField(testStrings[j], j);
		}
		for (int j = 0; j < i; j++) {
			assertEquals(testStrings[j], new FieldFromTuple<String>(j).extract(current));
		}
	}
}
 
Example #26
Source File: StreamingETL.java    From flink-streaming-etl with Apache License 2.0
@Override
public void apply(Tuple key, TimeWindow timeWindow, Iterable<Tuple2<String, Long>> iterable, Collector<Tuple2<String, Long>> collector) throws Exception {
	long count = 0;
	for (Tuple2<String, Long> e : iterable) {
		count += e.f1;
	}
	collector.collect(Tuple2.of(((Tuple1<String>)key).f0, count));
}
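For context, a sketch of how a WindowFunction with this signature is wired up in the older position-based keying API; the window size and the wrapper class name CountWindowFunction are illustrative. keyBy(0) is what makes the key a Tuple, hence the Tuple1 cast above:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Long>> events = env.fromElements(
	Tuple2.of("a", 1L), Tuple2.of("a", 2L), Tuple2.of("b", 1L));

events
	.keyBy(0)                          // position-based key: the runtime key type is Tuple1<String>
	.timeWindow(Time.minutes(1))
	.apply(new CountWindowFunction())  // hypothetical class holding the apply() above
	.print();

env.execute("windowed count");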
 
Example #27
Source File: ArrayFromTupleTest.java    From flink with Apache License 2.0
@Before
public void init() {
	testStrings = new String[Tuple.MAX_ARITY];
	for (int i = 0; i < Tuple.MAX_ARITY; i++) {
		testStrings[i] = Integer.toString(i);
	}
}
 
Example #28
Source File: SemanticPropertiesProjectionTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProjectionSemProps2() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple4<Integer, Tuple3<String, Integer, Long>, Tuple2<Long, Long>, String>> tupleDs = env.fromCollection(emptyNestedTupleData, nestedTupleTypeInfo);

	tupleDs.project(2, 3, 1, 2).output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	PlanProjectOperator<?, ?> projectOperator = ((PlanProjectOperator<?, ?>) sink.getInput());

	SingleInputSemanticProperties props = projectOperator.getSemanticProperties();

	assertNotNull(props.getForwardingTargetFields(0, 0));
	assertEquals(1, props.getForwardingTargetFields(0, 1).size());
	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(2, props.getForwardingTargetFields(0, 4).size());
	assertEquals(2, props.getForwardingTargetFields(0, 5).size());
	assertEquals(1, props.getForwardingTargetFields(0, 6).size());
	assertEquals(0, props.getForwardingTargetFields(0, 0).size());

	assertTrue(props.getForwardingTargetFields(0, 4).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(1));
	assertTrue(props.getForwardingTargetFields(0, 6).contains(2));
	assertTrue(props.getForwardingTargetFields(0, 1).contains(3));
	assertTrue(props.getForwardingTargetFields(0, 2).contains(4));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(5));
	assertTrue(props.getForwardingTargetFields(0, 4).contains(6));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(7));
}
 
Example #29
Source File: SemanticPropertiesProjectionTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testJoinProjectionSemProps1() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> tupleDs = env.fromCollection(emptyTupleData, tupleTypeInfo);

	tupleDs.join(tupleDs).where(0).equalTo(0)
			.projectFirst(2, 3)
			.projectSecond(1, 4)
			.output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	InnerJoinOperatorBase<?, ?, ?, ?> projectJoinOperator = ((InnerJoinOperatorBase<?, ?, ?, ?>) sink.getInput());

	DualInputSemanticProperties props = projectJoinOperator.getSemanticProperties();

	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(1, props.getForwardingTargetFields(1, 1).size());
	assertEquals(1, props.getForwardingTargetFields(1, 4).size());

	assertTrue(props.getForwardingTargetFields(0, 2).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(1));
	assertTrue(props.getForwardingTargetFields(1, 1).contains(2));
	assertTrue(props.getForwardingTargetFields(1, 4).contains(3));
}