Java Code Examples for org.apache.flink.types.IntValue

The following examples show how to use org.apache.flink.types.IntValue. These examples are extracted from open source projects; the originating project and source file are noted above each example.
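
Before the project examples, here is a minimal, self-contained sketch of the IntValue API they all build on (the class name IntValueBasics is illustrative). IntValue is Flink's mutable, reusable wrapper around a primitive int, exposing a getValue()/setValue(int) pair, which is why many of the examples below mutate a single instance in place rather than allocating new objects.

import org.apache.flink.types.IntValue;

public class IntValueBasics {
	public static void main(String[] args) {
		// Mutable wrapper: one instance can be reused across many records.
		IntValue count = new IntValue(0); // a no-arg constructor (value 0) also exists

		for (int i = 0; i < 3; i++) {
			count.setValue(count.getValue() + 1); // mutate in place
		}

		System.out.println(count.getValue()); // prints 3
	}
}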
Example 1
Source Project: flink   Source File: VertexOutDegreeTest.java    License: Apache License 2.0
@Test
public void testWithUndirectedSimpleGraph() throws Exception {
	DataSet<Vertex<IntValue, LongValue>> outDegree = undirectedSimpleGraph
		.run(new VertexOutDegree<IntValue, NullValue, NullValue>()
			.setIncludeZeroDegreeVertices(true));

	String expectedResult =
		"(0,2)\n" +
		"(1,3)\n" +
		"(2,3)\n" +
		"(3,4)\n" +
		"(4,1)\n" +
		"(5,1)";

	TestBaseUtils.compareResultAsText(outDegree.collect(), expectedResult);
}
 
Example 2
Source Project: flink   Source File: GenericCsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testReadTooShortInputLenient() throws IOException {
	try {
		final String fileContent = "666|777|888|999|555\n111|222|333|444\n666|777|888|999|555";
		final FileInputSplit split = createTempFile(fileContent);	
	
		final Configuration parameters = new Configuration();
		format.setFieldDelimiter("|");
		format.setFieldTypesGeneric(IntValue.class, IntValue.class, IntValue.class, IntValue.class, IntValue.class);
		format.setLenient(true);
		
		format.configure(parameters);
		format.open(split);
		
		Value[] values = createIntValues(5);
		
		assertNotNull(format.nextRecord(values));	// line okay
		assertNull(format.nextRecord(values));	// line too short
		assertNotNull(format.nextRecord(values));	// line okay
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
	}
}
 
Example 3
Source Project: flink   Source File: GroupReduceDriverTest.java    License: Apache License 2.0
@Override
public void reduce(Iterable<Tuple2<StringValue, IntValue>> values, Collector<Tuple2<StringValue, IntValue>> out) throws Exception {
	List<Tuple2<StringValue, IntValue>> all = new ArrayList<Tuple2<StringValue,IntValue>>();
	
	for (Tuple2<StringValue, IntValue> t : values) {
		all.add(t);
	}
	
	Tuple2<StringValue, IntValue> result = all.get(0);
	
	for (int i = 1; i < all.size(); i++) {
		Tuple2<StringValue, IntValue> e = all.get(i);
		result.f0.append(e.f0);
		result.f1.setValue(result.f1.getValue() + e.f1.getValue());
	}
	
	out.collect(result);
}
 
Example 4
Source Project: flink   Source File: PageRankTest.java    License: Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
	DataSet<Result<IntValue>> pr = new PageRank<IntValue, NullValue, NullValue>(DAMPING_FACTOR, 20)
		.run(directedSimpleGraph);

	List<Double> expectedResults = new ArrayList<>();
	expectedResults.add(0.0909212166211);
	expectedResults.add(0.279516064311);
	expectedResults.add(0.129562719068);
	expectedResults.add(0.223268406353);
	expectedResults.add(0.185810377026);
	expectedResults.add(0.0909212166211);

	for (Result<IntValue> result : pr.collect()) {
		int id = result.getVertexId0().getValue();
		assertEquals(expectedResults.get(id), result.getPageRankScore().getValue(), ACCURACY);
	}
}
 
Example 5
Source Project: flink   Source File: OutputEmitterTest.java    License: Apache License 2.0
private boolean verifyWrongPartitionHashKey(int position, int fieldNum) {
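	// The comparator reads the key from field index 'position', but the record
	// below only populates field 'fieldNum'. Whenever the two indices differ,
	// hash partitioning encounters a null key field and should throw
	// NullKeyFieldException, which this helper reports by returning true.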
	final TypeComparator<Record> comparator = new RecordComparatorFactory(
		new int[] {position}, new Class[] {IntValue.class}).createComparator();
	final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
		ShipStrategyType.PARTITION_HASH, comparator, 100);
	final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

	Record record = new Record(2);
	record.setField(fieldNum, new IntValue(1));
	delegate.setInstance(record);

	try {
		selector.selectChannel(delegate);
	} catch (NullKeyFieldException re) {
		Assert.assertEquals(position, re.getFieldNumber());
		return true;
	}
	return false;
}
 
Example 6
Source Project: flink   Source File: EdgeTargetDegreesTest.java    License: Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
	String expectedResult =
		"(0,1,((null),(3,0,3)))\n" +
		"(0,2,((null),(3,2,1)))\n" +
		"(2,1,((null),(3,0,3)))\n" +
		"(2,3,((null),(4,2,2)))\n" +
		"(3,1,((null),(3,0,3)))\n" +
		"(3,4,((null),(1,0,1)))\n" +
		"(5,3,((null),(4,2,2)))";

	DataSet<Edge<IntValue, Tuple2<NullValue, Degrees>>> targetDegrees = directedSimpleGraph
			.run(new EdgeTargetDegrees<>());

	TestBaseUtils.compareResultAsText(targetDegrees.collect(), expectedResult);
}
 
Example 7
Source Project: flink   Source File: DataSourceTaskTest.java    License: Apache License 2.0
public static void prepareInputFile(MutableObjectIterator<Record> inIt, File inputFile, boolean insertInvalidData)
throws IOException {

	try (BufferedWriter bw = new BufferedWriter(new FileWriter(inputFile))) {
		if (insertInvalidData) {
			bw.write("####_I_AM_INVALID_########\n");
		}

		Record rec = new Record();
		while ((rec = inIt.next(rec)) != null) {
			IntValue key = rec.getField(0, IntValue.class);
			IntValue value = rec.getField(1, IntValue.class);

			bw.write(key.getValue() + "_" + value.getValue() + "\n");
		}
		if (insertInvalidData) {
			bw.write("####_I_AM_INVALID_########\n");
		}

		bw.flush();
	}
}
 
Example 8
Source Project: flink   Source File: AdamicAdar.java    License: Apache License 2.0
@Override
public void reduce(Iterable<Tuple3<T, T, FloatValue>> values, Collector<Tuple4<IntValue, T, T, FloatValue>> out)
		throws Exception {
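	// Note: 'output', 'groupSpansValue', and GROUP_SIZE are fields of the
	// enclosing function class (not shown in this snippet); because IntValue
	// is mutable, updating groupSpansValue marks each span of GROUP_SIZE
	// edges in the emitted tuples.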
	int groupCount = 0;
	int groupSpans = 1;

	groupSpansValue.setValue(groupSpans);

	for (Tuple3<T, T, FloatValue> edge : values) {
		output.f1 = edge.f0;
		output.f2 = edge.f1;
		output.f3 = edge.f2;

		out.collect(output);

		if (++groupCount == GROUP_SIZE) {
			groupCount = 0;
			groupSpansValue.setValue(++groupSpans);
		}
	}
}
 
Example 9
Source Project: Flink-CEPplus   Source File: VertexInDegreeTest.java    License: Apache License 2.0
@Test
public void testWithDirectedSimpleGraph() throws Exception {
	DataSet<Vertex<IntValue, LongValue>> inDegree = directedSimpleGraph
		.run(new VertexInDegree<IntValue, NullValue, NullValue>()
			.setIncludeZeroDegreeVertices(true));

	String expectedResult =
		"(0,0)\n" +
		"(1,3)\n" +
		"(2,1)\n" +
		"(3,2)\n" +
		"(4,1)\n" +
		"(5,0)";

	TestBaseUtils.compareResultAsText(inDegree.collect(), expectedResult);
}
 
Example 10
@Test
public void testCreate() throws Exception {
	// supported primitive types
	Assert.assertEquals(StringSummaryAggregator.class, SummaryAggregatorFactory.create(String.class).getClass());
	Assert.assertEquals(ShortSummaryAggregator.class, SummaryAggregatorFactory.create(Short.class).getClass());
	Assert.assertEquals(IntegerSummaryAggregator.class, SummaryAggregatorFactory.create(Integer.class).getClass());
	Assert.assertEquals(LongSummaryAggregator.class, SummaryAggregatorFactory.create(Long.class).getClass());
	Assert.assertEquals(FloatSummaryAggregator.class, SummaryAggregatorFactory.create(Float.class).getClass());
	Assert.assertEquals(DoubleSummaryAggregator.class, SummaryAggregatorFactory.create(Double.class).getClass());
	Assert.assertEquals(BooleanSummaryAggregator.class, SummaryAggregatorFactory.create(Boolean.class).getClass());

	// supported value types
	Assert.assertEquals(ValueSummaryAggregator.StringValueSummaryAggregator.class, SummaryAggregatorFactory.create(StringValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.ShortValueSummaryAggregator.class, SummaryAggregatorFactory.create(ShortValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.IntegerValueSummaryAggregator.class, SummaryAggregatorFactory.create(IntValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.LongValueSummaryAggregator.class, SummaryAggregatorFactory.create(LongValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.FloatValueSummaryAggregator.class, SummaryAggregatorFactory.create(FloatValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.DoubleValueSummaryAggregator.class, SummaryAggregatorFactory.create(DoubleValue.class).getClass());
	Assert.assertEquals(ValueSummaryAggregator.BooleanValueSummaryAggregator.class, SummaryAggregatorFactory.create(BooleanValue.class).getClass());

	// some not well supported types - these fallback to ObjectSummaryAggregator
	Assert.assertEquals(ObjectSummaryAggregator.class, SummaryAggregatorFactory.create(Object.class).getClass());
	Assert.assertEquals(ObjectSummaryAggregator.class, SummaryAggregatorFactory.create(List.class).getClass());
}
 
Example 11
@Test
public void testOrderErased() {

	SingleInputSemanticProperties sProps = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sProps, new String[]{"1; 4"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(4, LongValue.class, Order.DESCENDING);
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

	RequestedLocalProperties rlProp = new RequestedLocalProperties();
	rlProp.setOrdering(o);

	RequestedLocalProperties filtered = rlProp.filterBySemanticProperties(sProps, 0);

	assertNull(filtered);
}
 
Example 12
Source Project: flink   Source File: ValueArrayFactory.java    License: Apache License 2.0
/**
 * Produce a {@code ValueArray} for the given {@code Value} type.
 *
 * @param cls {@code Value} class
 * @return {@code ValueArray} for given {@code Value} class
 */
@SuppressWarnings("unchecked")
public static <T> ValueArray<T> createValueArray(Class<? extends Value> cls) {
	if (ByteValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ByteValueArray();
	} else if (CharValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new CharValueArray();
	} else if (DoubleValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new DoubleValueArray();
	} else if (FloatValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new FloatValueArray();
	} else if (IntValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new IntValueArray();
	} else if (LongValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new LongValueArray();
	} else if (NullValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new NullValueArray();
	} else if (ShortValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ShortValueArray();
	} else if (StringValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new StringValueArray();
	} else {
		throw new IllegalArgumentException("Unable to create unbounded ValueArray for type " + cls);
	}
}
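
A minimal usage sketch for the factory above (variable names are illustrative; assumes the flink-gelly ValueArray types are on the classpath):

ValueArray<IntValue> array = ValueArrayFactory.createValueArray(IntValue.class);
array.add(new IntValue(42)); // ValueArray.add stores the element's value
array.add(new IntValue(7));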
 
Example 13
Source Project: flink   Source File: SlotCountExceedingParallelismTest.java    License: Apache License 2.0
@Override
public void invoke() throws Exception {
	RecordWriter<IntValue> writer = new RecordWriterBuilder<IntValue>().build(getEnvironment().getWriter(0));
	final int numberOfTimesToSend = getTaskConfiguration().getInteger(CONFIG_KEY, 0);

	final IntValue subtaskIndex = new IntValue(
			getEnvironment().getTaskInfo().getIndexOfThisSubtask());

	try {
		for (int i = 0; i < numberOfTimesToSend; i++) {
			writer.emit(subtaskIndex);
		}
		writer.flushAll();
	}
	finally {
		writer.clearBuffers();
	}
}
 
Example 14
@Test
public void testRangePartitioningErased() {

	SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"1;2"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(3, LongValue.class, Order.DESCENDING);
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

	RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
	rgProps.setRangePartitioned(o);

	RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

	assertNull(filtered);
}
 
Example 15
Source Project: flink   Source File: SlotCountExceedingParallelismTest.java    License: Apache License 2.0
@Override
public void invoke() throws Exception {
	RecordWriter<IntValue> writer = new RecordWriterBuilder<IntValue>().build(getEnvironment().getWriter(0));
	final int numberOfTimesToSend = getTaskConfiguration().getInteger(CONFIG_KEY, 0);

	final IntValue subtaskIndex = new IntValue(
			getEnvironment().getTaskInfo().getIndexOfThisSubtask());

	try {
		for (int i = 0; i < numberOfTimesToSend; i++) {
			writer.emit(subtaskIndex);
		}
		writer.flushAll();
	}
	finally {
		writer.clearBuffers();
	}
}
 
Example 16
Source Project: Flink-CEPplus   Source File: DataSourceTaskTest.java    License: Apache License 2.0
public static void prepareInputFile(MutableObjectIterator<Record> inIt, File inputFile, boolean insertInvalidData)
throws IOException {

	try (BufferedWriter bw = new BufferedWriter(new FileWriter(inputFile))) {
		if (insertInvalidData) {
			bw.write("####_I_AM_INVALID_########\n");
		}

		Record rec = new Record();
		while ((rec = inIt.next(rec)) != null) {
			IntValue key = rec.getField(0, IntValue.class);
			IntValue value = rec.getField(1, IntValue.class);

			bw.write(key.getValue() + "_" + value.getValue() + "\n");
		}
		if (insertInvalidData) {
			bw.write("####_I_AM_INVALID_########\n");
		}

		bw.flush();
	}
}
 
Example 17
Source Project: flink   Source File: AggregateITCase.java    License: Apache License 2.0
@Test
public void testNestedAggregateOfMutableValueTypes() throws Exception {
	/*
	 * Nested Aggregate of mutable value types
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<IntValue, LongValue, StringValue>> ds = ValueCollectionDataSets.get3TupleDataSet(env);
	DataSet<Tuple1<IntValue>> aggregateDs = ds.groupBy(1)
			.aggregate(Aggregations.MIN, 0)
			.aggregate(Aggregations.MIN, 0)
			.project(0);

	List<Tuple1<IntValue>> result = aggregateDs.collect();

	String expected = "1\n";

	compareResultAsTuples(result, expected);
}
 
Example 18
Source Project: flink   Source File: AdamicAdarTest.java    License: Apache License 2.0
@Test
public void testWithSimpleGraphWithMinimumScore() throws Exception {
	DataSet<Result<IntValue>> aa = undirectedSimpleGraph
		.run(new AdamicAdar<IntValue, NullValue, NullValue>()
			.setMinimumScore(0.75f));

	String expectedResult =
		"(0,1," + ilog[2] + ")\n" +
		"(0,2," + ilog[1] + ")\n" +
		"(0,3," + (ilog[1] + ilog[2]) + ")\n" +
		"(1,2," + (ilog[0] + ilog[3]) + ")\n" +
		"(1,3," + ilog[2] + ")\n" +
		"(2,3," + ilog[1] + ")";

	TestBaseUtils.compareResultAsText(aa.collect(), expectedResult);
}
 
Example 19
Source Project: Flink-CEPplus   Source File: HITSTest.java    License: Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
	DataSet<Result<IntValue>> hits = new HITS<IntValue, NullValue, NullValue>(20)
		.run(directedSimpleGraph);

	List<Tuple2<Double, Double>> expectedResults = new ArrayList<>();
	expectedResults.add(Tuple2.of(0.54464336064, 0.0));
	expectedResults.add(Tuple2.of(0.0, 0.836329364957));
	expectedResults.add(Tuple2.of(0.607227075863, 0.268492484699));
	expectedResults.add(Tuple2.of(0.54464336064, 0.395445020996));
	expectedResults.add(Tuple2.of(0.0, 0.268492484699));
	expectedResults.add(Tuple2.of(0.194942293412, 0.0));

	for (Result<IntValue> result : hits.collect()) {
		int id = result.getVertexId0().getValue();
		assertEquals(expectedResults.get(id).f0, result.getHubScore().getValue(), ACCURACY);
		assertEquals(expectedResults.get(id).f1, result.getAuthorityScore().getValue(), ACCURACY);
	}
}
 
Example 20
Source Project: flink   Source File: ValueCollectionDataSets.java    License: Apache License 2.0
public static DataSet<IntValue> getIntDataSet(ExecutionEnvironment env) {
	List<IntValue> data = new ArrayList<>();

	data.add(new IntValue(1));
	data.add(new IntValue(2));
	data.add(new IntValue(2));
	data.add(new IntValue(3));
	data.add(new IntValue(3));
	data.add(new IntValue(3));
	data.add(new IntValue(4));
	data.add(new IntValue(4));
	data.add(new IntValue(4));
	data.add(new IntValue(4));
	data.add(new IntValue(5));
	data.add(new IntValue(5));
	data.add(new IntValue(5));
	data.add(new IntValue(5));
	data.add(new IntValue(5));

	Collections.shuffle(data);

	return env.fromCollection(data);
}
 
Example 21
Source Project: Flink-CEPplus   Source File: GenericCsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testReadTooShortInputLenient() throws IOException {
	try {
		final String fileContent = "666|777|888|999|555\n111|222|333|444\n666|777|888|999|555";
		final FileInputSplit split = createTempFile(fileContent);	
	
		final Configuration parameters = new Configuration();
		format.setFieldDelimiter("|");
		format.setFieldTypesGeneric(IntValue.class, IntValue.class, IntValue.class, IntValue.class, IntValue.class);
		format.setLenient(true);
		
		format.configure(parameters);
		format.open(split);
		
		Value[] values = createIntValues(5);
		
		assertNotNull(format.nextRecord(values));	// line okay
		assertNull(format.nextRecord(values));	// line too short
		assertNotNull(format.nextRecord(values));	// line okay
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
	}
}
 
Example 22
@Test
public void testRangePartitioningPreserved2() {

	SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"7->3;1->1;2->6"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(3, LongValue.class, Order.DESCENDING);
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

	RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
	rgProps.setRangePartitioned(o);

	RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

	assertNotNull(filtered);
	assertEquals(PartitioningProperty.RANGE_PARTITIONED, filtered.getPartitioning());
	assertNotNull(filtered.getOrdering());
	assertEquals(3, filtered.getOrdering().getNumberOfFields());
	assertEquals(7, filtered.getOrdering().getFieldNumber(0).intValue());
	assertEquals(1, filtered.getOrdering().getFieldNumber(1).intValue());
	assertEquals(2, filtered.getOrdering().getFieldNumber(2).intValue());
	assertEquals(LongValue.class, filtered.getOrdering().getType(0));
	assertEquals(IntValue.class, filtered.getOrdering().getType(1));
	assertEquals(ByteValue.class, filtered.getOrdering().getType(2));
	assertEquals(Order.DESCENDING, filtered.getOrdering().getOrder(0));
	assertEquals(Order.ASCENDING, filtered.getOrdering().getOrder(1));
	assertEquals(Order.DESCENDING, filtered.getOrdering().getOrder(2));
	assertNull(filtered.getPartitionedFields());
	assertNull(filtered.getDataDistribution());
	assertNull(filtered.getCustomPartitioner());
}
 
Example 23
Source Project: flink   Source File: GlobalPropertiesFilteringTest.java    License: Apache License 2.0
@Test
public void testRangePartitioningPreserved2() {

	SingleInputSemanticProperties sprops = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sprops, new String[]{"1->3; 2->0; 5->1"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(5, LongValue.class, Order.DESCENDING);
	o.appendOrdering(2, StringValue.class, Order.ASCENDING);
	GlobalProperties gprops = new GlobalProperties();
	gprops.setRangePartitioned(o);

	GlobalProperties result = gprops.filterBySemanticProperties(sprops, 0);

	assertEquals(PartitioningProperty.RANGE_PARTITIONED, result.getPartitioning());
	FieldList pFields = result.getPartitioningFields();
	assertEquals(3, pFields.size());
	assertEquals(3, pFields.get(0).intValue());
	assertEquals(1, pFields.get(1).intValue());
	assertEquals(0, pFields.get(2).intValue());
	Ordering pOrder = result.getPartitioningOrdering();
	assertEquals(3, pOrder.getNumberOfFields());
	assertEquals(3, pOrder.getFieldNumber(0).intValue());
	assertEquals(1, pOrder.getFieldNumber(1).intValue());
	assertEquals(0, pOrder.getFieldNumber(2).intValue());
	assertEquals(Order.ASCENDING, pOrder.getOrder(0));
	assertEquals(Order.DESCENDING, pOrder.getOrder(1));
	assertEquals(Order.ASCENDING, pOrder.getOrder(2));
	assertEquals(IntValue.class, pOrder.getType(0));
	assertEquals(LongValue.class, pOrder.getType(1));
	assertEquals(StringValue.class, pOrder.getType(2));
}
 
Example 24
Source Project: flink   Source File: DataSinkTaskTest.java    License: Apache License 2.0
@Override
public void writeRecord(Record rec) throws IOException {
	IntValue key = rec.getField(0, IntValue.class);
	IntValue value = rec.getField(1, IntValue.class);

	this.bld.setLength(0);
	this.bld.append(key.getValue());
	this.bld.append('_');
	this.bld.append(value.getValue());
	this.bld.append('\n');

	byte[] bytes = this.bld.toString().getBytes(ConfigConstants.DEFAULT_CHARSET);

	this.stream.write(bytes);
}
 
Example 25
Source Project: flink   Source File: SpillingResettableIteratorTest.java    License: Apache License 2.0
/**
 * Tests the resettable iterator with too little memory, so that the data
 * has to be written to disk.
 */
@Test
public void testResettableIterator() {
	try {
		// create the resettable Iterator
		SpillingResettableIterator<IntValue> iterator = new SpillingResettableIterator<IntValue>(
				this.reader, this.serializer, this.memman, this.ioman, 2, this.memOwner);
		// open the iterator
		iterator.open();

		// now test walking through the iterator
		int count = 0;
		while (iterator.hasNext()) {
			Assert.assertEquals("In initial run, element " + count + " does not match expected value!", count++,
				iterator.next().getValue());
		}
		Assert.assertEquals("Too few elements were deserialzied in initial run!", NUM_TESTRECORDS, count);
		// test resetting the iterator a few times
		for (int j = 0; j < 10; ++j) {
			count = 0;
			iterator.reset();
			// now we should get the same results
			while (iterator.hasNext()) {
				Assert.assertEquals("After reset nr. " + j + 1 + " element " + count
					+ " does not match expected value!", count++, iterator.next().getValue());
			}
			Assert.assertEquals("Too few elements were deserialzied after reset nr. " + j + 1 + "!", NUM_TESTRECORDS,
				count);
		}
		// close the iterator
		iterator.close();
	} catch (Exception ex)  {
		ex.printStackTrace();
		Assert.fail("Test encountered an exception.");
	}
}
 
Example 26
Source Project: flink   Source File: TriangleListingTest.java    License: Apache License 2.0
@Test
public void testSimpleGraphPermuted() throws Exception {
	DataSet<Result<IntValue>> tl = directedSimpleGraph
		.run(new TriangleListing<IntValue, NullValue, NullValue>()
			.setPermuteResults(true));

	String expectedResult =
		// permutation of (0,1,2)
		"1st vertex ID: 0, 2nd vertex ID: 1, 3rd vertex ID: 2, edge directions: 0->1, 0->2, 1<-2\n" +
		"1st vertex ID: 0, 2nd vertex ID: 2, 3rd vertex ID: 1, edge directions: 0->2, 0->1, 2->1\n" +
		"1st vertex ID: 1, 2nd vertex ID: 0, 3rd vertex ID: 2, edge directions: 1<-0, 1<-2, 0->2\n" +
		"1st vertex ID: 1, 2nd vertex ID: 2, 3rd vertex ID: 0, edge directions: 1<-2, 1<-0, 2<-0\n" +
		"1st vertex ID: 2, 2nd vertex ID: 0, 3rd vertex ID: 1, edge directions: 2<-0, 2->1, 0->1\n" +
		"1st vertex ID: 2, 2nd vertex ID: 1, 3rd vertex ID: 0, edge directions: 2->1, 2<-0, 1<-0\n" +
		// permutation of (1,2,3)
		"1st vertex ID: 1, 2nd vertex ID: 2, 3rd vertex ID: 3, edge directions: 1<-2, 1<-3, 2->3\n" +
		"1st vertex ID: 1, 2nd vertex ID: 3, 3rd vertex ID: 2, edge directions: 1<-3, 1<-2, 3<-2\n" +
		"1st vertex ID: 2, 2nd vertex ID: 1, 3rd vertex ID: 3, edge directions: 2->1, 2->3, 1<-3\n" +
		"1st vertex ID: 2, 2nd vertex ID: 3, 3rd vertex ID: 1, edge directions: 2->3, 2->1, 3->1\n" +
		"1st vertex ID: 3, 2nd vertex ID: 1, 3rd vertex ID: 2, edge directions: 3->1, 3<-2, 1<-2\n" +
		"1st vertex ID: 3, 2nd vertex ID: 2, 3rd vertex ID: 1, edge directions: 3<-2, 3->1, 2->1";

	List<String> printableStrings = new ArrayList<>();

	for (Result<IntValue> result : tl.collect()) {
		printableStrings.add(result.toPrintableString());
	}

	TestBaseUtils.compareResultAsText(printableStrings, expectedResult);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: GenericCsvInputFormatTest.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
@Test
public void testSparseParseWithIndices() {
	try {
		final String fileContent = "111|222|333|444|555|666|777|888|999|000|\n000|999|888|777|666|555|444|333|222|111|";
		final FileInputSplit split = createTempFile(fileContent);

		final Configuration parameters = new Configuration();

		format.setFieldDelimiter("|");
		format.setFieldsGeneric(new int[] { 0, 3, 7 },
			(Class<? extends Value>[]) new Class[] { IntValue.class, IntValue.class, IntValue.class });
		format.configure(parameters);
		format.open(split);

		Value[] values = createIntValues(3);

		values = format.nextRecord(values);
		assertNotNull(values);
		assertEquals(111, ((IntValue) values[0]).getValue());
		assertEquals(444, ((IntValue) values[1]).getValue());
		assertEquals(888, ((IntValue) values[2]).getValue());

		values = format.nextRecord(values);
		assertNotNull(values);
		assertEquals(000, ((IntValue) values[0]).getValue());
		assertEquals(777, ((IntValue) values[1]).getValue());
		assertEquals(333, ((IntValue) values[2]).getValue());

		assertNull(format.nextRecord(values));
		assertTrue(format.reachedEnd());
	} catch (Exception ex) {
		System.err.println(ex.getMessage());
		ex.printStackTrace();
		fail("Test erroneous");
	}
}
 
Example 28
@Override
public void invoke() throws Exception {
	List<RecordWriter<IntValue>> writers = Lists.newArrayListWithCapacity(2);

	// The order of intermediate result creation in the job graph specifies which produced
	// result partition is pipelined/blocking.
	final RecordWriter<IntValue> pipelinedWriter =
			new RecordWriter<>(getEnvironment().getWriter(0));

	final RecordWriter<IntValue> blockingWriter =
			new RecordWriter<>(getEnvironment().getWriter(1));

	writers.add(pipelinedWriter);
	writers.add(blockingWriter);

	final int numberOfTimesToSend = getTaskConfiguration().getInteger(CONFIG_KEY, 0);

	final IntValue subtaskIndex = new IntValue(
			getEnvironment().getTaskInfo().getIndexOfThisSubtask());

	// Produce the first intermediate result and then the second in a serial fashion.
	for (RecordWriter<IntValue> writer : writers) {
		try {
			for (int i = 0; i < numberOfTimesToSend; i++) {
				writer.emit(subtaskIndex);
			}
			writer.flushAll();
		}
		finally {
			writer.clearBuffers();
		}
	}
}
 
Example 29
Source Project: flink   Source File: IntValueArrayComparatorTest.java    License: Apache License 2.0
@Override
protected IntValueArray[] getSortedTestData() {
	IntValueArray iva0 = new IntValueArray();

	IntValueArray iva1 = new IntValueArray();
	iva1.add(new IntValue(5));

	IntValueArray iva2 = new IntValueArray();
	iva2.add(new IntValue(5));
	iva2.add(new IntValue(10));

	return new IntValueArray[]{ iva0, iva1, iva2 };
}
 
Example 30
Source Project: flink   Source File: OverwriteObjects.java    License: Apache License 2.0
private void scrambleIfNot(Tuple2<IntValue, IntValue> t, Object o) {
	// verify that the tuple is not null and the same as the
	// comparison object, then scramble the fields
	if (t != null && t != o) {
		t.f0.setValue(random.nextInt());
		t.f1.setValue(random.nextInt());
	}
}