Java Code Examples for org.apache.flink.types.ByteValue

The following examples show how to use org.apache.flink.types.ByteValue. The examples are extracted from open source projects; the originating project and source file are noted above each example.
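Before the project examples, here is a minimal usage sketch (written for this page, not taken from the Flink sources) showing the core ByteValue operations that recur below: the byte constructor, getValue(), and setValue().

import org.apache.flink.types.ByteValue;

public class ByteValueSketch {
	public static void main(String[] args) {
		// Illustrative only: ByteValue is a mutable holder around a primitive byte.
		ByteValue b = new ByteValue((byte) 42);   // wrap a byte
		byte raw = b.getValue();                  // read the wrapped byte
		b.setValue((byte) 7);                     // reuse the same holder with a new value
		System.out.println(raw + " -> " + b.getValue());
	}
}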
Example 1
Source Project: flink   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public boolean addAll(ValueArray<ByteValue> other) {
	ByteValueArray source = (ByteValueArray) other;

	int sourceSize = source.position;
	int newPosition = position + sourceSize;

	if (newPosition > data.length) {
		if (isBounded) {
			return false;
		} else {
			ensureCapacity(newPosition);
		}
	}

	System.arraycopy(source.data, 0, data, position, sourceSize);
	position = newPosition;

	return true;
}
 
Example 2
Source Project: flink   Source File: ValueArrayFactory.java    License: Apache License 2.0
/**
 * Produce a {@code ValueArray} for the given {@code Value} type with the
 * given bounded size.
 *
 * @param cls {@code Value} class
 * @param bytes limit the array to the given number of bytes
 * @return {@code ValueArray} for given {@code Value} class
 */
@SuppressWarnings("unchecked")
public static <T> ValueArray<T> createValueArray(Class<? extends Value> cls, int bytes) {
	if (ByteValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ByteValueArray(bytes);
	} else if (CharValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new CharValueArray(bytes);
	} else if (DoubleValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new DoubleValueArray(bytes);
	} else if (FloatValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new FloatValueArray(bytes);
	} else if (IntValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new IntValueArray(bytes);
	} else if (LongValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new LongValueArray(bytes);
	} else if (NullValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new NullValueArray(bytes);
	} else if (ShortValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ShortValueArray(bytes);
	} else if (StringValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new StringValueArray(bytes);
	} else {
		throw new IllegalArgumentException("Unable to create bounded ValueArray for type " + cls);
	}
}
 
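The Javadoc in Example 2 documents the bounded factory method. As a usage sketch (not from the Flink sources; the 4096-byte bound is an arbitrary illustrative choice), a bounded array for ByteValue elements could be obtained and filled like this:

// Illustrative fragment; class imports are omitted, as in the examples on this page.
ValueArray<ByteValue> array = ValueArrayFactory.createValueArray(ByteValue.class, 4096);
boolean added = array.add(new ByteValue((byte) 1));
// For a bounded array, add() returns false once the byte limit is reached (see Example 4 below).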
Example 3
@Test
public void testRangePartitioningErased() {

	SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"1;2"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(3, LongValue.class, Order.DESCENDING);
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

	RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
	rgProps.setRangePartitioned(o);

	RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

	assertNull(filtered);
}
 
Example 4
Source Project: Flink-CEPplus   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public boolean add(ByteValue value) {
	int newPosition = position + 1;

	if (newPosition > data.length) {
		if (isBounded) {
			return false;
		} else {
			ensureCapacity(newPosition);
		}
	}

	data[position] = value.getValue();
	position = newPosition;

	return true;
}
 
Example 5
Source Project: Flink-CEPplus   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public boolean addAll(ValueArray<ByteValue> other) {
	ByteValueArray source = (ByteValueArray) other;

	int sourceSize = source.position;
	int newPosition = position + sourceSize;

	if (newPosition > data.length) {
		if (isBounded) {
			return false;
		} else {
			ensureCapacity(newPosition);
		}
	}

	System.arraycopy(source.data, 0, data, position, sourceSize);
	position = newPosition;

	return true;
}
 
Example 6
Source Project: flink   Source File: ValueArrayFactory.java    License: Apache License 2.0
/**
 * Produce a {@code ValueArray} for the given {@code Value} type with the
 * given bounded size.
 *
 * @param cls {@code Value} class
 * @param bytes limit the array to the given number of bytes
 * @return {@code ValueArray} for given {@code Value} class
 */
@SuppressWarnings("unchecked")
public static <T> ValueArray<T> createValueArray(Class<? extends Value> cls, int bytes) {
	if (ByteValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ByteValueArray(bytes);
	} else if (CharValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new CharValueArray(bytes);
	} else if (DoubleValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new DoubleValueArray(bytes);
	} else if (FloatValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new FloatValueArray(bytes);
	} else if (IntValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new IntValueArray(bytes);
	} else if (LongValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new LongValueArray(bytes);
	} else if (NullValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new NullValueArray(bytes);
	} else if (ShortValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new ShortValueArray(bytes);
	} else if (StringValue.class.isAssignableFrom(cls)) {
		return (ValueArray<T>) new StringValueArray(bytes);
	} else {
		throw new IllegalArgumentException("Unable to create bounded ValueArray for type " + cls);
	}
}
 
Example 7
Source Project: Flink-CEPplus   Source File: TriangleListing.java    License: Apache License 2.0
@Override
public Tuple3<T, T, ByteValue> map(Edge<T, Tuple3<ET, Degrees, Degrees>> value)
		throws Exception {
	Tuple3<ET, Degrees, Degrees> degrees = value.f2;
	long sourceDegree = degrees.f1.getDegree().getValue();
	long targetDegree = degrees.f2.getDegree().getValue();

	if (sourceDegree < targetDegree ||
			(sourceDegree == targetDegree && value.f0.compareTo(value.f1) < 0)) {
		output.f0 = value.f0;
		output.f1 = value.f1;
		output.f2 = forward;
	} else {
		output.f0 = value.f1;
		output.f1 = value.f0;
		output.f2 = reverse;
	}

	return output;
}
 
Example 8
Source Project: flink   Source File: ValueArrayTypeInfo.java    License: Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public TypeSerializer<ValueArray<T>> createSerializer(ExecutionConfig executionConfig) {
	Preconditions.checkNotNull(type, "TypeInformation type class is required");

	if (ByteValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ByteValueArraySerializer();
	} else if (CharValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new CharValueArraySerializer();
	} else if (DoubleValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new DoubleValueArraySerializer();
	} else if (FloatValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new FloatValueArraySerializer();
	} else if (IntValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new IntValueArraySerializer();
	} else if (LongValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new LongValueArraySerializer();
	} else if (NullValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new NullValueArraySerializer();
	} else if (ShortValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ShortValueArraySerializer();
	} else if (StringValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new StringValueArraySerializer();
	} else {
		throw new InvalidTypesException("No ValueArray class exists for " + type);
	}
}
 
Example 9
Source Project: flink   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public int compareTo(ValueArray<ByteValue> o) {
	ByteValueArray other = (ByteValueArray) o;

	int min = Math.min(position, other.position);
	for (int i = 0; i < min; i++) {
		int cmp = Byte.compare(data[i], other.data[i]);

		if (cmp != 0) {
			return cmp;
		}
	}

	return Integer.compare(position, other.position);
}
 
Example 10
Source Project: flink   Source File: GraphKeyTypeTransformTest.java    License: Apache License 2.0
@Test
public void testFromByteValue() throws Exception {
	TranslateFunction<ByteValue, LongValueWithProperHashCode> translator = new UnsignedByteValueToLongValueWithProperHashCode();

	Assert.assertEquals(new LongValueWithProperHashCode(0L),
		translator.translate(new ByteValue((byte) 0), longValueWithProperHashCode));

	Assert.assertEquals(new LongValueWithProperHashCode(Byte.MAX_VALUE + 1),
		translator.translate(new ByteValue(Byte.MIN_VALUE), longValueWithProperHashCode));

	Assert.assertEquals(new LongValueWithProperHashCode(LongValueToUnsignedByteValue.MAX_VERTEX_COUNT - 1),
		translator.translate(new ByteValue((byte) -1), longValueWithProperHashCode));
}
 
Example 11
Source Project: flink   Source File: ByteValueArrayComparatorTest.java    License: Apache License 2.0
@Override
protected ByteValueArray[] getSortedTestData() {
	ByteValueArray lva0 = new ByteValueArray();

	ByteValueArray lva1 = new ByteValueArray();
	lva1.add(new ByteValue((byte) 5));

	ByteValueArray lva2 = new ByteValueArray();
	lva2.add(new ByteValue((byte) 5));
	lva2.add(new ByteValue((byte) 10));

	return new ByteValueArray[]{ lva0, lva1, lva2 };
}
 
Example 12
@Test
public void testRangePartitioningPreserved3() {

	SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"7->3;1->1;2->6"}, null, null, tupleInfo, tupleInfo);

	DataDistribution dd = new MockDistribution();
	Ordering o = new Ordering();
	o.appendOrdering(3, LongValue.class, Order.DESCENDING);
	o.appendOrdering(1, IntValue.class, Order.ASCENDING);
	o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

	RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
	rgProps.setRangePartitioned(o, dd);

	RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

	assertNotNull(filtered);
	assertEquals(PartitioningProperty.RANGE_PARTITIONED, filtered.getPartitioning());
	assertNotNull(filtered.getOrdering());
	assertEquals(3, filtered.getOrdering().getNumberOfFields());
	assertEquals(7, filtered.getOrdering().getFieldNumber(0).intValue());
	assertEquals(1, filtered.getOrdering().getFieldNumber(1).intValue());
	assertEquals(2, filtered.getOrdering().getFieldNumber(2).intValue());
	assertEquals(LongValue.class, filtered.getOrdering().getType(0));
	assertEquals(IntValue.class, filtered.getOrdering().getType(1));
	assertEquals(ByteValue.class, filtered.getOrdering().getType(2));
	assertEquals(Order.DESCENDING, filtered.getOrdering().getOrder(0));
	assertEquals(Order.ASCENDING, filtered.getOrdering().getOrder(1));
	assertEquals(Order.DESCENDING, filtered.getOrdering().getOrder(2));
	assertNotNull(filtered.getDataDistribution());
	assertEquals(dd, filtered.getDataDistribution());
	assertNull(filtered.getPartitionedFields());
	assertNull(filtered.getCustomPartitioner());
}
 
Example 13
Source Project: flink   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public void copyTo(ValueArray<ByteValue> target) {
	ByteValueArray other = (ByteValueArray) target;

	other.position = position;
	other.mark = mark;

	other.ensureCapacity(position);
	System.arraycopy(data, 0, other.data, 0, position);
}
 
Example 14
Source Project: flink   Source File: GraphKeyTypeTransformTest.java    License: Apache License 2.0
@Test
public void testToByteValue() throws Exception {
	TranslateFunction<LongValue, ByteValue> translator = new LongValueToUnsignedByteValue();

	Assert.assertEquals(new ByteValue((byte) 0),
		translator.translate(new LongValue(0L), byteValue));

	Assert.assertEquals(new ByteValue(Byte.MIN_VALUE),
		translator.translate(new LongValue(Byte.MAX_VALUE + 1), byteValue));

	Assert.assertEquals(new ByteValue((byte) -1),
		translator.translate(new LongValue(LongValueToUnsignedByteValue.MAX_VERTEX_COUNT - 1), byteValue));
}
 
Example 15
Source Project: flink   Source File: VertexDegrees.java    License: Apache License 2.0
@Override
public void reduce(Iterable<Tuple3<T, T, ByteValue>> values, Collector<Tuple2<T, ByteValue>> out)
		throws Exception {
	byte bitmask = 0;

	for (Tuple3<T, T, ByteValue> value: values) {
		output.f0 = value.f0;
		bitmask |= value.f2.getValue();
	}

	output.f1.setValue(bitmask);
	out.collect(output);
}
 
Example 16
Source Project: flink   Source File: CSVReaderTest.java    License: Apache License 2.0
@Test
public void testWithValueType() throws Exception {
	CsvReader reader = getCsvReader();
	DataSource<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> items =
			reader.types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);
	TypeInformation<?> info = items.getType();

	Assert.assertEquals(true, info.isTupleType());
	Assert.assertEquals(Tuple8.class, info.getTypeClass());
}
 
Example 17
Source Project: Flink-CEPplus   Source File: ByteValueParserTest.java    License: Apache License 2.0
@Override
public ByteValue[] getValidTestResults() {
	return new ByteValue[] {
		new ByteValue((byte) 0), new ByteValue((byte) 1), new ByteValue((byte) 76), new ByteValue((byte) -66),
		new ByteValue(Byte.MAX_VALUE), new ByteValue(Byte.MIN_VALUE), new ByteValue((byte) 19)
	};
}
 
Example 18
Source Project: flink   Source File: VertexDegrees.java    License: Apache License 2.0
@Override
public DataSet<Vertex<K, Degrees>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// s, t, bitmask
	DataSet<Tuple2<K, ByteValue>> vertexWithEdgeOrder = input.getEdges()
		.flatMap(new EmitAndFlipEdge<>())
			.setParallelism(parallelism)
			.name("Emit and flip edge")
		.groupBy(0, 1)
		.reduceGroup(new ReduceBitmask<>())
			.setParallelism(parallelism)
			.name("Reduce bitmask");

	// s, d(s)
	DataSet<Vertex<K, Degrees>> vertexDegrees = vertexWithEdgeOrder
		.groupBy(0)
		.reduceGroup(new DegreeCount<>())
			.setParallelism(parallelism)
			.name("Degree count");

	if (includeZeroDegreeVertices.get()) {
		vertexDegrees = input.getVertices()
			.leftOuterJoin(vertexDegrees)
			.where(0)
			.equalTo(0)
			.with(new JoinVertexWithVertexDegrees<>())
				.setParallelism(parallelism)
				.name("Zero degree vertices");
	}

	return vertexDegrees;
}
 
Example 19
Source Project: Flink-CEPplus   Source File: GraphKeyTypeTransformTest.java    License: Apache License 2.0
@Test
public void testToByteValue() throws Exception {
	TranslateFunction<LongValue, ByteValue> translator = new LongValueToUnsignedByteValue();

	Assert.assertEquals(new ByteValue((byte) 0),
		translator.translate(new LongValue(0L), byteValue));

	Assert.assertEquals(new ByteValue(Byte.MIN_VALUE),
		translator.translate(new LongValue(Byte.MAX_VALUE + 1), byteValue));

	Assert.assertEquals(new ByteValue((byte) -1),
		translator.translate(new LongValue(LongValueToUnsignedByteValue.MAX_VERTEX_COUNT - 1), byteValue));
}
 
Example 20
Source Project: Flink-CEPplus   Source File: ValueArrayTypeInfo.java    License: Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public TypeSerializer<ValueArray<T>> createSerializer(ExecutionConfig executionConfig) {
	Preconditions.checkNotNull(type, "TypeInformation type class is required");

	if (ByteValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ByteValueArraySerializer();
	} else if (CharValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new CharValueArraySerializer();
	} else if (DoubleValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new DoubleValueArraySerializer();
	} else if (FloatValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new FloatValueArraySerializer();
	} else if (IntValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new IntValueArraySerializer();
	} else if (LongValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new LongValueArraySerializer();
	} else if (NullValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new NullValueArraySerializer();
	} else if (ShortValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ShortValueArraySerializer();
	} else if (StringValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new StringValueArraySerializer();
	} else {
		throw new InvalidTypesException("No ValueArray class exists for " + type);
	}
}
 
Example 21
Source Project: flink   Source File: TriangleListing.java    License: Apache License 2.0
@Override
public Result<T> join(Tuple4<T, T, T, ByteValue> triplet, Tuple3<T, T, ByteValue> edge)
		throws Exception {
	output.setVertexId0(triplet.f0);
	output.setVertexId1(triplet.f1);
	output.setVertexId2(triplet.f2);
	output.setBitmask((byte) (triplet.f3.getValue() | edge.f2.getValue()));
	return output;
}
 
Example 22
Source Project: flink   Source File: ByteValueSerializerTest.java    License: Apache License 2.0
@Override
protected ByteValue[] getTestData() {
	Random rnd = new Random(874597969123412341L);
	byte[] byteArray = new byte[1];
	rnd.nextBytes(byteArray);
	
	return new ByteValue[] {new ByteValue((byte) 0), new ByteValue((byte) 1), new ByteValue((byte) -1), 
						new ByteValue(Byte.MAX_VALUE), new ByteValue(Byte.MIN_VALUE),
						new ByteValue(byteArray[0]), new ByteValue((byte) -byteArray[0])};
}
 
Example 23
Source Project: flink   Source File: CsvReaderITCase.java    License: Apache License 2.0
@Test
public void testValueTypes() throws Exception {
	final String inputData = "ABC,true,1,2,3,4,5.0,6.0\nBCD,false,1,2,3,4,5.0,6.0";
	final String dataPath = createInputData(inputData);
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> data =
			env.readCsvFile(dataPath).types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);
	List<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> result = data.collect();

	expected = inputData;
	compareResultAsTuples(result, expected);
}
 
Example 24
Source Project: flink   Source File: ByteValueArray.java    License: Apache License 2.0
@Override
public int compareTo(ValueArray<ByteValue> o) {
	ByteValueArray other = (ByteValueArray) o;

	int min = Math.min(position, other.position);
	for (int i = 0; i < min; i++) {
		int cmp = Byte.compare(data[i], other.data[i]);

		if (cmp != 0) {
			return cmp;
		}
	}

	return Integer.compare(position, other.position);
}
 
Example 25
Source Project: flink   Source File: GraphKeyTypeTransform.java    License: Apache License 2.0
@Override
public LongValueWithProperHashCode translate(ByteValue value, LongValueWithProperHashCode reuse)
		throws Exception {
	if (reuse == null) {
		reuse = new LongValueWithProperHashCode();
	}

	reuse.setValue(value.getValue() & 0xff);
	return reuse;
}
 
Example 26
Source Project: Flink-CEPplus   Source File: VertexDegrees.java    License: Apache License 2.0
@Override
public void flatMap(Edge<T, TV> value, Collector<Tuple3<T, T, ByteValue>> out)
		throws Exception {
	forward.f0 = value.f0;
	forward.f1 = value.f1;
	out.collect(forward);

	reverse.f0 = value.f1;
	reverse.f1 = value.f0;
	out.collect(reverse);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: VertexDegrees.java    License: Apache License 2.0
@Override
public void reduce(Iterable<Tuple2<T, ByteValue>> values, Collector<Vertex<T, Degrees>> out)
		throws Exception {
	long degree = 0;
	long outDegree = 0;
	long inDegree = 0;

	for (Tuple2<T, ByteValue> edge : values) {
		output.f0 = edge.f0;

		byte bitmask = edge.f1.getValue();

		degree++;

		if (bitmask == EdgeOrder.FORWARD.getBitmask()) {
			outDegree++;
		} else if (bitmask == EdgeOrder.REVERSE.getBitmask()) {
			inDegree++;
		} else {
			outDegree++;
			inDegree++;
		}
	}

	output.f1.getDegree().setValue(degree);
	output.f1.getOutDegree().setValue(outDegree);
	output.f1.getInDegree().setValue(inDegree);

	out.collect(output);
}
 
Example 28
Source Project: flink   Source File: ByteValueParserTest.java    License: Apache License 2.0
@Override
public ByteValue[] getValidTestResults() {
	return new ByteValue[] {
		new ByteValue((byte) 0), new ByteValue((byte) 1), new ByteValue((byte) 76), new ByteValue((byte) -66),
		new ByteValue(Byte.MAX_VALUE), new ByteValue(Byte.MIN_VALUE), new ByteValue((byte) 19)
	};
}
 
Example 29
@Override
protected ByteValueArray[] getSortedTestData() {
	ByteValueArray lva0 = new ByteValueArray();

	ByteValueArray lva1 = new ByteValueArray();
	lva1.add(new ByteValue((byte) 5));

	ByteValueArray lva2 = new ByteValueArray();
	lva2.add(new ByteValue((byte) 5));
	lva2.add(new ByteValue((byte) 10));

	return new ByteValueArray[]{ lva0, lva1, lva2 };
}
 
Example 30
Source Project: Flink-CEPplus   Source File: HashTableTest.java    License: Apache License 2.0
/**
 * This tests the case where no additional partition buffers are used at the point when spilling
 * is triggered, testing that overflow bucket buffers are taken into account when deciding which
 * partition to spill.
 */
@Test
public void testSpillingFreesOnlyOverflowSegments() {
	final IOManager ioMan = new IOManagerAsync();
	
	final TypeSerializer<ByteValue> serializer = ByteValueSerializer.INSTANCE;
	final TypeComparator<ByteValue> buildComparator = new ValueComparator<>(true, ByteValue.class);
	final TypeComparator<ByteValue> probeComparator = new ValueComparator<>(true, ByteValue.class);
	
	@SuppressWarnings("unchecked")
	final TypePairComparator<ByteValue, ByteValue> pairComparator = Mockito.mock(TypePairComparator.class);
	
	try {
		final int pageSize = 32*1024;
		final int numSegments = 34;

		List<MemorySegment> memory = getMemory(numSegments, pageSize);

		MutableHashTable<ByteValue, ByteValue> table = new MutableHashTable<>(
				serializer, serializer, buildComparator, probeComparator,
				pairComparator, memory, ioMan, 1, false);

		table.open(new ByteValueIterator(100000000), new ByteValueIterator(1));
		
		table.close();
		
		checkNoTempFilesRemain(ioMan);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
	finally {
		ioMan.shutdown();
	}
}