org.apache.flink.types.LongValue Java Examples

The following examples show how to use org.apache.flink.types.LongValue. Each example is taken from an open source project; the originating source file and project are noted above the code.
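Before the project examples, here is a minimal standalone sketch of the class itself. LongValue is Flink's mutable wrapper around a primitive long, intended to be reused across records instead of allocating a new object per value. The class name LongValueBasics below is purely illustrative; only the LongValue constructors, getValue(), setValue(), and equals() calls are Flink API.

import org.apache.flink.types.LongValue;

public class LongValueBasics {

	public static void main(String[] args) {
		// construct with an initial value, or use the no-arg constructor (defaults to 0)
		LongValue count = new LongValue(42L);

		// read and mutate the wrapped primitive; the same instance can be reused per record
		count.setValue(count.getValue() + 1);
		System.out.println(count.getValue()); // prints 43

		// LongValue uses value-based equality
		System.out.println(count.equals(new LongValue(43L))); // prints true
	}
}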
Example #1
Source File: RMatGraph.java    From flink with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
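	// scale = number of bits needed to address vertexCount vertices, i.e. ceil(log2(vertexCount))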
	int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1);

	// Edges
	int cyclesPerEdge = noiseEnabled ? 5 * scale : scale;

	List<BlockInfo<T>> generatorBlocks = randomGenerableFactory
		.getRandomGenerables(edgeCount, cyclesPerEdge);

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromCollection(generatorBlocks)
			.name("Random generators")
		.rebalance()
			.setParallelism(parallelism)
			.name("Rebalance")
		.flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise))
			.setParallelism(parallelism)
			.name("RMat graph edges");

	// Vertices
	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism);

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #2
Source File: VertexDegreeTest.java    From flink with Apache License 2.0
@Test
public void testWithEmptyGraphWithoutVertices() throws Exception {
	DataSet<Vertex<LongValue, LongValue>> degree;

	degree = emptyGraphWithoutVertices
		.run(new VertexDegree<LongValue, NullValue, NullValue>()
			.setIncludeZeroDegreeVertices(false));

	assertEquals(0, degree.collect().size());

	degree = emptyGraphWithoutVertices
		.run(new VertexDegree<LongValue, NullValue, NullValue>()
			.setIncludeZeroDegreeVertices(true));

	assertEquals(0, degree.collect().size());
}
 
Example #3
Source File: EdgeSourceDegreeTest.java    From flink with Apache License 2.0
@Test
public void testWithRMatGraph() throws Exception {
	DataSet<Edge<LongValue, Tuple2<NullValue, LongValue>>> sourceDegreeOnSourceId = undirectedRMatGraph(10, 16)
		.run(new EdgeSourceDegree<LongValue, NullValue, NullValue>()
			.setReduceOnTargetId(false));

	Checksum checksumOnSourceId = new ChecksumHashCode<Edge<LongValue, Tuple2<NullValue, LongValue>>>()
		.run(sourceDegreeOnSourceId)
		.execute();

	assertEquals(20884, checksumOnSourceId.getCount());
	assertEquals(0x000000019d8f0070L, checksumOnSourceId.getChecksum());

	DataSet<Edge<LongValue, Tuple2<NullValue, LongValue>>> sourceDegreeOnTargetId = undirectedRMatGraph(10, 16)
		.run(new EdgeSourceDegree<LongValue, NullValue, NullValue>()
			.setReduceOnTargetId(true));

	Checksum checksumOnTargetId = new ChecksumHashCode<Edge<LongValue, Tuple2<NullValue, LongValue>>>()
		.run(sourceDegreeOnTargetId)
		.execute();

	// reducing on the source id and on the target id must produce the same result
	assertEquals(checksumOnSourceId, checksumOnTargetId);
}
 
Example #4
Source File: AggregateITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testNestedAggregateOfMutableValueTypes() throws Exception {
	/*
	 * Nested Aggregate of mutable value types
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<IntValue, LongValue, StringValue>> ds = ValueCollectionDataSets.get3TupleDataSet(env);
	DataSet<Tuple1<IntValue>> aggregateDs = ds.groupBy(1)
			.aggregate(Aggregations.MIN, 0)
			.aggregate(Aggregations.MIN, 0)
			.project(0);

	List<Tuple1<IntValue>> result = aggregateDs.collect();

	String expected = "1\n";

	compareResultAsTuples(result, expected);
}
 
Example #5
Source File: DegreesITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testGetDegreesWithDisconnectedData() throws Exception {
	/*
	 * Test getDegrees() with disconnected data
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, NullValue, Long> graph =
		Graph.fromDataSet(TestGraphUtils.getDisconnectedLongLongEdgeData(env), env);

	DataSet<Tuple2<Long, LongValue>> data = graph.outDegrees();
	List<Tuple2<Long, LongValue>> result = data.collect();

	expectedResult = "1,2\n" +
		"2,1\n" +
		"3,0\n" +
		"4,1\n" +
		"5,0\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #6
Source File: RMatGraph.java    From Flink-CEPplus with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
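	// scale = number of bits needed to address vertexCount vertices, i.e. ceil(log2(vertexCount))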
	int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1);

	// Edges
	int cyclesPerEdge = noiseEnabled ? 5 * scale : scale;

	List<BlockInfo<T>> generatorBlocks = randomGenerableFactory
		.getRandomGenerables(edgeCount, cyclesPerEdge);

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromCollection(generatorBlocks)
			.name("Random generators")
		.rebalance()
			.setParallelism(parallelism)
			.name("Rebalance")
		.flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise))
			.setParallelism(parallelism)
			.name("RMat graph edges");

	// Vertices
	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism);

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #7
Source File: HypercubeGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
	int dimensions = 10;

	Graph<LongValue, NullValue, NullValue> graph = new HypercubeGraph(env, dimensions)
		.generate();

	assertEquals(1L << dimensions, graph.numberOfVertices());
	assertEquals(dimensions * (1L << dimensions), graph.numberOfEdges());

	long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
	long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
	long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
	long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

	assertEquals(dimensions, minInDegree);
	assertEquals(dimensions, minOutDegree);
	assertEquals(dimensions, maxInDegree);
	assertEquals(dimensions, maxOutDegree);
}
 
Example #8
Source File: LocalPropertiesFilteringTest.java    From flink with Apache License 2.0
@Test
public void testSortingPreserved4() {
	SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sp, new String[]{"2->7;5"}, null, null, tupleInfo, tupleInfo);

	Ordering o = new Ordering();
	o.appendOrdering(2, IntValue.class, Order.ASCENDING);
	o.appendOrdering(0, StringValue.class, Order.DESCENDING);
	o.appendOrdering(5, LongValue.class, Order.DESCENDING);
	LocalProperties lProps = LocalProperties.forOrdering(o);

	LocalProperties filtered = lProps.filterBySemanticProperties(sp, 0);
	FieldList gFields = filtered.getGroupedFields();
	Ordering order = filtered.getOrdering();

	assertNotNull(gFields);
	assertEquals(1, gFields.size());
	assertTrue(gFields.contains(7));
	assertNotNull(order);
	assertEquals(1, order.getNumberOfFields());
	assertEquals(7, order.getFieldNumber(0).intValue());
	assertEquals(Order.ASCENDING, order.getOrder(0));
	assertEquals(IntValue.class, order.getType(0));
	assertNull(filtered.getUniqueFields());
}
 
Example #9
Source File: ScatterGatherConfigurationITCase.java    From flink with Apache License 2.0
@Override
public void preSuperstep() {

	// test bcast variable
	@SuppressWarnings("unchecked")
	List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("messagingBcastSet");
	Assert.assertEquals(4, bcastSet.get(0).intValue());
	Assert.assertEquals(5, bcastSet.get(1).intValue());
	Assert.assertEquals(6, bcastSet.get(2).intValue());

	// test number of vertices
	Assert.assertEquals(5, getNumberOfVertices());

	// test aggregator
	if (getSuperstepNumber() == 2) {
		long aggrValue = ((LongValue) getPreviousIterationAggregate("superstepAggregator")).getValue();
		Assert.assertEquals(5, aggrValue);
	}
}
 
Example #10
Source File: ScatterGatherConfigurationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void preSuperstep() {

	// test bcast variable
	@SuppressWarnings("unchecked")
	List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("messagingBcastSet");
	Assert.assertEquals(4, bcastSet.get(0).intValue());
	Assert.assertEquals(5, bcastSet.get(1).intValue());
	Assert.assertEquals(6, bcastSet.get(2).intValue());

	// test number of vertices
	Assert.assertEquals(5, getNumberOfVertices());

	// test aggregator
	if (getSuperstepNumber() == 2) {
		long aggrValue = ((LongValue) getPreviousIterationAggregate("superstepAggregator")).getValue();
		Assert.assertEquals(5, aggrValue);
	}
}
 
Example #11
Source File: EdgeDegreePairTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithEmptyGraphWithVertices() throws Exception {
	DataSet<Edge<LongValue, Tuple3<NullValue, LongValue, LongValue>>> degreePairOnSourceId = emptyGraphWithVertices
		.run(new EdgeDegreePair<LongValue, NullValue, NullValue>()
			.setReduceOnTargetId(false));

	assertEquals(0, degreePairOnSourceId.collect().size());

	DataSet<Edge<LongValue, Tuple3<NullValue, LongValue, LongValue>>> degreePairOnTargetId = emptyGraphWithVertices
		.run(new EdgeDegreePair<LongValue, NullValue, NullValue>()
			.setReduceOnTargetId(true));

	assertEquals(0, degreePairOnTargetId.collect().size());
}
 
Example #12
Source File: CirculantGraph.java    From Flink-CEPplus with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
	// Vertices
	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

	// Edges
	LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, this.vertexCount - 1);

	// Validate ranges
	Collections.sort(offsetRanges);
	Iterator<OffsetRange> iter = offsetRanges.iterator();
	OffsetRange lastRange = iter.next();

	while (iter.hasNext()) {
		OffsetRange nextRange = iter.next();

		if (lastRange.overlaps(nextRange)) {
			throw new IllegalArgumentException("Overlapping ranges " + lastRange + " and " + nextRange);
		}

		lastRange = nextRange;
	}

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromParallelCollection(iterator, LongValue.class)
			.setParallelism(parallelism)
			.name("Edge iterators")
		.flatMap(new LinkVertexToOffsets(vertexCount, offsetRanges))
			.setParallelism(parallelism)
			.name("Circulant graph edges");

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #13
Source File: ValueArraySerializerUpgradeTest.java    From flink with Apache License 2.0
@Override
public Matcher<LongValueArray> testDataMatcher() {
	LongValueArray array = new LongValueArray(128);
	array.add(new LongValue(123L));
	array.add(new LongValue(456L));
	return is(array);
}
 
Example #14
Source File: VertexMetrics.java    From flink with Apache License 2.0
@Override
public void writeRecord(Vertex<T, LongValue> record) throws IOException {
	long degree = record.f1.getValue();
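	// triplets = degree choose 2: the number of neighbor pairs (potential triangles) centered on this vertex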
	long triplets = degree * (degree - 1) / 2;

	vertexCount++;
	edgeCount += degree;
	tripletCount += triplets;
	maximumDegree = Math.max(maximumDegree, degree);
	maximumTriplets = Math.max(maximumTriplets, triplets);
}
 
Example #15
Source File: PageRank.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters)
		throws Exception {
	super.open(parameters);

	Collection<LongValue> vertexCount = getRuntimeContext().getBroadcastVariable(VERTEX_COUNT);
	Iterator<LongValue> vertexCountIterator = vertexCount.iterator();
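	// f1 becomes 1 / |V| from the broadcast vertex count, or NaN when the broadcast set is empty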
	output.f1 = new DoubleValue(vertexCountIterator.hasNext() ? 1.0 / vertexCountIterator.next().getValue() : Double.NaN);
}
 
Example #16
Source File: EdgeSourceDegreeTest.java    From flink with Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
	String expectedResult =
		"(0,1,((null),2))\n" +
		"(0,2,((null),2))\n" +
		"(1,0,((null),3))\n" +
		"(1,2,((null),3))\n" +
		"(1,3,((null),3))\n" +
		"(2,0,((null),3))\n" +
		"(2,1,((null),3))\n" +
		"(2,3,((null),3))\n" +
		"(3,1,((null),4))\n" +
		"(3,2,((null),4))\n" +
		"(3,4,((null),4))\n" +
		"(3,5,((null),4))\n" +
		"(4,3,((null),1))\n" +
		"(5,3,((null),1))";

	DataSet<Edge<IntValue, Tuple2<NullValue, LongValue>>> sourceDegreeOnSourceId = undirectedSimpleGraph
		.run(new EdgeSourceDegree<IntValue, NullValue, NullValue>()
			.setReduceOnTargetId(false));

	TestBaseUtils.compareResultAsText(sourceDegreeOnSourceId.collect(), expectedResult);

	DataSet<Edge<IntValue, Tuple2<NullValue, LongValue>>> sourceDegreeOnTargetId = undirectedSimpleGraph
		.run(new EdgeSourceDegree<IntValue, NullValue, NullValue>()
			.setReduceOnTargetId(true));

	TestBaseUtils.compareResultAsText(sourceDegreeOnTargetId.collect(), expectedResult);
}
 
Example #17
Source File: GenericCsvInputFormatTest.java    From Flink-CEPplus with Apache License 2.0
private Value[] createLongValues(int num) {
	Value[] v = new Value[num];
	
	for (int i = 0; i < num; i++) {
		v[i] = new LongValue();
	}
	
	return v;
}
 
Example #18
Source File: BulkIterationBase.java    From Flink-CEPplus with Apache License 2.0
@Override
public boolean isConverged(int iteration, LongValue countAggregate) {
	long count = countAggregate.getValue();

	if (log.isInfoEnabled()) {
		log.info("Termination criterion stats in iteration [" + iteration + "]: " + count);
	}

	return count == 0;
}
 
Example #19
Source File: CycleGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraph() throws Exception {
	Graph<LongValue, NullValue, NullValue> graph = new CycleGraph(env, 10)
		.generate();

	String vertices = "0; 1; 2; 3; 4; 5; 6; 7; 8; 9";
	String edges = "0,1; 1,0; 1,2; 2,1; 2,3; 3,2; 3,4; 4,3; 4,5; 5,4;" +
		"5,6; 6,5; 6,7; 7,6; 7,8; 8,7; 8,9; 9,8; 9,0; 0,9";

	TestUtils.compareGraph(graph, vertices, edges);
}
 
Example #20
Source File: GraphKeyTypeTransform.java    From flink with Apache License 2.0
@Override
public Short translate(LongValue value, Short reuse)
		throws Exception {
	long l = value.getValue();

	if (l < 0 || l >= MAX_VERTEX_COUNT) {
		throw new IllegalArgumentException("Cannot cast long value " + value + " to short.");
	}

	// l is already range-checked, so masking with (MAX_VERTEX_COUNT - 1) keeps the low-order bits before the narrowing cast
	return (short) (l & (MAX_VERTEX_COUNT - 1));
}
 
Example #21
Source File: SingletonEdgeGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraph() throws Exception {
	int vertexPairCount = 5;

	Graph<LongValue, NullValue, NullValue> graph = new SingletonEdgeGraph(env, vertexPairCount)
		.generate();

	String vertices = "0; 1; 2; 3; 4; 5; 6; 7; 8; 9";
	String edges = "0,1; 1,0; 2,3; 3,2; 4,5; 5,4; 6,7; 7,6; 8,9; 9,8";

	TestUtils.compareGraph(graph, vertices, edges);
}
 
Example #22
Source File: GraphKeyTypeTransformTest.java    From flink with Apache License 2.0
@Test
public void testToCharacter() throws Exception {
	TranslateFunction<LongValue, Character> translator = new LongValueToChar();

	Assert.assertEquals(Character.valueOf((char) 0),
		translator.translate(new LongValue(0L), null));

	Assert.assertEquals(Character.valueOf(Character.MAX_VALUE),
		translator.translate(new LongValue(LongValueToChar.MAX_VERTEX_COUNT - 1), null));
}
 
Example #23
Source File: ScatterGatherIteration.java    From flink with Apache License 2.0
/**
 * Builds the scatter function using a coGroup operator for a simple vertex (without degrees),
 * then configures the resulting operator with a custom name and broadcast variables.
 *
 * @param iteration the delta iteration whose workset provides the vertices to co-group with
 * @param messageTypeInfo the type information of the generated messages
 * @param whereArg the argument for the where within the coGroup
 * @param equalToArg the argument for the equalTo within the coGroup
 * @param numberOfVertices the vertex count, broadcast to the messenger when that option is enabled
 * @return the scatter function
 */
private CoGroupOperator<?, ?, Tuple2<K, Message>> buildScatterFunction(
		DeltaIteration<Vertex<K, VV>, Vertex<K, VV>> iteration,
		TypeInformation<Tuple2<K, Message>> messageTypeInfo, int whereArg, int equalToArg,
		DataSet<LongValue> numberOfVertices) {

	// build the scatter function (co group)
	CoGroupOperator<?, ?, Tuple2<K, Message>> messages;
	ScatterUdfWithEdgeValues<K, VV, VV, Message, EV> messenger =
			new ScatterUdfWithEVsSimpleVV<>(scatterFunction, messageTypeInfo);

	messages = this.edgesWithValue.coGroup(iteration.getWorkset()).where(whereArg)
			.equalTo(equalToArg).with(messenger);

	// configure coGroup message function with name and broadcast variables
	messages = messages.name("Messaging");
	if (this.configuration != null) {
		for (Tuple2<String, DataSet<?>> e : this.configuration.getScatterBcastVars()) {
			messages = messages.withBroadcastSet(e.f1, e.f0);
		}
		if (this.configuration.isOptNumVertices()) {
			messages = messages.withBroadcastSet(numberOfVertices, "number of vertices");
		}
	}

	return messages;
}
 
Example #24
Source File: GraphKeyTypeTransform.java    From flink with Apache License 2.0
@Override
public LongValueWithProperHashCode translate(LongValue value, LongValueWithProperHashCode reuse)
		throws Exception {
	if (reuse == null) {
		reuse = new LongValueWithProperHashCode();
	}

	reuse.setValue(value.getValue());
	return reuse;
}
 
Example #25
Source File: ValueArrayTypeInfo.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public TypeSerializer<ValueArray<T>> createSerializer(ExecutionConfig executionConfig) {
	Preconditions.checkNotNull(type, "TypeInformation type class is required");

	if (ByteValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ByteValueArraySerializer();
	} else if (CharValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new CharValueArraySerializer();
	} else if (DoubleValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new DoubleValueArraySerializer();
	} else if (FloatValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new FloatValueArraySerializer();
	} else if (IntValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new IntValueArraySerializer();
	} else if (LongValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new LongValueArraySerializer();
	} else if (NullValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new NullValueArraySerializer();
	} else if (ShortValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ShortValueArraySerializer();
	} else if (StringValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new StringValueArraySerializer();
	} else {
		throw new InvalidTypesException("No ValueArray class exists for " + type);
	}
}
 
Example #26
Source File: PathGraph.java    From flink with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
	return new GridGraph(env)
		.addDimension(vertexCount, false)
		.setParallelism(parallelism)
		.generate();
}
 
Example #27
Source File: EdgeTargetDegreesTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithEmptyGraphWithVertices() throws Exception {
	DataSet<Edge<LongValue, Tuple2<NullValue, Degrees>>> targetDegrees = emptyGraphWithVertices
		.run(new EdgeTargetDegrees<>());

	assertEquals(0, targetDegrees.collect().size());
}
 
Example #28
Source File: LocalClusteringCoefficient.java    From Flink-CEPplus with Apache License 2.0
@Override
public Result<T> join(Vertex<T, Degrees> vertexAndDegree, Tuple2<T, LongValue> vertexAndTriangleCount)
		throws Exception {
	output.setVertexId0(vertexAndDegree.f0);
	output.setDegree(vertexAndDegree.f1.f0);
	output.setTriangleCount((vertexAndTriangleCount == null) ? zero : vertexAndTriangleCount.f1);

	return output;
}
 
Example #29
Source File: GridGraph.java    From flink with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> create(ExecutionEnvironment env) {
	org.apache.flink.graph.generator.GridGraph graph = new org.apache.flink.graph.generator.GridGraph(env);

	for (Dimension dimension : dimensions) {
		graph.addDimension(dimension.size, dimension.wrapEndpoints);
	}

	return graph
		.setParallelism(parallelism.getValue().intValue())
		.generate();
}
 
Example #30
Source File: ValueArrayTypeInfo.java    From Flink-CEPplus with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public TypeSerializer<ValueArray<T>> createSerializer(ExecutionConfig executionConfig) {
	Preconditions.checkNotNull(type, "TypeInformation type class is required");

	if (ByteValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ByteValueArraySerializer();
	} else if (CharValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new CharValueArraySerializer();
	} else if (DoubleValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new DoubleValueArraySerializer();
	} else if (FloatValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new FloatValueArraySerializer();
	} else if (IntValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new IntValueArraySerializer();
	} else if (LongValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new LongValueArraySerializer();
	} else if (NullValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new NullValueArraySerializer();
	} else if (ShortValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new ShortValueArraySerializer();
	} else if (StringValue.class.isAssignableFrom(type)) {
		return (TypeSerializer<ValueArray<T>>) (TypeSerializer<?>) new StringValueArraySerializer();
	} else {
		throw new InvalidTypesException("No ValueArray class exists for " + type);
	}
}