org.apache.flink.api.common.typeutils.base.IntSerializer Java Examples

The following examples show how to use org.apache.flink.api.common.typeutils.base.IntSerializer. The examples are taken from open-source projects; the source file and project license are noted above each example.
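For a quick orientation before the examples: IntSerializer.INSTANCE serializes and deserializes int values against Flink's DataOutputView/DataInputView abstractions. The following minimal round-trip sketch uses DataOutputSerializer and DataInputDeserializer (both also appear in Example #13 below); the class and variable names are illustrative only.

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class IntSerializerRoundTrip {

	public static void main(String[] args) throws Exception {
		// IntSerializer is a stateless singleton; use the shared INSTANCE.
		IntSerializer serializer = IntSerializer.INSTANCE;

		// Serialize an int into an in-memory output view (fixed length of 4 bytes).
		DataOutputSerializer out = new DataOutputSerializer(serializer.getLength());
		serializer.serialize(42, out);

		// Deserialize it back from the written bytes.
		DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
		int copy = serializer.deserialize(in);

		System.out.println(copy); // prints 42
	}
}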
Example #1
Source File: FromElementsFunction.java    From flink with Apache License 2.0
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
	Preconditions.checkState(this.checkpointedState == null,
		"The " + getClass().getSimpleName() + " has already been initialized.");

	this.checkpointedState = context.getOperatorStateStore().getListState(
		new ListStateDescriptor<>(
			"from-elements-state",
			IntSerializer.INSTANCE
		)
	);

	if (context.isRestored()) {
		List<Integer> retrievedStates = new ArrayList<>();
		for (Integer entry : this.checkpointedState.get()) {
			retrievedStates.add(entry);
		}

		// given that the parallelism of the function is 1, we can only have 1 state
		Preconditions.checkArgument(retrievedStates.size() == 1,
			getClass().getSimpleName() + " retrieved invalid state.");

		this.numElementsToSkip = retrievedStates.get(0);
	}
}
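For context, the ListState registered above is written during checkpointing by the matching snapshotState() method. A minimal companion sketch is shown below (a simplified illustration, not the verbatim Flink implementation), assuming the function tracks how many elements it has emitted in a numElementsEmitted field:

@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
	Preconditions.checkState(this.checkpointedState != null,
		"The " + getClass().getSimpleName() + " has not been properly initialized.");

	// Store a single entry: the count of elements emitted so far. On restore,
	// initializeState() above reads it back as numElementsToSkip.
	this.checkpointedState.clear();
	this.checkpointedState.add(this.numElementsEmitted);
}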
 
Example #2
Source File: StateBackendMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testBroadcastStateRegistrationFailsIfNewValueSerializerIsIncompatible() throws Exception {
	final String stateName = "broadcast-state";

	try {
		testBroadcastStateValueUpgrade(
			new MapStateDescriptor<>(
				stateName,
				IntSerializer.INSTANCE,
				new TestType.V1TestTypeSerializer()),
			new MapStateDescriptor<>(
				stateName,
				IntSerializer.INSTANCE,
				// new value serializer is incompatible
				new TestType.IncompatibleTestTypeSerializer()));

		Assert.fail("should have failed.");
	} catch (Exception e) {
		Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
	}
}
 
Example #3
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
/**
 * Verify that an empty {@code ValueState} will yield the default value.
 */
@Test
public void testValueStateDefaultValue() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, "Hello");

	ValueState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	assertEquals("Hello", state.value());

	state.update("Ciao");
	assertEquals("Ciao", state.value());

	state.clear();
	assertEquals("Hello", state.value());

	backend.dispose();
}
 
Example #4
Source File: FlinkKafkaProducer011MigrationTest.java    From flink with Apache License 2.0
@Override
protected OneInputStreamOperatorTestHarness<Integer, Object> createTestHarness() throws Exception {
	FlinkKafkaProducer011<Integer> kafkaProducer = new FlinkKafkaProducer011<>(
		TOPIC,
		integerKeyedSerializationSchema,
		createProperties(),
		FlinkKafkaProducer011.Semantic.EXACTLY_ONCE
	).ignoreFailuresAfterTransactionTimeout();

	return new OneInputStreamOperatorTestHarness<>(
		new StreamSink<>(kafkaProducer),
		1,
		1,
		0,
		IntSerializer.INSTANCE,
		new OperatorID(1, 1));
}
 
Example #5
Source File: PojoSerializerSnapshotTest.java    From flink with Apache License 2.0
@Test
public void testRestoreSerializerWithSameFields() {
	final PojoSerializerSnapshot<TestPojo> testSnapshot = buildTestSnapshot(Arrays.asList(
		ID_FIELD,
		NAME_FIELD,
		HEIGHT_FIELD
	));

	final TypeSerializer<TestPojo> restoredSerializer = testSnapshot.restoreSerializer();
	assertSame(restoredSerializer.getClass(), PojoSerializer.class);
	final PojoSerializer<TestPojo> restoredPojoSerializer = (PojoSerializer<TestPojo>) restoredSerializer;

	final Field[] restoredFields = restoredPojoSerializer.getFields();
	assertArrayEquals(
		new Field[] { ID_FIELD.field, NAME_FIELD.field, HEIGHT_FIELD.field },
		restoredFields);

	final TypeSerializer<?>[] restoredFieldSerializers = restoredPojoSerializer.getFieldSerializers();
	assertArrayEquals(
		new TypeSerializer[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE, DoubleSerializer.INSTANCE },
		restoredFieldSerializers);
}
 
Example #6
Source File: StateBackendTestBase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verify that an empty {@code ValueState} will yield the default value.
 */
@Test
public void testValueStateDefaultValue() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, "Hello");

	ValueState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	assertEquals("Hello", state.value());

	state.update("Ciao");
	assertEquals("Ciao", state.value());

	state.clear();
	assertEquals("Hello", state.value());

	backend.dispose();
}
 
Example #7
Source File: StateBackendTestBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCopyDefaultValue() throws Exception {
	final AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<IntValue> kvId = new ValueStateDescriptor<>("id", IntValue.class, new IntValue(-1));

	ValueState<IntValue> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	IntValue default1 = state.value();

	backend.setCurrentKey(2);
	IntValue default2 = state.value();

	assertNotNull(default1);
	assertNotNull(default2);
	assertEquals(default1, default2);
	assertFalse(default1 == default2);

	backend.dispose();
}
 
Example #8
Source File: TupleComparatorTTT2Test.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() {
	return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
			(Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class,
			new TypeSerializer[]{
				new TupleSerializer<Tuple2<String, Double>> (
						(Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
						new TypeSerializer[]{
								StringSerializer.INSTANCE,
								DoubleSerializer.INSTANCE}),
				new TupleSerializer<Tuple2<Long, Long>> (
						(Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
						new TypeSerializer[]{
								LongSerializer.INSTANCE,
								LongSerializer.INSTANCE}),
				new TupleSerializer<Tuple2<Integer, Long>> (
						(Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
						new TypeSerializer[]{
								IntSerializer.INSTANCE,
								LongSerializer.INSTANCE})
			});
}
 
Example #9
Source File: FlinkKafkaProducer011ITCase.java    From Flink-CEPplus with Apache License 2.0
private OneInputStreamOperatorTestHarness<Integer, Object> createTestHarness(
		String topic,
		int maxParallelism,
		int parallelism,
		int subtaskIndex,
		Semantic semantic) throws Exception {
	Properties properties = createProperties();

	FlinkKafkaProducer011<Integer> kafkaProducer = new FlinkKafkaProducer011<>(
		topic,
		integerKeyedSerializationSchema,
		properties,
		semantic);

	return new OneInputStreamOperatorTestHarness<>(
		new StreamSink<>(kafkaProducer),
		maxParallelism,
		parallelism,
		subtaskIndex,
		IntSerializer.INSTANCE,
		new OperatorID(42, 44));
}
 
Example #10
Source File: StateBackendMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testBroadcastStateRegistrationFailsIfNewKeySerializerIsIncompatible() throws Exception {
	final String stateName = "broadcast-state";

	try {
		testBroadcastStateKeyUpgrade(
			new MapStateDescriptor<>(
				stateName,
				new TestType.V1TestTypeSerializer(),
				IntSerializer.INSTANCE),
			new MapStateDescriptor<>(
				stateName,
				// new key serializer is incompatible
				new TestType.IncompatibleTestTypeSerializer(),
				IntSerializer.INSTANCE));

		Assert.fail("should have failed.");
	} catch (Exception e) {
		Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
	}
}
 
Example #11
Source File: ReusingSortMergeInnerJoinIteratorITCase.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
	serializer1 = new TupleSerializer<Tuple2<Integer, String>>(
			(Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
			new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
	serializer2 = new TupleSerializer<Tuple2<Integer, String>>(
			(Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
			new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
	comparator1 =  new TupleComparator<Tuple2<Integer, String>>(
			new int[]{0},
			new TypeComparator<?>[] { new IntComparator(true) },
			new TypeSerializer<?>[] { IntSerializer.INSTANCE });
	comparator2 =  new TupleComparator<Tuple2<Integer, String>>(
			new int[]{0},
			new TypeComparator<?>[] { new IntComparator(true) },
			new TypeSerializer<?>[] { IntSerializer.INSTANCE });
	pairComparator = new GenericPairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>>(comparator1, comparator2);

	this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
	this.ioManager = new IOManagerAsync();
}
 
Example #12
Source File: NonReusingSortMergeInnerJoinIteratorITCase.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
	serializer1 = new TupleSerializer<Tuple2<Integer, String>>(
			(Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
			new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
	serializer2 = new TupleSerializer<Tuple2<Integer, String>>(
			(Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
			new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
	comparator1 =  new TupleComparator<Tuple2<Integer, String>>(
			new int[]{0},
			new TypeComparator<?>[] { new IntComparator(true) },
			new TypeSerializer<?>[] { IntSerializer.INSTANCE });
	comparator2 =  new TupleComparator<Tuple2<Integer, String>>(
			new int[]{0},
			new TypeComparator<?>[] { new IntComparator(true) },
			new TypeSerializer<?>[] { IntSerializer.INSTANCE });
	pairComparator = new GenericPairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>>(comparator1, comparator2);
	
	this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
	this.ioManager = new IOManagerAsync();
}
 
Example #13
Source File: RocksDBKeySerializationUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKeySerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	// test for key
	for (int orgKey = 0; orgKey < 100; ++orgKey) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgKey, deserializedKey);

		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgKey, deserializedKey);
	}
}
 
Example #14
Source File: OutputEmitterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testPartitionHash() {
	// Test for IntValue
	verifyPartitionHashSelectedChannels(50000, 100, RecordType.INTEGER);
	// Test for StringValue
	verifyPartitionHashSelectedChannels(10000, 100, RecordType.STRING);

	// Test hash corner cases
	final TestIntComparator testIntComp = new TestIntComparator();
	final ChannelSelector<SerializationDelegate<Integer>> selector = createChannelSelector(
		ShipStrategyType.PARTITION_HASH, testIntComp, 100);
	final SerializationDelegate<Integer> serializationDelegate = new SerializationDelegate<>(new IntSerializer());

	assertPartitionHashSelectedChannels(selector, serializationDelegate, Integer.MIN_VALUE, 100);
	assertPartitionHashSelectedChannels(selector, serializationDelegate, -1, 100);
	assertPartitionHashSelectedChannels(selector, serializationDelegate, 0, 100);
	assertPartitionHashSelectedChannels(selector, serializationDelegate, 1, 100);
	assertPartitionHashSelectedChannels(selector, serializationDelegate, Integer.MAX_VALUE, 100);
}
 
Example #15
Source File: StateBackendMigrationTestBase.java    From flink with Apache License 2.0
@Test
public void testBroadcastStateRegistrationFailsIfNewKeySerializerIsIncompatible() {
	final String stateName = "broadcast-state";

	try {
		testBroadcastStateKeyUpgrade(
			new MapStateDescriptor<>(
				stateName,
				new TestType.V1TestTypeSerializer(),
				IntSerializer.INSTANCE),
			new MapStateDescriptor<>(
				stateName,
				// new key serializer is incompatible
				new TestType.IncompatibleTestTypeSerializer(),
				IntSerializer.INSTANCE));

		fail("should have failed.");
	} catch (Exception e) {
		Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
	}
}
 
Example #16
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
/**
 * Verify that an empty {@code FoldingState} yields {@code null}.
 */
@Test
public void testFoldingStateDefaultValue() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	FoldingStateDescriptor<Integer, String> kvId =
			new FoldingStateDescriptor<>("id", "Fold-Initial:", new AppendingFold(), String.class);

	FoldingState<Integer, String> state = backend.getPartitionedState(
			VoidNamespace.INSTANCE,
			VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	assertNull(state.get());

	state.add(1);
	state.add(2);
	assertEquals("Fold-Initial:,1,2", state.get());

	state.clear();
	assertNull(state.get());

	backend.dispose();
}
 
Example #17
Source File: StateBackendTestBase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verify that an empty {@code ReducingState} yields {@code null}.
 */
@Test
public void testReducingStateDefaultValue() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ReducingStateDescriptor<String> kvId = new ReducingStateDescriptor<>("id", new AppendingReduce(), String.class);

	ReducingState<String> state = backend.getPartitionedState(
			VoidNamespace.INSTANCE,
			VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	assertNull(state.get());

	state.add("Ciao");
	assertEquals("Ciao", state.get());

	state.clear();
	assertNull(state.get());

	backend.dispose();
}
 
Example #18
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
@Test
public void testValueStateWorkWithTtl() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);
	try {
		ValueStateDescriptor<MutableLong> kvId = new ValueStateDescriptor<>("id", MutableLong.class);
		kvId.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(1)).build());

		ValueState<MutableLong> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
		backend.setCurrentKey(1);
		state.update(new MutableLong());
		state.value();
	} finally {
		backend.close();
		backend.dispose();
	}
}
 
Example #19
Source File: TypeSerializerSerializationUtilTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verifies deserialization failure cases when reading a serializer from bytes, in the
 * case of an {@link InvalidClassException}.
 */
@Test
public void testSerializerSerializationWithInvalidClass() throws Exception {

	TypeSerializer<?> serializer = IntSerializer.INSTANCE;

	byte[] serialized;
	try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
		TypeSerializerSerializationUtil.writeSerializer(new DataOutputViewStreamWrapper(out), serializer);
		serialized = out.toByteArray();
	}

	TypeSerializer<?> deserializedSerializer;

	try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
		deserializedSerializer = TypeSerializerSerializationUtil.tryReadSerializer(
			new DataInputViewStreamWrapper(in),
			new ArtificialCNFExceptionThrowingClassLoader(
				Thread.currentThread().getContextClassLoader(),
				Collections.singleton(IntSerializer.class.getName())),
			true);
	}
	Assert.assertTrue(deserializedSerializer instanceof UnloadableDummyTypeSerializer);
}
 
Example #20
Source File: InternalTimerServiceImplTest.java    From flink with Apache License 2.0
private static InternalTimerServiceImpl<Integer, String> createAndStartInternalTimerService(
		Triggerable<Integer, String> triggerable,
		KeyContext keyContext,
		ProcessingTimeService processingTimeService,
		KeyGroupRange keyGroupList,
		PriorityQueueSetFactory priorityQueueSetFactory) {
	InternalTimerServiceImpl<Integer, String> service = createInternalTimerService(
		keyGroupList,
		keyContext,
		processingTimeService,
		IntSerializer.INSTANCE,
		StringSerializer.INSTANCE,
		priorityQueueSetFactory);

	service.startTimerService(IntSerializer.INSTANCE, StringSerializer.INSTANCE, triggerable);
	return service;
}
 
Example #21
Source File: StreamingRuntimeContextTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
private static AbstractStreamOperator<?> createListPlainMockOp() throws Exception {

	AbstractStreamOperator<?> operatorMock = mock(AbstractStreamOperator.class);
	ExecutionConfig config = new ExecutionConfig();

	KeyedStateBackend keyedStateBackend = mock(KeyedStateBackend.class);

	DefaultKeyedStateStore keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, config);

	when(operatorMock.getExecutionConfig()).thenReturn(config);

	doAnswer(new Answer<ListState<String>>() {

		@Override
		public ListState<String> answer(InvocationOnMock invocationOnMock) throws Throwable {
			ListStateDescriptor<String> descr =
					(ListStateDescriptor<String>) invocationOnMock.getArguments()[2];

			AbstractKeyedStateBackend<Integer> backend = new MemoryStateBackend().createKeyedStateBackend(
				new DummyEnvironment("test_task", 1, 0),
				new JobID(),
				"test_op",
				IntSerializer.INSTANCE,
				1,
				new KeyGroupRange(0, 0),
				new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()),
				TtlTimeProvider.DEFAULT,
				new UnregisteredMetricsGroup(),
				Collections.emptyList(),
				new CloseableRegistry());
			backend.setCurrentKey(0);
			return backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, descr);
		}
	}).when(keyedStateBackend).getPartitionedState(Matchers.any(), any(TypeSerializer.class), any(ListStateDescriptor.class));

	when(operatorMock.getKeyedStateStore()).thenReturn(keyedStateStore);
	when(operatorMock.getOperatorID()).thenReturn(new OperatorID());
	return operatorMock;
}
 
Example #22
Source File: TupleComparatorISD1Test.java    From flink with Apache License 2.0
@Override
protected TupleComparator<Tuple3<Integer, String, Double>> createComparator(boolean ascending) {
	return new TupleComparator<Tuple3<Integer, String, Double>>(
			new int[]{0},
			new TypeComparator[]{ new IntComparator(ascending) },
			new TypeSerializer[]{ IntSerializer.INSTANCE });
}
 
Example #23
Source File: StateBackendMigrationTestBase.java    From flink with Apache License 2.0
@Test
public void testPriorityQueueStateCreationFailsIfNewSerializerIsNotCompatible() throws Exception {
	CheckpointStreamFactory streamFactory = createStreamFactory();
	SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();

	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	try {
		InternalPriorityQueue<TestType> internalPriorityQueue = backend.create(
			"testPriorityQueue", new TestType.V1TestTypeSerializer());

		internalPriorityQueue.add(new TestType("key-1", 123));
		internalPriorityQueue.add(new TestType("key-2", 346));
		internalPriorityQueue.add(new TestType("key-1", 777));

		KeyedStateHandle snapshot = runSnapshot(
			backend.snapshot(1L, 2L, streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation()),
			sharedStateRegistry);
		backend.dispose();

		backend = restoreKeyedBackend(IntSerializer.INSTANCE, snapshot);
		backend.create(
			"testPriorityQueue", new TestType.IncompatibleTestTypeSerializer());

		fail("should have failed");
	} catch (Exception e) {
		Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
	} finally {
		backend.dispose();
	}
}
 
Example #24
Source File: RocksDBStateBackendConfigTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This tests whether the RocksDB backend uses the temp directories that are provided
 * by the {@link Environment} when no db storage path is set.
 */
@Test
public void testUseTempDirectories() throws Exception {
	String checkpointPath = tempFolder.newFolder().toURI().toString();
	RocksDBStateBackend rocksDbBackend = new RocksDBStateBackend(checkpointPath);

	File dir1 = tempFolder.newFolder();
	File dir2 = tempFolder.newFolder();

	assertNull(rocksDbBackend.getDbStoragePaths());

	Environment env = getMockEnvironment(dir1, dir2);
	RocksDBKeyedStateBackend<Integer> keyedBackend = (RocksDBKeyedStateBackend<Integer>) rocksDbBackend.
		createKeyedStateBackend(
			env,
			env.getJobID(),
			"test_op",
			IntSerializer.INSTANCE,
			1,
			new KeyGroupRange(0, 0),
			env.getTaskKvStateRegistry(),
			TtlTimeProvider.DEFAULT,
			new UnregisteredMetricsGroup(),
			Collections.emptyList(),
			new CloseableRegistry());

	try {
		File instanceBasePath = keyedBackend.getInstanceBasePath();
		assertThat(instanceBasePath.getAbsolutePath(), anyOf(startsWith(dir1.getAbsolutePath()), startsWith(dir2.getAbsolutePath())));
	} finally {
		IOUtils.closeQuietly(keyedBackend);
		keyedBackend.dispose();
	}
}
 
Example #25
Source File: CompositeTypeSerializerUtilTest.java    From Flink-CEPplus with Apache License 2.0
@Test(expected = IllegalStateException.class)
public void testGetFinalResultOnUndefinedReconfigureIntermediateCompatibilityResultFails() {
	IntermediateCompatibilityResult<Integer> intermediateCompatibilityResult =
		IntermediateCompatibilityResult.undefinedReconfigureResult(new TypeSerializer[]{ IntSerializer.INSTANCE });

	intermediateCompatibilityResult.getFinalResult();
}
 
Example #26
Source File: UnboundedFeedbackLoggerTest.java    From stateful-functions with Apache License 2.0
@SuppressWarnings("unchecked")
private UnboundedFeedbackLogger<Integer> instanceUnderTest(int maxParallelism, long totalMemory) {

  ObjectContainer container =
      Loggers.unboundedSpillableLoggerContainer(
          IO_MANAGER, maxParallelism, totalMemory, IntSerializer.INSTANCE, Function.identity());

  container.add("checkpoint-stream-ops", CheckpointedStreamOperations.class, NOOP.INSTANCE);
  return container.get(UnboundedFeedbackLogger.class);
}
 
Example #27
Source File: NestedRowTest.java    From flink with Apache License 2.0
private BinaryRow getBinaryRow() {
	BinaryRow row = new BinaryRow(1);
	BinaryRowWriter writer = new BinaryRowWriter(row);

	GenericTypeInfo<MyObj> info = new GenericTypeInfo<>(MyObj.class);
	TypeSerializer<MyObj> genericSerializer = info.createSerializer(new ExecutionConfig());
	GenericRow gRow = new GenericRow(5);
	gRow.setField(0, 1);
	gRow.setField(1, 5L);
	gRow.setField(2, BinaryString.fromString("12345678"));
	gRow.setField(3, null);
	gRow.setField(4, new BinaryGeneric<>(new MyObj(15, 5), genericSerializer));

	BaseRowSerializer serializer = new BaseRowSerializer(
		new LogicalType[]{
			DataTypes.INT().getLogicalType(),
			DataTypes.BIGINT().getLogicalType(),
			DataTypes.STRING().getLogicalType(),
			DataTypes.STRING().getLogicalType(),
			DataTypes.ANY(info).getLogicalType()
		},
		new TypeSerializer[]{
			IntSerializer.INSTANCE,
			LongSerializer.INSTANCE,
			StringSerializer.INSTANCE,
			StringSerializer.INSTANCE,
			genericSerializer
		});
	writer.writeRow(0, gRow, serializer);
	writer.complete();

	return row;
}
 
Example #28
Source File: SortCodeGeneratorTest.java    From flink with Apache License 2.0
private Object value2(LogicalType type, Random rnd) {
	switch (type.getTypeRoot()) {
		case BOOLEAN:
			return false;
		case TINYINT:
			return (byte) 0;
		case SMALLINT:
			return (short) 0;
		case INTEGER:
			return 0;
		case BIGINT:
			return 0L;
		case FLOAT:
			return 0f;
		case DOUBLE:
			return 0d;
		case VARCHAR:
			return BinaryString.fromString("0");
		case DECIMAL:
			DecimalType decimalType = (DecimalType) type;
			return Decimal.fromBigDecimal(new BigDecimal(0),
					decimalType.getPrecision(), decimalType.getScale());
		case ARRAY:
		case VARBINARY:
			byte[] bytes = new byte[rnd.nextInt(7) + 10];
			rnd.nextBytes(bytes);
			return type instanceof VarBinaryType ? bytes : BinaryArray.fromPrimitiveArray(bytes);
		case ROW:
			RowType rowType = (RowType) type;
			if (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {
				return GenericRow.of(rnd.nextInt());
			} else {
				return GenericRow.of(GenericRow.of(new Object[]{null}));
			}
		case ANY:
			return new BinaryGeneric<>(rnd.nextInt(), IntSerializer.INSTANCE);
		default:
			throw new RuntimeException("Not support!");
	}
}
 
Example #29
Source File: TupleComparatorILDXC2Test.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() {
	return new TupleSerializer<Tuple3<Integer, Long, Double>>(
			(Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class,
			new TypeSerializer[]{
				new IntSerializer(),
				new LongSerializer(),
				new DoubleSerializer()});
}
 
Example #30
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSetKey() throws IOException {
	for (int parallelism : TEST_PARALLELISMS) {
		testSetKeyInternal(IntSerializer.INSTANCE, TEST_INTS, parallelism);
		testSetKeyInternal(StringSerializer.INSTANCE, TEST_STRINGS, parallelism);
	}
}