org.apache.flink.api.common.typeutils.base.IntSerializer Java Examples
The following examples show how to use
org.apache.flink.api.common.typeutils.base.IntSerializer.
Each example notes its source file, the project it comes from, and that project's license.
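Before the project examples, here is a minimal standalone round-trip sketch (not taken from any of the projects below) showing what IntSerializer itself does: it writes an int as a fixed 4 bytes to a DataOutputView and reads it back from a DataInputView. The class name is invented for illustration, and the import paths assume a recent Flink distribution.

import java.io.IOException;

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class IntSerializerRoundTrip {

    public static void main(String[] args) throws IOException {
        // IntSerializer is stateless, so the shared INSTANCE can be reused freely.
        IntSerializer serializer = IntSerializer.INSTANCE;

        // Write the value into an in-memory DataOutputView.
        DataOutputSerializer out = new DataOutputSerializer(8);
        serializer.serialize(42, out);

        // Read it back from the produced bytes.
        DataInputDeserializer in = new DataInputDeserializer();
        in.setBuffer(out.getCopyOfBuffer());
        int roundTripped = serializer.deserialize(in);

        // getLength() reports the fixed encoded size of an int (4 bytes).
        System.out.println(roundTripped + " (" + serializer.getLength() + " bytes)");
    }
}

The same INSTANCE/serialize/deserialize pattern appears throughout the examples below, usually with the serializer handed to a state backend, test harness, or composite serializer rather than called directly.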
Example #1
Source File: FlinkKafkaProducer011ITCase.java from Flink-CEPplus with Apache License 2.0

private OneInputStreamOperatorTestHarness<Integer, Object> createTestHarness(
        String topic,
        int maxParallelism,
        int parallelism,
        int subtaskIndex,
        Semantic semantic) throws Exception {
    Properties properties = createProperties();

    FlinkKafkaProducer011<Integer> kafkaProducer = new FlinkKafkaProducer011<>(
        topic,
        integerKeyedSerializationSchema,
        properties,
        semantic);

    return new OneInputStreamOperatorTestHarness<>(
        new StreamSink<>(kafkaProducer),
        maxParallelism,
        parallelism,
        subtaskIndex,
        IntSerializer.INSTANCE,
        new OperatorID(42, 44));
}
Example #2
Source File: StateBackendTestBase.java from Flink-CEPplus with Apache License 2.0

/**
 * Verify that an empty {@code ValueState} will yield the default value.
 */
@Test
public void testValueStateDefaultValue() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

    ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, "Hello");

    ValueState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

    backend.setCurrentKey(1);
    assertEquals("Hello", state.value());

    state.update("Ciao");
    assertEquals("Ciao", state.value());

    state.clear();
    assertEquals("Hello", state.value());

    backend.dispose();
}
Example #3
Source File: TupleComparatorTTT2Test.java from flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() {
    return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
            (Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class,
            new TypeSerializer[]{
                    new TupleSerializer<Tuple2<String, Double>>(
                            (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
                            new TypeSerializer[]{ StringSerializer.INSTANCE, DoubleSerializer.INSTANCE }),
                    new TupleSerializer<Tuple2<Long, Long>>(
                            (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
                            new TypeSerializer[]{ LongSerializer.INSTANCE, LongSerializer.INSTANCE }),
                    new TupleSerializer<Tuple2<Integer, Long>>(
                            (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
                            new TypeSerializer[]{ IntSerializer.INSTANCE, LongSerializer.INSTANCE })
            });
}
Example #4
Source File: StateBackendMigrationTestBase.java from Flink-CEPplus with Apache License 2.0

@Test
public void testBroadcastStateRegistrationFailsIfNewKeySerializerIsIncompatible() throws Exception {
    final String stateName = "broadcast-state";

    try {
        testBroadcastStateKeyUpgrade(
            new MapStateDescriptor<>(
                stateName,
                new TestType.V1TestTypeSerializer(),
                IntSerializer.INSTANCE),
            new MapStateDescriptor<>(
                stateName,
                // new key serializer is incompatible
                new TestType.IncompatibleTestTypeSerializer(),
                IntSerializer.INSTANCE));

        Assert.fail("should have failed.");
    } catch (Exception e) {
        Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
    }
}
Example #5
Source File: FromElementsFunction.java from flink with Apache License 2.0

@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
    Preconditions.checkState(this.checkpointedState == null,
        "The " + getClass().getSimpleName() + " has already been initialized.");

    this.checkpointedState = context.getOperatorStateStore().getListState(
        new ListStateDescriptor<>(
            "from-elements-state",
            IntSerializer.INSTANCE
        )
    );

    if (context.isRestored()) {
        List<Integer> retrievedStates = new ArrayList<>();
        for (Integer entry : this.checkpointedState.get()) {
            retrievedStates.add(entry);
        }

        // given that the parallelism of the function is 1, we can only have 1 state
        Preconditions.checkArgument(retrievedStates.size() == 1,
            getClass().getSimpleName() + " retrieved invalid state.");

        this.numElementsToSkip = retrievedStates.get(0);
    }
}
Example #6
Source File: StateBackendTestBase.java from flink with Apache License 2.0

@Test
public void testValueStateWorkWithTtl() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);
    try {
        ValueStateDescriptor<MutableLong> kvId = new ValueStateDescriptor<>("id", MutableLong.class);
        kvId.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(1)).build());

        ValueState<MutableLong> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

        backend.setCurrentKey(1);
        state.update(new MutableLong());
        state.value();
    } finally {
        backend.close();
        backend.dispose();
    }
}
Example #7
Source File: TypeSerializerSerializationUtilTest.java from Flink-CEPplus with Apache License 2.0

/**
 * Verifies deserialization failure cases when reading a serializer from bytes, in the
 * case of a {@link InvalidClassException}.
 */
@Test
public void testSerializerSerializationWithInvalidClass() throws Exception {
    TypeSerializer<?> serializer = IntSerializer.INSTANCE;

    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        TypeSerializerSerializationUtil.writeSerializer(new DataOutputViewStreamWrapper(out), serializer);
        serialized = out.toByteArray();
    }

    TypeSerializer<?> deserializedSerializer;
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        deserializedSerializer = TypeSerializerSerializationUtil.tryReadSerializer(
            new DataInputViewStreamWrapper(in),
            new ArtificialCNFExceptionThrowingClassLoader(
                Thread.currentThread().getContextClassLoader(),
                Collections.singleton(IntSerializer.class.getName())),
            true);
    }
    Assert.assertTrue(deserializedSerializer instanceof UnloadableDummyTypeSerializer);
}
Example #8
Source File: InternalTimerServiceImplTest.java from flink with Apache License 2.0

private static InternalTimerServiceImpl<Integer, String> createAndStartInternalTimerService(
        Triggerable<Integer, String> triggerable,
        KeyContext keyContext,
        ProcessingTimeService processingTimeService,
        KeyGroupRange keyGroupList,
        PriorityQueueSetFactory priorityQueueSetFactory) {
    InternalTimerServiceImpl<Integer, String> service = createInternalTimerService(
        keyGroupList,
        keyContext,
        processingTimeService,
        IntSerializer.INSTANCE,
        StringSerializer.INSTANCE,
        priorityQueueSetFactory);

    service.startTimerService(IntSerializer.INSTANCE, StringSerializer.INSTANCE, triggerable);
    return service;
}
Example #9
Source File: StateBackendTestBase.java from Flink-CEPplus with Apache License 2.0

/**
 * Verify that an empty {@code ReduceState} yields {@code null}.
 */
@Test
public void testReducingStateDefaultValue() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

    ReducingStateDescriptor<String> kvId = new ReducingStateDescriptor<>("id", new AppendingReduce(), String.class);

    ReducingState<String> state = backend.getPartitionedState(
        VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

    backend.setCurrentKey(1);
    assertNull(state.get());

    state.add("Ciao");
    assertEquals("Ciao", state.get());

    state.clear();
    assertNull(state.get());

    backend.dispose();
}
Example #10
Source File: StateBackendTestBase.java from flink with Apache License 2.0

/**
 * Verify that an empty {@code FoldingState} yields {@code null}.
 */
@Test
public void testFoldingStateDefaultValue() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

    FoldingStateDescriptor<Integer, String> kvId =
        new FoldingStateDescriptor<>("id", "Fold-Initial:", new AppendingFold(), String.class);

    FoldingState<Integer, String> state = backend.getPartitionedState(
        VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

    backend.setCurrentKey(1);
    assertNull(state.get());

    state.add(1);
    state.add(2);
    assertEquals("Fold-Initial:,1,2", state.get());

    state.clear();
    assertNull(state.get());

    backend.dispose();
}
Example #11
Source File: StateBackendMigrationTestBase.java from flink with Apache License 2.0

@Test
public void testBroadcastStateRegistrationFailsIfNewKeySerializerIsIncompatible() {
    final String stateName = "broadcast-state";

    try {
        testBroadcastStateKeyUpgrade(
            new MapStateDescriptor<>(
                stateName,
                new TestType.V1TestTypeSerializer(),
                IntSerializer.INSTANCE),
            new MapStateDescriptor<>(
                stateName,
                // new key serializer is incompatible
                new TestType.IncompatibleTestTypeSerializer(),
                IntSerializer.INSTANCE));

        fail("should have failed.");
    } catch (Exception e) {
        Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
    }
}
Example #12
Source File: OutputEmitterTest.java from Flink-CEPplus with Apache License 2.0

@Test
public void testPartitionHash() {
    // Test for IntValue
    verifyPartitionHashSelectedChannels(50000, 100, RecordType.INTEGER);
    // Test for StringValue
    verifyPartitionHashSelectedChannels(10000, 100, RecordType.STRING);

    // Test hash corner cases
    final TestIntComparator testIntComp = new TestIntComparator();
    final ChannelSelector<SerializationDelegate<Integer>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, testIntComp, 100);
    final SerializationDelegate<Integer> serializationDelegate = new SerializationDelegate<>(new IntSerializer());

    assertPartitionHashSelectedChannels(selector, serializationDelegate, Integer.MIN_VALUE, 100);
    assertPartitionHashSelectedChannels(selector, serializationDelegate, -1, 100);
    assertPartitionHashSelectedChannels(selector, serializationDelegate, 0, 100);
    assertPartitionHashSelectedChannels(selector, serializationDelegate, 1, 100);
    assertPartitionHashSelectedChannels(selector, serializationDelegate, Integer.MAX_VALUE, 100);
}
Example #13
Source File: RocksDBKeySerializationUtilsTest.java from Flink-CEPplus with Apache License 2.0

@Test
public void testKeySerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();

    // test for key
    for (int orgKey = 0; orgKey < 100; ++orgKey) {
        outputView.clear();
        RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgKey, deserializedKey);

        RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgKey, deserializedKey);
    }
}
Example #14
Source File: NonReusingSortMergeInnerJoinIteratorITCase.java from flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
    serializer1 = new TupleSerializer<Tuple2<Integer, String>>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    serializer2 = new TupleSerializer<Tuple2<Integer, String>>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    comparator1 = new TupleComparator<Tuple2<Integer, String>>(
            new int[]{0},
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    comparator2 = new TupleComparator<Tuple2<Integer, String>>(
            new int[]{0},
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    pairComparator = new GenericPairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>>(comparator1, comparator2);

    this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
    this.ioManager = new IOManagerAsync();
}
Example #15
Source File: ReusingSortMergeInnerJoinIteratorITCase.java from flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
    serializer1 = new TupleSerializer<Tuple2<Integer, String>>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    serializer2 = new TupleSerializer<Tuple2<Integer, String>>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    comparator1 = new TupleComparator<Tuple2<Integer, String>>(
            new int[]{0},
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    comparator2 = new TupleComparator<Tuple2<Integer, String>>(
            new int[]{0},
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    pairComparator = new GenericPairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>>(comparator1, comparator2);

    this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
    this.ioManager = new IOManagerAsync();
}
Example #16
Source File: StateBackendMigrationTestBase.java from Flink-CEPplus with Apache License 2.0

@Test
public void testBroadcastStateRegistrationFailsIfNewValueSerializerIsIncompatible() throws Exception {
    final String stateName = "broadcast-state";

    try {
        testBroadcastStateValueUpgrade(
            new MapStateDescriptor<>(
                stateName,
                IntSerializer.INSTANCE,
                new TestType.V1TestTypeSerializer()),
            new MapStateDescriptor<>(
                stateName,
                IntSerializer.INSTANCE,
                // new value serializer is incompatible
                new TestType.IncompatibleTestTypeSerializer()));

        Assert.fail("should have failed.");
    } catch (Exception e) {
        Assert.assertTrue(ExceptionUtils.findThrowable(e, StateMigrationException.class).isPresent());
    }
}
Example #17
Source File: PojoSerializerSnapshotTest.java from flink with Apache License 2.0

@Test
public void testRestoreSerializerWithSameFields() {
    final PojoSerializerSnapshot<TestPojo> testSnapshot = buildTestSnapshot(Arrays.asList(
        ID_FIELD,
        NAME_FIELD,
        HEIGHT_FIELD
    ));

    final TypeSerializer<TestPojo> restoredSerializer = testSnapshot.restoreSerializer();
    assertSame(restoredSerializer.getClass(), PojoSerializer.class);
    final PojoSerializer<TestPojo> restoredPojoSerializer = (PojoSerializer<TestPojo>) restoredSerializer;

    final Field[] restoredFields = restoredPojoSerializer.getFields();
    assertArrayEquals(
        new Field[] { ID_FIELD.field, NAME_FIELD.field, HEIGHT_FIELD.field },
        restoredFields);

    final TypeSerializer<?>[] restoredFieldSerializers = restoredPojoSerializer.getFieldSerializers();
    assertArrayEquals(
        new TypeSerializer[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE, DoubleSerializer.INSTANCE },
        restoredFieldSerializers);
}
Example #18
Source File: FlinkKafkaProducer011MigrationTest.java from flink with Apache License 2.0

@Override
protected OneInputStreamOperatorTestHarness<Integer, Object> createTestHarness() throws Exception {
    FlinkKafkaProducer011<Integer> kafkaProducer = new FlinkKafkaProducer011<>(
        TOPIC,
        integerKeyedSerializationSchema,
        createProperties(),
        FlinkKafkaProducer011.Semantic.EXACTLY_ONCE
    ).ignoreFailuresAfterTransactionTimeout();

    return new OneInputStreamOperatorTestHarness<>(
        new StreamSink<>(kafkaProducer),
        1,
        1,
        0,
        IntSerializer.INSTANCE,
        new OperatorID(1, 1));
}
Example #19
Source File: StateBackendTestBase.java from flink with Apache License 2.0

/**
 * Verify that an empty {@code ValueState} will yield the default value.
 */
@Test
public void testValueStateDefaultValue() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

    ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, "Hello");

    ValueState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

    backend.setCurrentKey(1);
    assertEquals("Hello", state.value());

    state.update("Ciao");
    assertEquals("Ciao", state.value());

    state.clear();
    assertEquals("Hello", state.value());

    backend.dispose();
}
Example #20
Source File: StateBackendTestBase.java from Flink-CEPplus with Apache License 2.0

@Test
public void testCopyDefaultValue() throws Exception {
    final AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

    ValueStateDescriptor<IntValue> kvId = new ValueStateDescriptor<>("id", IntValue.class, new IntValue(-1));

    ValueState<IntValue> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

    backend.setCurrentKey(1);
    IntValue default1 = state.value();

    backend.setCurrentKey(2);
    IntValue default2 = state.value();

    assertNotNull(default1);
    assertNotNull(default2);
    assertEquals(default1, default2);
    assertFalse(default1 == default2);

    backend.dispose();
}
Example #21
Source File: TupleComparatorILDXC2Test.java from flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() {
    return new TupleSerializer<Tuple3<Integer, Long, Double>>(
            (Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class,
            new TypeSerializer[]{
                    new IntSerializer(),
                    new LongSerializer(),
                    new DoubleSerializer()});
}
Example #22
Source File: StateBackendMigrationTestBase.java from Flink-CEPplus with Apache License 2.0

@Test
public void testBroadcastStateKeySerializerReconfiguration() throws Exception {
    final String stateName = "broadcast-state";

    testBroadcastStateKeyUpgrade(
        new MapStateDescriptor<>(
            stateName,
            new TestType.V1TestTypeSerializer(),
            IntSerializer.INSTANCE),
        new MapStateDescriptor<>(
            stateName,
            // new key serializer is a new serializer that requires reconfiguration
            new TestType.ReconfigurationRequiringTestTypeSerializer(),
            IntSerializer.INSTANCE));
}
Example #23
Source File: AsyncWaitOperatorTest.java from flink with Apache License 2.0

private void testUserExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
    UserExceptionAsyncFunction asyncWaitFunction = new UserExceptionAsyncFunction();
    long timeout = 2000L;

    AsyncWaitOperator<Integer, Integer> asyncWaitOperator = new AsyncWaitOperator<>(
        asyncWaitFunction,
        TIMEOUT,
        2,
        outputMode);

    final MockEnvironment mockEnvironment = createMockEnvironment();
    mockEnvironment.setExpectedExternalFailureCause(Throwable.class);

    OneInputStreamOperatorTestHarness<Integer, Integer> harness = new OneInputStreamOperatorTestHarness<>(
        asyncWaitOperator,
        IntSerializer.INSTANCE,
        mockEnvironment);

    harness.open();

    synchronized (harness.getCheckpointLock()) {
        harness.processElement(1, 1L);
    }

    synchronized (harness.getCheckpointLock()) {
        harness.close();
    }

    assertTrue(harness.getEnvironment().getActualExternalFailureCause().isPresent());
}
Example #24
Source File: SourceFunctionTest.java from flink with Apache License 2.0

@Test
public void fromElementsTest() throws Exception {
    List<Integer> expectedList = Arrays.asList(1, 2, 3);
    List<Integer> actualList = SourceFunctionUtil.runSourceFunction(
        CommonTestUtils.createCopySerializable(
            new FromElementsFunction<Integer>(
                IntSerializer.INSTANCE,
                1,
                2,
                3)));
    assertEquals(expectedList, actualList);
}
Example #25
Source File: StateBackendTestBase.java from Flink-CEPplus with Apache License 2.0

@Test
public void testCheckConcurrencyProblemWhenPerformingCheckpointAsync() throws Exception {
    CheckpointStreamFactory streamFactory = createStreamFactory();
    Environment env = new DummyEnvironment();
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE, env);

    ExecutorService executorService = Executors.newScheduledThreadPool(1);
    try {
        long checkpointID = 0;
        List<Future> futureList = new ArrayList();
        for (int i = 0; i < 10; ++i) {
            ValueStateDescriptor<Integer> kvId = new ValueStateDescriptor<>("id" + i, IntSerializer.INSTANCE);
            ValueState<Integer> state = backend.getOrCreateKeyedState(VoidNamespaceSerializer.INSTANCE, kvId);
            ((InternalValueState) state).setCurrentNamespace(VoidNamespace.INSTANCE);
            backend.setCurrentKey(i);
            state.update(i);
            futureList.add(runSnapshotAsync(executorService,
                backend.snapshot(checkpointID++, System.currentTimeMillis(), streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation())));
        }

        for (Future future : futureList) {
            future.get(20, TimeUnit.SECONDS);
        }
    } catch (Exception e) {
        fail();
    } finally {
        backend.dispose();
        executorService.shutdown();
    }
}
Example #26
Source File: TupleComparatorISD2Test.java from Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, String, Double>> createSerializer() {
    return new TupleSerializer<Tuple3<Integer, String, Double>>(
            (Class<Tuple3<Integer, String, Double>>) (Class<?>) Tuple3.class,
            new TypeSerializer[]{
                    new IntSerializer(),
                    new StringSerializer(),
                    new DoubleSerializer()});
}
Example #27
Source File: RocksDBRocksStateKeysIteratorTest.java from flink with Apache License 2.0

@Test
public void testIterator() throws Exception {
    // test for keyGroupPrefixBytes == 1 && ambiguousKeyPossible == false
    testIteratorHelper(IntSerializer.INSTANCE, StringSerializer.INSTANCE, 128, i -> i);

    // test for keyGroupPrefixBytes == 1 && ambiguousKeyPossible == true
    testIteratorHelper(StringSerializer.INSTANCE, StringSerializer.INSTANCE, 128, i -> String.valueOf(i));

    // test for keyGroupPrefixBytes == 2 && ambiguousKeyPossible == false
    testIteratorHelper(IntSerializer.INSTANCE, StringSerializer.INSTANCE, 256, i -> i);

    // test for keyGroupPrefixBytes == 2 && ambiguousKeyPossible == true
    testIteratorHelper(StringSerializer.INSTANCE, StringSerializer.INSTANCE, 256, i -> String.valueOf(i));
}
Example #28
Source File: StreamingRuntimeContextTest.java from Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
private static AbstractStreamOperator<?> createListPlainMockOp() throws Exception {
    AbstractStreamOperator<?> operatorMock = mock(AbstractStreamOperator.class);
    ExecutionConfig config = new ExecutionConfig();

    KeyedStateBackend keyedStateBackend = mock(KeyedStateBackend.class);

    DefaultKeyedStateStore keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, config);

    when(operatorMock.getExecutionConfig()).thenReturn(config);

    doAnswer(new Answer<ListState<String>>() {

        @Override
        public ListState<String> answer(InvocationOnMock invocationOnMock) throws Throwable {
            ListStateDescriptor<String> descr = (ListStateDescriptor<String>) invocationOnMock.getArguments()[2];

            AbstractKeyedStateBackend<Integer> backend = new MemoryStateBackend().createKeyedStateBackend(
                new DummyEnvironment("test_task", 1, 0),
                new JobID(),
                "test_op",
                IntSerializer.INSTANCE,
                1,
                new KeyGroupRange(0, 0),
                new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()),
                TtlTimeProvider.DEFAULT,
                new UnregisteredMetricsGroup(),
                Collections.emptyList(),
                new CloseableRegistry());
            backend.setCurrentKey(0);
            return backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, descr);
        }
    }).when(keyedStateBackend).getPartitionedState(Matchers.any(), any(TypeSerializer.class), any(ListStateDescriptor.class));

    when(operatorMock.getKeyedStateStore()).thenReturn(keyedStateStore);
    when(operatorMock.getOperatorID()).thenReturn(new OperatorID());
    return operatorMock;
}
Example #29
Source File: StateBackendTestBase.java from flink with Apache License 2.0

@Test
public void testCheckConcurrencyProblemWhenPerformingCheckpointAsync() throws Exception {
    CheckpointStreamFactory streamFactory = createStreamFactory();
    Environment env = new DummyEnvironment();
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE, env);

    ExecutorService executorService = Executors.newScheduledThreadPool(1);
    try {
        long checkpointID = 0;
        List<Future> futureList = new ArrayList();
        for (int i = 0; i < 10; ++i) {
            ValueStateDescriptor<Integer> kvId = new ValueStateDescriptor<>("id" + i, IntSerializer.INSTANCE);
            ValueState<Integer> state = backend.getOrCreateKeyedState(VoidNamespaceSerializer.INSTANCE, kvId);
            ((InternalValueState) state).setCurrentNamespace(VoidNamespace.INSTANCE);
            backend.setCurrentKey(i);
            state.update(i);
            futureList.add(runSnapshotAsync(executorService,
                backend.snapshot(checkpointID++, System.currentTimeMillis(), streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation())));
        }

        for (Future future : futureList) {
            future.get(20, TimeUnit.SECONDS);
        }
    } catch (Exception e) {
        fail();
    } finally {
        backend.dispose();
        executorService.shutdown();
    }
}
Example #30
Source File: TupleComparatorISD1Test.java from Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, String, Double>> createSerializer() {
    return new TupleSerializer<Tuple3<Integer, String, Double>>(
            (Class<Tuple3<Integer, String, Double>>) (Class<?>) Tuple3.class,
            new TypeSerializer[]{
                    new IntSerializer(),
                    new StringSerializer(),
                    new DoubleSerializer()});
}