org.apache.flink.api.common.typeutils.base.LongSerializer Java Examples
The following examples show how to use
org.apache.flink.api.common.typeutils.base.LongSerializer.
Each example lists its source file, originating project, and license above the snippet.
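Before the project snippets below, here is a minimal round-trip with LongSerializer for orientation. It is an illustrative sketch (not taken from any of the projects below) that assumes Flink's DataOutputSerializer and DataInputDeserializer utility classes:

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class LongSerializerRoundTrip {

    public static void main(String[] args) throws Exception {
        // Serialize a Long into a growable byte buffer.
        DataOutputSerializer out = new DataOutputSerializer(16);
        LongSerializer.INSTANCE.serialize(42L, out);

        // Deserialize it back from the written bytes.
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        long restored = LongSerializer.INSTANCE.deserialize(in);
        System.out.println(restored); // prints 42
    }
}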
Example #1
Source File: StatefulOperatorChainedTaskTest.java From Flink-CEPplus with Apache License 2.0

@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);
    counterState = context
        .getKeyedStateStore()
        .getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

    // set key manually to make RocksDBListState get the serialized key.
    setCurrentKey("10");

    if (context.isRestored()) {
        counter = counterState.value();
        assertEquals(snapshotOutData, counter);
        counterState.clear();
    }
}
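Outside of a test harness, the same descriptor pattern is usually set up in a rich function's open() method on a keyed stream. The following sketch uses the same ValueStateDescriptor/LongSerializer.INSTANCE combination; the class name and state name are illustrative, not from the project above:

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

// Counts events per key; the count is keyed state serialized with LongSerializer.INSTANCE.
public class CountPerKey extends RichFlatMapFunction<String, Long> {

    private transient ValueState<Long> counter;

    @Override
    public void open(Configuration parameters) {
        counter = getRuntimeContext().getState(
            new ValueStateDescriptor<>("counter-state", LongSerializer.INSTANCE));
    }

    @Override
    public void flatMap(String value, Collector<Long> out) throws Exception {
        Long current = counter.value();              // null on the first event for a key
        long next = (current == null) ? 1L : current + 1L;
        counter.update(next);
        out.collect(next);
    }
}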
Example #2
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0

@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);
    counterState = context
        .getKeyedStateStore()
        .getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

    // set key manually to make RocksDBListState get the serialized key.
    setCurrentKey("10");

    if (context.isRestored()) {
        counter = counterState.value();
        assertEquals(snapshotOutData, counter);
        counterState.clear();
    }
}
Example #3
Source File: CopyOnWriteSkipListStateMapBasicOpTest.java From flink with Apache License 2.0

/**
 * This tests the internal capability of using a partial {@link ByteBuffer}, making sure the internal methods
 * work when putting/getting state with a key stored at a non-zero offset of a ByteBuffer.
 */
@Test
public void testPutAndGetNodeWithNoneZeroOffset() {
    final int key = 10;
    final long namespace = 0L;
    final String valueString = "test";
    SkipListKeySerializer<Integer, Long> skipListKeySerializer =
        new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    SkipListValueSerializer<String> skipListValueSerializer =
        new SkipListValueSerializer<>(StringSerializer.INSTANCE);
    byte[] keyBytes = skipListKeySerializer.serialize(key, namespace);
    byte[] constructedKeyBytes = new byte[keyBytes.length + 1];
    System.arraycopy(keyBytes, 0, constructedKeyBytes, 1, keyBytes.length);
    MemorySegment keySegment = MemorySegmentFactory.wrap(constructedKeyBytes);
    int keyLen = keyBytes.length;
    byte[] value = skipListValueSerializer.serialize(valueString);
    stateMap.putValue(keySegment, 1, keyLen, value, false);
    String state = stateMap.getNode(keySegment, 1, keyLen);
    assertThat(state, is(valueString));
}
Example #4
Source File: TupleComparatorTTT1Test.java From Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() {
    return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
        (Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class,
        new TypeSerializer[]{
            new TupleSerializer<Tuple2<String, Double>>(
                (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE
                }),
            new TupleSerializer<Tuple2<Long, Long>>(
                (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    LongSerializer.INSTANCE,
                    LongSerializer.INSTANCE
                }),
            new TupleSerializer<Tuple2<Integer, Long>>(
                (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    IntSerializer.INSTANCE,
                    LongSerializer.INSTANCE
                })
        });
}
Example #5
Source File: StateBackendBenchmarkUtils.java From flink with Apache License 2.0

private static HeapKeyedStateBackend<Long> createHeapKeyedStateBackend(File rootDir) throws IOException {
    File recoveryBaseDir = prepareDirectory(recoveryDirName, rootDir);
    KeyGroupRange keyGroupRange = new KeyGroupRange(0, 1);
    int numberOfKeyGroups = keyGroupRange.getNumberOfKeyGroups();
    ExecutionConfig executionConfig = new ExecutionConfig();
    HeapPriorityQueueSetFactory priorityQueueSetFactory =
        new HeapPriorityQueueSetFactory(keyGroupRange, numberOfKeyGroups, 128);
    HeapKeyedStateBackendBuilder<Long> backendBuilder = new HeapKeyedStateBackendBuilder<>(
        null,
        new LongSerializer(),
        Thread.currentThread().getContextClassLoader(),
        numberOfKeyGroups,
        keyGroupRange,
        executionConfig,
        TtlTimeProvider.DEFAULT,
        Collections.emptyList(),
        AbstractStateBackend.getCompressionDecorator(executionConfig),
        new LocalRecoveryConfig(false, new LocalRecoveryDirectoryProviderImpl(recoveryBaseDir, new JobID(), new JobVertexID(), 0)),
        priorityQueueSetFactory,
        false,
        new CloseableRegistry());
    return backendBuilder.build();
}
Example #6
Source File: TupleComparatorTTT3Test.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() {
    return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
        (Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class,
        new TypeSerializer[]{
            new TupleSerializer<Tuple2<String, Double>>(
                (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE
                }),
            new TupleSerializer<Tuple2<Long, Long>>(
                (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    LongSerializer.INSTANCE,
                    LongSerializer.INSTANCE
                }),
            new TupleSerializer<Tuple2<Integer, Long>>(
                (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[]{
                    IntSerializer.INSTANCE,
                    LongSerializer.INSTANCE
                })
        });
}
Example #7
Source File: IntervalJoinOperator.java From Flink-CEPplus with Apache License 2.0

@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);

    this.leftBuffer = context.getKeyedStateStore().getMapState(new MapStateDescriptor<>(
        LEFT_BUFFER,
        LongSerializer.INSTANCE,
        new ListSerializer<>(new BufferEntrySerializer<>(leftTypeSerializer))
    ));

    this.rightBuffer = context.getKeyedStateStore().getMapState(new MapStateDescriptor<>(
        RIGHT_BUFFER,
        LongSerializer.INSTANCE,
        new ListSerializer<>(new BufferEntrySerializer<>(rightTypeSerializer))
    ));
}
Example #8
Source File: KVStateRequestSerializerRocksDBTest.java From flink with Apache License 2.0

/**
 * Tests list serialization and deserialization match.
 *
 * @see KvStateRequestSerializerTest#testListSerialization()
 * KvStateRequestSerializerTest#testListSerialization() using the heap state back-end
 * test
 */
@Test
public void testListSerialization() throws Exception {
    final long key = 0L;

    final RocksDBKeyedStateBackend<Long> longHeapKeyedStateBackend = RocksDBTestUtils
        .builderForTestDefaults(temporaryFolder.getRoot(), LongSerializer.INSTANCE)
        .build();
    longHeapKeyedStateBackend.setCurrentKey(key);

    final InternalListState<Long, VoidNamespace, Long> listState =
        longHeapKeyedStateBackend.createInternalState(VoidNamespaceSerializer.INSTANCE,
            new ListStateDescriptor<>("test", LongSerializer.INSTANCE));

    KvStateRequestSerializerTest.testListSerialization(key, listState);
    longHeapKeyedStateBackend.dispose();
}
Example #9
Source File: SharedBuffer.java From flink with Apache License 2.0

public SharedBuffer(KeyedStateStore stateStore, TypeSerializer<V> valueSerializer) {
    this.eventsBuffer = stateStore.getMapState(
        new MapStateDescriptor<>(
            eventsStateName,
            EventId.EventIdSerializer.INSTANCE,
            new Lockable.LockableTypeSerializer<>(valueSerializer)));

    this.entries = stateStore.getMapState(
        new MapStateDescriptor<>(
            entriesStateName,
            new NodeId.NodeIdSerializer(),
            new Lockable.LockableTypeSerializer<>(new SharedBufferNode.SharedBufferNodeSerializer())));

    this.eventsCount = stateStore.getMapState(
        new MapStateDescriptor<>(
            eventsCountStateName,
            LongSerializer.INSTANCE,
            IntSerializer.INSTANCE));
}
Example #10
Source File: CopyOnWriteSkipListStateMapBasicOpTest.java From flink with Apache License 2.0

/**
 * Test state map iterator illegal next call.
 */
@Test
public void testNamespaceNodeIteratorIllegalNextInvocation() {
    SkipListKeySerializer<Integer, Long> skipListKeySerializer =
        new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    byte[] namespaceBytes = skipListKeySerializer.serializeNamespace(namespace);
    MemorySegment namespaceSegment = MemorySegmentFactory.wrap(namespaceBytes);
    Iterator<Long> iterator = stateMap.new NamespaceNodeIterator(namespaceSegment, 0, namespaceBytes.length);
    while (iterator.hasNext()) {
        iterator.next();
    }
    try {
        iterator.next();
        fail("Should have thrown NoSuchElementException.");
    } catch (NoSuchElementException e) {
        // expected
    }
}
Example #11
Source File: LegacyStatefulJobSavepointMigrationITCase.java From Flink-CEPplus with Apache License 2.0

@Override
public void open() throws Exception {
    super.open();

    timerService = getInternalTimerService(
        "timer",
        LongSerializer.INSTANCE,
        this);
}
Example #12
Source File: LegacyStatefulJobSavepointMigrationITCase.java From Flink-CEPplus with Apache License 2.0

@Override
public void processElement(StreamRecord<Tuple2<Long, Long>> element) throws Exception {
    ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
        element.getValue().f0,
        LongSerializer.INSTANCE,
        stateDescriptor);

    assertEquals(state.value(), element.getValue().f1);

    getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESS_CHECK_ACCUMULATOR).add(1);

    output.collect(element);
}
Example #13
Source File: TtlStateFactory.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
private IS createReducingState() throws Exception {
    ReducingStateDescriptor<SV> reducingStateDesc = (ReducingStateDescriptor<SV>) stateDesc;
    ReducingStateDescriptor<TtlValue<SV>> ttlDescriptor = new ReducingStateDescriptor<>(
        stateDesc.getName(),
        new TtlReduceFunction<>(reducingStateDesc.getReduceFunction(), ttlConfig, timeProvider),
        new TtlSerializer<>(LongSerializer.INSTANCE, stateDesc.getSerializer()));
    return (IS) new TtlReducingState<>(createTtlStateContext(ttlDescriptor));
}
Example #14
Source File: StatefulJobSavepointMigrationITCase.java From flink with Apache License 2.0

@Override
public void processElement(StreamRecord<Tuple2<Long, Long>> element) throws Exception {
    ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
        element.getValue().f0,
        LongSerializer.INSTANCE,
        stateDescriptor);

    assertEquals(state.value(), element.getValue().f1);

    getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESS_CHECK_ACCUMULATOR).add(1);

    output.collect(element);
}
Example #15
Source File: TupleComparatorILDXC2Test.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() {
    return new TupleSerializer<Tuple3<Integer, Long, Double>>(
        (Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class,
        new TypeSerializer[]{
            new IntSerializer(),
            new LongSerializer(),
            new DoubleSerializer()});
}
Example #16
Source File: TupleComparatorILDX1Test.java From flink with Apache License 2.0

@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
    return new TupleComparator<Tuple3<Integer, Long, Double>>(
        new int[]{1},
        new TypeComparator[]{
            new LongComparator(ascending)
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            LongSerializer.INSTANCE
        });
}
Example #17
Source File: KvStateRequestSerializerTest.java From flink with Apache License 2.0

/**
 * Tests map deserialization with too few bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeMapTooShort2() throws Exception {
    // Long (Key) + 1 byte (incomplete Value)
    KvStateSerializer.deserializeMap(new byte[]{1, 1, 1, 1, 1, 1, 1, 1, 0},
        LongSerializer.INSTANCE, LongSerializer.INSTANCE);
}
Example #18
Source File: KvStateRequestSerializerTest.java From Flink-CEPplus with Apache License 2.0

/**
 * Tests map deserialization with too few bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeMapTooShort3() throws Exception {
    // Long (Key1) + Boolean (false) + Long (Value1) + 1 byte (incomplete Key2)
    KvStateSerializer.deserializeMap(
        new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3},
        LongSerializer.INSTANCE, LongSerializer.INSTANCE);
}
Example #19
Source File: TupleComparatorTTT1Test.java From Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleComparator<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createComparator(
        boolean ascending) {
    return new TupleComparator<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
        new int[] { 0 },
        new TypeComparator[] {
            new TupleComparator<Tuple2<String, Double>>(
                new int[] { 0, 1 },
                new TypeComparator[] {
                    new StringComparator(ascending),
                    new DoubleComparator(ascending) },
                new TypeSerializer[] {
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE })
        },
        new TypeSerializer[] {
            new TupleSerializer<Tuple2<String, Double>>(
                (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE }),
            new TupleSerializer<Tuple2<Long, Long>>(
                (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    LongSerializer.INSTANCE,
                    LongSerializer.INSTANCE }),
            new TupleSerializer<Tuple2<Integer, Long>>(
                (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    IntSerializer.INSTANCE,
                    LongSerializer.INSTANCE })
        });
}
Example #20
Source File: MigrationUtils.java From flink with Apache License 2.0

static <T> Queue<ComputationState> deserializeComputationStates(
        org.apache.flink.cep.nfa.SharedBuffer<T> sharedBuffer,
        TypeSerializer<T> eventSerializer,
        DataInputView source) throws IOException {
    Queue<ComputationState> computationStates = new LinkedList<>();
    StringSerializer stateNameSerializer = StringSerializer.INSTANCE;
    LongSerializer timestampSerializer = LongSerializer.INSTANCE;
    DeweyNumber.DeweyNumberSerializer versionSerializer = DeweyNumber.DeweyNumberSerializer.INSTANCE;

    int computationStateNo = source.readInt();
    for (int i = 0; i < computationStateNo; i++) {
        String state = stateNameSerializer.deserialize(source);
        String prevState = stateNameSerializer.deserialize(source);
        long timestamp = timestampSerializer.deserialize(source);
        DeweyNumber version = versionSerializer.deserialize(source);
        long startTimestamp = timestampSerializer.deserialize(source);
        int counter = source.readInt();

        T event = null;
        if (source.readBoolean()) {
            event = eventSerializer.deserialize(source);
        }

        NodeId nodeId;
        EventId startEventId;
        if (prevState != null) {
            nodeId = sharedBuffer.getNodeId(prevState, timestamp, counter, event);
            startEventId = sharedBuffer.getStartEventId(version.getRun());
        } else {
            nodeId = null;
            startEventId = null;
        }

        computationStates.add(ComputationState.createState(state, nodeId, version, startTimestamp, startEventId));
    }
    return computationStates;
}
Example #21
Source File: TupleComparatorILD2Test.java From Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() {
    return new TupleSerializer<Tuple3<Integer, Long, Double>>(
        (Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class,
        new TypeSerializer[]{
            new IntSerializer(),
            new LongSerializer(),
            new DoubleSerializer()});
}
Example #22
Source File: TupleComparatorTTT2Test.java From Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
protected TupleComparator<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createComparator(
        boolean ascending) {
    return new TupleComparator<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>(
        new int[] { 0, 2 },
        new TypeComparator[] {
            new TupleComparator<Tuple2<String, Double>>(
                new int[] { 0, 1 },
                new TypeComparator[] {
                    new StringComparator(ascending),
                    new DoubleComparator(ascending) },
                new TypeSerializer[] {
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE }),
            new TupleComparator<Tuple2<Integer, Long>>(
                new int[] { 0, 1 },
                new TypeComparator[] {
                    new IntComparator(ascending),
                    new LongComparator(ascending) },
                new TypeSerializer[] {
                    IntSerializer.INSTANCE,
                    LongSerializer.INSTANCE })
        },
        new TypeSerializer[] {
            new TupleSerializer<Tuple2<String, Double>>(
                (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    StringSerializer.INSTANCE,
                    DoubleSerializer.INSTANCE }),
            new TupleSerializer<Tuple2<Long, Long>>(
                (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    LongSerializer.INSTANCE,
                    LongSerializer.INSTANCE }),
            new TupleSerializer<Tuple2<Integer, Long>>(
                (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class,
                new TypeSerializer[] {
                    IntSerializer.INSTANCE,
                    LongSerializer.INSTANCE })
        });
}
Example #23
Source File: TupleComparatorILD3Test.java From flink with Apache License 2.0

@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
    return new TupleComparator<Tuple3<Integer, Long, Double>>(
        new int[]{0, 1, 2},
        new TypeComparator[]{
            new IntComparator(ascending),
            new LongComparator(ascending),
            new DoubleComparator(ascending)
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            LongSerializer.INSTANCE,
            DoubleSerializer.INSTANCE
        });
}
Example #24
Source File: FlinkKafkaConsumerBase.java From flink with Apache License 2.0

/**
 * Creates the state serializer for the Kafka topic partition to offset tuple.
 * An explicit state serializer is needed instead of a KryoSerializer-based generic one,
 * because otherwise users cannot use the 'disableGenericTypes' property with the KafkaConsumer.
 */
@VisibleForTesting
static TupleSerializer<Tuple2<KafkaTopicPartition, Long>> createStateSerializer(ExecutionConfig executionConfig) {
    // the explicit serializer keeps compatibility with GenericTypeInformation and allows users to disableGenericTypes
    TypeSerializer<?>[] fieldSerializers = new TypeSerializer<?>[]{
        new KryoSerializer<>(KafkaTopicPartition.class, executionConfig),
        LongSerializer.INSTANCE
    };
    @SuppressWarnings("unchecked")
    Class<Tuple2<KafkaTopicPartition, Long>> tupleClass =
        (Class<Tuple2<KafkaTopicPartition, Long>>) (Class<?>) Tuple2.class;
    return new TupleSerializer<>(tupleClass, fieldSerializers);
}
Example #25
Source File: TupleComparatorILDX1Test.java From Flink-CEPplus with Apache License 2.0

@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
    return new TupleComparator<Tuple3<Integer, Long, Double>>(
        new int[]{1},
        new TypeComparator[]{
            new LongComparator(ascending)
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            LongSerializer.INSTANCE
        });
}
Example #26
Source File: FunctionGroupOperator.java From stateful-functions with Apache License 2.0

private InternalListState<String, Long, Message> delayedMessagesBufferState(
        ListStateDescriptor<Message> delayedMessageStateDescriptor) {
    try {
        KeyedStateBackend<String> keyedStateBackend = getKeyedStateBackend();
        return (InternalListState<String, Long, Message>)
            keyedStateBackend.getOrCreateKeyedState(LongSerializer.INSTANCE, delayedMessageStateDescriptor);
    } catch (Exception e) {
        throw new RuntimeException("Error registered Flink state for delayed messages buffer.", e);
    }
}
Example #27
Source File: TupleComparatorILDXC2Test.java From Flink-CEPplus with Apache License 2.0

@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
    return new TupleComparator<Tuple3<Integer, Long, Double>>(
        new int[]{2, 1},
        new TypeComparator[]{
            new DoubleComparator(ascending),
            new LongComparator(ascending)
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            DoubleSerializer.INSTANCE,
            LongSerializer.INSTANCE
        });
}
Example #28
Source File: KvStateRequestSerializerTest.java From flink with Apache License 2.0

/**
 * Tests list deserialization with too few bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeListTooShort2() throws Exception {
    // Long + 1 byte (separator) + 1 byte (incomplete Long)
    KvStateSerializer.deserializeList(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 3},
        LongSerializer.INSTANCE);
}
Example #29
Source File: StatefulSequenceSource.java From flink with Apache License 2.0

@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
    Preconditions.checkState(this.checkpointedState == null,
        "The " + getClass().getSimpleName() + " has already been initialized.");

    this.checkpointedState = context.getOperatorStateStore().getListState(
        new ListStateDescriptor<>(
            "stateful-sequence-source-state",
            LongSerializer.INSTANCE
        )
    );

    this.valuesToEmit = new ArrayDeque<>();
    if (context.isRestored()) {
        // upon restoring
        for (Long v : this.checkpointedState.get()) {
            this.valuesToEmit.add(v);
        }
    } else {
        // the first time the job is executed
        final int stepSize = getRuntimeContext().getNumberOfParallelSubtasks();
        final int taskIdx = getRuntimeContext().getIndexOfThisSubtask();
        final long congruence = start + taskIdx;

        long totalNoOfElements = Math.abs(end - start + 1);
        final int baseSize = safeDivide(totalNoOfElements, stepSize);
        final int toCollect = (totalNoOfElements % stepSize > taskIdx) ? baseSize + 1 : baseSize;

        for (long collected = 0; collected < toCollect; collected++) {
            this.valuesToEmit.add(collected * stepSize + congruence);
        }
    }
}