akka.stream.javadsl.Source Java Examples

The following examples show how to use akka.stream.javadsl.Source. Each example is taken from an open-source project; the source file, project, and license are noted above it.
Example #1
Source File: SubscriptionActorTest.java    From ditto with Eclipse Public License 2.0
@Test
public void partialFailure() {
    // comment the next line to get logs for debugging
    actorSystem.eventStream().setLogLevel(Attributes.logLevelOff());
    new TestKit(actorSystem) {{
        final ActorRef underTest = watch(newSubscriptionActor(Duration.ofMinutes(1L), this));
        final String subscriptionId = underTest.path().name();
        final DittoRuntimeException error =
                InvalidRqlExpressionException.fromMessage("mock error", DittoHeaders.empty());
        // not possible to use Source.concat -- it forces the second source immediately.
        final Source<JsonArray, NotUsed> lazilyFailingSource =
                Source.from(List.of(Source.single(JsonArray.of(1)),
                        Source.lazily(() -> Source.<JsonArray>failed(error))))
                        .flatMapConcat(x -> x);
        connect(underTest, lazilyFailingSource, this);
        underTest.tell(RequestFromSubscription.of(subscriptionId, 1L, DittoHeaders.empty()), getRef());
        expectMsg(SubscriptionHasNextPage.of(subscriptionId, JsonArray.of(1), DittoHeaders.empty()));
        expectMsg(SubscriptionFailed.of(subscriptionId, error, DittoHeaders.empty()));
    }};
}
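The inline comment is the crux of this test: Source.concat would materialize the failing source eagerly, so it is deferred with Source.lazily and flattened with flatMapConcat. The same trick in isolation, as a minimal hypothetical sketch (not from the project; assumes Source, NotUsed and List are imported as in the test above):

// Hypothetical sketch: defer the failing source so the failure only surfaces
// after the first source has emitted its element.
final Source<Integer, NotUsed> lazilyFailing =
        Source.from(List.of(Source.single(1),
                Source.lazily(() -> Source.<Integer>failed(new IllegalStateException("boom")))))
                .flatMapConcat(x -> x);
// downstream receives 1, then fails with the IllegalStateException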
 
Example #2
Source File: AriEventProcessing.java    From ari-proxy with GNU Affero General Public License v3.0
private static Try<Source<ProducerRecord<String, String>, NotUsed>> createSource(
		String kafkaCommandsTopic,
		String kafkaEventsAndResponsesTopic,
		AriMessageType type,
		LoggingAdapter log,
		String callContext,
		JsonNode messageBody) {

	final AriMessageEnvelope envelope = new AriMessageEnvelope(
			type,
			kafkaCommandsTopic,
			messageBody,
			callContext
	);

	return Try.of(() -> writer.writeValueAsString(envelope))
			.map(marshalledEnvelope -> {
				log.debug("[ARI MESSAGE TYPE] {}", envelope.getType());
				return Source.single(new ProducerRecord<>(
						kafkaEventsAndResponsesTopic,
						callContext,
						marshalledEnvelope
				));
			});
}
 
Example #3
Source File: MongoTimestampPersistence.java    From ditto with Eclipse Public License 2.0
/**
 * Creates the capped collection {@code collectionName} in the given {@code database} if it doesn't exist yet.
 *
 * @param database The database to use.
 * @param collectionName The name of the capped collection that should be created.
 * @param cappedCollectionSizeInBytes The size in bytes of the collection that should be created.
 * @param materializer The actor materializer to pre-materialize the restart source.
 * @return the created or retrieved collection.
 */
private static Source<MongoCollection, NotUsed> createOrGetCappedCollection(
        final MongoDatabase database,
        final String collectionName,
        final long cappedCollectionSizeInBytes,
        final ActorMaterializer materializer) {

    final Source<Success, NotUsed> createCollectionSource =
            repeatableCreateCappedCollectionSource(database, collectionName, cappedCollectionSizeInBytes);

    final Source<MongoCollection, NotUsed> infiniteCollectionSource =
            createCollectionSource.map(success -> database.getCollection(collectionName))
                    .flatMapConcat(Source::repeat);

    final Source<MongoCollection, NotUsed> restartSource =
            RestartSource.withBackoff(BACKOFF_MIN, BACKOFF_MAX, 1.0, () -> infiniteCollectionSource);

    // pre-materialize source with BroadcastHub so that a successfully obtained capped collection is reused
    // until the stream fails, whereupon it gets recreated with backoff.
    return restartSource.runWith(BroadcastHub.of(MongoCollection.class, 1), materializer);
}
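The comment above describes the pre-materialization trick: the restartable source is run once into a BroadcastHub, and the resulting source handle can then be attached to any number of consumers. A minimal hypothetical sketch of the same pattern (not from the project; assumes a materializer in scope and an Akka version with the java.time.Duration overload of RestartSource.withBackoff):

// Hypothetical sketch: one running upstream, shared by all later consumers.
final Source<String, NotUsed> upstream =
        RestartSource.withBackoff(Duration.ofSeconds(1), Duration.ofSeconds(10), 0.2,
                () -> Source.repeat("collection-handle"));
final Source<String, NotUsed> shared =
        upstream.runWith(BroadcastHub.of(String.class, 1), materializer);
// every consumer attaches to the same pre-materialized stream
shared.take(1).runWith(Sink.head(), materializer);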
 
Example #4
Source File: MongoThingsSearchPersistence.java    From ditto with Eclipse Public License 2.0
@Override
public Source<Long, NotUsed> count(final Query query,
        @Nullable final List<String> authorizationSubjectIds) {

    checkNotNull(query, "query");

    final BsonDocument queryFilter = getMongoFilter(query, authorizationSubjectIds);
    log.debug("count with query filter <{}>.", queryFilter);

    final CountOptions countOptions = new CountOptions()
            .skip(query.getSkip())
            .limit(query.getLimit())
            .maxTime(maxQueryTime.getSeconds(), TimeUnit.SECONDS);

    return Source.fromPublisher(collection.count(queryFilter, countOptions))
            .mapError(handleMongoExecutionTimeExceededException())
            .log("count");
}
 
Example #5
Source File: MongoTimestampPersistence.java    From ditto with Eclipse Public License 2.0
private static Source<Success, NotUsed> repeatableCreateCappedCollectionSource(
        final MongoDatabase database,
        final String collectionName,
        final long cappedCollectionSizeInBytes) {

    final CreateCollectionOptions collectionOptions = new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(cappedCollectionSizeInBytes)
            .maxDocuments(1);

    return Source.lazily(
            () -> Source.fromPublisher(database.createCollection(collectionName, collectionOptions)))
            .mapMaterializedValue(whatever -> NotUsed.getInstance())
            .withAttributes(Attributes.inputBuffer(1, 1))
            .recoverWithRetries(1, new PFBuilder<Throwable, Source<Success, NotUsed>>()
                    .match(MongoCommandException.class,
                            MongoTimestampPersistence::isCollectionAlreadyExistsError,
                            error -> Source.single(Success.SUCCESS))
                    .build());

}
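The recoverWithRetries/PFBuilder combination above converts one specific, expected failure ("collection already exists") into a success element while leaving every other failure fatal. A stripped-down hypothetical version of the pattern (assumes akka.japi.pf.PFBuilder; a plain IllegalStateException stands in for MongoCommandException):

// Hypothetical sketch: treat a known "already exists" failure as success.
final Source<String, NotUsed> createOnce =
        Source.<String>failed(new IllegalStateException("already exists"))
                .recoverWithRetries(1, new PFBuilder<Throwable, Source<String, NotUsed>>()
                        .match(IllegalStateException.class,
                                e -> e.getMessage().contains("already exists"),
                                e -> Source.single("created-or-existing"))
                        .build());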
 
Example #6
Source File: MongoThingsSearchUpdaterPersistence.java    From ditto with Eclipse Public License 2.0
@Override
public Source<PolicyReferenceTag, NotUsed> getPolicyReferenceTags(final Map<PolicyId, Long> policyRevisions) {
    final Bson filter =
            in(PersistenceConstants.FIELD_POLICY_ID, policyRevisions.keySet()
                    .stream()
                    .map(String::valueOf)
                    .collect(Collectors.toSet()));
    final Publisher<Document> publisher =
            collection.find(filter).projection(new Document()
                    .append(PersistenceConstants.FIELD_ID, new BsonInt32(1))
                    .append(PersistenceConstants.FIELD_POLICY_ID, new BsonInt32(1)));
    return Source.fromPublisher(publisher)
            .mapConcat(doc -> {
                final ThingId thingId = ThingId.of(doc.getString(PersistenceConstants.FIELD_ID));
                final String policyIdString = doc.getString(PersistenceConstants.FIELD_POLICY_ID);
                final PolicyId policyId = PolicyId.of(policyIdString);
                final Long revision = policyRevisions.get(policyId);
                if (revision == null) {
                    return Collections.emptyList();
                } else {
                    final PolicyTag policyTag = PolicyTag.of(policyId, revision);
                    return Collections.singletonList(PolicyReferenceTag.of(thingId, policyTag));
                }
            });
}
 
Example #7
Source File: MongoHealthChecker.java    From ditto with Eclipse Public License 2.0
private CompletionStage<Optional<Throwable>> generateStatusResponse() {

        final String id = UUID.randomUUID().toString();

        return Source.fromPublisher(collection.insertOne(new Document(ID_FIELD, id)))
                .flatMapConcat(s ->
                        Source.fromPublisher(collection.find(eq(ID_FIELD, id))).flatMapConcat(r ->
                                Source.fromPublisher(collection.deleteOne(eq(ID_FIELD, id)))
                                        .map(DeleteResult::getDeletedCount)
                        )
                )
                .runWith(Sink.seq(), materializer)
                .handle((result, error) -> {
                    if (error != null) {
                        return Optional.of(error);
                    } else if (!Objects.equals(result, Collections.singletonList(1L))) {
                        final String message = "Expect 1 document inserted and deleted. Found: " + result;
                        return Optional.of(new IllegalStateException(message));
                    } else {
                        return Optional.empty();
                    }
                });
    }
 
Example #8
Source File: ReconnectActorTest.java    From ditto with Eclipse Public License 2.0
@Test
public void testRecoverConnections() {
    new TestKit(actorSystem) {{
        final TestProbe probe = new TestProbe(actorSystem);
        final ConnectionId connectionId1 = ConnectionId.of("connection-1");
        final ConnectionId connectionId2 = ConnectionId.of("connection-2");
        final ConnectionId connectionId3 = ConnectionId.of("connection-3");
        final Props props = ReconnectActor.props(probe.ref(),
                () -> Source.from(Arrays.asList(
                        ConnectionPersistenceActor.PERSISTENCE_ID_PREFIX + connectionId1,
                        "invalid:" + connectionId2,
                        ConnectionPersistenceActor.PERSISTENCE_ID_PREFIX + connectionId3)));

        actorSystem.actorOf(props);

        final RetrieveConnectionStatus msg1 = probe.expectMsgClass(RetrieveConnectionStatus.class);
        assertThat((CharSequence) msg1.getConnectionEntityId()).isEqualTo(connectionId1);
        final RetrieveConnectionStatus msg2 = probe.expectMsgClass(RetrieveConnectionStatus.class);
        assertThat((CharSequence) msg2.getConnectionEntityId()).isEqualTo(connectionId3);
    }};
}
 
Example #9
Source File: SearchSource.java    From ditto with Eclipse Public License 2.0
private Source<Pair<String, JsonObject>, NotUsed> retrieveThingForElement(final String thingId) {
    if (thingIdOnly) {
        final JsonObject idOnlyThingJson = JsonObject.newBuilder().set(Thing.JsonFields.ID, thingId).build();
        return Source.single(Pair.create(thingId, idOnlyThingJson));
    } else {
        return retrieveThing(thingId, fields)
                .map(thingJson -> Pair.create(thingId, thingJson))
                .recoverWithRetries(1,
                        new PFBuilder<Throwable, Graph<SourceShape<Pair<String, JsonObject>>, NotUsed>>()
                                .match(ThingNotAccessibleException.class, thingNotAccessible -> {
                                    // out-of-sync thing detected
                                    final ThingsOutOfSync thingsOutOfSync =
                                            ThingsOutOfSync.of(Collections.singletonList(ThingId.of(thingId)),
                                                    getDittoHeaders());

                                    pubSubMediator.tell(
                                            DistPubSubAccess.publishViaGroup(ThingsOutOfSync.TYPE, thingsOutOfSync),
                                            ActorRef.noSender());
                                    return Source.empty();
                                })
                                .build()
                );
    }
}
 
Example #10
Source File: BackgroundSyncStream.java    From ditto with Eclipse Public License 2.0
/**
 * Emit metadata to trigger index update if the persistence snapshot and the search index entry are inconsistent.
 * Precondition: the thing IDs are identical and the search index entry is outside the tolerance window.
 *
 * @param persisted metadata from the snapshot store of the persistence.
 * @param indexed metadata from the search index with the same thing ID.
 * @return source of a metadata if the persistence and search index are inconsistent, or an empty source otherwise.
 */
private Source<Metadata, NotUsed> emitUnlessConsistent(final Metadata persisted, final Metadata indexed) {
    if (persisted.getThingRevision() > indexed.getThingRevision()) {
        return Source.single(indexed).log("RevisionMismatch");
    } else {
        final Optional<PolicyId> persistedPolicyId = persisted.getPolicyId();
        final Optional<PolicyId> indexedPolicyId = indexed.getPolicyId();
        if (!persistedPolicyId.equals(indexedPolicyId)) {
            return Source.single(indexed).log("PolicyIdMismatch");
        } else if (persistedPolicyId.isPresent()) {
            // policy IDs are equal and nonempty; retrieve and compare policy revision
            return retrievePolicyRevisionAndEmitMismatch(persistedPolicyId.get(), indexed);
        } else {
            // policy IDs are empty - the entries are consistent.
            return Source.empty();
        }
    }
}
 
Example #11
Source File: MongoReadJournal.java    From ditto with Eclipse Public License 2.0
private <T> Source<List<T>, NotUsed> unfoldBatchedSource(
        final String lowerBound,
        final ActorMaterializer mat,
        final Function<T, String> seedCreator,
        final Function<String, Source<T, ?>> sourceCreator) {

    return Source.unfoldAsync("",
            start -> {
                final String actualStart = lowerBound.compareTo(start) >= 0 ? lowerBound : start;
                return sourceCreator.apply(actualStart)
                        .runWith(Sink.seq(), mat)
                        .thenApply(list -> {
                            if (list.isEmpty()) {
                                return Optional.empty();
                            } else {
                                return Optional.of(Pair.create(seedCreator.apply(list.get(list.size() - 1)), list));
                            }
                        });
            })
            .withAttributes(Attributes.inputBuffer(1, 1));
}
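Source.unfoldAsync drives the batching here: each step runs one query starting after the last element of the previous batch and terminates the stream by returning an empty Optional. A self-contained hypothetical sketch with an in-memory list in place of the journal query (assumes akka.japi.Pair, java.util.Optional and CompletableFuture):

// Hypothetical sketch: page through a fixed list three elements at a time.
final List<Integer> data = List.of(1, 2, 3, 4, 5, 6, 7);
final int pageSize = 3;
final Source<List<Integer>, NotUsed> pages = Source.unfoldAsync(0, offset -> {
    final List<Integer> batch = data.subList(offset, Math.min(offset + pageSize, data.size()));
    return CompletableFuture.completedFuture(batch.isEmpty()
            ? Optional.empty()
            : Optional.of(Pair.create(offset + batch.size(), batch)));
});
// emits [1, 2, 3], [4, 5, 6], [7] and then completes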
 
Example #12
Source File: MongoTimestampPersistence.java    From ditto with Eclipse Public License 2.0
@Override
public Source<Done, NotUsed> setTaggedTimestamp(final Instant timestamp, @Nullable final String tag) {
    final Document toStore = new Document()
            .append(FIELD_TIMESTAMP, Date.from(timestamp))
            .append(FIELD_TAG, tag);
    return getCollection()
            .flatMapConcat(collection -> Source.fromPublisher(collection.insertOne(toStore)))
            .map(success -> {
                LOGGER.debug("Successfully inserted <{}> tagged <{}>.", timestamp, tag);
                return Done.done();
            });
}
 
Example #13
Source File: MongoNamespacePersistenceOperations.java    From ditto with Eclipse Public License 2.0
private Source<List<Throwable>, NotUsed> purge(final MongoPersistenceOperationsSelection selection) {
    final MongoCollection<Document> collection = db.getCollection(selection.getCollectionName());
    if (selection.isEntireCollection()) {
        return MongoOpsUtil.drop(collection);
    } else {
        return MongoOpsUtil.deleteByFilter(collection, selection.getFilter());
    }
}
 
Example #14
Source File: RegistrationServiceImpl.java    From activator-lagom-cargotracker with Apache License 2.0
/**
 * Get the live registrations service call.
 *
 * @return a service call whose response is a live stream of {@link Cargo} registrations.
 */
@Override
public ServiceCall<NotUsed, NotUsed, Source<Cargo, ?>> getLiveRegistrations() {
    return (id, req) -> {
        PubSubRef<Cargo> topic = topics.refFor(TopicId.of(Cargo.class, "topic"));
        return CompletableFuture.completedFuture(topic.subscriber());
    };
}
 
Example #15
Source File: MongoReadJournal.java    From ditto with Eclipse Public License 2.0
/**
 * Retrieve all latest snapshots with unique PIDs in snapshot store above a lower bound.
 * Does not limit database access in any way.
 *
 * @param lowerBoundPid the lower-bound PID.
 * @param batchSize how many snapshots to read in 1 query.
 * @param mat the materializer.
 * @param snapshotFields snapshot fields to project out.
 * @return source of newest snapshots with unique PIDs.
 */
public Source<Document, NotUsed> getNewestSnapshotsAbove(final String lowerBoundPid,
        final int batchSize,
        final ActorMaterializer mat,
        final String... snapshotFields) {

    return getSnapshotStore()
            .withAttributes(Attributes.inputBuffer(1, 1))
            .flatMapConcat(snapshotStore ->
                    listNewestSnapshots(snapshotStore, lowerBoundPid, batchSize, mat,
                            snapshotFields)
            )
            .mapConcat(pids -> pids);
}
 
Example #16
Source File: ThingsMetadataSource.java    From ditto with Eclipse Public License 2.0
private static Source<StreamedSnapshot, NotUsed> getStreamedSnapshots(final SourceRef<?> sourceRef) {
    return sourceRef.getSource()
            .map(element -> {
                if (element instanceof StreamedSnapshot) {
                    return (StreamedSnapshot) element;
                } else {
                    throw new ClassCastException("Not a StreamedSnapshot: " + element);
                }
            });
}
 
Example #17
Source File: EnforcementFlow.java    From ditto with Eclipse Public License 2.0
/**
 * Create a flow from Thing changes to write models by retrieving data from Things shard region and enforcer cache.
 *
 * @param parallelism how many SudoRetrieveThing commands to send in parallel.
 * @return the flow.
 */
public Flow<Map<ThingId, Metadata>, Source<AbstractWriteModel, NotUsed>, NotUsed> create(final int parallelism) {
    return Flow.<Map<ThingId, Metadata>>create().map(changeMap -> {
        log.info("Updating search index of <{}> things", changeMap.size());
        final Set<ThingId> thingIds = changeMap.keySet();
        return sudoRetrieveThingJsons(parallelism, thingIds).flatMapConcat(responseMap ->
                Source.fromIterator(changeMap.values()::iterator).flatMapMerge(parallelism, metadataRef ->
                        computeWriteModel(metadataRef, responseMap.get(metadataRef.getThingId())))
        );
    });

}
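The interesting operator here is flatMapMerge: unlike flatMapConcat, it keeps up to `parallelism` sub-sources running concurrently and merges their elements as they arrive. A tiny hypothetical illustration:

// Hypothetical sketch: at most 4 sub-sources in flight, results merged as ready.
final Source<Integer, NotUsed> merged =
        Source.range(1, 10)
                .flatMapMerge(4, i -> Source.single(i * 10));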
 
Example #18
Source File: EventMarshallers.java    From ts-reaktive with MIT License
/** 
 * Returns a Marshaller that marshals event envelopes using the given function, writing the resulting protobuf messages as delimited protobuf
 * into a chunked HTTP response with the given content type. 
 */
public static Marshaller<Source<EventEnvelope, ?>, HttpResponse> marshallerWith(Function<EventEnvelope, ? extends MessageLite> f, ContentType contentType) {
    return Marshaller.<Source<EventEnvelope, ?>, HttpResponse>opaque(events -> 
        HttpResponse.create().withEntity(
            HttpEntities.create(
                contentType, events.map(e -> serializeDelimited(f.apply(e)))
            )
        )
    );
}
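The marshaller wraps a Source of serialized events into a chunked response body. Stripped of the protobuf details, the same HttpEntities.create call accepts any Source of ByteString. A hypothetical snippet (assumes akka.util.ByteString and the akka-http javadsl model classes are available):

// Hypothetical sketch: stream three text chunks as one chunked HTTP response.
final Source<ByteString, NotUsed> body =
        Source.from(Arrays.asList("a", "b", "c")).map(ByteString::fromString);
final HttpResponse response = HttpResponse.create()
        .withEntity(HttpEntities.create(ContentTypes.TEXT_PLAIN_UTF8, body));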
 
Example #19
Source File: SearchUpdaterStream.java    From ditto with Eclipse Public License 2.0
/**
 * Start a perpetual search updater stream killed only by the kill-switch.
 *
 * @param actorRefFactory where to create actors for this stream.
 * @return kill-switch to terminate the stream.
 */
public KillSwitch start(final ActorRefFactory actorRefFactory) {
    final Source<Source<AbstractWriteModel, NotUsed>, NotUsed> restartSource = createRestartSource();
    final Sink<Source<AbstractWriteModel, NotUsed>, NotUsed> restartSink = createRestartSink();
    final ActorMaterializer actorMaterializer = ActorMaterializer.create(actorRefFactory);
    return restartSource.viaMat(KillSwitches.single(), Keep.right())
            .toMat(restartSink, Keep.left())
            .run(actorMaterializer);
}
 
Example #20
Source File: TransistorTest.java    From ditto with Eclipse Public License 2.0
/**
 * Connect a transistor to two test sources (collector and base) and a test sink.
 */
@Before
public void init() {
    system = ActorSystem.create();
    final Source<Integer, TestPublisher.Probe<Integer>> collectorSource = TestSource.probe(system);
    final Source<Integer, TestPublisher.Probe<Integer>> baseSource = TestSource.probe(system);
    final Sink<Integer, TestSubscriber.Probe<Integer>> emitterSink = TestSink.probe(system);
    final Transistor<Integer> underTest = Transistor.of();

    final Graph<SourceShape<Integer>, Pair<TestPublisher.Probe<Integer>, TestPublisher.Probe<Integer>>>
            collectorGateTransistor =
            GraphDSL$.MODULE$.create3(
                    collectorSource, baseSource, underTest,
                    (collector, base, notUsed) -> Pair.create(collector, base),
                    (builder, collectorShape, baseShape, transistorShape) -> {
                        builder.from(collectorShape.out()).toInlet(transistorShape.in0());
                        builder.from(baseShape.out()).toInlet(transistorShape.in1());
                        return SourceShape.of(transistorShape.out());
                    });

    final Pair<Pair<TestPublisher.Probe<Integer>, TestPublisher.Probe<Integer>>, TestSubscriber.Probe<Integer>> m =
            Source.fromGraph(collectorGateTransistor)
                    .toMat(emitterSink, Keep.both())
                    .run(ActorMaterializer.create(system));

    collector = m.first().first();
    base = m.first().second();
    emitter = m.second();
}
 
Example #21
Source File: HttpHelper.java    From netty-reactive-streams with Apache License 2.0
public StreamedHttpRequest createStreamedRequest(String method, String uri, List<String> body) {
    List<HttpContent> content = new ArrayList<>();
    for (String chunk: body) {
        content.add(new DefaultHttpContent(Unpooled.copiedBuffer(chunk, Charset.forName("utf-8"))));
    }
    Publisher<HttpContent> publisher = Source.from(content).runWith(Sink.<HttpContent>asPublisher(AsPublisher.WITH_FANOUT), materializer);
    return new DefaultStreamedHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method), uri,
            publisher);
}
 
Example #22
Source File: ResumeSourceTest.java    From ditto with Eclipse Public License 2.0
@Test
public void testResumption() {
    new TestKit(system) {{
        final Source<Integer, NotUsed> underTest = createResumeSource(getRef(), -1);

        underTest.runWith(testSink, mat);

        // start stream with demand
        sinkProbe.request(100L);
        expectMsg(0);
        reply(testSource);

        // send some elements followed by failure
        sourceProbe.sendNext(1).sendNext(2);
        sinkProbe.expectNext(1, 2);
        sourceProbe.sendError(new IllegalStateException("1st expected error"));

        // expect new seed equal to final element sent
        expectMsg(2);
        rematerializeSource();
        reply(testSource);

        // fail again without sending any element
        sourceProbe.sendError(new IllegalStateException("2nd expected error"));

        // expect new seed unchanged and not reset
        expectMsg(2);
        rematerializeSource();
        reply(testSource);

        // resume stream until completion
        sourceProbe.sendNext(3).sendNext(4).sendNext(5);
        sourceProbe.sendComplete();
        sinkProbe.expectNext(3, 4, 5);
        sinkProbe.expectComplete();
    }};
}
 
Example #23
Source File: MergeSortedAsPairTest.java    From ditto with Eclipse Public License 2.0
private static Source<Integer, NotUsed> equivalentOfMergeSortedUnderTest(
        final Source<Integer, ?> source1,
        final Source<Integer, ?> source2) {

    return mergeSortedAsPair(source1, source2)
            .mapConcat(pair -> pair.first() < pair.second()
                    ? List.of(pair.first())
                    : pair.first() > pair.second()
                    ? List.of(pair.second())
                    : List.of(pair.first(), pair.second())
            )
            .mapMaterializedValue(ignored -> NotUsed.getInstance());
}
 
Example #24
Source File: CsvProtocolSpec.java    From ts-reaktive with MIT License
@SafeVarargs
@SuppressWarnings( "varargs" ) // Eclipse thinks it's unneeded, but javac will emit a warning otherwise.
private final <T> String render(Protocol<CsvEvent, T> proto, T... elements)
    throws InterruptedException, ExecutionException, TimeoutException {
    return Source.from(Arrays.asList(elements))
        .via(ProtocolWriter.of(proto))
        .via(new CsvWriter(CsvSettings.RFC4180))
        .runFold("", (a,b) -> a + b, materializer)
        .toCompletableFuture()
        .get(1, TimeUnit.SECONDS);
}
 
Example #25
Source File: PersistenceIdSource.java    From ditto with Eclipse Public License 2.0
/**
 * Create a stream of snapshot revisions of all known entities.
 * The stream fails if there is a failure requesting any stream or processing any stream element.
 *
 * @param config configuration of the persistence ID source.
 * @param pubSubMediator the pub-sub mediator.
 * @return source of entity IDs with revisions of their latest snapshots.
 */
public static Source<EntityIdWithRevision, NotUsed> create(final PersistenceIdsConfig config,
        final ActorRef pubSubMediator) {
    return Source.from(PERSISTENCE_STREAMING_ACTOR_PATHS)
            .buffer(1, OverflowStrategy.backpressure())
            .flatMapConcat(path -> buildResumeSource(config, pubSubMediator, path)
                    // recover to empty source to cleanup other resource types even on long-term failure
                    .recoverWithRetries(1, Throwable.class, Source::empty));
}
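The trailing recoverWithRetries(1, Throwable.class, Source::empty) is what keeps one failing resource type from killing the whole stream: the failed sub-source is replaced by an empty one and the remaining paths are still processed. A hypothetical miniature of the same idea:

// Hypothetical sketch: a failing sub-source collapses to empty instead of
// failing the surrounding flatMapConcat.
final Source<String, NotUsed> resilient =
        Source.from(List.of("ok-1", "boom", "ok-2"))
                .flatMapConcat(id -> ("boom".equals(id)
                        ? Source.<String>failed(new IllegalStateException(id))
                        : Source.single(id))
                        .recoverWithRetries(1, Throwable.class, Source::empty));
// emits "ok-1" and "ok-2"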
 
Example #26
Source File: S3.java    From ts-reaktive with MIT License
/**
 * Loads the last known written offset from S3, or returns 0 if not found.
 */
public CompletionStage<Long> loadOffset() {
    return download("_lastOffset")
            .reduce((bs1, bs2) -> bs1.concat(bs2))
            .map(bs -> Long.parseLong(bs.utf8String()))
            .recoverWith(new PFBuilder<Throwable, Source<Long, NotUsed>>()
                    .matchAny(x -> Source.single(0L)) // not found -> start at 0
                    .build())
            .runWith(Sink.head(), materializer);
}
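A compact hypothetical variant of the same fallback pattern without S3: if the upstream fails for any reason, recoverWith substitutes a single default element before the stream reaches Sink.head (assumes a materializer in scope):

// Hypothetical sketch: any failure is replaced by a default offset of 0.
final CompletionStage<Long> offset =
        Source.<Long>failed(new RuntimeException("_lastOffset not found"))
                .recoverWith(new PFBuilder<Throwable, Source<Long, NotUsed>>()
                        .matchAny(x -> Source.single(0L))
                        .build())
                .runWith(Sink.head(), materializer);
// offset completes with 0L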
 
Example #27
Source File: ThingsJournalTestHelper.java    From ditto with Eclipse Public License 2.0
private <T> List<T> runBlockingWithReturn(final Source<T, NotUsed> publisher) {
    final CompletionStage<List<T>> done = publisher.runWith(Sink.seq(), mat);
    try {
        return done.toCompletableFuture().get(WAIT_TIMEOUT, TimeUnit.SECONDS);
    } catch (final InterruptedException | ExecutionException | TimeoutException e) {
        throw new IllegalStateException(e);
    }
}
 
Example #28
Source File: AkkaStreams.java    From ts-reaktive with MIT License
/**
 * Materializes the given source and waits for it to successfully emit one element. It then completes the returned
 * CompletionStage with the full stream. It will wait indefinitely for that first element, so timeouts will have to be handled
 * separately on the stream, returned future, or both.
 * 
 * This is useful in cases where you want to "fail early" when handling a stream result. For example, you might want
 * to build an http response based on a stream, but want to set a different status code if the stream fails
 * to emit any element.
 */
public static <T> CompletionStage<Source<T,NotUsed>> awaitOne(Source<T,?> source, Materializer materializer) {
    return source.prefixAndTail(1).map(pair -> {
        if (pair.first().isEmpty()) {
            return pair.second();
        } else {
            T head = pair.first().get(0);
            Source<T, NotUsed> tail = pair.second();
            return Source.single(head).concat(tail);                
        }
    }).runWith(Sink.head(), materializer);
}
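A brief hypothetical usage of awaitOne (assumes a materializer in scope): if the upstream fails before its first element, the returned CompletionStage fails; otherwise it completes with a source that replays the first element followed by the rest.

// Hypothetical usage sketch.
awaitOne(Source.range(1, 3), materializer)
        .thenCompose(src -> src.runWith(Sink.seq(), materializer))
        .thenAccept(System.out::println); // prints [1, 2, 3]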
 
Example #29
Source File: SearchActor.java    From ditto with Eclipse Public License 2.0
private <T> Source<T, NotUsed> processSearchPersistenceResult(Source<T, NotUsed> source,
        final DittoHeaders dittoHeaders) {

    final Flow<T, T, NotUsed> logAndFinishPersistenceSegmentFlow =
            Flow.fromFunction(result -> {
                // we know that the source provides exactly one ResultList
                LogUtil.enhanceLogWithCorrelationId(log, dittoHeaders.getCorrelationId());
                log.debug("Persistence returned: {}", result);
                return result;
            });

    return source.via(logAndFinishPersistenceSegmentFlow);
}
 
Example #30
Source File: SearchActor.java    From ditto with Eclipse Public License 2.0
private static <T> Source<Query, NotUsed> createQuerySource(final Function<T, Query> parser,
        final T command) {

    try {
        return Source.single(parser.apply(command));
    } catch (final Throwable e) {
        return Source.failed(e);
    }
}