org.springframework.integration.annotation.ServiceActivator Java Examples

The following examples show how to use org.springframework.integration.annotation.ServiceActivator. Each example notes the original open-source project and source file it was taken from.
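Every example below binds a handler bean or an annotated handler method to an input channel. As a minimal, self-contained sketch of that pattern (the channel, class, and method names here are illustrative and not taken from any of the projects below):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.support.MessageBuilder;

@Configuration
@EnableIntegration
public class GreetingFlowConfig {

	// Channel the service activator listens on; the name is illustrative.
	@Bean
	public MessageChannel greetingChannel() {
		return new DirectChannel();
	}

	// Invoked for every message sent to "greetingChannel".
	@ServiceActivator(inputChannel = "greetingChannel")
	public void handle(String payload) {
		System.out.println("Received: " + payload);
	}
}

Sending a message to the channel, for example greetingChannel().send(MessageBuilder.withPayload("hello").build()), triggers the annotated method.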
Example #1
Source File: SenderApplication.java    From spring-cloud-gcp with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "pubSubOutputChannel")
public MessageHandler messageSender(PubSubTemplate pubsubTemplate) {
	PubSubMessageHandler adapter =
			new PubSubMessageHandler(pubsubTemplate, "exampleTopic");
	adapter.setPublishCallback(new ListenableFutureCallback<String>() {
		@Override
		public void onFailure(Throwable ex) {
			LOGGER.info("There was an error sending the message.");
		}

		@Override
		public void onSuccess(String result) {
			LOGGER.info("Message was sent successfully.");
		}
	});

	return adapter;
}
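This handler only covers the outbound side. In a setup like this, messages would typically reach "pubSubOutputChannel" through a Spring Integration messaging gateway; a sketch of such a gateway (the interface and method names are assumptions, not part of the example above):

// Sketch only: interface and method names are illustrative.
@MessagingGateway(defaultRequestChannel = "pubSubOutputChannel")
public interface PubSubOutboundGateway {
	void sendToPubSub(String text);
}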
 
Example #2
Source File: ContentTypeTckTests.java    From spring-cloud-stream with Apache License 2.0
@ServiceActivator(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
public Message<String> echo(Message<byte[]> value) throws Exception {
	ObjectMapper mapper = new ObjectMapper();
	Person person = mapper.readValue(value.getPayload(), Person.class);
	person.setName("bob");
	String json = mapper.writeValueAsString(person);
	return MessageBuilder.withPayload(json).build();
}
 
Example #3
Source File: AmazonS3SinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = Sink.INPUT)
public MessageHandler amazonS3MessageHandler(AmazonS3 amazonS3, ResourceIdResolver resourceIdResolver,
		AmazonS3SinkProperties s3SinkProperties) {
	S3MessageHandler s3MessageHandler;
	if (s3SinkProperties.getBucket() != null) {
		s3MessageHandler = new S3MessageHandler(amazonS3, s3SinkProperties.getBucket());
	}
	else {
		s3MessageHandler = new S3MessageHandler(amazonS3, s3SinkProperties.getBucketExpression());
	}
	s3MessageHandler.setResourceIdResolver(resourceIdResolver);
	s3MessageHandler.setKeyExpression(s3SinkProperties.getKeyExpression());
	if (s3SinkProperties.getAcl() != null) {
		s3MessageHandler.setObjectAclExpression(new ValueExpression<>(s3SinkProperties.getAcl()));
	}
	else {
		s3MessageHandler.setObjectAclExpression(s3SinkProperties.getAclExpression());
	}
	s3MessageHandler.setUploadMetadataProvider(this.uploadMetadataProvider);
	s3MessageHandler.setProgressListener(this.s3ProgressListener);
	return s3MessageHandler;
}
 
Example #4
Source File: DemoApplication.java    From activiti-examples with Apache License 2.0
@ServiceActivator(inputChannel = "fileChannel")
public void processFile(Message<File> message) throws IOException {
    securityUtil.logInAs("system");

    File payload = message.getPayload();
    logger.info(">>> Processing file: " + payload.getName());

    String content = FileUtils.readFileToString(payload, "UTF-8");

    SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yy HH:mm:ss");

    logger.info("> Processing content: " + content + " at " + formatter.format(new Date()));

    ProcessInstance processInstance = processRuntime.start(ProcessPayloadBuilder
            .start()
            .withProcessDefinitionKey("categorizeProcess")
            .withName("Processing Content: " + content)
            .withVariable("content", content)
            .build());
    logger.info(">>> Created Process Instance: " + processInstance);

    logger.info(">>> Deleting processed file: " + payload.getName());
    payload.delete();

}
 
Example #5
Source File: FieldValueCounterSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@ServiceActivator(inputChannel=Sink.INPUT)
public void process(Message<?> message) {
	Object payload = message.getPayload();
	if (payload instanceof String) {
		try {
			payload = jsonToTupleTransformer.transformPayload(payload.toString());
		}
		catch (Exception e) {
			throw new MessageTransformationException(message, e.getMessage(), e);
		}
	}
	if (payload instanceof Tuple) {
		processTuple(computeMetricName(message), (Tuple) payload);
	}
	else {
		processPojo(computeMetricName(message), payload);
	}
}
 
Example #6
Source File: RedisSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = Sink.INPUT)
public MessageHandler redisSinkMessageHandler() {
	if (this.redisSinkProperties.isKey()) {
		RedisStoreWritingMessageHandler redisStoreWritingMessageHandler = new RedisStoreWritingMessageHandler(
				this.redisConnectionFactory);
		redisStoreWritingMessageHandler.setKeyExpression(this.redisSinkProperties.keyExpression());
		return redisStoreWritingMessageHandler;
	}
	else if (this.redisSinkProperties.isQueue()) {
		return new RedisQueueOutboundChannelAdapter(this.redisSinkProperties.queueExpression(),
				this.redisConnectionFactory);
	}
	else { // must be topic
		RedisPublishingMessageHandler redisPublishingMessageHandler = new RedisPublishingMessageHandler(
				this.redisConnectionFactory);
		redisPublishingMessageHandler.setTopicExpression(this.redisSinkProperties.topicExpression());
		return redisPublishingMessageHandler;
	}
}
 
Example #7
Source File: CassandraSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "toSink")
public MessageHandler cassandraSinkMessageHandler() {
	CassandraMessageHandler<?> cassandraMessageHandler =
			this.cassandraSinkProperties.getQueryType() != null
					? new CassandraMessageHandler<>(this.template, this.cassandraSinkProperties.getQueryType())
					: new CassandraMessageHandler<>(this.template);
	cassandraMessageHandler.setProducesReply(false);
	if (this.cassandraSinkProperties.getConsistencyLevel() != null
			|| this.cassandraSinkProperties.getRetryPolicy() != null
			|| this.cassandraSinkProperties.getTtl() > 0) {
		cassandraMessageHandler.setWriteOptions(
				new WriteOptions(this.cassandraSinkProperties.getConsistencyLevel(),
						this.cassandraSinkProperties.getRetryPolicy(), this.cassandraSinkProperties.getTtl()));
	}
	if (StringUtils.hasText(this.cassandraSinkProperties.getIngestQuery())) {
		cassandraMessageHandler.setIngestQuery(this.cassandraSinkProperties.getIngestQuery());
	}
	else if (this.cassandraSinkProperties.getStatementExpression() != null) {
		cassandraMessageHandler.setStatementExpression(this.cassandraSinkProperties.getStatementExpression());
	}
	return cassandraMessageHandler;
}
 
Example #8
Source File: RabbitSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@ServiceActivator(inputChannel = Sink.INPUT)
@Bean
public MessageHandler amqpChannelAdapter(ConnectionFactory rabbitConnectionFactory) {
	AmqpOutboundEndpoint handler = new AmqpOutboundEndpoint(rabbitTemplate(rabbitConnectionFactory));
	DefaultAmqpHeaderMapper mapper = new DefaultAmqpHeaderMapper();
	mapper.setRequestHeaderNames(this.properties.getMappedRequestHeaders());
	handler.setHeaderMapper(mapper);
	handler.setDefaultDeliveryMode(this.properties.getPersistentDeliveryMode()
									? MessageDeliveryMode.PERSISTENT
									: MessageDeliveryMode.NON_PERSISTENT);
	if (this.properties.getExchangeExpression() == null) {
		handler.setExchangeName(this.properties.getExchange());
	}
	else {
		handler.setExpressionExchangeName(this.properties.getExchangeExpression());
	}
	if (this.properties.getRoutingKeyExpression() == null) {
		handler.setRoutingKey(this.properties.getRoutingKey());
	}
	else {
		handler.setExpressionRoutingKey(this.properties.getRoutingKeyExpression());
	}
	return handler;
}
 
Example #9
Source File: SenderConfiguration.java    From spring-cloud-gcp with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "pubSubOutputChannel")
public MessageHandler messageSender(PubSubTemplate pubSubTemplate) {
	PubSubMessageHandler adapter = new PubSubMessageHandler(pubSubTemplate, TOPIC_NAME);
	adapter.setPublishCallback(new ListenableFutureCallback<String>() {
		@Override
		public void onFailure(Throwable ex) {
			LOGGER.info("There was an error sending the message.");
		}

		@Override
		public void onSuccess(String result) {
			LOGGER.info("Message was sent successfully.");
		}
	});

	return adapter;
}
 
Example #10
Source File: HdfsDatasetSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "toSink")
public MessageHandler datasetSinkMessageHandler(final DatasetOperations datasetOperations) {
	return new MessageHandler() {

		@Override
		public void handleMessage(Message<?> message) throws MessagingException {
			Object payload = message.getPayload();
			if (payload instanceof Collection<?>) {
				Collection<?> payloads = (Collection<?>) payload;
				logger.debug("Writing a collection of {} POJOs" + payloads.size());
				datasetOperations.write((Collection<?>) message.getPayload());
			}
			else {
				// This should never happen since message handler is fronted by an aggregator
				throw new IllegalStateException("Expected a collection of POJOs but received " +
						message.getPayload().getClass().getName());
			}
		}
	};
}
 
Example #11
Source File: FeedService.java    From myfeed with Apache License 2.0
@ServiceActivator(inputChannel = Sink.INPUT)
@SuppressWarnings("unused")
public void propagate(FeedItem feedItem) {
	/*String username = feedItem.getUsername();
	String userid = user.findId(username).toBlocking().value();
	List<Resource<User>> following = user.getFollowing(userid);
	List<FeedItem> toSave = new ArrayList<>();
	for (Resource<User> followed : following) {
		User user = followed.getContent();
		String followingUserid = user.getUserId(); //getId(user);
		String followingUsername = user.getUsername();
		log.info("Saving feed item to {}:{}", followingUsername, followingUserid);
		toSave.add(new FeedItem(followingUserid, feedItem.getUsername(),
				feedItem.getText(), feedItem.getCreated()));
	}
	Iterable<FeedItem> saved = repo.save(toSave);
	log.info("Saved: "+saved);*/
}
 
Example #12
Source File: HdfsSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@ServiceActivator(inputChannel=Sink.INPUT)
public void hdfsSink(Object payload) {
	try {
		dataStoreWriter.write(payload.toString());
	} catch (IOException e) {
		throw new IllegalStateException("Error while writing", e);
	}
}
 
Example #13
Source File: CustomHeaderPropagationTests.java    From spring-cloud-stream with Apache License 2.0
@ServiceActivator(inputChannel = "input", outputChannel = "output")
public Message<String> consume(String data) {
	// if we don't force content to be String, it will be quoted on the outbound
	// channel
	return MessageBuilder.withPayload(data)
			.setHeader(MessageHeaders.CONTENT_TYPE, "text/plain").build();
}
 
Example #14
Source File: FileSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = Sink.INPUT)
public FileWritingMessageHandler fileWritingMessageHandler(FileNameGenerator fileNameGenerator, FileSinkProperties properties) {
	FileWritingMessageHandler handler = (properties.getDirectoryExpression() != null)
			? new FileWritingMessageHandler(properties.getDirectoryExpression())
			: new FileWritingMessageHandler(new File(properties.getDirectory()));
	handler.setAutoCreateDirectory(true);
	handler.setAppendNewLine(!properties.isBinary());
	handler.setCharset(properties.getCharset());
	handler.setExpectReply(false);
	handler.setFileExistsMode(properties.getMode());
	handler.setFileNameGenerator(fileNameGenerator);
	return handler;
}
 
Example #15
Source File: HerokuReplayApplication.java    From heroku-metrics-spring with MIT License
@ServiceActivator(inputChannel = "toKafka")
@Bean
public MessageHandler kafkaHandler() throws Exception {
  KafkaProducerMessageHandler<String, String> handler =
      new KafkaProducerMessageHandler<>(kafkaTemplate());
  handler.setTopicExpression(new LiteralExpression(KafkaConfig.getTopic()));
  handler.setMessageKeyExpression(new LiteralExpression(KafkaConfig.getMessageKey()));
  return handler;
}
 
Example #16
Source File: HttpclientProcessorConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@ServiceActivator(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
public Object process(Message<?> message) {
	try {
		/* construct headers */
		HttpHeaders headers = new HttpHeaders();
		if (properties.getHeadersExpression() != null) {
			Map<?, ?> headersMap = properties.getHeadersExpression().getValue(message, Map.class);
			for (Entry<?, ?> header : headersMap.entrySet()) {
				if (header.getKey() != null && header.getValue() != null) {
					headers.add(header.getKey().toString(),
							header.getValue().toString());
				}
			}
		}

		Class<?> responseType = properties.getExpectedResponseType();
		HttpMethod method = properties.getHttpMethod();
		String url = properties.getUrlExpression().getValue(message, String.class);
		Object body = null;
		if (properties.getBody() != null) {
			body = properties.getBody();
		}
		else if (properties.getBodyExpression() != null) {
			body = properties.getBodyExpression().getValue(message);
		}
		else {
			body = message.getPayload();
		}
		URI uri = new URI(url);
		RequestEntity<?> request = new RequestEntity<>(body, headers, method, uri);
		ResponseEntity<?> response = restTemplate.exchange(request, responseType);
		return properties.getReplyExpression().getValue(response);
	}
	catch (Exception e) {
		LOG.warn("Error in HTTP request", e);
		return null;
	}
}
 
Example #17
Source File: ProducingChannelConfig.java    From spring-jms with MIT License
@Bean
@ServiceActivator(inputChannel = "producingChannel")
public MessageHandler jmsMessageHandler(JmsTemplate jmsTemplate) {
  JmsSendingMessageHandler handler =
      new JmsSendingMessageHandler(jmsTemplate);
  handler.setDestinationName(integrationDestination);

  return handler;
}
 
Example #18
Source File: ProducingChannelConfig.java    From spring-kafka with MIT License
@Bean
@ServiceActivator(inputChannel = "producingChannel")
public MessageHandler kafkaMessageHandler() {
  KafkaProducerMessageHandler<String, String> handler =
      new KafkaProducerMessageHandler<>(kafkaTemplate());
  handler.setMessageKeyExpression(new LiteralExpression("kafka-integration"));

  return handler;
}
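No topic expression is set on this handler, so the destination topic has to travel with each message, typically in the KafkaHeaders.TOPIC header (or fall back to the template's default topic). A sketch of sending through the channel under that assumption; the injected channel field and topic name are illustrative:

// Sketch only: channel injection and topic name are assumptions.
@Autowired
@Qualifier("producingChannel")
private MessageChannel producingChannel;

public void send(String payload) {
	producingChannel.send(MessageBuilder.withPayload(payload)
			.setHeader(KafkaHeaders.TOPIC, "example.topic")
			.build());
}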
 
Example #19
Source File: MqttReceiveHandler.java    From iot-dc3 with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "mqttInputChannel")
public MessageHandler handler() {
    return message -> {
        log.info(
                "{}\nheader:{}, payload:{}",
                message.getHeaders().get("mqtt_receivedTopic"),
                JSON.toJSONString(message.getHeaders(), true),
                JSON.toJSONString(message.getPayload(), true)
        );
        DevicePayLoad devicePayLoad = JSON.parseObject(message.getPayload().toString(), DevicePayLoad.class);
        PointValue pointValue = driverService.convertValue(devicePayLoad.getDeviceId(), devicePayLoad.getPointId(), devicePayLoad.getValue());
        driverService.pointValueSender(pointValue);
    };
}
 
Example #20
Source File: VerifyEmployeeService.java    From Spring-5.0-Cookbook with MIT License
@ServiceActivator(inputChannel=Sink.INPUT)
public void validateEmployee(Integer deptId) {
	Department dept = null;
	try{
		dept = departmentServiceImpl.findDeptByid(deptId);
	}catch(Exception e){
		dept = new Department();
		dept.setName("Non-existent");
	}
	 
	log.info("{}", dept.getName());
}
 
Example #21
Source File: LogSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = Sink.INPUT)
public LoggingHandler logSinkHandler() {
	LoggingHandler loggingHandler = new LoggingHandler(this.properties.getLevel().name());
	loggingHandler.setExpression(this.properties.getExpression());
	loggingHandler.setLoggerName(this.properties.getName());
	return loggingHandler;
}
 
Example #22
Source File: IntegrationConfig.java    From spring-reactive-sample with GNU General Public License v3.0
@ServiceActivator(inputChannel = "fluxResultChannel")
@Bean
public WebFluxRequestExecutingMessageHandler reactiveOutbound(WebClient client) {
    WebFluxRequestExecutingMessageHandler handler
        = new WebFluxRequestExecutingMessageHandler("http://localhost:8080/posts", client);
    handler.setHttpMethod(HttpMethod.GET);
    handler.setExpectedResponseType(String.class);
    return handler;
}
 
Example #23
Source File: TensorflowProcessorConfiguration.java    From tensorflow-spring-cloud-stream-app-starters with Apache License 2.0
@ServiceActivator(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
public Message<?> evaluate(Message<?> input) {

	Map<String, Object> processorContext = new ConcurrentHashMap<>();

	Map<String, Object> inputData = tensorflowInputConverter.convert(input, processorContext);

	Tensor outputTensor = tensorFlowService.evaluate(
			inputData, properties.getOutputName(), properties.getOutputIndex());

	Object outputData = tensorflowOutputConverter.convert(outputTensor, processorContext);

	if (properties.isSaveOutputInHeader()) {
		// Add the result to the message header
		return MessageBuilder
				.withPayload(input.getPayload())
				.copyHeadersIfAbsent(input.getHeaders())
				.setHeaderIfAbsent(TF_OUTPUT_HEADER, outputData)
				.build();
	}

	// Add the outputData as part of the message payload
	Message<?> outputMessage = MessageBuilder
			.withPayload(outputData)
			.copyHeadersIfAbsent(input.getHeaders())
			.build();

	return outputMessage;
}
 
Example #24
Source File: MqttSinkConfiguration.java    From mqtt with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = Sink.INPUT)
public MessageHandler mqttOutbound() {
	MqttPahoMessageHandler messageHandler = new MqttPahoMessageHandler(properties.getClientId(), mqttClientFactory);
	messageHandler.setAsync(properties.isAsync());
	messageHandler.setDefaultTopic(properties.getTopic());
	messageHandler.setConverter(pahoMessageConverter());
	return messageHandler;
}
 
Example #25
Source File: ContentTypeTckTests.java    From spring-cloud-stream with Apache License 2.0
@ServiceActivator(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
public Message<String> echo(Message<String> value) throws Exception {
	ObjectMapper mapper = new ObjectMapper();
	Person person = mapper.readValue(value.getPayload(), Person.class);
	return MessageBuilder.withPayload(person.toString())
			.setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN)
			.build();
}
 
Example #26
Source File: GcsMessageHandlerTests.java    From spring-cloud-gcp with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "siGcsTestChannel")
public MessageHandler outboundAdapter(Storage gcs) {
	GcsMessageHandler adapter = new GcsMessageHandler(new GcsSessionFactory(gcs));
	adapter.setRemoteDirectoryExpression(new ValueExpression<>("testGcsBucket"));

	return adapter;
}
 
Example #27
Source File: GcsSpringIntegrationApplication.java    From spring-cloud-gcp with Apache License 2.0
/**
 * A service activator that receives messages produced by the {@code synchronizerAdapter}
 * and simply outputs the file name of each to the console.
 * @return a message handler
 */
@Bean
@ServiceActivator(inputChannel = "new-file-channel")
public MessageHandler handleNewFileFromSynchronizer() {
	return (message) -> {
		File file = (File) message.getPayload();
		LOGGER.info("File " + file.getName() + " received by the non-streaming inbound "
				+ "channel adapter.");
	};
}
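The Javadoc above refers to a synchronizerAdapter bean that is not shown here. A sketch of what such a bean could look like, using the GCS inbound adapters from spring-cloud-gcp; the bucket name, local directory, and poll interval are assumptions:

// Sketch only: bucket, local directory, and poll interval are assumptions.
@Bean
@InboundChannelAdapter(channel = "new-file-channel", poller = @Poller(fixedDelay = "5000"))
public MessageSource<File> synchronizerAdapter(Storage gcs) {
	GcsInboundFileSynchronizer synchronizer = new GcsInboundFileSynchronizer(gcs);
	synchronizer.setRemoteDirectory("example-gcs-bucket");

	GcsInboundFileSynchronizingMessageSource messageSource =
			new GcsInboundFileSynchronizingMessageSource(synchronizer);
	messageSource.setLocalDirectory(new File("/tmp/gcs-sync"));
	return messageSource;
}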
 
Example #28
Source File: ReceiverConfiguration.java    From spring-cloud-gcp with Apache License 2.0
@ServiceActivator(inputChannel = "pubSubInputChannel")
public void messageReceiver(Person payload,
		@Header(GcpPubSubHeaders.ORIGINAL_MESSAGE) BasicAcknowledgeablePubsubMessage message) {
	LOGGER.info("Message arrived! Payload: " + payload);
	this.processedPersonsList.add(payload);
	message.ack();
}
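Because the receiver acknowledges the original Pub/Sub message itself, the adapter feeding "pubSubInputChannel" has to run in manual ack mode and convert payloads to Person. A sketch of such an inbound adapter; the subscription name is an assumption:

// Sketch only: the subscription name is an assumption.
@Bean
public PubSubInboundChannelAdapter messageChannelAdapter(
		@Qualifier("pubSubInputChannel") MessageChannel inputChannel,
		PubSubTemplate pubSubTemplate) {
	PubSubInboundChannelAdapter adapter =
			new PubSubInboundChannelAdapter(pubSubTemplate, "exampleSubscription");
	adapter.setOutputChannel(inputChannel);
	adapter.setAckMode(AckMode.MANUAL);
	adapter.setPayloadType(Person.class);
	return adapter;
}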
 
Example #29
Source File: BigQuerySampleConfiguration.java    From spring-cloud-gcp with Apache License 2.0
@Bean
@ServiceActivator(inputChannel = "bigQueryWriteDataChannel")
public MessageHandler messageSender(BigQueryTemplate bigQueryTemplate) {
	BigQueryFileMessageHandler messageHandler = new BigQueryFileMessageHandler(bigQueryTemplate);
	messageHandler.setFormatOptions(FormatOptions.csv());
	messageHandler.setOutputChannel(bigQueryJobReplyChannel());
	return messageHandler;
}
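The handler routes the result of the BigQuery load to a reply channel produced by bigQueryJobReplyChannel(), which is not shown. A sketch of a matching bean; the channel type is an assumption:

// Sketch only: the channel type is an assumption.
@Bean
public PublishSubscribeChannel bigQueryJobReplyChannel() {
	return new PublishSubscribeChannel();
}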
 
Example #30
Source File: GpfdistSinkConfiguration.java    From spring-cloud-stream-app-starters with Apache License 2.0
@Bean
@ServiceActivator(inputChannel= Sink.INPUT)
public GpfdistMessageHandler gpfdist(GreenplumLoad greenplumLoad, TaskScheduler sqlTaskScheduler, HostInfoDiscovery hostInfoDiscovery) {
	GpfdistMessageHandler handler = new GpfdistMessageHandler(properties.getGpfdistPort(), properties.getFlushCount(),
			properties.getFlushTime(), properties.getBatchTimeout(), properties.getBatchCount(), properties.getBatchPeriod(),
			properties.getDelimiter(), hostInfoDiscovery);
	handler.setRateInterval(properties.getRateInterval());
	handler.setGreenplumLoad(greenplumLoad);
	handler.setSqlTaskScheduler(sqlTaskScheduler);
	return handler;
}