org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction Java Examples

The following examples show how to use org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction. They are taken from open source projects; each example notes its source file, the project it comes from, and that project's license.
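Before the individual examples, here is a minimal orientation sketch of how an ElasticsearchSinkFunction is typically implemented and handed to an ElasticsearchSink.Builder. It assumes the flink-connector-elasticsearch6 dependency is on the classpath; the index name "my-index" and the String element type are placeholders chosen for illustration, not taken from any of the projects listed below.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

public class MinimalSinkSketch {

    public static ElasticsearchSink<String> build() {
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("127.0.0.1", 9200, "http"));

        return new ElasticsearchSink.Builder<String>(httpHosts,
            new ElasticsearchSinkFunction<String>() {
                @Override
                public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                    // Build one IndexRequest per incoming element; a real job would
                    // usually serialize a POJO instead of wrapping a raw String.
                    Map<String, Object> json = new HashMap<>();
                    json.put("data", element);

                    IndexRequest request = Requests.indexRequest()
                        .index("my-index")   // placeholder index name
                        .source(json);
                    indexer.add(request);
                }
            }).build();
    }
}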
Example #1
Source File: ESSink.java    From Mastering-Distributed-Tracing with MIT License
public static ElasticsearchSink<TraceSummary> build() {
    List<HttpHost> httpHosts = new ArrayList<>();
    httpHosts.add(new HttpHost("127.0.0.1", 9200, "http"));

    ElasticsearchSink.Builder<TraceSummary> esSinkBuilder = new ElasticsearchSink.Builder<>(httpHosts,
            new ElasticsearchSinkFunction<TraceSummary>() {

                @Override
                public void process(TraceSummary summary, RuntimeContext ctx, RequestIndexer indexer) {
                    indexer.add(Requests.indexRequest()//
                        .index("trace-summaries") //
                        .type("trace-summaries") //
                        .id(summary.traceId) //
                        .source(asJson(summary)));
                }
            });

    // configuration for the bulk requests; this instructs the sink to emit after
    // every element, otherwise they would be buffered
    esSinkBuilder.setBulkFlushMaxActions(1);

    return esSinkBuilder.build();
}
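The helper above only constructs the sink; attaching it to a job is a single call. The snippet below is a hedged illustration of that step, assuming a DataStream<TraceSummary> named summaries already exists elsewhere in the tracing pipeline (it is not part of the source file shown).

import org.apache.flink.streaming.api.datastream.DataStream;

// Hypothetical wiring (not part of ESSink.java): attach the sink built above
// to an existing DataStream<TraceSummary> from the tracing pipeline.
public static void attachSink(DataStream<TraceSummary> summaries) {
    summaries
        .addSink(ESSink.build())
        .name("trace-summary-es-sink");
}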
 
Example #2
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	ArrayList<HttpHost> httpHosts = new ArrayList<>();
	httpHosts.add(new HttpHost(ipAddress, 9200, "http"));

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
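Beyond setBulkFlushMaxActions, the ElasticsearchSink.Builder exposes further flush and failure-handling knobs. The fragment below is a sketch of a more fully configured builder under the same assumptions as the test above (an httpHosts list and an elasticsearchSinkFunction already in scope, plus imports for ElasticsearchSinkBase and org.apache.flink.streaming.connectors.elasticsearch.util.RetryRejectedExecutionFailureHandler); the concrete values are illustrative.

ElasticsearchSink.Builder<Tuple2<Integer, String>> builder =
    new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);

// Flush after 100 actions, 5 MB of buffered data, or 10 seconds, whichever comes first.
builder.setBulkFlushMaxActions(100);
builder.setBulkFlushMaxSizeMb(5);
builder.setBulkFlushInterval(10_000L);

// Retry failed bulk actions with exponential backoff instead of failing immediately.
builder.setBulkFlushBackoff(true);
builder.setBulkFlushBackoffType(ElasticsearchSinkBase.FlushBackoffType.EXPONENTIAL);
builder.setBulkFlushBackoffRetries(3);
builder.setBulkFlushBackoffDelay(1_000L);

// Re-queue requests rejected because the bulk queue was full; other failures still fail the job.
builder.setFailureHandler(new RetryRejectedExecutionFailureHandler());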
 
Example #3
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #4
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	List<InetSocketAddress> transports = new ArrayList<>();
	transports.add(new InetSocketAddress(InetAddress.getByName(ipAddress), 9300));

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			transports,
			elasticsearchSinkFunction);
}
 
Example #5
Source File: ElasticsearchSink.java    From flink with Apache License 2.0
private ElasticsearchSink(
	Map<String, String> bulkRequestsConfig,
	List<HttpHost> httpHosts,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction,
	ActionRequestFailureHandler failureHandler,
	RestClientFactory restClientFactory) {

	super(new Elasticsearch6ApiCallBridge(httpHosts, restClientFactory), bulkRequestsConfig, elasticsearchSinkFunction, failureHandler);
}
 
Example #6
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<InetSocketAddress> transportAddresses,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			transportAddresses,
			elasticsearchSinkFunction);
}
 
Example #7
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #8
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	ArrayList<HttpHost> httpHosts = new ArrayList<>();
	httpHosts.add(new HttpHost(ipAddress, 9200, "http"));

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
 
Example #9
Source File: Elasticsearch5SinkExample.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {

		final ParameterTool parameterTool = ParameterTool.fromArgs(args);

		if (parameterTool.getNumberOfParameters() < 3) {
			System.out.println("Missing parameters!\n" +
				"Usage: --numRecords <numRecords> --index <index> --type <type>");
			return;
		}

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.getConfig().disableSysoutLogging();
		env.enableCheckpointing(5000);

		DataStream<String> source = env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
			.map(new MapFunction<Long, String>() {
				@Override
				public String map(Long value) throws Exception {
					return "message #" + value;
				}
			});

		Map<String, String> userConfig = new HashMap<>();
		userConfig.put("cluster.name", "elasticsearch");
		// This instructs the sink to emit after every element, otherwise they would be buffered
		userConfig.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");

		List<InetSocketAddress> transports = new ArrayList<>();
		transports.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));

		source.addSink(new ElasticsearchSink<>(userConfig, transports, new ElasticsearchSinkFunction<String>() {
			@Override
			public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
				indexer.add(createIndexRequest(element, parameterTool));
			}
		}));

		env.execute("Elasticsearch5.x end to end sink test example");
	}
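The example relies on a createIndexRequest helper that is not shown in the snippet. A plausible reconstruction, based only on the command-line parameters the example requires (--index and --type), might look like the following; treat it as an assumption rather than a copy of the original file.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.java.utils.ParameterTool;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

// Hypothetical reconstruction of the helper used above: wrap the element in a
// JSON document and target the index/type given on the command line.
private static IndexRequest createIndexRequest(String element, ParameterTool parameterTool) {
    Map<String, Object> json = new HashMap<>();
    json.put("data", element);

    return Requests.indexRequest()
        .index(parameterTool.getRequired("index"))
        .type(parameterTool.getRequired("type"))
        .id(element)
        .source(json);
}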
 
Example #10
Source File: ElasticsearchSink.java    From flink with Apache License 2.0
private ElasticsearchSink(
	Map<String, String> bulkRequestsConfig,
	List<HttpHost> httpHosts,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction,
	ActionRequestFailureHandler failureHandler,
	RestClientFactory restClientFactory) {

	super(new Elasticsearch7ApiCallBridge(httpHosts, restClientFactory), bulkRequestsConfig, elasticsearchSinkFunction, failureHandler);
}
 
Example #11
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<HttpHost> httpHosts,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
 
Example #12
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #13
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<HttpHost> httpHosts,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
 
Example #14
Source File: ElasticsearchSink.java    From flink with Apache License 2.0
/**
 * Creates a new {@code ElasticsearchSink} that connects to the cluster using a {@link TransportClient}.
 *
 * @param userConfig The map of user settings that are used when constructing the {@link TransportClient} and {@link BulkProcessor}
 * @param transportAddresses The addresses of Elasticsearch nodes to which to connect using a {@link TransportClient}
 * @param elasticsearchSinkFunction This is used to generate multiple {@link ActionRequest} from the incoming element
 */
public ElasticsearchSink(
	Map<String, String> userConfig,
	List<InetSocketAddress> transportAddresses,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction) {

	this(userConfig, transportAddresses, elasticsearchSinkFunction, new NoOpFailureHandler());
}
 
Example #15
Source File: ElasticsearchSink.java    From flink with Apache License 2.0
/**
 * Creates a new {@code ElasticsearchSink} that connects to the cluster using a {@link TransportClient}.
 *
 * @param userConfig The map of user settings that are used when constructing the {@link TransportClient} and {@link BulkProcessor}
 * @param transportAddresses The addresses of Elasticsearch nodes to which to connect using a {@link TransportClient}
 * @param elasticsearchSinkFunction This is used to generate multiple {@link ActionRequest} from the incoming element
 * @param failureHandler This is used to handle failed {@link ActionRequest}
 */
public ElasticsearchSink(
	Map<String, String> userConfig,
	List<InetSocketAddress> transportAddresses,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction,
	ActionRequestFailureHandler failureHandler) {

	super(new Elasticsearch5ApiCallBridge(transportAddresses), userConfig, elasticsearchSinkFunction, failureHandler);
}
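The ActionRequestFailureHandler parameter decides what happens when an individual request inside a bulk fails. A custom handler is a small class; the sketch below is an illustrative one, close in spirit to Flink's own RetryRejectedExecutionFailureHandler, that re-queues requests rejected because of a full bulk queue and rethrows everything else.

import org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.util.ExceptionUtils;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

// Illustrative failure handler (not taken from the examples on this page):
// retry requests rejected by a full Elasticsearch bulk queue, fail on anything else.
public class RetryRejectedHandler implements ActionRequestFailureHandler {

    @Override
    public void onFailure(ActionRequest action, Throwable failure, int restStatusCode, RequestIndexer indexer) throws Throwable {
        if (ExceptionUtils.findThrowable(failure, EsRejectedExecutionException.class).isPresent()) {
            // Queue was full: hand the request back to the indexer for a retry.
            indexer.add(action);
        } else {
            // Anything else is treated as fatal and fails the sink.
            throw failure;
        }
    }
}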
 
Example #16
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<InetSocketAddress> addresses,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			addresses,
			elasticsearchSinkFunction);
}
 
Example #17
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #18
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	List<InetSocketAddress> transports = new ArrayList<>();
	transports.add(new InetSocketAddress(InetAddress.getByName(ipAddress), 9300));

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			transports,
			elasticsearchSinkFunction);
}
 
Example #19
Source File: ElasticsearchSink.java    From flink with Apache License 2.0 5 votes vote down vote up
private ElasticsearchSink(
	Map<String, String> bulkRequestsConfig,
	List<HttpHost> httpHosts,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction,
	ActionRequestFailureHandler failureHandler,
	RestClientFactory restClientFactory) {

	super(new Elasticsearch6ApiCallBridge(httpHosts, restClientFactory), bulkRequestsConfig, elasticsearchSinkFunction, failureHandler);
}
 
Example #20
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<HttpHost> httpHosts,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
 
Example #21
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #22
Source File: ElasticsearchSinkITCase.java    From flink with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	ArrayList<HttpHost> httpHosts = new ArrayList<>();
	httpHosts.add(new HttpHost(ipAddress, 9200, "http"));

	ElasticsearchSink.Builder<Tuple2<Integer, String>> builder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
	builder.setBulkFlushMaxActions(bulkFlushMaxActions);

	return builder.build();
}
 
Example #23
Source File: Elasticsearch5SinkExample.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {

		final ParameterTool parameterTool = ParameterTool.fromArgs(args);

		if (parameterTool.getNumberOfParameters() < 3) {
			System.out.println("Missing parameters!\n" +
				"Usage: --numRecords <numRecords> --index <index> --type <type>");
			return;
		}

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.enableCheckpointing(5000);

		DataStream<String> source = env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
			.map(new MapFunction<Long, String>() {
				@Override
				public String map(Long value) throws Exception {
					return "message #" + value;
				}
			});

		Map<String, String> userConfig = new HashMap<>();
		userConfig.put("cluster.name", "elasticsearch");
		// This instructs the sink to emit after every element, otherwise they would be buffered
		userConfig.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");

		List<InetSocketAddress> transports = new ArrayList<>();
		transports.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));

		source.addSink(new ElasticsearchSink<>(userConfig, transports, new ElasticsearchSinkFunction<String>() {
			@Override
			public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
				indexer.add(createIndexRequest(element, parameterTool));
			}
		}));

		env.execute("Elasticsearch5.x end to end sink test example");
	}
 
Example #24
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, RestHighLevelClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #25
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}
 
Example #26
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction,
		String ipAddress) throws Exception {

	List<InetSocketAddress> transports = new ArrayList<>();
	transports.add(new InetSocketAddress(InetAddress.getByName(ipAddress), 9300));

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			transports,
			elasticsearchSinkFunction);
}
 
Example #27
Source File: ElasticsearchSink.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new {@code ElasticsearchSink} that connects to the cluster using a {@link TransportClient}.
 *
 * @param userConfig The map of user settings that are used when constructing the {@link TransportClient} and {@link BulkProcessor}
 * @param transportAddresses The addresses of Elasticsearch nodes to which to connect using a {@link TransportClient}
 * @param elasticsearchSinkFunction This is used to generate multiple {@link ActionRequest} from the incoming element
 */
public ElasticsearchSink(
	Map<String, String> userConfig,
	List<InetSocketAddress> transportAddresses,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction) {

	this(userConfig, transportAddresses, elasticsearchSinkFunction, new NoOpFailureHandler());
}
 
Example #28
Source File: ElasticsearchSink.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new {@code ElasticsearchSink} that connects to the cluster using a {@link TransportClient}.
 *
 * @param userConfig The map of user settings that are used when constructing the {@link TransportClient} and {@link BulkProcessor}
 * @param transportAddresses The addresses of Elasticsearch nodes to which to connect using a {@link TransportClient}
 * @param elasticsearchSinkFunction This is used to generate multiple {@link ActionRequest} from the incoming element
 * @param failureHandler This is used to handle failed {@link ActionRequest}
 */
public ElasticsearchSink(
	Map<String, String> userConfig,
	List<InetSocketAddress> transportAddresses,
	ElasticsearchSinkFunction<T> elasticsearchSinkFunction,
	ActionRequestFailureHandler failureHandler) {

	super(new Elasticsearch5ApiCallBridge(transportAddresses), userConfig, elasticsearchSinkFunction, failureHandler);
}
 
Example #29
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSink(
		int bulkFlushMaxActions,
		String clusterName,
		List<InetSocketAddress> addresses,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) {

	return new ElasticsearchSink<>(
			Collections.unmodifiableMap(createUserConfig(bulkFlushMaxActions, clusterName)),
			addresses,
			elasticsearchSinkFunction);
}
 
Example #30
Source File: ElasticsearchSinkITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
protected ElasticsearchSinkBase<Tuple2<Integer, String>, TransportClient> createElasticsearchSinkForEmbeddedNode(
		int bulkFlushMaxActions,
		String clusterName,
		ElasticsearchSinkFunction<Tuple2<Integer, String>> elasticsearchSinkFunction) throws Exception {

	return createElasticsearchSinkForNode(
			bulkFlushMaxActions, clusterName, elasticsearchSinkFunction, "127.0.0.1");
}