/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.dataartisans;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.KafkaSink;
import org.apache.flink.streaming.util.serialization.DeserializationSchema;
import org.apache.flink.streaming.util.serialization.SerializationSchema;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

import java.nio.charset.StandardCharsets;

/**
 * Simple example for writing data into Kafka.
 *
 * <p>The following arguments are required:
 * <ul>
 *   <li>"bootstrap.servers" (comma separated list of kafka brokers)</li>
 *   <li>"topic" the name of the topic to write data to.</li>
 * </ul>
 *
 * <p>This is an example command line argument:
 * "--topic test --bootstrap.servers localhost:9092"
 */
public class WriteIntoKafka {

	public static void main(String[] args) throws Exception {
		// create execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// parse user parameters; getRequired() throws if an argument is missing
		ParameterTool parameterTool = ParameterTool.fromArgs(args);

		// add a simple source which is writing some strings
		DataStream<String> messageStream = env.addSource(new SimpleStringGenerator());

		// write stream to Kafka (the nested SimpleStringSchema below shadows the
		// imported org.apache.flink...SimpleStringSchema and is the one used here)
		messageStream.addSink(new KafkaSink<>(
				parameterTool.getRequired("bootstrap.servers"),
				parameterTool.getRequired("topic"),
				new SimpleStringSchema()));

		env.execute();
	}

	/**
	 * Source that emits an unbounded stream of strings "element-0", "element-1", ...
	 * at a rate of roughly one element per 10 ms, until cancelled.
	 */
	public static class SimpleStringGenerator implements SourceFunction<String> {
		private static final long serialVersionUID = 2174904787118597072L;

		// volatile: cancel() is called from a different thread than run(),
		// so the loop must reliably observe the flag change.
		private volatile boolean running = true;
		private long i = 0;

		@Override
		public void run(SourceContext<String> ctx) throws Exception {
			while (running) {
				ctx.collect("element-" + (i++));
				Thread.sleep(10);
			}
		}

		@Override
		public void cancel() {
			running = false;
		}
	}

	/**
	 * String (de)serialization schema for Kafka. Uses UTF-8 explicitly so that
	 * serialization and deserialization are symmetric regardless of the JVM's
	 * platform default charset.
	 */
	public static class SimpleStringSchema implements DeserializationSchema<String>,
			SerializationSchema<String, byte[]> {
		private static final long serialVersionUID = 1L;

		public SimpleStringSchema() {
		}

		@Override
		public String deserialize(byte[] message) {
			return new String(message, StandardCharsets.UTF_8);
		}

		@Override
		public boolean isEndOfStream(String nextElement) {
			// unbounded stream: never signals end-of-stream
			return false;
		}

		@Override
		public byte[] serialize(String element) {
			return element.getBytes(StandardCharsets.UTF_8);
		}

		@Override
		public TypeInformation<String> getProducedType() {
			return TypeExtractor.getForClass(String.class);
		}
	}
}