org.apache.flink.api.common.functions.FlatMapFunction Java Examples
The following examples show how to use org.apache.flink.api.common.functions.FlatMapFunction. Each example notes the original project and source file it was taken from.
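A FlatMapFunction<IN, OUT> transforms one input element into zero, one, or more output elements, emitting them through the supplied Collector; this is what separates it from MapFunction, which must return exactly one result. Before the project examples, here is a minimal sketch of the interface in use (the Tokenizer class below is illustrative, not taken from any of the projects):

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

// Splits each line into words and emits one (word, 1) pair per word.
// A blank line emits nothing, illustrating the zero-or-more contract of flatMap.
public class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
    @Override
    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) {
        for (String word : line.toLowerCase().split("\\W+")) {
            if (!word.isEmpty()) {
                out.collect(new Tuple2<>(word, 1));
            }
        }
    }
}

Because results go through the collector rather than a return value, one input line may produce many pairs or none at all; several of the word-count examples below use exactly this pattern.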
Example #1
Source File: FlatMapOperator.java From flink with Apache License 2.0
@Override
protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "FlatMap at " + defaultName;
    // create operator
    FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> po =
            new FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>>(
                    function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #2
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
static FlatMapFunction<Event, String> createSemanticsCheckMapper(ParameterTool pt) {
    String semantics = pt.get(TEST_SEMANTICS.key(), TEST_SEMANTICS.defaultValue());

    SemanticsCheckMapper.ValidatorFunction validatorFunction;

    if (semantics.equalsIgnoreCase("exactly-once")) {
        validatorFunction = SemanticsCheckMapper.ValidatorFunction.exactlyOnce();
    } else if (semantics.equalsIgnoreCase("at-least-once")) {
        validatorFunction = SemanticsCheckMapper.ValidatorFunction.atLeastOnce();
    } else {
        throw new IllegalArgumentException("Unknown semantics requested: " + semantics);
    }

    return new SemanticsCheckMapper(validatorFunction);
}
Example #3
Source File: AlsPredictBatchOp.java From Alink with Apache License 2.0
private static DataSet<Tuple2<Long, float[]>> getFactors(BatchOperator<?> model, final int identity) {
    return model.getDataSet()
        .flatMap(new FlatMapFunction<Row, Tuple2<Long, float[]>>() {
            @Override
            public void flatMap(Row value, Collector<Tuple2<Long, float[]>> out) throws Exception {
                int w = AlsModelDataConverter.getIsUser(value) ? 0 : 1;
                if (w != identity) {
                    return;
                }
                long idx = AlsModelDataConverter.getVertexId(value);
                float[] factors = AlsModelDataConverter.getFactors(value);
                out.collect(Tuple2.of(idx, factors));
            }
        });
}
Example #4
Source File: FilterOperator.java From flink with Apache License 2.0
@Override
protected org.apache.flink.api.common.operators.base.FilterOperatorBase<T, FlatMapFunction<T, T>> translateToDataFlow(Operator<T> input) {
    String name = getName() != null ? getName() : "Filter at " + defaultName;
    // create operator
    PlanFilterOperator<T> po = new PlanFilterOperator<T>(function, name, getInputType());
    po.setInput(input);
    // set parallelism
    if (getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #5
Source File: FilterOperator.java From Flink-CEPplus with Apache License 2.0
@Override
protected org.apache.flink.api.common.operators.base.FilterOperatorBase<T, FlatMapFunction<T, T>> translateToDataFlow(Operator<T> input) {
    String name = getName() != null ? getName() : "Filter at " + defaultName;
    // create operator
    PlanFilterOperator<T> po = new PlanFilterOperator<T>(function, name, getInputType());
    po.setInput(input);
    // set parallelism
    if (getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #6
Source File: FlatMapOperator.java From Flink-CEPplus with Apache License 2.0
@Override
protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "FlatMap at " + defaultName;
    // create operator
    FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> po =
            new FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>>(
                    function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #7
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool params = ParameterTool.fromArgs(args);
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(params);

    DataSource<String> dataSource = env.fromElements(WORDS);

    dataSource.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
        @Override
        public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
            String[] words = line.split("\\W+");
            for (String word : words) {
                out.collect(new Tuple2<>(word, 1));
            }
        }
    })
        .groupBy(0)
        .sum(1)
        .print();

    long count = dataSource.count();
    System.out.println(count);
}
Example #8
Source File: KafkaDeserializationSchemaTest.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);
    Properties props = buildKafkaProps(parameterTool);

    FlinkKafkaConsumer011<ObjectNode> kafkaConsumer = new FlinkKafkaConsumer011<>("zhisheng",
            new KafkaMetricSchema(true), props);

    env.addSource(kafkaConsumer)
        .flatMap(new FlatMapFunction<ObjectNode, MetricEvent>() {
            @Override
            public void flatMap(ObjectNode jsonNodes, Collector<MetricEvent> collector) throws Exception {
                try {
                    // System.out.println(jsonNodes);
                    MetricEvent metricEvent = GsonUtil.fromJson(jsonNodes.get("value").asText(), MetricEvent.class);
                    collector.collect(metricEvent);
                } catch (Exception e) {
                    log.error("jsonNodes = {} convert to MetricEvent has an error", jsonNodes, e);
                }
            }
        })
        .print();

    env.execute();
}
Example #9
Source File: FlatMapOperatorBase.java From flink with Apache License 2.0
@Override
protected List<OUT> executeOnCollections(List<IN> input, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
    FlatMapFunction<IN, OUT> function = userFunction.getUserCodeObject();

    FunctionUtils.setFunctionRuntimeContext(function, ctx);
    FunctionUtils.openFunction(function, parameters);

    ArrayList<OUT> result = new ArrayList<OUT>(input.size());

    TypeSerializer<IN> inSerializer = getOperatorInfo().getInputType().createSerializer(executionConfig);
    TypeSerializer<OUT> outSerializer = getOperatorInfo().getOutputType().createSerializer(executionConfig);

    CopyingListCollector<OUT> resultCollector = new CopyingListCollector<OUT>(result, outSerializer);

    for (IN element : input) {
        IN inCopy = inSerializer.copy(element);
        function.flatMap(inCopy, resultCollector);
    }

    FunctionUtils.closeFunction(function);

    return result;
}
Example #10
Source File: AssociationRule.java From Alink with Apache License 2.0
/**
 * Generate association rules from frequent patterns.
 *
 * @param patterns            A DataSet of frequent patterns and their supports.
 * @param transactionsCnt     The number of transactions in the original dataset.
 * @param itemCounts          A DataSet of items and their supports.
 * @param minConfidence       Minimum confidence.
 * @param minLift             Minimum lift.
 * @param maxConsequentLength Maximum length of a consequent.
 * @return The association rules with fields: antecedent (left-hand side), consequent (right-hand side),
 *         support count, [lift, support, confidence].
 */
public static DataSet<Tuple4<int[], int[], Integer, double[]>> extractRules(
        DataSet<Tuple2<int[], Integer>> patterns,
        DataSet<Long> transactionsCnt,
        DataSet<Tuple2<Integer, Integer>> itemCounts,
        final double minConfidence,
        final double minLift,
        final int maxConsequentLength) {
    if (maxConsequentLength <= 0) {
        // emit nothing: an empty result set
        return patterns.getExecutionEnvironment().fromElements(0)
            .flatMap(new FlatMapFunction<Integer, Tuple4<int[], int[], Integer, double[]>>() {
                @Override
                public void flatMap(Integer value, Collector<Tuple4<int[], int[], Integer, double[]>> out) throws Exception {
                }
            });
    } else if (maxConsequentLength == 1) {
        return extractSingleConsequentRules(patterns, transactionsCnt, itemCounts, minConfidence, minLift);
    } else {
        return extractMultiConsequentRules(patterns, transactionsCnt, minConfidence, minLift, maxConsequentLength);
    }
}
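For context on the minConfidence and minLift thresholds above: for a rule A -> C over N transactions, confidence is support(A ∪ C) / support(A), and lift is that confidence divided by support(C) / N. A plain-arithmetic sketch of the two formulas (this helper is illustrative, not part of Alink):

// Illustrative helper, not Alink code: the rule metrics the method above filters on.
final class RuleStats {
    /** confidence(A -> C) = support(A ∪ C) / support(A) */
    static double confidence(long ruleSupport, long antecedentSupport) {
        return (double) ruleSupport / antecedentSupport;
    }

    /** lift(A -> C) = confidence(A -> C) / (support(C) / N) */
    static double lift(long ruleSupport, long antecedentSupport, long consequentSupport, long transactionsCnt) {
        return confidence(ruleSupport, antecedentSupport)
                / ((double) consequentSupport / transactionsCnt);
    }
}

A rule survives only if its confidence reaches minConfidence and its lift reaches minLift; a lift above 1 indicates the antecedent and consequent occur together more often than independence would predict.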
Example #11
Source File: FlinkStreamingTransformTranslators.java From flink-dataflow with Apache License 2.0
@Override
public void translateNode(Read.Unbounded<T> transform, FlinkStreamingTranslationContext context) {
    PCollection<T> output = context.getOutput(transform);

    DataStream<WindowedValue<T>> source;
    if (transform.getSource().getClass().equals(UnboundedFlinkSource.class)) {
        UnboundedFlinkSource flinkSource = (UnboundedFlinkSource) transform.getSource();
        source = context.getExecutionEnvironment()
            .addSource(flinkSource.getFlinkSource())
            .flatMap(new FlatMapFunction<String, WindowedValue<String>>() {
                @Override
                public void flatMap(String s, Collector<WindowedValue<String>> collector) throws Exception {
                    collector.collect(WindowedValue.<String>of(s, Instant.now(), GlobalWindow.INSTANCE, PaneInfo.NO_FIRING));
                }
            }).assignTimestampsAndWatermarks(new IngestionTimeExtractor());
    } else {
        source = context.getExecutionEnvironment()
            .addSource(new UnboundedSourceWrapper<>(context.getPipelineOptions(), transform));
    }
    context.setOutputDataStream(output, source);
}
Example #12
Source File: GraphLoader.java From OSTMap with Apache License 2.0
private DataSet<Tuple2<String, UserNodeValues>> getUserNodes(DataSet<JSONObject> jsonData) {
    DataSet<Tuple2<String, UserNodeValues>> userNodes = jsonData.flatMap(
        new FlatMapFunction<JSONObject, Tuple2<String, UserNodeValues>>() {
            @Override
            public void flatMap(JSONObject jsonObject, Collector<Tuple2<String, UserNodeValues>> out) throws Exception {
                JSONObject user = jsonObject.getJSONObject("user");
                String userId = user.getString("id_str");
                String userName = user.getString("name");
                out.collect(new Tuple2<String, UserNodeValues>(userId, new UserNodeValues(userId, userName)));

                // other mentioned users
                JSONObject entities = jsonObject.getJSONObject("entities");
                JSONArray userMentions = entities.getJSONArray("user_mentions");
                for (int i = 0; i < userMentions.length(); i++) {
                    JSONObject current = userMentions.getJSONObject(i);
                    String oUserId = current.getString("id_str");
                    String oUserName = current.getString("name");
                    out.collect(new Tuple2<String, UserNodeValues>(oUserId, new UserNodeValues(oUserId, oUserName)));
                }
            }
        }).distinct(0);
    return userNodes;
}
Example #13
Source File: SocketTransactionProcessorJob.java From flink-tutorials with Apache License 2.0
@Override
public DataStream<Query> readQueryStream(ParameterTool params, StreamExecutionEnvironment env) {
    return env.socketTextStream("localhost", 9999).flatMap(new FlatMapFunction<String, Query>() {
        private ObjectMapper om = new ObjectMapper();

        @Override
        public void flatMap(String s, Collector<Query> out) throws Exception {
            try {
                out.collect(om.readValue(s, Query.class));
            } catch (Throwable t) {
            }
        }
    });
}
Example #14
Source File: FlatMapOperatorCollectionTest.java From flink with Apache License 2.0
private FlatMapOperatorBase<String, String, FlatMapFunction<String, String>> getTestFlatMapOperator(
        FlatMapFunction<String, String> udf) {

    UnaryOperatorInformation<String, String> typeInfo = new UnaryOperatorInformation<String, String>(
            BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    return new FlatMapOperatorBase<String, String, FlatMapFunction<String, String>>(
            udf, typeInfo, "flatMap on Collections");
}
Example #15
Source File: IPv6HostnamesITCase.java From flink with Apache License 2.0
@Test
public void testClusterWithIPv6host() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        env.getConfig().disableSysoutLogging();

        // get input data
        DataSet<String> text = env.fromElements(WordCountData.TEXT.split("\n"));

        DataSet<Tuple2<String, Integer>> counts = text
            .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                @Override
                public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                    for (String token : value.toLowerCase().split("\\W+")) {
                        if (token.length() > 0) {
                            out.collect(new Tuple2<String, Integer>(token, 1));
                        }
                    }
                }
            })
            .groupBy(0).sum(1);

        List<Tuple2<String, Integer>> result = counts.collect();
        TestBaseUtils.compareResultAsText(result, WordCountData.COUNTS_AS_TUPLES);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #16
Source File: LambdaExtractionTest.java From flink with Apache License 2.0
@Test
public void testFlatMapLambda() {
    FlatMapFunction<Tuple2<Tuple1<Integer>, Boolean>, Tuple2<Tuple1<Integer>, String>> f = (i, out) -> out.collect(null);

    TypeInformation<?> ti = TypeExtractor.getFlatMapReturnTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
    if (!(ti instanceof MissingTypeInfo)) {
        assertTrue(ti.isTupleType());
        assertEquals(2, ti.getArity());
        assertTrue(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType());
        assertEquals(((TupleTypeInfo<?>) ti).getTypeAt(1), BasicTypeInfo.STRING_TYPE_INFO);
    }
}
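The MissingTypeInfo guard in this test exists because javac erases a lambda's generic parameters, so extraction can fail for a lambda where an anonymous class would succeed. In application code the usual remedy is an explicit returns(...) hint. A minimal sketch, assuming a streaming job (the sample data and class name are made up):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class LambdaHintExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements("to be or not to be")
            .flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
                for (String w : line.split(" ")) {
                    out.collect(Tuple2.of(w, 1));
                }
            })
            // the lambda's generic types are erased at compile time, so give Flink an explicit hint
            .returns(Types.TUPLE(Types.STRING, Types.INT))
            .print();
        env.execute("lambda type hint");
    }
}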
Example #17
Source File: TypeExtractor.java From flink with Apache License 2.0
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getFlatMapReturnTypes(FlatMapFunction<IN, OUT> flatMapInterface,
        TypeInformation<IN> inType, String functionName, boolean allowMissing) {
    return getUnaryOperatorReturnType(
        (Function) flatMapInterface,
        FlatMapFunction.class,
        0,
        1,
        new int[]{1, 0},
        inType,
        functionName,
        allowMissing);
}
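A direct call mirrors what the lambda test in Example #16 does, except with an anonymous class so the generic types survive extraction. A hedged usage sketch (the function body and name are illustrative):

TypeInformation<Tuple2<String, Integer>> ti = TypeExtractor.getFlatMapReturnTypes(
        new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
                out.collect(Tuple2.of(value, 1));
            }
        },
        BasicTypeInfo.STRING_TYPE_INFO, // type of the incoming elements
        "myFlatMap",                    // name used in error reporting (the test above passes null)
        false);                         // allowMissing=false: fail loudly instead of returning MissingTypeInfo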
Example #18
Source File: KafkaConsumerTestBase.java From Flink-CEPplus with Apache License 2.0
/**
 * Test that ensures that DeserializationSchema.isEndOfStream() is properly evaluated.
 *
 * @throws Exception
 */
public void runEndOfStreamTest() throws Exception {
    final int elementCount = 300;
    final String topic = writeSequence("testEndOfStream", elementCount, 1, 1);

    // read using custom schema
    final StreamExecutionEnvironment env1 = StreamExecutionEnvironment.getExecutionEnvironment();
    env1.setParallelism(1);
    env1.getConfig().setRestartStrategy(RestartStrategies.noRestart());
    env1.getConfig().disableSysoutLogging();

    Properties props = new Properties();
    props.putAll(standardProps);
    props.putAll(secureProps);

    DataStream<Tuple2<Integer, Integer>> fromKafka = env1.addSource(kafkaServer.getConsumer(topic,
            new FixedNumberDeserializationSchema(elementCount), props));
    fromKafka.flatMap(new FlatMapFunction<Tuple2<Integer, Integer>, Void>() {
        @Override
        public void flatMap(Tuple2<Integer, Integer> value, Collector<Void> out) throws Exception {
            // noop ;)
        }
    });

    tryExecute(env1, "Consume " + elementCount + " elements from Kafka");

    deleteTestTopic(topic);
}
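FixedNumberDeserializationSchema is defined elsewhere in the test base and is not shown here. The hook under test is DeserializationSchema.isEndOfStream(), which the Kafka consumer evaluates for each record; once it returns true, the source shuts down. A minimal sketch of a schema with that shape (illustrative, not the real test class; it assumes parallelism 1, as the test sets):

import java.io.IOException;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;

// Ends the stream after a fixed number of records; per-instance counter,
// so this sketch is only meaningful with a single parallel source instance.
public class BoundedCountSchema implements DeserializationSchema<Tuple2<Integer, Integer>> {
    private final int limit;
    private int seen;

    public BoundedCountSchema(int limit) {
        this.limit = limit;
    }

    @Override
    public Tuple2<Integer, Integer> deserialize(byte[] message) throws IOException {
        seen++;
        return Tuple2.of(seen, seen); // a real schema would parse the message bytes
    }

    @Override
    public boolean isEndOfStream(Tuple2<Integer, Integer> nextElement) {
        // returning true tells the consumer to stop this source
        return seen >= limit;
    }

    @Override
    public TypeInformation<Tuple2<Integer, Integer>> getProducedType() {
        return Types.TUPLE(Types.INT, Types.INT);
    }
}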
Example #19
Source File: SampleITCase.java From flink with Apache License 2.0
private FlatMapOperator<Tuple3<Integer, Long, String>, String> getSourceDataSet(ExecutionEnvironment env) {
    return CollectionDataSets.get3TupleDataSet(env).flatMap(
        new FlatMapFunction<Tuple3<Integer, Long, String>, String>() {
            @Override
            public void flatMap(Tuple3<Integer, Long, String> value, Collector<String> out) throws Exception {
                out.collect(value.f2);
            }
        });
}
Example #20
Source File: StreamingOperatorsITCase.java From flink with Apache License 2.0
@Test
public void testOperatorChainWithObjectReuseAndNoOutputOperators() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    DataStream<Integer> input = env.fromElements(1, 2, 3);
    input.flatMap(new FlatMapFunction<Integer, Integer>() {
        @Override
        public void flatMap(Integer value, Collector<Integer> out) throws Exception {
            out.collect(value << 1);
        }
    });
    env.execute();
}
Example #21
Source File: WordCountWithAnonymousClass.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // get input data
    DataSet<String> text = StaticData.getDefaultTextLineDataSet(env);

    DataSet<Tuple2<String, Integer>> counts =
        // split up the lines in pairs (2-tuples) containing: (word,1)
        text.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                // normalize and split the line
                String[] tokens = value.toLowerCase().split("\\W+");

                // emit the pairs
                for (String token : tokens) {
                    if (token.length() > 0) {
                        out.collect(new Tuple2<String, Integer>(token, 1));
                    }
                }
            }
        })
        // group by the tuple field "0" and sum up tuple field "1"
        .groupBy(0)
        .sum(1);

    // emit result
    counts.print();

    // execute program
    env.execute("WordCount Example");
}
Example #22
Source File: StreamingWCJavaApp.java From 163-bigdate-note with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
    String input = "file:///D:/imooc/新一代大数据计算引擎 Flink从入门到实战-v/input";

    // get the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // read the data
    DataStreamSource<String> dataStreamSource = env.socketTextStream("localhost", 9999);

    // apply the transformations
    dataStreamSource.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> collector) throws Exception {
            String[] tokens = value.toLowerCase().split(",");
            for (String token : tokens) {
                if (token.length() > 0) {
                    collector.collect(new Tuple2<String, Integer>(token, 1));
                }
            }
        }
    }).keyBy(0)
        .timeWindow(Time.seconds(5))
        .sum(1)
        .print()
        .setParallelism(1);

    env.execute("StreamingWCJavaApp");
}
Example #23
Source File: StreamingWCJavaApp.java From 163-bigdate-note with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
    String input = "file:///D:/imooc/新一代大数据计算引擎 Flink从入门到实战-v/input";

    // get the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // read the data
    DataStreamSource<String> dataStreamSource = env.socketTextStream("localhost", 9999);

    // apply the transformations
    dataStreamSource.flatMap(new FlatMapFunction<String, WC>() {
        @Override
        public void flatMap(String value, Collector<WC> collector) throws Exception {
            String[] tokens = value.toLowerCase().split(",");
            for (String token : tokens) {
                if (token.length() > 0) {
                    collector.collect(new WC(token, 1));
                }
            }
        }
    }).keyBy("word")
        .timeWindow(Time.seconds(5))
        .sum("count")
        .setParallelism(1)
        .print();

    env.execute("StreamingWCJavaApp");
}
Example #24
Source File: AsyncLookupJoinWithCalcRunner.java From flink with Apache License 2.0
@Override
public TableFunctionResultFuture<RowData> createFetcherResultFuture(Configuration parameters) throws Exception {
    TableFunctionResultFuture<RowData> joinConditionCollector = super.createFetcherResultFuture(parameters);
    FlatMapFunction<RowData, RowData> calc = generatedCalc.newInstance(getRuntimeContext().getUserCodeClassLoader());
    FunctionUtils.setFunctionRuntimeContext(calc, getRuntimeContext());
    FunctionUtils.openFunction(calc, parameters);
    return new TemporalTableCalcResultFuture(calc, joinConditionCollector);
}
Example #25
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties props = KafkaConfigUtil.buildKafkaProps(parameterTool);

    SingleOutputStreamOperator<Tuple2<String, String>> product = env.addSource(new FlinkKafkaConsumer011<>(
            parameterTool.get(METRICS_TOPIC), // this Kafka topic must match the topic used by the utility class above
            new SimpleStringSchema(),
            props))
        .map(string -> GsonUtil.fromJson(string, ProductEvent.class)) // deserialize the JSON
        .flatMap(new FlatMapFunction<ProductEvent, Tuple2<String, String>>() {
            @Override
            public void flatMap(ProductEvent value, Collector<Tuple2<String, String>> out) throws Exception {
                // collect the product id and price fields
                out.collect(new Tuple2<>(value.getId().toString(), value.getPrice().toString()));
            }
        });
    // product.print();

    // single Redis instance; the Redis host is usually read from a configuration file
    FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig.Builder().setHost(parameterTool.get("redis.host")).build();
    product.addSink(new RedisSink<Tuple2<String, String>>(conf, new RedisSinkMapper()));

    // Redis cluster
    /* FlinkJedisClusterConfig clusterConfig = new FlinkJedisClusterConfig.Builder()
        .setNodes(new HashSet<InetSocketAddress>(
            Arrays.asList(new InetSocketAddress("redis1", 6379)))).build(); */

    // Redis Sentinels
    /* FlinkJedisSentinelConfig sentinelConfig = new FlinkJedisSentinelConfig.Builder()
        .setMasterName("master")
        .setSentinels(new HashSet<>(Arrays.asList("sentinel1", "sentinel2")))
        .setPassword("")
        .setDatabase(1).build(); */

    env.execute("flink redis connector");
}
Example #26
Source File: ChainedFlatMapDriver.java From Flink-CEPplus with Apache License 2.0
@Override
public void setup(AbstractInvokable parent) {
    @SuppressWarnings("unchecked")
    final FlatMapFunction<IT, OT> mapper =
        BatchTask.instantiateUserCode(this.config, userCodeClassLoader, FlatMapFunction.class);
    this.mapper = mapper;
    FunctionUtils.setFunctionRuntimeContext(mapper, getUdfRuntimeContext());
}