org.apache.flink.streaming.api.functions.sink.RichSinkFunction Java Examples

The following examples show how to use org.apache.flink.streaming.api.functions.sink.RichSinkFunction. Each example is drawn from an open-source project; the source file, project, and license are noted in its heading.
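Before the examples, here is a minimal, self-contained sketch of a custom RichSinkFunction; the class and file names are illustrative and not taken from any of the projects below. The rich variant of SinkFunction adds lifecycle hooks: open() runs once per parallel sink instance before any records arrive, invoke() runs once per record, and close() runs when the task finishes, plus getRuntimeContext() for accumulators, metrics, and subtask information.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.io.PrintWriter;

// Illustrative sink that writes each record to a per-subtask local file.
public class PrintToFileSink extends RichSinkFunction<String> {

    private transient PrintWriter writer;

    @Override
    public void open(Configuration parameters) throws Exception {
        // Called once per parallel sink instance, before any invoke() call.
        writer = new PrintWriter("/tmp/sink-" + getRuntimeContext().getIndexOfThisSubtask() + ".txt");
    }

    @Override
    public void invoke(String value, Context context) throws Exception {
        // Called once for every record of the stream.
        writer.println(value);
    }

    @Override
    public void close() throws Exception {
        // Called when the task finishes or fails; release resources here.
        if (writer != null) {
            writer.close();
        }
    }
}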
Example #1
Source File: CoordinatedSourceITCase.java    From flink with Apache License 2.0
@SuppressWarnings("serial")
private void executeAndVerify(StreamExecutionEnvironment env, DataStream<Integer> stream, int numRecords) throws Exception {
	stream.addSink(new RichSinkFunction<Integer>() {
		@Override
		public void open(Configuration parameters) throws Exception {
			getRuntimeContext().addAccumulator("result", new ListAccumulator<Integer>());
		}

		@Override
		public void invoke(Integer value, Context context) throws Exception {
			getRuntimeContext().getAccumulator("result").add(value);
		}
	});
	List<Integer> result = env.execute().getAccumulatorResult("result");
	Collections.sort(result);
	assertEquals(numRecords, result.size());
	assertEquals(0, (int) result.get(0));
	assertEquals(numRecords - 1, (int) result.get(result.size() - 1));
}
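The helper above is private to the test class, so a hypothetical caller inside the same class could look like this; the bounded fromElements source is purely illustrative, since CoordinatedSourceITCase builds its streams from its own coordinated sources.

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// Illustrative bounded stream producing 0..4; the real test wires up a coordinated source instead.
DataStream<Integer> stream = env.fromElements(0, 1, 2, 3, 4);
executeAndVerify(env, stream, 5);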
 
Example #2
Source File: HdfsSink2.java    From sylph with Apache License 2.0
public LocalShuffle(int split, RichSinkFunction<T> userSink)
        throws IOException, ClassNotFoundException, IllegalAccessException, NoSuchFieldException
{
    this.sinks = new ArrayList<>(split);
    SerializedValue<RichSinkFunction<T>> serializedValue = new SerializedValue<>(userSink);
    for (int i = 0; i < split; i++) {
        StreamingFileSink<T> sink = (StreamingFileSink<T>) serializedValue.deserializeValue(this.getClass().getClassLoader());
        Field field = StreamingFileSink.class.getDeclaredField("bucketsBuilder");
        field.setAccessible(true);
        StreamingFileSink<T> mockSink = new StreamingFileSink<T>((StreamingFileSink.BulkFormatBuilder<T, ?>) field.get(sink), 0)
        {
            @Override
            public RuntimeContext getRuntimeContext()
            {
                return LocalShuffle.this.getRuntimeContext();
            }
        };
        this.sinks.add(mockSink); // register the independent copy created for this split
    }
}
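The SerializedValue round trip above is a copy-by-serialization trick: the user sink is serialized once and then deserialized in every loop iteration, so each split gets its own independent StreamingFileSink instance. A minimal sketch of the pattern in isolation (the helper name is hypothetical; it uses org.apache.flink.util.SerializedValue):

// Hypothetical helper: produce an independent deep copy of a serializable sink.
static <T> RichSinkFunction<T> copyOf(RichSinkFunction<T> userSink) throws Exception {
    SerializedValue<RichSinkFunction<T>> serialized = new SerializedValue<>(userSink);
    return serialized.deserializeValue(Thread.currentThread().getContextClassLoader());
}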
 
Example #3
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void close()
        throws Exception
{
    running = false;
    super.close();
    service.shutdown();
    for (RichSinkFunction<T> sink : sinks) {
        sink.close();
    }
}
 
Example #4
Source File: BaseKafkaSinkStreamOp.java    From Alink with Apache License 2.0
@Override
public T sinkFrom(StreamOperator in) {
    String topic = getParams().get(KafkaSinkParams.TOPIC);
    String fieldDelimiter = getParams().get(KafkaSinkParams.FIELD_DELIMITER);
    HasDataFormat.DataFormat dataFormat = getParams().get(KafkaSinkParams.DATA_FORMAT);
    String bootstrapServer = getParams().get(KafkaSinkParams.BOOTSTRAP_SERVERS);
    String properties = getParams().get(KafkaSinkParams.PROPERTIES);

    Properties props = new Properties();
    props.setProperty("bootstrap.servers", bootstrapServer);

    if (!StringUtils.isNullOrWhitespaceOnly(properties)) {
        String[] kvPairs = properties.split(",");
        for (String kvPair : kvPairs) {
            int pos = kvPair.indexOf('=');
            Preconditions.checkArgument(pos >= 0, "Invalid properties format, should be \"k1=v1,k2=v2,...\"");
            String key = kvPair.substring(0, pos);
            String value = kvPair.substring(pos + 1);
            props.setProperty(key, value);
        }
    }

    BaseKafkaSinkBuilder builder = getKafkaSinkBuilder();
    builder.setTopic(topic);
    builder.setFieldDelimiter(fieldDelimiter);
    builder.setFieldNames(in.getColNames());
    builder.setFieldTypes(in.getColTypes());
    builder.setFormat(dataFormat);
    builder.setProperties(props);

    RichSinkFunction<Row> sink = builder.build();
    in.getDataStream().addSink(sink).name("kafka");
    return (T) this;
}
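The PROPERTIES parameter is a flat "k1=v1,k2=v2" string of additional Kafka producer settings, which the loop above splits into the Properties object. An illustrative value (the keys are standard Kafka producer configs; the values are examples only):

// Example PROPERTIES value; each pair becomes one entry in the Properties object.
String properties = "acks=all,retries=3,linger.ms=5";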
 
Example #5
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void onProcessingTime(long timestamp)
        throws Exception
{
    for (RichSinkFunction<T> sink : sinks) {
        if (sink instanceof ProcessingTimeCallback) {
            ((ProcessingTimeCallback) sink).onProcessingTime(timestamp);
        }
    }
}
 
Example #6
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void initializeState(FunctionInitializationContext context)
        throws Exception
{
    for (RichSinkFunction<T> sink : sinks) {
        if (sink instanceof CheckpointedFunction) {
            ((CheckpointedFunction) sink).initializeState(context);
        }
    }
}
 
Example #7
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void snapshotState(FunctionSnapshotContext context)
        throws Exception
{
    for (RichSinkFunction<T> sink : sinks) {
        if (sink instanceof CheckpointedFunction) {
            ((CheckpointedFunction) sink).snapshotState(context);
        }
    }
}
 
Example #8
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void notifyCheckpointComplete(long checkpointId)
        throws Exception
{
    for (RichSinkFunction<T> sink : sinks) {
        if (sink instanceof CheckpointListener) {
            ((CheckpointListener) sink).notifyCheckpointComplete(checkpointId);
        }
    }
}
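Examples #5 to #8 all follow the same fan-out pattern: LocalShuffle implements Flink's optional callback interfaces and forwards each callback to every wrapped sink that also implements it. The same delegation applies to the mandatory lifecycle methods; below is a minimal sketch for open(), mirroring the close() delegation in Example #3 (a sketch only, not the actual HdfsSink2 code):

@Override
public void open(Configuration parameters) throws Exception {
    for (RichSinkFunction<T> sink : sinks) {
        sink.setRuntimeContext(getRuntimeContext()); // wrapped sinks reuse this task's runtime context
        sink.open(parameters);
    }
}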
 
Example #9
Source File: TableSqlTest.java    From sylph with Apache License 2.0
@Test
public void selectSinkErrorTest()
        throws Exception
{
    StreamTableEnvironmentImpl tableEnv = (StreamTableEnvironmentImpl) getTableEnv();

    DataStreamSource<Row> stream = tableEnv.execEnv().fromElements(Row.of("a1", "3.14"));
    Table table = tableEnv.fromDataStream(stream, "name,age");
    tableEnv.toAppendStream(tableEnv.sqlQuery("select name,age from " + table), Row.class)
            .addSink(new RichSinkFunction<Row>()
            {
                @Override
                public void invoke(Row value, Context context)
                        throws Exception
                {
                    long a1 = (long) value.getField(1); // field 1 ("age") is the String "3.14": the cast fails, which this error test expects
                    System.out.println(value);
                }
            });
    try {
        tableEnv.execute("");
        Assert.fail();
    }
    catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example #10
Source File: TableSqlTest.java    From sylph with Apache License 2.0
@Test
public void selectCastErrorTest()
        throws Exception
{
    StreamTableEnvironmentImpl tableEnv = (StreamTableEnvironmentImpl) getTableEnv();

    DataStreamSource<Row> stream = tableEnv.execEnv().fromElements(Row.of("a1", "3.14"));
    Table table = tableEnv.fromDataStream(stream, "name,age");

    tableEnv.toAppendStream(tableEnv.sqlQuery("select name,cast(age as bigint) as a1 from " + table), Row.class)
            .addSink(new RichSinkFunction<Row>()
            {
                @Override
                public void invoke(Row value, Context context)
                        throws Exception
                {
                    long a1 = (long) value.getField(0); // field 0 is the "name" column; the test expects this job to fail
                    System.out.println(value);
                }
            });
    try {
        tableEnv.execute("");
        Assert.fail();
    }
    catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example #11
Source File: HdfsSink2.java    From sylph with Apache License 2.0
@Override
public void run(DataStream<Row> stream)
{
    final RichSinkFunction<byte[]> sink = StreamingFileSink.forBulkFormat(
            new Path(writerDir),
            (BulkWriter.Factory<byte[]>) fsDataOutputStream -> new BulkWriter<byte[]>()
            {
                private final CompressionCodec codec = ReflectionUtils.newInstance(codecClass, new Configuration());
                private final CompressionOutputStream outputStream = codec.createOutputStream(fsDataOutputStream);
                private long bufferSize;

                @Override
                public void addElement(byte[] element)
                        throws IOException
                {
                    outputStream.write(element);
                    outputStream.write(10); //write \n
                    bufferSize += element.length;
                    if (bufferSize >= batchSize) {
                        outputStream.flush();
                        this.bufferSize = 0;
                    }
                }

                @Override
                public void flush()
                        throws IOException
                {
                    outputStream.flush();
                }

                @Override
                public void finish()
                        throws IOException
                {
                    outputStream.finish();
                    outputStream.close();
                }
            })
            .withBucketAssigner(new DateTimeBucketAssigner<>("yyyy-MM-dd--HH"))
            .build();
    stream.map(row -> {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < row.getArity(); i++) {
            builder.append("\u0001").append(row.getField(i)); // '\u0001' (Hive's default field delimiter) separates fields
        }
        return builder.substring(1).getBytes(UTF_8);
    })
            .addSink(sink)
            .name(this.getClass().getSimpleName());
}
 
Example #12
Source File: FileSystemTableSink.java    From alchemy with Apache License 2.0
@Override
public void emitDataStream(DataStream<Row> dataStream) {
    RichSinkFunction richSinkFunction = createFileRich();
    dataStream.addSink(richSinkFunction);
}
 
Example #13
Source File: DubboTableSink.java    From alchemy with Apache License 2.0
private RichSinkFunction createDubboRich() {
    return new DubboSinkFunction(dubboProperties, fieldNames);
}
 
Example #14
Source File: DubboTableSink.java    From alchemy with Apache License 2.0
@Override
public void emitDataStream(DataStream<Row> dataStream) {
    RichSinkFunction richSinkFunction = createDubboRich();
    dataStream.addSink(richSinkFunction);
}
 
Example #15
Source File: RedisTableSink.java    From alchemy with Apache License 2.0
private RichSinkFunction createRedisRich() {
    return RedisFactory.getInstance(this.fieldNames, this.fieldTypes, this.redisProperties);
}
 
Example #16
Source File: RedisTableSink.java    From alchemy with Apache License 2.0
@Override
public void emitDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
    RichSinkFunction richSinkFunction = createRedisRich();
    dataStream.addSink(richSinkFunction);
}
 
Example #17
Source File: Kafka011SinkBuilder.java    From Alink with Apache License 2.0
@Override
public RichSinkFunction<Row> build() {
    SerializationSchema<Row> serializationSchema = getSerializationSchema();
    return new FlinkKafkaProducer011<Row>(topic, serializationSchema, properties);
}
 
Example #18
Source File: TsdbTableSink.java    From alchemy with Apache License 2.0
private RichSinkFunction createTsdbRich() {
    MapFunction<String, String> tagMapFunction = createMapFunction(tsdbProperties.getMapClazz());
    return new TsdbSinkFunction(tsdbProperties, fieldNames, tagMapFunction);
}
 
Example #19
Source File: KafkaSinkBuilder.java    From Alink with Apache License 2.0
@Override
public RichSinkFunction<Row> build() {
    SerializationSchema<Row> serializationSchema = getSerializationSchema();
    return new FlinkKafkaProducer<Row>(topic, serializationSchema, properties);
}
 
Example #20
Source File: Kafka010SinkBuilder.java    From Alink with Apache License 2.0
@Override
public RichSinkFunction<Row> build() {
    SerializationSchema<Row> serializationSchema = getSerializationSchema();
    return new FlinkKafkaProducer010<Row>(topic, serializationSchema, properties);
}
 
Example #21
Source File: TsdbTableSink.java    From alchemy with Apache License 2.0
@Override
public void emitDataStream(DataStream<Row> dataStream) {
    RichSinkFunction richSinkFunction = createTsdbRich();
    dataStream.addSink(richSinkFunction);
}
 
Example #22
Source File: BaseKafkaSinkBuilder.java    From Alink with Apache License 2.0
/**
 * Construct the {@link RichSinkFunction} for specific version of Kafka.
 */
public abstract RichSinkFunction<Row> build();