Java Code Examples for org.apache.spark.streaming.api.java.JavaStreamingContext.receiverStream()

The following are Java code examples showing how to use the receiverStream() method of the org.apache.spark.streaming.api.java.JavaStreamingContext class.
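Every example follows the same pattern: receiverStream() takes a subclass of org.apache.spark.streaming.receiver.Receiver and wraps it in an input DStream. As a minimal, self-contained sketch of that pattern (the GreetingReceiver class below is hypothetical and not taken from any of the projects listed; it mirrors the structure described in Spark's custom-receiver documentation):

import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.receiver.Receiver;

// Hypothetical receiver that emits one greeting per second.
class GreetingReceiver extends Receiver<String> {

    GreetingReceiver() {
        super(StorageLevel.MEMORY_AND_DISK_2());
    }

    @Override
    public void onStart() {
        // onStart() must return quickly, so the actual work runs on its own thread.
        new Thread(() -> {
            while (!isStopped()) {
                store("hello from a custom receiver"); // hand the record to Spark
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }).start();
    }

    @Override
    public void onStop() {
        // Nothing to do: the worker thread exits once isStopped() returns true.
    }
}

// Wiring it into a streaming context:
// JavaReceiverInputDStream<String> greetings = jssc.receiverStream(new GreetingReceiver());
// greetings.print();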
Example 1
Project: assistance-platform-server   File: SparkService.java
@Override
public <T extends Event> JavaDStream<T> getEventReceiverStream(JavaStreamingContext sc,
    Class<T> eventType) {
  UserFilteredMessagingServiceReceiver<T> messagingReceiver =
      new UserFilteredMessagingServiceReceiver<T>(bundle.getModuleId(),
          PlatformClientFactory.getInstance().getUsedHost(), eventType);

  // Wrap the custom receiver in a DStream bound to the given streaming context
  JavaDStream<T> stream = sc.receiverStream(messagingReceiver);

  return stream;
}
 
Example 2
Project: incubator-pulsar   File: SparkStreamingPulsarReceiverExample.java
public static void main(String[] args) throws InterruptedException {
    SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("pulsar-spark");
    JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

    ClientConfiguration clientConf = new ClientConfiguration();
    ConsumerConfiguration consConf = new ConsumerConfiguration();
    String url = "pulsar://localhost:6650/";
    String topic = "persistent://sample/standalone/ns1/topic1";
    String subs = "sub1";

    JavaReceiverInputDStream<byte[]> msgs = jssc
            .receiverStream(new SparkStreamingPulsarReceiver(clientConf, consConf, url, topic, subs));

    JavaDStream<Integer> isContainingPulsar = msgs.flatMap(new FlatMapFunction<byte[], Integer>() {
        @Override
        public Iterator<Integer> call(byte[] msg) {
            return Arrays.asList(((new String(msg)).indexOf("Pulsar") != -1) ? 1 : 0).iterator();
        }
    });

    JavaDStream<Integer> numOfPulsar = isContainingPulsar.reduce(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    numOfPulsar.print();

    jssc.start();
    jssc.awaitTermination();
}
 
Example 3
Project: spark-cstar-canaries   File: Consumer.java
public void start() {
    final JavaStreamingContext context = new JavaStreamingContext(conf, checkpointInterval);

    // for graceful shutdown of the application ...
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            System.out.println("Shutting down streaming app...");
            context.stop(true, true);
            System.out.println("Shutdown of streaming app complete.");
        }
    });

    JKinesisReceiver receiver = new JKinesisReceiver(appName, streamName,
                                                     endpointUrl, regionName,
                                                     checkpointInterval,
                                                     InitialPositionInStream.LATEST);

    JavaDStream<String> dstream = context.receiverStream(receiver);

    JavaDStream<EventRecord> recs = dstream.map(new EventRecordMapFunc());

    recs.print();

    // persist the DStream to Cassandra
    javaFunctions(recs)
        .writerBuilder("canary", "eventrecord", mapToRow(EventRecord.class))
        .saveToCassandra();


    System.out.println("Start Spark Stream Processing...");

    context.start();
    context.awaitTermination();

}
 
Example 4
Project: nats-connector-spark   File: JavaCustomReceiver.java
public static void main(String[] args) throws Exception {
    if (args.length < 2) {
      System.err.println("Usage: JavaCustomReceiver <hostname> <port>");
      System.exit(1);
    }

//    StreamingExamples.setStreamingLogLevels();
    // https://github.com/apache/spark/blob/39e2bad6a866d27c3ca594d15e574a1da3ee84cc/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
    boolean log4jInitialized = Logger.getRootLogger().getAllAppenders().hasMoreElements();
    if (!log4jInitialized) {
      // Override the default log level since no log4j configuration is present.
///      logInfo("Setting log level to [WARN] for streaming example." +
///        " To override add a custom log4j.properties to the classpath.")
      Logger.getRootLogger().setLevel(Level.WARN);
    }

    // Create the context with a 5 second batch interval
    SparkConf sparkConf = new SparkConf().setAppName("JavaCustomReceiver").setMaster("local[*]").set("spark.driver.host", "localhost"); // https://issues.apache.org/jira/browse/
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(5000));

    // Create an input stream with the custom receiver on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    final JavaReceiverInputDStream<Tuple2<String, Long>> receiverStream = ssc.receiverStream(new JavaCustomReceiver(args[0], Integer.parseInt(args[1])));
    // Identity mapping: the receiver already emits Tuple2<String, Long> pairs
    PairFunction<Tuple2<String, Long>, String, Long> mapFunction =
        new PairFunction<Tuple2<String, Long>, String, Long>() {
          @Override
          public Tuple2<String, Long> call(Tuple2<String, Long> tuple) throws Exception {
            return tuple;
          }
        };
    final JavaPairDStream<String, Long> keyValues = receiverStream.mapToPair(mapFunction);

    // Sum the counts per key and print each batch
    JavaPairDStream<String, Long> byKeys = keyValues.reduceByKey((a, b) -> a + b);
    byKeys.print();
	
/*    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
      @Override
      public Iterable<String> call(String x) {
//        return Arrays.asList(SPACE.split(x)).iterator();
    	  return Arrays.asList(SPACE.split(x));
      }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
      new PairFunction<String, String, Integer>() {
        @Override public Tuple2<String, Integer> call(String s) {
          return new Tuple2<>(s, 1);
        }
      }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
          return i1 + i2;
        }
      });

    wordCounts.print();*/
    ssc.start();
    ssc.awaitTermination();
  }
 
Example 5
Project: vortex-spark   File: VortexUtils.java
public static <TYPE> JavaReceiverInputDStream<TYPE> createStream(JavaStreamingContext jssc,
                                                                 String topic, String topicRegType, Class<TYPE> topicType,
                                                                 StorageLevel storageLevel) {
    return jssc.receiverStream(new VortexReceiver<>(storageLevel, topic, topicRegType, topicType));
}
 
Example 6
Project: nats-connector-spark   File: StandardNatsToSparkConnectorImpl.java
/**
 * @param ssc the (Java-based) Spark Streaming Context
 * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
 */
public JavaReceiverInputDStream<R> asStreamOf(JavaStreamingContext ssc) {
	return ssc.receiverStream(this);
}
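For context, a sketch of how a connector like this is typically wired into a job. The NatsToSparkConnector.receiveFromNats(...) factory and the withNatsURL()/withSubjects() builder calls are assumptions based on the nats-connector-spark project's builder style and are not part of the snippet above; only asStreamOf(ssc) appears in this example.

// Sketch only; the factory and builder method names are assumptions (see note above).
SparkConf conf = new SparkConf().setAppName("nats-spark").setMaster("local[*]");
JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(1));

JavaReceiverInputDStream<String> messages =
        NatsToSparkConnector.receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
                            .withNatsURL("nats://localhost:4222")
                            .withSubjects("events.>")
                            .asStreamOf(ssc);   // the method defined above

messages.print();
ssc.start();
ssc.awaitTermination();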
 
Example 7
Project: nats-connector-spark   File: NatsStreamingToSparkConnectorImpl.java
/**
 * @param ssc the (Java-based) Spark Streaming Context
 * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
 */
public JavaReceiverInputDStream<R> asStreamOf(JavaStreamingContext ssc) {
	return ssc.receiverStream(this);
}