Java Code Examples for backtype.storm.topology.TopologyBuilder#setSpout()

The following examples show how to use backtype.storm.topology.TopologyBuilder#setSpout(). They are extracted from open source projects; the source file and license are noted above each example.
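Before the project-specific examples, here is a minimal sketch of the pattern they all share: register a spout with setSpout(), subscribe one or more bolts to it, and submit the resulting topology. MySentenceSpout and MyCountBolt are hypothetical placeholders for your own IRichSpout/IRichBolt implementations; the overloads setSpout(id, spout) and setSpout(id, spout, parallelismHint) are the ones used throughout the examples below.

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
import backtype.storm.utils.Utils;

public class SetSpoutSketch {

    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();

        // setSpout(id, spout) uses the default parallelism of one executor;
        // setSpout(id, spout, parallelismHint) asks for several executors.
        // MySentenceSpout and MyCountBolt are placeholders for your own components.
        builder.setSpout("sentences", new MySentenceSpout(), 2);

        // bolts subscribe to the spout by its component id
        builder.setBolt("counter", new MyCountBolt(), 2)
                .fieldsGrouping("sentences", new Fields("word"));

        Config conf = new Config();
        conf.setNumWorkers(1);

        // run locally for a short while, then shut everything down
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("setspout-sketch", conf, builder.createTopology());
        Utils.sleep(30 * 1000);
        cluster.killTopology("setspout-sketch");
        cluster.shutdown();
    }
}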
Example 1
Source File: WordCountTopology.java    From jstorm with Apache License 2.0
/**
 * Main method
 */
public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
    if (args.length != 1) {
        throw new RuntimeException("Specify topology name");
    }

    int parallelism = 10;
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("word", new WordSpout(), parallelism);
    builder.setBolt("consumer", new ConsumerBolt(), parallelism)
            .fieldsGrouping("word", new Fields("word"));
    Config conf = new Config();
    conf.setNumStmgrs(parallelism);
    // set config here

    StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
}
 
Example 2
Source File: RollingTopWordsTest.java    From jstorm with Apache License 2.0
@Test
public void testRollingTopWords()
{
    TopologyBuilder topologyBuilder = new TopologyBuilder();
    topologyBuilder.setSpout("windowTestWordSpout", new WindowTestWordSpout(), 5);
    topologyBuilder.setBolt("windowTestRollingCountBolt", new WindowTestRollingCountBolt(9, 3), 4)
            .fieldsGrouping("windowTestWordSpout", new Fields("word")).addConfiguration(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 3);
    topologyBuilder.setBolt("windowTestIntermediateRankingBolt", new WindowTestIntermediateRankingBolt(DEFAULT_COUNT), 4)
            .fieldsGrouping("windowTestRollingCountBolt", new Fields("obj"));
    topologyBuilder.setBolt("windowTestTotalRankingsBolt", new WindowTestTotalRankingsBolt(DEFAULT_COUNT))
            .globalGrouping("windowTestIntermediateRankingBolt");

    Map<String, Object> config = new HashMap<>();
    config.put(Config.TOPOLOGY_NAME, "RollingTopWordsTest");

    //There is no easy way to validate whether the result is right because the
    //tick time is not precise, which makes the output after each window unpredictable.
    //For now the test simply passes all the time (no validator is supplied).
    //TODO: FIX ME: how to validate whether the result is right?
    JStormUnitTestRunner.submitTopology(topologyBuilder.createTopology(), config, 90, null);
}
 
Example 3
Source File: ManualDRPC.java    From jstorm with Apache License 2.0
public static void testDrpc() {
    TopologyBuilder builder = new TopologyBuilder();
    LocalDRPC drpc = new LocalDRPC();
    
    DRPCSpout spout = new DRPCSpout("exclamation", drpc);
    builder.setSpout("drpc", spout);
    builder.setBolt("exclaim", new ExclamationBolt(), 3).shuffleGrouping("drpc");
    builder.setBolt("return", new ReturnResults(), 3).shuffleGrouping("exclaim");
    
    LocalCluster cluster = new LocalCluster();
    Config conf = new Config();
    cluster.submitTopology("exclaim", conf, builder.createTopology());
    
    JStormUtils.sleepMs(30 * 1000);
    
    try {
        System.out.println(drpc.execute("exclamation", "aaa"));
        System.out.println(drpc.execute("exclamation", "bbb"));
    } catch (Exception e) {
        Assert.fail("Failed to test drpc");
    }
    
    drpc.shutdown();
    cluster.shutdown();
}
 
Example 4
Source File: TridentMinMaxOfVehiclesTopology.java    From jstorm with Apache License 2.0
public static void test() {
    TopologyBuilder builder = new TopologyBuilder();
    
    builder.setSpout("spout", new InOrderSpout(), 8);
    builder.setBolt("count", new Check(), 8).fieldsGrouping("spout", new Fields("c1"));
    
    conf.setMaxSpoutPending(20);
    
    String[] className = Thread.currentThread().getStackTrace()[1].getClassName().split("\\.");
    String topologyName = className[className.length - 1];
    
    if (isLocal) {
        drpc = new LocalDRPC();
    }
    
    try {
        JStormHelper.runTopology(buildVehiclesTopology(), topologyName, conf, 60,
                new JStormHelper.CheckAckedFail(conf), isLocal);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Failed");
    }
}
 
Example 5
Source File: FastWordCountSessionProcessingTimeWindowTopology.java    From jstorm with Apache License 2.0
public static void test() {
    int spout_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 1);
    int count_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_COUNT_PARALLELISM_HINT), 1);

    TopologyBuilder builder = new TopologyBuilder();

    builder.setSpout("spout", new FastRandomSentenceSpout(), spout_Parallelism_hint);

    WordCount wordCountBolt = new WordCount();
    builder.setBolt("count", wordCountBolt.sessionTimeWindow(Time.seconds(1L))
            .withWindowStateMerger(wordCountBolt), count_Parallelism_hint)
            .fieldsGrouping("spout", new Fields("word"));
    //.allGrouping("spout", Common.WATERMARK_STREAM_ID);

    String[] className = Thread.currentThread().getStackTrace()[1].getClassName().split("\\.");
    String topologyName = className[className.length - 1];

    try {
        JStormHelper.runTopology(builder.createTopology(), topologyName, conf, 60,
                new JStormHelper.CheckAckedFail(conf), true);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 6
Source File: HBaseAuditLogApplication.java    From eagle with Apache License 2.0
@Override
public StormTopology execute(Config config, StormEnvironment environment) {
    TopologyBuilder builder = new TopologyBuilder();
    KafkaSpoutProvider provider = new KafkaSpoutProvider();
    IRichSpout spout = provider.getSpout(config);

    HBaseAuditLogParserBolt bolt = new HBaseAuditLogParserBolt();

    int numOfSpoutTasks = config.getInt(SPOUT_TASK_NUM);
    int numOfParserTasks = config.getInt(PARSER_TASK_NUM);
    int numOfJoinTasks = config.getInt(JOIN_TASK_NUM);
    int numOfSinkTasks = config.getInt(SINK_TASK_NUM);

    builder.setSpout("ingest", spout, numOfSpoutTasks);
    BoltDeclarer boltDeclarer = builder.setBolt("parserBolt", bolt, numOfParserTasks);
    boltDeclarer.fieldsGrouping("ingest", new Fields(StringScheme.STRING_SCHEME_KEY));

    HBaseResourceSensitivityDataJoinBolt joinBolt = new HBaseResourceSensitivityDataJoinBolt(config);
    BoltDeclarer joinBoltDeclarer = builder.setBolt("joinBolt", joinBolt, numOfJoinTasks);
    joinBoltDeclarer.fieldsGrouping("parserBolt", new Fields("f1"));

    StormStreamSink sinkBolt = environment.getStreamSink("hbase_audit_log_stream", config);
    BoltDeclarer kafkaBoltDeclarer = builder.setBolt("kafkaSink", sinkBolt, numOfSinkTasks);
    kafkaBoltDeclarer.fieldsGrouping("joinBolt", new Fields("user"));
    return builder.createTopology();
}
 
Example 7
Source File: SimpleTopologyWithConfigParam.java    From flux with Apache License 2.0
public StormTopology getTopology(Config config) {
    TopologyBuilder builder = new TopologyBuilder();

    // spouts
    FluxShellSpout spout = new FluxShellSpout(
            new String[]{"node", "randomsentence.js"},
            new String[]{"word"});
    builder.setSpout("sentence-spout", spout, 1);

    // bolts
    builder.setBolt("log-bolt", new LogInfoBolt(), 1)
            .shuffleGrouping("sentence-spout");

    return builder.createTopology();
}
 
Example 8
Source File: Latency.java    From flink-perf with Apache License 2.0
public static void main(String[] args) throws Exception {
	ParameterTool pt = ParameterTool.fromArgs(args);

	int par = pt.getInt("para");

	TopologyBuilder builder = new TopologyBuilder();

	builder.setSpout("source0", new Generator(pt), pt.getInt("sourceParallelism"));
	int i = 0;
	for (; i < pt.getInt("repartitions", 1) - 1; i++) {
		System.out.println("adding source"+i+" --> source"+(i+1));
		builder.setBolt("source"+(i+1), new PassThroughBolt(pt), pt.getInt("sinkParallelism")).fieldsGrouping("source" + i, new Fields("id"));
	}
	System.out.println("adding final source"+i+" --> sink");

	builder.setBolt("sink", new Sink(pt), pt.getInt("sinkParallelism")).fieldsGrouping("source"+i, new Fields("id"));


	Config conf = new Config();
	conf.setDebug(false);
	//System.exit(1);

	if (!pt.has("local")) {
		conf.setNumWorkers(par);

		StormSubmitter.submitTopologyWithProgressBar("throughput-"+pt.get("name", "no_name"), conf, builder.createTopology());
	}
	else {
		conf.setMaxTaskParallelism(par);

		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology("throughput", conf, builder.createTopology());

		Thread.sleep(300000);

		cluster.shutdown();
	}

}
 
Example 9
Source File: FastWordCountTopology.java    From eagle with Apache License 2.0
public static void main(String[] args) throws Exception {

        TopologyBuilder builder = new TopologyBuilder();

        builder.setSpout("spout", new FastRandomSentenceSpout(), 4);

        builder.setBolt("split", new SplitSentence(), 4).shuffleGrouping("spout");
        builder.setBolt("count", new WordCount(), 4).fieldsGrouping("split", new Fields("word"));

        Config conf = new Config();
        conf.registerMetricsConsumer(LoggingMetricsConsumer.class);

        String name = "wc-test";
        if (args != null && args.length > 0) {
            name = args[0];
        }

//        conf.setNumWorkers(1);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(name, conf, builder.createTopology());

        Utils.sleep(Long.MAX_VALUE);
//
//        Map clusterConf = Utils.readStormConfig();
//        clusterConf.putAll(Utils.readCommandLineOpts());
//        Nimbus.Client client = NimbusClient.getConfiguredClient(clusterConf).getClient();
//
//        //Sleep for 5 mins
//        for (int i = 0; i < 10; i++) {
//            Thread.sleep(30 * 1000);
//            printMetrics(client, name);
//        }
//        kill(client, name);
    }
 
Example 10
Source File: TestSuite.java    From jstorm with Apache License 2.0
@Test
public void testQuery() {
  TopologyBuilder builder = new TopologyBuilder();
  builder.setSpout("query-spout", new TestQuerySpout());
  EsOutputDeclarer esOutputDeclarer = new EsOutputDeclarer().addField("date");
  EsQueryBolt esIndexBolt = new EsQueryBolt(esConfig, new TestQueryMapper(),
      esOutputDeclarer);
  builder.setBolt("query-bolt", esIndexBolt).shuffleGrouping("query-spout");
  builder.setBolt("end-bolt", new TestQueryBolt()).shuffleGrouping(
      "query-bolt");
  cluster.submitTopology("Query-Test", conf, builder.createTopology());
}
 
Example 11
Source File: SlidingWindowTopologyTest.java    From jstorm with Apache License 2.0
@Test
public void testSlidingWindowTopology()
{
    TopologyBuilder topologyBuilder = new TopologyBuilder();
    topologyBuilder.setSpout("spout", new SlidingWindowTestRandomSpout(SPOUT_LIMIT), 1);
    //the following bolt sums all the elements in the window. The window has a length of 30 elements
    //and slides every 10 elements.
    //for example, if the spout generates 1, 2, 3, 4 ... then the SumBolt generates 55, 210, 465, 765 ...
    topologyBuilder.setBolt("sum", new SlidingWindowTestSumBolt().withWindow(new BaseWindowedBolt.Count(SUM_BOLT_WINDOW_LENGTH),
            new BaseWindowedBolt.Count(SUM_BOLT_WINDOW_SLIDE)), 1).shuffleGrouping("spout");
    //the following bolt calculates the average value of the elements in the window. The window has a length
    //of 3, so it generates the average of 3 elements and then waits for another 3 elements.
    topologyBuilder.setBolt("avg", new SlidingWindowTestAvgBolt().withTumblingWindow(new BaseWindowedBolt.Count(AVG_BOLT_WINDOW_LENGTH)), 1)
            .shuffleGrouping("sum");

    Set<String> userDefineMetrics = new HashSet<String>();
    userDefineMetrics.add("SlidingWindowTopologyTest.SpoutAvgSum");
    userDefineMetrics.add("SlidingWindowTopologyTest.BoltAvgSum");

    Map<String, Object> config = new HashMap<>();
    config.put(Config.TOPOLOGY_NAME, "SlidingWindowTopologyTest");

    JStormUnitTestValidator validator = new JStormUnitTestMetricValidator(userDefineMetrics) {
        @Override
        public boolean validateMetrics(Map<String, Double> metrics) {
            int spoutAvgSum = (int) metrics.get("SlidingWindowTopologyTest.SpoutAvgSum").doubleValue();
            int boltAvgSum = (int) metrics.get("SlidingWindowTopologyTest.BoltAvgSum").doubleValue();
            System.out.println(spoutAvgSum + " " + boltAvgSum);
            assertEquals(spoutAvgSum, boltAvgSum);

            return true;
        }
    };

    JStormUnitTestRunner.submitTopology(topologyBuilder.createTopology(), config, 120, validator);
}
 
Example 12
Source File: WordCountTopology.java    From storm-example with Apache License 2.0
public static void main(String[] args) throws Exception {

        SentenceSpout spout = new SentenceSpout();
        SplitSentenceBolt splitBolt = new SplitSentenceBolt();
        WordCountBolt countBolt = new WordCountBolt();
        ReportBolt reportBolt = new ReportBolt();


        TopologyBuilder builder = new TopologyBuilder();

        builder.setSpout(SENTENCE_SPOUT_ID, spout);
        // SentenceSpout --> SplitSentenceBolt
        builder.setBolt(SPLIT_BOLT_ID, splitBolt)
                .shuffleGrouping(SENTENCE_SPOUT_ID);
        // SplitSentenceBolt --> WordCountBolt
        builder.setBolt(COUNT_BOLT_ID, countBolt)
                .fieldsGrouping(SPLIT_BOLT_ID, new Fields("word"));
        // WordCountBolt --> ReportBolt
        builder.setBolt(REPORT_BOLT_ID, reportBolt)
                .globalGrouping(COUNT_BOLT_ID);

        Config config = new Config();

        LocalCluster cluster = new LocalCluster();

        cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
        waitForSeconds(10);
        cluster.killTopology(TOPOLOGY_NAME);
        cluster.shutdown();
    }
 
Example 13
Source File: DeployTopology.java    From jstorm with Apache License 2.0
public void realMain(String[] args) throws Exception {

        String _name = "MetricTest";
/*        if (args.length > 0){
            conf = Utils.loadConf(args[0]);
        }*/

        int _killTopologyTimeout = JStormUtils.parseInt(conf.get(ConfigExtension.TASK_CLEANUP_TIMEOUT_SEC), 180);
        conf.put(ConfigExtension.TASK_CLEANUP_TIMEOUT_SEC, _killTopologyTimeout);

        int _numWorkers = JStormUtils.parseInt(conf.get(Config.TOPOLOGY_WORKERS), 6);

        int _numTopologies = JStormUtils.parseInt(conf.get(TOPOLOGY_NUMS), 1);
        int _spoutParallel = JStormUtils.parseInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 2);
        int _boltParallel = JStormUtils.parseInt(conf.get(TOPOLOGY_BOLT_PARALLELISM_HINT), 4);
        int _messageSize = JStormUtils.parseInt(conf.get(TOPOLOGY_MESSAGE_SIZES), 10);
        int _numAcker = JStormUtils.parseInt(conf.get(Config.TOPOLOGY_ACKER_EXECUTORS), 2);
        int _boltNum = JStormUtils.parseInt(conf.get(TOPOLOGY_BOLTS_NUMS), 3);
        boolean _ackEnabled = false;
        if (_numAcker > 0)
            _ackEnabled = true;

        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("messageSpout",
                    new DeploySpoult(_messageSize, _ackEnabled), _spoutParallel);
            builder.setBolt("messageBolt1", new DeployBolt(), _boltParallel)
                    .shuffleGrouping("messageSpout");
            for (int levelNum = 2; levelNum <= _boltNum; levelNum++) {
                builder.setBolt("messageBolt" + levelNum, new DeployBolt(), _boltParallel)
                        .shuffleGrouping("messageBolt" + (levelNum - 1));
            }

            conf.put(Config.TOPOLOGY_WORKERS, _numWorkers);
            conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, _numAcker);
            StormSubmitter.submitTopology(_name + "_" + topoNum, conf, builder.createTopology());
        }
    }
 
Example 14
Source File: SequenceFileTopology.java    From storm-hdfs with Apache License 2.0
public static void main(String[] args) throws Exception {
    Config config = new Config();
    config.setNumWorkers(1);

    SentenceSpout spout = new SentenceSpout();

    // sync the filesystem after every 1k tuples
    SyncPolicy syncPolicy = new CountSyncPolicy(1000);

    // rotate files when they reach 5MB
    FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, Units.MB);

    FileNameFormat fileNameFormat = new DefaultFileNameFormat()
            .withPath("/source/")
            .withExtension(".seq");

    // create sequence format instance.
    DefaultSequenceFormat format = new DefaultSequenceFormat("timestamp", "sentence");

    SequenceFileBolt bolt = new SequenceFileBolt()
            .withFsUrl(args[0])
            .withFileNameFormat(fileNameFormat)
            .withSequenceFormat(format)
            .withRotationPolicy(rotationPolicy)
            .withSyncPolicy(syncPolicy)
            .withCompressionType(SequenceFile.CompressionType.RECORD)
            .withCompressionCodec("deflate")
            .addRotationAction(new MoveFileAction().toDestination("/dest/"));




    TopologyBuilder builder = new TopologyBuilder();

    builder.setSpout(SENTENCE_SPOUT_ID, spout, 1);
    // SentenceSpout --> MyBolt
    builder.setBolt(BOLT_ID, bolt, 4)
            .shuffleGrouping(SENTENCE_SPOUT_ID);


    if (args.length == 1) {
        LocalCluster cluster = new LocalCluster();

        cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
        waitForSeconds(120);
        cluster.killTopology(TOPOLOGY_NAME);
        cluster.shutdown();
        System.exit(0);
    } else if(args.length == 2) {
        StormSubmitter.submitTopology(args[1], config, builder.createTopology());
    }
}
 
Example 15
Source File: E8_BackgroundSubtractionTopology.java    From StormCV with Apache License 2.0
/**
 * @param args
 */
public static void main(String[] args) 
{
	// first some global (topology) configuration
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	//conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");

	// StormCVConfig.
	conf.setNumWorkers(4);                                           // number of workers in the topology
	conf.setMaxSpoutPending(32);                                     // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true);         // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10);             // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false);      // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30);          // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)

	// taking two live webcam streams from http://webcam.prvgld.nl/
	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add( "rtsp://streaming3.webcam.nl:1935/n233/n233.stream" );

	int frameSkip = 13;

	// now create the topology itself (spout -> background subtraction --> streamer)
	TopologyBuilder builder = new TopologyBuilder();

	// just one spout reading streams; i.e. this spout reads two streams in parallel
	builder.setSpout("spout", new CVParticleSpout( new StreamFrameFetcher(urls).frameSkip(frameSkip) ), 1 );

	// add bolt that does background subtraction (choose MOG or MOG2 as algorithm)
	builder.setBolt("backgroundsubtraction", new SingleInputBolt( new BackgroundSubtractionOp().setAlgorithm(BSAlgorithm.MOG)), 1)
	.shuffleGrouping("spout");

	// add bolt that creates a web service on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: the required batcher acts as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
			, 1)
			.shuffleGrouping("backgroundsubtraction");

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!

	try {	
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "BackgroundSubtraction", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run for some time and then kill the topology
		cluster.shutdown();
		System.exit(1);

		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 16
Source File: E3_MultipleFeaturesTopology.java    From StormCV with Apache License 2.0
public static void main(String[] args){
	// first some global (topology) configuration
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir + "/resources/data/" );

	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> {face detection, sift} -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, extracting 1 frame out of 25 (i.e. 1 per second)
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// one bolt with a HaarCascade classifier detecting faces. This operation outputs a Frame including the Features with detected faces.
	// the xml file must be present on the classpath!
	builder.setBolt("face", new SingleInputBolt( new HaarCascadeOp("face", "lbpcascade_frontalface.xml").outputFrame(true)), 1)
		.shuffleGrouping("scale");
	
	// add a bolt that performs SIFT keypoint extraction
	builder.setBolt("sift", new SingleInputBolt( new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)), 2)
		.shuffleGrouping("scale");
	
	// Batch bolt that waits for input from both the face and sift detection bolts and combines them in a single frame object
	builder.setBolt("combiner", new BatchInputBolt(new SequenceNrBatcher(2), new FeatureCombinerOp()), 1)
		.fieldsGrouping("sift", new Fields(FrameSerializer.STREAMID))
		.fieldsGrouping("face", new Fields(FrameSerializer.STREAMID));
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("combiner");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: the required batcher acts as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "multifeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 17
Source File: E9_ContrastEnhancementTopology.java    From StormCV with Apache License 2.0
/**
 * @param args
 */
public static void main(String[] args) 
{
	// first some global (topology) configuration
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	//conf.put( StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib" );

	conf.setNumWorkers( 4 );                                           // number of workers in the topology
	conf.setMaxSpoutPending( 32 );                                     // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put( StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE ); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put( Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true );         // True if Storm should timeout messages or not.
	conf.put( Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10 );             // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put( StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false );      // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put( StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30 );          // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)

	// some live camera feeds from http://webcam.prvgld.nl/
	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add( "rtsp://streaming3.webcam.nl:1935/n233/n233.stream" );

	int frameSkip = 13;

	// now create the topology itself (spout -> contrast enhancement --> streamer)
	TopologyBuilder builder = new TopologyBuilder();

	// just one spout reading streams; i.e. this spout reads two streams in parallel
	builder.setSpout( "spout", new CVParticleSpout( new StreamFrameFetcher( urls ).frameSkip( frameSkip ) ), 1 );

	// add bolt that does contrast enhancement (choose HSV_EQUALIZE_HIST or GRAY_EQUALIZE_HIST as algorithm)
	builder.setBolt( "contrastenhancement", new SingleInputBolt( new GlobalContrastEnhancementOp().setAlgorithm( CEAlgorithm.HSV_EQUALIZE_HIST ) ), 1 )
	.shuffleGrouping( "spout" );

	// add bolt that creates a web service on port 8558 enabling users to view the result
	builder.setBolt( "streamer", new BatchInputBolt(
			new SlidingWindowBatcher( 2, frameSkip ).maxSize( 6 ), // note: the required batcher acts as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port( 8558 ).framerate( 5 ) ).groupBy( new Fields( FrameSerializer.STREAMID ) )
			, 1)
			.shuffleGrouping( "contrastenhancement" );

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!

	try 
	{	
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "BackgroundSubtraction", conf, builder.createTopology() );
		Utils.sleep( 120 * 1000 ); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit( 1 );

		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} 
	catch (Exception e)
	{
		e.printStackTrace();
	}
}
 
Example 18
Source File: AlertTopologyTest.java    From eagle with Apache License 2.0
@Ignore
@Test
public void testMultipleTopics() throws Exception {
    final String topoId = "myTopology";
    int numGroupbyBolts = 2;
    int numTotalGroupbyBolts = 3;
    System.setProperty("eagle.correlation.numGroupbyBolts", String.valueOf(numGroupbyBolts));
    System.setProperty("eagle.correlation.topologyName", topoId);
    System.setProperty("eagle.correlation.mode", "local");
    System.setProperty("eagle.correlation.zkHosts", "localhost:2181");
    final String topicName1 = "testTopic3";
    final String topicName2 = "testTopic4";
    // ensure topic ready
    LogManager.getLogger(CorrelationSpout.class).setLevel(Level.DEBUG);
    Config config = ConfigFactory.load();

    CreateTopicUtils.ensureTopicReady(System.getProperty("eagle.correlation.zkHosts"), topicName1);
    CreateTopicUtils.ensureTopicReady(System.getProperty("eagle.correlation.zkHosts"), topicName2);

    TopologyBuilder topoBuilder = new TopologyBuilder();

    int numBolts = config.getInt("eagle.correlation.numGroupbyBolts");
    CorrelationSpout spout = new CorrelationSpout(config, topoId, null, numBolts);
    String spoutId = "correlation-spout";
    SpoutDeclarer declarer = topoBuilder.setSpout(spoutId, spout);
    for (int i = 0; i < numBolts; i++) {
        TestBolt bolt = new TestBolt();
        BoltDeclarer boltDecl = topoBuilder.setBolt("engineBolt" + i, bolt);
        boltDecl.fieldsGrouping(spoutId, "stream_" + i, new Fields());
    }

    String topoName = config.getString("eagle.correlation.topologyName");
    LOG.info("start topology in local mode");
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology(topoName, new HashMap<>(), topoBuilder.createTopology());

    while (true) {
        try {
            Thread.sleep(1000);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
Example 19
Source File: AbstractTopology.java    From jea with Apache License 2.0
protected TopologyBuilder initBuilder(){
	TopologyBuilder builder = new TopologyBuilder();
	IRichSpout spout = initSpout();
	builder.setSpout(spoutId, spout);
	return builder;
}
 
Example 20
Source File: ForwardThroughput.java    From flink-perf with Apache License 2.0
public static void main(String[] args) throws Exception {
	ParameterTool pt = ParameterTool.fromArgs(args);

	int par = pt.getInt("para");

	TopologyBuilder builder = new TopologyBuilder();

	builder.setSpout("source0", new Generator(pt), pt.getInt("sourceParallelism"));

	//builder.setBolt("sink", new Sink(pt), pt.getInt("sinkParallelism")).noneGrouping("source0");
	builder.setBolt("sink", new Sink(pt), pt.getInt("sinkParallelism")).localOrShuffleGrouping("source0");


	Config conf = new Config();
	conf.setDebug(false);

	conf.setMaxSpoutPending(pt.getInt("maxPending", 1000));
	//System.exit(1);

	if (!pt.has("local")) {
		conf.setNumWorkers(par);

		StormSubmitter.submitTopologyWithProgressBar("forward-throughput-"+pt.get("name", "no_name"), conf, builder.createTopology());
	}
	else {
		conf.setMaxTaskParallelism(par);

		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology("forward-throughput", conf, builder.createTopology());

		Thread.sleep(300000);

		cluster.shutdown();
	}

}