org.apache.storm.Config Java Examples

The following examples show how to use org.apache.storm.Config. Each snippet is taken from an open-source project; the source file, project, and license are noted above each example.
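Before the project-specific examples, here is a minimal sketch, not taken from any of the projects below, of how org.apache.storm.Config is typically populated. The class name ConfigBasics and the method buildConf are illustrative only; the typed setters and the Config.* keys are part of the actual API. Config extends HashMap<String, Object>, so options can be set either through the setters or by putting keys directly.

import org.apache.storm.Config;

public class ConfigBasics {
    public static Config buildConf() {
        Config conf = new Config();
        // Typed setters for the most common topology options.
        conf.setDebug(false);          // Config.TOPOLOGY_DEBUG
        conf.setNumWorkers(2);         // Config.TOPOLOGY_WORKERS
        conf.setMaxSpoutPending(1000); // Config.TOPOLOGY_MAX_SPOUT_PENDING
        // Config is a Map, so any documented key can also be set directly.
        conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30);
        return conf;
    }
}

The resulting Config object is what the examples below pass to StormSubmitter.submitTopology(...) or LocalCluster.submitTopology(...).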
Example #1
Source File: DelegatorRemoteDriverProtocol.java    From storm-crawler with Apache License 2.0
@Override
public void configure(Config conf) {
    super.configure(conf);
    String protocolimplementation = ConfUtils.getString(conf,
            PROTOCOL_IMPL_CONFIG);
    try {
        Class protocolClass = Class.forName(protocolimplementation);
        boolean interfaceOK = Protocol.class
                .isAssignableFrom(protocolClass);
        if (!interfaceOK) {
            throw new RuntimeException("Class " + protocolimplementation
                    + " does not implement Protocol");
        }
        directProtocol = (Protocol) protocolClass.newInstance();
        directProtocol.configure(conf);
    } catch (Exception e) {
        throw new RuntimeException(
                "DelegatorRemoteDriverProtocol needs a valid protocol class for the config "
                        + PROTOCOL_IMPL_CONFIG + " but has: "
                        + protocolimplementation, e);
    }
}
 
Example #2
Source File: ReachTopology.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    LinearDRPCTopologyBuilder builder = construct();

    Config conf = new Config();
    conf.setNumWorkers(6);
    String topoName = "reach-drpc";
    if (args.length > 0) {
        topoName = args[0];
    }
    StormSubmitter.submitTopologyWithProgressBar(topoName, conf, builder.createRemoteTopology());

    try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
        String[] urlsToTry = new String[]{ "foo.com/blog/1", "engineering.twitter.com/blog/5", "notaurl.com" };
        for (String url : urlsToTry) {
            System.out.println("Reach of " + url + ": " + drpc.execute("reach", url));
        }
    }
}
 
Example #3
Source File: KafkaStormIntegrationTest.java    From incubator-retired-pirk with Apache License 2.0
private TestJob createPirkTestJob(final Config config)
{
  final SpoutConfig kafkaConfig = setUpTestKafkaSpout(config);
  return new TestJob()
  {
    StormTopology topology = PirkTopology.getPirkTopology(kafkaConfig);

    @Override
    public void run(ILocalCluster iLocalCluster) throws Exception
    {
      iLocalCluster.submitTopology("pirk_integration_test", config, topology);
      logger.info("Pausing for setup.");
      // Thread.sleep(4000);
      // KafkaProducer producer = new KafkaProducer<String,String>(createKafkaProducerConfig());
      // loadTestData(producer);
      // Thread.sleep(10000);
      while (OutputBolt.latch.getCount() == testCountDown)
      {
        Thread.sleep(1000);
      }
      testCountDown -= 1;

      logger.info("Finished...");
    }
  };
}
 
Example #4
Source File: BatchTimeoutHelper.java    From metron with Apache License 2.0
/**
 * @return the recommended TickInterval to request, in seconds
 * Guaranteed positive number.
 */
public int getRecommendedTickInterval() {
  if (!initialized) {this.init();}
  // Remember that parameter settings in the CLI override parameter settings set by the Storm component.
  // We shouldn't have to deal with this in the Metron environment, but just in case,
  // warn if our recommended value will be overridden by cliTickTupleFreqSecs.
  if (cliTickTupleFreqSecs > 0 && cliTickTupleFreqSecs > recommendedTickIntervalSecs) {
    LOG.warn("Parameter '" + Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS + "' has been forced to value '" +
            Integer.toString(cliTickTupleFreqSecs) + "' via CLI configuration.  This will override the desired " +
            "setting of '" + Integer.toString(recommendedTickIntervalSecs) +
            "' and may lead to delayed batch flushing.");
  }
  if (cliTickTupleFreqSecs > 0 && cliTickTupleFreqSecs < recommendedTickIntervalSecs) {
    LOG.info("Parameter '" + Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS + "' has been forced to value '" +
            Integer.toString(cliTickTupleFreqSecs) + "' via CLI configuration.  This will override the desired " +
            "setting of '" + Integer.toString(recommendedTickIntervalSecs) +
            "' and may lead to unexpected periodicity in batch flushing.");
  }
  return recommendedTickIntervalSecs;
}
 
Example #5
Source File: SlidingWindowTopology.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("integer", new RandomIntegerSpout(), 1);
    builder.setBolt("slidingsum", new SlidingWindowSumBolt().withWindow(Count.of(30), Count.of(10)), 1)
           .shuffleGrouping("integer");
    builder.setBolt("tumblingavg", new TumblingWindowAvgBolt().withTumblingWindow(Count.of(3)), 1)
           .shuffleGrouping("slidingsum");
    builder.setBolt("printer", new PrinterBolt(), 1).shuffleGrouping("tumblingavg");
    Config conf = new Config();
    conf.setDebug(true);
    String topoName = "test";
    if (args != null && args.length > 0) {
        topoName = args[0];
    }
    conf.setNumWorkers(1);
    StormSubmitter.submitTopologyWithProgressBar(topoName, conf, builder.createTopology());
}
 
Example #6
Source File: SerializationFactory.java    From incubator-heron with Apache License 2.0
@SuppressWarnings({"rawtypes", "unchecked"})
private static Map<String, String> normalizeKryoRegister(Map conf) {
  // TODO: de-duplicate this logic with the code in nimbus
  Object res = conf.get(Config.TOPOLOGY_KRYO_REGISTER);
  if (res == null) {
    return new TreeMap<String, String>();
  }
  Map<String, String> ret = new HashMap<>();
  if (res instanceof Map) {
    ret = (Map<String, String>) res;
  } else {
    for (Object o : (List) res) {
      if (o instanceof Map) {
        ret.putAll((Map) o);
      } else {
        ret.put((String) o, null);
      }
    }
  }

  //ensure always same order for registrations with TreeMap
  return new TreeMap<String, String>(ret);
}
 
Example #7
Source File: StatefulWindowingTopology.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("spout", new RandomIntegerSpout());
    builder.setBolt("sumbolt", new WindowSumBolt().withWindow(new Count(5), new Count(3))
                                                  .withMessageIdField("msgid"), 1).shuffleGrouping("spout");
    builder.setBolt("printer", new PrinterBolt(), 1).shuffleGrouping("sumbolt");
    Config conf = new Config();
    conf.setDebug(false);
    //conf.put(Config.TOPOLOGY_STATE_PROVIDER, "org.apache.storm.redis.state.RedisKeyValueStateProvider");

    String topoName = "test";
    if (args != null && args.length > 0) {
        topoName = args[0];
    }
    conf.setNumWorkers(1);
    StormSubmitter.submitTopologyWithProgressBar(topoName, conf, builder.createTopology());
}
 
Example #8
Source File: AggregateExample.java    From storm-net-adapter with Apache License 2.0
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    StreamBuilder builder = new StreamBuilder();
    /**
     * Computes average of the stream of numbers emitted by the spout. Internally the per-partition
     * sum and counts are accumulated and emitted to a downstream task where the partially accumulated
     * results are merged and the final result is emitted.
     */
    builder.newStream(new RandomIntegerSpout(), new ValueMapper<Integer>(0), 2)
           .window(TumblingWindows.of(BaseWindowedBolt.Duration.seconds(5)))
           .filter(x -> x > 0 && x < 500)
           .aggregate(new Avg())
           .print();

    Config config = new Config();
    String topoName = "AGG_EXAMPLE";
    if (args.length > 0) {
        topoName = args[0];
    }
    config.setNumWorkers(1);
    StormSubmitter.submitTopologyWithProgressBar(topoName, config, builder.build());
}
 
Example #9
Source File: TopologyRunner.java    From tutorials with MIT License
public static void runTopology() {
    String filePath = "./src/main/resources/operations.txt";
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("randomNumberSpout", new RandomNumberSpout());
    builder.setBolt("filteringBolt", new FilteringBolt()).shuffleGrouping("randomNumberSpout");
    builder.setBolt("aggregatingBolt", new AggregatingBolt()
      .withTimestampField("timestamp")
      .withLag(BaseWindowedBolt.Duration.seconds(1))
      .withWindow(BaseWindowedBolt.Duration.seconds(5))).shuffleGrouping("filteringBolt");
    builder.setBolt("fileBolt", new FileWritingBolt(filePath)).shuffleGrouping("aggregatingBolt");

    Config config = new Config();
    config.setDebug(false);
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("Test", config, builder.createTopology());
}
 
Example #10
Source File: PerformanceTestTopology.java    From jstorm with Apache License 2.0
public static void SetRemoteTopology()
        throws Exception {
    String streamName = (String) conf.get(Config.TOPOLOGY_NAME);
    if (streamName == null) {
        String[] className = Thread.currentThread().getStackTrace()[1].getClassName().split("\\.");
        streamName = className[className.length - 1];
    }
    
    TopologyBuilder builder = new TopologyBuilder();
    
    int spout_Parallelism_hint = Utils.getInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 1);
    int bolt_Parallelism_hint = Utils.getInt(conf.get(TOPOLOGY_BOLT_PARALLELISM_HINT), 2);
    builder.setSpout("spout", new TestSpout(), spout_Parallelism_hint);
    
    BoltDeclarer boltDeclarer = builder.setBolt("bolt", new TestBolt(), bolt_Parallelism_hint);
    // localFirstGrouping is only for jstorm
    // boltDeclarer.localFirstGrouping(SequenceTopologyDef.SEQUENCE_SPOUT_NAME);
    boltDeclarer.shuffleGrouping("spout");
    // .addConfiguration(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 60);
    
    StormSubmitter.submitTopology(streamName, conf, builder.createTopology());
    
}
 
Example #11
Source File: AnchoredWordCount.java    From storm-net-adapter with Apache License 2.0
protected int run(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();

    builder.setSpout("spout", new RandomSentenceSpout(), 4);

    builder.setBolt("split", new SplitSentence(), 4).shuffleGrouping("spout");
    builder.setBolt("count", new WordCount(), 4).fieldsGrouping("split", new Fields("word"));

    Config conf = new Config();
    conf.setMaxTaskParallelism(3);

    String topologyName = "word-count";

    conf.setNumWorkers(3);

    if (args != null && args.length > 0) {
        topologyName = args[0];
    }
    return submit(topologyName, conf, builder);
}
 
Example #12
Source File: SinkTopology.java    From DBus with Apache License 2.0
private StormTopology buildTopology() throws Exception {
    loadSinkerConf();

    Integer spoutSize = Integer.parseInt(sinkerConf.getProperty(SinkerConstants.STORM_KAFKA_READ_SPOUT_PARALLEL));
    Integer boltSize = Integer.parseInt(sinkerConf.getProperty(SinkerConstants.STORM_WRITE_BOUT_PARALLEL));
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("SinkerKafkaReadSpout", new SinkerKafkaReadSpout(), spoutSize);
    builder.setBolt("SinkerWriteBolt", new SinkerWriteBolt(), boltSize)
            .fieldsGrouping("SinkerKafkaReadSpout", "dataStream", new Fields("ns"))
            .allGrouping("SinkerKafkaReadSpout", "ctrlStream");

    Config conf = new Config();
    conf.setDebug(true);
    conf.setNumWorkers(1);

    return builder.createTopology();
}
 
Example #13
Source File: ParserTopologyCLITest.java    From metron with Apache License 2.0
public void testConfig_noExtra(boolean longOpt) throws ParseException {
 CommandLine cli = new CLIBuilder().with(ParserTopologyCLI.ParserOptions.BROKER_URL, "mybroker")
                                   .with(ParserTopologyCLI.ParserOptions.ZK_QUORUM, "myzk")
                                   .with(ParserTopologyCLI.ParserOptions.SENSOR_TYPES, "mysensor")
                                   .with(ParserTopologyCLI.ParserOptions.NUM_WORKERS, "1")
                                   .with(ParserTopologyCLI.ParserOptions.NUM_ACKERS, "2")
                                   .with(ParserTopologyCLI.ParserOptions.NUM_MAX_TASK_PARALLELISM, "3")
                                   .with(ParserTopologyCLI.ParserOptions.MESSAGE_TIMEOUT, "4")
                                   .build(longOpt);
  Optional<Config> configOptional = ParserTopologyCLI.ParserOptions.getConfig(cli);
  Config config = configOptional.get();
  assertEquals(1, config.get(Config.TOPOLOGY_WORKERS));
  assertEquals(2, config.get(Config.TOPOLOGY_ACKER_EXECUTORS));
  assertEquals(3, config.get(Config.TOPOLOGY_MAX_TASK_PARALLELISM));
  assertEquals(4, config.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS));
}
 
Example #14
Source File: StormMain.java    From chuidiang-ejemplos with GNU Lesser General Public License v3.0
public static void main(String[] args) throws InterruptedException, AlreadyAliveException, InvalidTopologyException, AuthorizationException {
   TopologyBuilder builder = new TopologyBuilder();
   builder.setSpout(DATA_GENERATOR, new ASpout());
   builder.setBolt(DATA_CALCULATOR, new ABolt()).shuffleGrouping(DATA_GENERATOR);
   builder.setBolt(DATA_PRINTER, new DataPrinter()).shuffleGrouping(DATA_CALCULATOR).shuffleGrouping(DATA_GENERATOR);

   Config config = new Config();

   LocalCluster cluster = new LocalCluster();
   cluster.submitTopology(TOPOLOGY_NAME, config,
         builder.createTopology());

   Thread.sleep(100000);
   cluster.killTopology(TOPOLOGY_NAME);
   cluster.shutdown();
}
 
Example #15
Source File: AckingTopology.java    From incubator-heron with Apache License 2.0
public static void main(String[] args) throws Exception {
  if (args.length != 1) {
    throw new RuntimeException("Specify topology name");
  }
  TopologyBuilder builder = new TopologyBuilder();

  int spouts = 2;
  int bolts = 2;
  builder.setSpout("word", new AckingTestWordSpout(), spouts);
  builder.setBolt("exclaim1", new ExclamationBolt(), bolts)
      .shuffleGrouping("word");

  Config conf = new Config();
  conf.setDebug(true);

  // Put an arbitrary large number here if you don't want to slow the topology down
  conf.setMaxSpoutPending(1000 * 1000 * 1000);

  // To enable acking, we need to setEnableAcking true
  conf.setNumAckers(1);
  conf.put(Config.TOPOLOGY_WORKER_CHILDOPTS, "-XX:+HeapDumpOnOutOfMemoryError");

  // Set the number of workers or stream managers
  conf.setNumWorkers(2);
  StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
}
 
Example #16
Source File: MultipleLoggerTopology.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();

    builder.setSpout("word", new TestWordSpout(), 10);
    builder.setBolt("exclaim1", new ExclamationLoggingBolt(), 3).shuffleGrouping("word");
    builder.setBolt("exclaim2", new ExclamationLoggingBolt(), 2).shuffleGrouping("exclaim1");

    Config conf = new Config();
    conf.setDebug(true);
    String topoName = MultipleLoggerTopology.class.getName();
    if (args != null && args.length > 0) {
        topoName = args[0];
    }
    conf.setNumWorkers(2);
    StormSubmitter.submitTopologyWithProgressBar(topoName, conf, builder.createTopology());
}
 
Example #17
Source File: NginxStorm.java    From storm-nginx-log with MIT License
public static void main(String[] argv) throws InterruptedException {

        Config config = new Config();
        config.setDebug(true);

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("LogSpout", new LogSpout(), 1);
        builder.setBolt("SpliteBolt", new SpliteBolt(), 1).shuffleGrouping("LogSpout");
        builder.setBolt("CounterBolt", new CounterBolt(), 1)
                .fieldsGrouping("SpliteBolt", new Fields("item"));

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("NginxLog", config, builder.createTopology());
//        Thread.sleep(10000);
//
//        cluster.killTopology("NginxLog");
//        cluster.shutdown();
    }
 
Example #18
Source File: SigarLoggingMetricsConsumerTest.java    From bullet-storm with Apache License 2.0
@Test
public void testMetricsAdditionNotReplacement() {
    Config config = new Config();
    Map<String, String> metrics = new HashMap<>();
    metrics.put("foo", "foo.bar.baz");
    config.put(Config.TOPOLOGY_WORKER_METRICS, metrics);

    SigarLoggingMetricsConsumer.register(config, null);

    Assert.assertNotNull(config.get(Config.TOPOLOGY_WORKER_METRICS));

    Map<String, String> actual = (Map<String, String>) config.get(Config.TOPOLOGY_WORKER_METRICS);
    Assert.assertTrue(actual.keySet().containsAll(SigarLoggingMetricsConsumer.METRICS.keySet()));
    Assert.assertTrue(actual.values().containsAll(SigarLoggingMetricsConsumer.METRICS.values()));
    Assert.assertEquals(actual.get("foo"), "foo.bar.baz");
}
 
Example #19
Source File: TopologyStarterTest.java    From breeze with Apache License 2.0
@Test
public void stormConfTypes() {
	Properties source = new Properties();
	source.put(TOPOLOGY_NAME, "name");
	source.put(TOPOLOGY_DEBUG, true);
	source.put(TOPOLOGY_WORKERS, 1);
	source.put(TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, 2048);
	source.put(SUPERVISOR_SLOTS_PORTS, asList(7000, 7001));
	source.put(DRPC_SERVERS, asList("host1","host2","host3"));
	source.put("unknown", "2");

	Config result = TopologyStarter.stormConfig(source);
	assertEquals("name", result.get(TOPOLOGY_NAME));
	assertEquals(true, result.get(TOPOLOGY_DEBUG));
	assertEquals(1, result.get(TOPOLOGY_WORKERS));
	assertEquals(2048, result.get(TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE));
	assertEquals(asList("host1", "host2", "host3"), result.get(DRPC_SERVERS));
	assertEquals(asList(7000, 7001), result.get(SUPERVISOR_SLOTS_PORTS));
	assertEquals("2", result.get("unknown"));
}
 
Example #20
Source File: SimpleJMXStormMetricProcessorTest.java    From storm-metrics-reporter with Apache License 2.0
@Test
public void testValidJMXObjectName() throws Exception {

  final String topologyName = "someTopology";

  Map config = new HashMap();
  config.put(Config.TOPOLOGY_NAME, topologyName);
  processor = new SimpleJMXStormMetricProcessor(config);

  Metric metric = new Metric("component", "kafkaPartition{host=kafka_9092, partition=0}", 1.9);
  IMetricsConsumer.TaskInfo taskInfo = new IMetricsConsumer.TaskInfo("localhost", 1010, "emitBot", 2, System.currentTimeMillis(), 100);

  String name = processor.mBeanName(metric, taskInfo);
  ObjectName objName = new ObjectName(name);

  assertThat(objName.getCanonicalName(), is("storm:component=component,host-port-task=localhost-1010-2,operation=\"kafkaPartition{host=kafka_9092, partition=0}\",topology=someTopology"));
}
 
Example #21
Source File: StormTestUtil.java    From incubator-atlas with Apache License 2.0
public static Config submitTopology(ILocalCluster stormCluster, String topologyName,
                                    StormTopology stormTopology) throws Exception {
    Config stormConf = new Config();
    stormConf.putAll(Utils.readDefaultConfig());
    stormConf.put("storm.cluster.mode", "local");
    stormConf.setDebug(true);
    stormConf.setMaxTaskParallelism(3);
    stormConf.put(Config.STORM_TOPOLOGY_SUBMISSION_NOTIFIER_PLUGIN,
            org.apache.atlas.storm.hook.StormAtlasHook.class.getName());

    stormCluster.submitTopology(topologyName, stormConf, stormTopology);

    Thread.sleep(10000);
    return stormConf;
}
 
Example #22
Source File: ManualDRPC.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();

    DRPCSpout spout = new DRPCSpout("exclamation");
    builder.setSpout("drpc", spout);
    builder.setBolt("exclaim", new ExclamationBolt(), 3).shuffleGrouping("drpc");
    builder.setBolt("return", new ReturnResults(), 3).shuffleGrouping("exclaim");

    Config conf = new Config();
    StormSubmitter.submitTopology("exclaim", conf, builder.createTopology());
    try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
        System.out.println(drpc.execute("exclamation", "aaa"));
        System.out.println(drpc.execute("exclamation", "bbb"));
    }
}
 
Example #23
Source File: WindowedWordCount.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamBuilder builder = new StreamBuilder();
    // A stream of random sentences
    builder.newStream(new RandomSentenceSpout(), new ValueMapper<String>(0), 2)
           /*
            * a two seconds tumbling window
            */
           .window(TumblingWindows.of(Duration.seconds(2)))
           /*
            * split the sentences to words
            */
           .flatMap(s -> Arrays.asList(s.split(" ")))
           /*
            * create a stream of (word, 1) pairs
            */
           .mapToPair(w -> Pair.of(w, 1))
           /*
            * compute the word counts in the last two second window
            */
           .countByKey()
           /*
            * emit the count for the words that occurred
            * at least five times in the last two seconds
            */
           .filter(x -> x.getSecond() >= 5)
           /*
            * print the results to stdout
            */
           .print();

    Config config = new Config();
    String topoName = "test";
    if (args.length > 0) {
        topoName = args[0];
    }
    config.setNumWorkers(1);
    StormSubmitter.submitTopologyWithProgressBar(topoName, config, builder.build());
}
 
Example #24
Source File: WindowedBoltExecutor.java    From twister2 with Apache License 2.0
protected int getTopologyTimeoutMillis(Map<String, Object> topoConf) {
  if (topoConf.get(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS) != null) {
    boolean timeOutsEnabled = (boolean) topoConf.get(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS);
    if (!timeOutsEnabled) {
      return Integer.MAX_VALUE;
    }
  }
  int timeout = 0;
  if (topoConf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS) != null) {
    timeout = ((Number) topoConf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)).intValue();
  }
  return timeout * 1000;
}
 
Example #25
Source File: InOrderDeliveryTest.java    From storm-net-adapter with Apache License 2.0
public static void main(String[] args) throws Exception {

        TopologyBuilder builder = new TopologyBuilder();

        builder.setSpout("spout", new InOrderSpout(), 8);
        builder.setBolt("count", new Check(), 8).fieldsGrouping("spout", new Fields("c1"));

        Config conf = new Config();
        conf.registerMetricsConsumer(org.apache.storm.metric.LoggingMetricsConsumer.class);

        String name = "in-order-test";
        if (args != null && args.length > 0) {
            name = args[0];
        }

        conf.setNumWorkers(1);
        StormSubmitter.submitTopologyWithProgressBar(name, conf, builder.createTopology());

        Map<String, Object> clusterConf = Utils.readStormConfig();
        clusterConf.putAll(Utils.readCommandLineOpts());
        Nimbus.Iface client = NimbusClient.getConfiguredClient(clusterConf).getClient();

        //Sleep for 50 mins
        for (int i = 0; i < 50; i++) {
            Thread.sleep(30 * 1000);
            printMetrics(client, name);
        }
        kill(client, name);
    }
 
Example #26
Source File: DefaultKryoFactory.java    From incubator-heron with Apache License 2.0
@Override
@SuppressWarnings("rawtypes")
public Kryo getKryo(Map conf) {
  KryoSerializableDefault k = new KryoSerializableDefault();
  k.setRegistrationRequired(
      !((Boolean) conf.get(Config.TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION)));
  k.setReferences(false);
  return k;
}
 
Example #27
Source File: WindowedBoltExecutor.java    From twister2 with Apache License 2.0
private void ensureCountLessThanMaxPending(int count, int maxPending) {
  if (count > maxPending) {
    throw new IllegalArgumentException("Window count (length + sliding interval) value " + count
        + " is more than " + Config.TOPOLOGY_MAX_SPOUT_PENDING
        + " value " + maxPending);
  }
}
 
Example #28
Source File: RTJoinExampleTopology.java    From streamline with Apache License 2.0
public static void main(String[] args) throws Exception {
//        if (!NimbusClient.isLocalOverride()) {
//            throw new IllegalStateException("This example only works in local mode.  "
//                    + "Run with storm local not storm jar");
//        }
        FeederSpout genderSpout = new FeederSpout(new Fields("id", "gender"));
        FeederSpout ageSpout = new FeederSpout(new Fields("id", "age"));

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("genderSpout", genderSpout);
        builder.setSpout("ageSpout", ageSpout);

        // inner join of 'age' and 'gender' records on 'id' field
        RealtimeJoinBolt joiner = new RealtimeJoinBolt(StreamKind.SOURCE)
                .select("genderSpout:id,ageSpout:id,gender,age")
                .from("genderSpout", 5, false)
                .outerJoin("ageSpout", Duration.ofSeconds(5), false, Cmp.equal("genderSpout:id", "ageSpout:id") )
                .withOutputStream("jstream");

        builder.setBolt("joiner", joiner)
                .fieldsGrouping("genderSpout", new Fields("id"))
                .fieldsGrouping("ageSpout", new Fields("id"))         ;

        builder.setBolt("printer", new PrinterBolt() ).shuffleGrouping("joiner", "jstream");

        Config conf = new Config();
        StormSubmitter.submitTopologyWithProgressBar("join-example", conf, builder.createTopology());

        generateGenderData(genderSpout);

        generateAgeData(ageSpout);
    }
 
Example #29
Source File: StormKafkaProcess.java    From BigData with GNU General Public License v3.0
public static void main(String[] args)
		throws InterruptedException, InvalidTopologyException, AuthorizationException, AlreadyAliveException {

	String topologyName = "TSAS";// 元组名
	// Zookeeper主机地址,会自动选取其中一个
	ZkHosts zkHosts = new ZkHosts("192.168.230.128:2181,192.168.230.129:2181,192.168.230.131:2181");
	String topic = "trademx";
	String zkRoot = "/storm";// storm在Zookeeper上的根路径
	String id = "tsaPro";

	// 创建SpoutConfig对象
	SpoutConfig spontConfig = new SpoutConfig(zkHosts, topic, zkRoot, id);

	TopologyBuilder builder = new TopologyBuilder();
	builder.setSpout("kafka", new KafkaSpout(spontConfig), 2);
	builder.setBolt("AccBolt", new AccBolt()).shuffleGrouping("kafka");
	builder.setBolt("ToDbBolt", new ToDbBolt()).shuffleGrouping("AccBolt");

	Config config = new Config();
	config.setDebug(false);

	if (args.length == 0) { // run locally, for testing
		LocalCluster localCluster = new LocalCluster();
		localCluster.submitTopology(topologyName, config, builder.createTopology());
		Thread.sleep(1000 * 3600);
		localCluster.killTopology(topologyName);
		localCluster.shutdown();
	} else { // submit to the cluster to run
		StormSubmitter.submitTopology(topologyName, config, builder.createTopology());
	}

}
 
Example #30
Source File: ParserTopologyComponent.java    From metron with Apache License 2.0
@Override
public void start() throws UnableToStartException {
  try {
    final Map<String, Object> stormConf = new HashMap<>();
    stormConf.put(Config.TOPOLOGY_DEBUG, true);
    ParserTopologyBuilder.ParserTopology topologyBuilder = ParserTopologyBuilder.build (
            topologyProperties.getProperty(ZKServerComponent.ZOOKEEPER_PROPERTY),
            Optional.ofNullable(brokerUrl),
            sensorTypes,
            (x,y) -> Collections.nCopies(sensorTypes.size(), 1),
            (x,y) -> Collections.nCopies(sensorTypes.size(), 1),
            (x,y) -> 1,
            (x,y) -> 1,
            (x,y) -> 1,
            (x,y) -> 1,
            (x,y) -> Collections.nCopies(sensorTypes.size(), new HashMap<>()),
            (x,y) -> null,
            (x,y) -> outputTopic,
            (x,y) -> errorTopic,
            (x,y) -> {
              Config c = new Config();
              c.putAll(stormConf);
              return c;
            }
    );

    stormCluster = new LocalCluster();
    stormCluster.submitTopology(getTopologyName(), stormConf, topologyBuilder.getBuilder().createTopology());
  } catch (Exception e) {
    throw new UnableToStartException("Unable to start parser topology for sensorTypes: " + sensorTypes, e);
  }
}