Java Code Examples for org.apache.storm.utils.Utils

The following examples show how to use org.apache.storm.utils.Utils. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: localization_nifi   Source File: NiFiSpout.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void nextTuple() {
    final NiFiDataPacket packet = queue.poll();
    if (packet == null) {
        // Nothing queued right now; back off briefly before Storm polls again.
        Utils.sleep(50);
        return;
    }

    // The first tuple value is always the raw data packet itself.
    final Values tuple = new Values(packet);

    // Append any configured attributes that are present on this packet.
    for (final String name : attributeNames) {
        if (packet.getAttributes().containsKey(name)) {
            tuple.add(packet.getAttributes().get(name));
        }
    }

    spoutOutputCollector.emit(tuple);
}
 
Example 2
Source Project: nightwatch   Source File: JSONScheme.java    License: GNU Lesser General Public License v3.0 6 votes vote down vote up
@Override
public List<Object> deserialize(ByteBuffer ser) {
    // Decode the buffer into a JSON string. Both paths now decode with
    // UTF8_CHARSET: the original array-backed path used the platform
    // default charset, which silently diverged from the non-array path
    // on JVMs whose default encoding is not UTF-8.
    String jsonStr;
    if (ser.hasArray()) {
        int base = ser.arrayOffset();
        jsonStr = new String(ser.array(), base + ser.position(), ser.remaining(), UTF8_CHARSET);
    } else {
        jsonStr = new String(Utils.toByteArray(ser), UTF8_CHARSET);
    }
    JSONObject jsonObject = JSONObject.fromObject(jsonStr);
    Values values = new Values();
    for (String outputField : outputFields) {
        if ("jsonBody".equals(outputField)) {
            // The pseudo-field "jsonBody" carries the entire raw payload.
            values.add(jsonStr);
        } else if (jsonObject.containsKey(outputField)) {
            values.add(jsonObject.get(outputField));
        } else {
            // Fields missing at the top level are looked up inside the
            // nested "rc_msg_para" object (may yield null if absent there too).
            JSONObject rcMsgPara = JSONObject.fromObject(jsonObject.get("rc_msg_para"));
            values.add(rcMsgPara.get(outputField));
        }
    }
    return values;
}
 
Example 3
Source Project: storm-crawler   Source File: WARCHdfsBolt.java    License: Apache License 2.0 6 votes vote down vote up
@Override
protected AbstractHDFSWriter makeNewWriter(Path path, Tuple tuple)
        throws IOException {
    AbstractHDFSWriter newWriter = super.makeNewWriter(path, tuple);

    // Refresh the warcinfo headers so each new file records its own
    // filename and creation timestamp.
    header_fields.put("WARC-Date", WARCRecordFormat.WARC_DF.format(Instant.now()));
    header_fields.put("WARC-Filename", path.getName());

    byte[] warcinfo = WARCRecordFormat.generateWARCInfo(header_fields);
    if (warcinfo != null && warcinfo.length > 0) {
        // The gzipped warcinfo record becomes the file's prologue.
        super.out.write(Utils.gzip(warcinfo));
    }

    return newWriter;
}
 
Example 4
@Test
public void testFail() throws Exception {
    setupExpectationsForTuple();
    setupExpectationsForTopologyContextNoEmit();

    EventCorrelatingOutputCollector collector = getSystemUnderTest();

    // Build an anchor tuple on the default stream for task 0.
    Tuple failedTuple = new TupleImpl(mockedTopologyContext,
            new Values(PARENT_STREAMLINE_EVENT), TASK_0, Utils.DEFAULT_STREAM_ID);

    collector.fail(failedTuple);

    // The failure must be forwarded to the wrapped collector exactly once.
    new Verifications() {{
        mockedOutputCollector.fail(failedTuple); times = 1;
    }};
}
 
Example 5
Source Project: incubator-atlas   Source File: StormAtlasHook.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Deserializes each spout and registers the data set it reads from as an
 * input of the topology referenceable.
 */
private void addTopologyInputs(Referenceable topologyReferenceable,
                               Map<String, SpoutSpec> spouts,
                               Map stormConf,
                               String topologyOwner, List<Referenceable> dependentEntities) throws IllegalAccessException {
    final List<Referenceable> inputDataSets = new ArrayList<>();
    for (Map.Entry<String, SpoutSpec> spoutEntry : spouts.entrySet()) {
        // Rebuild the spout object from its serialized form to inspect its class.
        Serializable spoutInstance = Utils.javaDeserialize(
                spoutEntry.getValue().get_spout_object().get_serialized_java(), Serializable.class);

        final Referenceable dataset = createDataSet(spoutInstance.getClass().getSimpleName(),
                topologyOwner, spoutInstance, stormConf, dependentEntities);
        if (dataset != null) {
            inputDataSets.add(dataset);
        }
    }

    topologyReferenceable.set("inputs", inputDataSets);
}
 
Example 6
Source Project: bullet-storm   Source File: QuerySpout.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void nextTuple() {
    PubSubMessage message = null;
    try {
        message = subscriber.receive();
    } catch (Exception e) {
        // A failed receive is logged and treated as "no message available".
        log.error(e.getMessage());
    }
    if (message == null) {
        // Nothing to do; yield briefly so we don't spin.
        Utils.sleep(1);
        return;
    }
    // Messages without content are metadata-only and go to the metadata
    // stream; everything else goes to the query stream.
    String id = message.getId();
    if (message.getContent() == null) {
        collector.emit(METADATA_STREAM, new Values(id, message.getMetadata()), id);
    } else {
        collector.emit(QUERY_STREAM, new Values(id, message.getContent(), message.getMetadata()), id);
    }
}
 
Example 7
public static void main(String[] args) throws Exception {
  // Three identical word spouts all feed a single exclamation bolt.
  TopologyBuilder builder = new TopologyBuilder();
  builder.setSpout("word0", new TestWordSpout(), 2);
  builder.setSpout("word1", new TestWordSpout(), 2);
  builder.setSpout("word2", new TestWordSpout(), 2);
  builder.setBolt("exclaim1", new ExclamationBolt(), 2)
      .shuffleGrouping("word0")
      .shuffleGrouping("word1")
      .shuffleGrouping("word2");

  // Debug logging on, bounded in-flight tuples, heap dump on OOM.
  Config conf = new Config();
  conf.setDebug(true);
  conf.setMaxSpoutPending(10);
  conf.put(Config.TOPOLOGY_WORKER_CHILDOPTS, "-XX:+HeapDumpOnOutOfMemoryError");

  boolean remote = args != null && args.length > 0;
  if (remote) {
    // First CLI argument names the topology on the cluster.
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
  } else {
    // Local mode: run for ten seconds, then tear everything down.
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("test", conf, builder.createTopology());
    Utils.sleep(10000);
    cluster.killTopology("test");
    cluster.shutdown();
  }
}
 
Example 8
@Test
public void testAck() throws Exception {
    setupExpectationsForTuple();
    setupExpectationsForTopologyContextNoEmit();

    EventCorrelatingOutputCollector collector = getSystemUnderTest();

    // Build an anchor tuple on the default stream for task 0.
    Tuple ackedTuple = new TupleImpl(mockedTopologyContext,
            new Values(PARENT_STREAMLINE_EVENT), TASK_0, Utils.DEFAULT_STREAM_ID);

    collector.ack(ackedTuple);

    // The ack must be forwarded to the wrapped collector exactly once.
    new Verifications() {{
        mockedOutputCollector.ack(ackedTuple); times = 1;
    }};
}
 
Example 9
Source Project: jstorm   Source File: PerformanceTestTopology.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the performance-test topology (one spout shuffling into one bolt)
 * and submits it to the remote cluster. The topology name comes from the
 * configured TOPOLOGY_NAME, or falls back to this class's simple name.
 */
public static void SetRemoteTopology()
        throws Exception {
    String topologyName = (String) conf.get(Config.TOPOLOGY_NAME);
    if (topologyName == null) {
        // Derive the simple class name of the current frame's class.
        String fqcn = Thread.currentThread().getStackTrace()[1].getClassName();
        topologyName = fqcn.substring(fqcn.lastIndexOf('.') + 1);
    }

    TopologyBuilder builder = new TopologyBuilder();

    // Parallelism hints default to 1 spout task and 2 bolt tasks.
    int spoutParallelism = Utils.getInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 1);
    int boltParallelism = Utils.getInt(conf.get(TOPOLOGY_BOLT_PARALLELISM_HINT), 2);

    builder.setSpout("spout", new TestSpout(), spoutParallelism);
    builder.setBolt("bolt", new TestBolt(), boltParallelism)
            .shuffleGrouping("spout");

    StormSubmitter.submitTopology(topologyName, conf, builder.createTopology());
}
 
Example 10
Source Project: nifi   Source File: NiFiSpout.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void nextTuple() {
    final NiFiDataPacket packet = queue.poll();
    if (packet == null) {
        // Queue empty; sleep briefly so the spout doesn't busy-spin.
        Utils.sleep(50);
        return;
    }

    // Emit the packet first, then any requested attributes it carries.
    final Values tuple = new Values(packet);
    for (final String attribute : attributeNames) {
        if (packet.getAttributes().containsKey(attribute)) {
            tuple.add(packet.getAttributes().get(attribute));
        }
    }

    spoutOutputCollector.emit(tuple);
}
 
Example 11
Source Project: localization_nifi   Source File: NiFiStormTopology.java    License: Apache License 2.0 5 votes vote down vote up
public static void main(String[] args) {
    // Site-to-Site client config for pulling data out of NiFi.
    final SiteToSiteClientConfig inputConfig = new SiteToSiteClient.Builder()
            .url("http://localhost:8080/nifi")
            .portName("Data for Storm")
            .buildConfig();

    // Site-to-Site client config for pushing results back into NiFi.
    final SiteToSiteClientConfig outputConfig = new SiteToSiteClient.Builder()
            .url("http://localhost:8080/nifi")
            .portName("Data from Storm")
            .buildConfig();

    // Bolt flushes to NiFi on a 5-second tick.
    final int tickFrequencySeconds = 5;
    final NiFiBolt niFiBolt =
            new NiFiBolt(outputConfig, new SimpleNiFiDataPacketBuilder(), tickFrequencySeconds);

    // Wire spout -> bolt.
    TopologyBuilder topology = new TopologyBuilder();
    topology.setSpout("nifiInput", new NiFiSpout(inputConfig));
    topology.setBolt("nifiOutput", niFiBolt).shuffleGrouping("nifiInput");

    // Run locally for 90 seconds, then shut down.
    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("test", conf, topology.createTopology());
    Utils.sleep(90000);
    cluster.shutdown();
}
 
Example 12
Source Project: storm-net-adapter   Source File: FastWordCountTopology.java    License: Apache License 2.0 5 votes vote down vote up
public static void main(String[] args) throws Exception {
    // spout -> split -> count, all at parallelism 4.
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("spout", new FastRandomSentenceSpout(), 4);
    builder.setBolt("split", new SplitSentence(), 4).shuffleGrouping("spout");
    builder.setBolt("count", new WordCount(), 4).fieldsGrouping("split", new Fields("word"));

    Config conf = new Config();
    conf.registerMetricsConsumer(org.apache.storm.metric.LoggingMetricsConsumer.class);

    // Topology name defaults to "wc-test" unless supplied on the command line.
    String name = (args != null && args.length > 0) ? args[0] : "wc-test";

    conf.setNumWorkers(1);
    StormSubmitter.submitTopologyWithProgressBar(name, conf, builder.createTopology());

    // Nimbus client built from the cluster config plus CLI overrides.
    Map<String, Object> clusterConf = Utils.readStormConfig();
    clusterConf.putAll(Utils.readCommandLineOpts());
    Nimbus.Iface client = NimbusClient.getConfiguredClient(clusterConf).getClient();

    // Report metrics every 30 seconds for 5 minutes, then kill the topology.
    for (int i = 0; i < 10; i++) {
        Thread.sleep(30 * 1000);
        printMetrics(client, name);
    }
    kill(client, name);
}
 
Example 13
/**
 * Emits a steadily increasing number. In production the data would come
 * from a message queue; Storm invokes this method in an endless loop.
 */
@Override
public void nextTuple() {
    this.collector.emit(new Values(++number));

    System.out.println("Spout: " + number);

    // Throttle to one tuple per second so the spout doesn't flood the topology.
    Utils.sleep(1000);
}
 
Example 14
Source Project: elasticsearch-hadoop   Source File: RandomSentenceSpout.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public void nextTuple() {
    // Pace emission to roughly ten sentences per second.
    Utils.sleep(100);
    // Fixed pool of sample sentences; one is chosen uniformly at random.
    String[] pool = new String[] { "the cow jumped over the moon", "an apple a day keeps the doctor away",
            "four score and seven years ago", "snow white and the seven dwarfs", "i am at two with nature" };
    _collector.emit(new Values(pool[_rand.nextInt(pool.length)]));
}
 
Example 15
/**
 * Emits a steadily increasing number. In production the data would come
 * from a message queue; Storm invokes this method in an endless loop.
 */
@Override
public void nextTuple() {
    this.collector.emit(new Values(++number));

    System.out.println("Spout: " + number);

    // Throttle to one tuple per second so the spout doesn't flood the topology.
    Utils.sleep(1000);
}
 
Example 16
/**
 * Emits (parity, value) pairs from a steadily increasing number. In
 * production the data would come from a message queue; Storm invokes this
 * method in an endless loop.
 */
@Override
public void nextTuple() {
    // Java evaluates arguments left to right: `number % 2` is the parity of
    // the value BEFORE `++number` increments it.
    this.collector.emit(new Values(number % 2, ++number));

    System.out.println("Spout: " + number);

    // Throttle to one tuple per second so the spout doesn't flood the topology.
    Utils.sleep(1000);
}
 
Example 17
Source Project: incubator-atlas   Source File: StormTestUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Submits the topology to the given local cluster with the Atlas hook wired
 * in as the submission notifier, waits for it to run, and returns the config
 * that was used.
 */
public static Config submitTopology(ILocalCluster stormCluster, String topologyName,
                                    StormTopology stormTopology) throws Exception {
    // Local-mode config on top of Storm's defaults.
    Config config = new Config();
    config.putAll(Utils.readDefaultConfig());
    config.put("storm.cluster.mode", "local");
    config.setDebug(true);
    config.setMaxTaskParallelism(3);
    config.put(Config.STORM_TOPOLOGY_SUBMISSION_NOTIFIER_PLUGIN,
            org.apache.atlas.storm.hook.StormAtlasHook.class.getName());

    stormCluster.submitTopology(topologyName, config, stormTopology);

    // Give the topology time to actually run before returning.
    Thread.sleep(10000);
    return config;
}
 
Example 18
Source Project: java-study   Source File: SentenceSpout.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Core of the spout: emits the sentence at the current index and advances
 * the index, wrapping back to the first sentence after the last one.
 * Storm calls this method repeatedly to pull tuples into the topology.
 */
public void nextTuple() {
    this.collector.emit(new Values(sentences[index]));
    // Advance cyclically; the modulo replaces the original manual
    // reset-to-zero check and auto-generated stub comments.
    index = (index + 1) % sentences.length;
    // Tiny sleep to avoid a tight busy loop when the topology is idle.
    Utils.sleep(1);
}
 
Example 19
Source Project: atlas   Source File: StormAtlasHook.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Maps each spout to the data set it consumes and attaches the resulting
 * entities to the topology's "inputs" relationship.
 */
private void addTopologyInputs(Map<String, SpoutSpec> spouts, Map stormConf, String topologyOwner, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) {
    List<AtlasEntity> inputs = new ArrayList<>();

    for (Map.Entry<String, SpoutSpec> spoutEntry : spouts.entrySet()) {
        // Rebuild the spout object from its serialized form to inspect its class.
        Serializable spoutInstance = Utils.javaDeserialize(spoutEntry.getValue().get_spout_object().get_serialized_java(), Serializable.class);
        AtlasEntity dataSetEntity = addDataSet(spoutInstance.getClass().getSimpleName(), topologyOwner, spoutInstance, stormConf, entityExtInfo);

        if (dataSetEntity != null) {
            inputs.add(dataSetEntity);
        }
    }

    topology.setRelationshipAttribute("inputs", AtlasTypeUtil.getAtlasRelatedObjectIds(inputs, RELATIONSHIP_DATASET_PROCESS_INPUTS));
}
 
Example 20
Source Project: atlas   Source File: StormAtlasHook.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds an Atlas entity describing a Storm bolt: its name, concrete driver
 * class, and flattened field configuration.
 */
private AtlasEntity createBoltInstance(String boltName, Bolt stormBolt) {
    // Reconstruct the bolt object so its class and field values can be recorded.
    Serializable instance = Utils.javaDeserialize(
            stormBolt.get_bolt_object().get_serialized_java(), Serializable.class);

    AtlasEntity boltEntity = new AtlasEntity(StormDataTypes.STORM_BOLT.getName());
    boltEntity.setAttribute(AtlasClient.NAME, boltName);
    boltEntity.setAttribute("driverClass", instance.getClass().getName());
    boltEntity.setAttribute("conf", StormTopologyUtil.getFieldValues(instance, true, null));

    return boltEntity;
}
 
Example 21
Source Project: storm-crawler   Source File: ConfUtils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reads a float value from the config map, returning {@code defaultValue}
 * when the key is absent.
 *
 * <p>Configs loaded from YAML/JSON frequently surface numeric values as
 * Integer, Long or Double rather than Float; the original {@code (Float)}
 * cast threw ClassCastException for anything but a boxed Float. Widening
 * through {@link Number} accepts all numeric types.
 */
public static float getFloat(Map<String, Object> conf, String key,
        float defaultValue) {
    Object obj = Utils.get(conf, key, defaultValue);
    if (obj instanceof Number)
        return ((Number) obj).floatValue();
    // Non-numeric values still fail with the same ClassCastException callers
    // would have seen before.
    return (Float) obj;
}
 
Example 22
Source Project: twister2   Source File: Twister2Spout.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Wraps a Storm {@link IRichSpout} so it can run inside a Twister2 graph.
 *
 * @param id         unique node id, also used to derive the default stream name
 * @param stormSpout the Storm spout being adapted
 */
public Twister2Spout(String id, IRichSpout stormSpout) {
  this.id = id;
  this.stormSpout = stormSpout;
  this.spoutDeclarer = new Twister2SpoutDeclarer();
  // Both edge maps are keyed by this spout's default stream id.
  this.outFieldsForEdge = new EdgeFieldMap(Utils.getDefaultStream(id));
  this.keyedOutEdges = new EdgeFieldMap(Utils.getDefaultStream(id));
  // Let the wrapped spout declare its output fields into our edge map.
  this.stormSpout.declareOutputFields(this.outFieldsForEdge);
}
 
Example 23
Source Project: streamline   Source File: NestSpout.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public void nextTuple() {
    // Pace emission to roughly ten tuples per second.
    Utils.sleep(100);

    // Synthesize a random Nest reading stamped with the current time.
    final long userId = random.nextLong();
    final long temperature = random.nextLong() % 150L;
    final long eventTimeMillis = System.currentTimeMillis();
    final long longitude = random.nextLong();
    final long latitude = random.nextLong();

    NestMessage message = new NestMessage(userId, temperature, eventTimeMillis, longitude, latitude);
    _collector.emit(new Values(message.serialize()));
}
 
Example 24
Source Project: incubator-atlas   Source File: StormAtlasHook.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds an Atlas referenceable describing a Storm spout: its name, concrete
 * driver class, and flattened field configuration.
 */
private Referenceable createSpoutInstance(String spoutName,
                                          SpoutSpec stormSpout) throws IllegalAccessException {
    // Reconstruct the spout object so its class and field values can be recorded.
    Serializable instance = Utils.javaDeserialize(
            stormSpout.get_spout_object().get_serialized_java(), Serializable.class);

    Referenceable spoutEntity = new Referenceable(StormDataTypes.STORM_SPOUT.getName());
    spoutEntity.set(AtlasClient.NAME, spoutName);
    spoutEntity.set("driverClass", instance.getClass().getName());
    spoutEntity.set("conf", StormTopologyUtil.getFieldValues(instance, true, null));

    return spoutEntity;
}
 
Example 25
@Override
public void nextTuple() {
    // One batch every five seconds.
    Utils.sleep(5000);
    // Emit every tuple in the current batch, then move to the next batch
    // (cycling back to the first after the last).
    for (Values tuple : values.get(index)) {
        collector.emit(tuple);
    }
    index = (index + 1) % values.size();
}
 
Example 26
Source Project: twister2   Source File: CheckpointSpout.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public void nextTuple() {
  // Recovery takes priority over checkpointing; with neither pending,
  // idle-sleep so the spout doesn't busy-spin.
  if (shouldRecover()) {
    handleRecovery();
    startProgress();
    return;
  }
  if (shouldCheckpoint()) {
    doCheckpoint();
    startProgress();
    return;
  }
  Utils.sleep(sleepInterval);
}
 
Example 27
Source Project: metron   Source File: BatchTimeoutHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns Storm's default config overlaid with the cluster config file,
 * deliberately ignoring any command-line overrides.
 *
 * <p>The file is the one named by the {@code storm.conf.file} system
 * property when set, otherwise {@code storm.yaml}.
 */
private Map readStormConfigWithoutCLI() {
  Map ret = Utils.readDefaultConfig();
  String confFile = System.getProperty("storm.conf.file");
  Map storm;
  if (confFile == null || confFile.isEmpty()) {
    // Implicit storm.yaml is optional (mustExist = false).
    storm = Utils.findAndReadConfigFile("storm.yaml", false);
  } else {
    // An explicitly named file must exist (mustExist = true).
    storm = Utils.findAndReadConfigFile(confFile, true);
  }
  ret.putAll(storm);
  return ret;
}
 
Example 28
/**
 * Supplies (requested streamId, expected streamId) pairs for emit tests.
 */
public static Object[][] provideStreamIds() {
    Object[][] cases = new Object[2][];
    // A null request means the emitter falls back to the default stream.
    cases[0] = new Object[]{ null, Utils.DEFAULT_STREAM_ID };
    // An explicit streamId must be passed through untouched.
    cases[1] = new Object[]{ "SpecialStreamId", "SpecialStreamId" };
    return cases;
}
 
Example 29
Source Project: tutorials   Source File: RandomNumberSpout.java    License: MIT License 5 votes vote down vote up
@Override
public void nextTuple() {
    // Emit at most one tuple per second.
    Utils.sleep(1000);
    // nextInt(101) yields a value in [0, 100] inclusive.
    int operation = random.nextInt(101);
    long timestamp = System.currentTimeMillis();
    collector.emit(new Values(operation, timestamp));
}
 
Example 30
/**
 * Supplies (requested streamId, expected streamId) pairs for emit tests.
 */
public static Object[][] provideStreamIds() {
    Object[][] cases = new Object[2][];
    // No explicit streamId: the default stream must be used.
    cases[0] = new Object[]{ null, Utils.DEFAULT_STREAM_ID };
    // Explicitly defined streamId is used as-is.
    cases[1] = new Object[]{ "SpecialStreamId", "SpecialStreamId" };
    return cases;
}