Java Code Examples for backtype.storm.tuple.Tuple#getStringByField()

The following examples show how to use backtype.storm.tuple.Tuple#getStringByField(). Each example is taken from an open source project; the source file, originating project, and license are noted above the code.
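Before the project examples, a minimal self-contained sketch of the typical call pattern may help: getStringByField(String) returns the value of the named tuple field as a String, where the field name must match one declared by the upstream component. The bolt below is illustrative only; the class name UppercaseBolt and the field name "word" are assumptions, not taken from the projects that follow.

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

// Illustrative bolt (an assumption for this page, not taken from the projects below):
// it reads the upstream-declared "word" field by name and re-emits it upper-cased.
public class UppercaseBolt extends BaseBasicBolt {

    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        // getStringByField looks the value up by the field name declared by the
        // upstream component, so it does not depend on the field's position.
        String word = tuple.getStringByField("word");
        collector.emit(new Values(word.toUpperCase()));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}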
Example 1
Source File: SVMBolt.java    From senti-storm with Apache License 2.0
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  String text = tuple.getStringByField("text");
  Map<Integer, Double> featureVector = (Map<Integer, Double>) tuple
      .getValueByField("featureVector");

  // Create feature nodes
  svm_node[] testNodes = new svm_node[featureVector.size()];
  int i = 0;
  for (Map.Entry<Integer, Double> feature : featureVector.entrySet()) {
    svm_node node = new svm_node();
    node.index = feature.getKey();
    node.value = feature.getValue();
    testNodes[i] = node;
    i++;
  }

  double predictedClass = svm.svm_predict(m_model, testNodes);

  if (m_logging) {
    LOG.info("Tweet: " + text + " predictedSentiment: "
        + SentimentClass.fromScore(m_dataset, (int) predictedClass));
  }
}
 
Example 2
Source File: POSTaggerBolt.java    From senti-storm with Apache License 2.0
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  String text = tuple.getStringByField("text");
  List<String> preprocessedTokens = (List<String>) tuple
      .getValueByField("preprocessedTokens");

  // POS Tagging
  List<TaggedToken> taggedTokens = tag(preprocessedTokens);

  if (m_logging) {
    LOG.info("Tweet: " + taggedTokens);
  }

  // Emit new tuples
  collector.emit(new Values(text, taggedTokens));
}
 
Example 3
Source File: CalcMovingAvgBolt.java    From hadoop-arch-book with Apache License 2.0
/**
 * For each ticker in input stream, calculate the moving average.
 */
@Override
public void execute(Tuple tuple) {
  String ticker = tuple.getStringByField("ticker");
  String quote = tuple.getStringByField("price");
    
  Double num = Double.parseDouble(quote);
  LinkedList<Double> window = (LinkedList<Double>) getQuotesForTicker(ticker);
  window.add(num);
    
  // Print to System.out for test purposes. In a real implementation this
  // would go to a downstream bolt for further processing, or persisted, etc.
  System.out.println("----------------------------------------");
  System.out.println("moving average for ticker " + ticker + "=" + getAvg(window)); 
  System.out.println("----------------------------------------");
}
 
Example 4
Source File: EmailExtractor.java    From C2-Github-commit-count with MIT License
@Override
public void execute(Tuple tuple,
                    BasicOutputCollector outputCollector) {
  String commit = tuple.getStringByField("commit");
  String[] parts = commit.split(" ");
  outputCollector.emit(new Values(parts[1]));
}
 
Example 5
Source File: WordCountBolt.java    From storm-example with Apache License 2.0
public void execute(Tuple tuple) {
    String word = tuple.getStringByField("word");
    Long count = this.counts.get(word);
    if(count == null){
        count = 0L;
    }
    count++;
    this.counts.put(word, count);
    this.collector.emit(new Values(word, count));
}
 
Example 6
Source File: DefaultTupleMapper.java    From storm-trident-elasticsearch with Apache License 2.0
@Override
public Document<String> map(Tuple input) {
    String id   = input.getStringByField(FIELD_ID);
    String name = input.getStringByField(FIELD_NAME);
    String type = input.getStringByField(FIELD_TYPE);
    String parentId = ( input.contains(FIELD_PARENT_ID) ) ? input.getStringByField(FIELD_PARENT_ID) : null;

    return new Document<>(name, type, sourceMapperStrategy.map(input), id, parentId);
}
 
Example 7
Source File: TestQueryBolt.java    From jstorm with Apache License 2.0
public void execute(Tuple input) {
  String date = input.getStringByField("date");
  try {
    fos.write((date + "\n").getBytes());
  } catch (IOException e) {
    e.printStackTrace();
  }
}
 
Example 8
Source File: ParseTicksBolt.java    From hadoop-arch-book with Apache License 2.0
@Override
public void execute(Tuple tuple) {
  String tick = tuple.getStringByField("tick");
  String[] parts = tick.split(",");
  outputCollector.emit(new Values(parts[0], parts[4]));
  outputCollector.ack(tuple); 
}
 
Example 9
Source File: FrameSerializer.java    From StormCV with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected Frame createObject(Tuple tuple) throws IOException {
	byte[] buffer = tuple.getBinaryByField(IMAGE);
	Frame frame;
	if(buffer == null){
		frame = new Frame(tuple, tuple.getStringByField(IMAGETYPE), null, tuple.getLongByField(TIMESTAMP), (Rectangle)tuple.getValueByField(BOUNDINGBOX));
	}else{
		frame = new Frame(tuple, tuple.getStringByField(IMAGETYPE), buffer, tuple.getLongByField(TIMESTAMP), (Rectangle)tuple.getValueByField(BOUNDINGBOX));
	}
	frame.getFeatures().addAll((List<Feature>)tuple.getValueByField(FEATURES));
	return frame;
}
 
Example 10
Source File: FeatureSerializer.java    From StormCV with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected Feature createObject(Tuple tuple) throws IOException {
	List<Descriptor> sparseDescriptors = (List<Descriptor>) tuple.getValueByField(SPARSE_DESCR);
	float[][][] denseDescriptors = (float[][][])tuple.getValueByField(DENSE_DESCR);
	Feature feature = new Feature(tuple, tuple.getStringByField(NAME), tuple.getLongByField(DURATION), sparseDescriptors, denseDescriptors);
	return feature;
}
 
Example 11
Source File: FlowInfo.java    From flowmix with Apache License 2.0
public FlowInfo(Tuple tuple) {
  flowId = tuple.getStringByField(FLOW_ID);
  event = (Event) tuple.getValueByField(EVENT);
  idx = tuple.getIntegerByField(FLOW_OP_IDX);
  idx++;
  streamName = tuple.getStringByField(STREAM_NAME);
  previousStream = tuple.getStringByField(LAST_STREAM);

  if(tuple.contains(PARTITION))
    partition = tuple.getStringByField(PARTITION);
}
 
Example 12
Source File: SplitSentenceBolt.java    From storm-example with Apache License 2.0
public void execute(Tuple tuple) {
    String sentence = tuple.getStringByField("sentence");
    String[] words = sentence.split(" ");
    for(String word : words){
        this.collector.emit(tuple, new Values(word));
    }
    this.collector.ack(tuple);
}
 
Example 13
Source File: TopologyDataPersistBolt.java    From eagle with Apache License 2.0
@Override
public void execute(Tuple input) {
    if (input == null) {
        return;
    }
    String serviceName = input.getStringByField(TopologyConstants.SERVICE_NAME_FIELD);
    TopologyEntityParserResult result = (TopologyEntityParserResult) input.getValueByField(TopologyConstants.TOPOLOGY_DATA_FIELD);
    Set<String> availableHostNames = new HashSet<String>();
    List<TopologyBaseAPIEntity> entitiesForDeletion = new ArrayList<>();
    List<TopologyBaseAPIEntity> entitiesToWrite = new ArrayList<>();

    filterEntitiesToWrite(result, availableHostNames, entitiesToWrite);

    String query = String.format("%s[@site=\"%s\"]{*}", serviceName, this.config.dataExtractorConfig.site);
    try {
        GenericServiceAPIResponseEntity<TopologyBaseAPIEntity> response = client.search().query(query).pageSize(Integer.MAX_VALUE).send();
        if (response.isSuccess() && response.getObj() != null) {
            for (TopologyBaseAPIEntity entity : response.getObj()) {
                if (!availableHostNames.isEmpty() && !availableHostNames.contains(generatePersistKey(entity))) {
                    entitiesForDeletion.add(entity);
                }
            }
        }
        deleteEntities(entitiesForDeletion, serviceName);
        writeEntities(entitiesToWrite, result.getMetrics(), serviceName);
        emitToKafkaBolt(result);
        this.collector.ack(input);
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        this.collector.fail(input);
    }
}
 
Example 14
Source File: PreprocessorBolt.java    From senti-storm with Apache License 2.0
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  String text = tuple.getStringByField("text");
  List<String> tokens = (List<String>) tuple.getValueByField("tokens");

  // Preprocess
  List<String> preprocessedTokens = m_preprocessor.preprocess(tokens);

  if (m_logging) {
    LOG.info("Tweet: " + preprocessedTokens);
  }

  // Emit new tuples
  collector.emit(new Values(text, preprocessedTokens));
}
 
Example 15
Source File: SplitSentenceBolt.java    From storm-example with Apache License 2.0
public void execute(Tuple tuple) {
    String sentence = tuple.getStringByField("sentence");
    String[] words = sentence.split(" ");
    for(String word : words){
        this.collector.emit(new Values(word));
    }
}
 
Example 16
Source File: ReportBolt.java    From storm-example with Apache License 2.0
public void execute(Tuple tuple) {
    String word = tuple.getStringByField("word");
    Long count = tuple.getLongByField("count");
    this.counts.put(word, count);
    this.collector.ack(tuple);
}
 
Example 17
Source File: TotalCount.java    From jstorm with Apache License 2.0
@Override
public void execute(Tuple input) {
    
    if (TupleHelpers.isTickTuple(input)) {
        LOG.info("Receive one Ticket Tuple " + input.getSourceComponent());
        return;
    }
    if (input.getSourceStreamId().equals(SequenceTopologyDef.CONTROL_STREAM_ID)) {
        String str = (input.getStringByField("CONTROL"));
        LOG.warn(str);
        return;
    }
    
    long before = System.currentTimeMillis();
    myCounter.update(1);
    tpCounter.update(1);
    myMeter.update(1);

    if (checkTupleId) {
        Long tupleId = input.getLong(0);
        if (tupleId <= lastTupleId) {

            /***
             * Display warning
             */
            String errorMessage = ("LastTupleId is " + lastTupleId + ", but now:" + tupleId);

            JStormUtils.reportError(context, errorMessage);
        }
        lastTupleId = tupleId;
    }

    TradeCustomer tradeCustomer;
    try {
        tradeCustomer = (TradeCustomer) input.getValue(1);
    } catch (Exception e) {
        LOG.error(input.getSourceComponent() + "  " + input.getSourceTask() + " " + input.getSourceStreamId()
                + " target " +  input);
        throw new RuntimeException(e);
    }

    tradeSum.addAndGet(tradeCustomer.getTrade().getValue());
    customerSum.addAndGet(tradeCustomer.getCustomer().getValue());

    collector.ack(input);

    long now = System.currentTimeMillis();
    long spend = now - tradeCustomer.getTimestamp();

    tpsCounter.count(spend);
    myJStormHistogram.update(now - before);

    if (slowDonw) {
        JStormUtils.sleepMs(20);
    }

}
 
Example 18
Source File: ReportBolt.java    From storm-example with Apache License 2.0
public void execute(Tuple tuple) {
    String word = tuple.getStringByField("word");
    Long count = tuple.getLongByField("count");
    this.counts.put(word, count);
}
 
Example 19
Source File: BatchBolt.java    From StormCV with Apache License 2.0
/**
 * Deserializes a Tuple into a CVParticle type.
 * @param tuple the tuple to deserialize
 * @return the deserialized CVParticle
 * @throws IOException if the tuple cannot be deserialized
 */
private CVParticle deserialize(Tuple tuple) throws IOException{
	String typeName = tuple.getStringByField(CVParticleSerializer.TYPE);
	return serializers.get(typeName).fromTuple(tuple);
}
 
Example 20
Source File: CVParticleBolt.java    From StormCV with Apache License 2.0
/**
 * Deserializes a Tuple into a CVParticle type.
 * @param tuple the tuple to deserialize
 * @return the deserialized CVParticle
 * @throws IOException if the tuple cannot be deserialized
 */
protected CVParticle deserialize(Tuple tuple) throws IOException{
	String typeName = tuple.getStringByField(CVParticleSerializer.TYPE);
	return serializers.get(typeName).fromTuple(tuple);
}