Java Code Examples for backtype.storm.tuple.Tuple

The following are top-voted examples showing how to use backtype.storm.tuple.Tuple. These examples are extracted from open-source projects. You can vote up the examples you like; your votes are used by our system to surface more high-quality examples.
Example 1
Project: fiware-sinfonier   File: BaseSinfonierDrain.java   View source code 6 votes vote down vote up
@Override
public final void execute(Tuple input, BasicOutputCollector collector) {
    // Keep a handle on the collector so userexecute() can emit through it.
    _collector = collector;

    if (isTickTuple(input)) {
        // Periodic tick tuples have a dedicated handling path.
        tickTupleCase();
        return;
    }

    try {
        // Deserialize the "map" field into a generic JSON map before
        // delegating to the subclass hook.
        this.rawJson = input.getStringByField("map");
        this.json = mapper.readValue(rawJson, new TypeReference<Map<String, Object>>() {});
        this.userexecute();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 2
Project: storm-demos   File: CalculateBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple tuple) {
		// Accumulate this tuple's data into the in-memory maps; emission
		// happens in batches when a new time bucket begins.
		this.saveMaps(tuple);
		// TODO: use an independent bolt instead of this method.
		// This mechanism may be inaccurate if data is sparse.
		if(this.isNewTimeBucke(this.timestamp)){
			logger.info("Crontab time: Emit maps !");
			logger.info("Before clean , size is  : " + this.tsdbMap.size() + "-" + this.hbaseMap.size() + "-"
					+ this.channelCountMap.size());
			long start = System.currentTimeMillis();
			this.timestamp = System.currentTimeMillis()/1000/this.sendCheckFreq + 1;//save as next send timestamp
			// Flush the accumulated maps to their downstream streams
			// (OpenTSDB/transfer and HBase), then reset state for the
			// next bucket.
			this.emitTsdbMap(ChannelTopology.OPENTSDB_STREAM,ChannelTopology.TRANSFER_STREAM,
					this.collector, this.tsdbMap, this.channelCountMap);
			this.emitHbaseMap(ChannelTopology.HBASE_STREAM, this.collector, this.hbaseMap);
			this.channelCountMap.clear();
			this.tsdbMap.clear();
			this.hbaseMap.clear();
			logger.info("After clean , size is  : " + this.tsdbMap.size() + "-" + this.hbaseMap.size() + "-"
					+ this.channelCountMap.size());
			logger.info("clean maps successful cost : " + (System.currentTimeMillis()-start));
		}
	}
 
Example 3
Project: storm-kafka-examples   File: CheckOrderBolt.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
	// Format today's date so the order's creation date can be validated.
	SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
	String today = df.format(new Date());

	String data = tuple.getString(0);
	// Expected tab-separated layout:
	//   id  memberid  totalprice  preprice  sendpay  createdate
	if (data == null || data.length() == 0) {
		return;
	}

	String[] fields = data.split("\t");
	if (fields.length != 6) {
		return;
	}

	String id = fields[0];
	String memberid = fields[1];
	String totalprice = fields[2];
	String preprice = fields[3];
	String sendpay = fields[4];
	String createdate = fields[5];

	// Forward only records with the mandatory identifiers present and a
	// creation date matching today's date.
	if (StringUtils.isNotEmpty(id) && StringUtils.isNotEmpty(memberid) && StringUtils.isNotEmpty(totalprice)
			&& DateUtils.isValidDate(createdate, today)) {
		collector.emit(new Values(id, memberid, totalprice, preprice, sendpay, createdate));
	}
}
 
Example 4
Project: hadooparchitecturebook   File: CalcMovingAvgBolt.java   View source code 6 votes vote down vote up
/**
 * For each ticker in the input stream, append the latest quote to the
 * ticker's sliding window and print the moving average.
 */
@Override
public void execute(Tuple tuple) {
  String ticker = tuple.getStringByField("ticker");
  String quote = tuple.getStringByField("price");

  Double num = Double.parseDouble(quote);
  // Fix: the original cast to a raw LinkedList, silently discarding the
  // element type; keep the generic type so the compiler checks the adds.
  @SuppressWarnings("unchecked")
  LinkedList<Double> window = (LinkedList<Double>) getQuotesForTicker(ticker);
  window.add(num);

  // Print to System.out for test purposes. In a real implementation this
  // would go to a downstream bolt for further processing, or persisted, etc.
  System.out.println("----------------------------------------");
  System.out.println("moving average for ticker " + ticker + "=" + getAvg(window));
  System.out.println("----------------------------------------");
}
 
Example 5
Project: miner   File: StoreBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple tuple) {
	long startTime = System.currentTimeMillis();
	String globalInfo = tuple.getString(0);
	String data = tuple.getString(1);
	try {
		String workspace_id = get_workspace_id(globalInfo);
		// Use a redis set per workspace to de-duplicate records.
		if (!jedis.sismember(workspace_id + "_unique", globalInfo)) {
			// Persist the record into HBase, then remember it in redis.
			ImportData.importData(data);
			logger.info(globalInfo + ":save into hbase succeed!");
			jedis.sadd(workspace_id + "_unique", globalInfo);
			_collector.ack(tuple);
		} else {
			logger.warn(globalInfo + ":已经存进数据库了.");
			// Fix: duplicates were previously never acked, so Storm replayed
			// them after the tuple timeout forever; ack marks them handled.
			_collector.ack(tuple);
		}
	} catch (Exception ex) {
		_collector.fail(tuple);
		logger.error("store error!" + MySysLogger.formatException(ex));
		ex.printStackTrace();
	}

	long endTime = System.currentTimeMillis();
	logger.info(globalInfo + "在StoreBolt的处理时间:" + (endTime - startTime) / 1000 + "s.");
}
 
Example 6
Project: erad2016-streamprocessing   File: PositiveSentimentBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("Calculating positive score");

    // Score = fraction of whitespace-separated tokens found in the
    // positive-words dictionary.
    Long tweetId = input.getLong(input.fieldIndex("tweet_id"));
    String tweetText = input.getString(input.fieldIndex("tweet_text"));

    Set<String> positive = PositiveWords.getWords();
    String[] tokens = tweetText.split(" ");

    int hits = 0;
    for (String token : tokens) {
        if (positive.contains(token)) {
            hits++;
        }
    }

    collector.emit(new Values(tweetId, (float) hits / tokens.length, tweetText));
}
 
Example 7
Project: erad2016-streamprocessing   File: TwitterFilterBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("filttering incoming tweets");
    String json = input.getString(0);

    try {
        // Keep only English tweets that carry both an id and text.
        JsonNode root = mapper.readValue(json, JsonNode.class);
        JsonNode lang = root.get("lang");

        if (lang == null || !"en".equals(lang.textValue())) {
            LOGGER.debug("Ignoring non-english tweet");
            return;
        }

        JsonNode idNode = root.get("id");
        JsonNode textNode = root.get("text");
        if (idNode == null || textNode == null) {
            LOGGER.debug("tweet id and/ or text was null");
            return;
        }

        collector.emit(new Values(idNode.longValue(), textNode.textValue()));
    } catch (IOException ex) {
        LOGGER.error("IO error while filtering tweets", ex);
        LOGGER.trace(null, ex);
    }
}
 
Example 8
Project: erad2016-streamprocessing   File: NodeNotifierBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    // Assemble the scored tweet into a JSON payload and POST it to the
    // configured web server.
    Long id      = input.getLong(input.fieldIndex("tweet_id"));
    String tweet = input.getString(input.fieldIndex("tweet_text"));
    Float pos    = input.getFloat(input.fieldIndex("pos_score"));
    Float neg    = input.getFloat(input.fieldIndex("neg_score"));
    String score = input.getString(input.fieldIndex("score"));

    // NOTE(review): the tweet text is interpolated without JSON escaping,
    // so quotes/backslashes in a tweet produce invalid JSON — confirm the
    // upstream sanitizes, or switch to a JSON library.
    String content = String.format(
        "{\"id\": \"%d\", "  +
        "\"text\": \"%s\", " +
        "\"pos\": \"%f\", "  +
        "\"neg\": \"%f\", "  +
        "\"score\": \"%s\" }",
        id, tweet, pos, neg, score);

    HttpPost post = new HttpPost(this.webserver);
    try {
        post.setEntity(new StringEntity(content));
        HttpResponse response = client.execute(post);
        org.apache.http.util.EntityUtils.consume(response.getEntity());
    } catch (Exception ex) {
        // On any failure, log and rebuild the HTTP client connection.
        LOGGER.error("exception thrown while attempting post", ex);
        LOGGER.trace(null, ex);
        reconnect();
    }
}
 
Example 9
Project: erad2016-streamprocessing   File: NegativeSentimentBolt.java   View source code 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    // Fix: log message previously misspelled as "negitive".
    LOGGER.debug("Calculating negative score");

    Long id     = input.getLong(input.fieldIndex("tweet_id"));
    String text = input.getString(input.fieldIndex("tweet_text"));

    // Score = fraction of whitespace-separated tokens found in the
    // negative-words dictionary.
    Set<String> negWords = NegativeWords.getWords();
    String[] words = text.split(" ");

    int numNegWords = 0;
    for (String word : words) {
        if (negWords.contains(word)) {
            numNegWords++;
        }
    }

    collector.emit(new Values(id, (float) numNegWords / words.length, text));
}
 
Example 10
Project: Get-ENVS   File: SplitSentence.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  // Tokenize the incoming sentence and emit each real word downstream.
  String sentence = tuple.getString(0);
  // Locale-aware word iterator over the sentence.
  BreakIterator boundary = BreakIterator.getWordInstance();
  boundary.setText(sentence);

  int start = boundary.first();
  for (int end = boundary.next(); end != BreakIterator.DONE; start = end, end = boundary.next()) {
    // BreakIterator also yields whitespace/punctuation runs; strip
    // whitespace so those collapse to the empty string and get skipped.
    String word = sentence.substring(start, end).replaceAll("\\s+", "");
    // isEmpty() is the idiomatic replacement for equals("").
    if (!word.isEmpty()) {
      collector.emit(new Values(word));
    }
  }
}
 
Example 11
Project: es-hadoop-v2.2.0   File: StormTupleBytesConverter.java   View source code 6 votes vote down vote up
@Override
public void convert(Object from, BytesArray to) {
    // Fix: the failure message is built eagerly, and the original called
    // from.getClass() unconditionally — a null 'from' therefore threw
    // NullPointerException here instead of reaching the empty-JSON path.
    Assert.isTrue(from == null || from instanceof Tuple,
            String.format("Unexpected object type, expecting [%s], given [%s]", Tuple.class,
                    from == null ? "null" : from.getClass()));

    // handle common cases
    Tuple tuple = (Tuple) from;

    // A missing or empty tuple serializes as an empty JSON document.
    if (tuple == null || tuple.size() == 0) {
        to.bytes("{}");
        return;
    }
    Assert.isTrue(tuple.size() == 1, "When using JSON input, only one field is expected");

    super.convert(tuple.getValue(0), to);
}
 
Example 12
Project: es-hadoop-v2.2.0   File: StormTupleFieldExtractor.java   View source code 6 votes vote down vote up
@Override
protected Object extractField(Object target) {
    // Walk the configured field names, drilling one level deeper into the
    // tuple per step; bail out with NOT_FOUND on any miss.
    for (String field : getFieldNames()) {
        if (!(target instanceof Tuple)) {
            return NOT_FOUND;
        }
        target = ((Tuple) target).getValueByField(field);
        if (target == null) {
            return NOT_FOUND;
        }
    }
    return target;
}
 
Example 13
Project: es-hadoop-v2.2.0   File: TestBolt.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple input) {
    // cleanup first to make sure the connection to ES is closed before the test suite shuts down

    // Once the DONE sentinel has been seen, drop all further tuples.
    if (done) {
        return;
    }

    if (log.isDebugEnabled()) {
        log.debug("Received tuple " + input);
    }
    // The test spout signals end-of-stream with a sentinel in field 0;
    // on seeing it, close the delegate and mark this component complete.
    if (TestSpout.DONE.equals(input.getValue(0))) {
        delegate.cleanup();
        done = true;
        MultiIndexSpoutStormSuite.COMPONENT_HAS_COMPLETED.decrement();
    }
    // Regular (non-sentinel) tuples are forwarded to the wrapped bolt.
    if (!done) {
        delegate.execute(input);
    }
}
 
Example 14
Project: RealEstate-Streaming   File: PhoenixJDBC.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple input) {
	LOG.info("About to process tuple[" + input + "]");

	// Lower-case every non-empty word of the sentence and emit it.
	String sentence = input.getString(0);
	for (String word : sentence.split(" ")) {
		String cleaned = word.trim();
		if (!cleaned.isEmpty()) {
			outputCollector.emit(new Values(cleaned.toLowerCase()));
		}
	}

	outputCollector.ack(input);
}
 
Example 15
Project: ignite-book-code-samples   File: WordCount.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
    // Extract the word and bump its running total; getOrDefault replaces
    // the explicit null-check-then-increment of the original.
    String word = tuple.getString(0);
    int count = counts.getOrDefault(word, 0) + 1;
    counts.put(word, count);

    // Emit a single-entry sorted map (word -> count) downstream.
    TreeMap<String, Integer> words = new TreeMap<>();
    words.put(word, count);
    collector.emit(new Values(words));

    logger.info("Emitting a count of " + count + " for word " + word);
}
 
Example 16
Project: ignite-book-code-samples   File: SplitSentence.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
    // Tokenize the incoming sentence with a locale-aware word iterator
    // and emit every non-whitespace token downstream.
    String sentence = tuple.getString(0);
    BreakIterator iterator = BreakIterator.getWordInstance();
    iterator.setText(sentence);

    int begin = iterator.first();
    int end = iterator.next();
    while (end != BreakIterator.DONE) {
        // Whitespace runs reduce to "" after the strip and are skipped.
        String token = sentence.substring(begin, end).replaceAll("\\s+", "");
        if (!token.equals("")) {
            collector.emit(new Values(token));
        }
        begin = end;
        end = iterator.next();
    }
}
 
Example 17
Project: storm-demo   File: CropBolt.java   View source code 6 votes vote down vote up
@Override
public void execute(Tuple tuple) {
    String message = tuple.getString(0);

    // to avoid NullPointerException
    if (message != null) {
        HashMap map = makeMapOfMessage(message);
        String domain = (String) map.get("domain");
        LOG.info("domain name of message {} is {}", tuple.getMessageId(), domain);
        String timestamp = (String) map.get("time_local");
        LOG.info("timestamp of message {} is {}", tuple.getMessageId(), timestamp);

        // Fix: guard against a missing "domain" key, which previously threw
        // NullPointerException on domain.endsWith(...).
        if (domain != null && domain.endsWith(ServerConfig.getUrlSuffix())) {
            String service = domain.split("\\.")[0];
            collector.emit(tuple, new Values(timestamp, message, service));
        }
    }

    // Fix: ack unconditionally. The original acked only matching tuples,
    // so null/filtered messages were replayed forever after the timeout.
    collector.ack(tuple);
}
 
Example 18
Project: Practical-Real-time-Processing-and-Analytics   File: TDRCassandraBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector arg1) {
	
	// Persist one TDR packet record into the Cassandra packet_tdr table.
	PacketDetailDTO packetDetailDTO = (PacketDetailDTO) input.getValueByField("tdrstream");
	System.out.println("field value "+ packetDetailDTO);
	// NOTE(review): the CQL statement is built by string concatenation; a
	// PreparedStatement would be safer, though all interpolated values here
	// come from numeric DTO getters.
	session.execute("INSERT INTO packet_tdr (phone_number, bin, bout, timestamp) VALUES ("
			+ packetDetailDTO.getPhoneNumber()
			+ ", "
			+ packetDetailDTO.getBin()
			+ ","
			+ packetDetailDTO.getBout()
			+ "," + packetDetailDTO.getTimestamp() + ")");
}
 
Example 19
Project: Practical-Real-time-Processing-and-Analytics   File: ParserBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
	// Parse a CSV record "phone,bin,bout,timestamp" into a DTO and emit it
	// on the "tdrstream" stream.
	String raw = input.getString(0);
	System.out.println("field value "+ raw);

	String[] parts = raw.split(",");
	PacketDetailDTO dto = new PacketDetailDTO();
	dto.setPhoneNumber(Long.parseLong(parts[0]));
	dto.setBin(Integer.parseInt(parts[1]));
	dto.setBout(Integer.parseInt(parts[2]));
	dto.setTimestamp(Long.parseLong(parts[3]));

	collector.emit("tdrstream", new Values(dto));
}
 
Example 20
Project: Practical-Real-time-Processing-and-Analytics   File: TDRCassandraBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector arg1) {
	// Persist one TDR packet record into the Cassandra packet_tdr table.
	PacketDetailDTO packet = (PacketDetailDTO) input.getValueByField("tdrstream");
	System.out.println("field value "+ packet);

	// Build the exact same CQL text as before, just assembled explicitly.
	StringBuilder cql = new StringBuilder("INSERT INTO packet_tdr (phone_number, bin, bout, timestamp) VALUES (");
	cql.append(packet.getPhoneNumber()).append(", ")
	   .append(packet.getBin()).append(",")
	   .append(packet.getBout()).append(",")
	   .append(packet.getTimestamp()).append(")");
	session.execute(cql.toString());
}
 
Example 21
Project: Practical-Real-time-Processing-and-Analytics   File: ParserBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
	// Split a "phone,bin,bout,timestamp" record and forward it as a DTO.
	String record = input.getString(0);
	System.out.println("field value "+ record);

	String[] columns = record.split(",");
	PacketDetailDTO packet = new PacketDetailDTO();
	packet.setPhoneNumber(Long.parseLong(columns[0]));
	packet.setBin(Integer.parseInt(columns[1]));
	packet.setBout(Integer.parseInt(columns[2]));
	packet.setTimestamp(Long.parseLong(columns[3]));

	collector.emit("tdrstream", new Values(packet));
}
 
Example 22
Project: Mastering-Apache-Storm   File: StormRedisBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
	// Copy the three expected person fields off the tuple and persist the
	// record under a freshly generated random key.
	Map<String, Object> record = new HashMap<String, Object>();
	for (String field : new String[] { "firstName", "lastName", "companyName" }) {
		record.put(field, input.getValueByField(field));
	}
	redisOperations.insert(record, UUID.randomUUID().toString());
}
 
Example 23
Project: storm-demo   File: SequenceFileBolt.java   View source code 5 votes vote down vote up
@Override
protected AbstractHDFSWriter makeNewWriter(Path path, Tuple tuple) throws IOException {
    // Create a new SequenceFile writer at the given path, configured with
    // the format's key/value classes and this bolt's compression settings.
    SequenceFile.Writer writer = SequenceFile.createWriter(
            this.hdfsConfig,
            SequenceFile.Writer.file(path),
            SequenceFile.Writer.keyClass(this.format.keyClass()),
            SequenceFile.Writer.valueClass(this.format.valueClass()),
            SequenceFile.Writer.compression(this.compressionType, this.codecFactory.getCodecByName(this.compressionCodec))
    );

    // Wrap the raw writer so rotation policy and formatting are applied.
    return new SequenceFileWriter(this.rotationPolicy, path, writer, this.format);
}
 
Example 24
Project: storm-scheduler   File: NothingPayloadBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input) {
    // Report receipt to the scheduling monitor unless it is disabled.
    if (!this.disableAniello) {
        taskMonitor.notifyTupleReceived(input);
    }

    // Pass both payload fields through, anchored to the input, then ack.
    Values passthrough = new Values(input.getString(0), input.getString(1));
    this.collector.emit(input, passthrough);
    this.collector.ack(input);
}
 
Example 25
Project: storm-scheduler   File: NothingBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input) {
    // Report receipt to the scheduling monitor unless it is disabled.
    if (!this.disableAniello) {
        taskMonitor.notifyTupleReceived(input);
    }

    // Forward the single expected field, anchored to the input, then ack.
    Values passthrough = new Values(input.getString(0));
    this.collector.emit(input, passthrough);
    this.collector.ack(input);
}
 
Example 26
Project: storm-hbase-1.0.x   File: HBaseBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
    byte[] rowKey = this.mapper.rowKey(tuple);
    ColumnList cols = this.mapper.columns(tuple);
    List<Mutation> mutations = hBaseClient.constructMutationReq(rowKey, cols, writeToWAL? Durability.SYNC_WAL : Durability.SKIP_WAL);

    try {
        this.hBaseClient.batchMutate(mutations);
    } catch(Exception e){
        LOG.warn("Failing tuple. Error writing rowKey " + rowKey, e);
        this.collector.fail(tuple);
        return;
    }

    this.collector.ack(tuple);
}
 
Example 27
Project: storm-hbase-1.0.x   File: HBaseLookupBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
    // Derive the HBase row key from the tuple and fetch the matching row,
    // optionally restricted by the configured projection criteria.
    byte[] rowKey = this.mapper.rowKey(tuple);
    Get get = hBaseClient.constructGetRequests(rowKey, projectionCriteria);

    try {
        Result result = hBaseClient.batchGet(Lists.newArrayList(get))[0];
        // Emit one output tuple per value row mapped from the result.
        for(Values values : rowToTupleMapper.toValues(result)) {
            this.collector.emit(values);
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        // NOTE(review): rowKey is a byte[]; concatenation logs the array
        // identity rather than the key contents.
        LOG.warn("Could not perform Lookup for rowKey =" + rowKey + " from Hbase.", e);
        this.collector.fail(tuple);
    }
}
 
Example 28
Project: reddit-sentiment-storm   File: SentimentCalculatorBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
	// Unpack the comment record emitted by the upstream spout.
	String subreddit = tuple.getStringByField("subreddit");
	String storyId = tuple.getStringByField("storyid");
	String storyURL = tuple.getStringByField("storyurl");
	String storyTitle = tuple.getStringByField("storytitle");
	String commentId = tuple.getStringByField("commentid");
	String comment = tuple.getStringByField("comment");
	long timestamp = tuple.getLongByField("timestamp");

	LOG.info("Received {}:{}:{}:{}:{}:[{}]", subreddit, storyId, storyURL, storyTitle, commentId, comment);

	// Sum the dictionary weight of every comment token present in the
	// sentiment map; unknown tokens contribute nothing.
	int sentimentScore = 0;
	for (String token : comment.split("\\s+")) {
		if (token == null || token.isEmpty()) {
			continue;
		}
		Long weight = sentimentData.get(token);
		if (weight != null) {
			sentimentScore += weight;
		}
	}

	// Forward the scored comment anchored to the input tuple, then ack.
	collector.emit(tuple, new Values(subreddit, storyId, storyURL, storyTitle,
			commentId, comment, sentimentScore, timestamp));
	LOG.info("Emit {}:{}:{}:{}:{}:{}:[{}]", subreddit, sentimentScore, storyId, storyURL,
			storyTitle, commentId, comment);

	collector.ack(tuple);
}
 
Example 29
Project: reddit-sentiment-storm   File: SummarizerBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
	// Unpack the scored comment produced by the sentiment calculator.
	String subreddit = tuple.getStringByField("subreddit");
	String storyId = tuple.getStringByField("storyid");
	String storyURL = tuple.getStringByField("storyurl");
	String storyTitle = tuple.getStringByField("storytitle");
	String commentId = tuple.getStringByField("commentid");
	String comment = tuple.getStringByField("comment");
	int sentimentScore = tuple.getIntegerByField("score");
	long timestamp = tuple.getLongByField("timestamp");

	LOG.info("Received {}:{}:{}:{}:{}:{}:[{}]", subreddit, sentimentScore, storyId, storyURL, 
			storyTitle, commentId, comment);
	
	// Ack immediately: the summary update below is in-memory and publishing
	// is best-effort, so there is nothing left that can fail the tuple.
	collector.ack(tuple);
	
	summary.update(subreddit, storyId, storyURL, storyTitle, commentId,
			comment, sentimentScore, timestamp);
	
	// Publish updated statistics only every 30 secs.
	long curTime = System.currentTimeMillis();
	if (lastPublishedTimestamp == 0 ) {
		// Since messages come one by one to Summarizer, publishing immediately on first message
		// will show just 1 comment and looks odd. Instead, mark now as last published time
		// so that by next publishing window, we'd have received a couple of comments to show meaningful
		// rankings.
		lastPublishedTimestamp = curTime;
		
	} else if (curTime - lastPublishedTimestamp > 30000) {
		
		LOG.info("Publishing statistics to ZK");
		this.publisher.publish(summary);
		lastPublishedTimestamp = curTime;
	}
}
 
Example 30
Project: storm-demos   File: MessageSplitBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
	// Each tuple carries a newline-separated batch of channel log lines.
	String[] channelLogs = tuple.getValue(0).toString().split("\n");
	for (String logString : channelLogs) {
		String[] logContent = logString.split(" ");
		// Fix: indices 1..6 are read below, so a truncated/malformed line
		// previously threw ArrayIndexOutOfBoundsException and crashed the
		// executor; skip such lines instead.
		if (logContent.length < 7) {
			continue;
		}
		String device = logContent[1];// device
		String channel = logContent[2];// channel
		String code = logContent[3];// code
		String count = logContent[4];// count
		String total = logContent[5];// total
		String ratio = logContent[6];// ratio
		collector.emit(new Values(device, channel, code, count, total, ratio));
	}
	this.collector.ack(tuple);
}
 
Example 31
Project: storm-demos   File: WriteHbaseBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
	// System tick tuples trigger a flush; everything else is written out.
	boolean isTick = tuple.getSourceComponent().equals(Constants.SYSTEM_COMPONENT_ID)
			&& tuple.getSourceStreamId().equals(Constants.SYSTEM_TICK_STREAM_ID);
	try {
		if (isTick) {
			logger.info("flush commit hbase !");
			table.flushCommits();
		} else {
			writeHbase(this.configure, tuple.getStringByField("rowkey"), hbaseColumnFamlity,
					tuple.getStringByField("column"), tuple.getStringByField("columnvalue"));
		}
	} catch (IOException e) {
		logger.error("Hbase save wrong !\n", e);
	}
}
 
Example 32
Project: splice-community-sample-code   File: MySqlSpliceBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
    // Fix: the original assigned fresh ArrayLists to both fields and then
    // immediately overwrote them — dead stores, removed here.
    fields = (ArrayList<String>) input.getFields().toList();
    fieldValues = (ArrayList<Object>) input.getValues();
    try {
        // Insert the tuple's fields/values as one row in the target table.
        communicator.insertRow(this.tableName, fields, fieldValues);
    } catch (SQLException e) {
        System.out.println("Exception occurred in adding a row");
        e.printStackTrace();
    }
}
 
Example 33
Project: splice-community-sample-code   File: SpliceDumperBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
    // Fix: the original assigned fresh ArrayLists to both fields and then
    // immediately overwrote them — dead stores, removed here.
    fields = (ArrayList<String>) input.getFields().toList();
    fieldValues = (ArrayList<Object>) input.getValues();
    try {
        // Insert the tuple's fields/values as one row in the target table.
        communicator.insertRow(this.tableName, fields, fieldValues);
    } catch (SQLException e) {
        System.out.println("Exception occurred in adding a row");
        e.printStackTrace();
    }
}
 
Example 34
Project: preliminary.demo   File: WordCount.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
    // Increment this word's running count; getOrDefault replaces the
    // explicit null check of the original.
    String word = tuple.getString(0);
    counts.put(word, counts.getOrDefault(word, 0) + 1);
    collector.ack(tuple);
}
 
Example 35
Project: preliminary.demo   File: WordCountLocal.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
    // Look up the previous total for this word (null on first sight),
    // store the incremented value, and ack the tuple.
    String word = tuple.getString(0);
    Integer previous = counts.get(word);
    int updated = (previous == null) ? 1 : previous + 1;
    counts.put(word, updated);
    collector.ack(tuple);
}
 
Example 36
Project: storm-demo   File: DefaultSequenceFormat.java   View source code 5 votes vote down vote up
@Override
public Writable key(Tuple tuple) {
    // Lazily allocate a single reusable LongWritable to avoid a fresh
    // allocation per tuple.
    if (key == null) {
        key = new LongWritable();
    }
    key.set(tuple.getLongByField(keyField));
    return key;
}
 
Example 37
Project: storm-kafka-examples   File: CounterBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
	// Unpack the six order fields by position and pass them straight through.
	List<Object> orderFields = tuple.getValues();

	String id = (String) orderFields.get(0);
	String memberid = (String) orderFields.get(1);
	String totalprice = (String) orderFields.get(2);
	String preprice = (String) orderFields.get(3);
	String sendpay = (String) orderFields.get(4);
	String createdate = (String) orderFields.get(5);

	collector.emit(new Values(id, memberid, totalprice, preprice, sendpay, createdate));

	// Log the running count of valid messages seen by this bolt.
	logger.info("+++++++++++++++++++++++++++++++++Valid+++++++++++++++++++++++++++++++++");
	logger.info("msg = "+orderFields+" [email protected]@[email protected]@[email protected] = "+(counter++));
	logger.info("+++++++++++++++++++++++++++++++++Valid+++++++++++++++++++++++++++++++++");
}
 
Example 38
Project: hadooparchitecturebook   File: ParseTicksBolt.java   View source code 5 votes vote down vote up
@Override
public void execute(Tuple tuple) {
  // Split a raw CSV tick record and forward fields 0 and 4
  // (symbol and price) downstream, then ack.
  String[] fields = tuple.getStringByField("tick").split(",");
  outputCollector.emit(new Values(fields[0], fields[4]));
  outputCollector.ack(tuple);
}
 
Example 39
Project: miner   File: FetchBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple tuple) {
    long startTime = System.currentTimeMillis();

    String globalInfo = tuple.getString(0);
    String downloadUrl = tuple.getString(1);
    String proxy = tuple.getString(2);
    String resource = "";
    try {
        if (proxy.equals("none")) {
            // Fetch without a proxy.
            resource = Crawl4HttpClient.downLoadPage(downloadUrl);
        } else {
            // Fetch through the supplied proxy.
            resource = Crawl4HttpClient.downLoadPage(downloadUrl, proxy);
        }
        // Fix: the null check must come first. The original evaluated
        // resource.equals("exception") / equals("") before testing
        // resource == null, so a null return threw NullPointerException
        // instead of failing the tuple.
        if (resource == null || resource.isEmpty()) {
            logger.warn(downloadUrl + "return null.");
            _collector.fail(tuple);
        } else if (resource.equals("exception")) {
            logger.error("fetch exception:" + downloadUrl);
            _collector.fail(tuple);
        } else if (resource.equals("error")) {
            logger.error("fetch error:" + downloadUrl);
            _collector.fail(tuple);
        } else {
            _collector.emit(tuple, new Values(globalInfo, resource));
            logger.info(downloadUrl + ":fetch succeed!" + resource);
            _collector.ack(tuple);
        }
    } catch (Exception ex) {
        logger.error("fetch error:" + downloadUrl + " error:" + MySysLogger.formatException(ex));
        _collector.fail(tuple);
    }

    long endTime = System.currentTimeMillis();
    logger.info(globalInfo + "在FetchBolt的处理时间:" + (endTime - startTime) / 1000 + "s.");
}
 
Example 40
Project: miner   File: PrintBolt.java   View source code 5 votes vote down vote up
public void execute(Tuple input) {
    // Print the tuple's first field and ack it; never let an exception
    // escape the execute() loop.
    try {
        String result = input.getString(0);
        System.out.println(result + "---");
        _collector.ack(input);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}