Java Code Examples for backtype.storm.tuple.Tuple

The following are top-voted examples showing how to use backtype.storm.tuple.Tuple. These examples are extracted from open-source projects. You can vote up the examples you find useful; your votes help the system surface higher-quality examples.
Example 1
Project: fiware-sinfonier   File: BaseSinfonierDrain.java   Source Code and License 6 votes vote down vote up
@Override
public final void execute(Tuple input, BasicOutputCollector collector) {
    // Keep a handle on the collector so subclass hooks can emit through it.
    _collector = collector;

    // Tick tuples trigger periodic housekeeping instead of carrying data.
    if (isTickTuple(input)) {
        tickTupleCase();
        return;
    }

    try {
        // Deserialize the "map" field into a generic JSON map, then delegate
        // to the subclass-provided processing hook.
        this.rawJson = input.getStringByField("map");
        this.json = mapper.readValue(rawJson, new TypeReference<Map<String, Object>>() {});
        this.userexecute();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 2
Project: storm-demos   File: CalculateBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Buffers the incoming tuple into the in-memory maps and, when a new time
 * bucket begins, flushes everything downstream (OpenTSDB/transfer streams
 * plus the HBase stream) and clears the buffers.
 */
public void execute(Tuple tuple) {	

		this.saveMaps(tuple);
		// TODO : Use an independent bolt instead of this method.
		// This flush-on-next-tuple mechanism may be inaccurate if data is
		// sparse: nothing is emitted until another tuple arrives after the
		// time bucket rolls over.
		if(this.isNewTimeBucke(this.timestamp)){
			logger.info("Crontab time: Emit maps !");
			logger.info("Before clean , size is  : " + this.tsdbMap.size() + "-" + this.hbaseMap.size() + "-"
					+ this.channelCountMap.size());
			long start = System.currentTimeMillis();
			// Advance to the next send window (bucket index = seconds / sendCheckFreq + 1).
			this.timestamp = System.currentTimeMillis()/1000/this.sendCheckFreq + 1;//save as next send timestamp
			this.emitTsdbMap(ChannelTopology.OPENTSDB_STREAM,ChannelTopology.TRANSFER_STREAM,
					this.collector, this.tsdbMap, this.channelCountMap);
			this.emitHbaseMap(ChannelTopology.HBASE_STREAM, this.collector, this.hbaseMap);
			this.channelCountMap.clear();
			this.tsdbMap.clear();
			this.hbaseMap.clear();
			logger.info("After clean , size is  : " + this.tsdbMap.size() + "-" + this.hbaseMap.size() + "-"
					+ this.channelCountMap.size());
			logger.info("clean maps successful cost : " + (System.currentTimeMillis()-start));
		}
	}
 
Example 3
Project: storm-kafka-examples   File: CheckOrderBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Validates a tab-separated order record and forwards well-formed, fresh
 * orders downstream unchanged.
 */
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
	SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");// date format used for the freshness check
	String nowData = df.format(new Date()); // current system time, compared against the order date below

	String data = tuple.getString(0);
	// Field layout (tab-separated):
	// order id | member id | original price | discounted price | flag field | order time
	// id       | memberid  | totalprice     | preprice         | sendpay    | createdate
	if(data!=null && data.length()>0) {
		String[] values = data.split("\t");
		if(values.length==6) {
			String id = values[0];
			String memberid = values[1];
			String totalprice = values[2];
			String preprice = values[3];
			String sendpay = values[4];
			String createdate = values[5];
			
			// Only forward records whose key fields are present and whose
			// date passes the project's validity check against today.
			if(StringUtils.isNotEmpty(id)&&StringUtils.isNotEmpty(memberid)&&StringUtils.isNotEmpty(totalprice)) {
				if(DateUtils.isValidDate(createdate, nowData)) {
					collector.emit(new Values(id,memberid,totalprice,preprice,sendpay,createdate));
				}
			}
		}
	}
}
 
Example 4
Project: hadooparchitecturebook   File: CalcMovingAvgBolt.java   Source Code and License 6 votes vote down vote up
/**
 * For each ticker in the input stream, append the latest price to that
 * ticker's window and print the window's moving average.
 */
@Override
public void execute(Tuple tuple) {
  String ticker = tuple.getStringByField("ticker");
  String quote = tuple.getStringByField("price");

  Double num = Double.parseDouble(quote);
  // Fix: the original used a raw (LinkedList) cast, silently erasing the
  // element type; keep the cast but preserve the generic parameter.
  LinkedList<Double> window = (LinkedList<Double>) getQuotesForTicker(ticker);
  window.add(num);

  // Print to System.out for test purposes. In a real implementation this
  // would go to a downstream bolt for further processing, or persisted, etc.
  System.out.println("----------------------------------------");
  System.out.println("moving average for ticker " + ticker + "=" + getAvg(window)); 
  System.out.println("----------------------------------------");
}
 
Example 5
Project: miner   File: StoreBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Persists a crawled record into HBase exactly once, using the Redis set
 * "&lt;workspace&gt;_unique" for de-duplication, and acks/fails the tuple
 * accordingly.
 */
public void execute(Tuple tuple) {
	long startTime=System.currentTimeMillis();
       String globalInfo  = tuple.getString(0);
       String data = tuple.getString(1);
	try {
           String workspace_id = get_workspace_id(globalInfo);
           // De-duplicate via Redis: only store records not seen before.
           if(!jedis.sismember(workspace_id+"_unique", globalInfo)) {
               // Persist into HBase, then remember the key in Redis.
               ImportData.importData(data);
               logger.info(globalInfo + ":save into hbase succeed!");
               jedis.sadd(workspace_id+"_unique", globalInfo);
               _collector.ack(tuple);
           }else{
               logger.warn(globalInfo+":已经存进数据库了.");
               // Fix: duplicates were previously neither acked nor failed,
               // so Storm would time them out and replay forever. A
               // duplicate is already persisted — ack it.
               _collector.ack(tuple);
           }
	} catch (Exception ex) {
		_collector.fail(tuple);
		logger.error("store error!"+MySysLogger.formatException(ex));
		ex.printStackTrace();
	}

       long endTime=System.currentTimeMillis();
       logger.info(globalInfo+"在StoreBolt的处理时间:"+(endTime-startTime)/1000+"s.");
}
 
Example 6
Project: erad2016-streamprocessing   File: PositiveSentimentBolt.java   Source Code and License 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("Calculating positive score");

    // Pull the tweet id and body out of the tuple by field name.
    Long id     = input.getLong(input.fieldIndex("tweet_id"));
    String text = input.getString(input.fieldIndex("tweet_text"));

    // Count how many space-separated tokens appear in the positive lexicon.
    Set<String> posWords = PositiveWords.getWords();
    String[] tokens = text.split(" ");

    int total = tokens.length;
    int positive = 0;
    for (int i = 0; i < tokens.length; i++) {
        if (posWords.contains(tokens[i])) {
            positive++;
        }
    }

    // Emit the fraction of positive tokens as this tweet's positive score.
    collector.emit(new Values(id, (float) positive / total, text));
}
 
Example 7
Project: erad2016-streamprocessing   File: TwitterFilterBolt.java   Source Code and License 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("filttering incoming tweets");
    String json = input.getString(0);

    try {
        JsonNode root = mapper.readValue(json, JsonNode.class);

        // Keep only English-language tweets that carry both an id and text.
        JsonNode lang = root.get("lang");
        if (lang == null || !"en".equals(lang.textValue())) {
            LOGGER.debug("Ignoring non-english tweet");
        } else {
            JsonNode idNode = root.get("id");
            JsonNode textNode = root.get("text");
            if (idNode != null && textNode != null) {
                long id = idNode.longValue();
                String text = textNode.textValue();
                collector.emit(new Values(id, text));
            } else {
                LOGGER.debug("tweet id and/ or text was null");
            }
        }
    } catch (IOException ex) {
        LOGGER.error("IO error while filtering tweets", ex);
        LOGGER.trace(null, ex);
    }
}
 
Example 8
Project: erad2016-streamprocessing   File: NodeNotifierBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Posts the final sentiment result for a tweet to the configured web server
 * as a small JSON document.
 *
 * NOTE(review): the JSON body is assembled with String.format and the tweet
 * text is not escaped — a tweet containing a double quote or backslash will
 * produce malformed JSON. Consider using a JSON library for serialization.
 */
public void execute(Tuple input, BasicOutputCollector collector) {
    Long id      = input.getLong(input.fieldIndex("tweet_id"));
    String tweet = input.getString(input.fieldIndex("tweet_text"));
    Float pos    = input.getFloat(input.fieldIndex("pos_score"));
    Float neg    = input.getFloat(input.fieldIndex("neg_score"));
    String score = input.getString(input.fieldIndex("score"));

    HttpPost post = new HttpPost(this.webserver);
    String content = String.format(
        "{\"id\": \"%d\", "  +
        "\"text\": \"%s\", " +
        "\"pos\": \"%f\", "  +
        "\"neg\": \"%f\", "  +
        "\"score\": \"%s\" }",
        id, tweet, pos, neg, score);

    try {
        post.setEntity(new StringEntity(content));
        HttpResponse response = client.execute(post);
        // Consume the entity so the pooled connection can be released.
        org.apache.http.util.EntityUtils.consume(response.getEntity());
    } catch (Exception ex) {
        LOGGER.error("exception thrown while attempting post", ex);
        LOGGER.trace(null, ex);
        // Presumably re-establishes the HTTP client for the next tuple —
        // verify reconnect()'s contract.
        reconnect();
    }
}
 
Example 9
Project: erad2016-streamprocessing   File: NegativeSentimentBolt.java   Source Code and License 6 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("Calculating negitive score");

    // Pull the tweet id and body out of the tuple by field name.
    Long id     = input.getLong(input.fieldIndex("tweet_id"));
    String text = input.getString(input.fieldIndex("tweet_text"));

    // Count how many space-separated tokens appear in the negative lexicon.
    Set<String> negWords = NegativeWords.getWords();
    String[] tokens = text.split(" ");

    int total = tokens.length;
    int negative = 0;
    for (int i = 0; i < tokens.length; i++) {
        if (negWords.contains(tokens[i])) {
            negative++;
        }
    }

    // Emit the fraction of negative tokens as this tweet's negative score.
    collector.emit(new Values(id, (float) negative / total, text));
}
 
Example 10
Project: Get-ENVS   File: SplitSentence.java   Source Code and License 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  // Tokenize the incoming sentence with a locale-aware word iterator and
  // emit every non-whitespace token as its own tuple.
  String sentence = tuple.getString(0);
  BreakIterator words = BreakIterator.getWordInstance();
  words.setText(sentence);

  int begin = words.first();
  int finish = words.next();
  while (finish != BreakIterator.DONE) {
    // Collapse whitespace-only tokens to the empty string so they are skipped.
    String token = sentence.substring(begin, finish).replaceAll("\\s+", "");
    if (!token.equals("")) {
      collector.emit(new Values(token));
    }
    begin = finish;
    finish = words.next();
  }
}
 
Example 11
Project: es-hadoop-v2.2.0   File: StormTupleBytesConverter.java   Source Code and License 6 votes vote down vote up
/**
 * Converts a Storm tuple (expected to hold exactly one pre-serialized JSON
 * field) into bytes; a null or empty tuple becomes the empty JSON object.
 */
@Override
public void convert(Object from, BytesArray to) {
    // Fix: the failure message is built eagerly, so calling from.getClass()
    // when from == null threw a NullPointerException before the assertion
    // could ever report anything. Render "null" explicitly instead.
    Assert.isTrue(from == null || from instanceof Tuple,
            String.format("Unexpected object type, expecting [%s], given [%s]", Tuple.class,
                    from == null ? "null" : from.getClass()));

    // handle common cases
    Tuple tuple = (Tuple) from;

    if (tuple == null || tuple.size() == 0) {
        to.bytes("{}");
        return;
    }
    Assert.isTrue(tuple.size() == 1, "When using JSON input, only one field is expected");

    super.convert(tuple.getValue(0), to);
}
 
Example 12
Project: es-hadoop-v2.2.0   File: StormTupleFieldExtractor.java   Source Code and License 6 votes vote down vote up
@Override
protected Object extractField(Object target) {
    // Walk the configured field path, descending one tuple level per name.
    // Any step that is not a Tuple, or resolves to null, ends the lookup.
    for (String field : getFieldNames()) {
        if (!(target instanceof Tuple)) {
            return NOT_FOUND;
        }
        target = ((Tuple) target).getValueByField(field);
        if (target == null) {
            return NOT_FOUND;
        }
    }
    return target;
}
 
Example 13
Project: es-hadoop-v2.2.0   File: TestBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Test harness wrapper: forwards tuples to the delegate bolt until the
 * spout's DONE sentinel arrives, then cleans up and ignores further input.
 */
@Override
public void execute(Tuple input) {
    // cleanup first to make sure the connection to ES is closed before the test suite shuts down

    // Once the sentinel has been seen, all further tuples are ignored.
    if (done) {
        return;
    }

    if (log.isDebugEnabled()) {
        log.debug("Received tuple " + input);
    }
    // The spout signals end-of-stream with a sentinel in field 0; on seeing
    // it, close the delegate and mark this component complete.
    if (TestSpout.DONE.equals(input.getValue(0))) {
        delegate.cleanup();
        done = true;
        MultiIndexSpoutStormSuite.COMPONENT_HAS_COMPLETED.decrement();
    }
    // Ordinary tuples (anything before the sentinel) are forwarded.
    if (!done) {
        delegate.execute(input);
    }
}
 
Example 14
Project: RealEstate-Streaming   File: PhoenixJDBC.java   Source Code and License 6 votes vote down vote up
@Override
public void execute(Tuple input) {
    LOG.info("About to process tuple[" + input + "]");

    // Split the sentence on single spaces and emit each non-empty,
    // lower-cased word downstream.
    String[] words = input.getString(0).split(" ");
    for (String candidate : words) {
        String cleaned = candidate.trim();
        if (!cleaned.isEmpty()) {
            outputCollector.emit(new Values(cleaned.toLowerCase()));
        }
    }

    outputCollector.ack(input);
}
 
Example 15
Project: ignite-book-code-samples   File: WordCount.java   Source Code and License 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
    // Pull the word off the tuple and bump its running total, treating a
    // never-seen word as zero.
    String word = tuple.getString(0);
    Integer previous = counts.get(word);
    int updated = (previous == null) ? 1 : previous + 1;
    counts.put(word, updated);

    // Package the word with its fresh count into a single-entry sorted map
    // and emit it downstream.
    TreeMap<String, Integer> words = new TreeMap<>();
    words.put(word, updated);
    collector.emit(new Values(words));

    logger.info("Emitting a count of " + updated + " for word " + word);
}
 
Example 16
Project: ignite-book-code-samples   File: SplitSentence.java   Source Code and License 6 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
    // Walk the sentence with a locale-aware word iterator, emitting every
    // token that is not purely whitespace.
    String sentence = tuple.getString(0);
    BreakIterator iterator = BreakIterator.getWordInstance();
    iterator.setText(sentence);

    int left = iterator.first();
    for (int right = iterator.next(); right != BreakIterator.DONE; right = iterator.next()) {
        // Strip all whitespace from the token; whitespace-only tokens
        // collapse to "" and are skipped.
        String token = sentence.substring(left, right).replaceAll("\\s+", "");
        if (!token.equals("")) {
            collector.emit(new Values(token));
        }
        left = right;
    }
}
 
Example 17
Project: storm-demo   File: CropBolt.java   Source Code and License 6 votes vote down vote up
/**
 * Parses a raw log message, extracts its domain and timestamp, and forwards
 * (timestamp, message, service) for domains matching the configured suffix.
 */
@Override
public void execute(Tuple tuple) {
    String message = tuple.getString(0);

    // to avoid NullPointerException
    if (message != null) {
        HashMap map = makeMapOfMessage(message);
        String domain = (String) map.get("domain");
        LOG.info("domain name of message {} is {}", tuple.getMessageId(), domain);
        String timestamp = (String) map.get("time_local");
        LOG.info("timestamp of message {} is {}", tuple.getMessageId(), timestamp);

        // Fix: a message with no "domain" field previously threw an NPE on
        // endsWith(); such messages are now skipped.
        if (domain != null && domain.endsWith(ServerConfig.getUrlSuffix())) {
            String service = domain.split("\\.")[0];
            collector.emit(tuple, new Values(timestamp, message, service));
            collector.ack(tuple);
        }
        // NOTE(review): non-matching tuples are never acked or failed here,
        // so they will time out and be replayed — confirm this is intended.
    }
}
 
Example 18
Project: Practical-Real-time-Processing-and-Analytics   File: TDRCassandraBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Writes one TDR packet record into the Cassandra packet_tdr table.
 *
 * NOTE(review): the CQL statement is built by string concatenation. The
 * interpolated values are numeric getters here, but a prepared statement
 * with bind variables would be both safer and faster under load.
 */
public void execute(Tuple input, BasicOutputCollector arg1) {
	
	PacketDetailDTO packetDetailDTO = (PacketDetailDTO) input.getValueByField("tdrstream");
	System.out.println("field value "+ packetDetailDTO);
	session.execute("INSERT INTO packet_tdr (phone_number, bin, bout, timestamp) VALUES ("
			+ packetDetailDTO.getPhoneNumber()
			+ ", "
			+ packetDetailDTO.getBin()
			+ ","
			+ packetDetailDTO.getBout()
			+ "," + packetDetailDTO.getTimestamp() + ")");
}
 
Example 19
Project: Practical-Real-time-Processing-and-Analytics   File: ParserBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Parses a CSV record "phone,bin,bout,timestamp" into a PacketDetailDTO and
 * emits it on the "tdrstream" stream.
 */
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
	String valueByField = input.getString(0);
	System.out.println("field value "+ valueByField);
	String[] split = valueByField.split(",");
	// Fix: a malformed record previously crashed the bolt with
	// ArrayIndexOutOfBoundsException / NumberFormatException; drop such
	// records instead of killing the worker.
	if (split.length < 4) {
		return;
	}
	PacketDetailDTO tdrPacketDetailDTO = new PacketDetailDTO();
	try {
		tdrPacketDetailDTO.setPhoneNumber(Long.parseLong(split[0]));
		tdrPacketDetailDTO.setBin(Integer.parseInt(split[1]));
		tdrPacketDetailDTO.setBout(Integer.parseInt(split[2]));
		tdrPacketDetailDTO.setTimestamp(Long.parseLong(split[3]));
	} catch (NumberFormatException e) {
		return; // skip records with non-numeric fields
	}

	collector.emit("tdrstream", new Values(tdrPacketDetailDTO));
}
 
Example 20
Project: Practical-Real-time-Processing-and-Analytics   File: TDRCassandraBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Writes one TDR packet record into the Cassandra packet_tdr table.
 *
 * NOTE(review): duplicate of the TDRCassandraBolt above; the CQL is built by
 * string concatenation — a prepared statement with bind variables would be
 * safer and faster under load.
 */
public void execute(Tuple input, BasicOutputCollector arg1) {
	
	PacketDetailDTO packetDetailDTO = (PacketDetailDTO) input.getValueByField("tdrstream");
	System.out.println("field value "+ packetDetailDTO);
	session.execute("INSERT INTO packet_tdr (phone_number, bin, bout, timestamp) VALUES ("
			+ packetDetailDTO.getPhoneNumber()
			+ ", "
			+ packetDetailDTO.getBin()
			+ ","
			+ packetDetailDTO.getBout()
			+ "," + packetDetailDTO.getTimestamp() + ")");
}
 
Example 21
Project: Practical-Real-time-Processing-and-Analytics   File: ParserBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Parses a CSV record "phone,bin,bout,timestamp" into a PacketDetailDTO and
 * emits it on the "tdrstream" stream.
 *
 * NOTE(review): assumes at least 4 comma-separated numeric fields; a
 * malformed record throws ArrayIndexOutOfBoundsException or
 * NumberFormatException out of execute() — confirm upstream guarantees.
 */
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
	String valueByField = input.getString(0);
	System.out.println("field value "+ valueByField);
	String[] split = valueByField.split(",");
	PacketDetailDTO tdrPacketDetailDTO = new PacketDetailDTO();
	tdrPacketDetailDTO.setPhoneNumber(Long.parseLong(split[0]));
	tdrPacketDetailDTO.setBin(Integer.parseInt(split[1]));
	tdrPacketDetailDTO.setBout(Integer.parseInt(split[2]));
	tdrPacketDetailDTO.setTimestamp(Long.parseLong(split[3]));

	collector.emit("tdrstream", new Values(tdrPacketDetailDTO));
}
 
Example 22
Project: Mastering-Apache-Storm   File: StormRedisBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
	// Copy the three expected person fields off the tuple and persist the
	// record under a freshly generated random UUID key.
	Map<String, Object> record = new HashMap<String, Object>();
	for (String field : new String[] {"firstName", "lastName", "companyName"}) {
		record.put(field, input.getValueByField(field));
	}
	redisOperations.insert(record, UUID.randomUUID().toString());
}
 
Example 23
Project: storm-demo   File: SequenceFileBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Creates a new Hadoop SequenceFile writer at the given path, configured
 * with this bolt's key/value classes and compression settings, and wraps it
 * in the rotation-aware SequenceFileWriter.
 *
 * @param path  HDFS path for the new file
 * @param tuple the tuple that triggered writer creation (unused here)
 * @throws IOException if the underlying writer cannot be created
 */
@Override
protected AbstractHDFSWriter makeNewWriter(Path path, Tuple tuple) throws IOException {
    SequenceFile.Writer writer = SequenceFile.createWriter(
            this.hdfsConfig,
            SequenceFile.Writer.file(path),
            SequenceFile.Writer.keyClass(this.format.keyClass()),
            SequenceFile.Writer.valueClass(this.format.valueClass()),
            SequenceFile.Writer.compression(this.compressionType, this.codecFactory.getCodecByName(this.compressionCodec))
    );

    return new SequenceFileWriter(this.rotationPolicy, path, writer, this.format);
}
 
Example 24
Project: storm-scheduler   File: NothingPayloadBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple input) {
    // Report the tuple to the scheduling task monitor unless disabled.
    if (!this.disableAniello) {
        taskMonitor.notifyTupleReceived(input);
    }

    // Pass both string fields through unchanged, anchored to the input.
    String first = input.getString(0);
    String second = input.getString(1);
    this.collector.emit(input, new Values(first, second));
    this.collector.ack(input);
}
 
Example 25
Project: storm-scheduler   File: NothingBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple input) {
    // Report the tuple to the scheduling task monitor unless disabled.
    if (!this.disableAniello) {
        taskMonitor.notifyTupleReceived(input);
    }

    // Forward the single string field unchanged, anchored to the input.
    String payload = input.getString(0); // we assume there is only one field
    this.collector.emit(input, new Values(payload));
    this.collector.ack(input);
}
 
Example 26
Project: storm-hbase-1.0.x   File: HBaseBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Maps the tuple to an HBase row key and column list, applies the batched
 * mutations, and acks on success / fails the tuple on any write error.
 */
public void execute(Tuple tuple) {
    byte[] rowKey = this.mapper.rowKey(tuple);
    ColumnList cols = this.mapper.columns(tuple);
    List<Mutation> mutations = hBaseClient.constructMutationReq(rowKey, cols, writeToWAL? Durability.SYNC_WAL : Durability.SKIP_WAL);

    try {
        this.hBaseClient.batchMutate(mutations);
    } catch(Exception e){
        // Fix: concatenating a byte[] logged its identity hash (e.g.
        // "[B@1a2b3c"), not the key contents; render the bytes instead.
        LOG.warn("Failing tuple. Error writing rowKey " + java.util.Arrays.toString(rowKey), e);
        this.collector.fail(tuple);
        return;
    }

    this.collector.ack(tuple);
}
 
Example 27
Project: storm-hbase-1.0.x   File: HBaseLookupBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Looks up the tuple's row in HBase, emits one output tuple per mapped row
 * of the result, and acks; any lookup failure fails the tuple for replay.
 */
public void execute(Tuple tuple) {
    byte[] rowKey = this.mapper.rowKey(tuple);
    Get get = hBaseClient.constructGetRequests(rowKey, projectionCriteria);

    try {
        Result result = hBaseClient.batchGet(Lists.newArrayList(get))[0];
        for(Values values : rowToTupleMapper.toValues(result)) {
            this.collector.emit(values);
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        // Fix: concatenating a byte[] logged its identity hash (e.g.
        // "[B@1a2b3c"), not the key contents; render the bytes instead.
        LOG.warn("Could not perform Lookup for rowKey =" + java.util.Arrays.toString(rowKey) + " from Hbase.", e);
        this.collector.fail(tuple);
    }
}
 
Example 28
Project: reddit-sentiment-storm   File: SentimentCalculatorBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple) {
	// Unpack every field of the comment record by name.
	String subreddit = tuple.getStringByField("subreddit");
	String storyId = tuple.getStringByField("storyid");
	String storyURL = tuple.getStringByField("storyurl");
	String storyTitle = tuple.getStringByField("storytitle");
	String commentId = tuple.getStringByField("commentid");
	String comment = tuple.getStringByField("comment");
	long timestamp = tuple.getLongByField("timestamp");

	LOG.info("Received {}:{}:{}:{}:{}:[{}]", subreddit, storyId, storyURL, storyTitle, commentId, comment);

	// Sum the lexicon weight of every known token in the comment; unknown
	// and empty tokens contribute nothing.
	int sentimentScore = 0;
	for (String token : comment.split("\\s+")) {
		if (token == null || token.isEmpty()) {
			continue;
		}
		Long weight = sentimentData.get(token);
		if (weight != null) {
			sentimentScore += weight;
		}
	}

	// Forward the record with its score appended, anchored for reliability.
	collector.emit(tuple, new Values(subreddit, storyId, storyURL, storyTitle,
			commentId, comment, sentimentScore, timestamp));
	LOG.info("Emit {}:{}:{}:{}:{}:{}:[{}]", subreddit, sentimentScore, storyId, storyURL,
			storyTitle, commentId, comment);

	collector.ack(tuple);
}
 
Example 29
Project: reddit-sentiment-storm   File: SummarizerBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Folds each scored comment into the running summary and publishes the
 * summary to ZooKeeper at most once every 30 seconds.
 */
public void execute(Tuple tuple) {
	String subreddit = tuple.getStringByField("subreddit");
	String storyId = tuple.getStringByField("storyid");
	String storyURL = tuple.getStringByField("storyurl");
	String storyTitle = tuple.getStringByField("storytitle");
	String commentId = tuple.getStringByField("commentid");
	String comment = tuple.getStringByField("comment");
	int sentimentScore = tuple.getIntegerByField("score");
	long timestamp = tuple.getLongByField("timestamp");

	LOG.info("Received {}:{}:{}:{}:{}:{}:[{}]", subreddit, sentimentScore, storyId, storyURL, 
			storyTitle, commentId, comment);
	
	// Ack early: the summary update and publishing below are best-effort
	// and should not cause a replay of this tuple.
	collector.ack(tuple);
	
	summary.update(subreddit, storyId, storyURL, storyTitle, commentId,
			comment, sentimentScore, timestamp);
	
	// Publish updated statistics only every 30 secs.
	long curTime = System.currentTimeMillis();
	if (lastPublishedTimestamp == 0 ) {
		// Since messages come one by one to Summarizer, publishing immediately on first message
		// will show just 1 comment and looks odd. Instead, mark now as last published time
		// so that by next publishing window, we'd have received a couple of comments to show meaningful
		// rankings.
		lastPublishedTimestamp = curTime;
		
	} else if (curTime - lastPublishedTimestamp > 30000) {
		
		LOG.info("Publishing statistics to ZK");
		this.publisher.publish(summary);
		lastPublishedTimestamp = curTime;
	}
}
 
Example 30
Project: storm-demos   File: MessageSplitBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Splits a multi-line channel log blob (field 0) into individual lines and
 * emits (device, channel, code, count, total, ratio) per line.
 *
 * NOTE(review): assumes every line has at least 7 space-separated tokens and
 * that token 0 is intentionally skipped — TODO confirm the log format.
 * A short line throws ArrayIndexOutOfBoundsException before the ack below,
 * so the whole tuple would be replayed.
 */
public void execute(Tuple tuple) {
	String[] channelLogs = tuple.getValue(0).toString().split("\n");
	for (String logString : channelLogs) {
		String[] logContent = logString.split(" ");
		String device = logContent[1];// device
		String channel = logContent[2];// channel
		String code = logContent[3];// code
		String count = logContent[4];// count
		String total = logContent[5];// total
		String ratio = logContent[6];// ratio
		collector.emit(new Values(device, channel, code, count, total, ratio));
	}
	this.collector.ack(tuple);
}
 
Example 31
Project: storm-demos   File: WriteHbaseBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Writes one (rowkey, column, columnvalue) cell to HBase, or — when the
 * tuple is a system tick tuple — flushes buffered commits to the table.
 */
public void execute(Tuple tuple) {
	try {
		// A tick tuple is identified by Storm's system component and
		// system tick stream ids.
		if (tuple.getSourceComponent().equals(Constants.SYSTEM_COMPONENT_ID)
				&& tuple.getSourceStreamId().equals(Constants.SYSTEM_TICK_STREAM_ID)) {
			logger.info("flush commit hbase !");
			table.flushCommits();
		} else {
			writeHbase(this.configure, tuple.getStringByField("rowkey"), hbaseColumnFamlity,
					tuple.getStringByField("column"), tuple.getStringByField("columnvalue"));
		}
	} catch (IOException e) {
		// Write failures are logged only; the tuple is not failed/replayed.
		logger.error("Hbase save wrong !\n", e);
	}
}
 
Example 32
Project: splice-community-sample-code   File: MySqlSpliceBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Mirrors the tuple's field names and values into parallel lists and inserts
 * them as a single row via the Splice communicator.
 */
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
    // Fix: the previous "new ArrayList<>()" pre-assignments were dead
    // stores, immediately overwritten by the casts below; removed.
    fields = (ArrayList<String>) input.getFields().toList();
    fieldValues = (ArrayList<Object>) input.getValues();
    try {
        communicator.insertRow(this.tableName, fields, fieldValues);
    } catch (SQLException e) {
        System.out.println("Exception occurred in adding a row");
        e.printStackTrace();
    }
}
 
Example 33
Project: splice-community-sample-code   File: SpliceDumperBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Mirrors the tuple's field names and values into parallel lists and inserts
 * them as a single row via the Splice communicator.
 */
@Override
public void execute(Tuple input, BasicOutputCollector collector) {
    // Fix: the previous "new ArrayList<>()" pre-assignments were dead
    // stores, immediately overwritten by the casts below; removed.
    fields = (ArrayList<String>) input.getFields().toList();
    fieldValues = (ArrayList<Object>) input.getValues();
    try {
        communicator.insertRow(this.tableName, fields, fieldValues);
    } catch (SQLException e) {
        System.out.println("Exception occurred in adding a row");
        e.printStackTrace();
    }
}
 
Example 34
Project: preliminary.demo   File: WordCount.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple) {
    // Bump this word's running total, starting at 1 for unseen words.
    String word = tuple.getString(0);
    Integer current = counts.get(word);
    counts.put(word, current == null ? 1 : current + 1);
    collector.ack(tuple);
}
 
Example 35
Project: preliminary.demo   File: WordCountLocal.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple) {
    String word = tuple.getString(0);
    // Default a missing entry to zero, then increment and store.
    int total = counts.containsKey(word) ? counts.get(word) : 0;
    counts.put(word, total + 1);
    collector.ack(tuple);
}
 
Example 36
Project: storm-demo   File: DefaultSequenceFormat.java   Source Code and License 5 votes vote down vote up
/**
 * Extracts the sequence-file key from the tuple's configured key field.
 * The LongWritable is allocated lazily and reused across calls to avoid
 * creating per-tuple garbage.
 */
@Override
public Writable key(Tuple tuple) {
    if(this.key == null){
        this.key  = new LongWritable();
    }
    this.key.set(tuple.getLongByField(this.keyField));
    return this.key;
}
 
Example 37
Project: storm-kafka-examples   File: CounterBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Unpacks the six order fields from the tuple by position, re-emits them
 * unchanged, and logs the record together with a running counter.
 */
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
	List<Object> data = tuple.getValues();

	// Positional layout: id, memberid, totalprice, preprice, sendpay, createdate
	String id = (String) data.get(0);
	String memberid = (String) data.get(1);
	String totalprice = (String) data.get(2);
	String preprice = (String) data.get(3);
	String sendpay = (String) data.get(4);
	String createdate = (String) data.get(5);
	collector.emit(new Values(id,memberid,totalprice,preprice,sendpay,createdate));
	logger.info("+++++++++++++++++++++++++++++++++Valid+++++++++++++++++++++++++++++++++");
	logger.info("msg = "+data+" [email protected]@[email protected]@[email protected] = "+(counter++));
	logger.info("+++++++++++++++++++++++++++++++++Valid+++++++++++++++++++++++++++++++++");
}
 
Example 38
Project: hadooparchitecturebook   File: ParseTicksBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple tuple) {
  // Split the CSV tick record and forward fields 0 and 4 downstream,
  // then ack the input.
  String[] parts = tuple.getStringByField("tick").split(",");
  outputCollector.emit(new Values(parts[0], parts[4]));
  outputCollector.ack(tuple);
}
 
Example 39
Project: miner   File: FetchBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Downloads the page at the tuple's URL (optionally through a proxy) and
 * emits (globalInfo, pageContent) on success; any fetch failure fails the
 * tuple so Storm replays it.
 */
public void execute(Tuple tuple) {
    long startTime=System.currentTimeMillis();

    String globalInfo = tuple.getString(0);
    String downloadUrl = tuple.getString(1);
    String proxy = tuple.getString(2);
    String resource = "";
    try{
        // "none" means fetch directly; any other value is used as the proxy.
        if(proxy.equals("none")){
            resource = Crawl4HttpClient.downLoadPage(downloadUrl);
        }else {
            resource = Crawl4HttpClient.downLoadPage(downloadUrl, proxy);
        }
        // Fix: the original evaluated resource.equals("") BEFORE
        // resource == null, so a null return could never reach its intended
        // branch — it NPE'd on the first equals() call instead. Null/empty
        // is now checked first.
        if (resource == null || resource.isEmpty()) {
            logger.warn(downloadUrl + "return null.");
            _collector.fail(tuple);
        } else if (resource.equals("exception")) {
            logger.error("fetch exception:" + downloadUrl);
            _collector.fail(tuple);
        } else if(resource.equals("error")){
            logger.error("fetch error:" + downloadUrl);
            _collector.fail(tuple);
        } else {
            _collector.emit(tuple, new Values(globalInfo, resource));
            logger.info(downloadUrl + ":fetch succeed!" + resource);
            _collector.ack(tuple);
        }
    } catch (Exception ex) {
        logger.error("fetch error:" +downloadUrl+" error:"+MySysLogger.formatException(ex));
        _collector.fail(tuple);
    }

    long endTime=System.currentTimeMillis();
    logger.info(globalInfo+"在FetchBolt的处理时间:"+(endTime-startTime)/1000+"s.");

}
 
Example 40
Project: miner   File: PrintBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple input) {
    try {
        // Dump the first field to stdout with a trailing marker, then ack.
        String payload = input.getString(0);
        System.out.println(payload + "---");
        _collector.ack(input);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
 
Example 41
Project: miner   File: ParseLoopBolt.java   Source Code and License 5 votes vote down vote up
/**
 * Re-emits the incoming value after a fixed 30-second delay, anchored to
 * the input tuple, forming a delayed loop/retry stage.
 *
 * NOTE(review): Thread.sleep blocks this executor thread for 30s per tuple,
 * stalling the whole task — a tick-tuple or timer-based design would avoid
 * that. The catch also swallows InterruptedException without restoring the
 * thread's interrupt flag.
 */
public void execute(Tuple input) {
	try {
		String result = input.getString(0);
		Thread.sleep(30000);
		_collector.emit(input, new Values(result));
		_collector.ack(input);
	}catch (Exception ex){
		ex.printStackTrace();
	}
}
 
Example 42
Project: storm-demo   File: FileSizeRotationPolicy.java   Source Code and License 5 votes vote down vote up
@Override
public boolean mark(Tuple tuple, long offset) {
    // Accumulate the bytes written since the previous mark and request a
    // rotation once the running total reaches the configured maximum.
    this.currentBytesWritten += offset - this.lastOffset;
    this.lastOffset = offset;
    return this.currentBytesWritten >= this.maxBytes;
}
 
Example 43
Project: miner   File: SplitRecordBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple input){
    String type = input.getString(0);
    String line = input.getString(1);
    if(line != null && !line.trim().isEmpty()){
        for(String word:line.split("\\s+")){
            collector.emit(input, new Values(type, word));
            System.out.println("Word emitted: type="+type+",word="+word);
            collector.ack(input);
        }
    }
}
 
Example 44
Project: miner   File: ProxyBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple, BasicOutputCollector collector) {
        // Picks a proxy for this tuple's workspace, enforcing a per-workspace
        // reuse delay via Redis-backed black/white sets, then reclaims
        // workspaces that have been idle past their dead time.
        // NOTE(review): building a new Redis connection per tuple is costly;
        // this initialisation belongs in prepare().
        ru = new RedisUtil("127.0.0.1",6379,"password", 0);
        jedis = ru.getJedisInstance();
        String global_info = (String) tuple.getValue(0);
        String download_url= (String) tuple.getValue(1);
        /* delay_time should really come from the upstream component */
        int delay_time=2*1000;
        String workspace_id= get_workspace_id(global_info);

        /* ------ register this workspace's settings ------ */
        if(!workspace_setting.containsKey(workspace_id)){
            workspace_setting.put(workspace_id,new ProxySetting(delay_time));
            /* ----- IMPORTANT: records every workspace that uses a proxy ----- */
            jedis.sadd("workspace_pool",workspace_id);
        }
        ProxySetting current_workspace_setting=workspace_setting.get(workspace_id);
        /* ---- refresh this workspace's IP pool ---- */
        Long last_update_time = current_workspace_setting.get_last_update_time();
        /* Refresh at most every 10 seconds (could also refresh on every call). */
        if(System.currentTimeMillis()-last_update_time>1000*10){
            refresh_workspace_proxy_pool(workspace_id);
            current_workspace_setting.set_last_update_time(System.currentTimeMillis());
        }

        String proxy=null;
        do{
            /* ---- move entries whose delay has expired from black to white set ---- */
            Set<String> black_set = jedis.smembers(workspace_id+"_black_set");
            Iterator<String> it=black_set.iterator();
            while (it.hasNext()){
                String tmp_ele=it.next();
                // Entry format is "<proxy>_<blacklistedAtMillis>".
                String[] tmp=tmp_ele.split("_");
                Long now=System.currentTimeMillis();
                if(now-Long.parseLong(tmp[1])>current_workspace_setting.get_delay_time()) {
                    jedis.srem(workspace_id+"_black_set", tmp_ele);
                    jedis.sadd(workspace_id+"_white_set",tmp[0]);
                }
            }
            /* ------------- pick a usable proxy --------------- */
            proxy=ru.pick(jedis,workspace_id+"_white_set");
        } while(proxy==null||proxy.equals(""));
        // Blacklist the chosen proxy (with a timestamp) until its delay expires.
        ru.add(jedis, workspace_id + "_black_set", proxy + "_" + System.currentTimeMillis());
        current_workspace_setting.set_last_action_time(System.currentTimeMillis());
        /* ------------- reclaim idle workspaces --------------- */
        // Bug fix: iterate with an explicit Iterator so entries can be removed
        // during the loop. The original removed from the map inside a for-each
        // over entrySet(), which throws ConcurrentModificationException.
        Iterator<Map.Entry<String,ProxySetting>> entries = workspace_setting.entrySet().iterator();
        while (entries.hasNext()) {
            Map.Entry<String,ProxySetting> entry = entries.next();
            String key=entry.getKey();
            ProxySetting tps=entry.getValue();
            Long last_action_time = tps.get_last_action_time();
            Long elapse_time = System.currentTimeMillis() - last_action_time;
            int dead_time = tps.get_dead_time();
            if (elapse_time > dead_time) {
                /* drop this workspace's Redis sets */
                ru.clean_set(jedis, key + "_white_set");
                ru.clean_set(jedis, key + "_black_set");
                entries.remove();
                // Bug fix: deregister the reclaimed workspace ("key"); the
                // original removed the currently-served workspace_id instead.
                jedis.srem("workspace_pool",key);
            }
        }
        collector.emit(new Values(global_info, download_url,proxy));
    }
 
Example 45
Project: miner   File: StoreTestBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    // Logs the incoming (globalInfo, data) pair; the actual import call is
    // commented out, so this test bolt only prints what it receives.
    try {
        final String globalInfo = input.getString(0);
        final String data = input.getString(1);

        System.out.println("globalINfo:" + globalInfo);
        System.out.println("data:" + data);

//			ImportData.importData(data);
    } catch (Exception ex) {
        logger.error("store error!" + ex);
        ex.printStackTrace();
    }
}
 
Example 46
Project: storm-demo   File: SplitFieldsBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple tuple) {
    // 其中的field就是刚才outputDeclarer中定义的
    String timestamp = tuple.getStringByField("timestamp");
    String message = tuple.getStringByField("message");
    String service = tuple.getStringByField("service");

    try {
        String[] fields = message.split(ServerConfig.getFieldSeparator());
        StringBuilder stringBuilder = new StringBuilder();

        for (String field : fields) {

            if (field != null) {
                String[] pair = field.split(ServerConfig.getPairSeparator(), 2);
                stringBuilder.append(pair[1]).append(SPLITTER);
            }
        }
        stringBuilder.append(service);

        collector.emit(tuple, new Values(timestamp, stringBuilder.toString()));
        collector.ack(tuple);
    } catch (ArrayIndexOutOfBoundsException e) {
        collector.fail(tuple);
        e.printStackTrace();
    }
}
 
Example 47
Project: erad2016-streamprocessing   File: SentimentScoringBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple, BasicOutputCollector collector) {
    LOGGER.debug("Scoring tweet");

    // Look each value up by field name directly; on a Storm Tuple the
    // getXByField forms are equivalent to getX(fieldIndex(name)).
    Long id = tuple.getLongByField("tweet_id");
    String text = tuple.getStringByField("tweet_text");
    Float pos = tuple.getFloatByField("pos_score");
    Float neg = tuple.getFloatByField("neg_score");

    // Overall label: strictly higher positive score wins; ties are negative.
    String score = (pos > neg) ? "positive" : "negative";

    LOGGER.debug(String.format("tweet %s: %s", id, score));
    collector.emit(new Values(id, text, pos, neg, score));
}
 
Example 48
Project: erad2016-streamprocessing   File: CsvBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple tuple, BasicOutputCollector collector) {
    // Render every tuple value as a string and append the row to the CSV file.
    List<Object> values = tuple.getValues();
    String[] row = new String[values.size()];

    for (int i = 0; i < row.length; i++) {
        row[i] = values.get(i).toString();
    }

    csvWriter.writeNext(row);
}
 
Example 49
Project: erad2016-streamprocessing   File: StemmingBolt.java   Source Code and License 5 votes vote down vote up
public void execute(Tuple input, BasicOutputCollector collector) {
    LOGGER.debug("removing stop words");

    // Strips every stop word (matched as a whole word) from the tweet text
    // and forwards the cleaned text downstream.
    Long id     = input.getLong(input.fieldIndex("tweet_id"));
    String text = input.getString(input.fieldIndex("tweet_text"));

    List<String> stopWords = StopWords.getWords();

    for (String word : stopWords) {
        // Bug fix: quote each stop word so regex metacharacters in the list
        // (e.g. "c++", "a.m.") cannot corrupt the pattern or throw
        // PatternSyntaxException. Plain alphabetic words behave as before.
        text = text.replaceAll("\\b" + java.util.regex.Pattern.quote(word) + "\\b", "");
    }

    collector.emit(new Values(id, text));
}
 
Example 50
Project: Get-ENVS   File: WordCount.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
  // Maintains a running per-word counter and emits the updated (word, count).
  String word = tuple.getString(0);
  // Previous total for this word, or zero the first time it is seen.
  Integer previous = counts.get(word);
  int count = (previous == null) ? 1 : previous + 1;
  counts.put(word, count);
  collector.emit(new Values(word, count));
}
 
Example 51
Project: es-hadoop-v2.2.0   File: StormTupleFieldExtractor.java   Source Code and License 5 votes vote down vote up
@Override
public String toString(Object field) {
    if (field instanceof Tuple) {
        return ((Tuple) field).getValues().toString();
    }
    return field.toString();
}
 
Example 52
Project: storm-demo   File: DelimitedRecordFormat.java   Source Code and License 5 votes vote down vote up
@Override
public byte[] format(Tuple tuple) {
    StringBuilder sb = new StringBuilder();
    Fields fields = this.fields == null ? tuple.getFields() : this.fields;
    int size = fields.size();
    for(int i = 0; i < size; i++){
        sb.append(tuple.getValueByField(fields.get(i)));
        if(i != size - 1){
            sb.append(this.fieldDelimiter);
        }
    }
    sb.append(this.recordDelimiter);
    return sb.toString().getBytes();
}
 
Example 53
Project: es-hadoop-v2.2.0   File: EsBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    LinkedHashMap copy = new LinkedHashMap(conf);
    copy.putAll(boltConfig);

    StormSettings settings = new StormSettings(copy);
    flushOnTickTuple = settings.getStormTickTupleFlush();
    ackWrites = settings.getStormBoltAck();

    // trigger manual flush
    if (ackWrites) {
        settings.setProperty(ES_BATCH_FLUSH_MANUAL, Boolean.TRUE.toString());

        // align Bolt / es-hadoop batch settings
        numberOfEntries = settings.getStormBulkSize();
        settings.setProperty(ES_BATCH_SIZE_ENTRIES, String.valueOf(numberOfEntries));

        inflightTuples = new ArrayList<Tuple>(numberOfEntries + 1);
    }

    int totalTasks = context.getComponentTasks(context.getThisComponentId()).size();

    InitializationUtils.setValueWriterIfNotSet(settings, StormValueWriter.class, log);
    InitializationUtils.setBytesConverterIfNeeded(settings, StormTupleBytesConverter.class, log);
    InitializationUtils.setFieldExtractorIfNotSet(settings, StormTupleFieldExtractor.class, log);

    writer = RestService.createWriter(settings, context.getThisTaskIndex(), totalTasks, log);
}
 
Example 54
Project: es-hadoop-v2.2.0   File: EsBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple input) {
    if (flushOnTickTuple && TupleUtils.isTickTuple(input)) {
        flush();
        return;
    }
    if (ackWrites) {
        inflightTuples.add(input);
    }
    try {
        writer.repository.writeToIndex(input);

        // manual flush in case of ack writes - handle it here.
        if (numberOfEntries > 0 && inflightTuples.size() >= numberOfEntries) {
            flush();
        }

        if (!ackWrites) {
            collector.ack(input);
        }
    } catch (RuntimeException ex) {
        if (!ackWrites) {
            collector.fail(input);
        }
        throw ex;
    }
}
 
Example 55
Project: es-hadoop-v2.2.0   File: EsBolt.java   Source Code and License 5 votes vote down vote up
private void flushWithAck() {
    // Flushes the pending bulk request, then acks or fails each in-flight
    // tuple depending on whether its entry reached Elasticsearch.
    BitSet flush = null;

    try {
        // Bits left set in the result mark entries that were NOT written
        // (see the per-index check below).
        flush = writer.repository.tryFlush();
        writer.repository.discard();
    } catch (EsHadoopException ex) {
        // fail all recorded tuples
        for (Tuple input : inflightTuples) {
            collector.fail(input);
        }
        inflightTuples.clear();
        throw ex;
    }

    for (int index = 0; index < inflightTuples.size(); index++) {
        Tuple tuple = inflightTuples.get(index);
        // bit set means the entry hasn't been removed and thus wasn't written to ES
        if (flush.get(index)) {
            collector.fail(tuple);
        }
        else {
            collector.ack(tuple);
        }
    }

    // clear everything in bulk to prevent 'noisy' remove()
    inflightTuples.clear();
}
 
Example 56
Project: es-hadoop-v2.2.0   File: CapturingBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple tuple) {
    if (TestSpout.DONE.equals(tuple.getValue(0))) {
        MultiIndexSpoutStormSuite.COMPONENT_HAS_COMPLETED.decrement();
    }
    else {
        CAPTURED.add(tuple);
        if (log.isDebugEnabled()) {
            log.debug("Received tuple " + tuple);
        }
    }
    collector.ack(tuple);
}
 
Example 57
Project: RealEstate-Streaming   File: InsertBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple input) {
Property prop = new Property();
   prop.setTitle(input.getString(0));
   prop.setLink(input.getString(1));
   prop.setDescription(input.getString(2));
   prop.setPubDate(input.getString(3));
   prop.setThumbnail(input.getString(4));
   
   insertRow(prop);
   
   collector.ack(input);
}
 
Example 58
Project: storm-demo   File: AbstractHdfsBolt.java   Source Code and License 5 votes vote down vote up
protected Path getBasePathForNextFile(Tuple tuple) {
    // Each partition path keeps its own rotation counter; a partition seen
    // for the first time starts at rotation 0.
    final String partitionPath = this.partitioner.getPartitionPath(tuple);
    final Integer previous = rotationCounterMap.get(partitionPath);
    final int rotation = (previous == null) ? 0 : previous + 1;
    rotationCounterMap.put(partitionPath, rotation);

    // File name embeds the rotation number and the current timestamp.
    return new Path(this.fsUrl + this.fileNameFormat.getPath() + partitionPath,
            this.fileNameFormat.getName(rotation, System.currentTimeMillis()));
}
 
Example 59
Project: java   File: TradeReportPersistenceBolt.java   Source Code and License 5 votes vote down vote up
@Override
public void execute(Tuple tuple) {
  LOGGER.info("Processing ELIGIBLE Trade");
  long newTime = 0;
  try {
    if (CONFIG.is("REPORTING_TIME_DELAY_ON")) {
      Utils.sleep(CONFIG.getLong("REPORTING_PERSISTENCE_TIME"));
    }
    FileWriter fileWriter = new FileWriter(CONFIG.get("REPT_PERSISTENCE_PATH"), true);
    BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
    bufferedWriter.write(tuple.getString(0));
    bufferedWriter.write(COMMA_SEPARATOR);
    bufferedWriter.write(String.valueOf(new Date()));
    bufferedWriter.write(COMMA_SEPARATOR);
    newTime = new Date().getTime();
    bufferedWriter.write(String.valueOf(newTime));
    bufferedWriter.write(COMMA_SEPARATOR);
    bufferedWriter.write(
        String.valueOf(newTime - Long.parseLong(tuple.getString(0).split(COMMA_SEPARATOR)[4])));
    bufferedWriter.newLine();
    bufferedWriter.close();
    // Checking and Performing Ack
    if (CONFIG.is("ACK_ON")) {
      _collector.ack(tuple);
    }
  } catch (Throwable e) {
    LOGGER.error(EXEC_EXCP_MSG, e);
    _collector.fail(tuple);

  }

}
 
Example 60
Project: storm-demo   File: AbstractHdfsBolt.java   Source Code and License 5 votes vote down vote up
private AbstractHDFSWriter getOrCreateWriter(String writerKey, Tuple tuple) throws IOException {
    // Reuse the cached writer for this key; lazily create one on first use.
    // (Map.computeIfAbsent is not an option here: makeNewWriter throws the
    // checked IOException.)
    AbstractHDFSWriter cached = writers.get(writerKey);
    if (cached != null) {
        return cached;
    }

    AbstractHDFSWriter created = makeNewWriter(getBasePathForNextFile(tuple), tuple);
    writers.put(writerKey, created);
    return created;
}