Java Code Examples for kafka.javaapi.consumer.ConsumerConnector

The following are top-voted examples showing how to use kafka.javaapi.consumer.ConsumerConnector. These examples are extracted from open-quality open source projects. You can vote up the examples you find useful; votes help surface additional high-quality examples.
Example 1
Project: Kafka-Insight   File: KafkaOffsetGetter.java   View source code 8 votes vote down vote up
/**
 * Tails the internal Kafka consumer-offsets topic forever and caches every
 * offset commit it sees into {@code kafkaConsumerOffsets}.
 * <p>
 * Only messages whose key's leading short is below 2 are decoded as offset
 * commits; other key versions are skipped.
 */
@Override
public void run() {
    ConsumerConnector consumerConnector = KafkaUtils.createConsumerConnector(zkAddr, group);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    // Autoboxing replaces the deprecated new Integer(1) constructor.
    topicCountMap.put(CONSUMER_OFFSET_TOPIC, 1);
    KafkaStream<byte[], byte[]> offsetMsgStream = consumerConnector.createMessageStreams(topicCountMap).get(CONSUMER_OFFSET_TOPIC).get(0);

    ConsumerIterator<byte[], byte[]> it = offsetMsgStream.iterator();
    while (true) {

        MessageAndMetadata<byte[], byte[]> offsetMsg = it.next();
        // BUGFIX: a null key previously threw an NPE outside the try block
        // below, killing this thread permanently. Skip such messages.
        if (offsetMsg.key() == null) {
            continue;
        }
        if (ByteBuffer.wrap(offsetMsg.key()).getShort() < 2) {
            try {
                GroupTopicPartition commitKey = readMessageKey(ByteBuffer.wrap(offsetMsg.key()));
                // Null message means a tombstone; nothing to record.
                if (offsetMsg.message() == null) {
                    continue;
                }
                kafka.common.OffsetAndMetadata commitValue = readMessageValue(ByteBuffer.wrap(offsetMsg.message()));
                kafkaConsumerOffsets.put(commitKey, commitValue);
            } catch (Exception e) {
                // NOTE(review): consider a logger; stack trace kept for parity.
                e.printStackTrace();
            }
        }
    }
}
 
Example 2
Project: bootique-kafka-client   File: DefaultConsumerFactory.java   View source code 7 votes vote down vote up
/**
 * Builds a ConsumerConnector from the named configuration, with optional
 * overrides layered on top.
 *
 * @param name            key of the base configuration to use
 * @param configOverrides overrides applied after the base config; may be null
 * @return a new Kafka consumer connector
 */
@Override
public ConsumerConnector newConsumerConnector(String name, ConsumerConfig configOverrides) {

    Properties merged = new Properties();

    // Base settings registered under this name, if any.
    Map<String, String> base = configs.get(name);
    if (base != null) {
        merged.putAll(base);
    }

    // Caller-supplied overrides win over base settings.
    if (configOverrides != null) {
        merged.putAll(configOverrides.createConsumerConfig());
    }

    return Consumer.createJavaConsumerConnector(new kafka.consumer.ConsumerConfig(merged));
}
 
Example 3
Project: jlogstash-input-plugin   File: KafkaDistributed.java   View source code 7 votes vote down vote up
/**
 * Releases all resources: commits and shuts down every consumer connector,
 * stops all worker executors, and unregisters from ZooKeeper.
 */
@Override
public void release() {
	try {
		// Commit offsets before shutdown so consumed messages are not replayed.
		for (ConsumerConnector connector : consumerConnMap.values()) {
			connector.commitOffsets(true);
			connector.shutdown();
		}

		// Stop all stream-consuming worker pools.
		for (ExecutorService pool : executorMap.values()) {
			pool.shutdownNow();
		}

		if (scheduleExecutor != null) {
			scheduleExecutor.shutdownNow();
		}

		this.zkDistributed.realse();
	} catch (Exception e) {
		logger.error(ExceptionUtil.getErrorMessage(e));
	}
}
 
Example 4
Project: dmaap-framework   File: KafkaConsumer.java   View source code 6 votes vote down vote up
/**
 * Creates a Kafka consumer bound to a single topic and opens its message
 * stream immediately.
 *
 * @param topic topic to consume
 * @param group consumer group id
 * @param id    client id within the group
 * @param cc    connector used to create the message stream
 */
public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
	fTopic = topic;
	fGroup = group;
	fId = id;
	fConnector = cc;

	fCreateTimeMs = System.currentTimeMillis();
	fLastTouch = fCreateTimeMs;

	fLogTag = fGroup + "(" + fId + ")/" + fTopic;
	offset = 0;

	state = KafkaConsumer.State.OPENED;

	// Request exactly one stream for the topic and keep it for iteration.
	final Map<String, Integer> streamCounts = new HashMap<String, Integer>();
	streamCounts.put(fTopic, 1);
	final List<KafkaStream<byte[], byte[]>> streams =
			fConnector.createMessageStreams(streamCounts).get(fTopic);
	fStream = streams.iterator().next();
}
 
Example 5
Project: indexr   File: Kafka08Fetcher.java   View source code 6 votes vote down vote up
/**
 * Stops the fetcher: commits offsets, shuts the connector down, and drains
 * any events still buffered in the iterator into {@code this.remaining}.
 *
 * @throws IOException declared for the Closeable contract
 */
@Override
public synchronized void close() throws IOException {
    logger.debug("Stop kafka fetcher. [topic: {}]", topics);
    // Detach the connector first so no new events are fetched.
    ConsumerConnector connector = this.connector;
    this.connector = null;
    if (connector != null) {
        connector.commitOffsets();
        connector.shutdown();
    }

    IOUtil.closeQuietly(eventItr);
    // Some events could exist in the buffer, try to save them.
    List<byte[]> remaining = new ArrayList<>();
    // BUGFIX: guard against a null iterator (e.g. close() called twice)
    // instead of relying on the catch below to swallow an NPE.
    if (eventItr != null) {
        try {
            while (eventItr.hasNext()) {
                remaining.add(eventItr.next());
            }
        } catch (Exception e) {
            // Ignore: draining is best-effort after shutdown.
        }
        eventItr = null;
    }
    if (!remaining.isEmpty()) {
        this.remaining = remaining;
    }
}
 
Example 6
Project: geomesa-tutorials   File: OSMKafkaSpout.java   View source code 6 votes vote down vote up
/**
 * Storm spout lifecycle hook: connects to Kafka via ZooKeeper and opens a
 * single-stream iterator over the configured topic.
 */
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    _collector = spoutOutputCollector;
    Properties props = new Properties();
    props.put("zookeeper.connect", conf.get(OSMIngest.ZOOKEEPERS));
    props.put("group.id", groupId);
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, new StringDecoder(new VerifiableProperties()), new StringDecoder(new VerifiableProperties()));
    List<KafkaStream<String, String>> streams = consumerMap.get(topic);
    KafkaStream<String, String> stream = null;
    if (streams.size() == 1) {
        stream = streams.get(0);
    } else {
        // BUGFIX: previously only logged, then fell through to a
        // NullPointerException on stream.iterator(). Fail explicitly.
        log.error("Streams should be of size 1");
        throw new IllegalStateException("Expected exactly 1 stream but got " + streams.size());
    }
    kafkaIterator = stream.iterator();
}
 
Example 7
Project: flink   File: KafkaConsumerTestBase.java   View source code 6 votes vote down vote up
/**
 * Read topic to list, only using Kafka code.
 *
 * @param topicName topic to read
 * @param config    consumer configuration
 * @param stopAfter maximum number of messages to read
 * @return messages read (fewer than stopAfter if the stream ends first)
 */
private static List<MessageAndMetadata<byte[], byte[]>> readTopicToList(String topicName, ConsumerConfig config, final int stopAfter) {
	ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(config);
	try {
		// we request only one stream per consumer instance. Kafka will make sure that each consumer group
		// will see each message only once.
		Map<String,Integer> topicCountMap = Collections.singletonMap(topicName, 1);
		Map<String, List<KafkaStream<byte[], byte[]>>> streams = consumerConnector.createMessageStreams(topicCountMap);
		if (streams.size() != 1) {
			throw new RuntimeException("Expected only one message stream but got "+streams.size());
		}
		List<KafkaStream<byte[], byte[]>> kafkaStreams = streams.get(topicName);
		if (kafkaStreams == null) {
			throw new RuntimeException("Requested stream not available. Available streams: "+streams.toString());
		}
		if (kafkaStreams.size() != 1) {
			// BUGFIX: message typo "bot got" -> "but got".
			throw new RuntimeException("Requested 1 stream from Kafka, but got "+kafkaStreams.size()+" streams");
		}
		LOG.info("Opening Consumer instance for topic '{}' on group '{}'", topicName, config.groupId());
		ConsumerIterator<byte[], byte[]> iteratorToRead = kafkaStreams.get(0).iterator();

		List<MessageAndMetadata<byte[], byte[]>> result = new ArrayList<>();
		int read = 0;
		while(iteratorToRead.hasNext()) {
			read++;
			result.add(iteratorToRead.next());
			if (read == stopAfter) {
				LOG.info("Read "+read+" elements");
				return result;
			}
		}
		return result;
	} finally {
		// BUGFIX: the connector was never shut down, leaking fetcher
		// threads and the ZooKeeper connection on every call.
		consumerConnector.shutdown();
	}
}
 
Example 8
Project: jlogstash-input-plugin   File: KafkaDistributed.java   View source code 6 votes vote down vote up
/**
 * Creates one consumer connector per configured topic and, if distributed
 * mode is configured, registers this instance in ZooKeeper.
 */
@SuppressWarnings("unchecked")
public void prepare() {
	Properties props = geneConsumerProp();

	// One connector per topic; all share the same consumer properties.
	for (String topicName : topic.keySet()) {
		consumerConnMap.put(topicName, kafka.consumer.Consumer
				.createJavaConsumerConnector(new ConsumerConfig(props)));
	}

	if (distributed != null) {
		try {
			logger.warn("zkDistributed is start...");
			zkDistributed = ZkDistributed.getSingleZkDistributed(distributed);
			zkDistributed.zkRegistration();
		} catch (Exception e) {
			logger.error("zkRegistration fail:{}",ExceptionUtil.getErrorMessage(e));
		}
	}
}
 
Example 9
Project: jlogstash-input-plugin   File: KafkaDistributed.java   View source code 6 votes vote down vote up
/**
 * Opens {@code threads} streams for the topic on its existing connector and
 * submits one consumer task per stream to a dedicated thread pool.
 *
 * @param topic   topic whose connector is already registered
 * @param threads number of streams/worker threads
 */
public void addNewConsumer(String topic, Integer threads){
	ConsumerConnector connector = consumerConnMap.get(topic);

	// Request one stream per worker thread.
	Map<String, Integer> streamCounts = new HashMap<String, Integer>();
	streamCounts.put(topic, threads);
	Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
			connector.createMessageStreams(streamCounts);

	ExecutorService pool = Executors.newFixedThreadPool(threads);
	for (final KafkaStream<byte[], byte[]> stream : streamsByTopic.get(topic)) {
		pool.submit(new Consumer(stream, this));
	}

	executorMap.put(topic, pool);
}
 
Example 10
Project: jlogstash-input-plugin   File: KafkaDistributed.java   View source code 6 votes vote down vote up
/**
 * Tears down and recreates the consumer for a topic: commits offsets, shuts
 * the old connector and its worker pool down, then reconnects and restarts
 * the stream consumers.
 */
public void reconnConsumer(String topicName){

	// Stop the existing connector for this topic (committing offsets first).
	ConsumerConnector oldConnector = consumerConnMap.remove(topicName);
	oldConnector.commitOffsets(true);
	oldConnector.shutdown();

	// Stop the stream-consuming threads for this topic.
	ExecutorService oldPool = executorMap.remove(topicName);
	oldPool.shutdownNow();

	// Create a fresh connector and restart the consumers.
	ConsumerConnector freshConnector = kafka.consumer.Consumer
			.createJavaConsumerConnector(new ConsumerConfig(geneConsumerProp()));
	consumerConnMap.put(topicName, freshConnector);

	addNewConsumer(topicName, topic.get(topicName));
}
 
Example 11
Project: jlogstash-input-plugin   File: Kafka.java   View source code 6 votes vote down vote up
/**
 * Opens {@code threads} streams for the topic on its existing connector and
 * hands each stream to a worker in a fixed-size thread pool.
 *
 * @param topic   topic whose connector is already registered
 * @param threads number of streams/worker threads
 */
public void addNewConsumer(String topic, Integer threads){
	ConsumerConnector connector = consumerConnMap.get(topic);

	// One stream per worker thread for this topic.
	Map<String, Integer> streamCounts = new HashMap<String, Integer>();
	streamCounts.put(topic, threads);
	Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
			connector.createMessageStreams(streamCounts);

	ExecutorService pool = Executors.newFixedThreadPool(threads);
	for (final KafkaStream<byte[], byte[]> stream : streamsByTopic.get(topic)) {
		pool.submit(new Consumer(stream, this));
	}

	executorMap.put(topic, pool);
}
 
Example 12
Project: jlogstash-input-plugin   File: Kafka.java   View source code 6 votes vote down vote up
/**
 * Reconnects the consumer for a topic: commits and shuts down the old
 * connector, stops its worker pool, then creates a new connector and
 * restarts stream consumption.
 */
public void reconnConsumer(String topicName){

	// Retire the current connector, committing offsets before shutdown.
	ConsumerConnector retired = consumerConnMap.remove(topicName);
	retired.commitOffsets(true);
	retired.shutdown();

	// Halt the workers draining this topic's streams.
	ExecutorService retiredPool = executorMap.remove(topicName);
	retiredPool.shutdownNow();

	// Build a replacement connector and resume consumption.
	ConsumerConnector replacement = kafka.consumer.Consumer
			.createJavaConsumerConnector(new ConsumerConfig(geneConsumerProp()));
	consumerConnMap.put(topicName, replacement);

	addNewConsumer(topicName, topic.get(topicName));
}
 
Example 13
Project: apex-malhar   File: HighlevelKafkaConsumer.java   View source code 6 votes vote down vote up
/**
 * Initializes the high-level consumer: allocates the connector map and sets
 * a unique consumer id plus the offset-reset policy in the config.
 */
@Override
public void create()
{
  super.create();
  if (standardConsumer == null) {
    standardConsumer = new HashMap<String, ConsumerConnector>();
  }

  // This is important to let kafka know how to distribute the reads among
  // different consumers in same consumer group
  // Don't reuse any id for recovery to avoid rebalancing error because
  // there is some delay for zookeeper to
  // find out the old consumer is dead and delete the entry even new
  // consumer is back online
  consumerConfig.put("consumer.id", "consumer" + System.currentTimeMillis());

  // Map the generic "earliest"/"latest" choice onto Kafka 0.8 names.
  String offsetReset = initialOffset.equalsIgnoreCase("earliest") ? "smallest" : "largest";
  consumerConfig.put("auto.offset.reset", offsetReset);
}
 
Example 14
Project: easyframe-msg   File: KafkaHelper.java   View source code 6 votes vote down vote up
/**
 * Consumes messages from the given topic using a single stream.
 *
 * @param topicName topic (queue) name
 * @param groupId   consumer group name
 * @return an iterator over the topic's messages
 */
static MsgIterator consume(String topicName, String groupId) {
	ConsumerConnector consumerConnector = KafkaHelper.getConsumer(groupId);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();	//(topic, #stream) pair
	// Autoboxing replaces the deprecated new Integer(1) constructor.
	topicCountMap.put(topicName, 1);

	//TODO: support consuming multiple topics
	Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);	//Using default decoder
	List<KafkaStream<byte[], byte[]>> streamList = consumerMap.get(topicName);	//One list entry per requested stream
	KafkaStream<byte[], byte[]> stream = streamList.get(0);

	// KafkaStream[K,V]: K is the partition key type, V the message value type.
	ConsumerIterator<byte[], byte[]> it = stream.iterator();
	MsgIterator iter = new MsgIterator(it);
	return iter;
}
 
Example 15
Project: easyframe-msg   File: KafkaHelper.java   View source code 6 votes vote down vote up
/**
 * Consumes messages from the given topic using the requested number of
 * streams.
 *
 * @param topicName  topic (queue) name
 * @param numStreams number of streams to return
 * @param groupId    consumer group name
 * @return a list of MsgIterator, one per stream
 */
static List<MsgIterator> consume(String topicName, int numStreams, String groupId) {
	ConsumerConnector connector = KafkaHelper.getConsumer(groupId);

	// (topic, #streams) request.
	Map<String, Integer> streamCounts = new HashMap<String, Integer>();
	streamCounts.put(topicName, numStreams);

	// TODO: support consuming several topics at once.
	// Default decoders: keys and values stay raw byte[].
	List<KafkaStream<byte[], byte[]>> streams =
			connector.createMessageStreams(streamCounts).get(topicName);

	// KafkaStream[K,V]: K is the partition key type, V the message value type.
	List<MsgIterator> iterators = new ArrayList<MsgIterator>(streams.size());
	for (KafkaStream<byte[], byte[]> stream : streams) {
		iterators.add(new MsgIterator(stream.iterator()));
	}

	return iterators;
}
 
Example 16
Project: easyframe-msg   File: KafkaHelper.java   View source code 6 votes vote down vote up
/**
 * Returns the ConsumerConnector cached for this (group, thread) pair,
 * creating one on first use with double-checked locking.
 * <p>
 * The thread name is part of the cache key so each thread gets its own
 * connector and can therefore consume a different partition.
 */
public static ConsumerConnector getConsumer(String groupId) {
	String consumerKey = groupId + "|" + Thread.currentThread().getName();
	ConsumerConnector msgConnector = groupConsumers.get(consumerKey);
	if (msgConnector == null) {
		// BUGFIX: acquire the lock OUTSIDE the try. If lock() itself threw
		// inside the try, the finally block would unlock a lock this
		// thread never held (IllegalMonitorStateException).
		consumerLock.lock();
		try {
			// Re-check under the lock: another thread may have created it.
			msgConnector = groupConsumers.get(consumerKey);
			if (msgConnector == null) {
				msgConnector = Consumer.createJavaConsumerConnector(getConsumerRealConfig(groupId));
				groupConsumers.put(consumerKey, msgConnector);
			}
		} finally {
			consumerLock.unlock();
		}
	}

	return msgConnector;
}
 
Example 17
Project: punxsutawney   File: HttpClient.java   View source code 6 votes vote down vote up
/**
 * Entry point: validates environment-derived settings, connects a consumer
 * to the queue named by {@code id}, and starts consuming.
 *
 * @throws Exception if consumption fails
 */
public static void main(String[] args) throws Exception {
    if (id == null) {
        throw new IllegalStateException("Undefined HC_ID");
    }
    if (zk == null) {
        throw new IllegalStateException("Undefined HC_ZK");
    }

    out.println("Starting " + HttpClient.class.getSimpleName());
    out.println("Using zk:" + zk + ", id:" + id);

    Properties props = new Properties();
    props.put("zookeeper.connect", zk);
    props.put("group.id", id);
    props.put("zookeeper.session.timeout.ms", "400");
    props.put("zookeeper.sync.time.ms", "200");

    ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
    // Single stream for the single topic named by this client's id.
    KafkaStream<byte[], byte[]> stream =
            connector.createMessageStreams(Collections.singletonMap(id, 1)).get(id).get(0);

    consume(connector, stream);
}
 
Example 18
Project: space-shuttle-demo   File: KafkaConfiguration.java   View source code 6 votes vote down vote up
/**
 * Builds the single Kafka stream for the topic derived from the gateway
 * address, decoding keys as strings and values as feature vectors.
 *
 * @return the topic's only message stream
 */
@Bean
protected KafkaStream<String, float[]> kafkaStream() {

    final String topicName = retrieveTopicNameFromGatewayAddress(gatewayUrl());

    ConsumerConnector connector = Consumer.createJavaConsumerConnector(consumerConfig());

    // Request exactly one stream for the topic.
    Map<String, Integer> streamCounts = new HashMap<>();
    streamCounts.put(topicName, 1);

    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    FeatureVectorDecoder valueDecoder = new FeatureVectorDecoder();
    List<KafkaStream<String, float[]>> topicStreams =
            connector.createMessageStreams(streamCounts, keyDecoder, valueDecoder).get(topicName);

    Preconditions.checkNotNull(topicStreams, String.format("Topic %s not found in streams map.", topicName));
    Preconditions.checkElementIndex(0, topicStreams.size(),
            String.format("List of streams of topic %s is empty.", topicName));
    return topicStreams.get(0);
}
 
Example 19
Project: datacollector   File: SdcKafkaTestUtil.java   View source code 6 votes vote down vote up
/**
 * Creates a test consumer and returns one stream per partition for the
 * given topic.
 *
 * @param zookeeperConnectString ZooKeeper connect string
 * @param topic                  topic to consume
 * @param partitions             number of streams to request
 * @return the topic's streams
 */
public List<KafkaStream<byte[], byte[]>> createKafkaStream(
    String zookeeperConnectString,
    String topic,
    int partitions
) {
  // Test-client consumer settings; short timeouts keep tests fast.
  Properties consumerProps = new Properties();
  consumerProps.put("zookeeper.connect", zookeeperConnectString);
  consumerProps.put("group.id", "testClient");
  consumerProps.put("zookeeper.session.timeout.ms", "6000");
  consumerProps.put("zookeeper.sync.time.ms", "200");
  consumerProps.put("auto.commit.interval.ms", "1000");
  consumerProps.put("consumer.timeout.ms", "500");

  ConsumerConnector connector =
      Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));

  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(topic, partitions);
  return connector.createMessageStreams(streamCounts).get(topic);
}
 
Example 20
Project: datacollector   File: KafkaTestUtil.java   View source code 6 votes vote down vote up
/**
 * Creates a test consumer and returns one stream per partition for the
 * given topic.
 *
 * @param zookeeperConnectString ZooKeeper connect string
 * @param topic                  topic to consume
 * @param partitions             number of streams to request
 * @return the topic's streams
 */
public static List<KafkaStream<byte[], byte[]>> createKafkaStream(String zookeeperConnectString, String topic, int partitions) {
  // Test-client consumer settings; short timeouts keep tests fast.
  Properties consumerProps = new Properties();
  consumerProps.put("zookeeper.connect", zookeeperConnectString);
  consumerProps.put("group.id", "testClient");
  consumerProps.put("zookeeper.session.timeout.ms", "6000");
  consumerProps.put("zookeeper.sync.time.ms", "200");
  consumerProps.put("auto.commit.interval.ms", "1000");
  consumerProps.put("consumer.timeout.ms", "500");

  ConsumerConnector connector =
      Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));

  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(topic, partitions);
  return connector.createMessageStreams(streamCounts).get(topic);
}
 
Example 21
Project: java-kafka-client-libs   File: KafkaConsumer.java   View source code 6 votes vote down vote up
/**
 * Creates one ConsumerConnector per topic, each in a topic-specific consumer
 * group derived from the configured base group id.
 *
 * @param topicMap      topic -> thread-count map
 * @param handler       handler invoked for consumed messages
 * @param consumerProps base consumer properties (its group.id is rewritten
 *                      per topic as a side effect, matching prior behavior)
 */
private void setUpConsumer( Map<String, Integer> topicMap, MessageHandler<?> handler, Properties consumerProps ) {
    _executors = new HashMap<String, ExecutorService>();
    _topicConsumers = new HashMap<String, ConsumerConnector>();

    // BUGFIX: read the base group.id ONCE. The original re-read it from
    // consumerProps inside the loop after overwriting it, so from the second
    // topic onward the group id was derived from an already-normalized value,
    // compounding suffixes across topics.
    String baseGroupId = consumerProps.getProperty( "group.id" );

    for ( String topic : topicMap.keySet() ) {
        String normalizedTopic = topic.replace( ".", "_" );
        String normalizedConsumerGroupId = getGroupId( baseGroupId, normalizedTopic );
        consumerProps.setProperty( "group.id", normalizedConsumerGroupId );
        LOG.warn( "Consuming topic '" + topic + "' with group.id '" + normalizedConsumerGroupId + "'" );
        LOG.warn( consumerProps.toString() );
        ConsumerConfig topicConfig = new ConsumerConfig( consumerProps );
        _topicConsumers.put( topic, kafka.consumer.Consumer.createJavaConsumerConnector( topicConfig ) );
    }
    _topicMap = topicMap;
    _handler = handler;
}
 
Example 22
Project: debezium-proto   File: KafkaMessageBus.java   View source code 6 votes vote down vote up
/**
 * Shuts the message bus down: stops the producer, then every consumer
 * connector, clearing the connector registry even if shutdown throws.
 */
@Override
public void shutdown() {
    // Mark this as no longer running; consumer runners will automatically terminate
    running = false;
    try {
        // Swap in the no-op producer and stop the old one.
        shutdownProducer(producer.getAndUpdate(existing -> NO_OP_PRODUCER));
    } finally {
        try {
            // Shut down each consumer connector in turn.
            for (ConsumerConnector connector : connectors.values()) {
                connector.shutdown();
            }
        } finally {
            connectors.clear();
        }
    }
}
 
Example 23
Project: debezium-proto   File: KafkaMessageBus.java   View source code 6 votes vote down vote up
/**
 * Returns the consumer connector registered for the given properties,
 * creating and caching one if none exists yet.
 * <p>
 * Thread-safe without a lock: concurrent creators race on
 * {@code putIfAbsent}, and the loser's connector is shut down asynchronously.
 *
 * @param props consumer properties; also the cache key in {@code connectors}
 * @return the cached or newly created connector (never null)
 */
private ConsumerConnector getOrCreateConnector(Properties props) {
    ConsumerConfig config = new ConsumerConfig(props);
    ConsumerConnector connector = connectors.get(props);
    if (connector == null) {
        logger.debug("Creating new consumer with config: {}", props);
        ConsumerConnector newConnector = kafka.consumer.Consumer.createJavaConsumerConnector(config);
        // It's possible that we and another thread might have concurrently created a consumer with the same config ...
        connector = connectors.putIfAbsent(props, newConnector);
        if (connector != null) {
            // Rare, but the new connector we created was not needed ...
            // Shut it down off-thread so this call doesn't block on cleanup.
            logger.debug("New consumer was not needed, so shutting down");
            executor.get().execute(() -> newConnector.shutdown());
        } else {
            logger.debug("Created new consumer with config: {}", props);
            connector = newConnector;
        }
    }
    assert connector != null;
    return connector;
}
 
Example 24
Project: MIB   File: EventDispatcher.java   View source code 6 votes vote down vote up
/**
 * Opens a consumer for the configured topic and dispatches each of its
 * streams to a worker in a CPU-sized thread pool.
 */
@Override
public void run() {
	// Size the worker pool to the machine's available cores.
	ExecutorService workers = Executors.newFixedThreadPool(
			Runtime.getRuntime().availableProcessors());

	ConsumerConnector connector = kafka.consumer.Consumer
			.createJavaConsumerConnector(this.consumerConfig);

	// One stream per thread for this topic.
	Map<String, Integer> streamCounts = new HashMap<>();
	streamCounts.put(this.topic, threadsPerTopic);

	Map<String, List<KafkaStream<String, TIn>>> streamsByTopic = connector
			.createMessageStreams(streamCounts, this.keyDecoder, this.valueDecoder);

	// Submit one consumer task per stream.
	for (final KafkaStream<String, TIn> stream : streamsByTopic.get(this.topic)) {
		workers.submit(new Consumer<String, TIn>(stream, dispatcherCommand));
	}

	// do not close producer while threads are still running
	// this.producer.close();
}
 
Example 25
Project: CadalWorkspace   File: myConsumer.java   View source code 6 votes vote down vote up
/**
 * Demo consumer: connects to ZooKeeper, opens one stream on "my-topic",
 * and prints each message payload as a String.
 */
public static void main(String[] args) {
	Properties props = new Properties();
	props.put("zookeeper.connect","10.15.62.76:2181");
	props.put("group.id","mygroup001");
	props.put("zookeeper.session.timeout.ms","40000");
	props.put("zookeeper.sync.time.ms","200");
	props.put("auto.commit.interval.ms","1000");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);

	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String,Integer> topicCountMap = new HashMap<String,Integer>();
	// Autoboxing replaces the deprecated new Integer(1) constructor.
	topicCountMap.put("my-topic", 1);
	System.out.println("zzzzzzzzzzzzz");
	Map<String,List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
	List<KafkaStream<byte[], byte[]>> streams = consumerMap.get("my-topic");

	KafkaStream<byte[], byte[]> stream = streams.get(0);

	ConsumerIterator<byte[], byte[]> it = stream.iterator();
	System.out.println("before while...");
	while(it.hasNext()){
		System.out.println(new String(it.next().message()));
	}
}
 
Example 26
Project: CadalWorkspace   File: SpoutKafka.java   View source code 6 votes vote down vote up
/**
 * Storm spout lifecycle hook: connects a consumer to the "newtopic" queue
 * and keeps its single-stream iterator for later nextTuple() calls.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Map conf, TopologyContext context,
		SpoutOutputCollector collector) {
	this._collector = collector;

	// Legacy (pre-0.8) consumer property names: zk.connect / groupid.
	Properties props = new Properties();
	props.put("zk.connect", "10.15.62.104:2181");
	props.put("groupid", "group1");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer
			.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// Autoboxing replaces the deprecated new Integer(1) constructor.
	topicCountMap.put("newtopic", 1);

	Map<String, List<KafkaMessageStream>> consumerMap = consumer
			.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaMessageStream stream = consumerMap.get("newtopic").get(0);

	this.it = stream.iterator();
}
 
Example 27
Project: CadalWorkspace   File: SpoutKafka.java   View source code 6 votes vote down vote up
@SuppressWarnings("unchecked")
@Override
public void open(Map conf, TopologyContext context,
		SpoutOutputCollector collector) {
	this._collector = collector;
	this.logger = Logger.getLogger(BoltCassandra.class.getClass().getName());
	
	// Construct kafka part
	Properties props = new Properties();
	props.put("zk.connect", "10.15.62.75:2181");
	props.put("groupid", "sec-group-1");		// 

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	topicCountMap.put("sec-stream-one", new Integer(1));		// 

	Map<String, List<KafkaMessageStream>> consumerMap = consumer.createMessageStreams(topicCountMap);

	KafkaMessageStream stream = consumerMap.get("sec-stream-one").get(0);		//
	
	this.it = stream.iterator();
}
 
Example 28
Project: CadalWorkspace   File: SecSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "CadalSec") to the "Read-common" topic and
 * keeps its single-stream iterator.
 */
public void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "CadalSec");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Read-common", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Read-common").get(0);
	this.it = stream.iterator();
}
 
Example 29
Project: CadalWorkspace   File: RecBookRecPageSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "RecRecPage") to the "Rec-recPage" topic and
 * keeps its single-stream iterator.
 */
private void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecRecPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-recPage", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPage").get(0);
	this.it = stream.iterator();
}
 
Example 30
Project: CadalWorkspace   File: RecTagRecPageSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "RecTagRecPage") to the "Rec-recPageTagTag"
 * topic and keeps its single-stream iterator.
 */
private void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecTagRecPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-recPageTagTag", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPageTagTag").get(0);
	this.it = stream.iterator();
}
 
Example 31
Project: CadalWorkspace   File: RecBookPersonalPageSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "RecPersonalPage") to the "Rec-personalPage"
 * topic and keeps its single-stream iterator.
 */
private void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecPersonalPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-personalPage", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-personalPage").get(0);
	this.it = stream.iterator();
}
 
Example 32
Project: CadalWorkspace   File: RecTagBookSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "RecTagBook") to the "Rec-recPageTagBook"
 * topic and keeps its single-stream iterator.
 */
private void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecTagBook");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-recPageTagBook", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPageTagBook").get(0);
	this.it = stream.iterator();
}
 
Example 33
Project: CadalWorkspace   File: RecPersonalPageUserSpout.java   View source code 6 votes vote down vote up
/**
 * Connects a consumer (group "RecPersonalPageUser") to the
 * "Rec-personalPageUser" topic and keeps its single-stream iterator.
 */
private void KafkaInit(){
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecPersonalPageUser");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more than one enables parallel
	// consumption. Autoboxing replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-personalPageUser", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-personalPageUser").get(0);
	this.it = stream.iterator();
}
 
Example 34
Project: CadalWorkspace   File: RecBookHomePageSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Rec-homePage" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecHomePage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Rec-homePage", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-homePage").get(0);
	this.it = stream.iterator();
}
 
Example 35
Project: CadalWorkspace   File: SearchClickSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Search-click" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "SearchClick");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Search-click", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Search-click").get(0);
	this.it = stream.iterator();
}
 
Example 36
Project: CadalWorkspace   File: SearchTermSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Search-query" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "SearchTerm");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Search-query", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Search-query").get(0);
	this.it = stream.iterator();
}
 
Example 37
Project: CadalWorkspace   File: PersonalReplySpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Personal-reply" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "PersonalReply");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Personal-reply", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Personal-reply").get(0);
	this.it = stream.iterator();
}
 
Example 38
Project: CadalWorkspace   File: PersonalVisitSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Personal-visit" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "PersonalVisit");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Personal-visit", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Personal-visit").get(0);
	this.it = stream.iterator();
}
 
Example 39
Project: CadalWorkspace   File: PersonalShareSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Personal-share" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "PersonalShare");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Personal-share", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Personal-share").get(0);
	this.it = stream.iterator();
}
 
Example 40
Project: CadalWorkspace   File: PersonalButtonSpout.java   View source code 6 votes vote down vote up
/**
 * Connects to Kafka through the ZooKeeper ensemble and initializes the
 * message iterator ({@code this.it}) for the "Personal-button" topic
 * using the old high-level consumer API.
 */
private void KafkaInit(){
	Properties props = new Properties();
	// ZooKeeper ensemble used by the old high-level consumer for group coordination.
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "PersonalButton");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	// NOTE(review): the connector reference is discarded when this method returns,
	// so it can never be shut down cleanly; consider storing it in a field.
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The value is the number of streams (threads) to consume the topic with;
	// one stream suffices here. Autoboxed literal replaces the deprecated new Integer(1).
	topicCountMap.put("Personal-button", 1);

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take the first (and only) one.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Personal-button").get(0);
	this.it = stream.iterator();
}