Java Code Examples for backtype.storm.task.OutputCollector

The following are top-voted examples showing how to use backtype.storm.task.OutputCollector. They are extracted from open source projects. You can vote up the examples you find useful; votes are used to surface better examples.
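
Before the individual examples, here is a minimal sketch of the usual OutputCollector pattern: save the collector passed to prepare() in an instance variable, then emit and ack from execute(). The UppercaseBolt class and its "word" output field are hypothetical, shown only to illustrate the API against the classic backtype.storm (Storm 0.x) packages that the examples below use.

import java.util.Map;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

// Hypothetical bolt, for illustration only.
public class UppercaseBolt extends BaseRichBolt {
    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        // The collector is thread-safe; save it once and reuse it in execute().
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // Anchored emit: ties the output tuple to the input for replay on failure.
        this.collector.emit(input, new Values(input.getString(0).toUpperCase()));
        // Ack so Storm can retire the input tuple.
        this.collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}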
Example 1
Project: rb-bi   File: KafkaBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    //for backward compatibility.
    if(mapper == null) {
        this.mapper = new FieldNameBasedTupleToKafkaMapper<K,V>();
    }

    //for backward compatibility.
    if(topicSelector == null) {
        this.topicSelector = new DefaultTopicSelector((String) stormConf.get(TOPIC));
    }

    Map configMap = (Map) stormConf.get(KAFKA_BROKER_PROPERTIES);
    Properties properties = new Properties();
    properties.putAll(configMap);
    ProducerConfig config = new ProducerConfig(properties);
    producer = new Producer<K, V>(config);
    this.collector = collector;
}
 
Example 2
Project: storm-hbase-1.0.x   File: AbstractHBaseBolt.java
public void prepare(Map map, TopologyContext topologyContext, OutputCollector collector) {
    this.collector = collector;
    final Configuration hbConfig = HBaseConfiguration.create();
    
    Map<String, Object> conf = (Map<String, Object>)map.get(this.configKey);
    if(conf == null) {
        throw new IllegalArgumentException("HBase configuration not found using key '" + this.configKey + "'");
    }
    if(conf.get("hbase.rootdir") == null) {
        LOG.warn("No 'hbase.rootdir' value found in configuration! Using HBase defaults.");
    }
    for(String key : conf.keySet()) {
        hbConfig.set(key, String.valueOf(conf.get(key)));
    }

    this.hBaseClient = new HBaseClient(conf, hbConfig, tableName);
}
 
Example 3
Project: reddit-sentiment-storm   File: SummarizerBolt.java
public void prepare(Map conf, TopologyContext ctx, OutputCollector collector) {
	
	this.collector = collector;
	this.myId = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
	
	this.summary = new Summary();
	
	this.publisher = new ZkPublisher();
	try {
		this.publisher.init(conf);
	} catch (Exception e) {
		throw new RuntimeException(e);
	}
	
	this.lastPublishedTimestamp = 0;
}
 
Example 4
Project: spring-usc   File: KarmaBolt.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map globalConfig, TopologyContext arg1, OutputCollector outputCollector) {
	this.outputCollector = outputCollector;
	String karmaHomeDirectory = null;
	if(karmaHomeStrategy != null){

		karmaHomeStrategy.prepare(globalConfig);
		karmaHomeDirectory = karmaHomeStrategy.getKarmaHomeDirectory();	
	}
	karma = new BaseKarma();
	karma.setup(karmaHomeDirectory, (String)localConfig.get("karma.input.type"), (String)localConfig.get("model.uri"), (String)localConfig.get("model.file"), 
			(String)localConfig.get("base.uri"), (String)localConfig.get("context.uri"), 
			(String)localConfig.get("rdf.generation.root"), (String)localConfig.get("rdf.generation.selection"));
	
}
 
Example 5
Project: storm-solr   File: FusionBoltAction.java
public SpringBolt.ExecuteResult execute(Tuple input, OutputCollector collector) {
  String docId = input.getString(0);
  Map<String,Object> values = (Map<String,Object>)input.getValue(1);

  Map<String,Object> json = new HashMap<String,Object>(10);
  json.put("id", docId);
  List fieldList = new ArrayList();
  for (String field : values.keySet())
    fieldList.add(buildField(field, values.get(field)));
  json.put("fields", fieldList);

  try {
    fusionPipelineClient.postBatchToPipeline(Collections.singletonList(json));
  } catch (Exception e) {
    log.error("Failed to send doc "+docId+" to Fusion due to: "+e);
    throw new RuntimeException(e);
  }

  return SpringBolt.ExecuteResult.ACK;
}
 
Example 6
Project: storm-solr   File: SolrBoltAction.java
public ExecuteResult execute(Tuple input, OutputCollector outputCollector) {

    if (tuplesReceived != null) {
      tuplesReceived.inc();
    }

    String docId = input.getString(0);
    Object docObj = input.getValue(1);
    if (docId == null || docObj == null) {

      log.warn("Ignored tuple: "+input);

      return ExecuteResult.IGNORED; // nothing to index
    }

    try {
      return processInputDoc(docId, docObj);
    } catch (Exception exc) {
      log.error("Failed to process "+docId+" due to: "+exc);
      if (exc instanceof RuntimeException) {
        throw (RuntimeException)exc;
      } else {
        throw new RuntimeException(exc);
      }
    }
  }
 
Example 7
Project: aeolus   File: AbstractFileOutputBolt.java
@Override
public void prepare(@SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {
	String fileName = (String)stormConf.get(OUTPUT_FILE_NAME);
	if(fileName != null) {
		this.outputFileName = fileName;
	}
	
	String dirName = (String)stormConf.get(OUTPUT_DIR_NAME);
	if(dirName != null) {
		this.outputDirName = dirName;
	}
	
	try {
		new File(this.outputDirName).mkdirs();
		this.writer = new BufferedWriter(new FileWriter(this.outputDirName + File.separator + this.outputFileName));
	} catch(IOException e) {
		logger.error("Could not open output file <{}> for writing.", this.outputDirName + File.separator
			+ this.outputFileName);
	}
	
	this.collector = collector;
}
 
Example 8
Project: iot-masterclass   File: TruckHBaseBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context,
                    OutputCollector collector) {

  this.collector = collector;
  try {
    this.connection = HConnectionManager.createConnection(constructConfiguration());
    this.dangerousEventsTable = connection.getTable(DANGEROUS_EVENTS_TABLE_NAME);
    this.eventsCountTable = connection.getTable(EVENTS_COUNT_TABLE_NAME);
    this.eventsTable = connection.getTable(EVENTS_TABLE_NAME);

  } catch (Exception e) {
    String errMsg = "Error retrievinging connection and access to dangerousEventsTable";
    LOG.error(errMsg, e);
    throw new RuntimeException(errMsg, e);
  }
}
 
Example 9
Project: jstrom   File: RedisSinkBolt.java
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    this.collector = collector;
    
    GenericObjectPoolConfig pconf = new GenericObjectPoolConfig();
    pconf.setMaxWaitMillis(2000);
    pconf.setMaxTotal(1000);
    pconf.setTestOnBorrow(false);
    pconf.setTestOnReturn(false);
    pconf.setTestWhileIdle(true);
    pconf.setMinEvictableIdleTimeMillis(120000);
    pconf.setTimeBetweenEvictionRunsMillis(60000);
    pconf.setNumTestsPerEvictionRun(-1);
    
    pool = new JedisPool(pconf, redisHost, redisPort, timeout);
}
 
Example 10
Project: alfresco-apache-storm-demo   File: IndexerBolt.java
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    super.prepare(conf, context, collector);
    _collector = collector;

    indexName = ConfUtils.getString(conf, IndexerBolt.ESIndexNameParamName,
            "fetcher");
    docType = ConfUtils.getString(conf, IndexerBolt.ESDocTypeParamName,
            "doc");
    create = ConfUtils.getBoolean(conf, IndexerBolt.ESCreateParamName,
            false);

    try {
        connection = ElasticSearchConnection
                .getConnection(conf, ESBoltType);
    } catch (Exception e1) {
        LOG.error("Can't connect to ElasticSearch", e1);
        throw new RuntimeException(e1);
    }

    this.eventCounter = context.registerMetric("ElasticSearchIndexer",
            new MultiCountMetric(), 10);
}
 
Example 11
Project: jstorm-0.9.6.3-   File: MetricsConsumerBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context,
		OutputCollector collector) {
	try {
		_metricsConsumer = (IMetricsConsumer) Class.forName(
				_consumerClassName).newInstance();
	} catch (Exception e) {
		throw new RuntimeException(
				"Could not instantiate a class listed in config under section "
						+ Config.TOPOLOGY_METRICS_CONSUMER_REGISTER
						+ " with fully qualified name "
						+ _consumerClassName, e);
	}
	_metricsConsumer.prepare(stormConf, _registrationArgument, context,
			collector);
	_collector = collector;
}
 
Example 12
Project: aeolus   File: TimestampMerger.java
@Override
public void prepare(@SuppressWarnings("rawtypes") Map arg0, TopologyContext arg1, OutputCollector arg2) {
	// for each logical input stream (i.e., each producer bolt), we get an input partition for each of its tasks
	LinkedList<Integer> taskIds = new LinkedList<Integer>();
	for(Entry<GlobalStreamId, Grouping> inputStream : arg1.getThisSources().entrySet()) {
		taskIds.addAll(arg1.getComponentTasks(inputStream.getKey().get_componentId()));
	}
	
	logger.debug("Detected producer tasks: {}", taskIds);
	
	if(this.tsIndex != -1) {
		assert (this.tsAttributeName == null && this.tsExtractor == null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsIndex);
	} else if(this.tsAttributeName != null) {
		assert (this.tsExtractor == null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsAttributeName);
	} else {
		assert (this.tsExtractor != null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsExtractor);
	}
	
	this.wrappedBolt.prepare(arg0, arg1, arg2);
}
 
Example 13
Project: alfresco-apache-storm-demo   File: IndexerBolt.java
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {

    // get the implementation to use
    // and instantiate it
    String className = ConfUtils.getString(conf,
            "stormcrawler.indexer.class");

    if (StringUtils.isBlank(className)) {
        throw new RuntimeException("No configuration found for indexing");
    }

    try {
        final Class<BaseRichBolt> implClass = (Class<BaseRichBolt>) Class
                .forName(className);
        endpoint = implClass.newInstance();
    } catch (final Exception e) {
        throw new RuntimeException("Couldn't create " + className, e);
    }

    if (endpoint != null)
        endpoint.prepare(conf, context, collector);
}
 
Example 14
Project: aeolus   File: ForwardBoltTest.java
@Test
public void testExecute() {
	ForwardBolt bolt = new ForwardBolt(new Fields("dummy"));
	
	TestOutputCollector collector = new TestOutputCollector();
	bolt.prepare(null, null, new OutputCollector(collector));
	
	LinkedList<Tuple> tuples = new LinkedList<Tuple>();
	List<List<Object>> result = new LinkedList<List<Object>>();
	
	for(int i = 0; i < 3; ++i) {
		ArrayList<Object> attributes = new ArrayList<Object>();
		attributes.add(new Integer(i));
		
		tuples.add(mock(Tuple.class));
		when(tuples.get(i).getValues()).thenReturn(attributes);
		result.add(attributes);
		
		bolt.execute(tuples.get(i));
		Assert.assertEquals(tuples, collector.acked);
	}
	
	Assert.assertEquals(result, collector.output.get(Utils.DEFAULT_STREAM_ID));
}
 
Example 15
Project: cloudpelican-lsd   File: OutlierDetectionBolt.java
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    _collector = collector;
    liveFilters = new HashMap<String, Long>();
    jsonParser = new JsonParser();
    filterMaxTsAnalayzed = new HashMap<String, Long>();

    // Active analyzers
    analyzers = new ArrayList<ITimeserieAnalyzer>();
    analyzers.add(new NoopTimeserieAnalyzer());
    analyzers.add(new NormalDistributionTimeserieAnalyzer());
    analyzers.add(new LogNormalDistributionTimeserieAnalyzer());
    analyzers.add(new SimpleRegressionTimeserieAnalyzer());
    analyzers.add(new MovingAverageTimeserieAnalyzer());
    analyzers.add(new PolynomialRegressionTimeserieAnalyzer());
    analyzers.add(new IntervalInterceptorTimeserieAnalyzer());
    analyzers.add(new RandomWalkRegressionTimeserieAnalyzer());
    analyzers.add(new OneClassSVMTimeserieAnalyzer());
    analyzers.add(new TimeBucketSimpleRegressionTimeserieAnalyzer());
    analyzers.add(new MultipleLinearRegressionTimeserieAnalyzer());
    analyzers.add(new SimpleExponentialSmoothingTimeserieAnalyzer());

    // Start time
    startTime = now();
}
 
Example 16
Project: learn_jstorm   File: MetricsConsumerBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context,
		OutputCollector collector) {
	try {
		_metricsConsumer = (IMetricsConsumer) Class.forName(
				_consumerClassName).newInstance();
	} catch (Exception e) {
		throw new RuntimeException(
				"Could not instantiate a class listed in config under section "
						+ Config.TOPOLOGY_METRICS_CONSUMER_REGISTER
						+ " with fully qualified name "
						+ _consumerClassName, e);
	}
	_metricsConsumer.prepare(stormConf, _registrationArgument, context,
			(IErrorReporter) collector);
	_collector = collector;
}
 
Example 17
Project: aeolus   File: InputDebatcherTest.java
@Test
public void testExecuteNoBatching() {
	InputDebatcher bolt = new InputDebatcher(new ForwardBolt(new Fields("dummy")));
	
	TestOutputCollector collector = new TestOutputCollector();
	bolt.prepare(null, null, new OutputCollector(collector));
	
	List<Values> expectedResult = new LinkedList<Values>();
	
	final int numberOfTuples = 10;
	for(int i = 0; i < numberOfTuples; ++i) {
		Values value = new Values(new Integer(this.r.nextInt()));
		expectedResult.add(value);
		
		Tuple input = mock(Tuple.class);
		when(input.getValues()).thenReturn(value);
		
		bolt.execute(input);
	}
	
	Assert.assertEquals(expectedResult, collector.output.get(Utils.DEFAULT_STREAM_ID));
}
 
Example 18
Project: aeolus   File: ThroughputBolt.java
@Override
public void prepare(@SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {
	final Integer taskId = context.getThisTaskId();
	this.inputCounter = new BoltThroughputCounter(collector, this.reportStream, true, taskId);
	this.inputReporter = new BoltInputReportingThread(this.inputCounter, this.interval);
	this.inputReporter.start();
	
	if(!this.isSink) {
		ThroughputOutputCollector col = new ThroughputOutputCollector(collector, this.reportStream, taskId);
		collector = col;
		
		this.outputReporter = new BoltOutputReportingThread(col, this.interval);
		this.outputReporter.start();
	}
	
	this.userBolt.prepare(stormConf, context, collector);
}
 
Example 19
Project: cdh-storm   File: RollingCountBoltTest.java
@SuppressWarnings("rawtypes")
@Test
public void shouldEmitSomethingIfAtLeastOneObjectWasCountedAndTickTupleIsReceived() {
  // given
  Tuple normalTuple = mockNormalTuple(new Object());
  Tuple tickTuple = MockTupleHelpers.mockTickTuple();

  RollingCountBolt bolt = new RollingCountBolt();
  Map conf = mock(Map.class);
  TopologyContext context = mock(TopologyContext.class);
  OutputCollector collector = mock(OutputCollector.class);
  bolt.prepare(conf, context, collector);

  // when
  bolt.execute(normalTuple);
  bolt.execute(tickTuple);

  // then
  verify(collector).emit(any(Values.class));
}
 
Example 20
Project: aeolus   File: FileSinkBolt.java
@SuppressWarnings("unchecked")
@Override
public void prepare(@SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {
	HashMap<Object, Object> conf = new HashMap<Object, Object>(stormConf);
	String fileName = (String)conf.get(AbstractFileOutputBolt.OUTPUT_FILE_NAME);
	if(fileName == null) {
		conf.put(AbstractFileOutputBolt.OUTPUT_FILE_NAME, this.outputFileName);
	}
	
	String dirName = (String)conf.get(AbstractFileOutputBolt.OUTPUT_DIR_NAME);
	if(dirName == null && this.outputDirName != null) {
		conf.put(AbstractFileOutputBolt.OUTPUT_DIR_NAME, this.outputDirName);
	}
	
	super.prepare(conf, context, collector);
}
 
Example 21
Project: storm-scheduler   File: NothingPayloadBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 22
Project: storm-scheduler   File: NothingBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    // this object is used in the emit/execute method to compute the number of inter-node messages
    this.taskMonitor = new TaskMonitor(context.getThisTaskId());

    this.collector = collector;

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 23
Project: reddit-sentiment-storm   File: SentimentCalculatorBolt.java
public void prepare(Map conf, TopologyContext ctx, OutputCollector collector) {
	this.collector = collector;
	this.myId = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
	
	this.sentimentData = (Map<String, Long>) conf.get("sentimentData");
	if (this.sentimentData != null) {
		LOG.info("SentiCalcBolt " + myId + " has received sentimentData");
	}
}
 
Example 24
Project: storm-demos   File: CalculateBolt.java
public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, OutputCollector collector) {
	this.channelCountMap = new HashMap<String, Long>();
	this.tsdbMap = new HashMap<String, Long>();
	this.hbaseMap = new HashMap<String, Map<String,String>>();
	this.timestamp = System.currentTimeMillis()/1000/300+1;
	this.collector = collector;
}
 
Example 25
Project: storm-demos   File: CalculateBolt.java
private boolean emitTsdbMap(String tsdbStreamId, String transferStreamId, OutputCollector collector, 
		Map<String, Long> tsdbMap, Map<String, Long> channelCountMap){
	
	/**
	 * emit data in opentsdb-map to write-opentsdb-bolt as a stream
	 */
	
	for (String tsdbKey : tsdbMap.keySet()){
		String[] params = tsdbKey.split("#");
		String channel = params[0];
		String code = params[1];
		long timestamp = (Long.valueOf(params[2]))*300;
		
		Long total = channelCountMap.get(channel);
		if (total == null){
			logger.error("compute channel count wrong : " + channel);
			continue;
		}
		double num = (tsdbMap.get(tsdbKey));
		double ratio = num/total;
		
		//future : separate transfer stream from tsdb stream 
		collector.emit(tsdbStreamId, new Values(channel, code, timestamp, num, ratio));
		collector.emit(transferStreamId, new Values(channel, code, timestamp, num, ratio));
		logger.debug(channel + "-" + code + ":" + num + "-" + ratio);
	}
	return true;
}
 
Example 26
Project: storm-demos   File: CalculateBolt.java
private boolean emitHbaseMap(String streamId, OutputCollector collector,
		Map<String, Map<String,String>> hbaseMap){
	for (String rowKey : hbaseMap.keySet()) {
		Map<String, String> rowMap = hbaseMap.get(rowKey);
		for (String column : rowMap.keySet()) {
			collector.emit(streamId, new Values(rowKey, column, rowMap.get(column)));
			logger.debug("emit hbase data " + hbaseMap.toString());
		}
	}
	return true;
}
 
Example 27
Project: storm-demos   File: WriteHbaseBolt.java
public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, OutputCollector collector) {
	configure = HBaseConfiguration.create();
	configure.set("hbase.zookeeper.quorum", hbaseZookeeperQuorum);
	configure.set("hbase.cluster.distributed", hbaseClusterDistirbuted);
	configure.set("hbase.rootdir", hbaseRootdir);
	configure.set("hbase.master", hbaseMaster);
	try {
		table = new HTable(configure, Bytes.toBytes(hbaseTable));
		table.setAutoFlush(false, true);
	} catch (IOException e) {
		logger.error("init hbase table wrong !\n", e);
	}
}
 
Example 28
Project: hadooparchitecturebook   File: CalcMovingAvgBolt.java
@Override
public void prepare(Map config,
      TopologyContext topologyContext,
      OutputCollector collector) {
  outputCollector = collector;
  windowMap = new HashMap<String, LinkedList<Double>>();
}
 
Example 29
Project: miner   File: ParseBolt.java
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector){
	this._collector = collector;
	_dataScheme = MysqlUtil.getData();
	_regex = MysqlUtil.getRegex();
	_ru = new RedisUtil();
	_redis = _ru.getJedisInstance();
}
 
Example 30
Project: es-hadoop-v2.2.0   File: EsBolt.java
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    LinkedHashMap copy = new LinkedHashMap(conf);
    copy.putAll(boltConfig);

    StormSettings settings = new StormSettings(copy);
    flushOnTickTuple = settings.getStormTickTupleFlush();
    ackWrites = settings.getStormBoltAck();

    // trigger manual flush
    if (ackWrites) {
        settings.setProperty(ES_BATCH_FLUSH_MANUAL, Boolean.TRUE.toString());

        // align Bolt / es-hadoop batch settings
        numberOfEntries = settings.getStormBulkSize();
        settings.setProperty(ES_BATCH_SIZE_ENTRIES, String.valueOf(numberOfEntries));

        inflightTuples = new ArrayList<Tuple>(numberOfEntries + 1);
    }

    int totalTasks = context.getComponentTasks(context.getThisComponentId()).size();

    InitializationUtils.setValueWriterIfNotSet(settings, StormValueWriter.class, log);
    InitializationUtils.setBytesConverterIfNeeded(settings, StormTupleBytesConverter.class, log);
    InitializationUtils.setFieldExtractorIfNotSet(settings, StormTupleFieldExtractor.class, log);

    writer = RestService.createWriter(settings, context.getThisTaskIndex(), totalTasks, log);
}
 
Example 31
Project: RealEstate-Streaming   File: InsertBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context,
        OutputCollector collector) {

    this.collector = collector;
    try {
        conn = getConnection();
        checkTableSchema(conn);
        conn.setAutoCommit(true);
    } catch (SQLException e) {
        LOG.info("Unable to obtain connection");
        e.printStackTrace();
    }
}
 
Example 32
Project: RealEstate-Streaming   File: PhoenixJDBC.java
@Override
public void prepare(Map arg0, TopologyContext arg1, OutputCollector outputCollector) {
    LOG.info("The PersistAllEvents Flag is set to: " + persistAllEvents);
    this.outputCollector = outputCollector;
    try {
        conn = getConnection();
        conn.setAutoCommit(true);
        checkTableSchema(conn);
    } catch (SQLException e) {
        LOG.info("ISSUE OSB");
        e.printStackTrace();
    }
}
 
Example 33
Project: sourcevirtues-samples   File: MorphlinesBolt.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map stormConf, TopologyContext topologyContext, OutputCollector collector) {
    LOG.info("START prepare");

    this.collector = collector;

    File confFile = loadFile(topologyContext.getThisWorkerPort().toString());

    if (morphlineContext == null) {
        //TODO Make FaultTolerance configurable
        FaultTolerance faultTolerance = new FaultTolerance(true, false, null);

        morphlineContext = new MorphlineContext.Builder()
                .setExceptionHandler(faultTolerance)
                //.setMetricRegistry(SharedMetricRegistries.getOrCreate(customMorphlineId))
                .build();
    }

    Config override = ConfigFactory.parseMap(new HashMap<String, Object>());
    finalChild = new SimpleCommandCollector();
    morphline = new Compiler().compile(confFile, morphlineId, morphlineContext, finalChild, override);

    if (!EmptyUtils.nullOrEmpty(outputFieldNames)) {
        terminalBolt = false;
    }
}
 
Example 34
Project: sourcevirtues-samples   File: LoggingBolt.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    _collector = collector;

    if (fields == null) {
        fields = new String[] { CmnStormCons.TUPLE_FIELD_MSG };
    }
}
 
Example 35
Project: java   File: DeliveryCheckBolt.java
@Override
public void prepare(Map arg0, TopologyContext arg1, OutputCollector arg2) {
  _collector = arg2;
}
 
Example 36
Project: streaming_outliers   File: OutlierBolt.java
/**
 * Called when a task for this component is initialized within a worker on the cluster.
 * It provides the bolt with the environment in which the bolt executes.
 * <p/>
 * <p>This includes the:</p>
 *
 * @param stormConf The Storm configuration for this bolt. This is the configuration provided to the topology merged in with cluster configuration on this machine.
 * @param context   This object can be used to get information about this task's place within the topology, including the task id and component id of this task, input and output information, etc.
 * @param collector The collector is used to emit tuples from this bolt. Tuples can be emitted at any time, including the prepare and cleanup methods. The collector is thread-safe and should be saved as an instance variable of this bolt object.
 */
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    _collector = collector;
    sketchyOutlierAlgorithm = outlierConfig.getSketchyOutlierAlgorithm();
    sketchyOutlierAlgorithm.configure(outlierConfig);
    batchOutlierAlgorithm = outlierConfig.getBatchOutlierAlgorithm();
    batchOutlierAlgorithm.configure(outlierConfig);
    tsdbHandler = persistenceConfig.getDatabaseHandler();
    tsdbHandler.configure(persistenceConfig.getConfig());
}
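
The javadoc above makes two points worth demonstrating: the collector is thread-safe, and tuples can be emitted at any time, not only inside execute(). The sketch below is a hypothetical illustration of that, not part of OutlierBolt; it assumes a "heartbeat" stream that would also have to be declared via declarer.declareStream(...) in declareOutputFields().

@Override
public void prepare(Map stormConf, TopologyContext context, final OutputCollector collector) {
    _collector = collector;
    // Hypothetical: emit unanchored tuples from a background thread.
    // This is safe only because OutputCollector is thread-safe.
    Thread heartbeat = new Thread(new Runnable() {
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                collector.emit("heartbeat", new Values(System.currentTimeMillis()));
                try {
                    Thread.sleep(10000L);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    heartbeat.setDaemon(true);
    heartbeat.start();
}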
 
Example 37
Project: storm-demo   File: AbstractHdfsBolt.java
/**
 * Marked as final to prevent override. Subclasses should implement the doPrepare() method.
 * @param conf
 * @param topologyContext
 * @param collector
 */
public final void prepare(Map conf, TopologyContext topologyContext, OutputCollector collector) {
    this.writeLock = new Object();
    if (this.syncPolicy == null) throw new IllegalStateException("SyncPolicy must be specified.");
    if (this.rotationPolicy == null) throw new IllegalStateException("RotationPolicy must be specified.");
    if (this.fsUrl == null) {
        throw new IllegalStateException("File system URL must be specified.");
    }

    writers = new WritersMap(this.maxOpenFiles);

    this.collector = collector;
    this.fileNameFormat.prepare(conf, topologyContext);
    this.hdfsConfig = new Configuration();
    Map<String, Object> map = (Map<String, Object>) conf.get(this.configKey);
    if (map != null) {
        for (String key : map.keySet()) {
            this.hdfsConfig.set(key, String.valueOf(map.get(key)));
        }
    }

    try {
        HdfsSecurityUtil.login(conf, hdfsConfig);
        doPrepare(conf, topologyContext, collector);
    } catch (Exception e) {
        throw new RuntimeException("Error preparing HdfsBolt: " + e.getMessage(), e);
    }

    if (this.rotationPolicy instanceof TimedRotationPolicy) {
        startTimedRotationPolicy();
    }
}
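
Example 38 below shows a concrete doPrepare() implementation built on this template.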
 
Example 38
Project: storm-demo   File: SequenceFileBolt.java
@Override
public void doPrepare(Map conf, TopologyContext topologyContext, OutputCollector collector) throws IOException {
    LOG.info("Preparing Sequence File Bolt...");
    if (this.format == null) throw new IllegalStateException("SequenceFormat must be specified.");

    this.fs = FileSystem.get(URI.create(this.fsUrl), hdfsConfig);
    this.codecFactory = new CompressionCodecFactory(hdfsConfig);
}
 
Example 39
Project: Infrastructure   File: Process.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    super.prepare(stormConf, context, collector);
    this.collector = collector;
    input = new AlgInput();
    output = new AlgOutput();
    if (Naming.defaultInitializeAlgorithms(stormConf)) {
        alg = new Alg1();
        sendAlgorithmChangeEvent(Naming.NODE_PROCESS_ALG1);
        System.out.println("PROCESS DELAY INIT " 
            + PipelineOptions.getExecutorIntArgument(stormConf, getName(), "delay", 0));
    }
}
 
Example 40
Project: storm-topology-examples   File: MongoUpsertBolt.java
@Override
public void prepare(
        @SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {

    this.collector = collector;
    try {
        this.mongoDB = new MongoClient(mongoHost, mongoPort).getDB(mongoDbName);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}