Java Code Examples for backtype.storm.task.TopologyContext

The following examples show how to use backtype.storm.task.TopologyContext. They are extracted from open source projects.
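Most of the examples receive the TopologyContext in a spout's open() or a bolt's prepare() method and use it to find out where the task sits inside the running topology. Below is a minimal sketch of that pattern; the class name ContextAwareBolt, the metric name, and the printed message are illustrative and not taken from any of the projects listed here.

import java.util.Map;

import backtype.storm.metric.api.CountMetric;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;

public class ContextAwareBolt extends BaseRichBolt {

    private OutputCollector collector;
    private CountMetric tupleCount;
    private String myId;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;

        // Identify this task: component id plus task id, as several bolts below do.
        this.myId = context.getThisComponentId() + "-" + context.getThisTaskId();

        // How many tasks run this component, and which one is this?
        int numTasks = context.getComponentTasks(context.getThisComponentId()).size();
        int taskIndex = context.getThisTaskIndex();

        // Register a simple metric, reported every 60 seconds.
        this.tupleCount = context.registerMetric("tuple-count", new CountMetric(), 60);

        System.out.println(myId + " is task " + taskIndex + " of " + numTasks);
    }

    @Override
    public void execute(Tuple tuple) {
        tupleCount.incr();
        collector.ack(tuple);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // This sketch emits nothing.
    }
}

The same context methods (getThisComponentId, getThisTaskId, getThisTaskIndex, getComponentTasks, registerMetric) appear throughout the examples that follow.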
Example 1
Project: rb-bi   File: KafkaBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    //for backward compatibility.
    if(mapper == null) {
        this.mapper = new FieldNameBasedTupleToKafkaMapper<K,V>();
    }

    //for backward compatibility.
    if(topicSelector == null) {
        this.topicSelector = new DefaultTopicSelector((String) stormConf.get(TOPIC));
    }

    Map configMap = (Map) stormConf.get(KAFKA_BROKER_PROPERTIES);
    Properties properties = new Properties();
    properties.putAll(configMap);
    ProducerConfig config = new ProducerConfig(properties);
    producer = new Producer<K, V>(config);
    this.collector = collector;
}
 
Example 2
Project: rb-bi   File: KafkaConsumerMonitorMetrics.java
@Override
public void prepare(Map map, Object conf, TopologyContext topologyContext, IErrorReporter iErrorReporter) {
    Map<String, Object> config = (Map<String, Object>) conf;
    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
    client = CuratorFrameworkFactory.newClient(config.get("zookeeper").toString(), retryPolicy);
    client.start();

    try {
        if (client.checkExists().forPath("/consumers/rb-storm") == null) {
            client.create().creatingParentsIfNeeded().forPath("/consumers/rb-storm");
            System.out.println("Creating /consumers/rb-storm path ...");
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}
 
Example 3
Project: storm-scheduler   File: MonitoringMetricsToGraphiteWriter.java
@Override
public void prepare(Map stormConf, Object registrationArgument, TopologyContext context,
                    final IErrorReporter errorReporter) {

    LOG.info("Initializing the " + getClass().getCanonicalName());

    this.graphitePath = getConfiguredGraphitBasePath(stormConf);
    if (!stormConf.containsKey(CONF_MONITORING_GRAPHITE_SERVER)) {
        throw new RuntimeException("Missing graphite configuration. You need to specify the server and the port " +
                "under which the graphite server can be reached. Example: " + CONF_MONITORING_GRAPHITE_SERVER
                + "=graphite.yourdomain.com:2003");
    }
    this.graphiteConnection = (String) stormConf.get(CONF_MONITORING_GRAPHITE_SERVER);

    getGraphiteWriter();

    this.metricsToProcess = new HashMap<>();
    this.metricsToProcess.put(MonitoringMetricsCollectionHook.METRIC_COMPLETE_LATENCY, new AverageAggregator());
    this.metricsToProcess.put(MonitoringMetricsCollectionHook.METRIC_THROUGHPUT, new SumAggregator());
    this.metricsToProcess.put(MonitoringMetricsCollectionHook.METRIC_WORKER_CPU_LOAD, new SumAggregator());
    this.metricsToProcess.put(MonitoringMetricsCollectionHook.METRIC_WORKER_NETWORK_BYTES, new SumAggregator());
}
 
Example 4
Project: storm-hbase-1.0.x   File: AbstractHBaseBolt.java
public void prepare(Map map, TopologyContext topologyContext, OutputCollector collector) {
    this.collector = collector;
    final Configuration hbConfig = HBaseConfiguration.create();
    
    Map<String, Object> conf = (Map<String, Object>)map.get(this.configKey);
    if(conf == null) {
        throw new IllegalArgumentException("HBase configuration not found using key '" + this.configKey + "'");
    }
    if(conf.get("hbase.rootdir") == null) {
        LOG.warn("No 'hbase.rootdir' value found in configuration! Using HBase defaults.");
    }
    for(String key : conf.keySet()) {
        hbConfig.set(key, String.valueOf(conf.get(key)));
    }

    this.hBaseClient = new HBaseClient(conf, hbConfig, tableName);
}
 
Example 5
Project: fiware-sinfonier   File: ConditionalFields.java
@Override
public void prepare(Map stormConf, TopologyContext context) {
    mapper = new ObjectMapper();
    this.configParams = (JSONObject) new JSONObject(this.config).get("params");
    try {
        this.configMap = mapper.readValue(this.config, new TypeReference<Map<String, Object>>() {});
        mapParams = (Map<String, Object>) configMap.get("params");
    } catch (IOException e) {
        e.printStackTrace();
    }

    field = getParam("field1", true);
    operator = getParam("operator", true);
    field2 = getParam("field2", true);
    entity = getParam("entity");
}
 
Example 6
Project: fiware-sinfonier   File: Conditional.java
@Override
public void prepare(Map stormConf, TopologyContext context) {

    mapper = new ObjectMapper();
    this.configParams = (JSONObject) new JSONObject(this.config).get("params");
    try {
        this.configMap = mapper.readValue(this.config, new TypeReference<Map<String, Object>>() {});
        mapParams = (Map<String, Object>) configMap.get("params");
    } catch (IOException e) {
        e.printStackTrace();
    }

    field = getParam("field", true);
    operator = (String) getParam("operator", true);
    value = (String) getParam("value", true);
    entity = (String) getParam("value");

    if (operator.equals("RegexExpression")) {
        pattern = Pattern.compile(value, Pattern.DOTALL);
    }
}
 
Example 7
Project: reddit-sentiment-storm   File: SubredditCommentsSpout.java
public void open(Map conf, TopologyContext ctx, SpoutOutputCollector collector) {
	this.collector = collector;
	this.history = new ProcessedHistory();
	this.subreddit = (String) conf.get("subreddit");
	
	try {
		this.subredditCommentsfeedURL = new URL((String)conf.get("feedURL"));
	} catch (MalformedURLException e) {
		throw new RuntimeException(e);
	}
	LOG.info("Spout subreddit:{} feedURL:{}", this.subreddit, this.subredditCommentsfeedURL);
	
	if (conf.containsKey("sentimentData")) {
		LOG.info("Spouts can also see sentimentData");
	}
}
 
Example 8
Project: reddit-sentiment-storm   File: SummarizerBolt.java
public void prepare(Map conf, TopologyContext ctx, OutputCollector collector) {
	
	this.collector = collector;
	this.myId = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
	
	this.summary = new Summary();
	
	this.publisher = new ZkPublisher();
	try {
		this.publisher.init(conf);
	} catch (Exception e) {
		throw new RuntimeException(e);
	}
	
	this.lastPublishedTimestamp = 0;
}
 
Example 9
Project: hadooparchitecturebook   File: StockTicksSpout.java
/**
 * Open file with stock tick data and read into List object.
 */
@Override
public void open(Map map,
                 TopologyContext context,
                 SpoutOutputCollector outputCollector) {
  this.outputCollector = outputCollector;

  try {
    ticks = IOUtils.readLines(
        ClassLoader.getSystemResourceAsStream("NASDAQ_daily_prices_A.csv"),
        Charset.defaultCharset().name());
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example 10
Project: miner   File: BeginSpout.java
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
	try {
		_collector = collector;

		ru = new RedisUtil();
		redis = ru.getJedisInstance();

		SchedulerFactory schedulerFactory = new StdSchedulerFactory();
		Scheduler scheduler = schedulerFactory.getScheduler();
		_qManager = new QuartzManager();
		_qManager.setScheduler(scheduler);
		PlatformUtils.initRegisterProject(_qManager);
		scheduler.start();

		//init Hbase tables
		CreateTable.initHbaseTable();
	} catch (Exception ex) {
		logger.error("error:"+MySysLogger.formatException(ex));
		ex.printStackTrace();
	}
}
 
Example 11
Project: erad2016-streamprocessing   File: TwitterSpout.java
public void open(Map conf, TopologyContext context,
                 SpoutOutputCollector collector) {
    queue = new LinkedBlockingQueue<String>(1000);
    this.collector = collector;

    StatusListener listener = new StatusListener() {
        public void onStatus(Status status) {
            queue.offer(TwitterObjectFactory.getRawJSON(status));
        }

        public void onDeletionNotice(StatusDeletionNotice sdn) { }
        public void onTrackLimitationNotice(int i) { }
        public void onScrubGeo(long l, long l1) { }
        public void onStallWarning(StallWarning stallWarning) { }
        public void onException(Exception e) { }
    };

    ConfigurationBuilder cb = new ConfigurationBuilder();
    cb.setJSONStoreEnabled(true);

    TwitterStreamFactory factory = new TwitterStreamFactory(cb.build());
    twitterStream = factory.getInstance();
    twitterStream.addListener(listener);
    twitterStream.filter(new FilterQuery().language("en").track("trump"));
}
 
Example 12
Project: java   File: DeliveryCheckSpout.java
@Override
public void open(Map arg0, TopologyContext arg1, SpoutOutputCollector arg2) {
  /*
   * FileReader fileReader; try { fileReader = new
   * FileReader("C:/proj/Migration/apache-storm-0.9.4/apache-storm-0.9.4/logs/InPut.txt");
   * bufferedReader =new BufferedReader(fileReader); _collector= arg2; } catch
   * (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); }
   */

  try {
    _collector = arg2;
    setUpConnection();
  } catch (JMSException e) {
    LOGGER.error(e);
  }

}
 
Example 13
Project: Practical-Real-time-Processing-and-Analytics   File: FileSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
	//fileName = (String) conf.get("file");
	this.collector = collector;

	try {
		reader = new BufferedReader(new FileReader(fileName));
	} catch (Exception e) {
		throw new RuntimeException(e);
	}
}
 
Example 14
Project: rb-bi   File: TridentKafkaEmitter.java
public TridentKafkaEmitter(Map conf, TopologyContext context, TridentKafkaConfig config, String topologyInstanceId) {
    _config = config;
    _topologyInstanceId = topologyInstanceId;
    _connections = new DynamicPartitionConnections(_config, KafkaUtils.makeBrokerReader(conf, _config));
    _topologyName = (String) conf.get(Config.TOPOLOGY_NAME);
    _kafkaOffsetMetric = new KafkaUtils.KafkaOffsetMetric(_config.topic, _connections);
    context.registerMetric("kafkaOffset", _kafkaOffsetMetric, _config.metricsTimeBucketSizeInSecs);
    _kafkaMeanFetchLatencyMetric = context.registerMetric("kafkaFetchAvg", new MeanReducer(), _config.metricsTimeBucketSizeInSecs);
    _kafkaMaxFetchLatencyMetric = context.registerMetric("kafkaFetchMax", new MaxMetric(), _config.metricsTimeBucketSizeInSecs);
}
 
Example 15
Project: storm-scheduler   File: LoggingMetricsConsumer.java
@Override
public void prepare(Map stormConf,
                    Object registrationArgument,
                    TopologyContext context,
                    IErrorReporter errorReporter) {
    this.stormId = context.getStormId();
    this.extemptMetrics = new HashSet<>();

    this.extemptMetrics.add(SchedulingMetricsCollectionHook.METRIC_EMITTED_MESSAGES);
}
 
Example 16
Project: storm-scheduler   File: UuidSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    MessageDigest md;
    int counter;

    this.thisTaskIndex = context.getThisTaskIndex();
    this.numSpouts = context.getComponentTasks(context.getThisComponentId()).size();
    counter = 0;

    try {
        md = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException("Couldn't find MD5 algorithm.", e);
    }

    // we want to create a message that hashes to exactly one of the following spouts. As there are the same number
    // of bolts on each level as there are spouts, we just keep looking until we find a uuid whose hash code would
    // be assigned to the id of this spout (if it were a bolt).
    do {
        if (++counter > 1000 * 1000) {
            throw new RuntimeException("Unable to generate required UUID in 1 mio tries.");
        }
        byte[] bytes = md.digest(UUID.randomUUID().toString().getBytes());
        this.uuid = new String(bytes);
    } while (this.uuid.hashCode() % this.numSpouts != this.thisTaskIndex);

    this.collector = collector;

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 17
Project: storm-scheduler   File: NothingPayloadBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 18
Project: storm-scheduler   File: NothingBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    // this object is used in the emit/execute method to compute the number of inter-node messages
    this.taskMonitor = new TaskMonitor(context.getThisTaskId());

    this.collector = collector;

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 19
Project: storm-scheduler   File: RandomSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = collector;
    this.rnd = new Random();

    if (!this.disableAniello) {
        // this will create/configure the worker monitor once per worker
        WorkerMonitor.getInstance().setContextInfo(context);

        // this object is used in the emit/execute method to compute the number of inter-node messages
        this.taskMonitor = new TaskMonitor(context.getThisTaskId());
    }
}
 
Example 20
Project: fiware-sinfonier   File: BaseSinfonierSpout.java
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public final void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    mapper = new ObjectMapper();
    _collector = collector;
    this.configParams = (JSONObject) new JSONObject(this.config).get("params");
    try {
        this.configMap = mapper.readValue(this.config, new TypeReference<Map<String, Object>>() {});
        mapParams = (Map<String, Object>) configMap.get("params");
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.useropen();
}
 
Example 21
Project: fiware-sinfonier   File: Exists.java
@Override
public void prepare(Map stormConf, TopologyContext context) {
    mapper = new ObjectMapper();
    this.configParams = (JSONObject) new JSONObject(this.config).get("params");
    try {
        this.configMap = mapper.readValue(this.config, new TypeReference<Map<String, Object>>() {});
        mapParams = (Map<String, Object>) configMap.get("params");
    } catch (IOException e) {
        e.printStackTrace();
    }

    field = getParam("field", true);
}
 
Example 22
Project: fiware-sinfonier   File: BaseSinfonierBolt.java
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public final void prepare(Map stormConf, TopologyContext context) {
    mapper = new ObjectMapper();
    this.configParams = (JSONObject) new JSONObject(this.config).get("params");
    try {
        this.configMap = mapper.readValue(this.config, new TypeReference<Map<String, Object>>() {});
        mapParams = (Map<String, Object>) configMap.get("params");
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.userprepare();
}
 
Example 23
Project: reddit-sentiment-storm   File: SentimentCalculatorBolt.java
public void prepare(Map conf, TopologyContext ctx, OutputCollector collector) {
	this.collector = collector;
	this.myId = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
	
	this.sentimentData = (Map<String, Long>) conf.get("sentimentData");
	if (this.sentimentData != null) {
		LOG.info("SentiCalcBolt " + myId + " has received sentimentData");
	}
}
 
Example 24
Project: storm-demos   File: CalculateBolt.java
public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, OutputCollector collector) {
	this.channelCountMap = new HashMap<String, Long>();
	this.tsdbMap = new HashMap<String, Long>();
	this.hbaseMap = new HashMap<String, Map<String,String>>();
	this.timestamp = System.currentTimeMillis()/1000/300+1;
	this.collector = collector;
}
 
Example 25
Project: storm-demos   File: WriteHbaseBolt.java
public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, OutputCollector collector) {
	configure = HBaseConfiguration.create();
	configure.set("hbase.zookeeper.quorum", hbaseZookeeperQuorum);
	configure.set("hbase.cluster.distributed", hbaseClusterDistirbuted);
	configure.set("hbase.rootdir", hbaseRootdir);
	configure.set("hbase.master", hbaseMaster);
	try {
		table = new HTable(configure, Bytes.toBytes(hbaseTable));
		table.setAutoFlush(false, true);
	} catch (IOException e) {
		logger.error("init hbase table wrong !\n", e);
	}
}
 
Example 26
Project: hadooparchitecturebook   File: CalcMovingAvgBolt.java
@Override
public void prepare(Map config,
      TopologyContext topologyContext,
      OutputCollector collector) {
  outputCollector = collector;
  windowMap = new HashMap<String, LinkedList<Double>>();
}
 
Example 27
Project: miner   File: ParseBolt.java
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector){
	this._collector = collector;
	_dataScheme = MysqlUtil.getData();
	_regex = MysqlUtil.getRegex();
	_ru = new RedisUtil();
	_redis = _ru.getJedisInstance();
}
 
Example 28
Project: storm-demo   File: SequenceFileBolt.java
@Override
public void doPrepare(Map conf, TopologyContext topologyContext, OutputCollector collector) throws IOException {
    LOG.info("Preparing Sequence File Bolt...");
    if (this.format == null) throw new IllegalStateException("SequenceFormat must be specified.");

    this.fs = FileSystem.get(URI.create(this.fsUrl), hdfsConfig);
    this.codecFactory = new CompressionCodecFactory(hdfsConfig);
}
 
Example 29
Project: Get-ENVS   File: RandomSentenceSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
  //Set the instance collector to the one passed in
  _collector = collector;
  //For randomness
  _rand = new Random();
}
 
Example 30
Project: es-hadoop-v2.2.0   File: EsSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = collector;

    LinkedHashMap copy = new LinkedHashMap(conf);
    copy.putAll(spoutConfig);

    StormSettings settings = new StormSettings(copy);

    InitializationUtils.setValueReaderIfNotSet(settings, JdkValueReader.class, log);

    ackReads = settings.getStormSpoutReliable();

    if (ackReads) {
        inTransitQueue = new LinkedHashMap<Object, Object>();
        replayQueue = new LinkedList<Object[]>();
        retries = new HashMap<Object, Integer>();
        queueSize = settings.getStormSpoutReliableQueueSize();
        tupleRetries = settings.getStormSpoutReliableRetriesPerTuple();
        tupleFailure = settings.getStormSpoutReliableTupleFailureHandling();
    }

    int totalTasks = context.getComponentTasks(context.getThisComponentId()).size();
    int currentTask = context.getThisTaskIndex();

    // match the partitions based on the current topology
    List<PartitionDefinition> partitions = RestService.findPartitions(settings, log);
    List<PartitionDefinition> assigned = RestService.assignPartitions(partitions, currentTask, totalTasks);
    iterator = RestService.multiReader(settings, assigned, log);
}
 
Example 31
Project: es-hadoop-v2.2.0   File: EsBolt.java
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    LinkedHashMap copy = new LinkedHashMap(conf);
    copy.putAll(boltConfig);

    StormSettings settings = new StormSettings(copy);
    flushOnTickTuple = settings.getStormTickTupleFlush();
    ackWrites = settings.getStormBoltAck();

    // trigger manual flush
    if (ackWrites) {
        settings.setProperty(ES_BATCH_FLUSH_MANUAL, Boolean.TRUE.toString());

        // align Bolt / es-hadoop batch settings
        numberOfEntries = settings.getStormBulkSize();
        settings.setProperty(ES_BATCH_SIZE_ENTRIES, String.valueOf(numberOfEntries));

        inflightTuples = new ArrayList<Tuple>(numberOfEntries + 1);
    }

    int totalTasks = context.getComponentTasks(context.getThisComponentId()).size();

    InitializationUtils.setValueWriterIfNotSet(settings, StormValueWriter.class, log);
    InitializationUtils.setBytesConverterIfNeeded(settings, StormTupleBytesConverter.class, log);
    InitializationUtils.setFieldExtractorIfNotSet(settings, StormTupleFieldExtractor.class, log);

    writer = RestService.createWriter(settings, context.getThisTaskIndex(), totalTasks, log);
}
 
Example 32
Project: storm-demo   File: SimpleFileNameFormat.java
@SuppressWarnings("unchecked")
@Override
public void prepare(Map conf, TopologyContext topologyContext) {
    this.componentId = topologyContext.getThisComponentId();
    this.taskId = topologyContext.getThisTaskId();
    try {
        this.host = Utils.localHostname();
    } catch (UnknownHostException e) {
        throw new RuntimeException(e);
    }
}
 
Example 33
Project: es-hadoop-v2.2.0   File: TestSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = new InterceptingSpoutOutputCollector(collector);

    if (spout != null) {
        spout.open(conf, context, this.collector);
    }
}
 
Example 34
Project: RealEstate-Streaming   File: InsertBolt.java
@Override
public void prepare(Map stormConf, TopologyContext context,
                    OutputCollector collector) {

    this.collector = collector;
    try {
        conn = getConnection();
        checkTableSchema(conn);
        conn.setAutoCommit(true);
    } catch (SQLException e) {
        LOG.info("Unable to obtain connection");
        e.printStackTrace();
    }
}
 
Example 35
Project: RealEstate-Streaming   File: PhoenixJDBC.java
@Override
public void prepare(Map arg0, TopologyContext arg1, OutputCollector outputCollector) {
    LOG.info("The PersistAllEvents Flag is set to: " + persistAllEvents);
    this.outputCollector = outputCollector;
    try {
        conn = getConnection();
        conn.setAutoCommit(true);
        checkTableSchema(conn);
    } catch (SQLException e) {
        LOG.info("ISSUE OSB");
        e.printStackTrace();
    }
}
 
Example 36
Project: ignite-book-code-samples   File: RandomSentenceSpout.java
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    //Set the instance collector to the one passed in
    _collector = collector;
    //For randomness
    _rand = new Random();
}
 
Example 37
Project: sourcevirtues-samples   File: MorphlinesBolt.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map stormConf, TopologyContext topologyContext, OutputCollector collector) {
    LOG.info("START prepare");

    this.collector = collector;

    File confFile = loadFile(topologyContext.getThisWorkerPort().toString());

    if (morphlineContext == null) {
        //TODO Make FaultTolerance configurable
        FaultTolerance faultTolerance = new FaultTolerance(true, false, null);

        morphlineContext = new MorphlineContext.Builder()
                .setExceptionHandler(faultTolerance)
                //.setMetricRegistry(SharedMetricRegistries.getOrCreate(customMorphlineId))
                .build();
    }

    Config override = ConfigFactory.parseMap(new HashMap<String, Object>());
    finalChild = new SimpleCommandCollector();
    morphline = new Compiler().compile(confFile, morphlineId, morphlineContext, finalChild, override);

    if (!EmptyUtils.nullOrEmpty(outputFieldNames)) {
        terminalBolt = false;
    }
}
 
Example 38
Project: sourcevirtues-samples   File: LoggingBolt.java
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    _collector = collector;

    if (fields == null) {
        fields = new String[] { CmnStormCons.TUPLE_FIELD_MSG };
    }
}
 
Example 39
Project: sourcevirtues-samples   File: RandomSentenceTestSpout.java
@SuppressWarnings("rawtypes")
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    _collector = collector;
    _rand = new Random();

    genSentences();
}