org.apache.storm.task.TopologyContext Java Examples

The following examples show how to use org.apache.storm.task.TopologyContext. Each example lists its source file, originating project, and license above the code.
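
As a quick orientation before the project-specific examples, here is a minimal, hypothetical bolt sketch (not taken from any of the projects below) showing the TopologyContext calls that recur throughout: identifying the current component and task, and registering metrics. The class and metric names are illustrative only.

import java.util.Map;

import org.apache.storm.metric.api.MultiCountMetric;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

public class TopologyContextSketchBolt extends BaseRichBolt {

    private OutputCollector collector;
    private transient MultiCountMetric eventCounter;

    @SuppressWarnings("rawtypes")
    @Override
    public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;

        // Identify where this bolt instance runs within the topology;
        // typically used for logging or for naming per-task state.
        String componentId = context.getThisComponentId();
        int taskId = context.getThisTaskId();

        // Register a metric that Storm reports every 10 seconds, mirroring the
        // registerMetric(...) calls in several of the examples below.
        this.eventCounter = context.registerMetric("events", new MultiCountMetric(), 10);
    }

    @Override
    public void execute(Tuple tuple) {
        eventCounter.scope("processed").incr();
        collector.ack(tuple);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // This sketch emits no tuples.
    }
}
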
Example #1
Source File: DbusAppenderBolt.java    From DBus with Apache License 2.0
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    this.context = context;

    if (!initialized) {
        this.topologyId = (String) conf.get(StormConfigKey.TOPOLOGY_ID);
        this.datasource = (String) conf.get(StormConfigKey.DATASOURCE);
        this.zkConnect = (String) conf.get(StormConfigKey.ZKCONNECT);
        this.zkRoot = Utils.buildZKTopologyPath(topologyId);
// Initialize the configuration files
        try {
            PropertiesHolder.initialize(zkConnect, zkRoot);
            GlobalCache.initialize(datasource);
            handlerManager = new BoltHandlerManager(buildProvider());
            reloadBolt(null);
            logger.info(getClass().getName() + " Initialized!");
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new InitializationException(e);
        }
        initialized = true;
    }
}
 
Example #2
Source File: DRPCQuerySubscriber.java    From bullet-storm with Apache License 2.0
/**
 * Exposed for testing.
 *
 * @param config The config containing the String function in {@link DRPCConfig#DRPC_FUNCTION}, the Storm configuration
 *               {@link Map} as {@link com.yahoo.bullet.storm.BulletStormConfig#STORM_CONFIG} and the Storm
 *               {@link TopologyContext} as {@link com.yahoo.bullet.storm.BulletStormConfig#STORM_CONTEXT}.
 * @param maxUnCommittedQueries The maximum number of queries that can be read without committing them.
 * @param collector The {@link DRPCOutputCollector} to use.
 * @param spout The {@link DRPCSpout} to use.
 */
DRPCQuerySubscriber(BulletConfig config, int maxUnCommittedQueries, DRPCOutputCollector collector, DRPCSpout spout) {
    super(maxUnCommittedQueries);

    this.collector = collector;
    this.spout = spout;
    emittedIDs = new HashMap<>();

    // Get the Storm Config that has all the relevant cluster settings and properties
    Map stormConfig = config.getRequiredConfigAs(DRPCConfig.STORM_CONFIG, Map.class);

    // Get the TopologyContext
    TopologyContext context = config.getRequiredConfigAs(DRPCConfig.STORM_CONTEXT, TopologyContext.class);

    // Wrap the collector in a SpoutOutputCollector (it just delegates to the underlying DRPCOutputCollector)
    SpoutOutputCollector spoutOutputCollector = new SpoutOutputCollector(collector);

    spout.open(stormConfig, context, spoutOutputCollector);
}
 
Example #3
Source File: DynamicSpoutTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Validates that we require the ConsumerIdPrefix configuration value,
 * and that if it is missing we throw an IllegalStateException during open().
 */
@Test
public void testMissingRequiredConfigurationConsumerIdPrefix() {
    // Create our config missing the consumerIdPrefix
    final Map<String, Object> config = getDefaultConfig(null, null);
    config.remove(SpoutConfig.VIRTUAL_SPOUT_ID_PREFIX);

    // Some mock stuff to get going
    final TopologyContext topologyContext = new MockTopologyContext();
    final MockSpoutOutputCollector spoutOutputCollector = new MockSpoutOutputCollector();

    // Create spout and call open
    final DynamicSpout spout = new DynamicSpout(config);

    final Throwable thrown = Assertions.assertThrows(IllegalStateException.class, () ->
        // When we call open, we expect illegal state exception about our missing configuration item
        spout.open(config, topologyContext, spoutOutputCollector)
    );

    spout.close();

    MatcherAssert.assertThat(thrown.getMessage(), Matchers.containsString(SpoutConfig.VIRTUAL_SPOUT_ID_PREFIX));
}
 
Example #4
Source File: StormRecorderTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Validate that we fall back gracefully for an invalid value.
 */
@Test
public void testOpen_customTimeWindowInvalid() {
    final boolean timeBucketCfg = true;

    // Create a config containing an invalid (non-numeric) time bucket value
    final Map<String, Object> config = new HashMap<>();
    config.put(SpoutConfig.METRICS_RECORDER_TIME_BUCKET, timeBucketCfg);

    // Create a mock TopologyContext
    final TopologyContext mockTopologyContext = mock(TopologyContext.class);

    // Create recorder and call open.
    final StormRecorder recorder = new StormRecorder();
    recorder.open(config, mockTopologyContext);

    // Validate we got called as expected.

    // Shouldn't have interacted with the taskId
    verify(mockTopologyContext, never()).getThisTaskIndex();

    // Should have registered the metrics with the default time window.
    verify(mockTopologyContext, times(1)).registerMetric(eq("GAUGES"), any(MultiAssignableMetric.class), eq(defaultTimeWindow));
    verify(mockTopologyContext, times(1)).registerMetric(eq("TIMERS"), any(MultiReducedMetric.class), eq(defaultTimeWindow));
    verify(mockTopologyContext, times(1)).registerMetric(eq("COUNTERS"), any(MultiCountMetric.class), eq(defaultTimeWindow));
}
 
Example #5
Source File: AlarmCreationBolt.java    From monasca-thresh with Apache License 2.0
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
  logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
  logger.info("Preparing");
  this.collector = collector;

  if (alarmDefDAO == null) {
    Injector.registerIfNotBound(AlarmDefinitionDAO.class, new PersistenceModule(dbConfig));
    alarmDefDAO = Injector.getInstance(AlarmDefinitionDAO.class);
  }

  if (alarmDAO == null) {
    Injector.registerIfNotBound(AlarmDAO.class, new PersistenceModule(dbConfig));
    alarmDAO = Injector.getInstance(AlarmDAO.class);
  }
}
 
Example #6
Source File: SpringComponent.java    From breeze with Apache License 2.0
/**
 * Instantiates the non-serializable state.
 */
protected void init(Map stormConf, TopologyContext topologyContext) {
	setId(topologyContext.getThisComponentId());

	try {
		method = inputSignature.findMethod(beanType);
		logger.info("{} uses {}", this, method.toGenericString());
	} catch (ReflectiveOperationException e) {
		throw new IllegalStateException("Unusable input signature", e);
	}

	if (spring == null)
		spring = SingletonApplicationContext.get(stormConf, topologyContext);

	spring.getBean(beanType);
	logger.debug("Bean lookup successful");
}
 
Example #7
Source File: RulesBolt.java    From streamline with Apache License 2.0
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    super.prepare(stormConf, context, collector);

    if (this.rulesProcessor == null) {
        throw new RuntimeException("rulesProcessor cannot be null");
    }
    ruleProcessorRuntime = new RuleProcessorRuntime(rulesProcessor, scriptType);

    Map<String, Object> config = Collections.emptyMap();
    if (stormConf != null) {
        config = new HashMap<>();
        config.put(Constants.CATALOG_ROOT_URL, stormConf.get(Constants.CATALOG_ROOT_URL));
        config.put(Constants.LOCAL_FILES_PATH, stormConf.get(Constants.LOCAL_FILES_PATH));
    }
    ruleProcessorRuntime.initialize(config);
}
 
Example #8
Source File: MetricsConsumer.java    From storm-crawler with Apache License 2.0
@Override
public void prepare(Map stormConf, Object registrationArgument,
        TopologyContext context, IErrorReporter errorReporter) {
    final String tableName = ConfUtils.getString(stormConf,
            Constants.SQL_METRICS_TABLE_PARAM_NAME, "metrics");
    query = "INSERT INTO "
            + tableName
            + " (srcComponentId, srcTaskId, srcWorkerHost, srcWorkerPort, name, value, timestamp)"
            + " values (?, ?, ?, ?, ?, ?, ?)";
    try {
        connection = SQLUtil.getConnection(stormConf);
    } catch (SQLException ex) {
        LOG.error(ex.getMessage(), ex);
        throw new RuntimeException(ex);
    }
}
 
Example #9
Source File: DynamicSpoutTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Test that you cannot open the DynamicSpout multiple times.
 */
@Test
public void testCannotOpenMultipleTimes() {
    // Define our ConsumerId prefix
    final String consumerIdPrefix = "TestDynamicSpout";

    // Create our config
    final Map<String, Object> config = getDefaultConfig(consumerIdPrefix, null);

    // Some mock storm topology stuff to get going
    final TopologyContext topologyContext = new MockTopologyContext();
    final MockSpoutOutputCollector spoutOutputCollector = new MockSpoutOutputCollector();

    // Create spout and call open
    final DynamicSpout spout = new DynamicSpout(config);
    spout.open(config, topologyContext, spoutOutputCollector);

    final Throwable thrown = Assertions.assertThrows(IllegalStateException.class, () ->
        spout.open(config, topologyContext, spoutOutputCollector)
    );

    spout.close();

    MatcherAssert.assertThat(thrown.getMessage(), Matchers.containsString("opened"));
}
 
Example #10
Source File: CheckpointSpout.java    From twister2 with Apache License 2.0
/**
 * Loads the last saved checkpoint state from persistent storage.
 */
private KeyValueState<String, CheckPointState> loadCheckpointState(Map<String, Object> conf,
                                                                   TopologyContext ctx) {
  String namespace = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
  KeyValueState<String, CheckPointState> state
      = (KeyValueState<String, CheckPointState>) StateFactory.getState(namespace, conf, ctx);
  if (state.get(TX_STATE_KEY) == null) {
    CheckPointState txState = new CheckPointState(-1, COMMITTED);
    state.put(TX_STATE_KEY, txState);
    state.commit();
    LOG.fine(() -> String.format("Initialized checkpoint spout state with txState %s", txState));
  } else {
    LOG.fine(() -> String.format("Got checkpoint spout state %s", state.get(TX_STATE_KEY)));
  }
  return state;
}
 
Example #11
Source File: DataPullingSpout.java    From DBus with Apache License 2.0
/**
 * Initializes the collectors.
 */
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {

    this.collector = collector;
    this.zkConnect = (String) conf.get(FullPullConstants.ZKCONNECT);
    this.topologyId = (String) conf.get(FullPullConstants.FULL_PULLER_TOPOLOGY_ID);
    this.dsName = (String) conf.get(FullPullConstants.DS_NAME);
    this.shardsProcessManager = new ShardsProcessManager();
    try {
        // Set the topology type, used to resolve the configuration path
        FullPullHelper.setTopologyType(FullPullConstants.FULL_PULLER_TYPE);
        loadRunningConf(null);
        FullPullHelper.updatePendingTasksToHistoryTable(dsName, FullPullConstants.FULLPULL_PENDING_TASKS_OP_PULL_TOPOLOGY_RESTART, consumer,
                commonProps.getProperty(FullPullConstants.FULL_PULL_MEDIANT_TOPIC));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new InitializationException();
    }
    logger.info("[pull spout] {} init complete!", topologyId);
}
 
Example #12
Source File: TestWordSpout.java    From incubator-heron with Apache License 2.0
@SuppressWarnings("rawtypes")
public void open(
    Map conf,
    TopologyContext context,
    SpoutOutputCollector acollector) {
  collector = acollector;
  words = new String[]{"nathan", "mike", "jackson", "golda", "bertels"};
  rand = new Random();
}
 
Example #13
Source File: KafkaProducerBolt.java    From DBus with Apache License 2.0
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.conf = stormConf;
    this.collector = collector;
    this.context = context;

    this.zkServers = (String) conf.get(Constants.ZOOKEEPER_SERVERS);
    this.extractorName = (String) conf.get(Constants.EXTRACTOR_TOPOLOGY_ID);
    this.extractorRoot = Constants.EXTRACTOR_ROOT + "/";
    reloadConfig(null);
}
 
Example #14
Source File: NiFiSpout.java    From localization_nifi with Apache License 2.0
@Override
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    this.spoutOutputCollector = spoutOutputCollector;
    this.queue = new LinkedBlockingQueue<>(1000);

    this.spoutReceiver = new NiFiSpoutReceiver();
    this.spoutReceiver.setDaemon(true);
    this.spoutReceiver.setName("NiFi Spout Receiver");
    this.spoutReceiver.start();
}
 
Example #15
Source File: IndexerBolt.java    From storm-crawler with Apache License 2.0
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    super.prepare(conf, context, collector);
    _collector = collector;

    this.eventCounter = context.registerMetric("SQLIndexer",
            new MultiCountMetric(), 10);

    this.tableName = ConfUtils.getString(conf, SQL_INDEX_TABLE_PARAM_NAME);

    this.conf = conf;
}
 
Example #16
Source File: AlarmThresholdingBoltTest.java    From monasca-thresh with Apache License 2.0
@BeforeMethod
protected void beforeMethod() {
  final StringBuilder builder = new StringBuilder();
  for (final String subExpression : subExpressions) {
    if (builder.length() > 0) {
      builder.append(" or ");
    }
    builder.append(subExpression);
  }
  final String expression = builder.toString();
  alarmExpression = new AlarmExpression(expression);
  alarmDefinition =
      new AlarmDefinition(tenantId, "Test CPU Alarm", "Description of Alarm",
          alarmExpression, "LOW", true, new ArrayList<String>());
  alarm = new Alarm(alarmDefinition, AlarmState.OK);
  subAlarms = new ArrayList<SubAlarm>(alarm.getSubAlarms());

  alarmEventForwarder = mock(AlarmEventForwarder.class);
  alarmDAO = mock(AlarmDAO.class);
  alarmDefinitionDAO = mock(AlarmDefinitionDAO.class);
  bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmDefinitionDAO, alarmEventForwarder);
  collector = mock(OutputCollector.class);
  final Map<String, String> config = new HashMap<>();
  final TopologyContext context = mock(TopologyContext.class);
  bolt.prepare(config, context, collector);

  for (SubAlarm subAlarm : subAlarms) {
    if (subAlarm.getExpression().getFunction().equals(AggregateFunction.LAST)) {
      lastSubAlarm = subAlarm;
    }
  }
  assertNotNull(lastSubAlarm, "Did not find a SubAlarm with Function of last");
  lastSubAlarm.setState(AlarmState.OK);
}
 
Example #17
Source File: BaseSpout.java    From DBus with Apache License 2.0
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.context = context;
    try {
        inner = new BaseSpoutInner(conf);
        inner.init();
        postOpen();
    } catch (Exception e) {
        logger.error("base spout open error", e);
    }
}
 
Example #18
Source File: StdOutIndexer.java    From storm-crawler with Apache License 2.0
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    super.prepare(conf, context, collector);
    _collector = collector;
}
 
Example #19
Source File: TestNiFiBolt.java    From nifi with Apache License 2.0
@Test
public void testTickTupleWhenExceedingBatchInterval() throws InterruptedException {
    final int batchInterval = 1;
    final NiFiBolt bolt = new TestableNiFiBolt(siteToSiteClientConfig, niFiDataPacketBuilder, tickFrequency)
            .withBatchInterval(batchInterval);

    // prepare the bolt
    Map conf = mock(Map.class);
    TopologyContext context = mock(TopologyContext.class);
    OutputCollector collector = mock(OutputCollector.class);
    bolt.prepare(conf, context, collector);

    // process a regular tuple
    Tuple dataTuple = MockTupleHelpers.mockTuple("nifi", "nifi");
    bolt.execute(dataTuple);

    // sleep long enough to pass the batch interval
    Thread.sleep(batchInterval + 1000);

    // process a tick tuple
    Tuple tickTuple = MockTupleHelpers.mockTickTuple();
    bolt.execute(tickTuple);

    // should have produced one data packet and acked it
    verify(niFiDataPacketBuilder, times(1)).createNiFiDataPacket(eq(dataTuple));
    verify(collector, times(1)).ack(eq(dataTuple));
}
 
Example #20
Source File: AlarmBolt.java    From nightwatch with GNU Lesser General Public License v3.0
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    this.alarmEPLManager = new AlarmEPLManager(stormConf, new CountDownLatch(1));
    try {
        Thread.sleep(3000);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
 
Example #21
Source File: IndexerBolt.java    From storm-crawler with Apache License 2.0
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    super.prepare(conf, context, collector);
    _collector = collector;
    if (indexName == null) {
        indexName = ConfUtils.getString(conf,
                IndexerBolt.ESIndexNameParamName, "content");
    }

    create = ConfUtils.getBoolean(conf, IndexerBolt.ESCreateParamName,
            false);
    pipeline = ConfUtils.getString(conf,
            IndexerBolt.ESIndexPipelineParamName);

    try {
        connection = ElasticSearchConnection.getConnection(conf, ESBoltType,
                this);
    } catch (Exception e1) {
        LOG.error("Can't connect to ElasticSearch", e1);
        throw new RuntimeException(e1);
    }

    this.eventCounter = context.registerMetric("ElasticSearchIndexer",
            new MultiCountMetric(), 10);

    this.perSecMetrics = context.registerMetric("Indexer_average_persec",
            new MultiReducedMetric(new PerSecondReducer()), 10);

    waitAck = CacheBuilder.newBuilder()
            .expireAfterWrite(60, TimeUnit.SECONDS).removalListener(this)
            .build();

    context.registerMetric("waitAck", () -> waitAck.size(), 10);
}
 
Example #22
Source File: TickSpout.java    From bullet-storm with Apache License 2.0
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = collector;
    tick = 0;
    lastTickTime = System.currentTimeMillis();
    id = new Random().nextInt();
}
 
Example #23
Source File: AckingTopology.java    From incubator-heron with Apache License 2.0
@SuppressWarnings("rawtypes")
public void open(
    Map conf,
    TopologyContext context,
    SpoutOutputCollector acollector) {
  collector = acollector;
  words = new String[]{"nathan", "mike", "jackson", "golda", "bertels"};
  rand = new Random();
}
 
Example #24
Source File: MySqlBinLogSpout.java    From storm-mysql with Apache License 2.0
@Override
public void open(Map conf, final TopologyContext context, final SpoutOutputCollector spoutOutputCollector) {

    Preconditions.checkNotNull(this.spoutConfig.getZkBinLogStateConfig(),
            "Zookeeper Config cannot be null");

    Preconditions.checkNotNull(this.spoutConfig.getMysqlConfig(),
            "Mysql Config cannot be null");

    LOGGER.info("Initiating MySql Spout with config {}", this.spoutConfig.toString());

    this.collector          = spoutOutputCollector;
    this.topologyInstanceId = context.getStormId();
    this.topologyName       = conf.get(Config.TOPOLOGY_NAME).toString();
    this.databaseName       = this.spoutConfig.getMysqlConfig().getDatabase();
    this.sidelineStrategy   = this.spoutConfig.getFailureConfig().getSidelineStrategy();
    this.sidelineStrategy.initialize(conf, context);

    initializeAndRegisterAllMetrics(context, this.spoutConfig.getMetricsTimeBucketSizeInSecs());

    txQueue = this.clientFactory.initializeBuffer(this.spoutConfig.getBufferCapacity());
    zkClient = this.clientFactory.getZkClient(conf, this.spoutConfig.getZkBinLogStateConfig());
    mySqlClient = this.clientFactory.getMySqlClient(this.spoutConfig.getMysqlConfig());
    openReplicatorClient = this.clientFactory.getReplicatorClient(mySqlClient, zkClient);

    begin();
}
 
Example #25
Source File: CollapsingSpout.java    From storm-crawler with Apache License 2.0
@Override
public void open(Map stormConf, TopologyContext context,
        SpoutOutputCollector collector) {
    maxStartOffset = ConfUtils.getInt(stormConf, ESMaxStartOffsetParamName,
            -1);
    super.open(stormConf, context, collector);
}
 
Example #26
Source File: LogProcessorStatBolt.java    From DBus with Apache License 2.0
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    this.context = context;
    inner = new LogProcessorKafkaWriteBoltInner(conf);
    init();
    List<Integer> taskIds = context.getComponentTasks("LogProcessorTransformBolt");
    logProcessorWindow = new LogProcessorWindow(10000, computeTaskIdsSum(taskIds));
    logger.info("LogProcessorStatBolt is started!");
}
 
Example #27
Source File: IRichSpoutDelegate.java    From incubator-heron with Apache License 2.0
@Override
@SuppressWarnings("rawtypes")
public void open(Map conf, org.apache.heron.api.topology.TopologyContext context,
                 SpoutOutputCollector collector) {
  topologyContextImpl = new TopologyContext(context);
  spoutOutputCollectorImpl = new SpoutOutputCollectorImpl(collector);
  delegate.open(conf, topologyContextImpl, spoutOutputCollectorImpl);
}
 
Example #28
Source File: BulkMessageWriterBolt.java    From metron with Apache License 2.0
/**
 * Used only for unit testing.
 */
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector, Clock clock) {
  prepare(stormConf, context, collector);
  BulkWriterComponent<JSONObject> bulkWriterComponent = new BulkWriterComponent<>(maxBatchTimeout, clock);
  bulkWriterComponent.addFlushPolicy(ackTuplesPolicy);
  setWriterComponent(bulkWriterComponent);
}
 
Example #29
Source File: ProfileSplitterBolt.java    From metron with Apache License 2.0
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
  super.prepare(stormConf, context, collector);
  this.collector = collector;
  this.parser = new JSONParser();
  this.router = new DefaultMessageRouter(getStellarContext());
}
 
Example #30
Source File: IRichBoltDelegate.java    From incubator-heron with Apache License 2.0
@Override
@SuppressWarnings("rawtypes")
public void prepare(
    Map<String, Object> conf,
    org.apache.heron.api.topology.TopologyContext context,
    org.apache.heron.api.bolt.OutputCollector collector) {
  topologyContextImpl = new TopologyContext(context);
  outputCollectorImpl = new OutputCollectorImpl(collector);
  delegate.prepare(conf, topologyContextImpl, outputCollectorImpl);
}