backtype.storm.task.WorkerTopologyContext Java Examples

The following examples show how to use backtype.storm.task.WorkerTopologyContext. Each example is taken from an open-source project; the originating project and source file are noted above each listing.
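
Most of the listings below obtain the WorkerTopologyContext inside CustomStreamGrouping.prepare and read topology metadata from it. As a quick orientation, the following sketch (a hypothetical ContextInspector helper, not taken from any of the projects below) gathers the accessor calls the examples rely on:

import java.util.List;

import backtype.storm.generated.GlobalStreamId;
import backtype.storm.task.WorkerTopologyContext;
import backtype.storm.tuple.Fields;

public class ContextInspector {

    // Prints the worker/topology metadata that the groupings below typically
    // read from the context during prepare().
    public static void describe(WorkerTopologyContext context, GlobalStreamId stream) {
        // Task ids of the component that emits on this stream
        List<Integer> sourceTasks = context.getComponentTasks(stream.get_componentId());
        // Declared output fields of the stream (used by field-aware groupings)
        Fields outFields = context.getComponentOutputFields(stream);
        // Task ids assigned to this worker process and the port it listens on
        List<Integer> workerTasks = context.getThisWorkerTasks();
        Integer workerPort = context.getThisWorkerPort();

        System.out.println("source tasks=" + sourceTasks
                + ", output fields=" + outFields
                + ", worker tasks=" + workerTasks
                + ", worker port=" + workerPort);
    }
}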
Example #1
Source File: HBaseStreamPartitioner.java    From opensoc-streaming with Apache License 2.0
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
  
  System.out.println("preparing HBaseStreamPartitioner for streamId " + stream.get_streamId());
  this.targetTasks = targetTasks;
  this.targetTasksSize = this.targetTasks.size();

  Configuration conf = HBaseConfiguration.create();
  try {
    hTable = new HTable(conf, tableName);
    refreshRegionInfo(tableName);

    System.out.println("regionStartKeyRegionNameMap: " + regionStartKeyRegionNameMap);

  } catch (IOException e) {
    e.printStackTrace();
  }

}
 
Example #2
Source File: NGrouping.java    From jstorm with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    targetTasks = new ArrayList<Integer>(targetTasks);
    Collections.sort(targetTasks);
    _outTasks = new ArrayList<Integer>();
    for (int i = 0; i < _n; i++) {
        _outTasks.add(targetTasks.get(i));
    }
}
 
Example #3
Source File: PartialKeyGrouping.java    From jstorm with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    targetTaskStats = new long[this.targetTasks.size()];
    if (this.fields != null) {
        this.outFields = context.getComponentOutputFields(stream);
    }
}
 
Example #4
Source File: IdentityGrouping.java    From jstorm with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> tasks) {
    List<Integer> sourceTasks = new ArrayList<Integer>(context.getComponentTasks(stream.get_componentId()));
    Collections.sort(sourceTasks);
    if (sourceTasks.size() != tasks.size()) {
        throw new RuntimeException("Can only do an identity grouping when source and target have same number of tasks");
    }
    tasks = new ArrayList<Integer>(tasks);
    Collections.sort(tasks);
    for (int i = 0; i < sourceTasks.size(); i++) {
        int s = sourceTasks.get(i);
        int t = tasks.get(i);
        _precomputed.put(s, Arrays.asList(t));
    }
}
 
Example #5
Source File: WorkerData.java    From jstorm with Apache License 2.0
protected List<AsyncLoopThread> setSerializeThreads() {
    WorkerTopologyContext workerTopologyContext = contextMaker.makeWorkerTopologyContext(sysTopology);
    int tasksNum = shutdownTasks.size();
    double workerRatio = ConfigExtension.getWorkerSerializeThreadRatio(stormConf);
    int workerSerialThreadNum = Utils.getInt(Math.ceil(workerRatio * tasksNum));
    if (workerSerialThreadNum > 0 && tasksNum > 0) {
        double average = tasksNum / (double) workerSerialThreadNum;
        for (int i = 0; i < workerSerialThreadNum; i++) {
            int startRunTaskIndex = Utils.getInt(Math.rint(average * i));
            serializeThreads.add(new AsyncLoopThread(new WorkerSerializeRunnable(
                    shutdownTasks, stormConf, workerTopologyContext, startRunTaskIndex, i)));
        }
    }
    return serializeThreads;
}
 
Example #6
Source File: WorkerData.java    From jstorm with Apache License 2.0
protected List<AsyncLoopThread> setDeserializeThreads() {
    WorkerTopologyContext workerTopologyContext = contextMaker.makeWorkerTopologyContext(sysTopology);
    int tasksNum = shutdownTasks.size();
    double workerRatio = ConfigExtension.getWorkerDeserializeThreadRatio(stormConf);
    int workerDeserThreadNum = Utils.getInt(Math.ceil(workerRatio * tasksNum));
    if (workerDeserThreadNum > 0 && tasksNum > 0) {
        double average = tasksNum / (double) workerDeserThreadNum;
        for (int i = 0; i < workerDeserThreadNum; i++) {
            int startRunTaskIndex = Utils.getInt(Math.rint(average * i));
            deserializeThreads.add(new AsyncLoopThread(new WorkerDeserializeRunnable(
                    shutdownTasks, stormConf, workerTopologyContext, startRunTaskIndex, i)));
        }
    }
    return deserializeThreads;
}
 
Example #7
Source File: WorkerData.java    From jstorm with Apache License 2.0
public void updateKryoSerializer() {
    WorkerTopologyContext workerTopologyContext = contextMaker.makeWorkerTopologyContext(sysTopology);
    KryoTupleDeserializer kryoTupleDeserializer = new KryoTupleDeserializer(stormConf, workerTopologyContext, workerTopologyContext.getRawTopology());
    KryoTupleSerializer kryoTupleSerializer = new KryoTupleSerializer(stormConf, workerTopologyContext.getRawTopology());

    atomKryoDeserializer.getAndSet(kryoTupleDeserializer);
    atomKryoSerializer.getAndSet(kryoTupleSerializer);
}
 
Example #8
Source File: ContextMaker.java    From jstorm with Apache License 2.0
public WorkerTopologyContext makeWorkerTopologyContext(StormTopology topology) {
    Map stormConf = workerData.getStormConf();
    String topologyId = workerData.getTopologyId();

    HashMap<String, Map<String, Fields>> componentToStreamToFields =
            workerData.generateComponentToStreamToFields(topology);

    return new WorkerTopologyContext(topology, stormConf, workerData.getTasksToComponent(),
            workerData.getComponentToSortedTasks(), componentToStreamToFields,
            topologyId, resourcePath, workerId, workerData.getPort(), workerTasks,
            workerData.getDefaultResources(), workerData.getUserResources());
}
 
Example #9
Source File: TileGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
	this.targetTasks = targetTasks;
	
	Fields tupleFields = context.getComponentOutputFields(stream);
	for(int i=0; i<tupleFields.size(); i++){
		if(tupleFields.get(i).equals(CVParticleSerializer.STREAMID)){
			streamIdIndex = i;
		}else if(tupleFields.get(i).equals(CVParticleSerializer.SEQUENCENR)){
			sequenceNrIndex = i;
		}
	}
}
 
Example #10
Source File: SupervicedGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
	List<Integer> sources = context.getComponentTasks(stream.get_componentId());
	for(int i=0; i<sources.size(); i++){
		idMapping.put(sources.get(i), targetTasks.get(i%targetTasks.size()));
	}
}
 
Example #11
Source File: RoutePhysicalGrouping.java    From eagle with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.outdegreeTasks = new ArrayList<>(targetTasks);
    shuffleGroupingDelegate = new ShuffleGrouping();
    shuffleGroupingDelegate.prepare(context, stream, targetTasks);
    globalGroupingDelegate = new GlobalGrouping();
    globalGroupingDelegate.prepare(context, stream, targetTasks);
    connectedTargetIds = new HashMap<>();
    for (Integer targetId : targetTasks) {
        String targetComponentId = context.getComponentId(targetId);
        connectedTargetIds.put(targetComponentId, targetId);
    }
    LOG.info("OutDegree components: [{}]", StringUtils.join(connectedTargetIds.values(), ","));
}
 
Example #12
Source File: ShuffleGrouping.java    From eagle with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    random = new Random();
    choices = new ArrayList<List<Integer>>(targetTasks.size());
    for (Integer i : targetTasks) {
        choices.add(Arrays.asList(i));
    }
    Collections.shuffle(choices, random);
    current = new AtomicInteger(0);
}
 
Example #13
Source File: ShardGrouping.java    From storm-solr with Apache License 2.0
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
  this.targetTasks = targetTasks;
  int numTasks = targetTasks.size();
  int numShards = initShardInfo(); // setup for doing shard to task mapping 
  if (numTasks % numShards != 0)
    throw new IllegalArgumentException("Number of tasks ("+numTasks+") should be a multiple of the number of shards ("+numShards+")!");

  this.tasksPerShard = numTasks/numShards;
  this.random = new UniformIntegerDistribution(0, tasksPerShard-1);
}
 
Example #14
Source File: HashRangeGrouping.java    From storm-solr with Apache License 2.0
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
  this.targetTasks = targetTasks;
  int numTasks = targetTasks.size();
  if (numTasks % numShards != 0)
    throw new IllegalArgumentException("Number of tasks ("+numTasks+") should be a multiple of the number of shards ("+numShards+")!");

  this.tasksPerShard = numTasks/numShards;
  this.random = new UniformIntegerDistribution(0, tasksPerShard-1);

  CompositeIdRouter docRouter =  new CompositeIdRouter();
  this.ranges = docRouter.partitionRange(numShards, docRouter.fullRange());
}
 
Example #15
Source File: MetadataGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId streamId, List<Integer> targetTasks) {
	this.targetTasks = targetTasks;
}
 
Example #16
Source File: FeatureGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
	this.targetTasks = targetTasks;
}
 
Example #17
Source File: ModGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
	this.targetTasks = targetTasks;
}
 
Example #18
Source File: DummyTileGrouping.java    From StormCV with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
	this.targetTasks = targetTasks;		
}
 
Example #19
Source File: TestStormCustomGroupingRouting.java    From eagle with Apache License 2.0
@Ignore
@Test
public void testRoutingByCustomGrouping() throws Exception {
    Config conf = new Config();
    conf.setNumWorkers(2); // use two worker processes
    TopologyBuilder topologyBuilder = new TopologyBuilder();
    topologyBuilder.setSpout("blue-spout", new BlueSpout()); // no parallelism hint, defaults to a single executor

    topologyBuilder.setBolt("green-bolt-1", new GreenBolt(0)).setNumTasks(2)
        .customGrouping("blue-spout", new CustomStreamGrouping() {
            int count = 0;
            List<Integer> targetTask;

            @Override
            public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
                this.targetTask = targetTasks;
            }

            @Override
            public List<Integer> chooseTasks(int taskId, List<Object> values) {
                if (count % 2 == 0) {
                    count++;
                    return Arrays.asList(targetTask.get(0));
                } else {
                    count++;
                    return Arrays.asList(targetTask.get(1));
                }
            }
        });

    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("mytopology", conf, topologyBuilder.createTopology());

    while (true) {
        try {
            Thread.sleep(1000);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
Example #20
Source File: CustomPartitionGrouping.java    From eagle with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = new ArrayList<>(targetTasks);
}
 
Example #21
Source File: IndexHashGrouping.java    From jstorm with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    _targets = targetTasks;
}
 
Example #22
Source File: GlobalGrouping.java    From jstorm with Apache License 2.0
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targets) {
    List<Integer> sorted = new ArrayList<Integer>(targets);
    Collections.sort(sorted);
    target = Arrays.asList(sorted.get(0));
}
 
Example #23
Source File: CustomStreamGrouping.java    From jstorm with Apache License 2.0
/**
 * Tells the stream grouping at runtime the tasks in the target bolt.
 * This information should be used in chooseTasks to determine the target tasks.
 * It also tells the grouping the metadata on the stream this grouping will be used on.
 */
void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks);
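
To make this contract concrete, here is a minimal round-robin grouping; the RoundRobinGrouping class is illustrative only and does not appear in jstorm. prepare() records the target task ids handed over by the framework, and chooseTasks() cycles through them:

import java.util.Arrays;
import java.util.List;

import backtype.storm.generated.GlobalStreamId;
import backtype.storm.grouping.CustomStreamGrouping;
import backtype.storm.task.WorkerTopologyContext;

public class RoundRobinGrouping implements CustomStreamGrouping {
    private List<Integer> targetTasks;
    private int next = 0;

    @Override
    public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
        // Remember the target tasks; chooseTasks() must pick from this list.
        this.targetTasks = targetTasks;
    }

    @Override
    public List<Integer> chooseTasks(int taskId, List<Object> values) {
        // Route each tuple to the next target task in turn.
        int chosen = targetTasks.get(next);
        next = (next + 1) % targetTasks.size();
        return Arrays.asList(chosen);
    }
}

Such a grouping would be attached to a bolt the same way as the anonymous grouping in Example #19, via setBolt(...).customGrouping("spout-id", new RoundRobinGrouping()).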
 
Example #24
Source File: BaseWorkerHook.java    From jstorm with Apache License 2.0
/**
 * This method is called when a worker is started
 *
 * @param stormConf The Storm configuration for this worker
 * @param context This object can be used to get information about this worker's place within the topology
 */
@Override
public void start(Map stormConf, WorkerTopologyContext context) {
    // NOOP
}
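
BaseWorkerHook provides no-op callbacks (its start method above is empty), so a custom hook only overrides what it needs. As a hedged sketch, a hook that records where a worker started might look like the following; LoggingWorkerHook is illustrative, not part of jstorm, and assumes BaseWorkerHook lives in the backtype.storm.hooks package:

import java.util.Map;

import backtype.storm.hooks.BaseWorkerHook;
import backtype.storm.task.WorkerTopologyContext;

public class LoggingWorkerHook extends BaseWorkerHook {

    @Override
    public void start(Map stormConf, WorkerTopologyContext context) {
        // Log the port this worker listens on and the task ids it was assigned.
        System.out.println("worker port=" + context.getThisWorkerPort()
                + ", tasks=" + context.getThisWorkerTasks());
    }
}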
 
Example #25
Source File: IWorkerHook.java    From jstorm with Apache License 2.0
/**
 * This method is called when a worker is started
 *
 * @param stormConf The Storm configuration for this worker
 * @param context This object can be used to get information about this worker's place within the topology
 */
void start(Map stormConf, WorkerTopologyContext context);
 
Example #26
Source File: CustomStreamGrouping.java    From incubator-heron with Apache License 2.0
/**
 * Tells the stream grouping at runtime the tasks in the target bolt.
 * This information should be used in chooseTasks to determine the target tasks.
 * <p>
 * It also tells the grouping the metadata on the stream this grouping will be used on.
 */
void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks);