org.apache.storm.generated.TopologyInfo Java Examples

The following examples show how to use org.apache.storm.generated.TopologyInfo. You can vote up the examples you find useful or vote down the ones you don't, and you can visit the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: StormAtlasHook.java    From atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the Atlas entity that represents a Storm topology.
 *
 * @param topologyInfo runtime information reported by Storm for the topology
 * @param stormConf    topology configuration (not read by this method)
 * @return a new {@link AtlasEntity} of the Storm-topology type, populated with
 *         id, name, owner, start time and metadata namespace attributes
 */
private AtlasEntity createTopologyInstance(TopologyInfo topologyInfo, Map stormConf) {
    AtlasEntity topologyEntity = new AtlasEntity(StormDataTypes.STORM_TOPOLOGY.getName());

    // Storm may report an empty owner; substitute the well-known placeholder.
    String owner = topologyInfo.get_owner();
    if (StringUtils.isEmpty(owner)) {
        owner = ANONYMOUS_OWNER;
    }

    topologyEntity.setAttribute("id", topologyInfo.get_id());
    topologyEntity.setAttribute(AtlasClient.NAME, topologyInfo.get_name());
    topologyEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name());
    topologyEntity.setAttribute(AtlasClient.OWNER, owner);
    topologyEntity.setAttribute("startTime", new Date());
    topologyEntity.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getMetadataNamespace());

    return topologyEntity;
}
 
Example #2
Source File: StormAtlasHook.java    From atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Bridges the Storm notification to the real hook implementation, which lives
 * in the Atlas plugin classloader.
 *
 * @param topologyInfo  runtime info for the submitted topology
 * @param stormConf     topology configuration
 * @param stormTopology the topology structure
 * @throws IllegalAccessException propagated from the delegate hook
 */
@Override
public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology)
    throws IllegalAccessException {
    final boolean isDebugEnabled = LOG.isDebugEnabled();

    if (isDebugEnabled) {
        LOG.debug("==> StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
    }

    try {
        activatePluginClassLoader();
        stormHook.notify(topologyInfo, stormConf, stormTopology);
    } finally {
        // Always restore the previous classloader, even if the delegate throws.
        deactivatePluginClassLoader();
    }

    if (isDebugEnabled) {
        LOG.debug("<== StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
    }
}
 
Example #3
Source File: StormAtlasHook.java    From incubator-atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the {@link Referenceable} describing a Storm topology.
 *
 * @param topologyInfo runtime information reported by Storm for the topology
 * @param stormConf    topology configuration, used to resolve the cluster name
 * @return a new Referenceable of the Storm-topology type with id, name, owner,
 *         start time and cluster-name attributes set
 * @throws Exception if resolving the cluster name fails
 */
private Referenceable createTopologyInstance(TopologyInfo topologyInfo, Map stormConf) throws Exception {
    // Storm may report an empty owner; substitute the well-known placeholder.
    String owner = topologyInfo.get_owner();
    if (StringUtils.isEmpty(owner)) {
        owner = ANONYMOUS_OWNER;
    }

    Referenceable topology = new Referenceable(StormDataTypes.STORM_TOPOLOGY.getName());
    topology.set("id", topologyInfo.get_id());
    topology.set(AtlasClient.NAME, topologyInfo.get_name());
    topology.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name());
    topology.set(AtlasClient.OWNER, owner);
    topology.set("startTime", new Date());
    topology.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));

    return topology;
}
 
Example #4
Source File: StormAtlasHook.java    From incubator-atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Forwards the Storm notification to the delegate hook loaded inside the
 * Atlas plugin classloader, switching classloaders around the call.
 *
 * @param topologyInfo  runtime info for the submitted topology
 * @param stormConf     topology configuration
 * @param stormTopology the topology structure
 * @throws IllegalAccessException propagated from the delegate hook
 */
@Override
public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology)
    throws IllegalAccessException {
    final boolean traceFlow = LOG.isDebugEnabled();

    if (traceFlow) {
        LOG.debug("==> StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
    }

    try {
        activatePluginClassLoader();
        stormHook.notify(topologyInfo, stormConf, stormTopology);
    } finally {
        // Restore the caller's classloader no matter how the delegate exits.
        deactivatePluginClassLoader();
    }

    if (traceFlow) {
        LOG.debug("<== StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
    }
}
 
Example #5
Source File: StormAtlasHook.java    From atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Client-side hook fired by Storm when a topology is submitted. Captures the
 * topology's metadata (entity, data sets, processing graph) and sends it to
 * Atlas as a single entity-create notification.
 *
 * @param topologyInfo  runtime info for the new topology
 * @param stormConf     topology configuration
 * @param stormTopology the topology structure (spouts and bolts)
 */
@Override
public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology) {
    LOG.info("Collecting metadata for a new storm topology: {}", topologyInfo.get_name());

    try {
        String requestUser = getUser(topologyInfo.get_owner(), null);

        // Root entity for the topology, plus a container for everything it refers to.
        AtlasEntity              topologyEntity = createTopologyInstance(topologyInfo, stormConf);
        AtlasEntitiesWithExtInfo extInfo        = new AtlasEntitiesWithExtInfo(topologyEntity);

        addTopologyDataSets(stormTopology, topologyInfo.get_owner(), stormConf, topologyEntity, extInfo);

        // Build the spout/bolt processing graph for the topology.
        List<AtlasEntity> graphNodeEntities = createTopologyGraph(stormTopology, stormTopology.get_spouts(), stormTopology.get_bolts());

        if (CollectionUtils.isNotEmpty(graphNodeEntities)) {
            // Link the topology to its graph nodes via the relationship attribute.
            topologyEntity.setRelationshipAttribute("nodes", AtlasTypeUtil.getAtlasRelatedObjectIds(graphNodeEntities, RELATIONSHIP_STORM_TOPOLOGY_NODES));

            for (AtlasEntity node : graphNodeEntities) {
                extInfo.addReferredEntity(node);
            }
        }

        notifyEntities(Collections.singletonList(new EntityCreateRequestV2(requestUser, extInfo)), null);
    } catch (Exception e) {
        throw new RuntimeException("Atlas hook is unable to process the topology.", e);
    }
}
 
Example #6
Source File: StormAtlasHook.java    From incubator-atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Client-side hook fired by Storm when a topology is submitted. Builds the
 * topology referenceable, its dependent data-set entities and its processing
 * graph, then notifies Atlas with the full entity list.
 *
 * @param topologyInfo  runtime info for the new topology
 * @param stormConf     topology configuration
 * @param stormTopology the topology structure (spouts and bolts)
 * @throws IllegalAccessException declared for interface compatibility
 */
@Override
public void notify(TopologyInfo topologyInfo, Map stormConf,
                   StormTopology stormTopology) throws IllegalAccessException {

    LOG.info("Collecting metadata for a new storm topology: {}", topologyInfo.get_name());
    try {
        ArrayList<Referenceable> allEntities = new ArrayList<>();

        Referenceable topology = createTopologyInstance(topologyInfo, stormConf);

        // Data sets the topology reads/writes; adding an empty list is a no-op.
        List<Referenceable> dataSetEntities = addTopologyDataSets(stormTopology, topology,
                topologyInfo.get_owner(), stormConf);
        allEntities.addAll(dataSetEntities);

        // Build the spout/bolt processing graph and attach it to the topology.
        ArrayList<Referenceable> graphNodes = createTopologyGraph(
                stormTopology, stormTopology.get_spouts(), stormTopology.get_bolts());
        topology.set("nodes", graphNodes);
        allEntities.add(topology);

        LOG.debug("notifying entities, size = {}", allEntities.size());
        notifyEntities(getUser(topologyInfo.get_owner(), null), allEntities);
    } catch (Exception e) {
        throw new RuntimeException("Atlas hook is unable to process the topology.", e);
    }
}
 
Example #7
Source File: FastWordCountTopology.java    From storm-net-adapter with Apache License 2.0 5 votes vote down vote up
/**
 * Looks up the topology named {@code name} via Nimbus and prints its spout
 * metrics (uptime, acked/failed counts, weighted-average complete latency)
 * aggregated over the ":all-time" window.
 *
 * @param client Nimbus client used to query cluster and topology state
 * @param name   topology name to report on
 * @throws Exception if no topology with that name exists or a Nimbus call fails
 */
public static void printMetrics(Nimbus.Iface client, String name) throws Exception {
    ClusterSummary summary = client.getClusterInfo();
    String id = null;
    for (TopologySummary ts : summary.get_topologies()) {
        if (name.equals(ts.get_name())) {
            id = ts.get_id();
            break; // topology names are unique; stop at the first match
        }
    }
    if (id == null) {
        throw new Exception("Could not find a topology named " + name);
    }

    TopologyInfo info = client.getTopologyInfo(id);
    int uptime = info.get_uptime_secs();
    long acked = 0;
    long failed = 0;
    double weightedAvgTotal = 0.0;

    for (ExecutorSummary exec : info.get_executors()) {
        if (!"spout".equals(exec.get_component_id())) {
            continue;
        }
        SpoutStats stats = exec.get_stats().get_specific().get_spout();
        Map<String, Long> failedMap = stats.get_failed().get(":all-time");
        Map<String, Long> ackedMap = stats.get_acked().get(":all-time");
        Map<String, Double> avgLatMap = stats.get_complete_ms_avg().get(":all-time");
        if (ackedMap == null) {
            continue; // executor has no ack stats for this window yet
        }
        for (Map.Entry<String, Long> ackEntry : ackedMap.entrySet()) {
            String key = ackEntry.getKey();
            if (failedMap != null) {
                Long failCount = failedMap.get(key);
                if (failCount != null) {
                    failed += failCount;
                }
            }
            long ackVal = ackEntry.getValue();
            acked += ackVal;
            // Guard missing latency entries: the original auto-unboxed
            // avgLatMap.get(key) and would NPE if the map or entry was absent.
            Double avgLat = (avgLatMap != null) ? avgLatMap.get(key) : null;
            if (avgLat != null) {
                weightedAvgTotal += avgLat * ackVal;
            }
        }
    }

    // Avoid a NaN average when nothing has been acked yet.
    double avgLatency = (acked > 0) ? weightedAvgTotal / acked : 0.0;
    System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " +
                       (((double) acked) / uptime + " failed: " + failed));
}
 
Example #8
Source File: InOrderDeliveryTest.java    From storm-net-adapter with Apache License 2.0 5 votes vote down vote up
/**
 * Finds the topology named {@code name} through Nimbus and prints its spout
 * metrics — uptime, acked/failed tuple counts and the ack-weighted average
 * complete latency — over the ":all-time" window.
 *
 * @param client Nimbus client used to query cluster and topology state
 * @param name   topology name to report on
 * @throws Exception if no topology with that name exists or a Nimbus call fails
 */
public static void printMetrics(Nimbus.Iface client, String name) throws Exception {
    ClusterSummary summary = client.getClusterInfo();
    String id = null;
    for (TopologySummary ts : summary.get_topologies()) {
        if (name.equals(ts.get_name())) {
            id = ts.get_id();
            break; // topology names are unique; first match is enough
        }
    }
    if (id == null) {
        throw new Exception("Could not find a topology named " + name);
    }

    TopologyInfo info = client.getTopologyInfo(id);
    int uptime = info.get_uptime_secs();
    long acked = 0;
    long failed = 0;
    double weightedAvgTotal = 0.0;

    for (ExecutorSummary exec : info.get_executors()) {
        if (!"spout".equals(exec.get_component_id())) {
            continue;
        }
        SpoutStats stats = exec.get_stats().get_specific().get_spout();
        Map<String, Long> failedMap = stats.get_failed().get(":all-time");
        Map<String, Long> ackedMap = stats.get_acked().get(":all-time");
        Map<String, Double> avgLatMap = stats.get_complete_ms_avg().get(":all-time");
        if (ackedMap == null) {
            continue; // executor has no ack stats for this window yet
        }
        for (Map.Entry<String, Long> ackEntry : ackedMap.entrySet()) {
            String key = ackEntry.getKey();
            if (failedMap != null) {
                Long failCount = failedMap.get(key);
                if (failCount != null) {
                    failed += failCount;
                }
            }
            long ackVal = ackEntry.getValue();
            acked += ackVal;
            // Null-guard the latency lookup: the original auto-unboxed
            // avgLatMap.get(key) and would NPE on a missing map or entry.
            Double avgLat = (avgLatMap != null) ? avgLatMap.get(key) : null;
            if (avgLat != null) {
                weightedAvgTotal += avgLat * ackVal;
            }
        }
    }

    // Avoid a NaN average when nothing has been acked yet.
    double avgLatency = (acked > 0) ? weightedAvgTotal / acked : 0.0;
    System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " +
                       (((double) acked) / uptime + " failed: " + failed));
}