Java Code Examples for scala.collection.JavaConversions#asJavaList()

The following examples show how to use scala.collection.JavaConversions#asJavaList() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: KafkaInfos.java    From DCMonitor with MIT License 6 votes vote down vote up
/**
 * Looks up per-partition info for the given consumer group and topic.
 *
 * @param group consumer group name
 * @param topic topic name
 * @return one {@code PartitionInfo} per partition for which {@code getPartitionInfo}
 *         yields a non-null result; an empty list when the topic has no partition mapping
 */
public List<PartitionInfo> getPartitionInfos(String group, String topic) {
  Seq<String> singleTopic = JavaConversions.asScalaBuffer(Collections.singletonList(topic)).toSeq();
  scala.collection.Map<String, Seq<Object>> pidMap = ZkUtils.getPartitionsForTopics(zkClient, singleTopic);
  Option<Seq<Object>> partitions = pidMap.get(topic);
  // BUG FIX: the original checked `partitions.get() == null`, but scala Option.get()
  // throws NoSuchElementException when the Option is None — the null check could
  // never be reached. isEmpty() is the correct absence test.
  if (partitions.isEmpty()) {
    return Collections.emptyList();
  }
  List<PartitionInfo> infos = Lists.newArrayList();
  for (Object o : JavaConversions.asJavaList(partitions.get())) {
    // Partition ids come back as boxed scala Ints; unbox to a Java int.
    PartitionInfo info = getPartitionInfo(group, topic, Int.unbox(o));
    if (info != null) {
      infos.add(info);
    }
  }
  return infos;
}
 
Example 2
Source File: KafkaClient.java    From kafka-service-broker with Apache License 2.0 5 votes vote down vote up
/**
 * Returns all topic names known to ZooKeeper.
 *
 * @return the cluster's topics as a Java list
 * @throws Exception if the ZooKeeper utilities cannot be obtained or queried
 */
List<String> listTopics() throws Exception {
    // If getUtils() throws there is nothing to close, so it stays outside the try;
    // once acquired, the finally block guarantees the handle is released.
    ZkUtils zkUtils = util.getUtils();
    try {
        return JavaConversions.asJavaList(zkUtils.getAllTopics());
    } finally {
        zkUtils.close();
    }
}
 
Example 3
Source File: KafkaInfos.java    From DCMonitor with MIT License 5 votes vote down vote up
/**
 * Fetches every topic registered in ZooKeeper.
 *
 * @return all topic names, or an empty list if the lookup fails
 */
public List<String> getTopics() {
  try {
    Seq<String> allTopics = ZkUtils.getAllTopics(zkClient);
    return JavaConversions.asJavaList(allTopics);
  } catch (Exception e) {
    // Best-effort: log and fall back to an empty result rather than propagating.
    log.error(e, "could not get topics");
    return Collections.emptyList();
  }
}
 
Example 4
Source File: SparkInterpreter.java    From Explorer with Apache License 2.0 5 votes vote down vote up
/**
 * Computes task progress for a stage (and, recursively, its parent stages)
 * against the Spark 1.1.x listener API.
 *
 * @param sparkListener listener whose {@code stageIdToData} map is read reflectively
 * @param stage stage whose task counts are accumulated
 * @return a two-element array: {total task count, completed task count}
 */
@SuppressWarnings("unchecked") // reflective invoke() returns Object; the map shape is fixed by Spark 1.1.x
private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
    int numTasks = stage.numTasks();
    int completedTasks = 0;

    try {
        // Spark 1.1.x keeps per-stage UI data behind stageIdToData; accessed via
        // reflection so this compiles against multiple Spark versions.
        Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
        HashMap<Tuple2<Object, Object>, Object> stageIdData = (HashMap<Tuple2<Object, Object>, Object>) stageIdToData
                .invoke(sparkListener);
        // Class.forName is a static method — the original's `this.getClass().forName(...)`
        // called it through an instance reference, which is misleading.
        Class<?> stageUIDataClass = Class.forName("org.apache.spark.ui.jobs.UIData$StageUIData");

        Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");

        Set<Tuple2<Object, Object>> keys = JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
        for (Tuple2<Object, Object> k : keys) {
            // Keys are (stageId, attemptId) tuples; match on stage id only.
            if (stage.id() == (int) k._1()) {
                Object uiData = stageIdData.get(k).get();
                completedTasks += (int) numCompletedTasks.invoke(uiData);
            }
        }
    } catch (Exception e) {
        // Progress reporting is best-effort; never fail the caller over it.
        logger.error("Error on getting progress information", e);
    }

    // Accumulate counts from all parent stages recursively.
    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
    if (parents != null) {
        for (Stage s : parents) {
            int[] p = getProgressFromStage_1_1x(sparkListener, s);
            numTasks += p[0];
            completedTasks += p[1];
        }
    }
    return new int[] { numTasks, completedTasks };
}
 
Example 5
Source File: SparkSqlInterpreter.java    From Explorer with Apache License 2.0 5 votes vote down vote up
/**
 * Computes task progress for a stage (and, recursively, its parent stages)
 * against the Spark 1.1.x listener API.
 *
 * @param sparkListener listener whose {@code stageIdToData} map is read reflectively
 * @param stage stage whose task counts are accumulated
 * @return a two-element array: {total task count, completed task count}
 */
@SuppressWarnings("unchecked") // reflective invoke() returns Object; the map shape is fixed by Spark 1.1.x
private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
    int numTasks = stage.numTasks();
    int completedTasks = 0;

    try {
        // Spark 1.1.x keeps per-stage UI data behind stageIdToData; accessed via
        // reflection so this compiles against multiple Spark versions.
        Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
        HashMap<Tuple2<Object, Object>, Object> stageIdData = (HashMap<Tuple2<Object, Object>, Object>) stageIdToData
                .invoke(sparkListener);
        // Class.forName is a static method — the original's `this.getClass().forName(...)`
        // called it through an instance reference, which is misleading.
        Class<?> stageUIDataClass = Class.forName("org.apache.spark.ui.jobs.UIData$StageUIData");

        Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");

        Set<Tuple2<Object, Object>> keys = JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
        for (Tuple2<Object, Object> k : keys) {
            // Keys are (stageId, attemptId) tuples; match on stage id only.
            if (stage.id() == (int) k._1()) {
                Object uiData = stageIdData.get(k).get();
                completedTasks += (int) numCompletedTasks.invoke(uiData);
            }
        }
    } catch (Exception e) {
        // Progress reporting is best-effort; never fail the caller over it.
        logger.error("Error on getting progress information", e);
    }

    // Accumulate counts from all parent stages recursively.
    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
    if (parents != null) {
        for (Stage s : parents) {
            int[] p = getProgressFromStage_1_1x(sparkListener, s);
            numTasks += p[0];
            completedTasks += p[1];
        }
    }
    return new int[] { numTasks, completedTasks };
}
 
Example 6
Source File: TestKafkaConsumerGroupService.java    From kafka-monitor with Apache License 2.0 4 votes vote down vote up
/**
 * Lists every consumer group known to the broker, flattened into a Java list.
 *
 * @return all consumer group overviews
 */
public List<GroupOverview> groupList() {
    Seq<GroupOverview> allGroups = adminClient.listAllConsumerGroupsFlattened();
    return JavaConversions.asJavaList(allGroups);
}
 
Example 7
Source File: TestKafkaConsumerGroupService.java    From kafka-monitor with Apache License 2.0 4 votes vote down vote up
/**
 * Logs, for every consumer in the given group, the committed offset, log-end
 * offset, and lag of each assigned topic partition.
 *
 * @param group consumer group to describe
 */
@Override
public void describeGroup(String group) {

    List<AdminClient.ConsumerSummary> consumerSummaryList =
            JavaConversions.asJavaList(adminClient.describeConsumerGroup(group));

    Consumer consumer = getConsumer();
    logger.debug("consumerList  -----  {}", consumerSummaryList);

    consumerSummaryList.forEach(e -> {
        List<TopicPartition> topicPartitions = JavaConversions.asJavaList(e.assignment());

        // BUG FIX: the original built a lazy Stream and took only findFirst(), so
        // committed offsets were recorded for the FIRST assigned partition alone;
        // every other partition's lookup then missed (and the raw map.get() could
        // NPE on unboxing). Collect offsets eagerly for all partitions instead.
        // NOTE(review): keyed by topic name (as in the original), so partitions of
        // the same topic overwrite each other — confirm this keying is intended.
        Map<String, Long> partitionOffsetsMap = new HashMap<>();
        for (TopicPartition topicPartition : topicPartitions) {
            OffsetAndMetadata metadata = consumer.committed(
                    new TopicPartition(topicPartition.topic(), topicPartition.partition()));
            if (metadata != null) {
                partitionOffsetsMap.put(topicPartition.topic(), metadata.offset());
                logger.debug("-------- offset {}", metadata.offset());
            }
        }

        topicPartitions.forEach(tp -> {
            long endOff = findLogEndOffset(tp.topic(), tp.partition());
            // getOrDefault avoids the unboxing NullPointerException on a missing key.
            long currentOff = partitionOffsetsMap.getOrDefault(tp.topic(), 0L);
            logger.debug("{}",
                    String.format("%s %s %s %s %s %s %s %s",
                            group, tp.topic(), String.valueOf(tp.partition()),
                            currentOff, endOff, endOff - currentOff,
                            e.clientId(), e.clientHost()));
        });
    });

}
 
Example 8
Source File: ReachablesProjectionUpdate.java    From act with GNU General Public License v3.0 4 votes vote down vote up
// Builds an update record from a projection result, converting its collections
// into Java lists for the ros/substrates/products fields.
public ReachablesProjectionUpdate(ProjectionResult projectionResult) {
  // Arrays.asList yields a fixed-size view — assumes ros() returns a Java array; TODO confirm
  this.ros = Arrays.asList(projectionResult.ros());
  // substrates()/products() are Scala sequences; convert to Java lists.
  this.substrates = JavaConversions.asJavaList(projectionResult.substrates());
  this.products = JavaConversions.asJavaList(projectionResult.products());
}
 
Example 9
Source File: CustomConsumerGroupService.java    From kafka-monitor with Apache License 2.0 2 votes vote down vote up
/**
 * Fetches an overview of every consumer group known to the cluster.
 *
 * @return all consumer groups, flattened into a Java list
 */
public List<GroupOverview> groupList() {
    return JavaConversions.asJavaList(adminClient.listAllConsumerGroupsFlattened());
}