Java Code Examples for scala.collection.JavaConversions#mapAsJavaMap()

The following examples show how to use scala.collection.JavaConversions#mapAsJavaMap(). Each example is extracted from an open source project; the source file, project, and license are noted above it.
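Before the project examples, here is a minimal self-contained sketch of the conversion itself; the class name, the map contents, and the round-trip through mapAsScalaMap() are illustrative, not taken from any project below.

import java.util.HashMap;
import java.util.Map;

import scala.collection.JavaConversions;

public class MapAsJavaMapSketch {
    public static void main(String[] args) {
        // Build a Scala map from a Java one so the sketch is self-contained;
        // in real code the Scala map usually comes from a Scala API.
        Map<String, String> source = new HashMap<String, String>();
        source.put("retention.ms", "86400000");
        scala.collection.Map<String, String> scalaMap = JavaConversions.mapAsScalaMap(source);

        // mapAsJavaMap wraps the Scala map as a java.util.Map view;
        // it does not copy the entries.
        Map<String, String> javaMap = JavaConversions.mapAsJavaMap(scalaMap);
        System.out.println(javaMap.get("retention.ms")); // prints 86400000
    }
}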
Example 1
Source File: Topicutil.java    From dk-fitting with Apache License 2.0
/**
 * Query all topics, including topics that have been marked for deletion
 * but not yet actually deleted.
 * @return list of topic names
 */
public static List<String> queryAllTopic() {
    ZkUtils zkUtils = ZkUtils.apply(zkUrl, sessionTimeout, connectionTimeout, JaasUtils.isZkSecurityEnabled());
    ArrayList<String> topics = new ArrayList<String>();
    // Fetch the configs of all topics and convert the Scala map to a Java map
    scala.collection.Map<String, Properties> stringPropertiesMap = AdminUtils.fetchAllTopicConfigs(zkUtils);
    Map<String, Properties> javaMap = JavaConversions.mapAsJavaMap(stringPropertiesMap);
    // Only the topic names are needed here
    topics.addAll(javaMap.keySet());
    zkUtils.close();
    return topics;
}
 
Example 2
Source File: KafkaTopicConfigProvider.java    From cruise-control with BSD 2-Clause "Simplified" License
@Override
public Map<String, Properties> allTopicConfigs() {
  KafkaZkClient kafkaZkClient = KafkaCruiseControlUtils.createKafkaZkClient(_connectString,
                                                                            ZK_KAFKA_TOPIC_CONFIG_PROVIDER_METRIC_GROUP,
                                                                            ZK_KAFKA_TOPIC_CONFIG_PROVIDER_METRIC_TYPE,
                                                                            _zkSecurityEnabled);
  try {
    AdminZkClient adminZkClient = new AdminZkClient(kafkaZkClient);
    return JavaConversions.mapAsJavaMap(adminZkClient.getAllTopicConfigs());
  } finally {
    KafkaCruiseControlUtils.closeKafkaZkClientWithTimeout(kafkaZkClient);
  }
}
 
Example 3
Source File: ZKOffsetGetter.java    From kmanager with Apache License 2.0
@Override
public Map<String, List<String>> getActiveTopicMap() {
	Map<String, List<String>> topicGroupsMap = new HashMap<String, List<String>>();
	List<String> consumers = ZKUtils.getChildren(ZkUtils.ConsumersPath());
	for (String consumer : consumers) {
		Map<String, scala.collection.immutable.List<ConsumerThreadId>> consumer_consumerThreadId = null;
		try {
			consumer_consumerThreadId = JavaConversions
					.mapAsJavaMap(ZKUtils.getZKUtilsFromKafka().getConsumersPerTopic(consumer, true));
		} catch (Exception e) {
			LOG.warn("getActiveTopicMap-> getConsumersPerTopic for group: " + consumer + " failed! "
					+ e.getMessage());
			// TODO the content of the /consumers/{group}/ids/{id} znode does not match the
			// expected format; something is wrong with this group
			continue;
		}
		Set<String> topics = consumer_consumerThreadId.keySet();
		topics.forEach(topic -> {
			List<String> _groups = null;
			if (topicGroupsMap.containsKey(topic)) {
				_groups = topicGroupsMap.get(topic);
				_groups.add(consumer);
			} else {
				_groups = new ArrayList<String>();
				_groups.add(consumer);
			}
			topicGroupsMap.put(topic, _groups);
		});
	}
	return topicGroupsMap;
}
 
Example 4
Source File: KafkaManager.java    From spark-streaming-direct-kafka with Apache License 2.0
private static void fillInLatestOffsets(Map<TopicAndPartition, Long> offsets, Map<String, String> kafkaParams) {
    if (offsets.containsValue(null)) {

        Set<TopicAndPartition> needOffset = offsets.entrySet().stream().filter(entry -> entry.getValue() == null)
                .map(Map.Entry::getKey).collect(Collectors.toSet());
        log.info("No initial offsets for " + needOffset + " reading from Kafka");

        // The high price of calling private Scala stuff:
        @SuppressWarnings("unchecked")
        scala.collection.immutable.Map<String, String> kafkaParamsScalaMap =
                (scala.collection.immutable.Map<String, String>)
                        scala.collection.immutable.Map$.MODULE$.apply(JavaConversions.mapAsScalaMap(kafkaParams)
                                .toSeq());
        @SuppressWarnings("unchecked")
        scala.collection.immutable.Set<TopicAndPartition> needOffsetScalaSet =
                (scala.collection.immutable.Set<TopicAndPartition>)
                        scala.collection.immutable.Set$.MODULE$.apply(JavaConversions.asScalaSet(needOffset)
                                .toSeq());

        KafkaCluster kc = new KafkaCluster(kafkaParamsScalaMap);
        Map<TopicAndPartition, ?> leaderOffsets =
                JavaConversions.mapAsJavaMap(kc.getLatestLeaderOffsets(needOffsetScalaSet).right().get());
        leaderOffsets.forEach((tAndP, leaderOffsetsObj) -> {
            // Can't reference LeaderOffset class, so, hack away:
            Matcher m = Pattern.compile("LeaderOffset\\([^,]+,[^,]+,([^)]+)\\)").matcher(leaderOffsetsObj
                    .toString());
            Preconditions.checkState(m.matches());
            offsets.put(tAndP, Long.valueOf(m.group(1)));
        });
    }
}
 
Example 5
Source File: KafkaTopicAssigner.java    From kafka-assigner with Apache License 2.0
/**
 * Convert a Scala Kafka partition assignment into a Java one.
 * @param topicMap the output from ZkUtils#getPartitionAssignmentForTopics
 * @return a Java map representing the same data
 */
static Map<String, Map<Integer, List<Integer>>> topicMapToJavaMap(
        scala.collection.Map<String,
                scala.collection.Map<Object,
                        scala.collection.Seq<Object>>> topicMap) {
    // We can actually use utilities like Maps#transformEntries, but since that doesn't allow
    // changing the key type from Object to Integer, this code just goes into each map and makes
    // copies all the way down. Copying is also useful for avoiding possible repeated lazy
    // evaluations by the rebalancing algorithm.
    Map<String, Map<Integer, List<Integer>>> resultTopicMap = Maps.newHashMap();
    Map<String, scala.collection.Map<Object, scala.collection.Seq<Object>>> convertedTopicMap =
            JavaConversions.mapAsJavaMap(topicMap);
    for (Map.Entry<String, scala.collection.Map<Object,
            scala.collection.Seq<Object>>> topicMapEntry : convertedTopicMap.entrySet()) {
        String topic = topicMapEntry.getKey();
        Map<Object, scala.collection.Seq<Object>> convertedPartitionMap =
                JavaConversions.mapAsJavaMap(topicMapEntry.getValue());
        Map<Integer, List<Integer>> resultPartitionMap = Maps.newHashMap();
        for (Map.Entry<Object, scala.collection.Seq<Object>> partitionMapEntry :
                convertedPartitionMap.entrySet()) {
            Integer partition = (Integer) partitionMapEntry.getKey();
            List<Integer> replicaList = Lists.newArrayList(Lists.transform(
                    JavaConversions.seqAsJavaList(partitionMapEntry.getValue()),
                    new Function<Object, Integer>() {
                        @Override
                        public Integer apply(Object raw) {
                            return (Integer) raw;
                        }
                    }));
            resultPartitionMap.put(partition, replicaList);
        }
        resultTopicMap.put(topic, resultPartitionMap);
    }
    return resultTopicMap;
}
 
Example 6
Source File: TestKafkaSystemFactoryJava.java    From samza with Apache License 2.0
@Test
public void testGetIntermediateStreamProperties() {
  Map<String, String> config = new HashMap<>();
  KafkaSystemFactory factory = new KafkaSystemFactory();
  Map<String, Properties> properties = JavaConversions.mapAsJavaMap(
      factory.getIntermediateStreamProperties(new MapConfig(config)));
  assertTrue(properties.isEmpty());

  // no properties for stream
  config.put("streams.test.samza.intermediate", "true");
  config.put("streams.test.compression.type", "lz4"); //some random config
  properties = JavaConversions.mapAsJavaMap(
      factory.getIntermediateStreamProperties(new MapConfig(config)));
  assertTrue(properties.isEmpty());

  config.put(ApplicationConfig.APP_MODE, ApplicationConfig.ApplicationMode.BATCH.name());

  KafkaSystemAdmin admin = createSystemAdmin(SYSTEM(), config);
  StreamSpec spec = new StreamSpec("test", "test", SYSTEM(),
      Collections.singletonMap("replication.factor", "1"));
  KafkaStreamSpec kspec = admin.toKafkaSpec(spec);

  Properties prop = kspec.getProperties();
  assertEquals(prop.getProperty("retention.ms"), String.valueOf(KafkaConfig.DEFAULT_RETENTION_MS_FOR_BATCH()));
  assertEquals(prop.getProperty("compression.type"), "lz4");

  // replication.factor should be removed from the properties and set on the spec directly
  assertEquals(kspec.getReplicationFactor(), 1);
  assertNull(prop.getProperty("replication.factor"));
}
 
Example 7
Source File: NoopTracerSetTraceContextInterceptor.java    From pinpoint with Apache License 2.0
private Trace populateTraceId(scala.Option context) {

    scala.collection.Map traceContextMap = (scala.collection.Map) context.get();
    Map map = JavaConversions.mapAsJavaMap(traceContextMap);

    String transactionId = (String) map.get("transactionId");
    String spanId = (String) map.get("spanId");
    String parentSpanId = (String) map.get("parentSpanId");
    String flag = (String) map.get("flag");
    String applicationName = (String) map.get("applicationName");
    String serverTypeCode = (String) map.get("serverTypeCode");
    String entityPath = (String) map.get("entityPath");
    String endPoint = (String) map.get("endPoint");

    TraceId traceId = traceContext.createTraceId(
            transactionId,
            NumberUtils.parseLong(parentSpanId, SpanId.NULL),
            NumberUtils.parseLong(spanId, SpanId.NULL),
            NumberUtils.parseShort(flag, (short) 0)
    );

    if (traceId != null) {
        Trace trace = traceContext.continueAsyncTraceObject(traceId);

        final SpanRecorder recorder = trace.getSpanRecorder();
        recorder.recordServiceType(OpenwhiskConstants.OPENWHISK_INVOKER);
        recorder.recordApi(descriptor);

        recorder.recordAcceptorHost(endPoint);
        recorder.recordRpcName(entityPath);

        // Record parent application
        recorder.recordParentApplication(applicationName, Short.valueOf(serverTypeCode));
        return trace;
    }
    return null;
}
 
Example 8
Source File: Script.java    From systemds with Apache License 2.0
/**
 * Pass a Scala Map of inputs to the script.
 * <p>
 * Note that the {@code Map} value type is not explicitly specified on this
 * method because {@code [String, Any]} can't be recognized on the Java side
 * since {@code Any} doesn't have an equivalent in the Java class hierarchy
 * ({@code scala.Any} is a superclass of {@code scala.AnyRef}, which is
 * equivalent to {@code java.lang.Object}). Therefore, specifying
 * {@code scala.collection.Map<String, Object>} as an input parameter to
 * this Java method is not encompassing enough and would require types such
 * as a {@code scala.Double} to be cast using {@code asInstanceOf[AnyRef]}.
 *
 * @param inputs
 *            Scala Map of inputs (parameters ($) and variables).
 * @return {@code this} Script object to allow chaining of methods
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Script in(scala.collection.Map<String, ?> inputs) {
	Map javaMap = JavaConversions.mapAsJavaMap(inputs);
	in(javaMap);

	return this;
}
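For context, a hedged usage sketch of this overload; the inline DML string and the $X parameter are illustrative, and it assumes SystemDS's Script(String) constructor with a Java map bridged to Scala via JavaConversions.mapAsScalaMap():

// Hypothetical inputs; in practice these come from the caller.
Map<String, Object> inputs = new HashMap<String, Object>();
inputs.put("$X", 5.0);
Script script = new Script("print('x = ' + $X)");
// mapAsScalaMap returns a scala.collection.mutable.Map, which conforms
// to the scala.collection.Map<String, ?> parameter of in().
script.in(JavaConversions.mapAsScalaMap(inputs));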
 
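Note that scala.collection.JavaConversions is deprecated since Scala 2.12 and was removed in Scala 2.13. A minimal sketch of the equivalent explicit conversion, assuming Scala 2.13+ on the classpath; the class name and map contents are illustrative:

import java.util.Collections;
import java.util.Map;

import scala.jdk.javaapi.CollectionConverters;

public class ConvertersSketch {
    public static void main(String[] args) {
        // Round-trip through asScala so the sketch is self-contained;
        // asJava is the Java-friendly replacement for mapAsJavaMap().
        Map<String, String> source = Collections.singletonMap("k", "v");
        scala.collection.mutable.Map<String, String> scalaMap = CollectionConverters.asScala(source);
        Map<String, String> javaMap = CollectionConverters.asJava(scalaMap);
        System.out.println(javaMap.get("k")); // prints v
    }
}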