Java Code Examples for scala.collection.JavaConversions

The following are top voted examples showing how to use scala.collection.JavaConversions when calling Scala APIs from Java. The examples are extracted from open source projects.
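
Before the individual examples, here is a minimal, self-contained sketch of the conversions that recur throughout this page: Java list to Scala Buffer/Seq, Scala Seq back to a Java list, and Java map to Scala map and back. The JavaConversionsDemo class below is hypothetical and is not taken from any of the listed projects; note also that JavaConversions has been deprecated since Scala 2.12 in favour of scala.collection.JavaConverters, although the calls shown here match the ones used in the examples that follow.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import scala.collection.JavaConversions;
import scala.collection.Seq;
import scala.collection.mutable.Buffer;

// Hypothetical standalone demo (not from any project listed below);
// requires only scala-library on the classpath.
public class JavaConversionsDemo {

    public static void main(String[] args) {
        // Java List -> Scala Buffer (a Buffer is also a scala.collection.Seq)
        List<String> javaList = new ArrayList<>();
        javaList.add("one");
        javaList.add("two");
        Buffer<String> buffer = JavaConversions.asScalaBuffer(javaList);
        Seq<String> scalaSeq = buffer; // usable wherever a Scala Seq is expected

        // Scala Seq -> Java List
        List<String> backToJava = JavaConversions.seqAsJavaList(scalaSeq);

        // Java Map -> mutable Scala Map, and Scala Map -> Java Map
        Map<String, Object> javaMap = new HashMap<>();
        javaMap.put("count", 1);
        scala.collection.mutable.Map<String, Object> scalaMap =
                JavaConversions.mapAsScalaMap(javaMap);
        Map<String, Object> javaView = JavaConversions.mapAsJavaMap(scalaMap);

        System.out.println(scalaSeq);   // prints Buffer(one, two)
        System.out.println(backToJava); // prints [one, two]
        System.out.println(javaView);   // prints {count=1}
    }
}

The remaining examples apply these same static conversion methods when passing collections between Java code and Scala APIs such as Kafka, Spark, Akka and spray.
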
Example 1
Project: management-sdk-for-kafka   File: KFBrokerWatcher.java
/**
 * Get Kafka broker metadata for a specific address
 *
 * @param kafkaBrokers    list of registered Kafka brokers
 * @param kfBrokerAddress address to look for
 * @return Kafka broker metadata
 */
private KFBrokerMetadata getBrokerMetadataByAddress(final List<Broker> kafkaBrokers,
                                                    final InetSocketAddress kfBrokerAddress) {

    KFBrokerMetadata brokerMetadata = new KFBrokerMetadata();

    kafkaBrokers.forEach(broker -> {
        JavaConversions.mapAsJavaMap(broker.endPoints())
                .forEach((protocol, endpoint) -> {
                    if (endpoint.host().equals(kfBrokerAddress.getHostName())
                            && endpoint.port() == kfBrokerAddress.getPort()) {
                        brokerMetadata.setBrokerId(broker.id());
                        brokerMetadata.setHost(endpoint.host());
                        brokerMetadata.setPort(endpoint.port());
                        brokerMetadata.setConnectionString(endpoint.connectionString());
                        brokerMetadata.setSecurityProtocol(protocol.name);
                    }
                });
    });
    return brokerMetadata;
}
 
Example 2
Project: kmanager   File: OffsetGetter.java
public List<OffsetInfo> processTopic(String group, String topic) throws Exception {
	List<String> partitionIds = null;
	try {
		partitionIds = JavaConversions.seqAsJavaList(ZKUtils.getZKUtilsFromKafka()
				.getChildren(ZkUtils.BrokerTopicsPath() + "/" + topic + "/partitions"));
	} catch (Exception e) {
		if (e instanceof NoNodeException) {
			LOG.warn("Is topic >" + topic + "< exists!", e);
			return null;
		}
	}
	List<OffsetInfo> offsetInfos = new ArrayList<OffsetInfo>();
	OffsetInfo offsetInfo = null;
	if (partitionIds == null) {
		// TODO that topic exists in consumer node but not in topics node?!
		return null;
	}

	for (String partitionId : partitionIds) {
		offsetInfo = processPartition(group, topic, partitionId);
		if (offsetInfo != null) {
			offsetInfos.add(offsetInfo);
		}
	}
	return offsetInfos;
}
 
Example 3
Project: doctorkafka   File: OstrichAdminService.java
public void startAdminHttpService() {
  try {
    Properties properties = new Properties();
    properties.load(this.getClass().getResource("build.properties").openStream());
    LOG.info("build.properties build_revision: {}",
        properties.getProperty("build_revision", "unknown"));
  } catch (Throwable t) {
    LOG.warn("Failed to load properties from build.properties", t);
  }
  Duration[] defaultLatchIntervals = {Duration.apply(1, TimeUnit.MINUTES)};
  Iterator<Duration> durationIterator = Arrays.asList(defaultLatchIntervals).iterator();
  @SuppressWarnings("deprecation")
  AdminServiceFactory adminServiceFactory = new AdminServiceFactory(
      this.port,
      20,
      List$.MODULE$.empty(),
      Option.empty(),
      List$.MODULE$.empty(),
      Map$.MODULE$.empty(),
      JavaConversions.asScalaIterator(durationIterator).toList());
  RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment(this);
  AdminHttpService service = adminServiceFactory.apply(runtimeEnvironment);
  for (Map.Entry<String, CustomHttpHandler> entry : this.customHttpHandlerMap.entrySet()) {
    service.httpServer().createContext(entry.getKey(), entry.getValue());
  }
}
 
Example 4
Project: oryx2   File: KafkaUtils.java
/**
 * @param zkServers Zookeeper server string: host1:port1[,host2:port2,...]
 * @param groupID consumer group to get offsets for
 * @param topic topic to get offsets for
 * @return mapping of (topic and) partition to offset
 */
public static Map<Pair<String,Integer>,Long> getOffsets(String zkServers,
                                                        String groupID,
                                                        String topic) {
  ZKGroupTopicDirs topicDirs = new ZKGroupTopicDirs(groupID, topic);
  Map<Pair<String,Integer>,Long> offsets = new HashMap<>();
  ZkUtils zkUtils = ZkUtils.apply(zkServers, ZK_TIMEOUT_MSEC, ZK_TIMEOUT_MSEC, false);
  try {
    List<?> partitions = JavaConversions.seqAsJavaList(
        zkUtils.getPartitionsForTopics(
          JavaConversions.asScalaBuffer(Collections.singletonList(topic))).head()._2());
    partitions.forEach(partition -> {
      String partitionOffsetPath = topicDirs.consumerOffsetDir() + "/" + partition;
      Option<String> maybeOffset = zkUtils.readDataMaybeNull(partitionOffsetPath)._1();
      Long offset = maybeOffset.isDefined() ? Long.valueOf(maybeOffset.get()) : null;
      offsets.put(new Pair<>(topic, Integer.valueOf(partition.toString())), offset);
    });
  } finally {
    zkUtils.close();
  }
  return offsets;
}
 
Example 5
Project: LiteGraph   File: WrappedArraySerializer.java
@Override
public void write(final Kryo kryo, final Output output, final WrappedArray<T> iterable) {
    output.writeVarInt(iterable.size(), true);
    JavaConversions.asJavaCollection(iterable).forEach(t -> {
        kryo.writeClassAndObject(output, t);
        output.flush();
    });
}
 
Example 6
Project: spark-rdf4j   File: SparkRDF4JDefaultSource.java
@Override
public SparkRDF4JSparqlRelation createRelation(SQLContext sqlContext,
		scala.collection.immutable.Map<String, String> scalaParameters, StructType schema) {
	Map<String, String> parameters = JavaConversions.asJavaMap(scalaParameters);
	String service = Optional.ofNullable(parameters.get("service")).orElseThrow(() -> new RuntimeException(
			"Spark RDF4J Sparql requires a SPARQL 'service' to be specified in the parameters"));
	String query = Optional.ofNullable(parameters.get("query")).orElseThrow(() -> new RuntimeException(
			"Spark RDF4J Sparql requires a 'query' to be specified in the parameters"));

	try {
		ParsedQuery parsedQuery = QueryParserUtil.parseQuery(QueryLanguage.SPARQL, query, null);
		if(!(parsedQuery instanceof ParsedTupleQuery)) {
			throw new RuntimeException("Spark RDF4J can only be used with Tuple (Select) queries right now.");
		}
		return new SparkRDF4JSparqlRelation(service, parsedQuery, schema, sqlContext);
	} catch (MalformedQueryException e) {
		throw new RuntimeException("Query was not valid SPARQL", e);
	}

}
 
Example 7
Project: envelope   File: ExcludeDeriver.java
@Override
public Dataset<Row> derive(Map<String, Dataset<Row>> dependencies) throws Exception {

  Dataset<Row> compare, with;

  if (!dependencies.containsKey(compareDataset)) {
    throw new RuntimeException("Designated comparison target dataset is not a dependency: " + compareDataset);
  } else {
    compare = dependencies.get(compareDataset);
  }

  if (!dependencies.containsKey(withDataset)) {
    throw new RuntimeException("Designated comparison reference dataset is not a dependency: " + withDataset);
  } else {
    with = dependencies.get(withDataset);
  }

  return compare.join(with, JavaConversions.asScalaBuffer(fields).toList(), "leftanti");

}
 
Example 8
Project: akkaTrader   File: Main.java
public static void main(String[] args) {

		if(args.length > 0){
			NUM_KIDS = Integer.parseInt(args[0]);
		}
		if(args.length > 1){
			DELAY = Long.parseLong(args[1]);
		}
		if(args.length > 2){
			DB_HOST = args[2];
		}
		
		ActorRef listener = system.actorOf(Props.create(HttpActor.class), "httpActor"); 
		
		InetSocketAddress endpoint = new InetSocketAddress(3000);
		int backlog = 100;
		List<Inet.SocketOption> options = JavaConversions.asScalaBuffer(new ArrayList<Inet.SocketOption>()).toList();
		Option<ServerSettings> settings = scala.Option.empty();
		ServerSSLEngineProvider sslEngineProvider = null;
		Bind bind = new Http.Bind(listener, endpoint, backlog, options, settings, sslEngineProvider);
		IO.apply(spray.can.Http$.MODULE$, system).tell(bind, ActorRef.noSender());
		
		system.scheduler().schedule(new FiniteDuration(5, TimeUnit.SECONDS), new FiniteDuration(5, TimeUnit.SECONDS), ()->{
			System.out.println(new Date() + " - numSales=" + numSales.get());
		}, system.dispatcher());
	}
 
Example 9
Project: akkaTrader   File: Main.java
@Override
public Routee select(Object message, IndexedSeq<Routee> routees) {

	//find which product ID is relevant here
	String productId = null;
	if(message instanceof PurchaseOrder){
		productId = ((PurchaseOrder) message).getProductId();
	}else if(message instanceof SalesOrder){
		productId = ((SalesOrder) message).getProductId();
	}
	ActorRef actorHandlingProduct = kids.get(productId);

	//now go find the routee for the relevant actor
	for(Routee r : JavaConversions.asJavaIterable(routees)){
		ActorRef a = ((ActorRefRoutee) r).ref(); //cast ok, since in this program all routees are by definition routees to ActorRefs
		if(a.equals(actorHandlingProduct)){
			return r;
		}
	}
	
	return akka.routing.NoRoutee$.MODULE$; //none found, return NoRoutee
}
 
Example 10
Project: Gaffer   File: RFileReaderRDD.java
public RFileReaderRDD(final SparkContext sparkContext,
                      final String instanceName,
                      final String zookeepers,
                      final String user,
                      final String password,
                      final String tableName,
                      final Set<String> auths,
                      final byte[] serialisedConfiguration) {
    super(sparkContext, JavaConversions.asScalaBuffer(new ArrayList<>()),
            ClassTag$.MODULE$.apply(Map.Entry.class));
    this.instanceName = instanceName;
    this.zookeepers = zookeepers;
    this.user = user;
    this.password = password;
    this.tableName = tableName;
    this.auths = auths;
    this.serialisedConfiguration = serialisedConfiguration;
}
 
Example 11
Project: hssd   File: HSSDEditorMoveEntry.java
private void move(EntryTree tree,
		TreeNode[] srcs,
		TreeNode tgt,
		boolean movingLeaf,
		Set<TraitThypeLike> desiredTraits) {

	if(movingLeaf && !desiredTraits.isEmpty()) {
		EntryData tgtEd = EntryData.of(tgt);
        tgtEd.insertTraits(JavaConversions.iterableAsScalaIterable(
                desiredTraits));
	}

	for(TreeNode src: srcs) {
		tree.move(src, tgt);
        EntryData srcEd = EntryData.of(src);
		if(!movingLeaf && !desiredTraits.isEmpty()) {
            srcEd.insertTraits(JavaConversions.iterableAsScalaIterable(
                    desiredTraits));
		}
		srcEd.markDirty();
	}
}
 
Example 12
Project: systemml   File: Script.java
/**
 * Pass a Scala Seq of inputs to the script. The inputs are either two-value
 * or three-value tuples, where the first value is the variable name, the
 * second value is the variable value, and the third optional value is the
 * metadata.
 *
 * @param inputs
 *            Scala Seq of inputs (parameters ($) and variables).
 * @return {@code this} Script object to allow chaining of methods
 */
public Script in(scala.collection.Seq<Object> inputs) {
	List<Object> list = JavaConversions.seqAsJavaList(inputs);
	for (Object obj : list) {
		if (obj instanceof Tuple3) {
			@SuppressWarnings("unchecked")
			Tuple3<String, Object, MatrixMetadata> t3 = (Tuple3<String, Object, MatrixMetadata>) obj;
			in(t3._1(), t3._2(), t3._3());
		} else if (obj instanceof Tuple2) {
			@SuppressWarnings("unchecked")
			Tuple2<String, Object> t2 = (Tuple2<String, Object>) obj;
			in(t2._1(), t2._2());
		} else {
			throw new MLContextException("Only Tuples of 2 or 3 values are permitted");
		}
	}
	return this;
}
 
Example 13
Project: systemml   File: MLContextConversionUtil.java
/**
 * Convert a {@code MatrixObject} to a {@code RDD<String>} in IJV format.
 *
 * @param matrixObject
 *            the {@code MatrixObject}
 * @return the {@code MatrixObject} converted to a {@code RDD<String>}
 */
public static RDD<String> matrixObjectToRDDStringIJV(MatrixObject matrixObject) {

	// NOTE: The following works when called from Java but does not
	// currently work when called from Spark Shell (when you call
	// collect() on the RDD<String>).
	//
	// JavaRDD<String> javaRDD = jsc.parallelize(list);
	// RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// Therefore, we call parallelize() on the SparkContext rather than
	// the JavaSparkContext to produce the RDD<String> for Scala.

	List<String> list = matrixObjectToListStringIJV(matrixObject);

	ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(JavaConversions.asScalaBuffer(list), sc().defaultParallelism(), tag);
}
 
Example 14
Project: systemml   File: MLContextConversionUtil.java
/**
 * Convert a {@code FrameObject} to a {@code RDD<String>} in IJV format.
 *
 * @param frameObject
 *            the {@code FrameObject}
 * @return the {@code FrameObject} converted to a {@code RDD<String>}
 */
public static RDD<String> frameObjectToRDDStringIJV(FrameObject frameObject) {

	// NOTE: The following works when called from Java but does not
	// currently work when called from Spark Shell (when you call
	// collect() on the RDD<String>).
	//
	// JavaRDD<String> javaRDD = jsc.parallelize(list);
	// RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// Therefore, we call parallelize() on the SparkContext rather than
	// the JavaSparkContext to produce the RDD<String> for Scala.

	List<String> list = frameObjectToListStringIJV(frameObject);

	ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(JavaConversions.asScalaBuffer(list), sc().defaultParallelism(), tag);
}
 
Example 15
Project: systemml   File: MLContextConversionUtil.java
/**
 * Convert a {@code MatrixObject} to a {@code RDD<String>} in CSV format.
 *
 * @param matrixObject
 *            the {@code MatrixObject}
 * @return the {@code MatrixObject} converted to a {@code RDD<String>}
 */
public static RDD<String> matrixObjectToRDDStringCSV(MatrixObject matrixObject) {

	// NOTE: The following works when called from Java but does not
	// currently work when called from Spark Shell (when you call
	// collect() on the RDD<String>).
	//
	// JavaRDD<String> javaRDD = jsc.parallelize(list);
	// RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// Therefore, we call parallelize() on the SparkContext rather than
	// the JavaSparkContext to produce the RDD<String> for Scala.

	List<String> list = matrixObjectToListStringCSV(matrixObject);

	ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(JavaConversions.asScalaBuffer(list), sc().defaultParallelism(), tag);
}
 
Example 16
Project: systemml   File: MLContextConversionUtil.java
/**
 * Convert a {@code FrameObject} to a {@code RDD<String>} in CSV format.
 *
 * @param frameObject
 *            the {@code FrameObject}
 * @param delimiter
 *            the delimiter
 * @return the {@code FrameObject} converted to a {@code RDD<String>}
 */
public static RDD<String> frameObjectToRDDStringCSV(FrameObject frameObject, String delimiter) {

	// NOTE: The following works when called from Java but does not
	// currently work when called from Spark Shell (when you call
	// collect() on the RDD<String>).
	//
	// JavaRDD<String> javaRDD = jsc.parallelize(list);
	// RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// Therefore, we call parallelize() on the SparkContext rather than
	// the JavaSparkContext to produce the RDD<String> for Scala.

	List<String> list = frameObjectToListStringCSV(frameObject, delimiter);

	ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(JavaConversions.asScalaBuffer(list), sc().defaultParallelism(), tag);
}
 
Example 17
Project: systemml   File: MLContextTest.java
@Test
public void testInputScalaMapDML() {
	System.out.println("MLContextTest - input Scala map DML");

	List<String> list = new ArrayList<String>();
	list.add("10,20");
	list.add("30,40");
	final JavaRDD<String> javaRDD = sc.parallelize(list);

	Map<String, Object> inputs = new HashMap<String, Object>() {
		private static final long serialVersionUID = 1L;
		{
			put("$X", 2);
			put("M", javaRDD);
		}
	};

	scala.collection.mutable.Map<String, Object> scalaMap = JavaConversions.mapAsScalaMap(inputs);

	String s = "M = M + $X; print('sum: ' + sum(M));";
	Script script = dml(s).in(scalaMap);
	setExpectedStdOut("sum: 108.0");
	ml.execute(script);
}
 
Example 18
Project: systemml   File: MLContextTest.java
@Test
public void testInputScalaMapPYDML() {
	System.out.println("MLContextTest - input Scala map PYDML");

	List<String> list = new ArrayList<String>();
	list.add("10,20");
	list.add("30,40");
	final JavaRDD<String> javaRDD = sc.parallelize(list);

	Map<String, Object> inputs = new HashMap<String, Object>() {
		private static final long serialVersionUID = 1L;
		{
			put("$X", 2);
			put("M", javaRDD);
		}
	};

	scala.collection.mutable.Map<String, Object> scalaMap = JavaConversions.mapAsScalaMap(inputs);

	String s = "M = M + $X\nprint('sum: ' + sum(M))";
	Script script = pydml(s).in(scalaMap);
	setExpectedStdOut("sum: 108.0");
	ml.execute(script);
}
 
Example 19
Project: samza   File: TestKafkaSystemFactoryJava.java
@Test
public void testGetIntermediateStreamProperties() {
  Map<String, String> config = new HashMap<>();
  KafkaSystemFactory factory = new KafkaSystemFactory();
  Map<String, Properties> properties = JavaConversions.mapAsJavaMap(
      factory.getIntermediateStreamProperties(new MapConfig(config)));
  assertTrue(properties.isEmpty());

  // no properties for stream
  config.put("streams.test.samza.intermediate", "true");
  config.put("streams.test.compression.type", "lz4"); //some random config
  properties = JavaConversions.mapAsJavaMap(
      factory.getIntermediateStreamProperties(new MapConfig(config)));
  assertTrue(properties.isEmpty());

  config.put(ApplicationConfig.APP_MODE, ApplicationConfig.ApplicationMode.BATCH.name());
  properties = JavaConversions.mapAsJavaMap(
      factory.getIntermediateStreamProperties(new MapConfig(config)));
  assertTrue(!properties.isEmpty());
  Properties prop = properties.get("test");
  assertEquals(prop.getProperty("retention.ms"), String.valueOf(KafkaConfig.DEFAULT_RETENTION_MS_FOR_BATCH()));
  assertEquals(prop.getProperty("compression.type"), "lz4");
}
 
Example 20
Project: easyframe-msg   File: AdminUtil.java
/**
 * Get the list of partition IDs for the given topic.
 * @param topic the topic name
 * @return the list of partition IDs for the topic
 */
public static List<Integer> getAllPartitionIds(String topic) {
	List list = new ArrayList();
	list.add(topic);
	Buffer buffer = JavaConversions.asScalaBuffer(list);

	Map<String, Seq<Object>> topicPartMap = JavaConversions.asJavaMap(ZkUtils.getPartitionsForTopics(getZkClient(), buffer));
	List<Object> javaList = JavaConversions.asJavaList(topicPartMap.get(topic));
	
	List<Integer> retList = new ArrayList<Integer>();
	for (Object obj : javaList) {
		retList.add((Integer)obj);
	}
	
	return retList;
}
 
Example 21
Project: DCMonitor   File: KafkaInfos.java
public List<PartitionInfo> getPartitionInfos(String group, String topic) {
  Seq<String> singleTopic = JavaConversions.asScalaBuffer(Collections.singletonList(topic)).toSeq();
  scala.collection.Map<String, Seq<Object>> pidMap = ZkUtils.getPartitionsForTopics(zkClient, singleTopic);
  Option<Seq<Object>> partitions = pidMap.get(topic);
  if (partitions.isEmpty()) { // topic not found in ZooKeeper
    return Collections.emptyList();
  }
  List<PartitionInfo> infos = Lists.newArrayList();
  for (Object o : JavaConversions.asJavaList(partitions.get())) {
    PartitionInfo info = getPartitionInfo(group, topic, Int.unbox(o));
    if (info != null) {
      infos.add(info);
    }
  }
  return infos;
}
 
Example 22
Project: eddy   File: Converter.java
UnresolvedClassItem(@NotNull final Converter env, @NotNull final PsiClassReferenceType cls, @Nullable final Parent parent) {
  this.env = env;
  this.cls = cls;
  PsiPackage pkg = Place.getElementPackage(cls.getReference(), env.project);
  _parent = parent == null ? pkg == null ? LocalPkg$.MODULE$ : (Package)env.addContainer(pkg) : parent;

  if (cls instanceof PsiModifierListOwner) {
    _isFinal = ((PsiModifierListOwner)cls).hasModifierProperty(PsiModifier.FINAL);
    _isAbstract = ((PsiModifierListOwner)cls).hasModifierProperty(PsiModifier.ABSTRACT);
    _isStatic = Place.isStatic((PsiModifierListOwner) cls);
  }

  List<TypeArg> jargs = new SmartList<TypeArg>();
  for (PsiType arg : cls.getParameters())
    jargs.add(env.convertTypeArg(arg,parent));
  _targs = scala.collection.JavaConversions.asScalaBuffer(jargs).toList();
}
 
Example 23
Project: eddy   File: EnvironmentProcessorTest.java
private TypeVar addTypeParam(Map<PsiElement,Item> global_envitems, Map<PsiElement,Item> local_envitems, PsiTypeParameter p) {
  if (global_envitems.containsKey(p))
    return (TypeVar)global_envitems.get(p);
  if (local_envitems.containsKey(p))
    return (TypeVar)local_envitems.get(p);

  // Add maker here to break recursion
  TypeVarMaker ti = new TypeVarMaker(p.getName());
  local_envitems.put(p,ti);

  PsiClassType[] extended = p.getExtendsList().getReferencedTypes();
  List<ClassType> etypes = new SmartList<ClassType>();
  for (PsiClassType e : extended) {
    etypes.add((ClassType)convertType(global_envitems, local_envitems, e));
  }

  if (etypes.isEmpty())
    ti.set(ObjectType$.MODULE$, JavaConversions.asScalaBuffer(etypes).toList());
  else
    ti.set(etypes.get(0), JavaConversions.asScalaBuffer(etypes.subList(1,etypes.size())).toList());

  return ti;
}
 
Example 24
Project: components   File: FilterRowDoFn.java
private List<Object> getInputFields(IndexedRecord inputRecord, String columnName) {
    // Adapt non-avpath syntax to avpath.
    // TODO: This should probably not be automatic, use the actual syntax.
    if (!columnName.startsWith("."))
        columnName = "." + columnName;
    Try<scala.collection.immutable.List<Evaluator.Ctx>> result = wandou.avpath.package$.MODULE$.select(inputRecord,
            columnName);
    List<Object> values = new ArrayList<Object>();
    if (result.isSuccess()) {
        for (Evaluator.Ctx ctx : JavaConversions.asJavaCollection(result.get())) {
            values.add(ctx.value());
        }
    } else {
        // Evaluating the expression failed, and we can handle the exception.
        Throwable t = result.failed().get();
        throw ProcessingErrorCode.createAvpathSyntaxError(t, columnName, -1);
    }
    return values;
}
 
Example 25
Project: tinkerpop   File: Spark.java
public static void refresh() {
    if (null == CONTEXT)
        throw new IllegalStateException("The Spark context has not been created.");
    if (CONTEXT.isStopped())
        recreateStopped();

    final Set<String> keepNames = new HashSet<>();
    for (final RDD<?> rdd : JavaConversions.asJavaIterable(CONTEXT.persistentRdds().values())) {
        if (null != rdd.name()) {
            keepNames.add(rdd.name());
            NAME_TO_RDD.put(rdd.name(), rdd);
        }
    }
    // remove all stale names in the NAME_TO_RDD map
    NAME_TO_RDD.keySet().stream().filter(key -> !keepNames.contains(key)).collect(Collectors.toList()).forEach(NAME_TO_RDD::remove);
}
 
Example 26
Project: spork-streaming   File: LimitConverter.java
@Override
public Iterator<Tuple> call(Iterator<Tuple> i) {
    final java.util.Iterator<Tuple> tuples = JavaConversions.asJavaIterator(i);

    return JavaConversions.asScalaIterator(new POOutputConsumerIterator(tuples) {

        protected void attach(Tuple tuple) {
            poLimit.setInputs(null);
            poLimit.attachInput(tuple);
        }

        protected Result getNextResult() throws ExecException {
            return poLimit.getNextTuple();
        }
    });
}
 
Example 27
Project: gatk   File: RangePartitionCoalescer.java
@Override
public PartitionGroup[] coalesce(int maxPartitions, RDD<?> parent) {
    if (maxPartitions != parent.getNumPartitions()) {
        throw new IllegalArgumentException("Cannot use " + getClass().getSimpleName() +
                " with a different number of partitions to the parent RDD.");
    }
    List<Partition> partitions = Arrays.asList(parent.getPartitions());
    PartitionGroup[] groups = new PartitionGroup[partitions.size()];

    for (int i = 0; i < partitions.size(); i++) {
        Seq<String> preferredLocations = parent.getPreferredLocations(partitions.get(i));
        scala.Option<String> preferredLocation = scala.Option.apply
                (preferredLocations.isEmpty() ? null : preferredLocations.apply(0));
        PartitionGroup group = new PartitionGroup(preferredLocation);
        List<Partition> partitionsInGroup =
                partitions.subList(i, maxEndPartitionIndexes.get(i) + 1);
        group.partitions().append(JavaConversions.asScalaBuffer(partitionsInGroup));
        groups[i] = group;
    }
    return groups;
}
 
Example 28
Project: gsn   File: BeansInitializer.java
public static StreamSource source(SourceConf sc){
 StreamSource s = new StreamSource();
 s.setAlias(sc.alias());
 s.setSqlQuery(sc.query());
 if (sc.slide().isDefined())
  s.setRawSlideValue(sc.slide().get());
 if (sc.samplingRate().isDefined())
  s.setSamplingRate(((Double)sc.samplingRate().get()).floatValue());
 if (sc.disconnectBufferSize().isDefined())
  s.setDisconnectedBufferSize(((Integer)sc.disconnectBufferSize().get()));
 if (sc.storageSize().isDefined())
  s.setRawHistorySize(sc.storageSize().get());
 AddressBean[] add=new AddressBean[sc.wrappers().size()];
 int i=0;
 for (WrapperConf w:JavaConversions.asJavaIterable(sc.wrappers())){
  add[i]=address(w);
  i++;
 }
 s.setAddressing(add);
 return s;
}
 
Example 29
Project: gsn   File: BeansInitializer.java
public static AddressBean address(WrapperConf w){
    KeyValueImp [] p=new KeyValueImp[w.params().size()];
    Iterable<String> keys=JavaConversions.asJavaIterable(w.params().keys());
    int i=0;
 for (String k:keys){
  p[i]=new KeyValueImp(k,w.params().apply(k));
  i++;
 }
    AddressBean a = new AddressBean(w.wrapper(),p);
    if(w.partialKey().isDefined()){
    a.setPartialOrderKey(w.partialKey().get());
    }
    DataField [] out=new DataField[(w.output().size())];
 for (int j=0;j<out.length;j++){
  out[j]=dataField(w.output().apply(j));
 }
    a.setVsconfig(out);
 return a;
}
 
Example 30
Project: scoverage-maven-plugin   File: SCoverageReportMojo.java
private void writeReports( Coverage coverage, List<File> sourceRoots, File coberturaXmlOutputDirectory,
                           File scoverageXmlOutputDirectory, File scoverageHtmlOutputDirectory )
{
    Seq<File> sourceRootsAsScalaSeq = JavaConversions.asScalaBuffer( sourceRoots );

    new CoberturaXmlWriter( sourceRootsAsScalaSeq, coberturaXmlOutputDirectory ).write( coverage );
    getLog().info( String.format( "Written Cobertura XML report [%s]",
                                  new File( coberturaXmlOutputDirectory, "cobertura.xml" ).getAbsolutePath() ) );

    new ScoverageXmlWriter( sourceRootsAsScalaSeq, scoverageXmlOutputDirectory, false ).write( coverage );
    getLog().info( String.format( "Written XML coverage report [%s]",
                                  new File( scoverageXmlOutputDirectory, "scoverage.xml" ).getAbsolutePath() ) );

    new ScoverageHtmlWriter( sourceRootsAsScalaSeq, scoverageHtmlOutputDirectory, Option.<String>apply( encoding ) ).write( coverage );
    getLog().info( String.format( "Written HTML coverage report [%s]",
                                  new File( scoverageHtmlOutputDirectory, "index.html" ).getAbsolutePath() ) );

    getLog().info( String.format( "Statement coverage.: %s%%", coverage.statementCoverageFormatted() ) );
    getLog().info( String.format( "Branch coverage....: %s%%", coverage.branchCoverageFormatted() ) );
}
 
Example 31
Project: spliceengine   File: StreamableRDD.java
private void submitBatch(int batch, int batchSize, int numPartitions, final JavaRDD<String> streamed, final Properties properties) {
    final List<Integer> list = new ArrayList<>();
    for (int j = batch*batchSize; j < numPartitions && j < (batch+1)*batchSize; j++) {
        list.add(j);
    }
    if (LOG.isTraceEnabled())
        LOG.trace("Submitting batch " + batch + " with partitions " + list);
    final Seq objects = JavaConversions.asScalaBuffer(list).toList();
    completionService.submit(new Callable<Object>() {
        @Override
        public Object call() {
            SparkContext sc = SpliceSpark.getContextUnsafe().sc();
            sc.setLocalProperties(properties);
            String[] results = (String[]) sc.runJob(streamed.rdd(), new FunctionAdapter(), objects, tag);
            for (String o2: results) {
                if ("STOP".equals(o2)) {
                    return "STOP";
                }
            }
            return "CONTINUE";
        }
    });
}
 
Example 32
Project: MountainRangePvP   File: LeaderBoardRenderer.java
public void render(Snapshot snapshot) {
    int x = 20;
    int y = height - 20;

    batch.begin();
    for (LeaderBoard.Stats stats : JavaConversions.asJavaIterable(snapshot.leaderBoard().sortedByHighest().take(4))) {
        ClientId player = stats.player();
        if (stats.hits() == 0)
            continue;

        Texture tex = bodyTextures[0];
        batch.draw(tex, x, y - tex.getWidth() * 5 / 6);

        String text = snapshot.nicknameFor(player) + " " + stats.hits() + "/" + stats.deaths();
        textRenderer.drawString(batch, text, x + tex.getWidth() + 20, y);

        y -= 40;
    }
    batch.end();
}
 
Example 33
Project: beadledom   File: SwaggerAvroModelConverter.java
@Override
public Option<Model> read(Class<?> cls, Map<String, String> typeMap) {
  Schema schema = getSchema(cls);
  if (schema == null) {
    return Option.empty();
  }

  LinkedHashMap<String, ModelProperty> properties = new LinkedHashMap<>();
  for (Schema.Field field : schema.getFields()) {
    ModelProperty property = parseField(field);
    if (property == null) {
      LOGGER.debug(
          "Omitted field {} of schema {} from swagger docs", field.name(), schema.getName());
    } else {
      properties.update(getFieldName(field), property);
    }
  }

  return Option.apply(
      new Model(
          toName(cls),
          toName(cls),
          cls.getName(),
          properties,
          toDescriptionOpt(cls),
          Option.<String>empty(),
          Option.<String>empty(),
          JavaConversions.asScalaBuffer(Collections.<String>emptyList()).toList()));
}
 
Example 34
Project: net.jgp.labs.informix2spark   File: ScalaSeq.java
/**
 * @param args
 */
public static void main(String[] args) {
  List<String> javaList = new ArrayList<>();
  javaList.add("one");
  javaList.add("two");
  javaList.add("three");

  System.out.println(javaList); // prints [one, two, three]

  scala.collection.Seq<String> s = JavaConversions
      .asScalaBuffer(javaList);
  System.out.println(s); // prints Buffer(one, two, three)
}
 
Example 35
Project: net.jgp.labs.informix2spark   File: ScalaSet.java
/**
 * Adapted from
 * https://stackoverflow.com/questions/3025291/example-of-using-scala-collection-immutable-set-from-java.
 * 
 * @param args
 */
public static void main(String[] args) {
  Set<String> javaSet = new HashSet<>();
  javaSet.add("one");
  javaSet.add("two");
  javaSet.add("three");

  System.out.println(javaSet); // prints [one, two, three]

  scala.collection.Set<String> s = JavaConversions
      .asScalaSet(javaSet);
  System.out.println(s); // prints Set(one, two, three)
}
 
Example 36
Project: kafka-visualizer   File: KafkaUtils.java
public ImmutableList<KafkaConsumerInfo> getAllConsumers() {
    ImmutableList<GroupOverview> consumerGroups =
            ImmutableList.copyOf(JavaConversions.asJavaCollection(adminClient.listAllConsumerGroupsFlattened()));

    ImmutableList.Builder<KafkaConsumerInfo> consumers = ImmutableList.builder();

    consumerGroups.stream()
            .filter(groupOverview -> !groupOverview.groupId().equals(KafkaStatics.GROUP_ID))
            .forEach(consumerGroup -> {
                AdminClient.ConsumerGroupSummary consumerGroupSummary =
                        adminClient.describeConsumerGroup(consumerGroup.groupId(), Duration.ofSeconds(10).toMillis());

                if (!consumerGroupSummary.consumers().isDefined()) {
                    return;
                }

                ImmutableList<AdminClient.ConsumerSummary> consumerSummaries = ImmutableList.copyOf(
                        JavaConversions.asJavaCollection(consumerGroupSummary.consumers().get()));

                if (consumerSummaries.isEmpty()) {
                    return;
                }

                consumers.addAll(convertToKafkaConsumerInfos(consumerSummaries, consumerGroup.groupId()));
            });

    return consumers.build();
}
 
Example 37
Project: kafka-visualizer   File: KafkaUtils.java
private ImmutableList<KafkaConsumerInfo> convertToKafkaConsumerInfos(ImmutableList<AdminClient.ConsumerSummary> consumerSummaries, String groupId) {
    return consumerSummaries.stream()
            .map(consumerSummary -> {
                ImmutableList<KafkaConsumerInfo.Assignment> assignments = JavaConversions.asJavaCollection(consumerSummary.assignment())
                        .stream()
                        .map(assignment -> new KafkaConsumerInfo.Assignment(assignment.topic(), assignment.partition()))
                        .collect(ImmutableList.toImmutableList());

                return new KafkaConsumerInfo(consumerSummary.consumerId(), consumerSummary.clientId(), assignments, groupId);
            })
            .collect(ImmutableList.toImmutableList());
}
 
Example 38
Project: kmanager   File: OffsetGetter.java
public List<String> getTopics() {
	List<String> topics = null;
	try {
		topics = JavaConversions
				.seqAsJavaList(ZKUtils.getZKUtilsFromKafka().getChildren(ZkUtils.BrokerTopicsPath()));
	} catch (Exception e) {
		LOG.error("could not get topics because of " + e.getMessage(), e);
	}
	return topics;
}