scala.collection.JavaConverters Java Examples

The following examples show how to use scala.collection.JavaConverters from Java. Each example is taken from an open-source project; the source file, project, and license are noted above it. Note that scala.collection.JavaConverters is deprecated since Scala 2.13 in favor of scala.jdk.CollectionConverters, though the conversion patterns shown here carry over largely unchanged.
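Before the individual examples, a minimal self-contained sketch of the converter pattern may be useful: each JavaConverters method returns a converter object whose asScala()/asJava() call produces a lightweight wrapper view over the underlying collection rather than a copy. This is a sketch, assuming a Scala 2.11/2.12 library on the classpath; the class and variable names are illustrative only and appear in none of the projects below.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import scala.collection.JavaConverters;
import scala.collection.Seq;

// Illustrative sketch; not taken from any of the projects below.
public class JavaConvertersSketch {
    public static void main(String[] args) {
        // Java -> Scala: wrap a java.util.List as a Scala Buffer (a Seq subtype).
        List<String> javaList = Arrays.asList("a", "b", "c");
        Seq<String> scalaSeq = JavaConverters.asScalaBufferConverter(javaList).asScala();

        // Scala -> Java: wrap the Seq back as a java.util.List; no elements are copied.
        List<String> roundTripped = JavaConverters.seqAsJavaListConverter(scalaSeq).asJava();
        System.out.println(roundTripped); // prints [a, b, c]

        // Maps follow the same pattern via mapAsScalaMapConverter / mapAsJavaMapConverter.
        Map<String, Integer> javaMap = JavaConverters.mapAsJavaMapConverter(
                JavaConverters.mapAsScalaMapConverter(Collections.singletonMap("x", 1)).asScala())
                .asJava();
        System.out.println(javaMap); // prints {x=1}
    }
}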
Example #1
Source File: KafkaProduceOffsetFetcher.java    From DDMQ with Apache License 2.0
private Map<Integer, Map<String, List<Integer>>> parseMetadataResponse(TopicMetadataResponse response) {
    Map<Integer/*broker id*/, Map<String/*topic*/, List<Integer>/*partition id*/>> metadata = Maps.newHashMap();
    Seq<TopicMetadata> topicMetadatas = response.topicsMetadata();
    for (TopicMetadata topicMetadata : JavaConverters.asJavaListConverter(topicMetadatas).asJava()) {
        List<PartitionMetadata> partitionsMetadata = JavaConverters.asJavaListConverter(topicMetadata.partitionsMetadata()).asJava();
        String topic = topicMetadata.topic();
        for (PartitionMetadata partitionMetadata : partitionsMetadata) {
            int partitionId = partitionMetadata.partitionId();
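            // leader() returns a Scala Option; get() assumes the partition currently has a leader.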
            int brokerId = partitionMetadata.leader().get().id();
            if (!metadata.containsKey(brokerId)) {
                metadata.put(brokerId, Maps.newHashMap());
            }
            if (!metadata.get(brokerId).containsKey(topic)) {
                metadata.get(brokerId).put(topic, Lists.newArrayList());
            }
            metadata.get(brokerId).get(topic).add(partitionId);
        }
    }
    return metadata;
}
 
Example #2
Source File: TestYarnJob.java    From samza with Apache License 2.0
@Test
public void testBuildJobSubmissionEnvironment() throws IOException {
  Config config = new MapConfig(new ImmutableMap.Builder<String, String>()
      .put(JobConfig.JOB_NAME, "jobName")
      .put(JobConfig.JOB_ID, "jobId")
      .put(JobConfig.CONFIG_LOADER_FACTORY, "org.apache.samza.config.loaders.PropertiesConfigLoaderFactory")
      .put(YarnConfig.AM_JVM_OPTIONS, "")
      .put(JobConfig.JOB_SPLIT_DEPLOYMENT_ENABLED, "true")
      .build());
  String expectedSubmissionConfig = Util.envVarEscape(SamzaObjectMapper.getObjectMapper()
      .writeValueAsString(config));
  Map<String, String> expected = ImmutableMap.of(
      ShellCommandConfig.ENV_SUBMISSION_CONFIG, expectedSubmissionConfig,
      ShellCommandConfig.ENV_JAVA_OPTS, "",
      ShellCommandConfig.ENV_SPLIT_DEPLOYMENT_ENABLED, "true",
      ShellCommandConfig.ENV_APPLICATION_LIB_DIR, "./__package/lib");
  assertEquals(expected, JavaConverters.mapAsJavaMapConverter(
      YarnJob$.MODULE$.buildEnvironment(config, new YarnConfig(config), new JobConfig(config))).asJava());
}
 
Example #3
Source File: TestYarnJob.java    From samza with Apache License 2.0
@Test
public void testBuildEnvironmentJobCoordinatorDependencyIsolationEnabled() throws IOException {
  Config config = new MapConfig(new ImmutableMap.Builder<String, String>()
      .put(JobConfig.JOB_NAME, "jobName")
      .put(JobConfig.JOB_ID, "jobId")
      .put(JobConfig.JOB_COORDINATOR_SYSTEM, "jobCoordinatorSystem")
      .put(YarnConfig.AM_JVM_OPTIONS, "")
      .put(JobConfig.JOB_SPLIT_DEPLOYMENT_ENABLED, "true")
      .build());
  String expectedCoordinatorStreamConfigStringValue = Util.envVarEscape(SamzaObjectMapper.getObjectMapper()
      .writeValueAsString(CoordinatorStreamUtil.buildCoordinatorStreamConfig(config)));
  Map<String, String> expected = ImmutableMap.of(
      ShellCommandConfig.ENV_COORDINATOR_SYSTEM_CONFIG, expectedCoordinatorStreamConfigStringValue,
      ShellCommandConfig.ENV_JAVA_OPTS, "",
      ShellCommandConfig.ENV_SPLIT_DEPLOYMENT_ENABLED, "true",
      ShellCommandConfig.ENV_APPLICATION_LIB_DIR, "./__package/lib");
  assertEquals(expected, JavaConverters.mapAsJavaMapConverter(
      YarnJob$.MODULE$.buildEnvironment(config, new YarnConfig(config), new JobConfig(config))).asJava());
}
 
Example #4
Source File: RaygunPlayScalaRequestHeaderMessage.java    From raygun4java with MIT License
public RaygunPlayScalaRequestHeaderMessage(RequestHeader scalaRequestHeader) {
    try {
        hostName = scalaRequestHeader.host();
        url = scalaRequestHeader.uri();
        httpMethod = scalaRequestHeader.method();
        ipAddress = scalaRequestHeader.remoteAddress();

        String rawQuery = scalaRequestHeader.rawQueryString();

        if (!rawQuery.isEmpty()) {
            queryString = queryStringToMap(rawQuery);
        }

        headers = JavaConverters.mapAsJavaMapConverter(scalaRequestHeader.headers().toSimpleMap()).asJava();
    } catch (Throwable t) {
        Logger.getLogger("Raygun4Java-Play2").info("Couldn't get all request params: " + t.getMessage());
    }

}
 
Example #5
Source File: CassandraConnectorITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCassandraScalaTupleAtLeastOnceSinkBuilderDetection() throws Exception {
	Class<scala.Tuple1<String>> c = (Class<scala.Tuple1<String>>) new scala.Tuple1<>("hello").getClass();
	Seq<TypeInformation<?>> typeInfos = JavaConverters.asScalaBufferConverter(
		Collections.<TypeInformation<?>>singletonList(BasicTypeInfo.STRING_TYPE_INFO)).asScala();
	Seq<String> fieldNames = JavaConverters.asScalaBufferConverter(
		Collections.singletonList("_1")).asScala();

	CaseClassTypeInfo<scala.Tuple1<String>> typeInfo = new CaseClassTypeInfo<scala.Tuple1<String>>(c, null, typeInfos, fieldNames) {
		@Override
		public TypeSerializer<scala.Tuple1<String>> createSerializer(ExecutionConfig config) {
			return null;
		}
	};

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	DataStream<scala.Tuple1<String>> input = env.fromElements(new scala.Tuple1<>("hello")).returns(typeInfo);

	CassandraSink.CassandraSinkBuilder<scala.Tuple1<String>> sinkBuilder = CassandraSink.addSink(input);
	assertTrue(sinkBuilder instanceof CassandraSink.CassandraScalaProductSinkBuilder);
}
 
Example #6
Source File: PartialCallGraph.java    From fasten with Apache License 2.0
/**
 * Creates a class hierarchy for the given call graph's artifact.
 * @param cg {@link ComputedCallGraph}
 * @return A Map of {@link ObjectType} and the {@link OPALType} created for it.
 * @implNote Inside {@link OPALType} all methods are indexed, meaning one can use
 *     the ids assigned to each method instead of the method itself.
 */
public static Map<ObjectType, OPALType> createCHA(final ComputedCallGraph cg) {

    final var project = cg.callGraph().project();
    final AtomicInteger methodNum = new AtomicInteger();
    final Map<ObjectType, OPALType> cha = new HashMap<>();
    for (final var obj : sort(JavaConverters.asJavaIterable(project.allProjectClassFiles()))) {
        final var classFile = (ClassFile) obj;
        final var currentClass = classFile.thisType();
        final var methods =
            getMethodsMap(methodNum.get(), JavaConverters.asJavaIterable(classFile.methods()));
        final var type =
            new OPALType(methods, extractSuperClasses(project.classHierarchy(), currentClass),
                extractSuperInterfaces(project.classHierarchy(), currentClass),
                classFile.sourceFile()
                    .getOrElse(JavaToScalaConverter.asScalaFunction0OptionString("NotFound")));
        cha.put(currentClass, type);
        methodNum.addAndGet(methods.size());
    }

    return cha;
}
 
Example #7
Source File: PartialCallGraph.java    From fasten with Apache License 2.0
/**
 * Given a call graph and a CHA, creates a list of internal calls. This list identifies source
 * and target methods by their unique within-artifact ids existing in the CHA.
 * @param cg  {@link ComputedCallGraph}
 * @param cha A Map of {@link ObjectType} and {@link RevisionCallGraph.Type}
 * @return a list of two-element lists of Integers in which the first element is the source
 *     method id and the second one is the target method id.
 */
private List<List<Integer>> getInternalCalls(final ComputedCallGraph cg,
                                             final Map<ObjectType, OPALType> cha) {
    final Set<List<Integer>> resultSet = new HashSet<>();
    for (final var source : JavaConverters
        .asJavaIterable(cg.callGraph().project().allMethods())) {
        final var targetsMap = cg.callGraph().calls(source);
        if (targetsMap != null && !targetsMap.isEmpty()) {
            for (final var keyValue : JavaConverters.asJavaIterable(targetsMap)) {
                for (final var target : JavaConverters.asJavaIterable(keyValue._2())) {
                    final var call = Arrays.asList(
                        cha.get(source.declaringClassFile().thisType()).getMethods().get(
                            source),
                        cha.get(target.declaringClassFile().thisType()).getMethods().get(target)
                    );
                    resultSet.add(call);
                }
            }
        }
    }
    return new ArrayList<>(resultSet);
}
 
Example #8
Source File: OpenKoreanTextTokenFilter.java    From elasticsearch-analysis-openkoreantext with Apache License 2.0
@Override
public final boolean incrementToken() throws IOException {
    clearAttributes();

    if (input instanceof KoreanTokenPrepareable) {
        if (preparedTokens == null) {
            this.preparedTokens = JavaConverters.seqAsJavaList(prepareKoreanTokens());
        }

        if (this.preparedTokens == null || this.preparedTokens.isEmpty() || tokenIndex >= this.preparedTokens.size()) {
            return false;
        }

        setAttributes(this.preparedTokens.get(tokenIndex++));
        return true;
    } else {
        return input.incrementToken();
    }
}
 
Example #9
Source File: OpenKoreanTextPhraseExtractor.java    From elasticsearch-analysis-openkoreantext with Apache License 2.0
private Seq<KoreanToken> convertPhrasesToTokens(Seq<KoreanPhrase> phrases) {
    KoreanToken[] tokens = new KoreanToken[phrases.length()];

    Iterator<KoreanPhrase> iterator = phrases.iterator();
    int i = 0;
    while (iterator.hasNext()) {
        KoreanPhrase phrase = iterator.next();
        tokens[i++] = new KoreanToken(phrase.text(), phrase.pos(), phrase.offset(), phrase.length(), scala.Option.apply(null), false);
    }

    Arrays.sort(tokens, (o1, o2) -> {
        if (o1.offset() == o2.offset())
            return 0;
        return o1.offset() < o2.offset() ? -1 : 1;
    });

    return JavaConverters.asScalaBuffer(Arrays.asList(tokens)).toSeq();
}
 
Example #10
Source File: CassandraConnectorITCase.java    From flink with Apache License 2.0
@Test
public void testCassandraScalaTupleAtLeastOnceSinkBuilderDetection() throws Exception {
	Class<scala.Tuple1<String>> c = (Class<scala.Tuple1<String>>) new scala.Tuple1<>("hello").getClass();
	Seq<TypeInformation<?>> typeInfos = JavaConverters.asScalaBufferConverter(
		Collections.<TypeInformation<?>>singletonList(BasicTypeInfo.STRING_TYPE_INFO)).asScala();
	Seq<String> fieldNames = JavaConverters.asScalaBufferConverter(
		Collections.singletonList("_1")).asScala();

	CaseClassTypeInfo<scala.Tuple1<String>> typeInfo = new CaseClassTypeInfo<scala.Tuple1<String>>(c, null, typeInfos, fieldNames) {
		@Override
		public TypeSerializer<scala.Tuple1<String>> createSerializer(ExecutionConfig config) {
			return null;
		}
	};

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	DataStream<scala.Tuple1<String>> input = env.fromElements(new scala.Tuple1<>("hello")).returns(typeInfo);

	CassandraSink.CassandraSinkBuilder<scala.Tuple1<String>> sinkBuilder = CassandraSink.addSink(input);
	assertTrue(sinkBuilder instanceof CassandraSink.CassandraScalaProductSinkBuilder);
}
 
Example #11
Source File: HiveTableSourceTest.java    From flink with Apache License 2.0
/**
 * Tests reading from a partitioned table.
 */
@Test
public void testReadPartitionTable() throws Exception {
	final String catalogName = "hive";
	final String dbName = "source_db";
	final String tblName = "test_table_pt";
	hiveShell.execute("CREATE TABLE source_db.test_table_pt " +
					"(year STRING, value INT) partitioned by (pt int);");
	hiveShell.insertInto(dbName, tblName)
			.withColumns("year", "value", "pt")
			.addRow("2014", 3, 0)
			.addRow("2014", 4, 0)
			.addRow("2015", 2, 1)
			.addRow("2015", 5, 1)
			.commit();
	TableEnvironment tEnv = HiveTestUtils.createTableEnv();
	tEnv.registerCatalog(catalogName, hiveCatalog);
	Table src = tEnv.sqlQuery("select * from hive.source_db.test_table_pt");
	List<Row> rows = JavaConverters.seqAsJavaListConverter(TableUtil.collect((TableImpl) src)).asJava();

	assertEquals(4, rows.size());
	Object[] rowStrings = rows.stream().map(Row::toString).sorted().toArray();
	assertArrayEquals(new String[]{"2014,3,0", "2014,4,0", "2015,2,1", "2015,5,1"}, rowStrings);
}
 
Example #12
Source File: HiveTableSourceTest.java    From flink with Apache License 2.0
@Test
public void testReadNonPartitionedTable() throws Exception {
	final String catalogName = "hive";
	final String dbName = "source_db";
	final String tblName = "test";
	hiveShell.execute("CREATE TABLE source_db.test ( a INT, b INT, c STRING, d BIGINT, e DOUBLE)");
	hiveShell.insertInto(dbName, tblName)
			.withAllColumns()
			.addRow(1, 1, "a", 1000L, 1.11)
			.addRow(2, 2, "b", 2000L, 2.22)
			.addRow(3, 3, "c", 3000L, 3.33)
			.addRow(4, 4, "d", 4000L, 4.44)
			.commit();

	TableEnvironment tEnv = HiveTestUtils.createTableEnv();
	tEnv.registerCatalog(catalogName, hiveCatalog);
	Table src = tEnv.sqlQuery("select * from hive.source_db.test");
	List<Row> rows = JavaConverters.seqAsJavaListConverter(TableUtil.collect((TableImpl) src)).asJava();

	Assert.assertEquals(4, rows.size());
	Assert.assertEquals("1,1,a,1000,1.11", rows.get(0).toString());
	Assert.assertEquals("2,2,b,2000,2.22", rows.get(1).toString());
	Assert.assertEquals("3,3,c,3000,3.33", rows.get(2).toString());
	Assert.assertEquals("4,4,d,4000,4.44", rows.get(3).toString());
}
 
Example #13
Source File: KafkaProduceOffsetFetcher.java    From DDMQ with Apache License 2.0
public Map<String, Long> getConsumeOffset(String group, String topic) throws Exception {
    Map<String/* qid */, Long/* consume offset */> ret = new HashMap<>();
    StringBuilder sbConsumeOffsetDir = new StringBuilder();
    sbConsumeOffsetDir.append(ZkUtils.ConsumersPath()).append("/").append(group).append("/offsets/").append(topic);
    String consumeOffsetDir = sbConsumeOffsetDir.toString();

    if (!ZkUtils.pathExists(zkClient, consumeOffsetDir)) {
        return ret;
    }

    for (String id : JavaConverters.asJavaListConverter(ZkUtils.getChildren(zkClient, consumeOffsetDir)).asJava()) {
        try {
            ret.put(id, Long.parseLong(ZkUtils.readData(zkClient, consumeOffsetDir + "/" + id)._1()));
        } catch (Exception e) {
            ret.put(id, -1L);
        }
    }

    return ret;
}
 
Example #14
Source File: PartialCallGraph.java    From fasten with Apache License 2.0
/**
 * Given a call graph and a CHA, creates a map of external calls and their call types. This map
 * identifies source methods by their unique within-artifact id existing in the CHA and target
 * methods by their {@link FastenURI}.
 * @param cg  {@link ComputedCallGraph}
 * @param cha A Map of {@link ObjectType} and {@link RevisionCallGraph.Type}
 * @return A map in which each key is a {@link Pair} of a source method's id and a target
 *     method's {@link FastenURI}, and each value is a map of call types: its keys are the
 *     names of JVM call types and its values are the number of invocations of this specific
 *     edge by that call type.
 */
private Map<Pair<Integer, FastenURI>, Map<String, String>> getExternalCalls(
    final ComputedCallGraph cg,
    final Map<ObjectType, OPALType> cha) {
    final var externalCalls = cg.unresolvedMethodCalls();
    final Map<Pair<Integer, FastenURI>, Map<String, String>> result = new HashMap<>();

    for (final var externalCall : JavaConverters.asJavaIterable(externalCalls)) {

        final var call = new MutablePair<>(
            cha.get(externalCall.caller().declaringClassFile().thisType()).getMethods()
                .get(externalCall.caller()),
            getTargetURI(externalCall));
        final var typeOfCall =
            externalCall.caller().instructionsOption().get()[externalCall.pc()].mnemonic();
        putCall(result, call, typeOfCall);
    }

    return result;
}
 
Example #15
Source File: MethodTest.java    From fasten with Apache License 2.0
@Test
public void testGetParametersURI() {

    assertArrayEquals(
        new FastenJavaURI[0],
        OPALMethod.getParametersURI(JavaConverters.seqAsJavaList(
            oneEdgeMethods.stream().filter(i -> i.name().equals("sourceMethod")).findFirst()
                .orElseThrow().parameterTypes())));


    assertArrayEquals(
        new FastenJavaURI[] {new FastenJavaURI("/java.lang/Object%25255B%25255D")},
        OPALMethod.getParametersURI(
            JavaConverters.seqAsJavaList(arrayMethods.stream().filter(i -> i.name().equals(
                "targetMethod")).findFirst().orElseThrow().parameterTypes())));

}
 
Example #16
Source File: PartialCallGraphTest.java    From fasten with Apache License 2.0
/**
 * SingleSourceToTarget is Java 8 compiled bytecode of the following source:
 * <pre>
 * package name.space;
 *
 * public class SingleSourceToTarget {
 *
 *     public static void sourceMethod() { targetMethod(); }
 *
 *     public static void targetMethod() {}
 * }
 * </pre>
 * It includes these edges. Resolved: [public static void sourceMethod(), public static void
 * targetMethod()]. Unresolved: [public void {@code <init>}() of the current class, public void
 * {@code <init>}() of the Object class].
 */
@BeforeClass
public static void generateCallGraph() {

    jarFile = new File(
            Thread.currentThread().getContextClassLoader().getResource("SingleSourceToTarget.class")
                    .getFile());
    singleSourceToTarget = new PartialCallGraph(jarFile);
    artifactInOpalFormat = Project.apply(jarFile);
    cg = PartialCallGraph.generateCallGraph(jarFile);
    cha = PartialCallGraph.createCHA(cg);
    methodsList =
            new ArrayList<>(JavaConverters.asJavaCollection(cg.callGraph().project().allMethods()));
    methodsMap = new HashMap<>(Map.of(
            0, FastenURI.create(
                    "/name.space/SingleSourceToTarget.SingleSourceToTarget()%2Fjava.lang%2FVoidType"),
            1,
            FastenURI.create("/name.space/SingleSourceToTarget.sourceMethod()%2Fjava"
                    + ".lang%2FVoidType"),
            2, FastenURI
                    .create("/name.space/SingleSourceToTarget.targetMethod()%2Fjava.lang%2FVoidType")));

}
 
Example #17
Source File: PartialCallGraphTest.java    From fasten with Apache License 2.0
@Test
public void testGenerateCallGraph() {

    final var cg = PartialCallGraph.generateCallGraph(jarFile);
    final var allMethods =
            new ArrayList<>(JavaConverters.asJavaCollection(cg.callGraph().project().allMethods()));
    assertEquals("public void <init>()", allMethods.get(0).toString());
    assertEquals("public static void sourceMethod()", allMethods.get(1).toString());
    assertEquals("public static void targetMethod()", allMethods.get(2).toString());

    final var externalCalls =
            new ArrayList<>(JavaConverters.asJavaCollection(cg.unresolvedMethodCalls()));
    assertEquals("public void <init>()", externalCalls.get(0).caller().toString());
    assertEquals("java.lang.Object", externalCalls.get(0).calleeClass().toJava());
    assertEquals("<init>", externalCalls.get(0).calleeName());
    assertEquals("(): void", externalCalls.get(0).calleeDescriptor().valueToString());

}
 
Example #18
Source File: TestYarnJob.java    From samza with Apache License 2.0
@Test
public void testBuildEnvironmentWithAMJavaHome() throws IOException {
  Config config = new MapConfig(new ImmutableMap.Builder<String, String>()
      .put(JobConfig.JOB_NAME, "jobName")
      .put(JobConfig.JOB_ID, "jobId")
      .put(JobConfig.JOB_COORDINATOR_SYSTEM, "jobCoordinatorSystem")
      .put(YarnConfig.AM_JVM_OPTIONS, "")
      .put(JobConfig.JOB_SPLIT_DEPLOYMENT_ENABLED, "false")
      .put(YarnConfig.AM_JAVA_HOME, "/some/path/to/java/home")
      .build());
  String expectedCoordinatorStreamConfigStringValue = Util.envVarEscape(SamzaObjectMapper.getObjectMapper()
      .writeValueAsString(CoordinatorStreamUtil.buildCoordinatorStreamConfig(config)));
  Map<String, String> expected = ImmutableMap.of(
      ShellCommandConfig.ENV_COORDINATOR_SYSTEM_CONFIG, expectedCoordinatorStreamConfigStringValue,
      ShellCommandConfig.ENV_JAVA_OPTS, "",
      ShellCommandConfig.ENV_SPLIT_DEPLOYMENT_ENABLED, "false",
      ShellCommandConfig.ENV_JAVA_HOME, "/some/path/to/java/home");
  assertEquals(expected, JavaConverters.mapAsJavaMapConverter(
      YarnJob$.MODULE$.buildEnvironment(config, new YarnConfig(config), new JobConfig(config))).asJava());
}
 
Example #19
Source File: SparkDatasetBoundedSourceVertex.java    From incubator-nemo with Apache License 2.0
@Override
protected Iterator<T> initializeIterator() {
  // Set up the same environment in the executors.
  final SparkSession spark = SparkSession.builder()
    .config(sessionInitialConf)
    .getOrCreate();
  final Dataset<T> dataset;

  try {
    dataset = SparkSession.initializeDataset(spark, commands);
  } catch (final OperationNotSupportedException e) {
    throw new IllegalStateException(e);
  }

  // Spark does lazy evaluation: it doesn't load the full dataset, but only the partition it is asked for.
  final RDD<T> rdd = dataset.sparkRDD();
  final Iterable<T> iterable = () -> JavaConverters.asJavaIteratorConverter(
    rdd.iterator(rdd.getPartitions()[partitionIndex], TaskContext$.MODULE$.empty())).asJava();
  return iterable.iterator();
}
 
Example #20
Source File: ArtifactSourceUtils.java    From rug-cli with GNU General Public License v3.0
public static ArtifactSource filterMetaInf(ArtifactSource source) {
    return source.filter(new AbstractFunction1<DirectoryArtifact, Object>() {
        @Override
        public Object apply(DirectoryArtifact dir) {
            // This is required to remove our maven packaging information
            if (dir.name().equals("META-INF")) {
                Optional<Artifact> nonMavenArtifact = JavaConverters
                        .asJavaCollectionConverter(dir.artifacts()).asJavaCollection().stream()
                        .filter(a -> !a.path().startsWith("META-INF/maven")).findAny();
                return nonMavenArtifact.isPresent();
            }
            return (!dir.path().equals("META-INF/maven"));
        }
    }, new AbstractFunction1<FileArtifact, Object>() {
        @Override
        public Object apply(FileArtifact arg0) {
            return true;
        }
    });
}
 
Example #21
Source File: SparkBenchmarkUtil.java    From iceberg with Apache License 2.0
public static UnsafeProjection projection(Schema expectedSchema, Schema actualSchema) {
  StructType struct = SparkSchemaUtil.convert(actualSchema);

  List<AttributeReference> refs = JavaConverters.seqAsJavaListConverter(struct.toAttributes()).asJava();
  List<Attribute> attrs = Lists.newArrayListWithExpectedSize(struct.fields().length);
  List<Expression> exprs = Lists.newArrayListWithExpectedSize(struct.fields().length);

  for (AttributeReference ref : refs) {
    attrs.add(ref.toAttribute());
  }

  for (Types.NestedField field : expectedSchema.columns()) {
    int indexInIterSchema = struct.fieldIndex(field.name());
    exprs.add(refs.get(indexInIterSchema));
  }

  return UnsafeProjection.create(
      JavaConverters.asScalaBufferConverter(exprs).asScala().toSeq(),
      JavaConverters.asScalaBufferConverter(attrs).asScala().toSeq());
}
 
Example #22
Source File: TreeNodeTreeCreator.java    From rug-cli with GNU General Public License v3.0
private static void collectNodes(GraphNode node, Map<GraphNode, Integer> processed,
        Map<GraphNode, Integer> counts, AtomicInteger counter) {
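    // Depth-first walk: interior nodes get a fresh id on first visit (leaf nodes get id 0);
    // counts records how many times each node is reached.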
    if (node.relatedNodes().size() > 0) {
        if (!processed.containsKey(node)) {
            int id = counter.incrementAndGet();
            processed.put(node, id);
            counts.put(node, 1);
            JavaConverters.asJavaCollectionConverter(node.relatedNodes()).asJavaCollection()
                    .forEach(c -> collectNodes(c, processed, counts, counter));
        }
        else {
            counts.put(node, counts.get(node) + 1);
        }
    }
    else {
        if (!processed.containsKey(node)) {
            processed.put(node, 0);
            counts.put(node, 1);
        }
        else {
            counts.put(node, counts.get(node) + 1);
        }
    }
}
 
Example #23
Source File: RowDataReader.java    From iceberg with Apache License 2.0
private static UnsafeProjection projection(Schema finalSchema, Schema readSchema) {
  StructType struct = SparkSchemaUtil.convert(readSchema);

  List<AttributeReference> refs = JavaConverters.seqAsJavaListConverter(struct.toAttributes()).asJava();
  List<Attribute> attrs = Lists.newArrayListWithExpectedSize(struct.fields().length);
  List<org.apache.spark.sql.catalyst.expressions.Expression> exprs =
      Lists.newArrayListWithExpectedSize(struct.fields().length);

  for (AttributeReference ref : refs) {
    attrs.add(ref.toAttribute());
  }

  for (Types.NestedField field : finalSchema.columns()) {
    int indexInReadSchema = struct.fieldIndex(field.name());
    exprs.add(refs.get(indexInReadSchema));
  }

  return UnsafeProjection.create(
      JavaConverters.asScalaBufferConverter(exprs).asScala().toSeq(),
      JavaConverters.asScalaBufferConverter(attrs).asScala().toSeq());
}
 
Example #24
Source File: TestYarnJob.java    From samza with Apache License 2.0
@Test
public void testBuildEnvironment() throws IOException {
  String amJvmOptions = "-Xmx1g -Dconfig.key='config value'";
  Config config = new MapConfig(new ImmutableMap.Builder<String, String>()
      .put(JobConfig.JOB_NAME, "jobName")
      .put(JobConfig.JOB_ID, "jobId")
      .put(JobConfig.JOB_COORDINATOR_SYSTEM, "jobCoordinatorSystem")
      .put(YarnConfig.AM_JVM_OPTIONS, amJvmOptions) // needs escaping
      .put(JobConfig.JOB_SPLIT_DEPLOYMENT_ENABLED, "false")
      .build());
  String expectedCoordinatorStreamConfigStringValue = Util.envVarEscape(SamzaObjectMapper.getObjectMapper()
      .writeValueAsString(CoordinatorStreamUtil.buildCoordinatorStreamConfig(config)));
  Map<String, String> expected = ImmutableMap.of(
      ShellCommandConfig.ENV_COORDINATOR_SYSTEM_CONFIG, expectedCoordinatorStreamConfigStringValue,
      ShellCommandConfig.ENV_JAVA_OPTS, Util.envVarEscape(amJvmOptions),
      ShellCommandConfig.ENV_SPLIT_DEPLOYMENT_ENABLED, "false");
  assertEquals(expected, JavaConverters.mapAsJavaMapConverter(
      YarnJob$.MODULE$.buildEnvironment(config, new YarnConfig(config), new JobConfig(config))).asJava());
}
 
Example #25
Source File: CompressedDDataHandler.java    From ditto with Eclipse Public License 2.0
@Override
public CompletionStage<Collection<ActorRef>> getSubscribers(final Collection<ByteString> topic) {

    return get(Replicator.readLocal()).thenApply(optional -> {
        if (optional.isPresent()) {
            final ORMultiMap<ActorRef, ByteString> mmap = optional.get();
            ddataMetrics.set((long) mmap.size());
            return JavaConverters.mapAsJavaMap(mmap.entries())
                    .entrySet()
                    .stream()
                    .filter(entry -> topic.stream().anyMatch(entry.getValue()::contains))
                    .map(Map.Entry::getKey)
                    .collect(Collectors.toList());
        } else {
            ddataMetrics.set(0L);
            return Collections.emptyList();
        }
    });
}
 
Example #26
Source File: RaygunPlayScalaRequestMessage.java    From raygun4java with MIT License
public RaygunPlayScalaRequestMessage(Request request) {
    try {
        httpMethod = request.method();
        ipAddress = request.remoteAddress();
        hostName = request.host();
        url = request.uri();

        headers = JavaConverters.mapAsJavaMapConverter(request.headers().toSimpleMap()).asJava();
        String rawQuery = request.rawQueryString();

        if (rawQuery != null && rawQuery.length() > 0) {
            queryString = queryStringToMap(rawQuery);
        }
    } catch (NullPointerException e) {
        Logger.getLogger("Raygun4Java-Play2").info("Couldn't get all request params: " + e.getMessage());
    }
}
 
Example #27
Source File: AzureJobCoordinator.java    From samza with Apache License 2.0
/**
 * For each input stream specified in the config, determine its exact
 * partitions, returning a set of SystemStreamPartitions containing them all.
 */
private Set<SystemStreamPartition> getInputStreamPartitions() {
  TaskConfig taskConfig = new TaskConfig(config);
  scala.collection.immutable.Set<SystemStream> inputSystemStreams =
      JavaConverters.asScalaSetConverter(taskConfig.getInputStreams()).asScala().toSet();

  // Get the set of partitions for each SystemStream from the stream metadata
  Set<SystemStreamPartition>
      sspSet = JavaConverters.mapAsJavaMapConverter(streamMetadataCache.getStreamMetadata(inputSystemStreams, true)).asJava()
      .entrySet()
      .stream()
      .flatMap(this::mapSSMToSSP)
      .collect(Collectors.toSet());

  return sspSet;
}
 
Example #28
Source File: OPALMethod.java    From fasten with Apache License 2.0
/**
 * Converts a method to a canonicalized, schemeless eu.fasten.core.data.FastenURI.
 * @param product    The product to which the entity belongs.
 * @param clas       The class of the method, in org.opalj.br.ReferenceType format.
 * @param method     Name of the method as a String.
 * @param descriptor Descriptor of the method, in org.opalj.br.MethodDescriptor format.
 * @return Canonicalized, schemeless eu.fasten.core.data.FastenURI of the given method.
 */
public static FastenURI toCanonicalSchemelessURI(final String product, final ReferenceType clas,
                                                 final String method,
                                                 final MethodDescriptor descriptor)
    throws IllegalArgumentException, NullPointerException {
    final var javaURI = FastenJavaURI.create(null, product, null,
        getPackageName(clas),
        getClassName(clas),
        getMethodName(getClassName(clas), method),
        getParametersURI(JavaConverters.seqAsJavaList(descriptor.parameterTypes())),
        getTypeURI(descriptor.returnType())
    ).canonicalize();

    return FastenURI.createSchemeless(javaURI.getRawForge(), javaURI.getRawProduct(),
        javaURI.getRawVersion(),
        javaURI.getRawNamespace(), javaURI.getRawEntity());

}
 
Example #29
Source File: KafkaClusterManager.java    From doctorkafka with Apache License 2.0
/**
 * Get the replica assignment for a given topic partition. This information is retrieved from
 * ZooKeeper, because the topic metadata that we get from kafkaConsumer.listTopic() does not
 * specify the preferred leader for topic partitions.
 *
 * @param tp  topic partition
 * @return the list of ids of the brokers that host the replicas
 */
private List<Integer> getReplicaAssignment(TopicPartition tp) {
  scala.collection.Map<Object, Seq<Object>> replicaAssignmentMap =
      getReplicaAssignmentForTopic(zkUtils, tp.topic());

  scala.Option<Seq<Object>> replicasOption = replicaAssignmentMap.get(tp.partition());
  Seq<Object> replicas = replicasOption.get();
  List<Object> replicasList = scala.collection.JavaConverters.seqAsJavaList(replicas);
  return replicasList.stream().map(obj -> (Integer) obj).collect(Collectors.toList());
}