Java Code Examples for org.apache.commons.collections.ListUtils#union()

The following examples show how to use org.apache.commons.collections.ListUtils#union() . Each example notes the open-source project, source file, and license it was taken from.
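For reference, ListUtils.union(list1, list2) returns a new list containing the elements of the first list followed by the elements of the second; neither input is modified and duplicates are kept. Because the commons-collections 3.x ListUtils works with raw List types, callers that assign the result to a generic List typically add @SuppressWarnings("unchecked"), as several of the examples below do. A minimal, self-contained sketch (the class and variable names are illustrative, not taken from any of the projects below):

import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.ListUtils;

public class ListUnionSketch {

    public static void main(String[] args) {
        List<String> first = Arrays.asList("a", "b");
        List<String> second = Arrays.asList("b", "c");

        // union() appends the second list to the first and returns a new list;
        // the inputs are left untouched and duplicates are preserved.
        @SuppressWarnings("unchecked")
        List<String> combined = ListUtils.union(first, second);

        System.out.println(combined); // prints [a, b, b, c]
    }
}
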
Example 1
Source File: SqlConverter.java    From Bats with Apache License 2.0
/**
 * Checks whether the schema provided in {@code names} is valid. The schema is considered valid if:
 * <li>the schema is not indicated (only one element in the names list), or</li>
 * <li>the schema path can be resolved against the root schema, either as given or relative to the session's default schema.</li>
 *
 * @param names list of schema and table names; the table name is always the last element
 * @throws UserException if the schema is not valid
 */
private void isValidSchema(final List<String> names) throws UserException {
  SchemaPlus defaultSchema = session.getDefaultSchema(this.rootSchema);
  String defaultSchemaCombinedPath = SchemaUtilites.getSchemaPath(defaultSchema);
  List<String> schemaPath = Util.skipLast(names);
  String schemaPathCombined = SchemaUtilites.getSchemaPath(schemaPath);
  String commonPrefix = SchemaUtilites.getPrefixSchemaPath(defaultSchemaCombinedPath,
          schemaPathCombined,
          parserConfig.caseSensitive());
  boolean isPrefixDefaultPath = commonPrefix.length() == defaultSchemaCombinedPath.length();
  List<String> fullSchemaPath = Strings.isNullOrEmpty(defaultSchemaCombinedPath) ? schemaPath :
          isPrefixDefaultPath ? schemaPath : ListUtils.union(SchemaUtilites.getSchemaPathAsList(defaultSchema), schemaPath);
  if (names.size() > 1 && (SchemaUtilites.findSchema(this.rootSchema, fullSchemaPath) == null &&
          SchemaUtilites.findSchema(this.rootSchema, schemaPath) == null)) {
    SchemaUtilites.throwSchemaNotFoundException(defaultSchema, schemaPath);
  }
}
 
Example 2
Source File: EurekaServerListProcessor.java    From spring-cloud-gray with Apache License 2.0
@Override
public List<Server> process(String serviceId, List<Server> servers) {
    if (!grayHoldoutServerProperties.isEnabled() || CollectionUtils.isEmpty(grayHoldoutServerProperties.getServices().get(serviceId))) {
        return servers;
    }

    List<Server> serverList = null;

    if (grayHoldoutServerProperties.isCacheable()) {
        serverList = serversMap.get(serviceId);
        if (CollectionUtils.isNotEmpty(serverList)) {
            return serverList;
        }
    }

    serverList = servers;
    List<Server> unUpServers = getUnUpServers(serviceId);
    if (CollectionUtils.isNotEmpty(unUpServers)) {
        serverList = ListUtils.union(servers, unUpServers);
    }
    if (grayHoldoutServerProperties.isCacheable()) {
        serversMap.put(serviceId, serverList);
    }
    return serverList;
}
 
Example 3
Source File: PulsarMetadataReader.java    From pulsar-flink with Apache License 2.0
private List<String> getTopicsWithPattern(String topicsPattern) throws PulsarAdminException {
    TopicName dest = TopicName.get(topicsPattern);
    List<String> allNonPartitionedTopics = getNonPartitionedTopics(dest.getNamespace());
    List<String> nonPartitionedMatch = topicsPatternFilter(allNonPartitionedTopics, dest.toString());

    List<String> allPartitionedTopics = admin.topics().getPartitionedTopicList(dest.getNamespace());
    List<String> partitionedMatch = topicsPatternFilter(allPartitionedTopics, dest.toString());

    return ListUtils.union(nonPartitionedMatch, partitionedMatch);
}
 
Example 4
Source File: NacosServerListProcessor.java    From spring-cloud-gray with Apache License 2.0
@Override
protected List<Server> getServers(String serviceId, List<Server> servers) {
    List<InstanceStatus> statusList = getHoldoutInstanceStatus(serviceId);
    List<Server> holdoutServers = getInstances(serviceId).stream().filter(instance -> statusList.contains(getInstanceStatus(instance)))
            .map(NacosServer::new).collect(Collectors.toList());
    if (CollectionUtils.isEmpty(holdoutServers)) {
        return servers;
    }
    return ListUtils.union(servers, holdoutServers);
}
 
Example 5
Source File: IntervalOverlappingIteratorUnitTest.java    From gatk with BSD 3-Clause "New" or "Revised" License
@DataProvider(name="data")
public Object[][] getData() {
    // the sequence dictionary
    final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
    dictionary.addSequence(new SAMSequenceRecord("1", 1000000));
    dictionary.addSequence(new SAMSequenceRecord("2", 1000000));
    // the set of intervals
    final List<SimpleInterval> intervals_1 = Arrays.asList(new SimpleInterval("1:500-600"), new SimpleInterval("1:700-800"));
    final List<SimpleInterval> intervals_2 = Arrays.asList(new SimpleInterval("2:100-200"), new SimpleInterval("2:400-1000"));
    // some records
    final SimpleInterval record_1_1_100 = new SimpleInterval("1:1-100");
    final SimpleInterval record_1_1_800 = new SimpleInterval("1:1-800");
    final SimpleInterval record_1_500_600 = new SimpleInterval("1:500-600");
    final SimpleInterval record_1_700_750 = new SimpleInterval("1:700-750");
    final SimpleInterval record_2_100_150 = new SimpleInterval("2:100-150");
    final SimpleInterval record_2_900_999 = new SimpleInterval("2:900-999");
    // test cases
    return new Object[][] {
        // first record starts before the first interval, second record overlaps the first interval
        {intervals_1, dictionary, new SimpleInterval[]{record_1_1_100, record_1_500_600, record_2_900_999}, new SimpleInterval[]{record_1_500_600}},
        // first record starts after the first interval, second interval overlaps the first record
        {intervals_1, dictionary, new SimpleInterval[]{record_1_700_750, record_2_900_999}, new SimpleInterval[]{record_1_700_750}},
        // first interval is on a later contig than the first record, but overlaps later records
        {intervals_2, dictionary, new SimpleInterval[]{record_1_1_100, record_2_900_999}, new SimpleInterval[]{record_2_900_999}},
        // first interval is on an earlier contig than the first record, but later records overlap later intervals
        {ListUtils.union(intervals_1, intervals_2), dictionary, new SimpleInterval[]{record_2_100_150, record_2_900_999}, new SimpleInterval[]{record_2_100_150, record_2_900_999}},
        // no records overlap any intervals
        {intervals_1, dictionary, new SimpleInterval[]{record_2_900_999}, new SimpleInterval[0]},
        // an interval overlaps multiple records
        {intervals_1, dictionary, new SimpleInterval[]{record_1_1_800, record_1_500_600, record_2_900_999}, new SimpleInterval[]{record_1_1_800, record_1_500_600}},
        // a record overlaps multiple intervals
        {intervals_1, dictionary, new SimpleInterval[]{record_1_1_800, record_2_100_150}, new SimpleInterval[]{record_1_1_800}}
    };
}
 
Example 6
Source File: BackupCreatorIntegrationTest.java    From tutorials with MIT License
@Test
public void givenMultipleInputMessagesFromDifferentDays_whenBackupCreatorIsUsed_thenMessagesAreGroupedProperly() throws Exception {
    LocalDateTime currentTime = LocalDateTime.now();
    InputMessage message = new InputMessage("Me", "User", currentTime, "First TestMessage");
    InputMessage secondMessage = new InputMessage("Me", "User", currentTime.plusHours(1), "First TestMessage");
    InputMessage thirdMessage = new InputMessage("Me", "User", currentTime.plusHours(2), "First TestMessage");
    InputMessage fourthMessage = new InputMessage("Me", "User", currentTime.plusHours(3), "First TestMessage");
    InputMessage fifthMessage = new InputMessage("Me", "User", currentTime.plusHours(25), "First TestMessage");
    InputMessage sixthMessage = new InputMessage("Me", "User", currentTime.plusHours(26), "First TestMessage");

    List<InputMessage> firstBackupMessages = Arrays.asList(message, secondMessage, thirdMessage, fourthMessage);
    List<InputMessage> secondBackupMessages = Arrays.asList(fifthMessage, sixthMessage);
    List<InputMessage> inputMessages = ListUtils.union(firstBackupMessages, secondBackupMessages);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);
    DataStreamSource<InputMessage> testDataSet = env.fromCollection(inputMessages);
    CollectingSink sink = new CollectingSink();
    testDataSet.assignTimestampsAndWatermarks(new InputMessageTimestampAssigner())
      .timeWindowAll(Time.hours(24))
      .aggregate(new BackupAggregator())
      .addSink(sink);

    env.execute();

    Awaitility.await().until(() -> sink.backups.size() == 2);
    assertEquals(2, sink.backups.size());
    assertEquals(firstBackupMessages, sink.backups.get(0).getInputMessages());
    assertEquals(secondBackupMessages, sink.backups.get(1).getInputMessages());

}
 
Example 7
Source File: CombineTwoLists.java    From levelup-java-examples with Apache License 2.0
@Test
public void combine_two_lists_in_java_with_apache_commons () {
	
	@SuppressWarnings("unchecked")
	List<String> allStates = ListUtils.union(
			firstHalfStates, 
			secondHalfStates);
	
	assertTrue(allStates.size() == 50);
}
 
Example 8
Source File: GraphStructV1.java    From hugegraph-loader with Apache License 2.0
@SuppressWarnings("unchecked")
public <ES extends ElementStructV1> List<ES> structs() {
    return (List<ES>) ListUtils.union(this.vertexStructs, this.edgeStructs);
}
 
Example 9
Source File: User.java    From dhis2-core with BSD 3-Clause "New" or "Revised" License
public List<String> getScopes()
{
    return ListUtils.union( searchScope, captureScope );
}
 
Example 10
Source File: AlleleLikelihoods.java    From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Add alleles that are missing in the evidence-likelihoods collection giving all evidence a default
 * likelihood value.
 * @param candidateAlleles the potentially missing alleles.
 * @param defaultLikelihood the default evidence likelihood value for that allele.
 *
 * @return {@code true} iff the evidence-likelihood collection was modified by the addition of the input alleles.
 *  So if all the alleles in the input collection were already present in the evidence-likelihood collection this method
 *  will return {@code false}.
 *
 * @throws IllegalArgumentException if {@code candidateAlleles} is {@code null}, if more than one of the
 * missing alleles is a reference allele, or if one of them is a reference allele but the collection
 * already has a reference allele.
 */
public boolean addMissingAlleles(final Collection<A> candidateAlleles, final double defaultLikelihood) {
    Utils.nonNull(candidateAlleles, "the candidateAlleles list cannot be null");
    if (candidateAlleles.isEmpty()) {
        return false;
    }
    final List<A> allelesToAdd = candidateAlleles.stream().filter(allele -> !alleles.containsAllele(allele)).collect(Collectors.toList());

    if (allelesToAdd.isEmpty()) {
        return false;
    }

    final int oldAlleleCount = alleles.numberOfAlleles();
    final int newAlleleCount = alleles.numberOfAlleles() + allelesToAdd.size();

    int referenceIndex = this.referenceAlleleIndex;

    @SuppressWarnings("unchecked")
    final List<A> newAlleles = ListUtils.union(alleles.asListOfAlleles(), allelesToAdd);
    alleles = new IndexedAlleleList<>(newAlleles);

    // if we previously had no reference allele, update the reference index if a reference allele is added
    // if we previously had a reference and try to add another, throw an exception
    final OptionalInt indexOfReferenceInAllelesToAdd = IntStream.range(0, allelesToAdd.size())
            .filter(n -> allelesToAdd.get(n).isReference()).findFirst();
    if (referenceIndex != MISSING_INDEX) {
        Utils.validateArg(!indexOfReferenceInAllelesToAdd.isPresent(), "there can only be one reference allele");
    } else if (indexOfReferenceInAllelesToAdd.isPresent()){
        referenceAlleleIndex = oldAlleleCount + indexOfReferenceInAllelesToAdd.getAsInt();
    }

    //copy old allele likelihoods and set new allele likelihoods to the default value
    for (int s = 0; s < samples.numberOfSamples(); s++) {
        final int sampleEvidenceCount = evidenceBySampleIndex.get(s).size();
        final double[][] newValuesBySampleIndex = Arrays.copyOf(valuesBySampleIndex[s], newAlleleCount);
        for (int a = oldAlleleCount; a < newAlleleCount; a++) {
            newValuesBySampleIndex[a] = new double[sampleEvidenceCount];
            if (defaultLikelihood != 0.0) {
                Arrays.fill(newValuesBySampleIndex[a], defaultLikelihood);
            }
        }
        valuesBySampleIndex[s] = newValuesBySampleIndex;
    }
    return true;
}
 
Example 11
Source File: TimeBasedRetentionPolicyTest.java    From incubator-gobblin with Apache License 2.0
private void verify(String duration, List<TimestampedDatasetVersion> toBeDeleted,
    List<TimestampedDatasetVersion> toBeRetained) {

  @SuppressWarnings("unchecked")
  List<TimestampedDatasetVersion> allVersions = ListUtils.union(toBeRetained, toBeDeleted);

  List<TimestampedDatasetVersion> deletableVersions =
      Lists.newArrayList(new TimeBasedRetentionPolicy(duration).listDeletableVersions(allVersions));

  assertThat(deletableVersions, Matchers.containsInAnyOrder(toBeDeleted.toArray()));
  assertThat(deletableVersions, Matchers.not(Matchers.containsInAnyOrder(toBeRetained.toArray())));

}