Java Code Examples for org.apache.commons.lang3.tuple.Pair

The following examples show how to use org.apache.commons.lang3.tuple.Pair. They are extracted from open source projects; where available, the source project, file, and license are noted above each example.
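Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the class and variable names are purely illustrative) of the core Pair API: Pair.of() creates an immutable pair, getLeft()/getRight() read its components, getKey()/getValue() are the Map.Entry-style aliases, and MutablePair is the variant whose components can be replaced.

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;

// Illustrative standalone example; not part of the projects referenced below.
public class PairBasics {
    public static void main(String[] args) {
        // Pair.of(...) returns an immutable pair
        Pair<String, Integer> nameAndAge = Pair.of("Alice", 30);
        System.out.println(nameAndAge.getLeft() + " is " + nameAndAge.getRight());

        // Pair implements Map.Entry, so getKey()/getValue() also work
        System.out.println(nameAndAge.getKey() + " -> " + nameAndAge.getValue());

        // ImmutablePair can also be constructed directly
        Pair<Integer, Integer> range = new ImmutablePair<>(0, 10);

        // MutablePair allows its components to be replaced after creation
        MutablePair<String, Integer> counter = MutablePair.of("hits", 0);
        counter.setRight(counter.getRight() + 1);

        System.out.println(range + " " + counter); // (0,10) (hits,1)
    }
}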
Example 1
public List<Pair<CustomTagList, CustomTag>> getCustomTagAndContinuations(CustomTag tag) {
	LinkedList<Pair<CustomTagList, CustomTag>> allTags = new LinkedList<>();
	if (!hasTag(tag))
		return allTags;

	allTags.add(Pair.of(this, tag));

	if (!tag.isContinued())
		return allTags;

	// previous tags:
	Pair<CustomTagList, CustomTag> c = getPreviousContinuedCustomTag(tag);
	while (c != null) {
		allTags.addFirst(c);
		c = c.getLeft().getPreviousContinuedCustomTag(c.getRight());
	}
	// next tags:
	c = getNextContinuedCustomTag(tag);
	while (c != null) {
		allTags.addLast(c);
		c = c.getLeft().getNextContinuedCustomTag(c.getRight());
	}

	return allTags;
}
 
Example 2
Source Project: webanno   Source File: CuratedDocumentsExporterTest.java    License: Apache License 2.0
@Test
public void thatImportingCorrectionProjectWorks_3_6_1() throws Exception
{
    project.setMode(PROJECT_TYPE_CORRECTION);
    
    // Export the project and import it again
    List<Pair<SourceDocument, String>> imported = runImportAndFetchDocuments(new ZipFile(
            "src/test/resources/exports/Export+Test+-+Curated+correction+project_3_6_1.zip"));

    // Check that the curation for the document in the project is imported
    assertThat(imported).extracting(p -> p.getKey().getName())
            .containsExactlyInAnyOrder("example_sentence.txt");
    // Since WebAnno 3.5.x, the CORRECTION_USER CAS is stored with the annotations
    assertThat(imported).extracting(Pair::getValue)
            .containsExactlyInAnyOrder(CURATION_USER);
}
 
Example 3
Source Project: azure-cosmosdb-java   Source File: SpyClientUnderTestFactory.java    License: MIT License
void initRequestCapture(CompositeHttpClient<ByteBuf, ByteBuf> spyClient) {

    doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            RxClient.ServerInfo serverInfo = invocationOnMock.getArgumentAt(0, RxClient.ServerInfo.class);
            HttpClientRequest<ByteBuf> httpReq = invocationOnMock.getArgumentAt(1, HttpClientRequest.class);

            CompletableFuture<HttpResponseHeaders> f = new CompletableFuture<>();
            requestsResponsePairs.add(Pair.of(httpReq, f));

            return origHttpClient.submit(serverInfo, httpReq)
                    .doOnNext(res -> f.complete(res.getHeaders()))
                    .doOnError(e -> f.completeExceptionally(e));
        }
    }).when(spyClient).submit(Mockito.any(RxClient.ServerInfo.class), Mockito.any(HttpClientRequest.class));
}
 
Example 4
Source Project: sqlg   Source File: SqlgEdge.java    License: MIT License
/**
 * Called from {@link SqlgVertex} to create a brand new edge.
 *
 * @param sqlgGraph       The graph.
 * @param streaming       If in batch mode, this indicates whether it is streaming or not.
 * @param schema          The schema the edge is in.
 * @param table           The edge's label which translates to a table name.
 * @param inVertex        The edge's in vertex.
 * @param outVertex       The edge's out vertex.
 * @param keyValueMapPair A pair of properties of the edge. Left contains all the properties and right the null valued properties.
 */
public SqlgEdge(
        SqlgGraph sqlgGraph,
        boolean streaming,
        String schema,
        String table,
        SqlgVertex inVertex,
        SqlgVertex outVertex,
        Pair<Map<String, Object>, Map<String, Object>> keyValueMapPair) {

    super(sqlgGraph, schema, table);
    this.inVertex = inVertex;
    this.outVertex = outVertex;
    try {
        insertEdge(streaming, keyValueMapPair);
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}
 
Example 5
Source Project: onetwo   Source File: ExtFeignConfiguration.java    License: Apache License 2.0
@Bean
@ConditionalOnMissingBean(okhttp3.OkHttpClient.class)
public OkHttpClient okHttpClient(){
    Pair<Integer, TimeUnit> read = feignProperties.getOkHttpClient().getReadTimeoutTime();
    Pair<Integer, TimeUnit> conn = feignProperties.getOkHttpClient().getConnectTimeoutTime();
    Pair<Integer, TimeUnit> write = feignProperties.getOkHttpClient().getWriteTimeoutTime();
    okhttp3.OkHttpClient.Builder okclientBuilder = new okhttp3.OkHttpClient.Builder()
            .readTimeout(read.getKey(), read.getValue())
            .connectTimeout(conn.getKey(), conn.getValue())
            .writeTimeout(write.getKey(), write.getValue())
//          .connectionPool(new ConnectionPool())
            ;
    if(LangUtils.isNotEmpty(interceptors)){
        for(Interceptor interceptor : this.interceptors){
            okclientBuilder.addInterceptor(interceptor);
        }
    }
    return okclientBuilder.build();
}
 
Example 6
@Test
public void testFixStuckTickets() {
    List<TicketCategoryModification> categories = Collections.singletonList(
        new TicketCategoryModification(null, "default", AVAILABLE_SEATS,
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            DESCRIPTION, BigDecimal.TEN, false, "", false, null, null, null, null, null, 0, null, null, AlfioMetadata.empty()));
    Pair<Event, String> eventUsername = initEvent(categories);
    Event event = eventUsername.getKey();
    TicketReservationModification trm = new TicketReservationModification();
    trm.setAmount(1);
    trm.setTicketCategoryId(eventManager.loadTicketCategories(event).get(0).getId());
    TicketReservationWithOptionalCodeModification r = new TicketReservationWithOptionalCodeModification(trm, Optional.empty());
    Date expiration = DateUtils.addDays(new Date(), 1);
    String reservationId = ticketReservationManager.createTicketReservation(event, Collections.singletonList(r), Collections.emptyList(), expiration, Optional.empty(), Locale.ENGLISH, false);
    //simulate the effect of a reservation cancellation after #392, as described in #391
    ticketReservationRepository.updateReservationStatus(reservationId, TicketReservation.TicketReservationStatus.CANCELLED.name());
    List<Ticket> ticketsInReservation = ticketRepository.findTicketsInReservation(reservationId);
    assertEquals(1, ticketsInReservation.size());
    String uuid = ticketsInReservation.get(0).getUuid();
    assertTrue(ticketsInReservation.stream().allMatch(t -> t.getStatus() == Ticket.TicketStatus.PENDING));
    dataMigrator.fixStuckTickets(event.getId());
    assertSame(Ticket.TicketStatus.RELEASED, ticketRepository.findByUUID(uuid).getStatus());
}
 
Example 7
Source Project: yauaa   Source File: Splitter.java    License: Apache License 2.0
public List<Pair<Integer, Integer>> createSplitList(char[] characters){
    List<Pair<Integer, Integer>> result = new ArrayList<>(8);

    int offset = findSplitStart(characters, 1);
    if (offset == -1) {
        return result; // Nothing at all. So we are already done
    }
    while(offset != -1) {

        int start = offset;
        int end= findSplitEnd(characters, start);

        result.add(new ImmutablePair<>(start, end));
        offset = findNextSplitStart(characters, end);
    }
    return result;
}
 
Example 8
/**
 * Given three data sets (corresponding to left, center, and right segments), returns a pair of scores based on
 * the Hodges-Lehmann estimator between (left, center) and (center, right); the sum of the scores will be unity.
 * @param leftData      data set for left segment
 * @param centerData    data set for center segment
 * @param rightData     data set for right segment
 * @return              pair of scores based on the Hodges-Lehmann estimator
 */
private static Pair<Double, Double> calculateHodgesLehmannScores(final double[] leftData,
                                                                 final double[] centerData,
                                                                 final double[] rightData) {
    final double leftDistance = hodgesLehmannDistance(leftData, centerData);
    final double rightDistance = hodgesLehmannDistance(centerData, rightData);

    if (leftDistance == 0. && rightDistance == 0.) {
        return Pair.of(0.5, 0.5);
    }

    //if center segment is above or below both left and right segments,
    //assign score 1 to the closer segment and 0 to the other
    if (leftDistance * rightDistance < 0) {
        return Math.abs(leftDistance) < Math.abs(rightDistance) ? Pair.of(1., 0.) : Pair.of(0., 1.);
    }
    return Pair.of(1. - Math.abs(leftDistance / (leftDistance + rightDistance)),
                   1. - Math.abs(rightDistance / (leftDistance + rightDistance)));
}
 
Example 9
Source Project: sakai   Source File: ItemHashUtilTest.java    License: Educational Community License v2.0
@Test
public void testHashBaseForItemAnswersPreservesNullsLiterally()
        throws IOException, NoSuchAlgorithmException, ServerOverloadException {
    final ItemData item = new ItemData();
    item.setTypeId(TypeIfc.FILL_IN_BLANK);

    // sequence, at least, is required, else ordering is completely non-deterministic
    final Pair<Answer,String> answer1 = answerAndExpectedHashBaseFor(item, 1L, null, null, null, null, null, null);
    final Pair<Answer,String> answer2 = answerAndExpectedHashBaseFor(item, 2L, null, null, null, null, null, null);

    final ItemText itemText1 = new ItemText(item, 1L, null, Sets.newSet(answerFrom(answer1), answerFrom(answer2)));
    answerFrom(answer1).setItemText(itemText1);
    answerFrom(answer2).setItemText(itemText1);

    item.setItemTextSet(Sets.newSet(itemText1));

    final StringBuilder expectedHashBase = new StringBuilder()
            .append(stringFrom(answer1))
            .append(stringFrom(answer2));

    final StringBuilder actualHashBase = new StringBuilder();
    itemHashUtil.hashBaseForItemAnswers(item, actualHashBase);
    assertThat(actualHashBase.toString(), equalTo(expectedHashBase.toString()));
}
 
Example 10
private static void pushPlanInfo(Map<String, Object> nodeInfo,List<Pair<String,Integer>> planMap) throws StandardException{
    @SuppressWarnings("unchecked") List<Map<String,Object>> children = (List<Map<String,Object>>)nodeInfo.get("children");
    String thisNodeInfo = infoToString(nodeInfo,false);
    Integer level = (Integer)nodeInfo.get("level");
    planMap.add(Pair.of(thisNodeInfo,level));
    for(Map<String,Object> child:children){
        pushPlanInfo(child,planMap);
    }

    if(!nodeInfo.containsKey("subqueries")) return; //nothing to work with
    @SuppressWarnings("unchecked") List<Map<String,Object>> subqueries = (List<Map<String,Object>>)nodeInfo.get("subqueries");
    for(Map<String,Object> subquery:subqueries){
        Map<String,Object> subqueryNodeInfo = (Map<String,Object>)subquery.get("node");
        pushPlanInfo(subqueryNodeInfo,planMap);
        String subqueryInfo = subqueryToString(subquery,subqueryNodeInfo);
        planMap.add(Pair.of(subqueryInfo,level));
    }
}
 
Example 11
Source Project: SJS   Source File: ExplanationsTest.java    License: Apache License 2.0
private static String explainErrors(JSEnvironment env, String sourceCode) {
    AstRoot root = new Parser().parse(sourceCode, "", 1);
    SatSolver sat = new Sat4J();
    SJSTypeTheory theory = new SJSTypeTheory(env, null, root);
    List<Integer> hard = new ArrayList<>();
    List<Integer> soft = new ArrayList<>();
    List<ITypeConstraint> constraints = theory.getConstraints();
    for (int i = 0; i < constraints.size(); ++i) {
        (theory.hackyGenerator().hasExplanation(constraints.get(i)) ? soft : hard).add(i);
    }
    Pair<TypeAssignment, Collection<Integer>> result =
            TheorySolver.solve(
                theory, new SatFixingSetFinder<>(sat),
                hard, soft);
    ConstraintGenerator g = theory.hackyGenerator();
    StringBuilder buf = new StringBuilder();
    for (int broken : result.getRight()) {
        ITypeConstraint c = theory.hackyConstraintAccess().get(broken);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        g.explainFailure(c, result.getLeft()).prettyprint(new PrintStream(stream));
        buf.append(stream.toString());
    }
    return buf.toString();
}
 
Example 12
@Test
public void testConcurrentReadWrites() throws Exception {
  LeftRegularBipartiteGraphSegment leftRegularBipartiteGraphSegment =
      new LeftRegularBipartiteGraphSegment(
          4, 3, 2, 1, 2.0, Integer.MAX_VALUE, new IdentityEdgeTypeMask(),
          new NullStatsReceiver());

  @SuppressWarnings("unchecked")
  List<Pair<Long, Long>> edgesToAdd = Lists.newArrayList(
      Pair.of(1L, 11L),
      Pair.of(1L, 12L),
      Pair.of(4L, 41L),
      Pair.of(2L, 21L),
      Pair.of(4L, 42L),
      Pair.of(3L, 31L),
      Pair.of(2L, 22L),
      Pair.of(1L, 13L),
      Pair.of(4L, 43L),
      Pair.of(5L, 51L) // violates the max num nodes assumption
  );

  testConcurrentReadWriteThreads(leftRegularBipartiteGraphSegment, edgesToAdd);
}
 
Example 13
Source Project: datacollector   Source File: TestJdbcMetadata.java    License: Apache License 2.0
@NotNull
private Record makeListRecord(Map<String, Pair<Field.Type, Object>> fieldMap) {
  Record record = RecordCreator.create();
  Record.Header header = record.getHeader();
  ArrayList<Field> fields = new ArrayList<>();
  for (Map.Entry<String, Pair<Field.Type, Object>> entry : fieldMap.entrySet()) {
    String fieldName = entry.getKey();
    Field.Type fieldType = entry.getValue().getLeft();
    Field field = Field.create(fieldType, entry.getValue().getRight());
    if (fieldType == Field.Type.DECIMAL) {
      field.setAttribute(HeaderAttributeConstants.ATTR_SCALE, SCALE);
      field.setAttribute(HeaderAttributeConstants.ATTR_PRECISION, PRECISION);
    }
    fields.add(field);
  }
  record.set(Field.create(fields));
  header.setAttribute("table", tableName);
  return record;
}
 
Example 14
Source Project: cloudbreak   Source File: SdxServiceTest.java    License: Apache License 2.0
@Test
void testCreateNOTInternalSdxClusterFromLightDutyTemplateWhenBaseLocationSpecifiedShouldCreateStackRequestWithSettedUpBaseLocation()
        throws IOException, TransactionExecutionException {
    when(transactionService.required(isA(Supplier.class))).thenAnswer(invocation -> invocation.getArgument(0, Supplier.class).get());
    String lightDutyJson = FileReaderUtils.readFileFromClasspath("/runtime/7.1.0/aws/light_duty.json");
    when(cdpConfigService.getConfigForKey(any())).thenReturn(JsonUtil.readValue(lightDutyJson, StackV4Request.class));
    //doNothing().when(cloudStorageLocationValidator.validate("s3a://some/dir", ));
    SdxClusterRequest sdxClusterRequest = new SdxClusterRequest();
    sdxClusterRequest.setClusterShape(LIGHT_DUTY);
    sdxClusterRequest.setEnvironment("envir");
    SdxCloudStorageRequest cloudStorage = new SdxCloudStorageRequest();
    cloudStorage.setFileSystemType(FileSystemType.S3);
    cloudStorage.setBaseLocation("s3a://some/dir");
    cloudStorage.setS3(new S3CloudStorageV1Parameters());
    sdxClusterRequest.setCloudStorage(cloudStorage);
    long id = 10L;
    when(sdxClusterRepository.save(any(SdxCluster.class))).thenAnswer(invocation -> {
        SdxCluster sdxWithId = invocation.getArgument(0, SdxCluster.class);
        sdxWithId.setId(id);
        return sdxWithId;
    });
    mockEnvironmentCall(sdxClusterRequest, CloudPlatform.AWS);
    Pair<SdxCluster, FlowIdentifier> result = underTest.createSdx(USER_CRN, CLUSTER_NAME, sdxClusterRequest, null);
    SdxCluster createdSdxCluster = result.getLeft();
    assertEquals("s3a://some/dir", createdSdxCluster.getCloudStorageBaseLocation());
}
 
Example 15
Source Project: UDepLambda   Source File: PostProcessLogicalForm.java    License: Apache License 2.0
/**
 * From a given logical expression, the main predicates are extracted and made
 * more readable, and the variables are linked to the sources from which they
 * originated.
 * 
 * @param sentence the source sentence
 * @param parse the logical expression to be processed
 * @param lexicalizePredicates lexicalize predicates by appending the event,
 *        e.g., eat(1:e) ^ arg1(1:e , 1:x) becomes eat.arg1(1:e , 1:x)
 * @return the set of main predicates in readable format
 */
public static Set<String> process(Sentence sentence, LogicalExpression parse,
    boolean lexicalizePredicates) {
  List<Literal> mainPredicates = new ArrayList<>();
  Map<Term, List<Integer>> varToEvents = new HashMap<>();
  Map<Term, List<Integer>> varToEntities = new HashMap<>();
  Map<Term, List<Term>> varToConj = new HashMap<>();
  List<Pair<Term, Term>> equalPairs = new ArrayList<>();
  process(mainPredicates, varToEvents, varToEntities, varToConj, equalPairs,
      sentence, parse);

  // TODO(sivareddyg) handle predicates p_CONJ and p_EQUAL in both varToEvents
  // and varToEntities.
  cleanVarToEntities(varToEntities, sentence);
  cleanVarToEvents(varToEvents, sentence);
  populateConj(varToConj, varToEntities, varToEvents);
  populateEquals(equalPairs, varToEntities, varToEvents);
  Set<String> cleanedPredicates =
      createCleanPredicates(mainPredicates, varToEvents, varToEntities,
          sentence, lexicalizePredicates);
  return cleanedPredicates;
}
 
Example 16
Source Project: MegaSparkDiff   Source File: VisualizerTest.java    License: Apache License 2.0
@Test
public void keyCaseTest()
{
    Pair<Dataset<Row>,Dataset<Row>> pair = getAppleTablePair("Test1", "Test4");
    boolean flag = true;
    String result1 = "";
    String result2 = "";

    try{
        result1 = generateString(pair.getLeft(), pair.getRight(), "Fruit", 100);
        result2 = generateString(pair.getLeft(), pair.getRight(), "FrUit", 100);
    } catch (Exception ex) {
        flag = false;
    }

    Assert.assertEquals(true, flag);
    Assert.assertEquals(result1, result2);
}
 
Example 17
public static Pair<ReadCountCollection, double[]> simulatedData(final int numTargets, final int numSamples) {
    final List<Target> phonyTargets = SimulatedTargets.phonyTargets(numTargets);
    final List<String> phonySamples = SimulatedSamples.phonySamples(numSamples);

    final Random random = new Random(13);
    final double[] gcContentByTarget = IntStream.range(0, numTargets)
            .mapToDouble(n -> 0.5 + 0.2*random.nextGaussian())
            .map(x -> Math.min(x,0.95)).map(x -> Math.max(x,0.05)).toArray();
    final double[] gcBiasByTarget = Arrays.stream(gcContentByTarget).map(QUADRATIC_GC_BIAS_CURVE::apply).toArray();

    // model mainly GC bias with a small random amount of non-GC bias
    // thus noise after GC correction should be nearly zero
    final RealMatrix counts = new Array2DRowRealMatrix(numTargets, numSamples);
    counts.walkInOptimizedOrder(new DefaultRealMatrixChangingVisitor() {
        @Override
        public double visit(final int target, final int column, final double value) {
            return gcBiasByTarget[target]*(1.0 + 0.01*random.nextDouble());
        }
    });
    final ReadCountCollection rcc = new ReadCountCollection(phonyTargets, phonySamples, counts);
    return new ImmutablePair<>(rcc, gcContentByTarget);
}
 
Example 18
Source Project: incubator-gobblin   Source File: Kafka08DataWriter.java    License: Apache License 2.0
public Future<WriteResponse> write(Pair<K, V> keyValuePair, final WriteCallback callback) {
  try {
    return new WriteResponseFuture<>(this.producer
        .send(new ProducerRecord<>(topic, keyValuePair.getKey(), keyValuePair.getValue()), new Callback() {
          @Override
          public void onCompletion(final RecordMetadata metadata, Exception exception) {
            if (exception != null) {
              callback.onFailure(exception);
            } else {
              callback.onSuccess(WRITE_RESPONSE_WRAPPER.wrap(metadata));
            }
          }
        }), WRITE_RESPONSE_WRAPPER);
  } catch (Exception e) {
    throw new RuntimeException("Failed to create a Kafka write request", e);
  }
}
 
Example 19
Source Project: CardinalPGM   Source File: SpawnModule.java    License: MIT License
public SpawnModule(TeamModule team, List<Pair<RegionModule, Vector>> regions, KitNode kit, boolean safe, boolean sequential, FilterModule filter) {
    this.team = team;
    this.regions = regions;
    this.kit = kit;
    this.safe = safe;
    this.sequential = sequential;
    this.filter = filter;
    this.position = 0;
}
 
Example 20
Source Project: olca-app   Source File: Comparators.java    License: Mozilla Public License 2.0
/**
 * Returns a new comparator for flow descriptors which sorts the flow
 * descriptors first by name and then by category.
 */
public static Comparator<FlowDescriptor> forFlowDescriptors() {
	return (flow1, flow2) -> {
		int c = Strings.compare(flow1.name, flow2.name);
		if (c != 0)
			return c;
		Pair<String, String> cat1 = Labels.getCategory(flow1);
		Pair<String, String> cat2 = Labels.getCategory(flow2);
		c = Strings.compare(cat1.getLeft(), cat2.getLeft());
		if (c != 0)
			return c;
		return Strings.compare(cat1.getRight(), cat2.getRight());
	};
}
 
Example 21
public void merge(long time, long inFilter, long postFilter, long indexUsed, long binLen) {
  super.increaseCount();
  _timeList.add(time);
  _inFilterList.add(inFilter);
  Pair<Long, Long> key = Pair.of(inFilter / binLen, postFilter / binLen);
  if (_minBin.containsKey(key)) {
    if (_minBin.get(key).getRight() > time) {
      _minBin.put(key, Pair.of(indexUsed, time));
    }
  } else {
    _minBin.put(Pair.of(inFilter / binLen, postFilter / binLen), Pair.of(indexUsed, time));
  }
}
 
Example 22
Source Project: distributedlog   Source File: BalancerTool.java    License: Apache License 2.0
@Override
protected int executeCommand(CommandLine cmdline) throws Exception {
    DLZkServerSet serverSet1 = DLZkServerSet.of(region1, 60000);
    logger.info("Created serverset for {}", region1);
    DLZkServerSet serverSet2 = DLZkServerSet.of(region2, 60000);
    logger.info("Created serverset for {}", region2);
    try {
        DistributedLogClientBuilder builder1 =
                createDistributedLogClientBuilder(serverSet1.getServerSet());
        Pair<DistributedLogClient, MonitorServiceClient> pair1 =
                ClientUtils.buildClient(builder1);
        DistributedLogClientBuilder builder2 =
                createDistributedLogClientBuilder(serverSet2.getServerSet());
        Pair<DistributedLogClient, MonitorServiceClient> pair2 =
                ClientUtils.buildClient(builder2);
        try {
            SimpleBalancer balancer = new SimpleBalancer(
                    BKNamespaceDriver.getZKServersFromDLUri(region1), pair1.getLeft(), pair1.getRight(),
                    BKNamespaceDriver.getZKServersFromDLUri(region2), pair2.getLeft(), pair2.getRight());
            try {
                return runBalancer(balancer);
            } finally {
                balancer.close();
            }
        } finally {
            pair1.getLeft().close();
            pair2.getLeft().close();
        }
    } finally {
        serverSet1.close();
        serverSet2.close();
    }
}
 
Example 23
Source Project: metron   Source File: ArithmeticEvaluatorTest.java    License: Apache License 2.0
@Test
public void evaluateIntegerShouldReturnIntegerAdd() {
  Token<Integer> l = mock(Token.class);
  when(l.getValue()).thenReturn(1);

  Token<Integer> r = mock(Token.class);
  when(r.getValue()).thenReturn(2);

  Pair<Token<? extends Number>, Token<? extends Number>> p = Pair.of(l, r);

  Token<? extends Number> evaluated = evaluator.evaluate(ArithmeticEvaluator.ArithmeticEvaluatorFunctions.addition(null), p);

  assertTrue(evaluated.getValue() instanceof Integer);
  assertEquals(3, evaluated.getValue());
}
 
Example 24
Source Project: bullet-core   Source File: ClipTest.java    License: Apache License 2.0
@Test
public void testRecordAddition() {
    BulletRecord record = new RecordBox().add("field", "sample").addMap("map_field", Pair.of("foo", "bar"))
                                         .addListOfMaps("list_field", new HashMap<>(), singletonMap("foo", 1L))
                                         .getRecord();
    assertJSONEquals(Clip.of(record).asJSON(), makeJSON("[{'list_field':[{},{'foo':1}],'field':'sample','map_field':{'foo':'bar'}}]"));
}
 
Example 25
private static FillerEntry createWeightRandomStateFiller(JsonObject object) {
    JsonArray values = object.get("values").getAsJsonArray();
    ArrayList<Pair<Integer, FillerEntry>> randomList = new ArrayList<>();

    for (JsonElement randomElement : values) {
        JsonObject randomObject = randomElement.getAsJsonObject();
        int weight = randomObject.get("weight").getAsInt();
        Preconditions.checkArgument(weight > 0, "Invalid weight: %d", weight);
        FillerEntry filler = createBlockStateFiller(randomObject.get("value"));
        randomList.add(Pair.of(weight, filler));
    }

    return new WeightRandomMatcherEntry(randomList);
}
 
Example 26
Source Project: TranskribusCore   Source File: UnicodeList.java    License: GNU General Public License v3.0
public List<String> getUnicodesAsStrings() {
	// TODO Auto-generated method stub
	List<String> strings = new ArrayList<String>();
	List<Pair<Integer, String>> list = getUnicodes();
	for (Pair<Integer, String> pair : list){
		strings.add(pair.getRight());
	}
	return strings;
}
 
Example 27
Source Project: syncope   Source File: DefaultGroupProvisioningManager.java    License: Apache License 2.0
@Transactional(propagation = Propagation.REQUIRES_NEW)
@Override
public Pair<String, List<PropagationStatus>> create(
        final GroupCR groupCR,
        final Map<String, String> groupOwnerMap,
        final Set<String> excludedResources,
        final boolean nullPriorityAsync,
        final String creator,
        final String context) {

    WorkflowResult<String> created = gwfAdapter.create(groupCR, creator, context);

    // see ConnObjectUtils#getAnyTOFromConnObject for GroupOwnerSchema
    groupCR.getPlainAttr(StringUtils.EMPTY).
            ifPresent(groupOwner -> groupOwnerMap.put(created.getResult(), groupOwner.getValues().get(0)));

    List<PropagationTaskInfo> tasks = propagationManager.getCreateTasks(
            AnyTypeKind.GROUP,
            created.getResult(),
            null,
            created.getPropByRes(),
            groupCR.getVirAttrs(),
            excludedResources);
    PropagationReporter propagationReporter = taskExecutor.execute(tasks, nullPriorityAsync, creator);

    return Pair.of(created.getResult(), propagationReporter.getStatuses());
}
 
Example 28
Source Project: quaerite   Source File: TestParameterizableStringFactory.java    License: Apache License 2.0
@Test
public void testCrossOver() throws Exception {
    String paramString = "recip(rord(creationDate),[1,2,3],[10,100,1000],[$2])";
    TestParam t1 = new TestParam("bf", "0", "recip(rord(creationDate),2,100,100)");
    TestParam t2 = new TestParam("bf", "0", "recip(rord(creationDate),1,1000,1000)");

    ParameterizableStringFactory<TestParam> fact = new ParameterizableStringFactory("bf",
            "0", TestParam.class, paramString);

    for (int i = 0; i < 100; i++) {
        Pair<TestParam, TestParam> pair = fact.crossover(t1, t2);
        for (TestParam t : new TestParam[]{pair.getLeft(), pair.getRight()}) {
            List<Float> floats = extractFloats(t.toString());
            assertTrue(
                    MathUtil.equals(floats.get(0), 1f, 0.00001f) ||
                            MathUtil.equals(floats.get(0), 2f, 0.00001f)
            );
            assertTrue(
                    MathUtil.equals(floats.get(1), 100f, 0.00001f) ||
                            MathUtil.equals(floats.get(1), 1000f, 0.00001f)
            );
            assertTrue(
                    MathUtil.equals(floats.get(2), 100f, 0.00001f) ||
                            MathUtil.equals(floats.get(2), 1000f, 0.00001f)
            );
        }

    }
}
 
Example 29
Source Project: azure-keyvault-java   Source File: RsaKeyTest.java    License: MIT License
@Test
public void testDefaultAlgorithm() throws Exception {

    RsaKey key = getTestRsaKey();

    assertEquals(RsaOaep.ALGORITHM_NAME, key.getDefaultEncryptionAlgorithm());
    assertEquals(RsaOaep.ALGORITHM_NAME, key.getDefaultKeyWrapAlgorithm());
    assertEquals(Rs256.ALGORITHM_NAME, key.getDefaultSignatureAlgorithm());

    // Wrap and Unwrap
    Pair<byte[], String> wrapped   = key.wrapKeyAsync(CEK, key.getDefaultKeyWrapAlgorithm()).get();
    byte[]               unwrapped = key.unwrapKeyAsync(wrapped.getLeft(), wrapped.getRight()).get();

    // Assert
    assertEquals(RsaOaep.ALGORITHM_NAME, wrapped.getRight());
    assertArrayEquals(CEK, unwrapped);

    // Encrypt and Decrypt
    Triple<byte[], byte[], String> encrypted = key.encryptAsync(CEK, null, null, key.getDefaultEncryptionAlgorithm()).get();
    byte[]                         decrypted = key.decryptAsync(encrypted.getLeft(), null, null, null, encrypted.getRight()).get();

    // Assert
    assertEquals(RsaOaep.ALGORITHM_NAME, encrypted.getRight());
    assertArrayEquals(CEK, decrypted);

    key.close();
}
 
Example 30
/**
 * Get the properties from the given {@code configFilePath}, validate that all required properties are present,
 * and return the property map.
 * @param configFilePath {@link Path} to the configuration file.
 * @param errorOnMissingConfigKey If {@code true} will log an error message when the given {@code key} is not contained in {@code configProperties}.
 * @return The {@link Properties} as contained in the given {@code configFilePath}.
 */
public static Pair<Boolean, Properties> getAndValidateConfigFileContentsOnPath(final Path configFilePath,
                                                                               final boolean errorOnMissingConfigKey) {

    Utils.nonNull(configFilePath);

    boolean isValid = true;

    // Read in the contents of the config file:
    final Properties configProperties = new Properties();
    try ( final InputStream inputStream = Files.newInputStream(configFilePath, StandardOpenOption.READ) ) {
        configProperties.load(inputStream);
    }
    catch (final Exception ex) {
        throw new UserException.BadInput("Unable to read from XSV config file: " + configFilePath.toUri().toString(), ex);
    }

    // Validate that it has the correct keys:
    isValid = Stream.of(
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_SRC_FILE,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_VERSION,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_ORIGIN_LOCATION,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_PREPROCESSING_SCRIPT,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_CONTIG_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_START_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_END_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_XSV_DELIMITER,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_NAME)
            .map( key -> configPropertiesContainsKey(configProperties, key, configFilePath, errorOnMissingConfigKey))
            .allMatch( result -> result );

    return Pair.of(isValid, configProperties);
}