org.apache.commons.lang3.tuple.Pair Java Examples
The following examples show how to use org.apache.commons.lang3.tuple.Pair.
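Before working through the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the parts of the Pair API the examples rely on: the Pair.of factory, the ImmutablePair and MutablePair subclasses, and the Map.Entry view.

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class PairBasics {
    public static void main(String[] args) {
        // Pair.of(...) is the usual factory; it returns an ImmutablePair.
        Pair<String, Integer> p = Pair.of("answer", 42);
        System.out.println(p.getLeft());   // answer
        System.out.println(p.getRight());  // 42

        // Pair implements java.util.Map.Entry, so getKey()/getValue()
        // are aliases for getLeft()/getRight().
        System.out.println(p.getKey() + " -> " + p.getValue());

        // The subclasses can also be constructed directly.
        Pair<String, Integer> immutable = new ImmutablePair<>("fixed", 7);
        MutablePair<String, Integer> mutable = MutablePair.of("counter", 0);
        mutable.setRight(mutable.getRight() + 1);
        System.out.println(immutable + " " + mutable); // (fixed,7) (counter,1)
    }
}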
Example #1
Source File: SqlgEdge.java From sqlg with MIT License
/**
 * Called from {@link SqlgVertex} to create a brand new edge.
 *
 * @param sqlgGraph       The graph.
 * @param streaming       If in batch mode this indicates whether it's streaming or not.
 * @param schema          The schema the edge is in.
 * @param table           The edge's label which translates to a table name.
 * @param inVertex        The edge's in vertex.
 * @param outVertex       The edge's out vertex.
 * @param keyValueMapPair A pair of properties of the edge. Left contains all the properties and right the null valued properties.
 */
public SqlgEdge(
        SqlgGraph sqlgGraph,
        boolean streaming,
        String schema,
        String table,
        SqlgVertex inVertex,
        SqlgVertex outVertex,
        Pair<Map<String, Object>, Map<String, Object>> keyValueMapPair) {
    super(sqlgGraph, schema, table);
    this.inVertex = inVertex;
    this.outVertex = outVertex;
    try {
        insertEdge(streaming, keyValueMapPair);
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}
Example #2
Source File: GCBiasSimulatedData.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
public static Pair<ReadCountCollection, double[]> simulatedData(final int numTargets, final int numSamples) {
    final List<Target> phonyTargets = SimulatedTargets.phonyTargets(numTargets);
    final List<String> phonySamples = SimulatedSamples.phonySamples(numSamples);

    final Random random = new Random(13);
    final double[] gcContentByTarget = IntStream.range(0, numTargets)
            .mapToDouble(n -> 0.5 + 0.2 * random.nextGaussian())
            .map(x -> Math.min(x, 0.95)).map(x -> Math.max(x, 0.05)).toArray();
    final double[] gcBiasByTarget = Arrays.stream(gcContentByTarget).map(QUADRATIC_GC_BIAS_CURVE::apply).toArray();

    // model mainly GC bias with a small random amount of non-GC bias
    // thus noise after GC correction should be nearly zero
    final RealMatrix counts = new Array2DRowRealMatrix(numTargets, numSamples);
    counts.walkInOptimizedOrder(new DefaultRealMatrixChangingVisitor() {
        @Override
        public double visit(final int target, final int column, final double value) {
            return gcBiasByTarget[target] * (1.0 + 0.01 * random.nextDouble());
        }
    });
    final ReadCountCollection rcc = new ReadCountCollection(phonyTargets, phonySamples, counts);
    return new ImmutablePair<>(rcc, gcContentByTarget);
}
Example #3
Source File: VisualizerTest.java From MegaSparkDiff with Apache License 2.0
@Test
public void keyCaseTest() {
    Pair<Dataset<Row>, Dataset<Row>> pair = getAppleTablePair("Test1", "Test4");

    boolean flag = true;
    String result1 = "";
    String result2 = "";

    try {
        result1 = generateString(pair.getLeft(), pair.getRight(), "Fruit", 100);
        result2 = generateString(pair.getLeft(), pair.getRight(), "FrUit", 100);
    } catch (Exception ex) {
        flag = false;
    }

    Assert.assertEquals(true, flag);
    Assert.assertEquals(result1, result2);
}
Example #4
Source File: Kafka08DataWriter.java From incubator-gobblin with Apache License 2.0
public Future<WriteResponse> write(Pair<K, V> keyValuePair, final WriteCallback callback) {
    try {
        return new WriteResponseFuture<>(this.producer
                .send(new ProducerRecord<>(topic, keyValuePair.getKey(), keyValuePair.getValue()),
                        new Callback() {
                            @Override
                            public void onCompletion(final RecordMetadata metadata, Exception exception) {
                                if (exception != null) {
                                    callback.onFailure(exception);
                                } else {
                                    callback.onSuccess(WRITE_RESPONSE_WRAPPER.wrap(metadata));
                                }
                            }
                        }), WRITE_RESPONSE_WRAPPER);
    } catch (Exception e) {
        throw new RuntimeException("Failed to create a Kafka write request", e);
    }
}
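Note that Pair implements java.util.Map.Entry, so the getKey() and getValue() calls above are simply aliases for getLeft() and getRight(); this makes a Pair a convenient stand-in for a key/value record.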
Example #5
Source File: CuratedDocumentsExporterTest.java From webanno with Apache License 2.0
@Test
public void thatImportingCorrectionProjectWorks_3_6_1() throws Exception {
    project.setMode(PROJECT_TYPE_CORRECTION);

    // Export the project and import it again
    List<Pair<SourceDocument, String>> imported = runImportAndFetchDocuments(new ZipFile(
            "src/test/resources/exports/Export+Test+-+Curated+correction+project_3_6_1.zip"));

    // Check that the curation for the document in the project is imported
    assertThat(imported).extracting(p -> p.getKey().getName())
            .containsExactlyInAnyOrder("example_sentence.txt");
    // Since WebAnno 3.5.x, the CORRECTION_USER CAS is stored with the annotations
    assertThat(imported).extracting(Pair::getValue)
            .containsExactlyInAnyOrder(CURATION_USER);
}
Example #6
Source File: SpyClientUnderTestFactory.java From azure-cosmosdb-java with MIT License
void initRequestCapture(CompositeHttpClient<ByteBuf, ByteBuf> spyClient) {
    doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            RxClient.ServerInfo serverInfo = invocationOnMock.getArgumentAt(0, RxClient.ServerInfo.class);
            HttpClientRequest<ByteBuf> httpReq = invocationOnMock.getArgumentAt(1, HttpClientRequest.class);

            CompletableFuture<HttpResponseHeaders> f = new CompletableFuture<>();
            requestsResponsePairs.add(Pair.of(httpReq, f));

            return origHttpClient.submit(serverInfo, httpReq)
                    .doOnNext(res -> f.complete(res.getHeaders()))
                    .doOnError(e -> f.completeExceptionally(e));
        }
    }).when(spyClient).submit(Mockito.any(RxClient.ServerInfo.class), Mockito.any(HttpClientRequest.class));
}
Example #7
Source File: CustomTagList.java From TranskribusCore with GNU General Public License v3.0
public List<Pair<CustomTagList, CustomTag>> getCustomTagAndContinuations(CustomTag tag) {
    LinkedList<Pair<CustomTagList, CustomTag>> allTags = new LinkedList<>();
    if (!hasTag(tag))
        return allTags;

    allTags.add(Pair.of(this, tag));
    if (!tag.isContinued())
        return allTags;

    // previous tags:
    Pair<CustomTagList, CustomTag> c = getPreviousContinuedCustomTag(tag);
    while (c != null) {
        allTags.addFirst(c);
        c = c.getLeft().getPreviousContinuedCustomTag(c.getRight());
    }

    // next tags:
    c = getNextContinuedCustomTag(tag);
    while (c != null) {
        allTags.addLast(c);
        c = c.getLeft().getNextContinuedCustomTag(c.getRight());
    }

    return allTags;
}
Example #8
Source File: DataMigratorIntegrationTest.java From alf.io with GNU General Public License v3.0
@Test
public void testFixStuckTickets() {
    List<TicketCategoryModification> categories = Collections.singletonList(
        new TicketCategoryModification(null, "default", AVAILABLE_SEATS,
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            DESCRIPTION, BigDecimal.TEN, false, "", false, null, null, null, null, null, 0, null, null, AlfioMetadata.empty()));
    Pair<Event, String> eventUsername = initEvent(categories);
    Event event = eventUsername.getKey();
    TicketReservationModification trm = new TicketReservationModification();
    trm.setAmount(1);
    trm.setTicketCategoryId(eventManager.loadTicketCategories(event).get(0).getId());
    TicketReservationWithOptionalCodeModification r = new TicketReservationWithOptionalCodeModification(trm, Optional.empty());
    Date expiration = DateUtils.addDays(new Date(), 1);
    String reservationId = ticketReservationManager.createTicketReservation(event,
        Collections.singletonList(r), Collections.emptyList(), expiration, Optional.empty(), Locale.ENGLISH, false);
    // simulate the effect of a reservation cancellation after #392, as described in #391
    ticketReservationRepository.updateReservationStatus(reservationId, TicketReservation.TicketReservationStatus.CANCELLED.name());
    List<Ticket> ticketsInReservation = ticketRepository.findTicketsInReservation(reservationId);
    assertEquals(1, ticketsInReservation.size());
    String uuid = ticketsInReservation.get(0).getUuid();
    assertTrue(ticketsInReservation.stream().allMatch(t -> t.getStatus() == Ticket.TicketStatus.PENDING));
    dataMigrator.fixStuckTickets(event.getId());
    assertSame(Ticket.TicketStatus.RELEASED, ticketRepository.findByUUID(uuid).getStatus());
}
Example #9
Source File: ExtFeignConfiguration.java From onetwo with Apache License 2.0
@Bean
@ConditionalOnMissingBean(okhttp3.OkHttpClient.class)
public OkHttpClient okHttpClient() {
    Pair<Integer, TimeUnit> read = feignProperties.getOkHttpClient().getReadTimeoutTime();
    Pair<Integer, TimeUnit> conn = feignProperties.getOkHttpClient().getConnectTimeoutTime();
    Pair<Integer, TimeUnit> write = feignProperties.getOkHttpClient().getWriteTimeoutTime();
    okhttp3.OkHttpClient.Builder okclientBuilder = new okhttp3.OkHttpClient.Builder()
            .readTimeout(read.getKey(), read.getValue())
            .connectTimeout(conn.getKey(), conn.getValue())
            .writeTimeout(write.getKey(), write.getValue())
            // .connectionPool(new ConnectionPool())
            ;
    if (LangUtils.isNotEmpty(interceptors)) {
        for (Interceptor interceptor : this.interceptors) {
            okclientBuilder.addInterceptor(interceptor);
        }
    }
    return okclientBuilder.build();
}
Example #10
Source File: PostProcessLogicalForm.java From UDepLambda with Apache License 2.0
/**
 * From a given logical expression, the main predicates are extracted and made
 * more readable, and the variables are linked to the sources from which they
 * originated.
 *
 * @param sentence the source sentence
 * @param parse the logical expression to be processed
 * @param lexicalizePredicates lexicalize predicates by appending the event,
 *        e.g., eat(1:e) ^ arg1(1:e , 1:x) becomes eat.arg1(1:e , 1:x)
 * @return the set of main predicates in readable format
 */
public static Set<String> process(Sentence sentence, LogicalExpression parse,
    boolean lexicalizePredicates) {
  List<Literal> mainPredicates = new ArrayList<>();
  Map<Term, List<Integer>> varToEvents = new HashMap<>();
  Map<Term, List<Integer>> varToEntities = new HashMap<>();
  Map<Term, List<Term>> varToConj = new HashMap<>();
  List<Pair<Term, Term>> equalPairs = new ArrayList<>();
  process(mainPredicates, varToEvents, varToEntities, varToConj, equalPairs,
      sentence, parse);

  // TODO(sivareddyg) handle predicates p_CONJ and p_EQUAL in both varToEvents
  // and varToEntities.
  cleanVarToEntities(varToEntities, sentence);
  cleanVarToEvents(varToEvents, sentence);
  populateConj(varToConj, varToEntities, varToEvents);
  populateEquals(equalPairs, varToEntities, varToEvents);
  Set<String> cleanedPredicates = createCleanPredicates(mainPredicates,
      varToEvents, varToEntities, sentence, lexicalizePredicates);
  return cleanedPredicates;
}
Example #11
Source File: SdxServiceTest.java From cloudbreak with Apache License 2.0
@Test
void testCreateNOTInternalSdxClusterFromLightDutyTemplateWhenBaseLocationSpecifiedShouldCreateStackRequestWithSettedUpBaseLocation()
        throws IOException, TransactionExecutionException {
    when(transactionService.required(isA(Supplier.class))).thenAnswer(invocation -> invocation.getArgument(0, Supplier.class).get());
    String lightDutyJson = FileReaderUtils.readFileFromClasspath("/runtime/7.1.0/aws/light_duty.json");
    when(cdpConfigService.getConfigForKey(any())).thenReturn(JsonUtil.readValue(lightDutyJson, StackV4Request.class));
    //doNothing().when(cloudStorageLocationValidator.validate("s3a://some/dir", ));
    SdxClusterRequest sdxClusterRequest = new SdxClusterRequest();
    sdxClusterRequest.setClusterShape(LIGHT_DUTY);
    sdxClusterRequest.setEnvironment("envir");
    SdxCloudStorageRequest cloudStorage = new SdxCloudStorageRequest();
    cloudStorage.setFileSystemType(FileSystemType.S3);
    cloudStorage.setBaseLocation("s3a://some/dir");
    cloudStorage.setS3(new S3CloudStorageV1Parameters());
    sdxClusterRequest.setCloudStorage(cloudStorage);
    long id = 10L;
    when(sdxClusterRepository.save(any(SdxCluster.class))).thenAnswer(invocation -> {
        SdxCluster sdxWithId = invocation.getArgument(0, SdxCluster.class);
        sdxWithId.setId(id);
        return sdxWithId;
    });
    mockEnvironmentCall(sdxClusterRequest, CloudPlatform.AWS);

    Pair<SdxCluster, FlowIdentifier> result = underTest.createSdx(USER_CRN, CLUSTER_NAME, sdxClusterRequest, null);

    SdxCluster createdSdxCluster = result.getLeft();
    assertEquals("s3a://some/dir", createdSdxCluster.getCloudStorageBaseLocation());
}
Example #12
Source File: TestJdbcMetadata.java From datacollector with Apache License 2.0
@NotNull
private Record makeListRecord(Map<String, Pair<Field.Type, Object>> fieldMap) {
    Record record = RecordCreator.create();
    Record.Header header = record.getHeader();
    ArrayList<Field> fields = new ArrayList<>();
    for (Map.Entry<String, Pair<Field.Type, Object>> entry : fieldMap.entrySet()) {
        String fieldName = entry.getKey();
        Field.Type fieldType = entry.getValue().getLeft();
        Field field = Field.create(fieldType, entry.getValue().getRight());
        if (fieldType == Field.Type.DECIMAL) {
            field.setAttribute(HeaderAttributeConstants.ATTR_SCALE, SCALE);
            field.setAttribute(HeaderAttributeConstants.ATTR_PRECISION, PRECISION);
        }
        fields.add(field);
    }
    record.set(Field.create(fields));
    header.setAttribute("table", tableName);
    return record;
}
Example #13
Source File: LeftRegularBipartiteGraphSegmentTest.java From GraphJet with Apache License 2.0
@Test
public void testConcurrentReadWrites() throws Exception {
    LeftRegularBipartiteGraphSegment leftRegularBipartiteGraphSegment =
        new LeftRegularBipartiteGraphSegment(
            4, 3, 2, 1, 2.0, Integer.MAX_VALUE, new IdentityEdgeTypeMask(), new NullStatsReceiver());

    @SuppressWarnings("unchecked")
    List<Pair<Long, Long>> edgesToAdd = Lists.newArrayList(
        Pair.of(1L, 11L), Pair.of(1L, 12L), Pair.of(4L, 41L), Pair.of(2L, 21L),
        Pair.of(4L, 42L), Pair.of(3L, 31L), Pair.of(2L, 22L), Pair.of(1L, 13L),
        Pair.of(4L, 43L),
        Pair.of(5L, 51L) // violates the max num nodes assumption
    );

    testConcurrentReadWriteThreads(leftRegularBipartiteGraphSegment, edgesToAdd);
}
Example #14
Source File: ExplanationsTest.java From SJS with Apache License 2.0
private static String explainErrors(JSEnvironment env, String sourceCode) {
    AstRoot root = new Parser().parse(sourceCode, "", 1);
    SatSolver sat = new Sat4J();
    SJSTypeTheory theory = new SJSTypeTheory(env, null, root);
    List<Integer> hard = new ArrayList<>();
    List<Integer> soft = new ArrayList<>();
    List<ITypeConstraint> constraints = theory.getConstraints();
    for (int i = 0; i < constraints.size(); ++i) {
        (theory.hackyGenerator().hasExplanation(constraints.get(i)) ? soft : hard).add(i);
    }
    Pair<TypeAssignment, Collection<Integer>> result =
        TheorySolver.solve(theory, new SatFixingSetFinder<>(sat), hard, soft);
    ConstraintGenerator g = theory.hackyGenerator();
    StringBuilder buf = new StringBuilder();
    for (int broken : result.getRight()) {
        ITypeConstraint c = theory.hackyConstraintAccess().get(broken);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        g.explainFailure(c, result.getLeft()).prettyprint(new PrintStream(stream));
        buf.append(stream.toString());
    }
    return buf.toString();
}
Example #15
Source File: Splitter.java From yauaa with Apache License 2.0
public List<Pair<Integer, Integer>> createSplitList(char[] characters) {
    List<Pair<Integer, Integer>> result = new ArrayList<>(8);
    int offset = findSplitStart(characters, 1);
    if (offset == -1) {
        return result; // Nothing at all. So we are already done
    }
    while (offset != -1) {
        int start = offset;
        int end = findSplitEnd(characters, start);
        result.add(new ImmutablePair<>(start, end));
        offset = findNextSplitStart(characters, end);
    }
    return result;
}
Example #16
Source File: PlanPrinter.java From spliceengine with GNU Affero General Public License v3.0
private static void pushPlanInfo(Map<String, Object> nodeInfo, List<Pair<String, Integer>> planMap) throws StandardException {
    @SuppressWarnings("unchecked")
    List<Map<String, Object>> children = (List<Map<String, Object>>) nodeInfo.get("children");
    String thisNodeInfo = infoToString(nodeInfo, false);
    Integer level = (Integer) nodeInfo.get("level");
    planMap.add(Pair.of(thisNodeInfo, level));
    for (Map<String, Object> child : children) {
        pushPlanInfo(child, planMap);
    }

    if (!nodeInfo.containsKey("subqueries")) return; // nothing to work with
    @SuppressWarnings("unchecked")
    List<Map<String, Object>> subqueries = (List<Map<String, Object>>) nodeInfo.get("subqueries");
    for (Map<String, Object> subquery : subqueries) {
        Map<String, Object> subqueryNodeInfo = (Map<String, Object>) subquery.get("node");
        pushPlanInfo(subqueryNodeInfo, planMap);
        String subqueryInfo = subqueryToString(subquery, subqueryNodeInfo);
        planMap.add(Pair.of(subqueryInfo, level));
    }
}
Example #17
Source File: ItemHashUtilTest.java From sakai with Educational Community License v2.0
@Test
public void testHashBaseForItemAnswersPreservesNullsLiterally() throws IOException, NoSuchAlgorithmException, ServerOverloadException {
    final ItemData item = new ItemData();
    item.setTypeId(TypeIfc.FILL_IN_BLANK);

    // sequence, at least, is required, else ordering is completely non-deterministic
    final Pair<Answer, String> answer1 = answerAndExpectedHashBaseFor(item, 1L, null, null, null, null, null, null);
    final Pair<Answer, String> answer2 = answerAndExpectedHashBaseFor(item, 2L, null, null, null, null, null, null);

    final ItemText itemText1 = new ItemText(item, 1L, null, Sets.newSet(answerFrom(answer1), answerFrom(answer2)));
    answerFrom(answer1).setItemText(itemText1);
    answerFrom(answer2).setItemText(itemText1);
    item.setItemTextSet(Sets.newSet(itemText1));

    final StringBuilder expectedHashBase = new StringBuilder()
            .append(stringFrom(answer1))
            .append(stringFrom(answer2));

    final StringBuilder actualHashBase = new StringBuilder();
    itemHashUtil.hashBaseForItemAnswers(item, actualHashBase);
    assertThat(actualHashBase.toString(), equalTo(expectedHashBase.toString()));
}
Example #18
Source File: SegmentMergeUtils.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
/**
 * Given three data sets (corresponding to left, center, and right segments), returns a pair of scores based on
 * the Hodges-Lehmann estimator between (left, center) and (center, right); the sum of the scores will be unity.
 * @param leftData   data set for left segment
 * @param centerData data set for center segment
 * @param rightData  data set for right segment
 * @return pair of scores based on the Hodges-Lehmann estimator
 */
private static Pair<Double, Double> calculateHodgesLehmannScores(final double[] leftData,
                                                                 final double[] centerData,
                                                                 final double[] rightData) {
    final double leftDistance = hodgesLehmannDistance(leftData, centerData);
    final double rightDistance = hodgesLehmannDistance(centerData, rightData);
    if (leftDistance == 0. && rightDistance == 0.) {
        return Pair.of(0.5, 0.5);
    }
    //if center segment is above or below both left and right segments,
    //assign score 1 to the closer segment and 0 to the other
    if (leftDistance * rightDistance < 0) {
        return Math.abs(leftDistance) < Math.abs(rightDistance) ? Pair.of(1., 0.) : Pair.of(0., 1.);
    }
    return Pair.of(1. - Math.abs(leftDistance / (leftDistance + rightDistance)),
                   1. - Math.abs(rightDistance / (leftDistance + rightDistance)));
}
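As a concrete check of the normalization in the final branch: with leftDistance = 1.0 and rightDistance = 3.0 (same sign), the method returns Pair.of(0.75, 0.25), so the boundary with the smaller Hodges-Lehmann distance receives the larger score and the two scores sum to unity, as documented.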
Example #19
Source File: FactoryBlockPattern.java From GregTech with GNU Lesser General Public License v3.0
private List<Pair<Predicate<BlockWorldState>, IntRange>> makeCountLimitsList() {
    List<Pair<Predicate<BlockWorldState>, IntRange>> array = new ArrayList<>(countLimits.size());

    for (Entry<Character, IntRange> entry : this.countLimits.entrySet()) {
        Predicate<BlockWorldState> predicate = this.symbolMap.get(entry.getKey());
        array.add(Pair.of(predicate, entry.getValue()));
    }
    return array;
}
Example #20
Source File: TextCollatorRegistryICU.java From fdb-record-layer with Apache License 2.0
@Override
@Nonnull
public TextCollator getTextCollator(@Nonnull String locale, int strength) {
    return MapUtils.computeIfAbsent(collators, Pair.of(locale, strength), key -> {
        final Collator collator = DEFAULT_LOCALE.equals(locale)
                ? Collator.getInstance()
                : Collator.getInstance(new ULocale(locale));
        collator.setStrength(strength);
        return new TextCollatorICU(collator);
    });
}
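Using a Pair as the composite cache key works because commons-lang3 pairs implement equals() and hashCode() over both elements, so each (locale, strength) combination maps to its own cached collator without a dedicated key class.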
Example #21
Source File: AbstractRestfulBinder.java From statefulj with Apache License 2.0
protected Pair<String, String> parseMethod(String event) {
    Matcher matcher = getMethodPattern().matcher(event);
    if (!matcher.matches()) {
        throw new RuntimeException("Unable to parse event=" + event);
    }
    return new ImmutablePair<String, String>(matcher.group(2), matcher.group(3));
}
Example #22
Source File: LogSegmentCache.java From distributedlog with Apache License 2.0
/**
 * Diff with new received segment list <code>segmentsReceived</code>.
 *
 * @param segmentsReceived
 *          new received segment list
 * @return segments added (left) and removed (right).
 */
public Pair<Set<String>, Set<String>> diff(Set<String> segmentsReceived) {
    Set<String> segmentsAdded;
    Set<String> segmentsRemoved;
    synchronized (logSegments) {
        Set<String> segmentsCached = logSegments.keySet();
        segmentsAdded = Sets.difference(segmentsReceived, segmentsCached).immutableCopy();
        segmentsRemoved = Sets.difference(segmentsCached, segmentsReceived).immutableCopy();
    }
    return Pair.of(segmentsAdded, segmentsRemoved);
}
Example #23
Source File: CommonsSpiderService.java From spider with GNU General Public License v3.0
public String exportQuartz() {
    Map<String, Long> result = Maps.newHashMap();
    for (JobKey jobKey : quartzManager.listAll(QUARTZ_JOB_GROUP_NAME)) {
        Pair<JobDetail, Trigger> pair = quartzManager.findInfo(jobKey);
        long hours = ((SimpleTrigger) ((SimpleScheduleBuilder) pair.getRight().getScheduleBuilder()).build())
                .getRepeatInterval() / DateBuilder.MILLISECONDS_IN_HOUR;
        String name = ((SpiderInfo) pair.getLeft().getJobDataMap().get("spiderInfo")).getId();
        result.put(name, hours);
    }
    return new Gson().toJson(result);
}
Example #24
Source File: ElasticsearchMetadataModule.java From heroic with Apache License 2.0
@Provides
@ElasticsearchScope
public RateLimitedCache<Pair<String, HashCode>> writeCache(HeroicReporter reporter) {
    final Cache<Pair<String, HashCode>, Boolean> cache = CacheBuilder
        .newBuilder()
        .concurrencyLevel(writeCacheConcurrency)
        .maximumSize(writeCacheMaxSize)
        .expireAfterWrite(writeCacheDurationMinutes, TimeUnit.MINUTES)
        .build();

    reporter.registerCacheSize("elasticsearch-metadata-write-through", cache::size);

    if (writesPerSecond <= 0d) {
        return new DisabledRateLimitedCache<>(cache.asMap());
    }

    if (distributedCacheSrvRecord.length() > 0) {
        return new DistributedRateLimitedCache<>(
            cache.asMap(),
            RateLimiter.create(writesPerSecond, rateLimitSlowStartSeconds, SECONDS),
            MemcachedConnection.create(distributedCacheSrvRecord),
            toIntExact(Duration.of(writeCacheDurationMinutes, MINUTES).convert(SECONDS)),
            reporter.newMemcachedReporter("metadata")
        );
    }

    return new DefaultRateLimitedCache<>(cache.asMap(),
        RateLimiter.create(writesPerSecond, rateLimitSlowStartSeconds, TimeUnit.SECONDS));
}
Example #25
Source File: FileToFileTest.java From MegaSparkDiff with Apache License 2.0
@Test
public void testCompareTable2IsSubset() {
    Pair<Dataset<Row>, Dataset<Row>> pair = returnDiff("Test1", "Test5");

    // the expectation is that table2 is a complete subset of table1
    if (pair.getLeft().count() != 5)
        Assert.fail("Expected 5 differences coming from left table."
                + " Instead, found " + pair.getLeft().count() + ".");

    if (pair.getRight().count() != 0)
        Assert.fail("Expected 0 differences coming from right table."
                + " Instead, found " + pair.getRight().count() + ".");
}
Example #26
Source File: CamelUserProvisioningManager.java From syncope with Apache License 2.0
@Override
public Pair<UserUR, List<PropagationStatus>> update(
        final UserUR userUR,
        final Set<String> excludedResources,
        final boolean nullPriorityAsync,
        final String updater,
        final String context) {

    return update(userUR, new ProvisioningReport(), null, excludedResources, nullPriorityAsync, updater, context);
}
Example #27
Source File: KafkaApisTest.java From kop with Apache License 2.0
private Map<TopicPartition, FetchRequest.PartitionData> createPartitionMap(int maxPartitionBytes,
                                                                           List<TopicPartition> topicPartitions,
                                                                           Map<TopicPartition, Long> offsetMap) {
    return topicPartitions.stream()
        .map(topic -> Pair.of(topic,
            new FetchRequest.PartitionData(offsetMap.getOrDefault(topic, 0L), 0L, maxPartitionBytes)))
        .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
}
Example #28
Source File: IdMStatusProvider.java From syncope with Apache License 2.0
@Override
public Optional<Pair<ConnObjectTO, ConnObjectTO>> get(
        final String anyTypeKey,
        final String connObjectKeyValue,
        final String resource) {

    return ReconStatusUtils.getReconStatus(anyTypeKey, connObjectKeyValue, resource).
            map(status -> Pair.of(status.getOnSyncope(), status.getOnResource()));
}
Example #29
Source File: XsvLocatableTableCodec.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Get the properties from the given {@code configFilePath}, validate that all required properties are present,
 * and return the property map.
 * @param configFilePath {@link Path} to the configuration file.
 * @param errorOnMissingConfigKey If {@code true} will log an error message when the given {@code key} is not contained in {@code configProperties}.
 * @return The {@link Properties} as contained in the given {@code configFilePath}.
 */
public static Pair<Boolean, Properties> getAndValidateConfigFileContentsOnPath(final Path configFilePath,
                                                                               final boolean errorOnMissingConfigKey) {
    Utils.nonNull(configFilePath);

    boolean isValid = true;

    // Read in the contents of the config file:
    final Properties configProperties = new Properties();
    try ( final InputStream inputStream = Files.newInputStream(configFilePath, StandardOpenOption.READ) ) {
        configProperties.load(inputStream);
    }
    catch (final Exception ex) {
        throw new UserException.BadInput("Unable to read from XSV config file: " + configFilePath.toUri().toString(), ex);
    }

    // Validate that it has the correct keys:
    isValid = Stream.of(
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_SRC_FILE,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_VERSION,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_ORIGIN_LOCATION,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_PREPROCESSING_SCRIPT,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_CONTIG_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_START_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_END_COLUMN,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_XSV_DELIMITER,
                DataSourceUtils.CONFIG_FILE_FIELD_NAME_NAME)
            .map(key -> configPropertiesContainsKey(configProperties, key, configFilePath, errorOnMissingConfigKey))
            .allMatch(result -> result);

    return Pair.of(isValid, configProperties);
}
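Returning Pair.of(isValid, configProperties) bundles the validation flag with the parsed properties rather than throwing, leaving the caller free to decide whether a readable but incomplete config file is fatal; this is a common use of Pair as a lightweight two-value return.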
Example #30
Source File: MatrixAppendCPInstruction.java From systemds with Apache License 2.0
@Override
public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) {
    // TODO: break append to cbind and rbind for full compilation chain
    String opcode = _type.toString().toLowerCase();
    return Pair.of(output.getName(),
        new LineageItem(opcode, LineageItemUtils.getLineage(ec, input1, input2)));
}