Java Code Examples for java.util.SortedMap#values()
The following examples show how to use java.util.SortedMap#values().
The examples are extracted from open source projects; the source project, file, and license are noted above each example.
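As background for the examples below: values() on a SortedMap returns a collection view whose iterator walks the values in ascending order of the map's keys. Here is a minimal, self-contained sketch (not taken from any of the projects below) illustrating that behavior:

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapValuesDemo {
    public static void main(String[] args) {
        SortedMap<Integer, String> map = new TreeMap<>();
        map.put(3, "three");
        map.put(1, "one");
        map.put(2, "two");
        // values() follows the key order of the TreeMap: prints one, two, three
        for (String value : map.values()) {
            System.out.println(value);
        }
    }
}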
Example 1
Source Project: extra-enforcer-rules File: RequirePropertyDiverges.java License: Apache License 2.0
/**
 * As Xpp3Dom is very picky about the order of children while comparing, create a new list where the children
 * are added in alphabetical order. See <a href="https://jira.codehaus.org/browse/MOJO-1931">MOJO-1931</a>.
 *
 * @param originalListFromPom order not specified
 * @return a list where children's member are alphabetically sorted.
 */
private List<Xpp3Dom> createRuleListWithNameSortedChildren( final List<Xpp3Dom> originalListFromPom )
{
    final List<Xpp3Dom> listWithSortedEntries = new ArrayList<Xpp3Dom>( originalListFromPom.size() );
    for ( Xpp3Dom unsortedXpp3Dom : originalListFromPom )
    {
        final Xpp3Dom sortedXpp3Dom = new Xpp3Dom( getRuleName() );
        final SortedMap<String, Xpp3Dom> childrenMap = new TreeMap<String, Xpp3Dom>();
        final Xpp3Dom[] children = unsortedXpp3Dom.getChildren();
        for ( Xpp3Dom child : children )
        {
            childrenMap.put( child.getName(), child );
        }
        for ( Xpp3Dom entry : childrenMap.values() )
        {
            sortedXpp3Dom.addChild( entry );
        }
        listWithSortedEntries.add( sortedXpp3Dom );
    }
    return listWithSortedEntries;
}
Example 2
Source Project: atomix File: SegmentedJournal.java License: Apache License 2.0
/**
 * Compacts the journal up to the given index.
 * <p>
 * The semantics of compaction are not specified by this interface.
 *
 * @param index The index up to which to compact the journal.
 */
public void compact(long index) {
    Map.Entry<Long, JournalSegment<E>> segmentEntry = segments.floorEntry(index);
    if (segmentEntry != null) {
        SortedMap<Long, JournalSegment<E>> compactSegments = segments.headMap(segmentEntry.getValue().index());
        if (!compactSegments.isEmpty()) {
            log.debug("{} - Compacting {} segment(s)", name, compactSegments.size());
            for (JournalSegment segment : compactSegments.values()) {
                log.trace("Deleting segment: {}", segment);
                segment.close();
                segment.delete();
            }
            compactSegments.clear();
            resetHead(segmentEntry.getValue().index());
        }
    }
}
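Note that headMap(...) returns a view backed by the original map, so iterating its values() and then calling clear() on the view, as the example above does, also removes those entries from the underlying segments map. A minimal sketch of that behavior, independent of the Atomix types:

import java.util.SortedMap;
import java.util.TreeMap;

public class HeadMapClearDemo {
    public static void main(String[] args) {
        SortedMap<Long, String> segments = new TreeMap<>();
        segments.put(1L, "segment-1");
        segments.put(2L, "segment-2");
        segments.put(3L, "segment-3");

        // View of all entries with keys strictly less than 3
        SortedMap<Long, String> compacted = segments.headMap(3L);
        for (String segment : compacted.values()) {
            System.out.println("Deleting " + segment);
        }
        compacted.clear();

        // The backing map now only contains the entry for key 3
        System.out.println(segments); // {3=segment-3}
    }
}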
Example 3
Source Project: lams File: LearningController.java License: GNU General Public License v2.0
@RequestMapping(value = "/showOtherUsersAnswers")
private String showOtherUsersAnswers(HttpServletRequest request) {
    String sessionMapID = request.getParameter("sessionMapID");
    request.setAttribute("sessionMapID", sessionMapID);
    SessionMap<String, Object> sessionMap = (SessionMap<String, Object>) request.getSession()
            .getAttribute(sessionMapID);

    SortedMap<Integer, AnswerDTO> surveyItemMap = getQuestionList(sessionMap);
    Long sessionId = (Long) sessionMap.get(AttributeNames.PARAM_TOOL_SESSION_ID);

    List<AnswerDTO> answerDtos = new ArrayList<>();
    for (SurveyQuestion question : surveyItemMap.values()) {
        AnswerDTO answerDto = surveyService.getQuestionResponse(sessionId, question.getUid());
        answerDtos.add(answerDto);
    }
    request.setAttribute("answerDtos", answerDtos);

    SurveyUser surveyLearner = (SurveyUser) sessionMap.get(SurveyConstants.ATTR_USER);
    surveyService.setResponseFinalized(surveyLearner.getUid());

    int countFinishedUser = surveyService.getCountFinishedUsers(sessionId);
    request.setAttribute(SurveyConstants.ATTR_COUNT_FINISHED_USERS, countFinishedUser);

    return "pages/learning/resultOtherUsers";
}
Example 4
Source Project: nifi File: CSVRecordReader.java License: Apache License 2.0
private List<RecordField> getRecordFields() {
    if (this.recordFields != null) {
        return this.recordFields;
    }

    // Use a SortedMap keyed by index of the field so that we can get a List of field names in the correct order
    final SortedMap<Integer, String> sortedMap = new TreeMap<>();
    for (final Map.Entry<String, Integer> entry : csvParser.getHeaderMap().entrySet()) {
        sortedMap.put(entry.getValue(), entry.getKey());
    }

    final List<RecordField> fields = new ArrayList<>();
    final List<String> rawFieldNames = new ArrayList<>(sortedMap.values());
    for (final String rawFieldName : rawFieldNames) {
        final Optional<RecordField> option = schema.getField(rawFieldName);
        if (option.isPresent()) {
            fields.add(option.get());
        } else {
            fields.add(new RecordField(rawFieldName, RecordFieldType.STRING.getDataType()));
        }
    }

    this.recordFields = fields;
    return fields;
}
Example 5
Source Project: ironjacamar File: IronJacamar.java License: Eclipse Public License 1.0
/**
 * Filter and sort
 * @param fms The FrameworkMethods
 * @param isStatic Filter static
 * @return The filtered and sorted FrameworkMethods
 * @exception Exception If an order definition is incorrect
 */
private Collection<FrameworkMethod> filterAndSort(List<FrameworkMethod> fms, boolean isStatic) throws Exception {
    SortedMap<Integer, FrameworkMethod> m = new TreeMap<>();

    for (FrameworkMethod fm : fms) {
        SecurityActions.setAccessible(fm.getMethod());
        if (Modifier.isStatic(fm.getMethod().getModifiers()) == isStatic) {
            Deployment deployment = (Deployment) fm.getAnnotation(Deployment.class);
            int order = deployment.order();
            if (order <= 0 || m.containsKey(Integer.valueOf(order)))
                throw new Exception("Incorrect order definition '" + order + "' on " +
                        fm.getDeclaringClass().getName() + "#" + fm.getName());
            m.put(Integer.valueOf(order), fm);
        }
    }

    return m.values();
}
Example 6
Source Project: quarks File: MetricsBaseTest.java License: Apache License 2.0
private final void counter(String[] data) throws Exception {
    Topology t = newTopology();
    TStream<String> s = t.strings(data);
    s = Metrics.counter(s);

    waitUntilComplete(t, s, data);

    if (metricRegistry != null) {
        SortedMap<String, Counter> counters = metricRegistry.getCounters();
        assertEquals(1, counters.size());
        Collection<Counter> values = counters.values();
        for (Counter v : values) {
            assertEquals(data.length, v.getCount());
        }
    }
}
Example 7
Source Project: beam File: IsmReaderImpl.java License: Apache License 2.0
@Override
public boolean advance() throws IOException {
    // This is in a while loop because the blocks that we are asked to look into may
    // not contain the key prefix.
    while (iterator == null || !iterator.hasNext()) {
        // If there are no blocks to iterate over we can return false
        if (!blockEntriesIterator.hasNext()) {
            return false;
        }

        NavigableMap<RandomAccessData, WindowedValue<IsmRecord<V>>> map;
        try (Closeable counterCloseable = IsmReader.setSideInputReadContext(readCounter)) {
            IsmShardKey nextBlock = blockEntriesIterator.next();
            map = fetch(nextBlock);
        }
        SortedMap<RandomAccessData, WindowedValue<IsmRecord<V>>> submap =
                map.subMap(prefix, prefixUpperBound);
        Collection<WindowedValue<IsmRecord<V>>> values = submap.values();
        iterator = values.iterator();
    }
    current = Optional.of(iterator.next());
    return true;
}
Example 8
Source Project: Pydev File: AbstractAdditionalTokensInfo.java License: Eclipse Public License 1.0
/**
 * @param qualifier
 * @param initialsToInfo this is where we are going to get the info from (currently: inner or top level list)
 * @param toks (out) the tokens will be added to this list
 * @return
 */
protected void getWithFilter(String qualifier, SortedMap<String, Set<IInfo>> initialsToInfo,
        Collection<IInfo> toks, Filter filter, boolean useLowerCaseQual) {
    String initials = getInitials(qualifier);
    String qualToCompare = qualifier;
    if (useLowerCaseQual) {
        qualToCompare = qualifier.toLowerCase();
    }

    //get until the end of the alphabet
    SortedMap<String, Set<IInfo>> subMap = initialsToInfo.subMap(initials, initials + "\uffff\uffff\uffff\uffff");

    for (Set<IInfo> listForInitials : subMap.values()) {
        for (IInfo info : listForInitials) {
            if (filter.doCompare(qualToCompare, info)) {
                toks.add(info);
            }
        }
    }
}
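The example above relies on a common prefix-range idiom: because a String-keyed TreeMap is ordered lexicographically, subMap(initials, initials + "\uffff...") selects every entry whose key starts with the given initials (for all practical keys), and values() then iterates just that slice. A small sketch of the same idiom, unrelated to the Pydev types:

import java.util.SortedMap;
import java.util.TreeMap;

public class PrefixRangeDemo {
    public static void main(String[] args) {
        SortedMap<String, Integer> index = new TreeMap<>();
        index.put("apple", 1);
        index.put("apricot", 2);
        index.put("banana", 3);

        // Entries whose keys start with "ap"
        SortedMap<String, Integer> prefixView = index.subMap("ap", "ap" + "\uffff");
        for (Integer value : prefixView.values()) {
            System.out.println(value); // prints 1 then 2
        }
    }
}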
Example 9
Source Project: hbase File: HBaseFsck.java License: Apache License 2.0
/**
 * Prints summary of all tables found on the system.
 */
private void printTableSummary(SortedMap<TableName, HbckTableInfo> tablesInfo) {
    StringBuilder sb = new StringBuilder();
    int numOfSkippedRegions;
    errors.print("Summary:");
    for (HbckTableInfo tInfo : tablesInfo.values()) {
        numOfSkippedRegions = (skippedRegions.containsKey(tInfo.getName())) ?
                skippedRegions.get(tInfo.getName()).size() : 0;

        if (errors.tableHasErrors(tInfo)) {
            errors.print("Table " + tInfo.getName() + " is inconsistent.");
        } else if (numOfSkippedRegions > 0) {
            errors.print("Table " + tInfo.getName() + " is okay (with "
                    + numOfSkippedRegions + " skipped regions).");
        } else {
            errors.print("Table " + tInfo.getName() + " is okay.");
        }
        errors.print(" Number of regions: " + tInfo.getNumRegions());
        if (numOfSkippedRegions > 0) {
            Set<String> skippedRegionStrings = skippedRegions.get(tInfo.getName());
            System.out.println(" Number of skipped regions: " + numOfSkippedRegions);
            System.out.println(" List of skipped regions:");
            for (String sr : skippedRegionStrings) {
                System.out.println(" " + sr);
            }
        }
        sb.setLength(0); // clear out existing buffer, if any.
        sb.append(" Deployed on: ");
        for (ServerName server : tInfo.deployedOn) {
            sb.append(" " + server.toString());
        }
        errors.print(sb.toString());
    }
}
Example 10
Source Project: streaminer File: HyperLogLogPlus.java License: Apache License 2.0
private static int[] getNearestNeighbors(SortedMap<Double, Integer> distanceMap) {
    int[] nearest = new int[6];
    int i = 0;
    for (Integer index : distanceMap.values()) {
        nearest[i++] = index;
        if (i >= 6) {
            break;
        }
    }
    return nearest;
}
Example 11
Source Project: Kylin File: HyperLogLogPlusTable.java License: Apache License 2.0
public static int[] getNearestNeighbors(SortedMap<Double, Integer> distanceMap) {
    int[] nearest = new int[6];
    int i = 0;
    for (Integer index : distanceMap.values()) {
        nearest[i++] = index;
        if (i >= 6) {
            break;
        }
    }
    return nearest;
}
Example 12
Source Project: hbase-indexer File: IndexerMetricsUtil.java License: Apache License 2.0
public static void shutdownMetrics(String indexerName) {
    SortedMap<String, SortedMap<MetricName, Metric>> groupedMetrics = Metrics.defaultRegistry().groupedMetrics(
            new IndexerMetricPredicate(indexerName));
    for (SortedMap<MetricName, Metric> metricMap : groupedMetrics.values()) {
        for (MetricName metricName : metricMap.keySet()) {
            Metrics.defaultRegistry().removeMetric(metricName);
        }
    }
}
Example 13
Source Project: TakinRPC File: DefaultRaftLog.java License: Apache License 2.0
public boolean append(@Nonnull AppendEntries appendEntries) {
    final long prevLogIndex = appendEntries.getPrevLogIndex();
    final long prevLogTerm = appendEntries.getPrevLogTerm();
    final List<Entry> entries = appendEntries.getEntriesList();

    EntryMeta previousEntry = entryIndex.get(prevLogIndex);
    if ((previousEntry == null) || (previousEntry.term != prevLogTerm)) {
        LOGGER.debug("Append prevLogIndex {} prevLogTerm {} previousEntry {}", prevLogIndex, prevLogTerm, previousEntry);
        return false;
    }

    SortedMap<Long, EntryMeta> old = this.entryIndex.tailMap(prevLogIndex + 1);
    for (EntryMeta e : old.values()) {
        try {
            LOGGER.debug("Deleting {}", e.index);
            journal.delete(e.location);
        } catch (IOException e1) {
            e1.printStackTrace();
        }
    }
    old.clear();

    lastLogIndex = prevLogIndex;
    for (Entry entry : entries) {
        storeEntry(++lastLogIndex, entry);
    }
    return true;
}
Example 14
Source Project: ReactionDecoder File: ExplicitHydrogenSingleUpWedgeRule.java License: GNU Lesser General Public License v3.0
/**
 *
 * @param centralAtom
 * @param atomContainer
 * @param angleMap
 * @return
 */
@Override
public IStereoElement execute(IAtom centralAtom, IAtomContainer atomContainer, SortedMap<Double, IBond> angleMap) {
    int[] permutation = getMatchPermutation();
    List<IBond> bonds = new ArrayList<>(angleMap.values());
    IAtom[] ligandAtoms = new IAtom[4];
    for (int index = 0; index < 4; index++) {
        IBond bond = bonds.get(permutation[index]);
        ligandAtoms[index] = bond.getOther(centralAtom);
    }
    ITetrahedralChirality.Stereo chirality = CLOCKWISE;
    return new TetrahedralChirality(centralAtom, ligandAtoms, chirality);
}
Example 15
Source Project: big-c File: InMemoryPlan.java License: Apache License 2.0
@Override
public void archiveCompletedReservations(long tick) {
    // Since we are looking for old reservations, read lock is optimal
    LOG.debug("Running archival at time: {}", tick);
    List<InMemoryReservationAllocation> expiredReservations =
            new ArrayList<InMemoryReservationAllocation>();
    readLock.lock();
    // archive reservations and delete the ones which are beyond
    // the reservation policy "window"
    try {
        long archivalTime = tick - policy.getValidWindow();
        ReservationInterval searchInterval =
                new ReservationInterval(archivalTime, archivalTime);
        SortedMap<ReservationInterval, Set<InMemoryReservationAllocation>> reservations =
                currentReservations.headMap(searchInterval, true);
        if (!reservations.isEmpty()) {
            for (Set<InMemoryReservationAllocation> reservationEntries : reservations.values()) {
                for (InMemoryReservationAllocation reservation : reservationEntries) {
                    if (reservation.getEndTime() <= archivalTime) {
                        expiredReservations.add(reservation);
                    }
                }
            }
        }
    } finally {
        readLock.unlock();
    }
    if (expiredReservations.isEmpty()) {
        return;
    }
    // Need write lock only if there are any reservations to be deleted
    writeLock.lock();
    try {
        for (InMemoryReservationAllocation expiredReservation : expiredReservations) {
            removeReservation(expiredReservation);
        }
    } finally {
        writeLock.unlock();
    }
}
Example 16
Source Project: audiveris File: DistancesBuilder.java License: GNU Affero General Public License v3.0
/**
 * Paint the "neutralized" lines (staff lines, ledgers, stems) with a special value,
 * so that template matching can ignore these locations.
 */
private void paintLines ()
{
    // Neutralize foreground due to staff lines / ledgers and stems
    for (SystemInfo system : sheet.getSystems()) {
        for (Staff staff : system.getStaves()) {
            // "Erase" staff lines
            for (LineInfo line : staff.getLines()) {
                // Paint the line glyph
                Glyph glyph = line.getGlyph();
                paintGlyph(glyph);

                // Also paint this line even at crossings with vertical objects
                double halfLine = 0.5 * glyph.getMeanThickness(Orientation.HORIZONTAL);
                Point2D leftPt = line.getEndPoint(LEFT);
                Point2D rightPt = line.getEndPoint(RIGHT);
                int xMin = (int) Math.floor(leftPt.getX());
                int xMax = (int) Math.ceil(rightPt.getX());

                for (int x = xMin; x <= xMax; x++) {
                    double yl = line.yAt((double) x);
                    int yMin = (int) Math.rint(yl - halfLine);
                    int yMax = (int) Math.rint(yl + halfLine);

                    for (int y = yMin; y <= yMax; y++) {
                        table.setValue(x, y, ChamferDistance.VALUE_UNKNOWN);
                    }
                }
            }

            // "Erase" ledgers
            SortedMap<Integer, List<LedgerInter>> ledgerMap = staff.getLedgerMap();
            for (List<LedgerInter> ledgers : ledgerMap.values()) {
                for (LedgerInter ledger : ledgers) {
                    paintGlyph(ledger.getGlyph());
                }
            }
        }

        // "Erase" stem seeds
        List<Glyph> systemSeeds = system.getGroupedGlyphs(GlyphGroup.VERTICAL_SEED);
        for (Glyph seed : systemSeeds) {
            paintGlyph(seed);
        }
    }
}
Example 17
Source Project: kfs File: AssetDepreciationServiceImpl.java License: GNU Affero General Public License v3.0
/**
 * This method stores the depreciation transactions in the general pending entry table and creates a new documentHeader entry.
 * <p>
 *
 * @param trans SortedMap with the transactions
 * @return none
 */
protected void processGeneralLedgerPendingEntry(Integer fiscalYear, Integer fiscalMonth, List<String> documentNos,
        SortedMap<String, AssetDepreciationTransaction> trans) {
    LOG.debug("populateExplicitGeneralLedgerPendingEntry(AccountingDocument, AccountingLine, GeneralLedgerPendingEntrySequenceHelper, GeneralLedgerPendingEntry) - start");

    String financialSystemDocumentTypeCodeCode;
    try {
        String documentNumber = createNewDepreciationDocument(documentNos);
        financialSystemDocumentTypeCodeCode = CamsConstants.DocumentTypeName.ASSET_DEPRECIATION;
        LOG.debug(CamsConstants.Depreciation.DEPRECIATION_BATCH + "Depreciation Document Type Code: " + financialSystemDocumentTypeCodeCode);

        Timestamp transactionTimestamp = new Timestamp(dateTimeService.getCurrentDate().getTime());
        GeneralLedgerPendingEntrySequenceHelper sequenceHelper = new GeneralLedgerPendingEntrySequenceHelper();
        List<GeneralLedgerPendingEntry> saveList = new ArrayList<GeneralLedgerPendingEntry>();
        int counter = 0;

        for (AssetDepreciationTransaction t : trans.values()) {
            if (t.getTransactionAmount().isNonZero()) {
                counter++;
                LOG.debug(CamsConstants.Depreciation.DEPRECIATION_BATCH + "Creating GLPE entries for asset:" + t.getCapitalAssetNumber());
                GeneralLedgerPendingEntry explicitEntry = new GeneralLedgerPendingEntry();
                explicitEntry.setFinancialSystemOriginationCode(KFSConstants.ORIGIN_CODE_KUALI);
                explicitEntry.setDocumentNumber(documentNumber);
                explicitEntry.setTransactionLedgerEntrySequenceNumber(new Integer(sequenceHelper.getSequenceCounter()));
                sequenceHelper.increment();
                explicitEntry.setChartOfAccountsCode(t.getChartOfAccountsCode());
                explicitEntry.setAccountNumber(t.getAccountNumber());
                explicitEntry.setSubAccountNumber(null);
                explicitEntry.setFinancialObjectCode(t.getFinancialObjectCode());
                explicitEntry.setFinancialSubObjectCode(null);
                explicitEntry.setFinancialBalanceTypeCode(BALANCE_TYPE_ACTUAL);
                explicitEntry.setFinancialObjectTypeCode(t.getFinancialObjectTypeCode());
                explicitEntry.setUniversityFiscalYear(fiscalYear);
                explicitEntry.setUniversityFiscalPeriodCode(StringUtils.leftPad(fiscalMonth.toString().trim(), 2, "0"));
                explicitEntry.setTransactionLedgerEntryDescription(t.getTransactionLedgerEntryDescription());
                explicitEntry.setTransactionLedgerEntryAmount(t.getTransactionAmount().abs());
                explicitEntry.setTransactionDebitCreditCode(t.getTransactionType());
                explicitEntry.setTransactionDate(new java.sql.Date(transactionTimestamp.getTime()));
                explicitEntry.setFinancialDocumentTypeCode(financialSystemDocumentTypeCodeCode);
                explicitEntry.setFinancialDocumentApprovedCode(KFSConstants.DocumentStatusCodes.APPROVED);
                explicitEntry.setVersionNumber(new Long(1));
                explicitEntry.setTransactionEntryProcessedTs(new java.sql.Timestamp(transactionTimestamp.getTime()));
                // this.generalLedgerPendingEntryService.save(explicitEntry);
                saveList.add(explicitEntry);

                if (counter % 1000 == 0) {
                    // save here
                    getDepreciationBatchDao().savePendingGLEntries(saveList);
                    saveList.clear();
                }
                if (sequenceHelper.getSequenceCounter() == 99999) {
                    // create new document and sequence is reset
                    documentNumber = createNewDepreciationDocument(documentNos);
                    sequenceHelper = new GeneralLedgerPendingEntrySequenceHelper();
                }
            }
        }
        // save last list
        getDepreciationBatchDao().savePendingGLEntries(saveList);
        saveList.clear();
    } catch (Exception e) {
        LOG.error("Error occurred", e);
        throw new IllegalStateException(kualiConfigurationService.getPropertyValueAsString(CamsKeyConstants.Depreciation.ERROR_WHEN_UPDATING_GL_PENDING_ENTRY_TABLE) + " :" + e.getMessage());
    }
    LOG.debug("populateExplicitGeneralLedgerPendingEntry(AccountingDocument, AccountingLine, GeneralLedgerPendingEntrySequenceHelper, GeneralLedgerPendingEntry) - end");
}
Example 18
Source Project: intellij-spring-assistant File: SuggestionServiceImpl.java License: MIT License
private List<LookupElementBuilder> doFindSuggestionsForQueryPrefix(Module module,
    Trie<String, MetadataSuggestionNode> rootSearchIndex, FileType fileType, PsiElement element,
    @Nullable List<String> ancestralKeys, String queryWithDotDelimitedPrefixes,
    @Nullable Set<String> siblingsToExclude) {
  debug(() -> log.debug("Search requested for " + queryWithDotDelimitedPrefixes));
  StopWatch timer = new StopWatch();
  timer.start();
  try {
    String[] querySegmentPrefixes = toSanitizedPathSegments(queryWithDotDelimitedPrefixes);
    Set<Suggestion> suggestions = null;
    if (ancestralKeys != null) {
      String[] ancestralKeySegments =
          ancestralKeys.stream().flatMap(key -> stream(toRawPathSegments(key))).toArray(String[]::new);
      MetadataSuggestionNode rootNode = rootSearchIndex.get(sanitise(ancestralKeySegments[0]));
      if (rootNode != null) {
        List<SuggestionNode> matchesRootToDeepest;
        SuggestionNode startSearchFrom = null;
        if (ancestralKeySegments.length > 1) {
          String[] sanitisedAncestralPathSegments =
              stream(ancestralKeySegments).map(SuggestionNode::sanitise).toArray(String[]::new);
          matchesRootToDeepest = rootNode.findDeepestSuggestionNode(module, modifiableList(rootNode),
              sanitisedAncestralPathSegments, 1);
          if (matchesRootToDeepest != null && matchesRootToDeepest.size() != 0) {
            startSearchFrom = matchesRootToDeepest.get(matchesRootToDeepest.size() - 1);
          }
        } else {
          startSearchFrom = rootNode;
          matchesRootToDeepest = singletonList(rootNode);
        }

        if (startSearchFrom != null) {
          // if search start node is a leaf, this means, the user is looking for values for the given key, lets find the suggestions for values
          if (startSearchFrom.isLeaf(module)) {
            suggestions = startSearchFrom.findValueSuggestionsForPrefix(module, fileType,
                unmodifiableList(matchesRootToDeepest),
                sanitise(truncateIdeaDummyIdentifier(element.getText())), siblingsToExclude);
          } else {
            suggestions = startSearchFrom.findKeySuggestionsForQueryPrefix(module, fileType,
                unmodifiableList(matchesRootToDeepest), matchesRootToDeepest.size(),
                querySegmentPrefixes, 0, siblingsToExclude);
          }
        }
      }
    } else {
      String rootQuerySegmentPrefix = querySegmentPrefixes[0];
      SortedMap<String, MetadataSuggestionNode> topLevelQueryResults =
          rootSearchIndex.prefixMap(rootQuerySegmentPrefix);

      Collection<MetadataSuggestionNode> childNodes;
      int querySegmentPrefixStartIndex;

      // If no results are found at the top level, let dive deeper and find matches
      if (topLevelQueryResults == null || topLevelQueryResults.size() == 0) {
        childNodes = rootSearchIndex.values();
        querySegmentPrefixStartIndex = 0;
      } else {
        childNodes = topLevelQueryResults.values();
        querySegmentPrefixStartIndex = 1;
      }

      Collection<MetadataSuggestionNode> nodesToSearchAgainst;
      if (siblingsToExclude != null) {
        Set<MetadataSuggestionNode> nodesToExclude = siblingsToExclude.stream()
            .flatMap(exclude -> rootSearchIndex.prefixMap(exclude).values().stream()).collect(toSet());
        nodesToSearchAgainst =
            childNodes.stream().filter(node -> !nodesToExclude.contains(node)).collect(toList());
      } else {
        nodesToSearchAgainst = childNodes;
      }

      suggestions = doFindSuggestionsForQueryPrefix(module, fileType, nodesToSearchAgainst,
          querySegmentPrefixes, querySegmentPrefixStartIndex);
    }

    if (suggestions != null) {
      return toLookupElementBuilders(suggestions);
    }
    return null;
  } finally {
    timer.stop();
    debug(() -> log.debug("Search took " + timer.toString()));
  }
}
Example 19
Source Project: hadoop File: InMemoryPlan.java License: Apache License 2.0
@Override
public void archiveCompletedReservations(long tick) {
    // Since we are looking for old reservations, read lock is optimal
    LOG.debug("Running archival at time: {}", tick);
    List<InMemoryReservationAllocation> expiredReservations =
            new ArrayList<InMemoryReservationAllocation>();
    readLock.lock();
    // archive reservations and delete the ones which are beyond
    // the reservation policy "window"
    try {
        long archivalTime = tick - policy.getValidWindow();
        ReservationInterval searchInterval =
                new ReservationInterval(archivalTime, archivalTime);
        SortedMap<ReservationInterval, Set<InMemoryReservationAllocation>> reservations =
                currentReservations.headMap(searchInterval, true);
        if (!reservations.isEmpty()) {
            for (Set<InMemoryReservationAllocation> reservationEntries : reservations.values()) {
                for (InMemoryReservationAllocation reservation : reservationEntries) {
                    if (reservation.getEndTime() <= archivalTime) {
                        expiredReservations.add(reservation);
                    }
                }
            }
        }
    } finally {
        readLock.unlock();
    }
    if (expiredReservations.isEmpty()) {
        return;
    }
    // Need write lock only if there are any reservations to be deleted
    writeLock.lock();
    try {
        for (InMemoryReservationAllocation expiredReservation : expiredReservations) {
            removeReservation(expiredReservation);
        }
    } finally {
        writeLock.unlock();
    }
}
Example 20
Source Project: incubator-batchee File: JSefaCsvMapping.java License: Apache License 2.0
private void calculateHeaders(Class<?> type) {
    SortedMap<Integer, SortedMap<Integer, String>> allHeaders = new TreeMap<Integer, SortedMap<Integer, String>>();
    calculateHeaders(type, allHeaders, 0);

    for (SortedMap<Integer, String> headerMap : allHeaders.values()) {
        for (String header : headerMap.values()) {
            headers.add(header);
        }
    }
}