Java Code Examples for java.util.LinkedHashMap#remove()
The following examples show how to use java.util.LinkedHashMap#remove().
These examples are extracted from open source projects.
You can go to the original project or source file by following the links above each example.
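Before the examples, it helps to recall the two properties most of them lean on: remove(key) returns the previous value (or null when the key was absent), and a LinkedHashMap keeps a predictable iteration order that remove() and put() manipulate. A minimal, self-contained sketch (all names hypothetical):

import java.util.LinkedHashMap;

public class RemoveBasics {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        // remove() returns the previous value, or null when the key is absent,
        // so it doubles as a check-and-remove in one call.
        Integer removed = map.remove("b"); // 2
        Integer missing = map.remove("x"); // null

        // put() on a new key appends to the end of the insertion order, so
        // remove() followed by put() moves an existing entry to the tail.
        map.remove("a");
        map.put("a", 1);
        System.out.println(map.keySet());             // [c, a]
        System.out.println(removed + ", " + missing); // 2, null
    }
}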
Example 1
Source Project: react-native-GPay File: BundleDeltaClient.java License: MIT License
private static int patchDelta(JsonReader jsonReader, LinkedHashMap<Number, byte[]> map)
    throws IOException {
  jsonReader.beginArray();

  int numModules = 0;
  while (jsonReader.hasNext()) {
    jsonReader.beginArray();

    int moduleId = jsonReader.nextInt();
    if (jsonReader.peek() == JsonToken.NULL) {
      jsonReader.skipValue();
      map.remove(moduleId);
    } else {
      map.put(moduleId, jsonReader.nextString().getBytes());
    }

    jsonReader.endArray();
    numModules++;
  }

  jsonReader.endArray();

  return numModules;
}
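Note that the map is keyed by Number while moduleId is an int, so both remove(moduleId) and put(moduleId, ...) rely on autoboxing to Integer. Mixing boxed numeric types silently breaks lookups, because equals() across Number subclasses is false even for the same numeric value — a small hypothetical sketch of the pitfall:

import java.util.LinkedHashMap;

public class BoxedKeyPitfall {
    public static void main(String[] args) {
        LinkedHashMap<Number, byte[]> map = new LinkedHashMap<>();
        map.put(42, new byte[0]); // key is boxed to Integer

        // Integer(42) and Long(42L) are never equal, so this remove is a no-op:
        System.out.println(map.remove(42L)); // null
        System.out.println(map.remove(42));  // removes; prints the byte[] reference
    }
}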
Example 2
Source Project: metron File: SetFunctions.java License: Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public Object apply(List<Object> list) {
  if (list.size() < 1) {
    return null;
  }
  LinkedHashMap<Object, Integer> ret = (LinkedHashMap<Object, Integer>) list.get(0);
  if (ret == null) {
    ret = new LinkedHashMap<>();
  }
  for (int i = 1; i < list.size(); ++i) {
    Object o = list.get(i);
    if (o != null) {
      Integer cnt = ret.get(o);
      if (cnt == null) {
        continue;
      }
      if (cnt == 1) {
        ret.remove(o);
      } else {
        ret.put(o, cnt - 1);
      }
    }
  }
  return ret;
}
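The decrement-or-remove branch is the classic multiset pattern. On Java 8+ the same logic collapses into a single computeIfPresent call, which drops the mapping when the remapping function returns null — a minimal, hypothetical sketch:

import java.util.LinkedHashMap;

public class MultisetDecrement {
    public static void main(String[] args) {
        LinkedHashMap<Object, Integer> counts = new LinkedHashMap<>();
        counts.put("a", 2);
        counts.put("b", 1);

        // Decrement the count; returning null removes the entry entirely.
        counts.computeIfPresent("a", (k, c) -> c == 1 ? null : c - 1); // a -> 1
        counts.computeIfPresent("b", (k, c) -> c == 1 ? null : c - 1); // b removed

        System.out.println(counts); // {a=1}
    }
}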
Example 3
Source Project: elk-reasoner File: NQEvictor.java License: Apache License 2.0
@Override
public void add(final E element) {
  // If element is in some queue but the last one, upgrade it.
  for (int i = 0; i < elements_.size() - 1; i++) {
    final LinkedHashMap<E, Boolean> iThQueue = elements_.get(i);
    if (iThQueue.remove(element) != null) {
      elements_.get(i + 1).put(element, true);
      return;
    }
  }
  // else if element is in the last queue, reinsert it.
  final LinkedHashMap<E, Boolean> lastQueue = elements_.get(elements_.size() - 1);
  if (lastQueue.containsKey(element)) {
    lastQueue.put(element, true);
    return;
  }
  // else put it on the first queue.
  elements_.get(0).put(element, true);
}
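The upgrade loop uses iThQueue.remove(element) != null as a combined membership test and removal in a single lookup. That idiom only works because these queues never store null values; with nullable values a containsKey check is required. A small hypothetical sketch:

import java.util.LinkedHashMap;

public class RemoveAsMembershipTest {
    public static void main(String[] args) {
        LinkedHashMap<String, Boolean> queue = new LinkedHashMap<>();
        queue.put("x", true);

        // One lookup instead of containsKey(...) followed by remove(...).
        if (queue.remove("x") != null) {
            System.out.println("x was present and has been removed");
        }

        // Caveat: a null value makes the idiom indistinguishable from "absent".
        queue.put("y", null);
        System.out.println(queue.remove("y")); // null, even though "y" was present
    }
}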
Example 4
Source Project: vespa File: DimensionCache.java License: Apache License 2.0
private void updatePersistentData(Bucket toDelete) {
  if (toDelete == null) {
    return;
  }
  long millis = toDelete.gotTimeStamps ? toDelete.toMillis : System.currentTimeMillis();
  for (Map.Entry<String, List<Entry<Point, UntypedMetric>>> metric
      : toDelete.getValuesByMetricName().entrySet()) {
    LinkedHashMap<Point, TimeStampedMetric> cachedPoints = getCachedMetric(metric.getKey());
    for (Entry<Point, UntypedMetric> newestInterval : metric.getValue()) {
      // overwriting an existing entry does not update the order in the map
      cachedPoints.remove(newestInterval.getKey());
      TimeStampedMetric toInsert = new TimeStampedMetric(millis, newestInterval.getValue());
      cachedPoints.put(newestInterval.getKey(), toInsert);
    }
  }
}
Example 5
Source Project: okapi File: ModuleManager.java License: Apache License 2.0
private boolean deleteCheckDep(String id, Handler<ExtendedAsyncResult<Void>> fut,
    LinkedHashMap<String, ModuleDescriptor> mods) {
  if (!mods.containsKey(id)) {
    fut.handle(new Failure<>(ErrorType.NOT_FOUND, messages.getMessage("10207")));
    return true;
  }
  mods.remove(id);
  String res = DepResolution.checkAllDependencies(mods);
  if (!res.isEmpty()) {
    fut.handle(new Failure<>(ErrorType.USER, messages.getMessage("10208", id, res)));
    return true;
  } else {
    return false;
  }
}
Example 6
Source Project: mycore File: MCRSolrProxyServlet.java License: GNU General Public License v3.0
/**
 * Redirects to query handler by using xeditor input document.
 */
private static void redirectToQueryHandler(Document input, HttpServletResponse resp)
    throws IOException, TransformerException, SAXException {
  LinkedHashMap<String, String[]> parameters = new LinkedHashMap<>();
  List<Element> children = input.getRootElement().getChildren();
  for (Element param : children) {
    String attribute = param.getAttributeValue("name");
    if (attribute != null) {
      parameters.put(attribute, new String[] { param.getTextTrim() });
    }
  }
  String queryHandlerPath = parameters.get(QUERY_HANDLER_PAR_NAME)[0];
  parameters.remove("qt");
  doRedirectToQueryHandler(resp, queryHandlerPath, parameters);
}
Example 7
Source Project: ctsms File: ArgsUriPart.java License: GNU Lesser General Public License v2.1
public LinkedHashSet<NamedParameter> getNamedParameters(String resource, boolean excludeOverriden)
    throws Exception {
  LinkedHashMap<String, NamedParameter> namedParameters = new LinkedHashMap<String, NamedParameter>();
  Method method = getAnnotatedMethod(resource);
  if (method != null) {
    MethodParameterNames annotation = method.getAnnotation(MethodParameterNames.class);
    String[] parameterNames = null;
    if (annotation != null) {
      parameterNames = annotation.value();
    }
    Class<?>[] parameterTypes = method.getParameterTypes();
    if (parameterNames != null && parameterTypes != null) {
      for (int i = 0; i < parameterNames.length; i++) {
        namedParameters.put(parameterNames[i], new NamedParameter(parameterNames[i], parameterTypes[i]));
      }
    }
  }
  Iterator<Entry<String, Object>> it = defaults.entrySet().iterator();
  while (it.hasNext()) {
    Entry<String, Object> defaultParameter = it.next();
    if (!namedParameters.containsKey(defaultParameter.getKey())) {
      namedParameters.put(defaultParameter.getKey(),
          new NamedParameter(defaultParameter.getKey(), defaultParameter.getValue().getClass()));
    }
  }
  it = overrides.entrySet().iterator();
  while (it.hasNext()) {
    Entry<String, Object> overrideParameter = it.next();
    namedParameters.put(overrideParameter.getKey(),
        new NamedParameter(overrideParameter.getKey(), overrideParameter.getValue().getClass()));
  }
  if (excludeOverriden) {
    it = overrides.entrySet().iterator();
    while (it.hasNext()) {
      namedParameters.remove(it.next().getKey());
    }
  }
  return new LinkedHashSet<NamedParameter>(namedParameters.values());
}
Example 8
Source Project: sagacity-sqltoy File: PageOptimizeUtils.java License: Apache License 2.0
public static Long getPageTotalCount(final SqlToyConfig sqlToyConfig, String conditionsKey) {
  LinkedHashMap<String, Object[]> map = pageOptimizeCache.get(sqlToyConfig.getId());
  // first execution of this sql: nothing cached yet
  if (null == map) {
    return null;
  }
  Object[] values = map.get(conditionsKey);
  // null means these conditions are queried for the first time, or all entries expired and were removed
  if (null == values) {
    return null;
  }
  // total record count
  Long totalCount = (Long) values[1];
  // expiration time
  long expireTime = (Long) values[0];
  long nowTime = System.currentTimeMillis();
  // remove first (to adjust the ordering)
  map.remove(conditionsKey);
  // on expiry, return null to force a fresh query; no scheduled expiry checking is needed:
  // 1. the total entry count is capped; the oldest entries sit at the front and are evicted first
  // 2. every query with the same conditions checks for expiry itself and re-executes when expired
  if (nowTime >= expireTime) {
    return null;
  }
  // reset the expiration time
  values[0] = nowTime + sqlToyConfig.getPageAliveSeconds() * 1000;
  // re-insert so the entry moves to the tail of the LinkedHashMap
  map.put(conditionsKey, values);
  return totalCount;
}
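The remove-then-put dance moves a cache hit to the tail of the insertion order. A LinkedHashMap constructed in access-order mode performs that reordering automatically on every get(), which can replace the manual shuffle — a minimal sketch under that assumption:

import java.util.LinkedHashMap;

public class AccessOrderDemo {
    public static void main(String[] args) {
        // The third constructor argument 'true' switches to access order.
        LinkedHashMap<String, Object[]> cache = new LinkedHashMap<>(16, 0.75f, true);
        cache.put("a", new Object[] { 1L });
        cache.put("b", new Object[] { 2L });

        cache.get("a"); // touching "a" moves it to the tail; no remove()/put() needed

        System.out.println(cache.keySet()); // [b, a]
    }
}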
Example 9
Source Project: sagacity-sqltoy File: PageOptimizeUtils.java License: Apache License 2.0
public static void registPageTotalCount(final SqlToyConfig sqlToyConfig, String pageQueryKey,
    Long totalCount) {
  // current time
  long nowTime = System.currentTimeMillis();
  long expireTime = nowTime + sqlToyConfig.getPageAliveSeconds() * 1000;
  // number of distinct query-condition entries retained per paged query sql
  int aliveMax = sqlToyConfig.getPageAliveMax();
  LinkedHashMap<String, Object[]> map = pageOptimizeCache.get(sqlToyConfig.getId());
  if (null == map) {
    map = new LinkedHashMap<String, Object[]>(sqlToyConfig.getPageAliveMax());
    map.put(pageQueryKey, new Object[] { expireTime, totalCount });
    pageOptimizeCache.put(sqlToyConfig.getId(), map);
  } else {
    map.put(pageQueryKey, new Object[] { expireTime, totalCount });
    // size exceeds the threshold: evict the earliest entries
    while (map.size() > aliveMax) {
      map.remove(map.keySet().iterator().next());
    }
    // purge expired entries
    Iterator<Map.Entry<String, Object[]>> iter = map.entrySet().iterator();
    Map.Entry<String, Object[]> entry;
    while (iter.hasNext()) {
      entry = iter.next();
      // current time is already past the expiration time
      if (nowTime >= ((Long) entry.getValue()[0])) {
        iter.remove();
      } else {
        break;
      }
    }
  }
}
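Evicting map.keySet().iterator().next() removes the head of the insertion order, i.e. the oldest entry. When the cap is fixed, overriding removeEldestEntry lets the map evict on its own during put() — a minimal sketch, with the capacity value assumed:

import java.util.LinkedHashMap;
import java.util.Map;

public class BoundedCache<K, V> extends LinkedHashMap<K, V> {
    private final int maxEntries;

    public BoundedCache(int maxEntries) {
        this.maxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Called after each put(); returning true drops the head entry.
        return size() > maxEntries;
    }

    public static void main(String[] args) {
        BoundedCache<String, Integer> cache = new BoundedCache<>(2);
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3); // evicts "a" automatically
        System.out.println(cache.keySet()); // [b, c]
    }
}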
Example 10
Source Project: metasfresh-webui-api-legacy File: ImmutableRowsIndex.java License: GNU General Public License v3.0
public ImmutableRowsIndex<T> replacingRows(
    @NonNull final DocumentIdsSelection oldRowIds,
    @NonNull final List<T> newRows) {
  final LinkedHashMap<DocumentId, T> newRowsToAdd = newRows.stream()
      .collect(GuavaCollectors.toMapByKey(LinkedHashMap::new, IViewRow::getId));

  final ArrayList<T> resultRows = new ArrayList<>(rowIds.size());
  for (final DocumentId rowId : this.rowIds) {
    if (oldRowIds.contains(rowId)) {
      final T newRowToAdd = newRowsToAdd.remove(rowId);
      if (newRowToAdd != null) {
        resultRows.add(newRowToAdd);
      }
    } else {
      resultRows.add(rowsById.get(rowId));
    }
  }

  resultRows.addAll(newRowsToAdd.values());

  return new ImmutableRowsIndex<>(this.initialRowIds, resultRows);
}
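replacingRows uses newRowsToAdd.remove(rowId) to consume matches while walking the existing order; whatever survives in the map afterwards is exactly the set of genuinely new rows, appended at the end. A stripped-down, hypothetical sketch of that consume-then-append pattern (types simplified; rows without a replacement are kept here, unlike the original):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class ConsumeThenAppend {
    public static void main(String[] args) {
        List<String> existingOrder = List.of("r1", "r2", "r3");

        LinkedHashMap<String, String> replacements = new LinkedHashMap<>();
        replacements.put("r2", "r2-new");
        replacements.put("r9", "r9-new"); // no existing slot -> appended later

        List<String> result = new ArrayList<>();
        for (String id : existingOrder) {
            String replacement = replacements.remove(id); // consume if matched
            result.add(replacement != null ? replacement : id);
        }
        result.addAll(replacements.values()); // leftovers are the truly new rows

        System.out.println(result); // [r1, r2-new, r3, r9-new]
    }
}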
Example 11
Source Project: lucene-solr File: CommandOperation.java License: Apache License 2.0
/**
 * Get all the values from the metadata for the command
 * without the specified keys
 */
public Map<String, Object> getValuesExcluding(String... keys) {
  getMapVal(null); // just to verify the type is Map
  if (hasError()) return emptyMap();
  @SuppressWarnings("unchecked")
  LinkedHashMap<String, Object> cp = new LinkedHashMap<>((Map<String, Object>) commandData);
  if (keys == null) return cp;
  for (String key : keys) {
    cp.remove(key);
  }
  return cp;
}
Example 12
Source Project: lams File: BatchFetchQueue.java License: GNU General Public License v2.0
/**
 * After a collection was initialized or evicted, we don't
 * need to batch fetch it anymore, remove it from the queue
 * if necessary
 */
public void removeBatchLoadableCollection(CollectionEntry ce) {
  LinkedHashMap<CollectionEntry, PersistentCollection> map =
      batchLoadableCollections.get(ce.getLoadedPersister().getRole());
  if (map != null) {
    map.remove(ce);
  }
}
Example 13
Source Project: dhis2-core File: NotificationMap.java License: BSD 3-Clause "New" or "Revised" License
public void addSummary(JobConfiguration jobConfiguration, Object summary) {
  LinkedHashMap<String, Object> summaries = summariesWithType.get(jobConfiguration.getJobType());

  if (summaries.size() >= MAX_POOL_TYPE_SIZE) {
    String key = (String) summaries.keySet().toArray()[0];
    summaries.remove(key);
  }

  summaries.put(jobConfiguration.getUid(), summary);
}
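keySet().toArray()[0] copies every key just to read the first one. The iterator reaches the same oldest entry without the array allocation — a minimal, hypothetical sketch:

import java.util.LinkedHashMap;

public class OldestKey {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> summaries = new LinkedHashMap<>();
        summaries.put("first", 1);
        summaries.put("second", 2);

        // O(1) access to the head of the insertion order, no array copy.
        String oldest = summaries.keySet().iterator().next();
        summaries.remove(oldest); // evicts "first"

        System.out.println(summaries.keySet()); // [second]
    }
}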
Example 14
Source Project: Dashchan File: CacheManager.java License: Apache License 2.0
private boolean isFileExistsInCache(File file, String fileName, int type) {
  if (waitCacheSync()) {
    return false;
  }
  LinkedHashMap<String, CacheItem> cacheItems = getCacheItems(type);
  synchronized (cacheItems) {
    CacheItem cacheItem = cacheItems.get(fileName.toLowerCase(Locale.US));
    if (cacheItem != null && !file.exists()) {
      cacheItems.remove(cacheItem.nameLc);
      modifyCacheSize(type, -cacheItem.length);
      cacheItem = null;
    }
    return cacheItem != null;
  }
}
Example 15
Source Project: birt File: ReportPlugin.java License: Eclipse Public License 1.0
public static LinkedHashMap<String, ResourceFilter> getFilterMap(boolean showEmptyFolderFilter) {
  if (!showEmptyFolderFilter) {
    LinkedHashMap map = (LinkedHashMap) filterMap.clone();
    map.remove(ResourceFilter.FILTER_EMPTY_FOLDERS);
    return map;
  } else {
    return filterMap;
  }
}
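clone() on a LinkedHashMap forces the raw-type cast seen above. The copy constructor produces a typed shallow copy with the same iteration order and no unchecked warnings — a minimal, hypothetical sketch:

import java.util.LinkedHashMap;

public class TypedCopy {
    public static void main(String[] args) {
        LinkedHashMap<String, String> filterMap = new LinkedHashMap<>();
        filterMap.put("emptyFolders", "filter1");
        filterMap.put("cvsFiles", "filter2");

        // Typed shallow copy; insertion order is preserved.
        LinkedHashMap<String, String> copy = new LinkedHashMap<>(filterMap);
        copy.remove("emptyFolders");

        System.out.println(copy.keySet());      // [cvsFiles]
        System.out.println(filterMap.keySet()); // [emptyFolders, cvsFiles] -- original untouched
    }
}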
Example 16
Source Project: uncode-dal-all File: AbstractTemplate.java License: GNU General Public License v2.0
public String updateByCriteria(Table model) {
  model.resetQueryConditions();
  SQL sql = new SQL();
  sql.UPDATE(ColumnWrapperUtils.wrap(model.getTableName()));
  LinkedHashMap<String, Object> params = model.getParams();
  if (params != null) {
    Iterator<String> iter = params.keySet().iterator();
    while (iter.hasNext()) {
      String key = iter.next();
      if (!model.getPrimaryKey().getFields().contains(key.toLowerCase())) {
        if (null == params.get(key)) {
          sql.SET(key + " = null");
        } else {
          String vstr = String.valueOf(params.get(key)).trim();
          if (vstr.startsWith("=")) {
            sql.SET(ColumnWrapperUtils.wrap(key) + params.get(key));
            // remove through the iterator; calling params.remove(key) while iterating
            // the keySet would throw ConcurrentModificationException on the next step
            iter.remove();
          } else {
            sql.SET(buildSingleParamSql(FieldSqlGenerator.PARAM_PREFIX, key, model, "="));
          }
        }
      }
    }
  }
  if (model.hasVersion()) {
    sql.SET(VersionWrapperUtils.wrapSetSql(model.getVersion()));
  }
  QueryCriteria queryCriteria = model.getQueryCriteria();
  String indexName = null;
  if (queryCriteria.getOredCriteria() != null && queryCriteria.getOredCriteria().size() > 0) {
    indexName = caculationQueryCriteria(sql, model);
  }
  if (model.hasVersion()) {
    Object value = queryCriteria.getVersion();
    if (null == value) {
      throw new DalSqlException("Version is request.");
    }
    sql.AND();
    sql.WHERE(VersionWrapperUtils.wrapWhereSql(model.getVersion(), value));
  }
  model.resetQueryCriteria();
  LOG.debug(sql.toString());
  if (LOG.isDebugEnabled()) {
    if (StringUtils.isNotEmpty(indexName)) {
      LOG.debug("index hit: " + indexName + model.getContent().getIndexs().get(indexName));
    } else {
      LOG.debug("index hit: none");
    }
  }
  return sql.toString();
}
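The original source called params.remove(key) from inside the keySet loop, which throws ConcurrentModificationException on the following iteration step; Iterator.remove() is the safe form, as the comment in the listing above notes. A standalone sketch of the difference:

import java.util.Iterator;
import java.util.LinkedHashMap;

public class SafeRemoval {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> params = new LinkedHashMap<>();
        params.put("a", 1);
        params.put("b", 2);
        params.put("c", 3);

        Iterator<String> iter = params.keySet().iterator();
        while (iter.hasNext()) {
            String key = iter.next();
            if ("b".equals(key)) {
                iter.remove();         // safe: keeps the iterator's modCount in sync
                // params.remove(key); // would throw ConcurrentModificationException on next()
            }
        }
        System.out.println(params.keySet()); // [a, c]
    }
}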
Example 17
Source Project: chipster File: SocketFactoryCache.java License: MIT License
private void removeLast(LinkedHashMap<?, ?> map) {
  map.remove(map.keySet().toArray()[map.size() - 1]);
}
Example 18
Source Project: systemsgenetics File: EncodeMultipleTfbsOverlap.java License: GNU General Public License v3.0
private static LinkedHashMap<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> readMultipleTfbsInformation(String inputFolderTfbsData) throws IOException {
  LinkedHashMap<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> data = new LinkedHashMap<>();
  File file = new File(inputFolderTfbsData);
  File[] files = file.listFiles();
  ArrayList<String> vecFiles = new ArrayList<>();
  for (File f : files) {
    // System.out.println(f.getAbsolutePath());
    vecFiles.add(f.getAbsolutePath());
  }
  for (String fileToRead : vecFiles) {
    TextFile reader = new TextFile(fileToRead, TextFile.R);
    String[] storingInformation = fileToRead.split("_");
    // String cellLine = storingInformation[1].replace("TFBS\\", "");
    String transcriptionFactor = storingInformation[2].replace(".narrowPeak", "");
    if (storingInformation.length > 4) {
      for (int i = 3; i < (storingInformation.length - 1); ++i) {
        transcriptionFactor = transcriptionFactor + "_" + storingInformation[i].replace(".narrowPeak", "");
      }
    }
    String row;
    while ((row = reader.readLine()) != null) {
      String[] parts = StringUtils.split(row, '\t');
      if (!data.containsKey(transcriptionFactor)) {
        data.put(transcriptionFactor, new HashMap<String, ArrayList<EncodeNarrowPeak>>());
      }
      if (!data.get(transcriptionFactor).containsKey(parts[0])) {
        data.get(transcriptionFactor).put(parts[0], new ArrayList<EncodeNarrowPeak>());
      }
      data.get(transcriptionFactor).get(parts[0]).add(new EncodeNarrowPeak(parts, fileToRead));
    }
    reader.close();
  }
  ArrayList<String> cleanList = new ArrayList<>();
  for (Entry<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> tfInformation : data.entrySet()) {
    System.out.println("Transcription factor: " + tfInformation.getKey());
    int counter = 0;
    for (Entry<String, ArrayList<EncodeNarrowPeak>> tfEntry : tfInformation.getValue().entrySet()) {
      Collections.sort(tfEntry.getValue());
      counter += tfEntry.getValue().size();
    }
    System.out.println("\tcontacts: " + counter);
    // remove all with less than 750 contacts
    // if (counter < 750) {
    //   cleanList.add(tfInformation.getKey());
    // }
  }
  for (String k : cleanList) {
    data.remove(k);
  }
  return data;
}
Example 19
Source Project: intellij-plugin-v4 File: GenerateLexerRulesForLiteralsAction.java License: BSD 3-Clause "New" or "Revised" License
@Override
public void actionPerformed(AnActionEvent e) {
  LOG.info("actionPerformed GenerateLexerRulesForLiteralsAction");
  final Project project = e.getProject();
  final PsiFile psiFile = e.getData(LangDataKeys.PSI_FILE);
  if (psiFile == null) {
    return;
  }
  String inputText = psiFile.getText();
  ParsingResult results = ParsingUtils.parseANTLRGrammar(inputText);
  final Parser parser = results.parser;
  final ParseTree tree = results.tree;

  Collection<ParseTree> literalNodes = XPath.findAll(tree, "//ruleBlock//STRING_LITERAL", parser);
  LinkedHashMap<String, String> lexerRules = new LinkedHashMap<String, String>();
  for (ParseTree node : literalNodes) {
    String literal = node.getText();
    String ruleText = String.format("%s : %s ;",
        RefactorUtils.getLexerRuleNameFromLiteral(literal), literal);
    lexerRules.put(literal, ruleText);
  }

  // remove those already defined
  String lexerRulesXPath = "//lexerRule";
  String treePattern = "<TOKEN_REF> : <STRING_LITERAL>;";
  ParseTreePattern p = parser.compileParseTreePattern(treePattern, ANTLRv4Parser.RULE_lexerRule);
  List<ParseTreeMatch> matches = p.findAll(tree, lexerRulesXPath);
  for (ParseTreeMatch match : matches) {
    ParseTree lit = match.get("STRING_LITERAL");
    if (lexerRules.containsKey(lit.getText())) { // we have a rule for this literal already
      lexerRules.remove(lit.getText());
    }
  }

  final LiteralChooser chooser =
      new LiteralChooser(project, new ArrayList<String>(lexerRules.values()));
  chooser.show();
  List<String> selectedElements = chooser.getSelectedElements();
  // chooser disposed automatically.

  final Editor editor = e.getData(PlatformDataKeys.EDITOR);
  final Document doc = editor.getDocument();
  final CommonTokenStream tokens = (CommonTokenStream) parser.getTokenStream();
  // System.out.println(selectedElements);
  if (selectedElements != null) {
    String text = doc.getText();
    int cursorOffset = editor.getCaretModel().getOffset();
    // make sure it's not in middle of rule; put between.
    // System.out.println("offset "+cursorOffset);
    Collection<ParseTree> allRuleNodes = XPath.findAll(tree, "//ruleSpec", parser);
    for (ParseTree r : allRuleNodes) {
      Interval extent = r.getSourceInterval(); // token indexes
      int start = tokens.get(extent.a).getStartIndex();
      int stop = tokens.get(extent.b).getStopIndex();
      // System.out.println("rule "+r.getChild(0).getText()+": "+start+".."+stop);
      if (cursorOffset < start) {
        // before this rule, so must be between previous and this one
        cursorOffset = start; // put right before this rule
        break;
      } else if (cursorOffset >= start && cursorOffset <= stop) {
        // cursor in this rule
        cursorOffset = stop + 2; // put right after this rule (after newline)
        if (cursorOffset >= text.length()) {
          cursorOffset = text.length();
        }
        break;
      }
    }
    String allRules = Utils.join(selectedElements.iterator(), "\n");
    text = text.substring(0, cursorOffset) +
        "\n" + allRules + "\n" +
        text.substring(cursorOffset, text.length());
    MyPsiUtils.replacePsiFileFromText(project, psiFile, text);
  }
}
Example 20
Source Project: rocketmq_trans_message File: TransactionRecordFlush2DBService.java License: Apache License 2.0
private void doFlushDB(boolean shutdown) {
  DispatchRequestCollections requests = dispatchRequestBufferQueue.poll();
  if (requests == null) {
    return;
  }
  if (!shutdown) {
    putEmptyRequestList();
  }
  boolean addSuccess = false, removeSuccess = false;
  LinkedHashMap<Long, TransactionRecord> prepareTrs = null;
  LinkedHashMap<Long, Void> confirmTrs = null;
  while (true) {
    if (requests.latch.get() != requests.requestlist.size() && requests.latch.get() > 0) {
      continue;
    }
    requests.latch.set(Integer.MIN_VALUE);
    if (requests.requestlist.size() == 0) {
      break;
    }
    try {
      long transactionOffset = -1L;
      // process the buffered requests
      if (prepareTrs == null && confirmTrs == null) {
        prepareTrs = new LinkedHashMap<Long, TransactionRecord>();
        confirmTrs = new LinkedHashMap<Long, Void>();
        for (DispatchRequest request : requests.requestlist) {
          final int tranType = MessageSysFlag.getTransactionValue(request.getSysFlag());
          switch (tranType) {
            case MessageSysFlag.TRANSACTION_NOT_TYPE:
              break;
            case MessageSysFlag.TRANSACTION_PREPARED_TYPE:
              if (this.maxTransOffset.get() < request.getCommitLogOffset()) {
                prepareTrs.put(request.getCommitLogOffset(),
                    new TransactionRecord(request.getCommitLogOffset(),
                        request.getCheckImmunityTimeOutTimestamp(),
                        request.getMsgSize(), request.getProducerGroup()));
                this.maxTransOffset.set(request.getCommitLogOffset());
              } else {
                log.info("[PREPARED] request ignore offset =" + request.getCommitLogOffset());
              }
              if (request.getPreparedTransactionOffset() == 0L) {
                break;
              }
              // intentional fall-through when the prepared offset is non-zero
            case MessageSysFlag.TRANSACTION_COMMIT_TYPE:
            case MessageSysFlag.TRANSACTION_ROLLBACK_TYPE:
              if (this.maxTransOffset.get() < request.getCommitLogOffset()) {
                if (prepareTrs.containsKey(request.getPreparedTransactionOffset())) {
                  prepareTrs.remove(request.getPreparedTransactionOffset());
                } else {
                  confirmTrs.put(request.getPreparedTransactionOffset(), null);
                }
              } else {
                log.info("[COMMIT] request ignore offset =" + request.getCommitLogOffset()
                    + ",isCommitMessge=" + (tranType == MessageSysFlag.TRANSACTION_COMMIT_TYPE));
              }
              break;
          }
        }
        transactionOffset = requests.requestlist.get(requests.requestlist.size() - 1).getCommitLogOffset();
      }
      long startTime = System.currentTimeMillis();
      addSuccess = addSuccess || transactionStore.parpare(new ArrayList<>(prepareTrs.values()));
      if (addSuccess && (removeSuccess = transactionStore.confirm(new ArrayList<>(confirmTrs.keySet())))) {
        log.info("pull TransactionRecord consume {}ms ,size={},realParpareSize={},realConfirmSize:{}",
            (System.currentTimeMillis() - startTime), requests.requestlist.size(),
            prepareTrs.size(), confirmTrs.size());
        // update the latest offset
        if (transactionOffset > 0) {
          transactionOffsetConifgService.putOffset(transactionOffset);
        }
        break;
      }
    } catch (Throwable e) {
      log.error("transactionStore error:", e);
      ThreadUtils.sleep(2000);
    } finally {
      if (addSuccess && removeSuccess) {
        flowController.release(requests.requestlist.size());
      }
    }
  }
}