Java Code Examples for java.util.Map#values()

The following examples show how to use java.util.Map#values(). Each example is taken from an open-source project; the source file, project name, and license are listed above the code.
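Before the project examples, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any project below) of what Map#values() returns: a collection view backed by the map. Iterating the view reads the current entries, and removing an element through the view removes the matching entry from the map; elements cannot be added through it.

import java.util.HashMap;
import java.util.Map;

public class MapValuesDemo {
    public static void main(String[] args) {
        Map<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("map", 3);
        wordCounts.put("values", 2);
        wordCounts.put("view", 1);

        // Iterate the values view to sum the counts.
        int total = 0;
        for (int count : wordCounts.values()) {
            total += count;
        }
        System.out.println("total = " + total); // total = 6

        // Removing from the view removes the corresponding entry from the map.
        wordCounts.values().remove(1);
        System.out.println(wordCounts.containsKey("view")); // false
    }
}
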
Example 1
Source File: Pf4jServiceLoaderIntegrationTestable.java    From exonum-java-binding with Apache License 2.0
@Test
void loadsUnloadsAll(@TempDir Path tmp) throws Exception {
  Map<String, Path> pluginLocationsById = ImmutableMap.of(PLUGIN_ID, tmp.resolve("p1.jar"),
      PLUGIN_ID_2, tmp.resolve("p2.jar"));

  // Prepare the artifact files
  for (Entry<String, Path> e : pluginLocationsById.entrySet()) {
    anArtifact()
        .setPluginId(e.getKey())
        .writeTo(e.getValue());
  }

  // Load the plugins
  for (Path location : pluginLocationsById.values()) {
    serviceLoader.loadService(location);
  }

  // Unload all
  serviceLoader.unloadAll();

  // Verify each is unloaded
  Set<String> pluginIds = pluginLocationsById.keySet();
  pluginIds.forEach(this::verifyUnloaded);
}
 
Example 2
Source File: MapModelAdaptor.java    From codebuff with BSD 2-Clause "Simplified" License
@Override
public Object getProperty(Interpreter interp, ST self, Object o, Object property, String propertyName) throws STNoSuchPropertyException {
    Object value;
    Map<?, ?> map = (Map<?, ?>)o;
    if ( property==null ) value = map.get(STGroup.DEFAULT_KEY);
    else if ( property.equals("keys") ) value = map.keySet();
    else if ( property.equals("values") ) value = map.values();
    else if ( map.containsKey(property) ) value = map.get(property);
    else if ( map.containsKey(propertyName) ) value = map.get(propertyName); // can't find the key; try the toString version
    else value = map.get(STGroup.DEFAULT_KEY); // not found, use default
    if ( value==STGroup.DICT_KEY ) {
        value = property;
    }
    return value;
}
 
Example 3
Source File: ResourceAndSimpleDSTransactionManager.java    From easyooo-framework with Apache License 2.0
/**
 * Triggers a rollback on every registered transaction resource.
 * @param status the current transaction status
 */
protected void triggerRollback(TransactionStatus status){
	Map<Object, TransactionResource> trs = TransactionResourceManager.getResourceMap();
	List<Throwable> rollbackException = new ArrayList<>();
	List<TransactionResource> res = new ArrayList<>();
	for (TransactionResource tr : trs.values()) {
		try {
			tr.rollback();
		} catch (Throwable e) {
			rollbackException.add(e);
			res.add(tr);
		}
	}
	
	if(!rollbackException.isEmpty()){
		logger.info("Transaction resource rollback failed for " + res.size() + " resource(s).");
	}
}
 
Example 4
Source File: IdentityFactory.java    From o2oa with GNU Affero General Public License v3.0
public List<Identity> listMajorOfPerson(Business business, List<String> people) throws Exception {
	List<Identity> list = new ArrayList<>();
	List<Identity> identities = business.entityManagerContainer().listIn(Identity.class, Identity.person_FIELDNAME,
			people);
	Map<String, List<Identity>> map = identities.stream().collect(Collectors.groupingBy(Identity::getPerson));
	for (List<Identity> os : map.values()) {
		Optional<Identity> optional = os.stream().filter(o -> BooleanUtils.isTrue(o.getMajor())).findFirst();
		if (optional.isPresent()) {
			list.add(optional.get());
		} else {
			list.add(os.stream()
					.sorted(Comparator.comparing(Identity::getUnitLevel, Comparator.nullsLast(Integer::compareTo))
							.thenComparing(Identity::getUpdateTime, Comparator.nullsLast(Date::compareTo)))
					.findFirst().get());
		}
	}
	return list;
}
 
Example 5
Source File: MvcInterceptorManager.java    From onetwo with Apache License 2.0
@Override
public void onHandlerMethodsInitialized(Map<RequestMappingInfo, HandlerMethod> handlerMethods) {
	for(HandlerMethod hm : handlerMethods.values()){
		List<? extends MvcInterceptor> interceptors = null;
		try {
			interceptors = findMvcInterceptors(hm);
		} catch (Exception e) {
			throw new BaseException("find MvcInterceptor error for HandlerMethod: " + hm.getMethod(), e);
		}
		if(!interceptors.isEmpty()){
			AnnotationAwareOrderComparator.sort(interceptors);
			HandlerMethodInterceptorMeta meta = new HandlerMethodInterceptorMeta(hm, interceptors);
			interceptorMetaCaces.put(hm.getMethod(), meta);
			if(log.isDebugEnabled()){
				log.debug("MvcInterceptor: {} -> {}", hm.getMethod(), interceptors);
			}
		}
	}
}
 
Example 6
Source File: ConsumeSubscriptionServiceImpl.java    From DDMQ with Apache License 2.0
private void removeCProxy(String host, List<ConsumeSubscription> subList) throws Exception {
    String ipPort = HostUtils.getIpPortFromHost(host, ZKV4ConfigServiceImpl.DEFAULT_CPROXY_PORT);
    Set<Long> clusterIds = Sets.newHashSet();
    for (ConsumeSubscription sub : subList) {
        if (sub.getConsumeSubscriptionConfig() != null && MapUtils.isNotEmpty(sub.getConsumeSubscriptionConfig().getProxies())) {
            Map<String, Set<String>> proxyMap = sub.getConsumeSubscriptionConfig().getProxies();
            for (Set<String> ipLists : proxyMap.values()) {
                if (ipLists.contains(ipPort)) {
                    ipLists.remove(ipPort);
                    clusterIds.add(sub.getClusterId());
                    updateByPrimaryKey(sub);

                    pushV4ZkInfo(sub.getGroupId(), null);
                    LOGGER.info("remove cproxy {} success, subId={}, group={}, topic={}, cluster={}", ipPort, sub.getId(), sub.getGroupName(), sub.getTopicName(), sub.getClusterName());
                }
            }
        }
    }
    if (CollectionUtils.isNotEmpty(clusterIds)) {
        zkv4ConfigService.updateCProxyConfigByClusterId("removeCProxy", clusterIds);
    }
}
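A note on Example 6: removing ipPort from the inner Set is safe while iterating proxyMap.values(), because only the contained sets are mutated, not the map itself; structurally modifying the map during that loop would throw ConcurrentModificationException with the standard map implementations. The sketch below (hypothetical cluster names and addresses) illustrates the distinction.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ValuesViewMutationDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> proxies = new HashMap<>();
        proxies.put("clusterA", new HashSet<>(Set.of("10.0.0.1:9092", "10.0.0.2:9092")));
        proxies.put("clusterB", new HashSet<>(Set.of("10.0.0.2:9092")));

        // Safe: only the inner sets are mutated; the map's structure is untouched.
        for (Set<String> ipPorts : proxies.values()) {
            ipPorts.remove("10.0.0.2:9092");
        }
        System.out.println(proxies); // both keys remain, with the address removed

        // Unsafe: structurally modifying the map while iterating its values view
        // would throw ConcurrentModificationException, e.g.:
        // for (Set<String> ipPorts : proxies.values()) {
        //     if (ipPorts.isEmpty()) {
        //         proxies.remove("clusterB");
        //     }
        // }
    }
}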
 
Example 7
Source File: DbSqlSession.java    From activiti6-boot2 with Apache License 2.0
public void determineUpdatedObjects() {
  updatedObjects = new ArrayList<Entity>();
  Map<Class<?>, Map<String, CachedEntity>> cachedObjects = entityCache.getAllCachedEntities();
  for (Class<?> clazz : cachedObjects.keySet()) {

    Map<String, CachedEntity> classCache = cachedObjects.get(clazz);
    for (CachedEntity cachedObject : classCache.values()) {

      Entity cachedEntity = cachedObject.getEntity();
      
      // Executions are stored as a hierarchical tree, and updates are important to execute
      // even when the execution is deleted, as they can change the parent-child relationships.
      // For the other entities, this is not applicable and an update can be discarded when an update follows.
      
      if (!isEntityInserted(cachedEntity) && 
          (ExecutionEntity.class.isAssignableFrom(cachedEntity.getClass()) || !isEntityToBeDeleted(cachedEntity)) &&
          cachedObject.hasChanged() 
          ) {
        updatedObjects.add(cachedEntity);
      }
    }
  }
}
 
Example 8
Source File: TestDataNodeHotSwapVolumes.java    From hadoop with Apache License 2.0
@Test(timeout=60000)
public void testAddVolumesDuringWrite()
    throws IOException, InterruptedException, TimeoutException,
    ReconfigurationException {
  startDFSCluster(1, 1);
  String bpid = cluster.getNamesystem().getBlockPoolId();
  Path testFile = new Path("/test");
  createFile(testFile, 4);  // Each volume has 2 blocks.

  addVolumes(2);

  // Continue to write the same file, thus the new volumes will have blocks.
  DFSTestUtil.appendFile(cluster.getFileSystem(), testFile, BLOCK_SIZE * 8);
  verifyFileLength(cluster.getFileSystem(), testFile, 8 + 4);
  // After appending data, there should be [2, 2, 4, 4] blocks in each volume
  // respectively.
  List<Integer> expectedNumBlocks = Arrays.asList(2, 2, 4, 4);

  List<Map<DatanodeStorage, BlockListAsLongs>> blockReports =
      cluster.getAllBlockReports(bpid);
  assertEquals(1, blockReports.size());  // 1 DataNode
  assertEquals(4, blockReports.get(0).size());  // 4 volumes
  Map<DatanodeStorage, BlockListAsLongs> dnReport =
      blockReports.get(0);
  List<Integer> actualNumBlocks = new ArrayList<Integer>();
  for (BlockListAsLongs blockList : dnReport.values()) {
    actualNumBlocks.add(blockList.getNumberOfBlocks());
  }
  Collections.sort(actualNumBlocks);
  assertEquals(expectedNumBlocks, actualNumBlocks);
}
 
Example 9
Source File: GemFireTimeSync.java    From gemfirexd-oss with Apache License 2.0
private long getRTTStdDev(Map<Address, GFTimeSyncHeader> values, long average) {
  long sqDiffs = 0;
  for (GFTimeSyncHeader response: values.values()) {
    long diff = average - (response.timeReceived - response.time);
    sqDiffs += diff * diff;
  }
  return Math.round(Math.sqrt((double)sqDiffs));
}
 
Example 10
Source File: SolrTestCaseHS.java    From lucene-solr with Apache License 2.0
@SuppressWarnings({"unchecked"})
public static Object createDocObjects(@SuppressWarnings({"rawtypes"})Map<Comparable, Doc> fullModel,
                                      @SuppressWarnings({"rawtypes"})Comparator sort, int rows,
                                      Collection<String> fieldNames) {
  List<Doc> docList = new ArrayList<>(fullModel.values());
  Collections.sort(docList, sort);
  @SuppressWarnings({"rawtypes"})
  List sortedDocs = new ArrayList(rows);
  for (Doc doc : docList) {
    if (sortedDocs.size() >= rows) break;
    Map<String,Object> odoc = toObject(doc, h.getCore().getLatestSchema(), fieldNames);
    sortedDocs.add(odoc);
  }
  return sortedDocs;
}
 
Example 11
Source File: DownloadDictionaryActivity.java    From aedict with GNU General Public License v3.0
@Override
public List<DownloadableDictionaryInfo> impl(Void... params) throws Exception {
	publish(new Progress(AedictApp.getStr(R.string.downloadingDictionaryList), 0, 1));
	final Map<String, DownloadableDictionaryInfo> result = new HashMap<String, DownloadableDictionaryInfo>();
	final BufferedReader reader = new BufferedReader(new InputStreamReader(new URL(DICT_LIST_URL).openStream(), "UTF-8"));
	try {
		while (true) {
			final String line = reader.readLine();
			if (line == null) {
				break;
			}
			if (MiscUtils.isBlank(line)) {
				continue;
			}
			final DownloadableDictionaryInfo info = DownloadableDictionaryInfo.parse(line);
			result.put(info.zipName, info);
		}
	} finally {
		MiscUtils.closeQuietly(reader);
	}
	// remove all dictionaries which are already downloaded
	for (final Dictionary d: Dictionary.listEdictInstalled()) {
		if(d.custom!=null){
			result.remove("edict-lucene-"+d.custom+".zip");
		}
	}
	final List<DownloadableDictionaryInfo> items = new ArrayList<DownloadableDictionaryInfo>(result.values());
	Collections.sort(items);
	return items;
}
 
Example 12
Source File: CLDRUtils.java    From singleton with Eclipse Public License 2.0
/**
 * Query CLDR's availableLocales.json to determine whether a pattern JSON exists for the
 * given locale; if not, check defaultContent.json.
 *
 * @param region
 * @param language
 * @param availableLocales
 * @param defaultContentLocales
 * @param localeAliases
 * @return true if a matching locale is found, false otherwise
 */
private static boolean isPatternExist(String region, String language,
                                      Map<String, String> availableLocales,
                                      Map<String, String> defaultContentLocales,
                                      Map<String, Object> localeAliases) {
    if (language.contains("_")) {
        language = language.replace("_", "-");
    }
    String newLocale = language + "-" + region;
    String matchLocale = CommonUtil.getMatchingLocale(localeAliases, newLocale);
    if (!CommonUtil.isEmpty(matchLocale)) {
        newLocale = matchLocale;
    }

    for (String locale : availableLocales.values()) {
        if (locale.equals(newLocale)) {
            return true;
        }
    }

    for (String defaultRegion : defaultContentLocales.values()) {
        if (defaultRegion.equals(newLocale)) {
            return true;
        }
    }
    return false;
}
 
Example 13
Source File: HolidayEventsLoader.java    From incubator-pinot with Apache License 2.0
void mergeWithExistingHolidays(Map<String, List<EventDTO>> holidayNameToHolidayEvent, List<EventDTO> existingEvents) {
  for (EventDTO existingEvent : existingEvents) {
    String holidayName = existingEvent.getName();
    if (!holidayNameToHolidayEvent.containsKey(holidayName)) {
      // If an event disappears, delete the event
      eventDAO.delete(existingEvent);
    } else {
      // If an existing event shows up again, overwrite with new time and country code.
      List<EventDTO> eventList = holidayNameToHolidayEvent.get(holidayName);
      EventDTO newEvent = eventList.remove(eventList.size() - 1);

      existingEvent.setStartTime(newEvent.getStartTime());
      existingEvent.setEndTime(newEvent.getEndTime());
      existingEvent.setTargetDimensionMap(newEvent.getTargetDimensionMap());
      eventDAO.update(existingEvent);

      if (eventList.isEmpty()) {
        holidayNameToHolidayEvent.remove(holidayName);
      }
    }
  }

  // Add all remaining new events into the database
  for (List<EventDTO> eventDTOList : holidayNameToHolidayEvent.values()) {
    for (EventDTO eventDTO : eventDTOList) {
      eventDAO.save(eventDTO);
    }
  }
}
 
Example 14
Source File: RedissonMapReduceTest.java    From redisson with Apache License 2.0
@Override
public Integer collate(Map<String, Integer> resultMap) {
    redisson.getAtomicLong("test").incrementAndGet();
    
    int result = 0;
    for (Integer count : resultMap.values()) {
        result += count;
    }
    return result;
}
 
Example 15
Source File: DestroyRegionExecutor.java    From gemfirexd-oss with Apache License 2.0
public Object executeAndVerify(TestCommandInstance instance) {    
  TestableGfsh gfsh = CLITest.getTestableShell();
  if(!gfsh.isConnectedAndReady()){
    CLITest.connectGfshToManagerNode();
  }    
  Object object[] = TestableGfsh.execAndLogCommand(gfsh, instance.toString(), CLITest.getGfshOutputFile(), false);
  Map map = (Map) object[0];
  CommandResult result =null;
  Collection values = map.values();
  for(Object r : values){
    if(r instanceof CommandResult){
      result = (CommandResult) r;      
      if(!result.getStatus().equals(Result.Status.OK)){
        //throw new TestException("Command return status is *NOT* OK. Command execution has failed");
        CLITest.currentCommand.append(" Command return status is *NOT* OK. Command execution has failed");
        CLITest.hasCommandFailed = true;
      }
      else {
        HydraUtil.logInfo("Completed execution of <" + instance + "> successfully");
      }
    }
  }
  verifyGemfire(gfsh, object);
  verifyJMX(gfsh, object);
  verifyCommand(gfsh, object);
  
  RegionEvents e = new RegionEvents();
  e.regionDestroyed(regionPath, null);
  e.exportToBlackBoard(OperationsBlackboard.getBB());
  this.regionPath = null;    

  return object;
}
 
Example 16
Source File: KafkaAdminClient.java    From common-kafka with Apache License 2.0
private Collection<TopicDescription> getTopicDescriptions(Collection<String> topics) {
    try {
        Map<String, TopicDescription> topicDescriptions = getNewAdminClient().describeTopics(topics).all()
            .get(operationTimeout, TimeUnit.MILLISECONDS);
        if (topics.isEmpty()) {
            LOG.warn("Unable to describe Kafka topics");
        }
        return topicDescriptions.values();
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new AdminOperationException("Unable to describe Kafka topics", e);
    }
}
 
Example 17
Source File: Configurator.java    From oasp4j-ide with Apache License 2.0
/**
 * @return a {@link Collection} with all available {@link EclipseWorkspaceFile}.
 */
private Collection<EclipseWorkspaceFile> collectWorkspaceFiles() {

  Map<String, EclipseWorkspaceFile> fileMap = new HashMap<String, EclipseWorkspaceFile>();
  collectWorkspaceFiles(new File(this.pluginsUpdateDirectoryPath), fileMap);
  collectWorkspaceFiles(new File(this.pluginsSetupDirectoryPath), fileMap);
  Collection<EclipseWorkspaceFile> values = fileMap.values();
  Log.LOGGER.info("Collected " + values.size() + " configuration files.");
  return values;
}
 
Example 18
Source File: UDDIClient.java    From juddi with Apache License 2.0
/**
 * Removes the bindings of the services in the annotated classes.
 * Multiple nodes may register the same service using different
 * BindingTemplates. If the last BindingTemplate is removed, the service
 * can be removed as well.
 *
 * @param removeServiceWithNoBindingTemplates - if set to true it will
 * remove the service if there are no other BindingTemplates.
 */
public void unRegisterBindingsOfAnnotatedServices(boolean removeServiceWithNoBindingTemplates) {

        Map<String, UDDIClerk> clerks = clientConfig.getUDDIClerks();
        if (clerks.size() > 0) {
                AnnotationProcessor ap = new AnnotationProcessor();
                for (UDDIClerk clerk : clerks.values()) {
                        Collection<BusinessService> services = ap.readServiceAnnotations(
                                clerk.getClassWithAnnotations(), clerk.getUDDINode().getProperties());
                        for (BusinessService businessService : services) {
                                if (businessService.getBindingTemplates() != null) {
                                        List<BindingTemplate> bindingTemplates = businessService.getBindingTemplates().getBindingTemplate();
                                        for (BindingTemplate bindingTemplate : bindingTemplates) {
                                                clerk.unRegisterBinding(bindingTemplate.getBindingKey(), clerk.getUDDINode().getApiNode());
                                        }
                                }
                                if (removeServiceWithNoBindingTemplates) {
                                        try {
                                                BusinessService existingService = clerk.getServiceDetail(businessService.getServiceKey(), clerk.getUDDINode().getApiNode());
                                                if (existingService.getBindingTemplates() == null || existingService.getBindingTemplates().getBindingTemplate().size() == 0) {
                                                        clerk.unRegisterService(businessService.getServiceKey(), clerk.getUDDINode().getApiNode());
                                                }
                                        } catch (Exception e) {
                                                log.error(e.getMessage(), e);
                                        }
                                }
                        }
                }
        }

}
 
Example 19
Source File: JsonJobGraphGenerationTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public void validateJson(String json) throws Exception {
	final Map<String, JsonNode> idToNode = new HashMap<>();

	// validate the produced JSON
	ObjectMapper m = new ObjectMapper();
	JsonNode rootNode = m.readTree(json);

	JsonNode idField = rootNode.get("jid");
	JsonNode nameField = rootNode.get("name");
	JsonNode arrayField = rootNode.get("nodes");

	assertNotNull(idField);
	assertNotNull(nameField);
	assertNotNull(arrayField);
	assertTrue(idField.isTextual());
	assertTrue(nameField.isTextual());
	assertTrue(arrayField.isArray());

	ArrayNode array = (ArrayNode) arrayField;
	Iterator<JsonNode> iter = array.elements();
	while (iter.hasNext()) {
		JsonNode vertex = iter.next();

		JsonNode vertexIdField = vertex.get("id");
		JsonNode parallelismField = vertex.get("parallelism");
		JsonNode contentsFields = vertex.get("description");
		JsonNode operatorField = vertex.get("operator");

		assertNotNull(vertexIdField);
		assertTrue(vertexIdField.isTextual());
		assertNotNull(parallelismField);
		assertTrue(parallelismField.isNumber());
		assertNotNull(contentsFields);
		assertTrue(contentsFields.isTextual());
		assertNotNull(operatorField);
		assertTrue(operatorField.isTextual());

		if (contentsFields.asText().startsWith("Sync")) {
			assertEquals(1, parallelismField.asInt());
		}
		else {
			assertEquals(expectedParallelism, parallelismField.asInt());
		}

		idToNode.put(vertexIdField.asText(), vertex);
	}

	assertEquals(numNodes, idToNode.size());

	// check that all inputs are contained
	for (JsonNode node : idToNode.values()) {
		JsonNode inputsField = node.get("inputs");
		if (inputsField != null) {
			Iterator<JsonNode> inputsIter = inputsField.elements();
			while (inputsIter.hasNext()) {
				JsonNode inputNode = inputsIter.next();
				JsonNode inputIdField = inputNode.get("id");

				assertNotNull(inputIdField);
				assertTrue(inputIdField.isTextual());

				String inputIdString = inputIdField.asText();
				assertTrue(idToNode.containsKey(inputIdString));
			}
		}
	}
}
 
Example 20
Source File: BenchmarkQueryEngine.java    From incubator-pinot with Apache License 2.0
@Setup
public void startPinot()
    throws Exception {
  System.out.println("Using table name " + TABLE_NAME);
  System.out.println("Using data directory " + DATA_DIRECTORY);
  System.out.println("Starting pinot");

  PerfBenchmarkDriverConf conf = new PerfBenchmarkDriverConf();
  conf.setStartBroker(true);
  conf.setStartController(true);
  conf.setStartServer(true);
  conf.setStartZookeeper(true);
  conf.setRunQueries(false);
  conf.setServerInstanceSegmentTarDir(null);
  conf.setServerInstanceDataDir(DATA_DIRECTORY);
  conf.setConfigureResources(false);
  _perfBenchmarkDriver = new PerfBenchmarkDriver(conf);
  _perfBenchmarkDriver.run();

  File[] segments = new File(DATA_DIRECTORY, TABLE_NAME).listFiles();
  for (File segmentDir : segments) {
    SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(segmentDir);
    _perfBenchmarkDriver.configureTable(TABLE_NAME);
    System.out.println("Adding segment " + segmentDir.getAbsolutePath());
    _perfBenchmarkDriver.addSegment(TABLE_NAME, segmentMetadata);
  }

  ZkClient client = new ZkClient("localhost:2191", 10000, 10000, new ZNRecordSerializer());

  ZNRecord record = client.readData("/PinotPerfTestCluster/EXTERNALVIEW/" + TABLE_NAME);
  while (true) {
    System.out.println("record = " + record);
    Uninterruptibles.sleepUninterruptibly(10, TimeUnit.SECONDS);

    int onlineSegmentCount = 0;
    for (Map<String, String> instancesAndStates : record.getMapFields().values()) {
      for (String state : instancesAndStates.values()) {
        if (state.equals("ONLINE")) {
          onlineSegmentCount++;
          break;
        }
      }
    }

    System.out.println(onlineSegmentCount + " segments online out of " + segments.length);

    if (onlineSegmentCount == segments.length) {
      break;
    }

    record = client.readData("/PinotPerfTestCluster/EXTERNALVIEW/" + TABLE_NAME);
  }

  ranOnce = false;

  System.out.println(_perfBenchmarkDriver.postQuery(QUERY_PATTERNS[queryPattern], optimizationFlags).toString());
}