Java Code Examples for org.apache.commons.collections4.MapUtils

The following examples show how to use org.apache.commons.collections4.MapUtils. They are extracted from open source projects; where known, the source project, source file, and license are listed above each example.
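Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the map contents and key names are invented for illustration) that exercises the MapUtils calls the examples rely on most: null-safe emptiness checks, typed getters with defaults, and map inversion.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections4.MapUtils;

public class MapUtilsBasics {

    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("name", "example");
        config.put("weeks", 4);
        config.put("failOnError", "true");

        // Null-safe emptiness checks: both handle a null map without throwing.
        System.out.println(MapUtils.isEmpty(null));       // true
        System.out.println(MapUtils.isNotEmpty(config));   // true

        // Typed getters: convert the value to the requested type, or fall back
        // to the supplied default when the key is missing or not convertible.
        String name = MapUtils.getString(config, "name", "unknown");                 // "example"
        int weeks = MapUtils.getIntValue(config, "weeks", 1);                        // 4
        boolean failOnError = MapUtils.getBooleanValue(config, "failOnError", false); // true
        long timeoutMs = MapUtils.getLongValue(config, "timeoutMs", 5000L);          // default 5000

        System.out.println(name + ", " + weeks + ", " + failOnError + ", " + timeoutMs);

        // invertMap swaps keys and values; duplicate values collapse into a single key.
        Map<String, String> storeToStream = new HashMap<>();
        storeToStream.put("store1", "streamA");
        storeToStream.put("store2", "streamA");
        System.out.println(MapUtils.invertMap(storeToStream).size()); // 1
    }
}
 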
Example 1
@Override
protected List<OasViolation> validateCurrentSchemaObject(OasValidationContext context, Schema oasObject,
  OasObjectPropertyLocation location) {

  Map<String, Schema> properties = oasObject.getProperties();

  if (MapUtils.isEmpty(properties)) {
    return emptyList();
  }

  return
    OasObjectValidatorUtils.doValidateMapPropertyKeys(
      location,
      "properties",
      properties,
      keyPredicate,
      errorFunction
    );

}
 
Example 2
/**
 * Instantiates a new Legacy anomaly function algorithm.
 *
 * @param provider the provider
 * @param config the config
 * @param startTime the start time
 * @param endTime the end time
 * @throws Exception the exception
 */
public LegacyAnomalyFunctionAlgorithm(DataProvider provider, DetectionConfigDTO config, long startTime, long endTime) throws Exception {
  super(provider, config, startTime, endTime);
  // TODO: Round start and end time stamps
  Preconditions.checkArgument(config.getProperties().containsKey(PROP_ANOMALY_FUNCTION_CLASS));
  String anomalyFunctionClassName = MapUtils.getString(config.getProperties(), PROP_ANOMALY_FUNCTION_CLASS);

  String specs = OBJECT_MAPPER.writeValueAsString(ConfigUtils.getMap(config.getProperties().get(PROP_SPEC)));
  this.anomalyFunction = (BaseAnomalyFunction) Class.forName(anomalyFunctionClassName).newInstance();
  this.anomalyFunction.init(OBJECT_MAPPER.readValue(specs, AnomalyFunctionDTO.class));

  this.dataFilter = DataFilterFactory.fromSpec(this.anomalyFunction.getSpec().getDataFilter());
  this.failOnError = MapUtils.getBooleanValue(config.getProperties(), PROP_FAIL_ON_ERROR, false);

  if (config.getProperties().containsKey(PROP_METRIC_URN)) {
    this.metricEntity = MetricEntity.fromURN(MapUtils.getString(config.getProperties(), PROP_METRIC_URN));
  } else {
    this.metricEntity = makeEntity(this.anomalyFunction.getSpec());
  }
}
 
Example 3
/**
 * Finds intermediate conversion rate.
 * If pairs USD : AUD - 1.2 and EUR : AUD - 1.5 are present, and EUR to USD conversion is needed, will return
 * (1/1.5) * 1.2 conversion rate.
 */
private static BigDecimal findIntermediateConversionRate(Map<String, BigDecimal> adServerCurrencyRates,
                                                         Map<String, BigDecimal> bidCurrencyRates) {
    BigDecimal conversionRate = null;
    if (MapUtils.isNotEmpty(adServerCurrencyRates) && MapUtils.isNotEmpty(bidCurrencyRates)) {
        final List<String> sharedCurrencies = new ArrayList<>(adServerCurrencyRates.keySet());
        sharedCurrencies.retainAll(bidCurrencyRates.keySet());

        if (!sharedCurrencies.isEmpty()) {
            // pick any found shared currency
            final String sharedCurrency = sharedCurrencies.get(0);
            final BigDecimal adServerCurrencyRateIntermediate = adServerCurrencyRates.get(sharedCurrency);
            final BigDecimal bidCurrencyRateIntermediate = bidCurrencyRates.get(sharedCurrency);
            conversionRate = adServerCurrencyRateIntermediate.divide(bidCurrencyRateIntermediate,
                    // choose the largest precision among the intermediate rates
                    bidCurrencyRateIntermediate.compareTo(adServerCurrencyRateIntermediate) > 0
                            ? bidCurrencyRateIntermediate.precision()
                            : adServerCurrencyRateIntermediate.precision(),
                    RoundingMode.HALF_EVEN);
        }
    }
    return conversionRate;
}
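The arithmetic in the javadoc above can be checked with a short standalone sketch; it is hypothetical (the class name and rate values are only for illustration) and simply replays the same steps: with USD : AUD = 1.2 on the ad-server side and EUR : AUD = 1.5 on the bid side, AUD is the shared currency and the EUR-to-USD rate works out to (1/1.5) * 1.2 = 0.80.

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections4.MapUtils;

public class ConversionRateSketch {

    public static void main(String[] args) {
        // Rates keyed by the quoted currency: the ad server knows USD : AUD = 1.2,
        // the bid is priced against EUR : AUD = 1.5.
        Map<String, BigDecimal> adServerCurrencyRates = Map.of("AUD", new BigDecimal("1.2"));
        Map<String, BigDecimal> bidCurrencyRates = Map.of("AUD", new BigDecimal("1.5"));

        BigDecimal rate = null;
        if (MapUtils.isNotEmpty(adServerCurrencyRates) && MapUtils.isNotEmpty(bidCurrencyRates)) {
            List<String> sharedCurrencies = new ArrayList<>(adServerCurrencyRates.keySet());
            sharedCurrencies.retainAll(bidCurrencyRates.keySet());
            if (!sharedCurrencies.isEmpty()) {
                BigDecimal adServerRate = adServerCurrencyRates.get(sharedCurrencies.get(0)); // 1.2
                BigDecimal bidRate = bidCurrencyRates.get(sharedCurrencies.get(0));           // 1.5
                // Same scale choice as above: the precision of the larger intermediate rate.
                int scale = bidRate.compareTo(adServerRate) > 0 ? bidRate.precision() : adServerRate.precision();
                rate = adServerRate.divide(bidRate, scale, RoundingMode.HALF_EVEN);
            }
        }
        System.out.println(rate); // 0.80, i.e. (1/1.5) * 1.2
    }
}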
 
Example 4
Source Project: incubator-pinot   Source File: BaselineAlgorithm.java    License: Apache License 2.0
public BaselineAlgorithm(DataProvider provider, DetectionConfigDTO config, long startTime, long endTime) {
  super(provider, config, startTime, endTime);

  Preconditions.checkArgument(config.getProperties().containsKey(PROP_METRIC_URN));

  String metricUrn = MapUtils.getString(config.getProperties(), PROP_METRIC_URN);
  MetricEntity me = MetricEntity.fromURN(metricUrn);
  this.slice = MetricSlice.from(me.getId(), this.startTime, this.endTime, me.getFilters());

  int weeks = MapUtils.getIntValue(config.getProperties(), PROP_WEEKS, PROP_WEEKS_DEFAULT);
  BaselineAggregateType aggregation = BaselineAggregateType.valueOf(MapUtils.getString(config.getProperties(), PROP_AGGREGATION, PROP_AGGREGATION_DEFAULT));
  DateTimeZone timezone = DateTimeZone.forID(MapUtils.getString(this.config.getProperties(), PROP_TIMEZONE, PROP_TIMEZONE_DEFAULT));
  this.baseline = BaselineAggregate.fromWeekOverWeek(aggregation, weeks, 1, timezone);

  this.change = MapUtils.getDoubleValue(config.getProperties(), PROP_CHANGE, PROP_CHANGE_DEFAULT);
  this.difference = MapUtils.getDoubleValue(config.getProperties(), PROP_DIFFERENCE, PROP_DIFFERENCE_DEFAULT);
}
 
Example 5
Source Project: DDMQ   Source File: UpstreamJobBuffer.java    License: Apache License 2.0
public synchronized void recoverTimeoutMessage() {
    if (MapUtils.isEmpty(workingJobs)) {
        return;
    }

    LOGGER.trace("recoverTimeoutMessage,group:{},topic:{},qid:{},workingJobs.size={}", groupId, topic, qid, workingJobs.size());

    long curTime = TimeUtils.getCurTime();
    Iterator<Map.Entry<Long, UpstreamJob>> itr = workingJobs.entrySet().iterator();
    while (itr.hasNext()) {
        UpstreamJob job = itr.next().getValue();
        if (curTime - job.getPullTimestamp() >= upstreamTopic.getTimeout()) {
            if (job.canDoErrorRetry()) {
                tryPutInNonEmptyQueue();
                job.setState("PullSvr.Timeout#" + job.getErrorRetryCnt());
            } else {
                itr.remove();
                dropJob(job);  //failure
            }
        } else {
            break;
        }
    }
}
 
Example 6
@Override
	public PageResult<SysPermission> findPermissions(Map<String, Object> params) {
		// Set pagination info: the current page number and the page size (note: this must be set before the mapper interface method executes)
		if (MapUtils.getInteger(params, "page") != null && MapUtils.getInteger(params, "limit") != null) {
			PageHelper.startPage(MapUtils.getInteger(params, "page"), MapUtils.getInteger(params, "limit"), true);
		}
		List<SysPermission> list = sysPermissionDao.findList(params);
		PageInfo<SysPermission> pageInfo = new PageInfo<>(list);

		return PageResult.<SysPermission>builder().data(pageInfo.getList()).code(0).count(pageInfo.getTotal()).build();

//		int total = sysPermissionDao.count(params);
//		List<SysPermission> list = Collections.emptyList();
//
//		if (total > 0) {
//			PageUtil.pageParamConver(params, false);
//			list = sysPermissionDao.findList(params);
//
//		}
//		return PageResult.<SysPermission>builder().data(list).code(0).count((long)total).build()  ;
	}
 
Example 7
Source Project: incubator-pinot   Source File: AnomaliesResource.java    License: Apache License 2.0
public static Multimap<String, String> generateFilterSetWithDimensionMap(DimensionMap dimensionMap,
    Multimap<String, String> filterSet) {

  Multimap<String, String> newFilterSet = HashMultimap.create();

  // Dimension map gives more specified dimension information than filter set (i.e., Dimension Map should be a subset
  // of filterSet), so it needs to be processed first.
  if (MapUtils.isNotEmpty(dimensionMap)) {
    for (Map.Entry<String, String> dimensionMapEntry : dimensionMap.entrySet()) {
      newFilterSet.put(dimensionMapEntry.getKey(), dimensionMapEntry.getValue());
    }
  }

  if (filterSet != null && filterSet.size() != 0) {
    for (String key : filterSet.keySet()) {
      if (!newFilterSet.containsKey(key)) {
        newFilterSet.putAll(key, filterSet.get(key));
      }
    }
  }

  return newFilterSet;
}
 
Example 8
Map<String, Object> compositePropertyBuilderHelper(List<Map<String, Object>> nestedPropertiesList,
    Map<String, Object> compositeAlertConfigMap) {
  Map<String, Object> properties;
  String subEntityName = MapUtils.getString(compositeAlertConfigMap, PROP_NAME);

  // Wrap the entity level grouper, only 1 grouper is supported now
  List<Map<String, Object>> grouperProps = ConfigUtils.getList(compositeAlertConfigMap.get(PROP_GROUPER));
  Map<String, Object> mergerProperties = ConfigUtils.getMap(compositeAlertConfigMap.get(PROP_MERGER));
  if (!grouperProps.isEmpty()) {
    properties = buildWrapperProperties(
        EntityAnomalyMergeWrapper.class.getName(),
        Collections.singletonList(buildGroupWrapperProperties(subEntityName, grouperProps.get(0), nestedPropertiesList)),
        mergerProperties);
    nestedPropertiesList = Collections.singletonList(properties);
  }

  return buildWrapperProperties(
      ChildKeepingMergeWrapper.class.getName(),
      nestedPropertiesList,
      mergerProperties);
}
 
Example 9
Source Project: JuniperBot   Source File: TimeWindowChart.java    License: GNU General Public License v3.0
@Override
@SuppressWarnings("unchecked")
public synchronized void fromMap(Map<String, Object> data) {
    if (MapUtils.isEmpty(data)) {
        return;
    }
    Object window = data.get("window");
    Object measurements = data.get("measurements");
    if (window instanceof Number && measurements instanceof Map) {
        this.window = ((Number) window).longValue();
        this.measurements.clear();
        ((Map) measurements).forEach((k, v) ->
                this.measurements.put(Long.parseLong(k.toString()), Long.parseLong(v.toString())));
        this.lastTick.set(System.currentTimeMillis());
        trim();
    }
}
 
Example 10
Source Project: ranger   Source File: XUserServiceBase.java    License: Apache License 2.0
protected List<VXUser> mapEntityToViewBeans(Map<VXUser, XXUser> vxUserXXUserMap) {
	List<VXUser> vxUsers = new ArrayList<>();
	if (MapUtils.isNotEmpty(vxUserXXUserMap)) {
		for (Map.Entry<VXUser, XXUser> vxUserXXUserEntry : vxUserXXUserMap.entrySet()) {
			VXUser vObj = vxUserXXUserEntry.getKey();
			XXUser mObj = vxUserXXUserEntry.getValue();
			vObj.setName(mObj.getName());
			vObj.setIsVisible(mObj.getIsVisible());
			vObj.setDescription(mObj.getDescription());
			vObj.setCredStoreId(mObj.getCredStoreId());
			vObj.setOtherAttributes(mObj.getOtherAttributes());
			vxUsers.add(vObj);
		}
	}
	return vxUsers;
}
 
Example 11
Source Project: JIMU   Source File: RouterProcessor.java    License: Apache License 2.0
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);

    routerNodes = new ArrayList<>();

    mFiler = processingEnv.getFiler();
    types = processingEnv.getTypeUtils();
    elements = processingEnv.getElementUtils();
    typeUtils = new TypeUtils(types, elements);

    type_String = elements.getTypeElement("java.lang.String").asType();

    logger = new Logger(processingEnv.getMessager());

    Map<String, String> options = processingEnv.getOptions();
    if (MapUtils.isNotEmpty(options)) {
        host = options.get(KEY_HOST_NAME);
        logger.info(">>> host is " + host + " <<<");
    }
    if (host == null || host.equals("")) {
        host = "default";
    }
    logger.info(">>> RouteProcessor init. <<<");
}
 
Example 12
private boolean shouldUpdateCredentials(CloudServiceInstanceExtended service, CloudServiceInstance existingService,
                                        CloudControllerClient client) {
    try {
        Map<String, Object> serviceParameters = client.getServiceInstanceParameters(existingService.getMetadata()
                                                                                                   .getGuid());
        getStepLogger().debug("Existing service parameters: " + SecureSerialization.toJson(serviceParameters));
        return !Objects.equals(service.getCredentials(), serviceParameters);
    } catch (CloudOperationException e) {
        if (HttpStatus.NOT_IMPLEMENTED == e.getStatusCode() || HttpStatus.BAD_REQUEST == e.getStatusCode()) {
            getStepLogger().warnWithoutProgressMessage(Messages.CANNOT_RETRIEVE_SERVICE_INSTANCE_PARAMETERS, service.getName());
            // TODO: Optimization (Hack) that should be deprecated at some point. So here is a todo for that.
            return !MapUtils.isEmpty(service.getCredentials());
        }
        throw e;
    }
}
 
Example 13
Source Project: DDMQ   Source File: CarreraConfiguration.java    License: Apache License 2.0
@Override
public boolean validate() throws ConfigException {
    if (CollectionUtils.isEmpty(retryDelays)) {
        throw new ConfigException("[CarreraConfiguration] retryDelays empty");
    } else if (thriftServer == null || !thriftServer.validate()) {
        throw new ConfigException("[CarreraConfiguration] thriftServer error");
    } else if (useKafka && (kafkaProducers <= 0 || MapUtils.isEmpty(kafkaConfigurationMap) || !kafkaConfigurationMap.values().stream().allMatch(KafkaConfiguration::validate))) {
        throw new ConfigException("[CarreraConfiguration] kafka config error");
    } else if (useRocketmq && (rocketmqProducers <= 0 || MapUtils.isEmpty(rocketmqConfigurationMap) || !rocketmqConfigurationMap.values().stream().allMatch(RocketmqConfiguration::validate))) {
        throw new ConfigException("[CarreraConfiguration] rocketmq config error");
    } else if (useAutoBatch && (autoBatch == null || !autoBatch.validate())) {
        throw new ConfigException("[CarreraConfiguration] autoBatch error");
    } else if (maxTps <= 0) {
        throw new ConfigException("[CarreraConfiguration] maxTps <= 0");
    } else if (tpsWarningRatio <= 0) {
        throw new ConfigException("[CarreraConfiguration] tpsWarningRatio <= 0");
    } else if (defaultTopicInfoConf == null) {
        throw new ConfigException("[CarreraConfiguration] defaultTopicInfoConf is null");
    }

    return true;
}
 
Example 14
Source Project: spring-boot-plus   Source File: PropertyColumnUtil.java    License: Apache License 2.0
/**
 * Gets the property-to-column-name map for the given class from the local cache.
 *
 * @param clazz the entity class
 * @return the property-to-column map, or null if no mapping exists
 */
public static Map<String, String> getPropertyColumnMap(Class<?> clazz) {
    Map<String, String> propertyColumnMap = cacheMap.get(clazz);
    if (MapUtils.isEmpty(propertyColumnMap)) {
        // Fetch it from TableInfo and cache it in the in-memory map
        Map<String, String> fieldMap = getTableFieldMap(clazz);
        if (MapUtils.isEmpty(fieldMap)) {
            return null;
        } else {
            cacheMap.put(clazz, fieldMap);
            return fieldMap;
        }
    } else {
        return propertyColumnMap;
    }
}
 
Example 15
Source Project: DDMQ   Source File: CommitLagLimiter.java    License: Apache License 2.0
public void acquire(ConsumeOffsetTracker tracker, String topic, ConsumeContext context) throws InterruptedException {
    long maxCommitLag = MapUtils.getLong(maxCommitLagMap, topic, -1L);
    if (maxCommitLag < 0) {
        return;
    }
    long lag = tracker.getCommitLag(topic, context);
    if (lag < maxCommitLag) {
        return;
    }

    commitLagLock.lock();
    try {
        while ((lag = tracker.getCommitLag(topic, context)) >= maxCommitLag) {
            LOGGER.warn("commit lag is over maxLag, block consuming...group={},topic={},qid={},lag={}",
                    context.getGroupId(), topic, context.getQid(), lag);
            getCondition(topic, context).await();
        }
    } finally {
        commitLagLock.unlock();
    }
}
 
Example 16
private String loadMetricCache(Map<String, Object> metricAlertConfigMap) {
  String metricName = MapUtils.getString(metricAlertConfigMap, PROP_METRIC);
  String datasetName = MapUtils.getString(metricAlertConfigMap, PROP_DATASET);
  String cron = MapUtils.getString(metricAlertConfigMap, PROP_CRON);
  String metricAliasKey = ThirdEyeUtils.constructMetricAlias(datasetName, metricName);
  if (metricAttributesMap.containsKey(metricAliasKey)) {
    return metricAliasKey;
  }

  DatasetConfigDTO datasetConfig = fetchDatasetConfigDTO(this.dataProvider, datasetName);
  datasetConfigs.add(datasetConfig);

  MetricConfigDTO metricConfig = this.dataProvider.fetchMetric(metricName, datasetConfig.getDataset());

  cron = cron == null ? buildCron(datasetConfig.bucketTimeGranularity()) : cron;

  metricAttributesMap.put(metricAliasKey, new DetectionMetricProperties(cron, metricConfig, datasetConfig));

  return metricAliasKey;
}
 
Example 17
Source Project: multiapps-controller   Source File: ServiceGetterTest.java    License: Apache License 2.0
@ParameterizedTest
@MethodSource
public void testGetServiceInstanceEntity(Map<String, Object> serviceInstanceGetterResponse,
                                         Map<String, Object> userProvidedInstanceGetterResponse) {
    prepareServiceGetters(serviceInstanceGetterResponse, userProvidedInstanceGetterResponse);

    Map<String, Object> serviceInstanceEntity = serviceGetter.getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);

    if (MapUtils.isEmpty(serviceInstanceGetterResponse)) {
        assertEquals(userProvidedInstanceGetterResponse, serviceInstanceEntity);
        verify(userProvidedInstanceGetter).getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);
        return;
    }
    assertEquals(serviceInstanceGetterResponse, serviceInstanceEntity);
    verify(serviceInstanceGetter).getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);
}
 
Example 18
/**
 * Maps the item type for the given source based on the configuration
 * @param source the source to map
 * @return the item type
 */
protected String getItemType(Map<String, Object> source) {
    if(MapUtils.isNotEmpty(types)) {
        for (HierarchicalConfiguration<ImmutableNode> typeConfig : types.values()) {
            String fieldName = typeConfig.getString(CONFIG_KEY_TYPE_FIELD);
            if(source.containsKey(fieldName)) {
                String fieldValue = source.get(fieldName).toString();
                if (StringUtils.isNotEmpty(fieldValue) &&
                        fieldValue.matches(typeConfig.getString(CONFIG_KEY_TYPE_MATCHES))) {
                    return typeConfig.getString(CONFIG_KEY_TYPE_NAME);
                }
            }
        }
    }
    return defaultType;
}
 
Example 19
Source Project: DDMQ   Source File: KafkaFetcher.java    License: Apache License 2.0
@Override
public boolean ack(AckResult result) {
    if (result.getOffsetsSize() == 0) {
        return true;
    }
    result.getOffsets().forEach((topic, qidMap) -> {
        if (MapUtils.getObject(config.getTopicMap(), topic) == null) {
            LOGGER.warn("invalid topic({}) in {},result={}", topic, this, result);
            return;
        }
        Set<String> currentQids = consumer.getCurrentTopicQids(topic);
        qidMap.forEach((qid, offset) -> {
            if (!currentQids.contains(qid)) {
                LOGGER.warn("invalid qid({}) in {},result={}", qid, this, result);
                return;
            }

            MetricUtils.maxOffsetCount(result.getGroupId(), topic, qid, "ack", offset);
            consumer.setCommitOffset(topic, QidUtils.getKafkaQid(config.getBrokerCluster(), qid), offset);
            LOGGER.debug("commit offset groupId:{}, topic:{}, qid:{}, offset:{}, consumer:{}", result.getGroupId(), topic, qid, offset, this);
        });
    });
    return true;
}
 
Example 20
/**
 * Given the property keys from an anomaly function, checks whether two anomalies have the same values
 * for the mergeable keys during anomaly detection.
 * Returns true if the key set is empty, if both anomalies have empty properties, or if the values of all
 * mergeable keys are equal; otherwise returns false.
 * @param anomaly1 The first anomaly result
 * @param anomaly2 The second anomaly result
 * @param mergeableKeys keys passed by AnomalyMergeConfig, which is defined by the anomaly detection function
 * @return true if the two anomalies are equal on the mergeable keys, otherwise false
 */
private static boolean isEqualOnMergeableKeys(MergedAnomalyResultDTO anomaly1, MergedAnomalyResultDTO anomaly2, List<String> mergeableKeys){
  Map<String, String> prop1 = anomaly1.getProperties();
  Map<String, String> prop2 = anomaly2.getProperties();
  // degenerate case
  if(mergeableKeys.size() == 0 ||
      (MapUtils.isEmpty(prop1) && MapUtils.isEmpty(prop2))){
    return true;
  }
  // If both of anomalies have mergeable keys and the contents are equal, they are mergeable;
  // Otherwise it's indicating the two anomalies are detected by different function configurations, they are not mergeable
  for (String key : mergeableKeys) {
    // If both prop1 and prop2 do not contain key, the mergeable keys are not properly defined or the anomalies are not generated by the anomaly function
    if (!prop1.containsKey(key) && !prop2.containsKey(key)) {
      LOG.warn("Mergeable key: {} does not exist in properties! The mergeable keys are not properly defined or the anomalies are not generated by the anomaly function", key);
    }
    // If prop1 and prop2 have different value on key, return false
    if (!ObjectUtils.equals(prop1.get(key), prop2.get(key))) {
      return false;
    }
  }
  return true;
}
 
Example 21
Source Project: chronus   Source File: TaskHeartBeatServiceImpl.java    License: Apache License 2.0
@Override
public void removeTaskFromHeartBeatQueue(Integer heartBeatRate, TaskRuntimeEntity taskRuntime, boolean needClearRuntimeInfo) {
    String heartBeatRateStr = heartBeatRate.toString();
    String key = getKey(taskRuntime);

    TASK_HEARTBEAT_INTERVAL_GROUP_TABLE.remove(heartBeatRateStr, key);
    // If no task at this heartbeat interval still needs heartbeats, clear the related queues
    Map<String, TaskRuntimeEntity> rowMap = TASK_HEARTBEAT_INTERVAL_GROUP_TABLE.row(heartBeatRateStr);
    if (MapUtils.isEmpty(rowMap)) {
        ScheduledExecutorService heartbeatIntervalScheduledService = HEARTBEAT_INTERVAL_SCHEDULED_MAP.remove(heartBeatRateStr);
        if (heartbeatIntervalScheduledService != null) {
            heartbeatIntervalScheduledService.shutdown();
        }
        log.info("移除{}/s周期的心跳数据队列!", heartBeatRateStr);
        CONCURRENT_HEARTBEAT_QUEUE_MAP.remove(heartBeatRateStr);
    }
    if (needClearRuntimeInfo) {
        taskRuntimeService.delete(taskRuntime);
    }
}
 
Example 22
Source Project: incubator-pinot   Source File: JiraContentFormatter.java    License: Apache License 2.0
/**
 * Apply the parameter map to given jira template, and format it as JiraEntity
 */
private JiraEntity buildJiraEntity(String jiraTemplate, Map<String, Object> templateValues,
    Multimap<String, String> dimensionFilters) {
  String jiraProject = MapUtils.getString(alertClientConfig, PROP_PROJECT, this.jiraAdminConfig.getJiraDefaultProjectKey());
  Long jiraIssueTypeId = MapUtils.getLong(alertClientConfig, PROP_ISSUE_TYPE, this.jiraAdminConfig.getJiraIssueTypeId());

  JiraEntity jiraEntity = new JiraEntity(jiraProject, jiraIssueTypeId, buildSummary(templateValues, dimensionFilters));
  jiraEntity.setAssignee(MapUtils.getString(alertClientConfig, PROP_ASSIGNEE, "")); // Default - Unassigned
  jiraEntity.setMergeGap(MapUtils.getLong(alertClientConfig, PROP_MERGE_GAP, -1L)); // Default - Always merge
  jiraEntity.setLabels(buildLabels(dimensionFilters));
  jiraEntity.setDescription(buildDescription(jiraTemplate, templateValues));
  jiraEntity.setComponents(ConfigUtils.getList(alertClientConfig.get(PROP_COMPONENTS)));
  jiraEntity.setSnapshot(buildSnapshot());
  Map<String, Object> customFieldsMap = ConfigUtils.getMap(alertClientConfig.get(PROP_CUSTOM));
  jiraEntity.setCustomFieldsMap(customFieldsMap);

  return jiraEntity;
}
 
Example 23
Source Project: sunbird-lms-service   Source File: SystemSettingsActor.java    License: MIT License
private void getAllSystemSettings() {
  ProjectLogger.log("SystemSettingsActor: getAllSystemSettings called", LoggerEnum.DEBUG.name());
  Map<String, String> systemSettings = DataCacheHandler.getConfigSettings();
  Response response = new Response();
  List<SystemSetting> allSystemSettings = null;
  if (MapUtils.isNotEmpty(systemSettings)) {
    allSystemSettings = new ArrayList<>();
    for (Map.Entry setting : systemSettings.entrySet()) {
      allSystemSettings.add(
          new SystemSetting(
              (String) setting.getKey(), (String) setting.getKey(), (String) setting.getValue()));
    }
  } else {
    allSystemSettings = systemSettingDaoImpl.readAll();
  }
  response.put(JsonKey.RESPONSE, allSystemSettings);
  sender().tell(response, self());
}
 
Example 24
Source Project: samza   Source File: ContainerStorageManager.java    License: Apache License 2.0
/**
 * For each standby task, we remove its changeLogSSPs from changelogSSP map and add it to the task's taskSideInputSSPs.
 * The task's sideInputManager will consume and restore these as well.
 *
 * @param containerModel the container's model
 * @param changelogSystemStreams the passed in set of changelogSystemStreams
 * @return A map of storeName to changelog SystemStream for the active tasks, assuming no two stores have the same changelog SSP
 */
private Map<String, SystemStream> getChangelogSystemStreams(ContainerModel containerModel, Map<String, SystemStream> changelogSystemStreams) {

  if (MapUtils.invertMap(changelogSystemStreams).size() != changelogSystemStreams.size()) {
    throw new SamzaException("Two stores cannot have the same changelog system-stream");
  }

  Map<SystemStreamPartition, String> changelogSSPToStore = new HashMap<>();
  changelogSystemStreams.forEach((storeName, systemStream) ->
      containerModel.getTasks().forEach((taskName, taskModel) -> { changelogSSPToStore.put(new SystemStreamPartition(systemStream, taskModel.getChangelogPartition()), storeName); })
  );

  getTasks(containerModel, TaskMode.Standby).forEach((taskName, taskModel) -> {
    this.taskSideInputStoreSSPs.putIfAbsent(taskName, new HashMap<>());
    changelogSystemStreams.forEach((storeName, systemStream) -> {
      SystemStreamPartition ssp = new SystemStreamPartition(systemStream, taskModel.getChangelogPartition());
      changelogSSPToStore.remove(ssp);
      this.taskSideInputStoreSSPs.get(taskName).put(storeName, Collections.singleton(ssp));
    });
  });

  // changelogSystemStreams correspond only to active tasks (since those of standby-tasks moved to sideInputs above)
  return MapUtils.invertMap(changelogSSPToStore).entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> x.getValue().getSystemStream()));
}
 
Example 25
/**
 * @param <T>
 * @param context
 * @param ownerLocation   location of owner OAS object who own this mapProperty
 * @param mapPropertyName mapProperty name appears in owner OAS object
 * @param mapProperty     null-safe
 * @param valueType       type of values in mapProperty
 * @param validators
 * @return
 */
public static <T> List<OasViolation> doValidateMapPropertyValues(
  OasValidationContext context,
  OasObjectPropertyLocation ownerLocation,
  String mapPropertyName,
  Map<String, T> mapProperty,
  OasObjectType valueType,
  List<? extends OasObjectValidator<T>> validators
) {

  if (MapUtils.isEmpty(mapProperty)) {
    return emptyList();
  }

  List<OasViolation> violations = new ArrayList<>();

  for (Map.Entry<String, T> entry : mapProperty.entrySet()) {
    String key = entry.getKey();
    T value = entry.getValue();

    String keyName = mapPropertyName + ".'" + key + "'";
    OasObjectPropertyLocation valueLoc = ownerLocation.property(keyName, valueType);
    violations.addAll(doValidateProperty(context, valueLoc, value, validators));
  }

  return violations;
}
 
Example 26
Source Project: molicode   Source File: BrowserCallbackCenterImpl.java    License: Apache License 2.0
/**
 * Registers all BrowserCallbackListener beans found in the context so they can be called back later.
 *
 * @param configurableListableBeanFactory the bean factory to look up listeners from
 * @throws BeansException if bean lookup fails
 */
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory configurableListableBeanFactory) throws BeansException {
    Map<String, BrowserCallbackListener> map = configurableListableBeanFactory.getBeansOfType(BrowserCallbackListener.class);
    if (MapUtils.isEmpty(map)) {
        return;
    }
    for (Map.Entry<String, BrowserCallbackListener> entry : map.entrySet()) {
        this.addListener(entry.getValue());
    }
}
 
Example 27
Source Project: studio   Source File: SecurityServiceImpl.java    License: GNU General Public License v3.0
@Override
@ValidateParams
public boolean isSiteAdmin(@ValidateStringParam(name = "username") String username, String site) {

    boolean toRet = false;
    try {
        if (userServiceInternal.isUserMemberOfGroup(username, SYSTEM_ADMIN_GROUP)) {
            return true;
        }

        List<Group> groups = userServiceInternal.getUserGroups(-1, username);

        if (CollectionUtils.isNotEmpty(groups)) {
            Map<String, List<String>> roleMappings = configurationService.geRoleMappings(site);

            if (MapUtils.isNotEmpty(roleMappings)) {
                for (Group group : groups) {
                    String groupName = group.getGroupName();
                    List<String> roles = roleMappings.get(groupName);
                    if (roles.contains(ADMIN_ROLE)) {
                        toRet = true;
                    }
                }
            }
        }

    } catch (ServiceLayerException | UserNotFoundException e) {
        logger.warn("Error getting user memberships", e);
    }
    return toRet;
}
 
Example 28
@Override
public PageResult queryList(Map<String, Object> map) {
    // Set pagination info: the current page number and the page size (note: this must be set before the mapper interface method executes)
    PageHelper.startPage(MapUtils.getInteger(map, "page"), MapUtils.getInteger(map, "limit"), true);

    List list = sysGeneratorDao.queryList(map);
    PageInfo pageInfo = new PageInfo<>(list);
    return PageResult.builder().data(pageInfo.getList()).code(0).count(pageInfo.getTotal()).build();
}
 
Example 29
Source Project: studio   Source File: UserServiceImpl.java    License: GNU General Public License v3.0
@Override
@HasPermission(type = DefaultPermission.class, action = "read_users")
public List<String> getUserSiteRoles(long userId, String username, String site)
        throws ServiceLayerException, UserNotFoundException {
    List<Group> groups = userServiceInternal.getUserGroups(userId, username);

    if (CollectionUtils.isNotEmpty(groups)) {
        Map<String, List<String>> roleMappings = configurationService.geRoleMappings(site);
        Set<String> userRoles = new LinkedHashSet<>();

        if (MapUtils.isNotEmpty(roleMappings)) {
            for (Group group : groups) {
                String groupName = group.getGroupName();
                if (groupName.equals(SYSTEM_ADMIN_GROUP)) {
                    // If sysadmin, return all roles
                    Collection<List<String>> roleSets = roleMappings.values();

                    for (List<String> roleSet : roleSets) {
                        userRoles.addAll(roleSet);
                    }

                    break;
                } else {
                    List<String> roles = roleMappings.get(groupName);
                    if (CollectionUtils.isNotEmpty(roles)) {
                        userRoles.addAll(roles);
                    }
                }
            }
        }

        return new ArrayList<>(userRoles);
    } else {
        return Collections.emptyList();
    }
}
 
Example 30
Source Project: incubator-pinot   Source File: DetectionConfigValidator.java    License: Apache License 2.0
private void validateBasicAttributes(Map<String, Object> detectionYaml, String parentAlertName) {
  Preconditions.checkArgument(detectionYaml.containsKey(PROP_NAME),
      "Missing property ( " + PROP_NAME + " ) in one of the sub-alerts under " + parentAlertName);
  String alertName = MapUtils.getString(detectionYaml, PROP_NAME);

  String alertType = MapUtils.getString(detectionYaml, PROP_TYPE);
  if (alertType != null) {
    Preconditions.checkArgument(SUPPORTED_ALERT_TYPES.contains(alertType),
        "Unsupported type (" + alertType + ") in sub-alert " + alertName);
  }
}