Java Code Examples for org.apache.hadoop.conf.Configuration#getValByRegex()

The following examples show how to use org.apache.hadoop.conf.Configuration#getValByRegex(). They are drawn from several open-source projects; the project, source file, and license are noted above each example.
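Before diving into the project examples, here is a minimal, self-contained sketch (written for this page, not taken from any project below) of the basic contract: getValByRegex(regex) scans every key in the Configuration against the regular expression and returns the matching key/value pairs as a Map.

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

public class GetValByRegexDemo {
  public static void main(String[] args) {
    // Pass false to skip loading core-default.xml / core-site.xml.
    Configuration conf = new Configuration(false);
    conf.set("timeline.metrics.downsampler.topn.value", "10");
    conf.set("timeline.metrics.downsampler.topn.function", "max");
    conf.set("unrelated.key", "ignored");

    // Matching uses Matcher.find(), so the pattern may match anywhere
    // inside a key; anchor with ^ and escape dots for true prefix matching.
    Map<String, String> matches =
        conf.getValByRegex("^timeline\\.metrics\\.downsampler\\..*");

    matches.forEach((key, value) -> System.out.println(key + " = " + value));
    // Prints the two downsampler entries; "unrelated.key" does not match.
  }
}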
Example 1
Source File: DownSamplerUtils.java    From ambari-metrics with Apache License 2.0
/**
 * Get the list of downsamplers that are configured in ams-site.
 * Sample config:
 * <pre>
 *  <name>timeline.metrics.downsampler.topn.metric.patterns</name>
 *  <value>dfs.NNTopUserOpCounts.windowMs=60000.op%,dfs.NNTopUserOpCounts.windowMs=300000.op%</value>
 *
 *  <name>timeline.metrics.downsampler.topn.value</name>
 *  <value>10</value>
 *
 *  <name>timeline.metrics.downsampler.topn.function</name>
 *  <value>max</value>
 * </pre>
 * @param configuration the ams-site configuration to read downsampler settings from
 * @return the list of configured downsamplers (empty if none are configured)
 */
public static List<CustomDownSampler> getDownSamplers(Configuration configuration) {

  Map<String, String> conf = configuration.getValByRegex(downSamplerConfigPrefix + "*");
  List<CustomDownSampler> downSamplers = new ArrayList<>();
  Set<String> keys = conf.keySet();

  try {
    for (String key : keys) {
      if (key.startsWith(downSamplerConfigPrefix) && key.endsWith(downSamplerMetricPatternsConfig)) {
        String type = key.split("\\.")[3];
        CustomDownSampler downSampler = getDownSamplerByType(type, conf);
        if (downSampler != null) {
          downSamplers.add(downSampler);
        }
      }
    }
  } catch (Exception e) {
    LOG.warn("Exception caught while parsing downsampler configs from ams-site : " + e.getMessage());
  }
  return downSamplers;
}
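A note on the pattern in this and the other ambari-metrics examples: downSamplerConfigPrefix + "*" looks like a glob but is compiled as a regular expression. The trailing * only makes the prefix's last character optional, and because getValByRegex() matches with Matcher.find() (and the unescaped dots match any character), the call effectively returns every key containing the prefix; the startsWith/endsWith checks above do the real filtering. A stricter, anchored variant would be (a sketch, not from the original source):

// Hypothetical stricter variant: quote the prefix and anchor the pattern
// so only keys that truly start with the prefix are returned.
Map<String, String> conf = configuration.getValByRegex(
    "^" + java.util.regex.Pattern.quote(downSamplerConfigPrefix) + ".*");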
 
Example 2
Source File: DownSamplerUtils.java    From ambari-metrics with Apache License 2.0
/**
 * Get the list of metrics that are requested to be downsampled.
 * @param configuration the ams-site configuration to read downsampler settings from
 * @return List of metric patterns/names that are to be downsampled.
 */
public static List<String> getDownsampleMetricPatterns(Configuration configuration) {
  Map<String, String> conf = configuration.getValByRegex(downSamplerConfigPrefix + "*");
  List<String> metricPatterns = new ArrayList<>();
  Set<String> keys = conf.keySet();
  for (String key : keys) {
    if (key.endsWith(downSamplerMetricPatternsConfig)) {
      String patternString = conf.get(key);
      String[] patterns = StringUtils.split(patternString, ",");
      for (String pattern : patterns) {
        if (StringUtils.isNotEmpty(pattern)) {
          String trimmedPattern = pattern.trim();
          metricPatterns.add(trimmedPattern);
        }
      }
    }
  }
  return metricPatterns;
}
 
Example 3
Source File: DownSamplerTest.java    From ambari-metrics with Apache License 2.0
@Test
public void testPrepareEventDownSamplingStatement() throws Exception {
  Configuration configuration = new Configuration();
  configuration.setIfUnset("timeline.metrics.downsampler.event.metric.patterns", "pattern1,pattern2");

  Map<String, String> conf = configuration.getValByRegex(DownSamplerUtils.downSamplerConfigPrefix);

  EventMetricDownSampler eventMetricDownSampler = EventMetricDownSampler.fromConfig(conf);
  List<String> stmts = eventMetricDownSampler.prepareDownSamplingStatement(14000000L, 14100000L, "METRIC_RECORD_UUID");
  Assert.assertEquals(2, stmts.size());

  Assert.assertTrue(stmts.get(0).equals("SELECT METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, 14100000 AS SERVER_TIME, " +
    "UNITS, SUM(METRIC_SUM), SUM(METRIC_COUNT), MAX(METRIC_MAX), MIN(METRIC_MIN) FROM METRIC_RECORD_UUID WHERE METRIC_NAME " +
    "LIKE 'pattern1' AND SERVER_TIME > 14000000 AND SERVER_TIME <= 14100000 GROUP BY METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, UNITS"));

  Assert.assertTrue(stmts.get(1).equals("SELECT METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, 14100000 AS SERVER_TIME, " +
    "UNITS, SUM(METRIC_SUM), SUM(METRIC_COUNT), MAX(METRIC_MAX), MIN(METRIC_MIN) FROM METRIC_RECORD_UUID WHERE METRIC_NAME " +
    "LIKE 'pattern2' AND SERVER_TIME > 14000000 AND SERVER_TIME <= 14100000 GROUP BY METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, UNITS"));
}
 
Example 4
Source File: GeoWaveConfiguratorBase.java    From geowave with Apache License 2.0
private static DataTypeAdapter<?>[] getDataAdaptersInternal(
    final Class<?> implementingClass,
    final Configuration configuration) {
  final Map<String, String> input =
      configuration.getValByRegex(
          enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER) + "*");
  if (input != null) {
    final List<DataTypeAdapter<?>> adapters = new ArrayList<>(input.size());
    for (final String dataAdapterStr : input.values()) {
      final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(dataAdapterStr);
      adapters.add((DataTypeAdapter<?>) PersistenceUtils.fromBinary(dataAdapterBytes));
    }
    return adapters.toArray(new DataTypeAdapter[adapters.size()]);
  }
  return new DataTypeAdapter[] {};
}
 
Example 5
Source File: GeoWaveConfiguratorBase.java    From geowave with Apache License 2.0
public static void setStoreOptionsMap(
    final Class<?> implementingClass,
    final Configuration config,
    final Map<String, String> dataStoreOptions) {
  if ((dataStoreOptions != null) && !dataStoreOptions.isEmpty()) {
    for (final Entry<String, String> entry : dataStoreOptions.entrySet()) {
      config.set(
          enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION, entry.getKey()),
          entry.getValue());
    }
  } else {
    final Map<String, String> existingVals =
        config.getValByRegex(
            enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + "*");
    for (final String k : existingVals.keySet()) {
      config.unset(k);
    }
  }
}
 
Example 6
Source File: ShimConfigsLoader.java    From pentaho-hadoop-shims with Apache License 2.0
public static Map<String, String> parseFile( NamedCluster namedCluster, String fileName ) {
  Configuration c = new Configuration();
  if ( namedCluster != null ) {
    InputStream is = namedCluster.getSiteFileInputStream( fileName );
    if ( is != null ) {
      c.addResource( is, fileName );
      return c.getValByRegex( ".*" );
    }
  }
  return null;
}
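Here the pattern ".*" matches every key, so getValByRegex() simply dumps the fully parsed site file into a flat Map. A rough usage sketch, with the NamedCluster instance and the file name assumed for illustration:

// Hypothetical usage; "core-site.xml" is just an illustrative file name.
Map<String, String> coreSite = ShimConfigsLoader.parseFile( cluster, "core-site.xml" );
if ( coreSite != null ) {
  String defaultFs = coreSite.get( "fs.defaultFS" );
}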
 
Example 7
Source File: GeoWaveConfiguratorBase.java    From geowave with Apache License 2.0
private static Index[] getIndicesInternal(
    final Class<?> implementingClass,
    final Configuration configuration) {
  final Map<String, String> input =
      configuration.getValByRegex(enumToConfKey(implementingClass, GeoWaveConfg.INDEX) + "*");
  if (input != null) {
    final List<Index> indices = new ArrayList<>(input.size());
    for (final String indexStr : input.values()) {
      final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(indexStr);
      indices.add((Index) PersistenceUtils.fromBinary(indexBytes));
    }
    return indices.toArray(new Index[indices.size()]);
  }
  return new Index[] {};
}
 
Example 8
Source File: GeoWaveConfiguratorBase.java    From geowave with Apache License 2.0
private static Map<String, String> getStoreOptionsMapInternal(
    final Class<?> implementingClass,
    final Configuration configuration) {
  final String prefix =
      enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + KEY_SEPARATOR;
  final Map<String, String> enumMap = configuration.getValByRegex(prefix + "*");
  final Map<String, String> retVal = new HashMap<>();
  for (final Entry<String, String> entry : enumMap.entrySet()) {
    final String key = entry.getKey();
    retVal.put(key.substring(prefix.length()), entry.getValue());
  }
  return retVal;
}
 
Example 9
Source File: GeoWaveConfiguratorBase.java    From geowave with Apache License 2.0
private static String getTypeNameInternal(
    final Class<?> implementingClass,
    final Configuration configuration,
    final short internalAdapterId) {
  final String prefix = enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER);
  final Map<String, String> input = configuration.getValByRegex(prefix + "*");
  final String internalAdapterIdStr = Short.toString(internalAdapterId);
  for (final Entry<String, String> e : input.entrySet()) {
    if (e.getValue().equals(internalAdapterIdStr)) {
      return e.getKey().substring(prefix.length() + 1);
    }
  }
  return null;
}
 
Example 10
Source File: HttpServer2.java    From knox with Apache License 2.0
private Map<String, String> setHeaders(Configuration conf) {
  Map<String, String> xFrameParams = new HashMap<>();
  Map<String, String> headerConfigMap =
      conf.getValByRegex(HTTP_HEADER_REGEX);

  xFrameParams.putAll(getDefaultHeaders());
  if(this.xFrameOptionIsEnabled) {
    xFrameParams.put(HTTP_HEADER_PREFIX+X_FRAME_OPTIONS,
        this.xFrameOption.toString());
  }
  xFrameParams.putAll(headerConfigMap);
  return xFrameParams;
}
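Unlike the looser prefix patterns in the earlier examples, HTTP_HEADER_REGEX in Hadoop's HttpServer2 (from which this code derives) is a properly escaped pattern along the lines of "hadoop\\.http\\.header\\..*", so a response header is configured as below (a sketch; the exact constant value is an assumption):

// Sketch: everything after the (assumed) "hadoop.http.header." prefix
// becomes the response header name.
Configuration conf = new Configuration(false);
conf.set("hadoop.http.header.X-Content-Type-Options", "nosniff");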
 
Example 11
Source File: DefaultPreferenceStore.java    From imputationserver with GNU Affero General Public License v3.0
public void load(Configuration configuration) {
	Map<String, String> pairs = configuration.getValByRegex("cloudgene.*");
	for (String key : pairs.keySet()) {
		String cleanKey = key.replace("cloudgene.", "");
		String value = pairs.get(key);
		properties.setProperty(cleanKey, value);
	}
}
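A small usage sketch for the loader above (the property name is invented and a default constructor is assumed): keys matching "cloudgene.*" are copied into the backing Properties with the "cloudgene." prefix stripped. Note that String.replace removes every occurrence of "cloudgene.", not only a leading one, which is harmless as long as that substring never appears later in a key.

Configuration configuration = new Configuration(false);
configuration.set("cloudgene.database.user", "admin"); // invented key

DefaultPreferenceStore store = new DefaultPreferenceStore();
store.load(configuration);
// The store now holds "database.user" -> "admin".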
 
Example 12
Source File: HttpServer2.java    From lucene-solr with Apache License 2.0
private Map<String, String> setHeaders(Configuration conf) {
  Map<String, String> xFrameParams = new HashMap<>();
  Map<String, String> headerConfigMap =
      conf.getValByRegex(HTTP_HEADER_REGEX);

  xFrameParams.putAll(getDefaultHeaders());
  if(this.xFrameOptionIsEnabled) {
    xFrameParams.put(HTTP_HEADER_PREFIX+X_FRAME_OPTIONS,
        this.xFrameOption.toString());
  }
  xFrameParams.putAll(headerConfigMap);
  return xFrameParams;
}
 
Example 13
Source File: DownSamplerTest.java    From ambari-metrics with Apache License 2.0
@Ignore
@Test
public void testPrepareTopNDownSamplingStatement() throws Exception {
  Configuration configuration = new Configuration();
  configuration.setIfUnset("timeline.metrics.downsampler.topn.metric.patterns", "pattern1,pattern2");
  configuration.setIfUnset("timeline.metrics.downsampler.topn.value", "3");

  Map<String, String> conf = configuration.getValByRegex(DownSamplerUtils.downSamplerConfigPrefix);

  TopNDownSampler topNDownSampler = TopNDownSampler.fromConfig(conf);
  List<String> stmts = topNDownSampler.prepareDownSamplingStatement(14000000L, 14100000L, "METRIC_RECORD_UUID");
  Assert.assertEquals(2, stmts.size());
  Assert.assertTrue(stmts.contains("SELECT METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, 14100000 AS SERVER_TIME, UNITS, " +
    "MAX(METRIC_MAX), 1, MAX(METRIC_MAX), MAX(METRIC_MAX) FROM METRIC_RECORD_UUID WHERE " +
    "METRIC_NAME LIKE 'pattern1' AND SERVER_TIME > 14000000 AND SERVER_TIME <= 14100000 " +
    "GROUP BY METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, UNITS ORDER BY MAX(METRIC_MAX) DESC LIMIT 3"));

  Assert.assertTrue(stmts.contains("SELECT METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, 14100000 AS SERVER_TIME, UNITS, " +
    "MAX(METRIC_MAX), 1, MAX(METRIC_MAX), MAX(METRIC_MAX) FROM METRIC_RECORD_UUID WHERE " +
    "METRIC_NAME LIKE 'pattern2' AND SERVER_TIME > 14000000 AND SERVER_TIME <= 14100000 " +
    "GROUP BY METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, UNITS ORDER BY MAX(METRIC_MAX) DESC LIMIT 3"));

  configuration.clear();
  configuration.setIfUnset("timeline.metrics.downsampler.topn.metric.patterns", "pattern1");
  configuration.setIfUnset("timeline.metrics.downsampler.topn.value", "4");
  configuration.setIfUnset("timeline.metrics.downsampler.topn.function", "sum");
  conf = configuration.getValByRegex(DownSamplerUtils.downSamplerConfigPrefix);
  topNDownSampler = TopNDownSampler.fromConfig(conf);
  stmts = topNDownSampler.prepareDownSamplingStatement(14000000L, 14100000L, "METRIC_AGGREGATE_MINUTE_UUID");
  Assert.assertEquals(1, stmts.size());

  Assert.assertTrue(stmts.contains("SELECT METRIC_NAME, APP_ID, INSTANCE_ID, 14100000 AS SERVER_TIME, UNITS, " +
    "SUM(METRIC_SUM), 1, SUM(METRIC_SUM), SUM(METRIC_SUM) FROM METRIC_AGGREGATE_MINUTE_UUID WHERE " +
    "METRIC_NAME LIKE 'pattern1' AND SERVER_TIME > 14000000 AND SERVER_TIME <= 14100000 " +
    "GROUP BY METRIC_NAME, APP_ID, INSTANCE_ID, UNITS ORDER BY SUM(METRIC_SUM) DESC LIMIT 4"));
}
 
Example 14
Source File: QueryExecutor.java    From phoenix with Apache License 2.0
/**
 * Execute all scenarios.
 *
 * @param dataModel the data model whose scenarios should be executed
 * @throws Exception if query or scenario execution fails
 */
protected Callable<Void> executeAllScenarios(final DataModel dataModel) throws Exception {
    return new Callable<Void>() {
        @Override public Void call() throws Exception {
            List<DataModelResult> dataModelResults = new ArrayList<>();
            DataModelResult
                    dataModelResult =
                    new DataModelResult(dataModel, PhoenixUtil.getZookeeper());
            ResultManager
                    resultManager =
                    new ResultManager(dataModelResult.getName());

            dataModelResults.add(dataModelResult);
            List<Scenario> scenarios = dataModel.getScenarios();
            Configuration conf = HBaseConfiguration.create();
            Map<String, String> phoenixProperty = conf.getValByRegex("phoenix");
            try {

                for (Scenario scenario : scenarios) {
                    ScenarioResult scenarioResult = new ScenarioResult(scenario);
                    scenarioResult.setPhoenixProperties(phoenixProperty);
                    dataModelResult.getScenarioResult().add(scenarioResult);

                    for (QuerySet querySet : scenario.getQuerySet()) {
                        QuerySetResult querySetResult = new QuerySetResult(querySet);
                        scenarioResult.getQuerySetResult().add(querySetResult);

                        util.executeQuerySetDdls(querySet);
                        if (querySet.getExecutionType() == ExecutionType.SERIAL) {
                            executeQuerySetSerial(dataModelResult, querySet, querySetResult, scenario);
                        } else {
                            executeQuerySetParallel(dataModelResult, querySet, querySetResult, scenario);
                        }
                    }
                    resultManager.write(dataModelResult, ruleApplier);
                }
                resultManager.write(dataModelResults, ruleApplier);
                resultManager.flush();
            } catch (Exception e) {
                LOGGER.error("Scenario throws exception", e);
                throw e;
            }
            return null;
        }
    };
}
 
Example 15
Source File: HadoopCMClusterService.java    From components with Apache License 2.0
private Map<String, String> getConfiguration(String confName) {
    Configuration conf = confs.get(confName);
    return conf.getValByRegex(".*"); //$NON-NLS-1$
}
 
Example 16
Source File: ShimConfigsLoader.java    From pentaho-hadoop-shims with Apache License 2.0
public static Map<String, String> parseFile( URL fileUrl ) {
  Configuration c = new Configuration();
  c.addResource( fileUrl );
  return c.getValByRegex( ".*" );
}