org.apache.commons.configuration2.SubsetConfiguration Java Examples
The following examples show how to use
org.apache.commons.configuration2.SubsetConfiguration.
Each example is taken from an open-source project; the source file, project, and license are noted above the snippet.
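Before the project examples, here is a minimal, self-contained sketch of what SubsetConfiguration does: it exposes the subtree of a parent Configuration whose keys share a common prefix, stripping the prefix and delimiter on reads and adding them back on writes. The class name and property keys (db.url, db.user, db.passwd) below are invented purely for illustration.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration2.MapConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;

public class SubsetConfigurationSketch {
    public static void main(String[] args) {
        // Parent configuration with dotted keys; values are placeholders.
        Map<String, Object> props = new HashMap<>();
        props.put("db.url", "jdbc:postgresql://localhost/demo");
        props.put("db.user", "demo");
        props.put("other.key", "ignored");
        Configuration parent = new MapConfiguration(props);

        // View only the "db" subtree; the prefix and the "." delimiter are stripped.
        Configuration db = new SubsetConfiguration(parent, "db", ".");
        System.out.println(db.getString("url"));   // jdbc:postgresql://localhost/demo
        System.out.println(db.getString("user"));  // demo

        // Writes are delegated back to the parent under the full, prefixed key.
        db.setProperty("passwd", "secret");
        System.out.println(parent.getString("db.passwd")); // secret

        // Configuration.subset(prefix) produces the same kind of view.
        Configuration viaSubset = parent.subset("db");
        System.out.println(viaSubset.getString("url"));
    }
}

Examples #1 and #4 below use the same three-argument constructor against a real configuration file, and Example #15 obtains the view through subset() instead.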
Example #1
Source File: Config.java From bireme with Apache License 2.0
/**
 * Get DebeziumSource configuration.
 *
 * @param debeziumConf An empty {@code SourceConfig}
 * @throws BiremeException miss some required configuration
 */
protected void fetchDebeziumConfig(SourceConfig debeziumConf) throws BiremeException {
    Configuration subConfig = new SubsetConfiguration(config, debeziumConf.name, ".");

    String prefix = subConfig.getString("namespace");
    if (prefix == null) {
        String messages = "Please designate your namespace.";
        logger.fatal(messages);
        throw new BiremeException(messages);
    }

    debeziumConf.type = SourceType.DEBEZIUM;
    debeziumConf.server = subConfig.getString("kafka.server");
    debeziumConf.topic = prefix;
    debeziumConf.groupID = subConfig.getString("kafka.groupid", "bireme");

    if (debeziumConf.server == null) {
        String message = "Please designate server for " + debeziumConf.name + ".";
        logger.fatal(message);
        throw new BiremeException(message);
    }
}
Example #2
Source File: DoctorKafkaConfig.java From doctorkafka with Apache License 2.0
private void initialize() {
    Set<String> clusters = new HashSet<>();
    Iterator<String> keysIterator = configuration.getKeys();
    while (keysIterator.hasNext()) {
        String propertyName = keysIterator.next();
        if (propertyName.startsWith(CLUSTER_PREFIX)) {
            String clusterName = propertyName.split("\\.")[1];
            clusters.add(clusterName);
        }
    }

    clusterConfigurations = new HashMap<>();
    for (String cluster : clusters) {
        SubsetConfiguration subsetConfiguration =
            new SubsetConfiguration(configuration, CLUSTER_PREFIX + cluster + ".");
        clusterConfigurations.put(cluster, new DoctorKafkaClusterConfig(cluster, subsetConfiguration));
    }
}
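Note that, unlike Example #1, this snippet uses the two-argument SubsetConfiguration constructor, which takes no separate delimiter argument; that is why the trailing "." is folded into the prefix string itself (CLUSTER_PREFIX + cluster + ".").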
Example #3
Source File: HadoopTimelineMetricsSink.java From ambari-metrics with Apache License 2.0
/**
 * Return configured serviceName with or without prefix.
 * Default without serviceName or configured prefix : first config prefix
 * With prefix : configured prefix + first config prefix
 * Configured serviceName : Return serviceName as is.
 */
private String getServiceName(SubsetConfiguration conf) {
    String serviceNamePrefix = conf.getString(SERVICE_NAME_PREFIX, "");
    String serviceName = conf.getString(SERVICE_NAME, "");
    return StringUtils.isEmpty(serviceName)
        ? StringUtils.isEmpty(serviceNamePrefix)
            ? getFirstConfigPrefix(conf)
            : serviceNamePrefix + "-" + getFirstConfigPrefix(conf)
        : serviceName;
}
Example #4
Source File: Config.java From bireme with Apache License 2.0
/**
 * Get the connection configuration to database.
 *
 * @param prefix "target" database
 * @throws BiremeException when url of database is null
 */
protected void connectionConfig(String prefix) throws BiremeException {
    Configuration subConfig = new SubsetConfiguration(config, "target", ".");
    targetDatabase = new ConnectionConfig();

    targetDatabase.jdbcUrl = subConfig.getString("url");
    targetDatabase.user = subConfig.getString("user");
    targetDatabase.passwd = subConfig.getString("passwd");

    if (targetDatabase.jdbcUrl == null) {
        String message = "Please designate url for target Database.";
        throw new BiremeException(message);
    }
}
Example #5
Source File: DoctorKafkaConfig.java From doctorkafka with Apache License 2.0
public DoctorKafkaConfig(String configPath) throws Exception {
    try {
        Configurations configurations = new Configurations();
        configuration = configurations.properties(new File(configPath));
        drkafkaConfiguration = new SubsetConfiguration(configuration, DOCTORKAFKA_PREFIX);
        this.initialize();
    } catch (Exception e) {
        LOG.error("Failed to initialize configuration file {}", configPath, e);
    }
}
Example #6
Source File: ConfigurationDynaBean.java From commons-configuration with Apache License 2.0
@Override
public Object get(final String name) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("get(" + name + ")");
    }

    // get configuration property
    Object result = getConfiguration().getProperty(name);
    if (result == null) {
        // otherwise attempt to create bean from configuration subset
        final Configuration subset = new SubsetConfiguration(getConfiguration(), name, PROPERTY_DELIMITER);
        if (!subset.isEmpty()) {
            result = new ConfigurationDynaBean(subset);
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug(name + "=[" + result + "]");
    }

    if (result == null) {
        throw new IllegalArgumentException("Property '" + name + "' does not exist.");
    }
    return result;
}
Example #7
Source File: HadoopTimelineMetricsSinkTest.java From ambari-metrics with Apache License 2.0
@Test
@PrepareForTest({URL.class, OutputStream.class, AbstractTimelineMetricsSink.class,
    HttpURLConnection.class, TimelineMetric.class, HadoopTimelineMetricsSink.class,
    SubsetConfiguration.class})
public void testPutMetrics() throws Exception {
    HadoopTimelineMetricsSink sink = new HadoopTimelineMetricsSink();

    HttpURLConnection connection = PowerMock.createNiceMock(HttpURLConnection.class);
    URL url = PowerMock.createNiceMock(URL.class);
    InputStream is = IOUtils.toInputStream(gson.toJson(Collections.singletonList("localhost")));

    TimelineMetric timelineMetric = PowerMock.createNiceMock(TimelineMetric.class);
    expectNew(TimelineMetric.class).andReturn(timelineMetric).times(2);
    expect(timelineMetric.getMetricValues()).andReturn(new TreeMap<Long, Double>()).anyTimes();
    expect(timelineMetric.getMetricName()).andReturn("metricName").anyTimes();

    expectNew(URL.class, anyString()).andReturn(url).anyTimes();
    expect(url.openConnection()).andReturn(connection).anyTimes();
    expect(connection.getInputStream()).andReturn(is).anyTimes();
    expect(connection.getResponseCode()).andReturn(200).anyTimes();
    OutputStream os = PowerMock.createNiceMock(OutputStream.class);
    expect(connection.getOutputStream()).andReturn(os).anyTimes();

    SubsetConfiguration conf = PowerMock.createNiceMock(SubsetConfiguration.class);
    expect(conf.getString("slave.host.name")).andReturn("localhost").anyTimes();
    expect(conf.getParent()).andReturn(null).anyTimes();
    expect(conf.getPrefix()).andReturn("service").anyTimes();
    expect(conf.getStringArray(eq(COLLECTOR_HOSTS_PROPERTY)))
        .andReturn(new String[]{"localhost", " localhost2"}).anyTimes();
    expect(conf.getString(eq("serviceName-prefix"), eq(""))).andReturn("").anyTimes();
    expect(conf.getString(eq(COLLECTOR_PROTOCOL), eq("http"))).andReturn("http").anyTimes();
    expect(conf.getString(eq(COLLECTOR_PORT), eq("6188"))).andReturn("6188").anyTimes();
    expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();
    expect(conf.getInt(eq(METRICS_SEND_INTERVAL), anyInt())).andReturn(1000).anyTimes();
    expect(conf.getBoolean(eq(SET_INSTANCE_ID_PROPERTY), eq(false))).andReturn(true).anyTimes();
    expect(conf.getString(eq(INSTANCE_ID_PROPERTY), anyString())).andReturn("instanceId").anyTimes();
    expect(conf.getString(eq(HOST_IN_MEMORY_AGGREGATION_PROTOCOL_PROPERTY), anyString()))
        .andReturn("http").anyTimes();

    conf.setListDelimiterHandler(new DefaultListDelimiterHandler(eq(',')));
    expectLastCall().anyTimes();

    expect(conf.getKeys()).andReturn(new Iterator() {
        @Override
        public boolean hasNext() {
            return false;
        }

        @Override
        public Object next() {
            return null;
        }

        @Override
        public void remove() {
        }
    }).once();

    AbstractMetric metric = createNiceMock(AbstractMetric.class);
    expect(metric.name()).andReturn("metricName").anyTimes();
    expect(metric.value()).andReturn(9.5687).anyTimes();
    expect(metric.type()).andReturn(MetricType.COUNTER).anyTimes();
    //TODO currently only numeric metrics are supported

    MetricsRecord record = createNiceMock(MetricsRecord.class);
    expect(record.name()).andReturn("testName").anyTimes();
    expect(record.context()).andReturn("testContext").anyTimes();
    expect(record.timestamp()).andAnswer(new IAnswer<Long>() {
        @Override
        public Long answer() throws Throwable {
            return System.currentTimeMillis();
        }
    }).anyTimes();
    expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();

    timelineMetric.setInstanceId(eq("instanceId"));
    EasyMock.expectLastCall();

    replay(record, metric);
    replayAll();

    sink.init(conf);
    sink.putMetrics(record);

    Thread.sleep(1500L);

    sink.putMetrics(record);

    verifyAll();
}
Example #8
Source File: DoctorKafkaClusterConfig.java From doctorkafka with Apache License 2.0
public Map<String, String> getConsumerConfigurations() {
    AbstractConfiguration sslConfiguration =
        new SubsetConfiguration(clusterConfiguration, CONSUMER_PREFIX);
    return DoctorKafkaConfig.configurationToMap(sslConfiguration);
}
Example #9
Source File: DoctorKafkaConfig.java From doctorkafka with Apache License 2.0
/**
 * This method parses the configuration file and returns the kafka producer ssl setting
 * for writing to brokerstats kafka topic
 */
public Map<String, String> getBrokerStatsConsumerSslConfigs() {
    AbstractConfiguration sslConfiguration =
        new SubsetConfiguration(drkafkaConfiguration, BROKERSTATS_CONSUMER_PREFIX);
    return configurationToMap(sslConfiguration);
}
Example #10
Source File: DoctorKafkaConfig.java From doctorkafka with Apache License 2.0
public Map<String, String> getActionReportProducerSslConfigs() {
    AbstractConfiguration sslConfiguration =
        new SubsetConfiguration(drkafkaConfiguration, ACTION_REPORT_PRODUCER_PREFIX);
    return configurationToMap(sslConfiguration);
}
Example #11
Source File: PhoenixMetricsSink.java From phoenix with Apache License 2.0
@Override
public void init(SubsetConfiguration config) {
    Metrics.markSinkInitialized();
    LOGGER.info("Phoenix tracing writer started");
}
Example #12
Source File: GlobalPhoenixMetricsTestSink.java From phoenix with Apache License 2.0
@Override
public void init(SubsetConfiguration subsetConfiguration) {
}
Example #13
Source File: LoggingSink.java From phoenix with Apache License 2.0
@Override
public void init(SubsetConfiguration config) {
}
Example #14
Source File: ConfigurationDynaBean.java From commons-configuration with Apache License 2.0
@Override
public void remove(final String name, final String key) {
    final Configuration subset = new SubsetConfiguration(getConfiguration(), name, PROPERTY_DELIMITER);
    subset.setProperty(key, null);
}
Example #15
Source File: TestSubsetConfigurationEvents.java From commons-configuration with Apache License 2.0
@Override
protected AbstractConfiguration createConfiguration() {
    return (SubsetConfiguration) new MapConfiguration(new HashMap<String, Object>()).subset("test");
}
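The cast here is safe because AbstractConfiguration.subset(prefix) returns a SubsetConfiguration view of the parent, in this case a MapConfiguration wrapping an empty HashMap.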
Example #16
Source File: PrometheusMetricsSink.java From hadoop-ozone with Apache License 2.0
@Override
public void init(SubsetConfiguration subsetConfiguration) {
}