com.typesafe.config.ConfigFactory Java Examples
The following examples show how to use
com.typesafe.config.ConfigFactory.
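Before the project-specific examples, here is a minimal sketch of the ConfigFactory entry points that recur below. It is illustrative only; the keys, file names, and resource names are assumptions, not taken from any of the projects.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

public class ConfigFactoryBasics {
  public static void main(String[] args) {
    // Load application.conf / reference.conf from the classpath.
    Config loaded = ConfigFactory.load();

    // Build a Config from an in-memory map (a common pattern in the unit tests below).
    Map<String, Object> map = new HashMap<>();
    map.put("some.key", "value");   // illustrative key
    Config fromMap = ConfigFactory.parseMap(map);

    // Start from an empty Config and add values programmatically.
    Config built = ConfigFactory.empty()
        .withValue("another.key", ConfigValueFactory.fromAnyRef(42));   // illustrative key

    // Parse a specific file or classpath resource.
    Config fromFile = ConfigFactory.parseFile(new File("my-app.conf"));    // assumed path
    Config fromResource = ConfigFactory.parseResources("test.conf");       // assumed resource

    // withFallback() layers configs; the config it is called on takes precedence.
    Config merged = fromMap.withFallback(loaded);
    System.out.println(merged.root().render());
  }
}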
Example #1
Source File: TestEnumRowRule.java From envelope with Apache License 2.0
@Test
public void testLongEnums() {
  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("age"));
  configMap.put("fieldtype", "long");
  configMap.put("values", Lists.newArrayList(34L, 42L, 111L));
  Config config = ConfigFactory.parseMap(configMap);

  EnumRowRule rule = new EnumRowRule();
  assertNoValidationFailures(rule, config);
  rule.configure(config);
  rule.configureName("scorecheck");

  Row row1 = new RowWithSchema(SCHEMA, "Ian", "Ian", 34L, new BigDecimal("0.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(SCHEMA, "Webster", "Websta", 110L, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row2));
}
Example #2
Source File: TestStreamDefinitionDAOImpl.java From Eagle with Apache License 2.0
@Test
public void test() throws Exception {
  Config config = ConfigFactory.load();
  AlertStreamSchemaDAO dao = new AlertStreamSchemaDAOImpl(null, null) {
    public List<AlertStreamSchemaEntity> findAlertStreamSchemaByDataSource(String dataSource) throws Exception {
      List<AlertStreamSchemaEntity> list = new ArrayList<AlertStreamSchemaEntity>();
      String programId = "UnitTest";
      list.add(buildTestStreamDefEntity(programId, "TestStream", "Attr1"));
      list.add(buildTestStreamDefEntity(programId, "TestStream", "Attr2"));
      list.add(buildTestStreamDefEntity(programId, "TestStream", "Attr3"));
      list.add(buildTestStreamDefEntity(programId, "TestStream", "Attr4"));
      return list;
    }
  };
  StreamMetadataManager.getInstance().init(config, dao);
  Map<String, List<AlertStreamSchemaEntity>> retMap =
      StreamMetadataManager.getInstance().getMetadataEntitiesForAllStreams();
  Assert.assertTrue(retMap.get("TestStream").size() == 4);
  StreamMetadataManager.getInstance().reset();
}
Example #3
Source File: LimiterServerResourceTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testMetrics() throws Exception {
  ThrottlingGuiceServletConfig guiceServletConfig = new ThrottlingGuiceServletConfig();
  guiceServletConfig.initialize(ConfigFactory.empty());
  Injector injector = guiceServletConfig.getInjector();

  LimiterServerResource limiterServer = injector.getInstance(LimiterServerResource.class);

  PermitRequest request = new PermitRequest();
  request.setPermits(10);
  request.setResource("myResource");

  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));
  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));
  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));

  MetricContext metricContext = limiterServer.metricContext;
  Timer timer = metricContext.timer(LimiterServerResource.REQUEST_TIMER_NAME);
  Assert.assertEquals(timer.getCount(), 3);
}
Example #4
Source File: JobSpecResolverTest.java From incubator-gobblin with Apache License 2.0
@Test
public void test() throws Exception {
  Config sysConfig = ConfigFactory.empty();

  JobTemplate jobTemplate = Mockito.mock(JobTemplate.class);
  Mockito.when(jobTemplate.getResolvedConfig(Mockito.any(Config.class))).thenAnswer(i -> {
    Config userConfig = (Config) i.getArguments()[0];
    return ConfigFactory.parseMap(ImmutableMap.of("template.value", "foo")).withFallback(userConfig);
  });

  JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
  Mockito.when(catalog.getTemplate(Mockito.eq(URI.create("my://template")))).thenReturn(jobTemplate);

  JobSpecResolver resolver = JobSpecResolver.builder(sysConfig).jobCatalog(catalog).build();
  JobSpec jobSpec = JobSpec.builder()
      .withConfig(ConfigFactory.parseMap(ImmutableMap.of("key", "value")))
      .withTemplate(URI.create("my://template")).build();

  ResolvedJobSpec resolvedJobSpec = resolver.resolveJobSpec(jobSpec);
  Assert.assertEquals(resolvedJobSpec.getOriginalJobSpec(), jobSpec);
  Assert.assertEquals(resolvedJobSpec.getConfig().entrySet().size(), 2);
  Assert.assertEquals(resolvedJobSpec.getConfig().getString("key"), "value");
  Assert.assertEquals(resolvedJobSpec.getConfig().getString("template.value"), "foo");
}
Example #5
Source File: SchedulerUtilsTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testloadGenericJobConfig() throws ConfigurationException, IOException {
  Path jobConfigPath = new Path(this.subDir11.getAbsolutePath(), "test111.pull");
  Properties properties = new Properties();
  properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getAbsolutePath());
  Properties jobProps = SchedulerUtils.loadGenericJobConfig(properties, jobConfigPath,
      new Path(this.jobConfigDir.getAbsolutePath()), JobSpecResolver.builder(ConfigFactory.empty()).build());

  Assert.assertEquals(jobProps.stringPropertyNames().size(), 7);
  Assert.assertTrue(jobProps.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY)
      || jobProps.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
  Assert.assertTrue(jobProps.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
  Assert.assertEquals(jobProps.getProperty("k1"), "d1");
  Assert.assertEquals(jobProps.getProperty("k2"), "a2");
  Assert.assertEquals(jobProps.getProperty("k3"), "a3");
  Assert.assertEquals(jobProps.getProperty("k8"), "a8");
  Assert.assertEquals(jobProps.getProperty("k9"), "a8");
}
Example #6
Source File: TestHashDeriver.java From envelope with Apache License 2.0
@Test
public void testCustomNullStringHash() {
  Map<String, Dataset<Row>> dependencies = Maps.newHashMap();
  dependencies.put("dep1", testDataFrame());

  Map<String, Object> configMap = Maps.newHashMap();
  configMap.put(HashDeriver.NULL_STRING_CONFIG, "");
  Config config = ConfigFactory.parseMap(configMap);

  HashDeriver d = new HashDeriver();
  assertNoValidationFailures(d, config);
  d.configure(config);

  Dataset<Row> derived = d.derive(dependencies);

  assertEquals(1, derived.count());
  assertEquals(testDataFrame().schema().size() + 1, derived.schema().size());
  assertTrue(Lists.newArrayList(derived.schema().fieldNames()).contains(HashDeriver.DEFAULT_HASH_FIELD_NAME));
  assertEquals(
      "862ff0dc2acce97b6f8bd6c369df2668",
      derived.collectAsList().get(0).get(derived.schema().size() - 1));
}
Example #7
Source File: TestSystemTimeUpsertPlanner.java From envelope with Apache License 2.0
@Test
public void testLastUpdated() {
  Map<String, Object> configMap = Maps.newHashMap();
  configMap.put(SystemTimeUpsertPlanner.LAST_UPDATED_FIELD_NAME_CONFIG_NAME, "lastupdated");
  Config config = ConfigFactory.parseMap(configMap);

  SystemTimeUpsertPlanner planner = new SystemTimeUpsertPlanner();
  assertNoValidationFailures(planner, config);
  planner.configure(config);

  List<Tuple2<MutationType, Dataset<Row>>> planned = planner.planMutationsForSet(dataFrame);
  assertEquals(planned.size(), 1);

  Dataset<Row> plannedDF = planned.get(0)._2();
  assertEquals(plannedDF.count(), 1);

  Row plannedRow = plannedDF.collectAsList().get(0);
  assertEquals(plannedRow.length(), 2);
}
Example #8
Source File: TestInListDeriver.java From envelope with Apache License 2.0
@Test
public void testMissingField() throws Exception {
  Dataset<Row> source = createTestDataframe();
  List<String> inListLiteral = Arrays.asList("1", "2", "3");

  Map<String, Dataset<Row>> dependencies = new HashMap<>();
  dependencies.put("df1", source);

  Config config = ConfigFactory.empty()
      .withValue(InListDeriver.INLIST_STEP_CONFIG, ConfigValueFactory.fromAnyRef("df1"))
      .withValue(InListDeriver.INLIST_VALUES_CONFIG, ConfigValueFactory.fromIterable(inListLiteral));

  InListDeriver deriver = new InListDeriver();
  assertNoValidationFailures(deriver, config);
  deriver.configure(config);

  // Doesn't return anything because the IN values don't match the target column values
  List<Row> results = deriver.derive(dependencies).select("value").collectAsList();
  assertThat(results.size(), is(0));
}
Example #9
Source File: StaticJobTemplateTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testSecure() throws Exception {
  Map<String, Object> confMap = Maps.newHashMap();
  confMap.put("nonOverridableKey", "value1");
  confMap.put("overridableKey", "value1");
  confMap.put(StaticJobTemplate.IS_SECURE_KEY, true);
  confMap.put(StaticJobTemplate.SECURE_OVERRIDABLE_PROPERTIES_KEYS, "overridableKey, overridableKey2");

  StaticJobTemplate template = new StaticJobTemplate(URI.create("my://template"), "1", "desc",
      ConfigFactory.parseMap(confMap), (JobCatalogWithTemplates) null);

  Config userConfig = ConfigFactory.parseMap(ImmutableMap.of(
      "overridableKey", "override",
      "overridableKey2", "override2",
      "nonOverridableKey", "override",
      "somethingElse", "override"));
  Config resolved = template.getResolvedConfig(userConfig);

  Assert.assertEquals(resolved.entrySet().size(), 5);
  Assert.assertEquals(resolved.getString("nonOverridableKey"), "value1");
  Assert.assertEquals(resolved.getString("overridableKey"), "override");
  Assert.assertEquals(resolved.getString("overridableKey2"), "override2");
  Assert.assertFalse(resolved.hasPath("somethingElse"));
}
Example #10
Source File: VcapServicesStringParser.java From ditto with Eclipse Public License 2.0
@Override
public Config apply(final String systemVcapServices) {
  checkNotNull(systemVcapServices, "system VCAP services string");
  if (systemVcapServices.isEmpty()) {
    return ConfigFactory.empty();
  }

  final Config vcapServicesConfig = tryToParseString(systemVcapServices);
  final Set<Map.Entry<String, ConfigValue>> vcapServicesConfigEntries = vcapServicesConfig.entrySet();

  final Map<String, Object> result = new HashMap<>(vcapServicesConfigEntries.size());
  for (final Map.Entry<String, ConfigValue> serviceConfigEntry : vcapServicesConfigEntries) {
    result.put(serviceConfigEntry.getKey(), convertConfigListToConfigObject(serviceConfigEntry.getValue()));
  }
  return ConfigFactory.parseMap(result);
}
Example #11
Source File: TestImpalaMetadataTask.java From envelope with Apache License 2.0
@Test
public void testBuildKerberosAuthConnectionString() {
  System.setProperty("java.security.krb5.conf", ClassLoader.getSystemResource("krb5.conf").getPath());

  Map<String, Object> configMap = new HashMap<>();
  configMap.put(HOST_CONFIG, "testhost");
  configMap.put(QUERY_TYPE_CONFIG, "refresh");
  configMap.put(QUERY_TABLE_CONFIG, "testtable");
  configMap.put(AUTH_CONFIG, "kerberos");
  configMap.put(KEYTAB_CONFIG, "foo.kt");
  configMap.put(USER_PRINC_CONFIG, "user");
  Config config = ConfigFactory.parseMap(configMap);

  ImpalaMetadataTask metadataTask = new ImpalaMetadataTask();
  metadataTask.configure(config);

  String connectionString = metadataTask.buildConnectionString();
  assertEquals("jdbc:hive2://testhost:21050/;principal=impala/[email protected]"
      + ";auth=kerberos;kerberosAuthType=fromSubject", connectionString);
}
Example #12
Source File: RestliLimiterFactoryTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testFactory() throws Exception {
  SharedResourcesBroker<SimpleScopeType> broker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(
      ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());

  MyRequestSender requestSender = new MyRequestSender();
  broker.bindSharedResourceAtScope(new RedirectAwareRestClientRequestSender.Factory<>(),
      new SharedRestClientKey(RestliLimiterFactory.RESTLI_SERVICE_NAME), SimpleScopeType.GLOBAL, requestSender);

  RestliServiceBasedLimiter limiter =
      broker.getSharedResource(new RestliLimiterFactory<>(), new SharedLimiterKey("my/resource"));

  Assert.assertNotNull(limiter.acquirePermits(10));
  Assert.assertEquals(requestSender.requestList.size(), 1);

  broker.close();
}
Example #13
Source File: MetadataServiceClientImplTest.java From eagle with Apache License 2.0
@Ignore
@Test
public void addScheduleState() throws Exception {
  ConfigFactory.invalidateCaches();
  System.setProperty("config.resource", "/test-application.conf");
  Config config = ConfigFactory.load("test-application.conf").getConfig("coordinator");
  MetadataServiceClientImpl client = new MetadataServiceClientImpl(config);

  ScheduleState ss = new ScheduleState();
  ss.setVersion("spec_version_1463764252582");
  client.addScheduleState(ss);
  client.close();

  ss.setVersion("spec_version_1464764252582");
  ZKConfig zkConfig = ZKConfigBuilder.getZKConfig(config);
  ConfigBusProducer producer = new ConfigBusProducer(zkConfig);
  Coordinator.postSchedule(client, ss, producer);
}
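A note on the caching behavior this example relies on: ConfigFactory.load() caches the default configuration per ClassLoader, so code that changes config.resource (or other config.* system properties) at runtime typically calls invalidateCaches() before loading. A minimal sketch of that pattern, with an assumed resource name:

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ReloadAfterPropertyChange {
  public static Config reload() {
    // "test-application.conf" is an assumed classpath resource, not from any project above.
    System.setProperty("config.resource", "test-application.conf");
    ConfigFactory.invalidateCaches();   // drop any previously cached default config
    return ConfigFactory.load();        // re-resolves using the new system property
  }
}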
Example #14
Source File: HttpDatasetDescriptorTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testContains() throws IOException {
  Config config1 = ConfigFactory.empty()
      .withValue(DatasetDescriptorConfigKeys.PLATFORM_KEY, ConfigValueFactory.fromAnyRef("https"))
      .withValue(DatasetDescriptorConfigKeys.PATH_KEY, ConfigValueFactory.fromAnyRef("https://a.com/b"));
  HttpDatasetDescriptor descriptor1 = new HttpDatasetDescriptor(config1);

  // Verify that same path points to same dataset
  Config config2 = ConfigFactory.empty()
      .withValue(DatasetDescriptorConfigKeys.PLATFORM_KEY, ConfigValueFactory.fromAnyRef("https"))
      .withValue(DatasetDescriptorConfigKeys.PATH_KEY, ConfigValueFactory.fromAnyRef("https://a.com/b"));
  HttpDatasetDescriptor descriptor2 = new HttpDatasetDescriptor(config2);
  Assert.assertTrue(descriptor2.contains(descriptor1));

  // Verify that same path but different platform points to different dataset
  Config config3 = ConfigFactory.empty()
      .withValue(DatasetDescriptorConfigKeys.PLATFORM_KEY, ConfigValueFactory.fromAnyRef("http"))
      .withValue(DatasetDescriptorConfigKeys.PATH_KEY, ConfigValueFactory.fromAnyRef("https://a.com/b"));
  HttpDatasetDescriptor descriptor3 = new HttpDatasetDescriptor(config3);
  Assert.assertFalse(descriptor3.contains(descriptor1));
}
Example #15
Source File: PersistenceActorTestBase.java From ditto with Eclipse Public License 2.0
protected void setup(final Config customConfig) {
  requireNonNull(customConfig, "Consider to use ConfigFactory.empty()");
  final Config config = customConfig.withFallback(ConfigFactory.load("test"));
  actorSystem = ActorSystem.create("AkkaTestSystem", config);
  pubSubTestProbe = TestProbe.apply("mock-pubSub-mediator", actorSystem);
  pubSubMediator = pubSubTestProbe.ref();
  dittoHeadersV1 = createDittoHeadersMock(JsonSchemaVersion.V_1, "test:" + AUTH_SUBJECT);
  dittoHeadersV2 = createDittoHeadersMock(JsonSchemaVersion.V_2, "test:" + AUTH_SUBJECT);
}
Example #16
Source File: ClickhouseSinkScheduledCheckerTest.java From flink-clickhouse-sink with MIT License
@Before
public void setUp() throws Exception {
  Config config = ConfigFactory.load();
  Map<String, String> params = ConfigUtil.toMap(config);
  params.put(ClickhouseClusterSettings.CLICKHOUSE_USER, "");
  params.put(ClickhouseClusterSettings.CLICKHOUSE_PASSWORD, "");
  params.put(ClickhouseClusterSettings.CLICKHOUSE_HOSTS, "http://localhost:8123");

  ClickhouseSinkCommonParams commonParams = new ClickhouseSinkCommonParams(params);
  checker = new ClickhouseSinkScheduledChecker(commonParams);
  MockitoAnnotations.initMocks(this);
}
Example #17
Source File: SchedulerTest.java From eagle with Apache License 2.0
@Test
public void testIrregularPolicyParallelismHint() {
  Config config = ConfigFactory.load();
  int defaultParallelism = config.getInt("coordinator.policyDefaultParallelism");
  TestTopologyMgmtService mgmtService = new TestTopologyMgmtService(5, 12);
  InMemScheduleConext context = createScheduleContext(mgmtService);
  // recreate test policy
  context.getPolicies().clear();
  // make the hint bigger than bolt number
  int irregularParallelism = defaultParallelism + 2;
  createSamplePolicy(context, "irregularPolicy", STREAM1, irregularParallelism);
  GreedyPolicyScheduler ps = new GreedyPolicyScheduler();
  ps.init(context, mgmtService);

  ScheduleState scheduled = ps.schedule(new ScheduleOption());
  Assert.assertEquals(2, scheduled.getSpoutSpecs().size());
  Assert.assertEquals(2, scheduled.getGroupSpecs().size());
  Assert.assertEquals(2, scheduled.getAlertSpecs().size());

  // assertion
  RouterSpec spec = scheduled.getGroupSpecs().get(TOPO1);
  Assert.assertTrue(spec.getRouterSpecs().size() > 0); // must be allocated
  for (StreamRouterSpec routerSpec : spec.getRouterSpecs()) {
    Assert.assertEquals(1, routerSpec.getTargetQueue().size());
    // irregularParallelism is promoted to 2 * defaultParallelism = 10
    Assert.assertEquals(10, routerSpec.getTargetQueue().get(0).getWorkers().size());
  }
}
Example #18
Source File: TrainWorker.java From ytk-learn with MIT License
public String getURI() {
  String configRealPath = (new File(configFile).exists()) ? configFile : configPath;
  File realFile = new File(configRealPath);
  CheckUtils.check(realFile.exists(), "config file(%s) doesn't exist!", configRealPath);
  Config config = ConfigFactory.parseFile(realFile);
  config = updateConfigWithCustom(config);
  return config.getString("fs_scheme");
}
Example #19
Source File: DatePartitionedHiveVersionFinderTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testMultiplePartitionFields() throws Exception {
  DatePartitionHiveVersionFinder versionFinder = new DatePartitionHiveVersionFinder(this.fs, ConfigFactory.empty());
  String tableName = "VfTb3";
  Table tbl = this.hiveMetastoreTestUtils.createTestAvroTable(dbName, tableName,
      ImmutableList.of("datepartition", "field1"));

  org.apache.hadoop.hive.metastore.api.Partition tp = this.hiveMetastoreTestUtils.addTestPartition(tbl,
      ImmutableList.of("2016-01-01-20", "f1"), (int) System.currentTimeMillis());
  Partition partition = new Partition(new org.apache.hadoop.hive.ql.metadata.Table(tbl), tp);

  assertThat(partition.getName(), anyOf(is("field1=f1/datepartition=2016-01-01-20"),
      is("datepartition=2016-01-01-20/field1=f1")));
  TimestampedHiveDatasetVersion dv = versionFinder.getDatasetVersion(partition);
  Assert.assertEquals(dv.getDateTime(), formatter.parseDateTime("2016/01/01/20"));
}
Example #20
Source File: MultiVersionCleanableDatasetBase.java From incubator-gobblin with Apache License 2.0
public MultiVersionCleanableDatasetBase(final FileSystem fs, final Properties props, Logger log) throws IOException {
  // This constructor is used by retention jobs configured through job configs and do not use dataset configs from config store.
  // IS_DATASET_BLACKLISTED_KEY is only available with dataset config. Hence set IS_DATASET_BLACKLISTED_KEY to default
  // ...false for jobs running with job configs
  this(fs, props, ConfigFactory.parseMap(ImmutableMap.<String, String> of(IS_DATASET_BLACKLISTED_KEY,
      IS_DATASET_BLACKLISTED_DEFAULT)), log);
}
Example #21
Source File: XTraceSettings.java From tracing-framework with BSD 3-Clause "New" or "Revised" License
public XTraceSettings() {
  Config config = ConfigFactory.load();
  on = config.getBoolean("xtrace.client.reporting.on");
  defaultEnabled = config.getBoolean("xtrace.client.reporting.default");
  discoveryMode = config.getBoolean("xtrace.client.reporting.discoverymode");
  traceMain = config.getBoolean("xtrace.client.tracemain");
  defaultLoggingLevel = XTraceLoggingLevel.valueOf(config.getString("xtrace.client.reporting.default_level").toUpperCase());
  mainMethodLoggingLevel = XTraceLoggingLevel.valueOf(config.getString("xtrace.client.tracemain_level").toUpperCase());
  classesEnabled = Sets.newHashSet(config.getStringList("xtrace.client.reporting.enabled"));
  classesDisabled = Sets.newHashSet(config.getStringList("xtrace.client.reporting.disabled"));
  recycleThreshold = config.getInt("xtrace.client.recycle-threshold");
}
Example #22
Source File: StoreConfig.java From distkv with BSD 3-Clause "New" or "Revised" License
public static StoreConfig create() {
  ConfigFactory.invalidateCaches();
  Config config = ConfigFactory.systemProperties();
  String configPath = System.getProperty("distkv.store.config");
  if (Strings.isNullOrEmpty(configPath)) {
    LOG.info("Loading config from \"store.conf\" file in classpath.");
    config = config.withFallback(ConfigFactory.load(CUSTOM_CONFIG_FILE));
  } else {
    LOG.info("Loading config from " + configPath + ".");
    config = config.withFallback(ConfigFactory.parseFile(new File(configPath)));
  }
  config = config.withFallback(ConfigFactory.load(DEFAULT_CONFIG_FILE));
  return new StoreConfig(config);
}
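Example #22 shows the usual precedence idiom: the Config queried first wins, and each withFallback() call chains in a lower-priority source. A minimal hedged sketch of that layering, with assumed file and resource names:

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

import java.io.File;

public class LayeredConfigSketch {
  public static Config load() {
    // Highest priority first: JVM system properties, then an optional external file,
    // then packaged defaults; each withFallback() adds a lower-priority layer.
    return ConfigFactory.systemProperties()
        .withFallback(ConfigFactory.parseFile(new File("/etc/myapp/store.conf")))  // assumed path
        .withFallback(ConfigFactory.load("store-defaults"));                       // assumed resource name
  }
}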
Example #23
Source File: ValidatedConfigurationTest.java From divolte-collector with Apache License 2.0
@Test
public void allMappingsForConfluentSinksMustHaveConfluentId() {
  final ValidatedConfiguration vc = new ValidatedConfiguration(
      () -> ConfigFactory.parseResources("kafka-sink-confluent-partially-without-confluent-id.conf"));
  assertFalse(vc.isValid());
  assertEquals(1, vc.errors().size());
  assertTrue(
      vc.errors()
          .get(0)
          .startsWith("Property 'divolte.' Mappings used by sinks in Confluent-mode must have their 'confluent_id' attribute set. The following mappings are missing this: [test-2]..")
  );
}
Example #24
Source File: SelfHealingEngineTest.java From healenium-web with Apache License 2.0
@BeforeEach
public void createDriver() {
  ChromeOptions options = new ChromeOptions();
  options.setHeadless(true);
  WebDriver delegate = new ChromeDriver(options);
  Config config = ConfigFactory.parseResources("test.conf")
      .withValue("heal-enabled", ConfigValueFactory.fromAnyRef(true)).resolve();
  SelfHealingEngine engine = new SelfHealingEngine(delegate, config);
  driver = SelfHealingDriver.create(engine);
}
Example #25
Source File: JobConfigurationManagerTest.java From incubator-gobblin with Apache License 2.0
@BeforeClass
public void setUp() throws IOException {
  this.eventBus.register(this);

  // Prepare the test job configuration files
  Assert.assertTrue(this.jobConfigFileDir.mkdirs(), "Failed to create " + this.jobConfigFileDir);

  Closer closer = Closer.create();
  try {
    for (int i = 0; i < NUM_JOB_CONFIG_FILES; i++) {
      File jobConfigFile = new File(this.jobConfigFileDir, "test" + i + ".job");
      Assert.assertTrue(jobConfigFile.createNewFile());
      Properties properties = new Properties();
      properties.setProperty("foo", "bar" + i);
      properties.store(closer.register(Files.newWriter(jobConfigFile, ConfigurationKeys.DEFAULT_CHARSET_ENCODING)), "");
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }

  Config config = ConfigFactory.empty().withValue(GobblinClusterConfigurationKeys.JOB_CONF_PATH_KEY,
      ConfigValueFactory.fromAnyRef(JOB_CONFIG_DIR_NAME));

  this.jobConfigurationManager = new JobConfigurationManager(this.eventBus, config);
  this.jobConfigurationManager.startAsync().awaitRunning();
}
Example #26
Source File: CompulsiveTraderVerticle.java From microtrader with MIT License
@Override
public void start(Future<Void> future) {
  super.start();

  // Get configuration
  config = ConfigFactory.load();

  String company = TraderUtils.pickACompany();
  int numberOfShares = TraderUtils.pickANumber();

  EventBus eventBus = vertx.eventBus();
  EventBusService.getProxy(discovery, PortfolioService.class, ar -> {
    if (ar.failed()) {
      System.out.println("Portfolio service could not be retrieved: " + ar.cause());
    } else {
      // Our services:
      PortfolioService portfolio = ar.result();
      MessageConsumer<JsonObject> marketConsumer = eventBus.consumer(config.getString("market.address"));

      // Listen to the market...
      marketConsumer.handler(message -> {
        JsonObject quote = message.body();
        TraderUtils.dumbTradingLogic(company, numberOfShares, portfolio, quote);
      });
    }
  });
}
Example #27
Source File: GPlusUserActivityProvider.java From streams with Apache License 2.0
/**
 * Retrieve recent activity from a list of accounts.
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {
  Preconditions.checkArgument(args.length >= 2);
  String configfile = args[0];
  String outfile = args[1];

  File file = new File(configfile);
  assert (file.exists());

  Config conf = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
  StreamsConfigurator.addConfig(conf);

  StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration();
  GPlusUserActivityProviderConfiguration config =
      new ComponentConfigurator<>(GPlusUserActivityProviderConfiguration.class).detectConfiguration();
  GPlusUserActivityProvider provider = new GPlusUserActivityProvider(config);

  Gson gson = new Gson();

  PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
  provider.prepare(config);
  provider.startStream();

  do {
    Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
    for (StreamsDatum datum : provider.readCurrent()) {
      String json;
      if (datum.getDocument() instanceof String) {
        json = (String) datum.getDocument();
      } else {
        json = gson.toJson(datum.getDocument());
      }
      outStream.println(json);
    }
  } while (provider.isRunning());

  provider.cleanUp();
  outStream.flush();
}
Example #28
Source File: GobblinClusterKillTest.java From incubator-gobblin with Apache License 2.0
@BeforeMethod
public void setUp() throws Exception {
  // Use a random ZK port
  _testingZKServer = new TestingServer(-1);
  LOG.info("Testing ZK Server listening on: " + _testingZKServer.getConnectString());

  URL url = GobblinClusterKillTest.class.getClassLoader().getResource(
      GobblinClusterKillTest.class.getSimpleName() + ".conf");
  Assert.assertNotNull(url, "Could not find resource " + url);

  _config = ConfigFactory.parseURL(url)
      .withValue("gobblin.cluster.zk.connection.string",
          ConfigValueFactory.fromAnyRef(_testingZKServer.getConnectString()))
      .withValue("gobblin.cluster.jobconf.fullyQualifiedPath",
          ConfigValueFactory.fromAnyRef("/tmp/gobblinClusterKillTest/job-conf"))
      .resolve();

  String zkConnectionString = _config.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY);
  HelixUtils.createGobblinHelixCluster(zkConnectionString,
      _config.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY));

  setupTestDir();

  _clusterManagers = new GobblinClusterManager[NUM_MANAGERS];
  _clusterWorkers = new GobblinTaskRunner[NUM_WORKERS];
  _workerStartThreads = new Thread[NUM_WORKERS];

  for (int i = 0; i < NUM_MANAGERS; i++) {
    setupManager(i);
  }
  for (int i = 0; i < NUM_WORKERS; i++) {
    setupWorker(i);
  }
}
Example #29
Source File: DefaultWebsocketConfigTest.java From ditto with Eclipse Public License 2.0
@Test
public void underTestReturnsDefaultValuesIfBaseConfigWasEmpty() {
  final WebsocketConfig underTest = DefaultWebsocketConfig.of(ConfigFactory.empty());

  softly.assertThat(underTest.getSubscriberBackpressureQueueSize())
      .as(WebsocketConfigValue.SUBSCRIBER_BACKPRESSURE_QUEUE_SIZE.getConfigPath())
      .isEqualTo(WebsocketConfigValue.SUBSCRIBER_BACKPRESSURE_QUEUE_SIZE.getDefaultValue());
  softly.assertThat(underTest.getPublisherBackpressureBufferSize())
      .as(WebsocketConfigValue.PUBLISHER_BACKPRESSURE_BUFFER_SIZE.getConfigPath())
      .isEqualTo(WebsocketConfigValue.PUBLISHER_BACKPRESSURE_BUFFER_SIZE.getDefaultValue());
  softly.assertThat(underTest.getThrottlingRejectionFactor())
      .as(WebsocketConfigValue.THROTTLING_REJECTION_FACTOR.getConfigPath())
      .isCloseTo((Double) WebsocketConfigValue.THROTTLING_REJECTION_FACTOR.getDefaultValue(),
          Percentage.withPercentage(1.0));
}
Example #30
Source File: AppTest.java From freeacs with MIT License
@BeforeClass
public static void setUp() throws SQLException {
  ValueInsertHelper.insert(dataSource);
  Config baseConfig = ConfigFactory.load("application.conf");
  WebProperties properties = new WebProperties(baseConfig);
  Configuration configuration = Freemarker.initFreemarker();
  ObjectMapper objectMapper = new ObjectMapper();
  App.routes(dataSource, properties, configuration, objectMapper);
  Spark.awaitInitialization();
  seleniumTest = new SeleniumTest(
      "http://localhost:" + properties.getServerPort() + properties.getContextPath());
}