com.typesafe.config.Config Java Examples
The following examples show how to use com.typesafe.config.Config.
Each example is taken from an open-source project; the source file and project are noted above each snippet.
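Before the project examples, here is a minimal, self-contained sketch of the core Config API — parsing HOCON, layering configs with withFallback, and reading typed values. The keys used (app.name, app.timeout-ms, app.retries) are made up for illustration and do not come from any of the projects below.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ConfigQuickStart {
  public static void main(String[] args) {
    // Parse inline HOCON; in a real application, ConfigFactory.load() reads application.conf instead.
    Config fallback = ConfigFactory.parseString("app.retries = 3");
    Config config = ConfigFactory.parseString("app.name = demo, app.timeout-ms = 500")
        .withFallback(fallback) // the fallback supplies any keys missing from the primary config
        .resolve();             // resolves ${...} substitutions, if any

    String name = config.getString("app.name");        // "demo"
    long timeoutMs = config.getLong("app.timeout-ms"); // 500
    // hasPath() guards against ConfigException.Missing for optional keys
    int retries = config.hasPath("app.retries") ? config.getInt("app.retries") : 1;

    System.out.printf("name=%s timeoutMs=%d retries=%d%n", name, timeoutMs, retries);
  }
}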
Example #1
Source File: ZipFileConfigStoreTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testGetOwnConfig() {
  Config config1 = this.store.getOwnConfig(this.rootPath, this.version);
  Assert.assertEquals(config1.getString("gobblin.property.test1"), "prop1");
  Assert.assertEquals(config1.getString("gobblin.property.test2"), "prop2");

  Config config2 = this.store.getOwnConfig(this.testPath, this.version);
  Assert.assertEquals(config2.getString("gobblin.test.property"), "string1");

  Config config3 = this.store.getOwnConfig(this.child1Path, this.version);
  Assert.assertEquals(config3.getString("gobblin.test.property"), "string2");

  Config config4 = this.store.getOwnConfig(this.child2Path, this.version);
  Assert.assertEquals(config4.getString("gobblin.test.property"), "string3");
}
Example #2
Source File: JdbcMetadataHandler.java From eagle with Apache License 2.0
public JdbcMetadataHandler(Config config) {
  try {
    //JdbcSchemaManager.getInstance().init(config);
    BasicDataSource bDatasource = new BasicDataSource();
    bDatasource.setDriverClassName(config.getString(MetadataUtils.JDBC_DRIVER_PATH));
    if (config.hasPath(MetadataUtils.JDBC_USERNAME_PATH)) {
      bDatasource.setUsername(config.getString(MetadataUtils.JDBC_USERNAME_PATH));
      bDatasource.setPassword(config.getString(MetadataUtils.JDBC_PASSWORD_PATH));
    }
    bDatasource.setUrl(config.getString(MetadataUtils.JDBC_CONNECTION_PATH));
    if (config.hasPath(MetadataUtils.JDBC_CONNECTION_PROPERTIES_PATH)) {
      bDatasource.setConnectionProperties(config.getString(MetadataUtils.JDBC_CONNECTION_PROPERTIES_PATH));
    }
    this.dataSource = bDatasource;
  } catch (Exception e) {
    LOG.error(e.getMessage(), e);
  }
}
Example #3
Source File: MetadataDaoFactory.java From eagle with Apache License 2.0
private MetadataDaoFactory() {
  Config config = ConfigFactory.load();
  if (!config.hasPath(MetadataUtils.META_DATA)) {
    LOG.warn("metadata is not configured, use in-memory store !!!");
    dao = new InMemMetadataDaoImpl(null);
  } else {
    Config metaDataConfig = config.getConfig(MetadataUtils.META_DATA);
    try {
      String clsName = metaDataConfig.getString(MetadataUtils.ALERT_META_DATA_DAO);
      Class<?> clz;
      clz = Thread.currentThread().getContextClassLoader().loadClass(clsName);
      if (IMetadataDao.class.isAssignableFrom(clz)) {
        Constructor<?> cotr = clz.getConstructor(Config.class);
        LOG.info("metadata.alertMetadataDao loaded: " + clsName);
        dao = (IMetadataDao) cotr.newInstance(metaDataConfig);
      } else {
        throw new Exception("metadata.metadataDao configuration need to be implementation of IMetadataDao! ");
      }
    } catch (Exception e) {
      LOG.error("error when initialize the dao, fall back to in memory mode!", e);
      dao = new InMemMetadataDaoImpl(metaDataConfig);
    }
  }
}
Example #4
Source File: DecisionStep.java From envelope with Apache License 2.0
@Override
public void configure(Config config) {
  super.configure(config);

  this.ifTrueStepNames = config.getStringList(IF_TRUE_STEP_NAMES_PROPERTY);
  this.decisionMethod = DecisionMethod.valueOf(config.getString(DECISION_METHOD_PROPERTY).toUpperCase());

  switch (decisionMethod) {
    case LITERAL:
      this.literalResult = config.getBoolean(LITERAL_RESULT_PROPERTY);
      break;
    case STEP_BY_KEY:
      this.stepByKeyStepName = config.getString(STEP_BY_KEY_STEP_PROPERTY);
      this.stepByKeyKey = config.getString(STEP_BY_KEY_KEY_PROPERTY);
      break;
    case STEP_BY_VALUE:
      this.stepByValueStepName = config.getString(STEP_BY_VALUE_STEP_PROPERTY);
      break;
  }
}
Example #5
Source File: TestInputTranslatorCompatibilityValidation.java From envelope with Apache License 2.0
@Test
public void testInputTranslatorCompatible() {
  Map<String, Object> translatorConfigMap = Maps.newHashMap();
  translatorConfigMap.put(ComponentFactory.TYPE_CONFIG_NAME, StringExpectingTranslator.class.getName());
  Map<String, Object> inputConfigMap = Maps.newHashMap();
  inputConfigMap.put(ComponentFactory.TYPE_CONFIG_NAME, StringProvidingStreamInput.class.getName());
  inputConfigMap.put(StreamingStep.TRANSLATOR_WITHIN_INPUT_PROPERTY, translatorConfigMap);
  Map<String, Object> stepConfigMap = Maps.newHashMap();
  stepConfigMap.put(DataStep.INPUT_TYPE, inputConfigMap);
  Config stepConfig = ConfigFactory.parseMap(stepConfigMap);

  StreamingStep step = new StreamingStep("to_validate");

  ValidationAssert.assertNoValidationFailures(step, stepConfig);
}
Example #6
Source File: Rules.java From Stargraph with MIT License
private Map<Language, List<Pattern>> loadStopPatterns(Config config) {
  Map<Language, List<Pattern>> rulesByLang = new LinkedHashMap<>();
  ConfigObject configObject = config.getObject("rules.stop-pattern");

  configObject.keySet().forEach(strLang -> {
    Language language = Language.valueOf(strLang.toUpperCase());
    List<String> patternStr = configObject.toConfig().getStringList(strLang);
    rulesByLang.compute(language,
        (lang, pattern) -> patternStr.stream().map(Pattern::compile).collect(Collectors.toList()));
    logger.info(marker, "Loaded {} Stop patterns for '{}'", rulesByLang.get(language).size(), language);
  });

  return rulesByLang;
}
Example #7
Source File: TestTokenStoreManager.java From envelope with Apache License 2.0
@Test
public void testAddProvider() {
  try {
    Configuration hadoopConf = new Configuration();
    Config config = ConfigUtils.configFromResource("/security/security_manager_testrenewal.conf");
    Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST);

    TokenStoreManager manager = new TokenStoreManager(ConfigUtils.getOrElse(config,
        APPLICATION_SECTION_PREFIX + "." + SECURITY_PREFIX, ConfigFactory.empty()));
    manager.addTokenProvider(new TestTokenProvider());
    manager.start();

    List<Path> files = SecurityUtils.getExistingTokenStoreFiles(ConfigUtils.getOrElse(config,
        APPLICATION_SECTION_PREFIX + "." + SECURITY_PREFIX, ConfigFactory.empty()), hadoopConf, true);
    assertEquals(1, files.size());

    TokenStore wrapper = new TokenStore();
    wrapper.read(files.get(0).toString(), hadoopConf);
    assertTrue(wrapper.getTokenAliases().contains("test-provider"));

    manager.stop();
  } catch (Exception e) {
    fail(e.getMessage());
  }
}
Example #8
Source File: DatePartitionHiveVersionFinder.java From incubator-gobblin with Apache License 2.0
public DatePartitionHiveVersionFinder(FileSystem fs, Config config) {
  this.pattern = ConfigUtils.getString(config, PARTITION_VALUE_DATE_TIME_PATTERN_KEY,
      DEFAULT_PARTITION_VALUE_DATE_TIME_PATTERN);

  if (config.hasPath(PARTITION_VALUE_DATE_TIME_TIMEZONE_KEY)) {
    this.formatter = DateTimeFormat.forPattern(pattern)
        .withZone(DateTimeZone.forID(config.getString(PARTITION_VALUE_DATE_TIME_TIMEZONE_KEY)));
  } else {
    this.formatter = DateTimeFormat.forPattern(pattern)
        .withZone(DateTimeZone.forID(DEFAULT_PARTITION_VALUE_DATE_TIME_TIMEZONE));
  }

  this.partitionKeyName = ConfigUtils.getString(config, PARTITION_KEY_NAME_KEY, DEFAULT_PARTITION_KEY_NAME);
  this.partitionKeyNamePredicate = new Predicate<FieldSchema>() {
    @Override
    public boolean apply(FieldSchema input) {
      return StringUtils.equalsIgnoreCase(input.getName(), DatePartitionHiveVersionFinder.this.partitionKeyName);
    }
  };
}
Example #9
Source File: ConfigMapper.java From atomix with Apache License 2.0
/**
 * Loads the given configuration file using the mapper, falling back to the given resources.
 *
 * @param type the type to load
 * @param files the files to load
 * @param resources the resources to which to fall back
 * @param <T> the resulting type
 * @return the loaded configuration
 */
public <T> T loadFiles(Class<T> type, List<File> files, List<String> resources) {
  if (files == null) {
    return loadResources(type, resources);
  }

  Config config = ConfigFactory.systemProperties();
  for (File file : files) {
    config = config.withFallback(ConfigFactory.parseFile(file, ConfigParseOptions.defaults().setAllowMissing(false)));
  }
  for (String resource : resources) {
    config = config.withFallback(ConfigFactory.load(classLoader, resource));
  }
  return map(checkNotNull(config, "config cannot be null").resolve(), type);
}
Example #10
Source File: HBaseUtils.java From envelope with Apache License 2.0
public synchronized static Connection getConnection(Config config) throws IOException {
  LOG.info("Opening connection to HBase");
  LOG.debug("Creating connection object...");
  Configuration configuration = HBaseUtils.getHBaseConfiguration(config);

  // new Connection
  Connection connection = ConnectionFactory.createConnection(configuration);
  if (connection == null) {
    LOG.error("Could not open connection to HBase with {}", configuration.get(HBaseUtils.ZK_QUORUM_PROPERTY));
    throw new IllegalArgumentException("Could not connect to HBase with supplied ZK quorum");
  }
  JVMUtils.closeAtShutdown(connection);

  return connection;
}
Example #11
Source File: TestTranslateFunction.java From envelope with Apache License 2.0
@Test
public void testAppendRaw() {
  Map<String, Object> configMap = Maps.newHashMap();
  configMap.put(ComponentFactory.TYPE_CONFIG_NAME, DummyTranslator.class.getName());
  configMap.put(TranslateFunction.APPEND_RAW_ENABLED_CONFIG, true);
  Config config = ConfigFactory.parseMap(configMap);

  TranslateFunction tf = new TranslateFunction(config);
  tf.receiveProvidedSchema(tf.getExpectingSchema());
  Dataset<Row> raw = Contexts.getSparkSession().createDataFrame(
      Lists.newArrayList(RowFactory.create("hello?")), tf.getExpectingSchema());
  Dataset<Row> translated = raw.flatMap(tf, RowEncoder.apply(tf.getProvidingSchema()));

  assertEquals(2, translated.schema().size());
  assertEquals("_value", translated.schema().fields()[1].name());
  assertEquals("hello?", translated.collectAsList().get(0).getString(1));
}
Example #12
Source File: FileEventSinkTest.java From mewbase with MIT License
public void testMultiPublishWritesEvent() throws Exception {
  final Config cfg = createConfig();
  final EventSink es = new FileEventSink(cfg);
  final String eventPath = cfg.getString("mewbase.event.sink.file.basedir");

  final String channelName = "channel";
  final BsonObject evt = new BsonObject().put("key", "value");
  IntStream.range(0, 10).forEach(i -> es.publishSync(channelName, evt.put("evt", "" + i)));

  // check that each file has been written
  Set<Path> files = Files.list(Paths.get(eventPath, channelName)).collect(Collectors.toSet());
  IntStream.range(0, 10).forEach(i -> {
    final String eventFileName = FileEventUtils.pathFromEventNumber(i).toString();
    final Path path = Paths.get(eventPath, channelName, eventFileName);
    assertTrue(files.contains(path));
  });
}
Example #13
Source File: EventTimeHistoryPlanner.java From envelope with Apache License 2.0
@Override
public Set<InstantiatedComponent> getComponents(Config config, boolean configure) {
  this.config = config;

  Set<InstantiatedComponent> components = Sets.newHashSet();

  components.add(new InstantiatedComponent(
      getEventTimeModel(configure), getEventTimeModelConfig(), "Event Time Model"));
  if (hasLastUpdatedField()) {
    components.add(new InstantiatedComponent(
        getLastUpdatedTimeModel(configure), getLastUpdatedTimeModelConfig(), "Last Updated Time Model"));
  }

  return components;
}
Example #14
Source File: RemoteMiniClusterImpl.java From beam with Apache License 2.0
@Override
protected RpcService createRpcService(
    AkkaRpcServiceConfiguration akkaRpcServiceConfig, boolean remoteEnabled, String bindAddress) {

  // Enable remote connections to the mini cluster which are disabled by default
  final Config akkaConfig =
      AkkaUtils.getAkkaConfig(akkaRpcServiceConfig.getConfiguration(), bindAddress, 0);

  final Config effectiveAkkaConfig = AkkaUtils.testDispatcherConfig().withFallback(akkaConfig);

  final ActorSystem actorSystem = AkkaUtils.createActorSystem(effectiveAkkaConfig);
  final AkkaRpcService akkaRpcService = new AkkaRpcService(actorSystem, akkaRpcServiceConfig);
  this.port = akkaRpcService.getPort();

  return akkaRpcService;
}
Example #15
Source File: XConfigTest.java From xrpc with Apache License 2.0
@Test
void getClientRateLimitOverride() {
  Config rawConfig = ConfigFactory.load("test.conf");
  XConfig config = new XConfig(rawConfig.getConfig("xrpc"));
  Map<String, List<Double>> configTest = config.getClientRateLimitOverride();

  double expected1 = Double.parseDouble("550");
  double testVal1 = configTest.get("localhost").get(0);
  assertEquals(expected1, testVal1);

  double expected2 = Double.parseDouble("1");
  double testVal2 = configTest.get("1.2.3.4").get(0);
  double expected3 = Double.parseDouble("2");
  double testVal3 = configTest.get("1.2.3.4").get(1);
  assertEquals(expected2, testVal2);
  assertEquals(expected3, testVal3);
}
Example #16
Source File: HyperParams.java From ytk-learn with MIT License
public Hoag(Config config, String prefix) {
  init_step = config.getDouble(prefix + KEY + "init_step");
  step_decr_factor = config.getDouble(prefix + KEY + "step_decr_factor");
  test_loss_reduce_limit = config.getDouble(prefix + KEY + "test_loss_reduce_limit");
  outer_iter = config.getInt(prefix + KEY + "outer_iter");
  List<Double> l1List = config.getDoubleList(prefix + KEY + "l1");
  List<Double> l2List = config.getDoubleList(prefix + KEY + "l2");
  l1 = new double[l1List.size()];
  l2 = new double[l2List.size()];
  for (int i = 0; i < l2.length; i++) {
    l1[i] = l1List.get(i);
    l2[i] = l2List.get(i);
  }

  CheckUtils.check(step_decr_factor < 1.0,
      "%sstep_decr_factor:%f must < 1.0", prefix + KEY, step_decr_factor);
  CheckUtils.check(l1.length == l2.length,
      "%sl1 lenght must be equal to %sl2 lenght", prefix + KEY, prefix + KEY);
}
Example #17
Source File: DatasetCleanerSource.java From incubator-gobblin with Apache License 2.0
/**
 * Create a work unit for each configuration defined or a single work unit if no configurations are defined
 * @param state see {@link org.apache.gobblin.configuration.SourceState}
 * @return list of workunits
 */
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
  List<WorkUnit> workUnits = Lists.newArrayList();
  Config config = ConfigUtils.propertiesToConfig(state.getProperties());
  Config sourceConfig = ConfigUtils.getConfigOrEmpty(config, DATASET_CLEANER_SOURCE_PREFIX);
  List<String> configurationNames = ConfigUtils.getStringList(config, DATASET_CLEANER_CONFIGURATIONS);

  // use a dummy configuration name if none set
  if (configurationNames.isEmpty()) {
    configurationNames = ImmutableList.of("DummyConfig");
  }

  for (String configurationName : configurationNames) {
    WorkUnit workUnit = WorkUnit.createEmpty();

    // specific configuration prefixed by the configuration name has precedence over the source specific
    // configuration, and the source specific configuration has precedence over the general configuration
    Config wuConfig = ConfigUtils.getConfigOrEmpty(sourceConfig, configurationName).withFallback(sourceConfig)
        .withFallback(config);

    workUnit.setProps(ConfigUtils.configToProperties(wuConfig), new Properties());
    TaskUtils.setTaskFactoryClass(workUnit, DatasetCleanerTaskFactory.class);
    workUnits.add(workUnit);
  }

  return workUnits;
}
Example #18
Source File: ConsoleOutputProvider.java From plog with Apache License 2.0
@Override
public Handler getHandler(Config config) throws Exception {
  PrintStream target = System.out;
  try {
    final String targetDescription = config.getString("target");
    if (targetDescription.toLowerCase().equals("stderr")) {
      target = System.err;
    }
  } catch (ConfigException.Missing ignored) {
  }
  return new ConsoleOutputHandler(target);
}
Example #19
Source File: Gateway.java From nassau with Apache License 2.0
private static UpstreamFactory upstream(Config config) {
  NetworkInterface multicastInterface = Configs.getNetworkInterface(config, "upstream.multicast-interface");
  InetAddress multicastGroup = Configs.getInetAddress(config, "upstream.multicast-group");
  int multicastPort = Configs.getPort(config, "upstream.multicast-port");
  InetAddress requestAddress = Configs.getInetAddress(config, "upstream.request-address");
  int requestPort = Configs.getPort(config, "upstream.request-port");

  return new UpstreamFactory(multicastInterface,
      new InetSocketAddress(multicastGroup, multicastPort),
      new InetSocketAddress(requestAddress, requestPort));
}
Example #20
Source File: MongoDbUriSupplierTest.java From ditto with Eclipse Public License 2.0
@Test
public void nullValuesAreNotSet() {
  final Config options = ConfigFactory.parseString("readPreference=null");
  final Config config = ConfigFactory.parseString(
      String.format("%s=\"%s\"\n%s=%s", KEY_URI, SOURCE_URI, KEY_OPTIONS, options.root().render()));
  final MongoDbUriSupplier underTest = MongoDbUriSupplier.of(config);

  final String targetUri = underTest.get();

  assertThat(targetUri).isEqualTo(SOURCE_URI);
}
Example #21
Source File: ZooKeeperClientFactory.java From xio with Apache License 2.0
public CuratorFramework newClient() {
  Config retry = config.getConfig("client.retry");
  try {
    ClientRetryPolicy policy = ClientRetryPolicy.valueOf(retry.getString("policy"));
    return CuratorFrameworkFactory.newClient(
        config.getString("cluster"), policy.build(retry.getConfig(policy.name())));
  } catch (IllegalArgumentException e) {
    throw new RuntimeException(
        "zookeeper.client.retry.policy must be one of " + Arrays.asList(ClientRetryPolicy.values()));
  }
}
Example #22
Source File: RegExFilterProcessorTest.java From Stargraph with MIT License
@Test
public void filterTest() {
  Holder fact1 = ModelUtils.createWrappedFact(kbId,
      "http://dbpedia.org/resource/President_of_the_United_States",
      "http://dbpedia.org/property/incumbent",
      "http://dbpedia.org/resource/Barack_Obama");

  Assert.assertFalse(fact1.isSinkable());

  Config cfg = buildConfig(null, "^http://dbpedia.org/property/inc(.*)$", null);
  Processor processor = Processors.create(cfg);
  processor.run(fact1);

  Assert.assertTrue(fact1.isSinkable());
}
Example #23
Source File: CouchbaseWriterTest.java From incubator-gobblin with Apache License 2.0
/**
 * Test that a single Json document can be written successfully with TTL and timeunits
 * @throws IOException
 * @throws DataConversionException
 * @throws ExecutionException
 * @throws InterruptedException
 */
@Test(groups = {"timeout"})
public void testJsonDocumentWriteTtlWithField() throws ExecutionException, InterruptedException {
  int ttl = 30;
  int originDiffFromNow = 5;
  TimeUnit timeUnit = TimeUnit.DAYS;
  String ttlOriginField = "time";
  long now = System.currentTimeMillis();
  long originDelta = TimeUnit.MILLISECONDS.convert(originDiffFromNow, TimeUnit.DAYS);
  long origin = now - originDelta;
  long expiry = TimeUnit.SECONDS.convert(now, TimeUnit.MILLISECONDS)
      + TimeUnit.SECONDS.convert(ttl, timeUnit)
      - TimeUnit.SECONDS.convert(originDiffFromNow, timeUnit);

  Config config = getConfig("default", Optional.of(ttl), Optional.of(timeUnit), Optional.of(ttlOriginField));
  CouchbaseWriter writer = new CouchbaseWriter(_couchbaseEnvironment, config);

  try {
    String key = "hello";
    String testContent = "hello world";
    HashMap<String, String> contentMap = new HashMap<>();
    contentMap.put("value", testContent);
    contentMap.put(ttlOriginField, "" + origin);
    Gson gson = new Gson();
    String jsonString = gson.toJson(contentMap);
    RawJsonDocument jsonDocument = RawJsonDocument.create(key, jsonString);
    AbstractDocument storedDoc =
        ((GenericWriteResponse<AbstractDocument>) writer.write(jsonDocument, null).get()).getRawResponse();
    RawJsonDocument returnDoc = writer.getBucket().get(key, RawJsonDocument.class);

    Map<String, String> returnedMap = gson.fromJson(returnDoc.content(), Map.class);
    Assert.assertEquals(testContent, returnedMap.get("value"));
    Assert.assertEquals(storedDoc.expiry(), expiry, 50);
  } finally {
    writer.close();
  }
}
Example #24
Source File: ExecutorsModule.java From incubator-retired-wave with Apache License 2.0
@Provides
@Singleton
@LookupExecutor
protected Executor provideLookupExecutor(Provider<RequestScopeExecutor> executorProvider, Config config) {
  return provideThreadPoolExecutor(executorProvider,
      config.getInt("threads.lookup_executor_thread_count"), LookupExecutor.class.getSimpleName());
}
Example #25
Source File: HelixUtils.java From incubator-gobblin with Apache License 2.0
/**
 * Set the system properties from the input {@link Config} instance
 * @param config
 */
public static void setSystemProperties(Config config) {
  Properties properties = ConfigUtils.configToProperties(ConfigUtils.getConfig(config,
      GobblinClusterConfigurationKeys.GOBBLIN_CLUSTER_SYSTEM_PROPERTY_PREFIX, ConfigFactory.empty()));
  for (Map.Entry<Object, Object> entry : properties.entrySet()) {
    System.setProperty(entry.getKey().toString(), entry.getValue().toString());
  }
}
Example #26
Source File: AzkabanSpecProducer.java From incubator-gobblin with Apache License 2.0
public AzkabanSpecProducer(Config config, Optional<Logger> log) {
  this._config = config;

  try {
    // Initialize Azkaban client / producer and cache credentials
    String azkabanUsername = _config.getString(ServiceAzkabanConfigKeys.AZKABAN_USERNAME_KEY);
    String azkabanPassword = getAzkabanPassword(_config);
    String azkabanServerUrl = _config.getString(ServiceAzkabanConfigKeys.AZKABAN_SERVER_URL_KEY);

    _sessionId = AzkabanAjaxAPIClient.authenticateAndGetSessionId(azkabanUsername, azkabanPassword, azkabanServerUrl);
  } catch (IOException | EncoderException e) {
    throw new RuntimeException("Could not authenticate with Azkaban", e);
  }
}
Example #27
Source File: TestHBaseOutput.java From envelope with Apache License 2.0
@Test
public void testApplyBulkMutations() throws Exception {
  Table table = connection.getTable(TableName.valueOf(TABLE));

  Config config = ConfigUtils.configFromResource("/hbase/hbase-output-test.conf").getConfig("output");
  config = config.withValue("zookeeper",
      ConfigValueFactory.fromAnyRef("localhost:" + utility.getZkCluster().getClientPort()));

  HBaseOutput output = new HBaseOutput();
  output.configure(config);

  // Generate bulk mutations
  Dataset<Row> upserts = createBulkMutations(INPUT_ROWS);
  Dataset<Row> deletes = createBulkMutations(INPUT_ROWS);

  List<Tuple2<MutationType, Dataset<Row>>> bulk1 = Lists.newArrayList();
  bulk1.add(new Tuple2<>(MutationType.UPSERT, upserts));
  List<Tuple2<MutationType, Dataset<Row>>> bulk2 = Lists.newArrayList();
  bulk2.add(new Tuple2<>(MutationType.DELETE, deletes));
  List<Tuple2<MutationType, Dataset<Row>>> bulk3 = Lists.newArrayList();
  bulk3.add(new Tuple2<>(MutationType.UPSERT, upserts));
  bulk3.add(new Tuple2<>(MutationType.DELETE, deletes));

  // Run 1 should have 2000
  output.applyBulkMutations(bulk1);
  scanAndCountTable(table, INPUT_ROWS * 4);

  // Run 2 should have 0
  output.applyBulkMutations(bulk2);
  scanAndCountTable(table, 0);

  // Run 3 should have 0
  output.applyBulkMutations(bulk3);
  scanAndCountTable(table, 0);
}
Example #28
Source File: AsyncWriterManager.java From incubator-gobblin with Apache License 2.0
protected AsyncWriterManager(Config config, long commitTimeoutMillis, long commitStepWaitTimeMillis,
    double failureAllowanceRatio, boolean retriesEnabled, int numRetries, int minRetryIntervalMillis,
    int maxOutstandingWrites, AsyncDataWriter asyncDataWriter, Optional<Logger> loggerOptional) {
  Preconditions.checkArgument(commitTimeoutMillis > 0, "Commit timeout must be greater than 0");
  Preconditions.checkArgument(commitStepWaitTimeMillis > 0, "Commit step wait time must be greater than 0");
  Preconditions.checkArgument(commitStepWaitTimeMillis < commitTimeoutMillis,
      "Commit step wait time must be less than commit timeout");
  Preconditions.checkArgument((failureAllowanceRatio <= 1.0 && failureAllowanceRatio >= 0),
      "Failure Allowance must be a ratio between 0 and 1");
  Preconditions.checkArgument(maxOutstandingWrites > 0, "Max outstanding writes must be greater than 0");
  Preconditions.checkNotNull(asyncDataWriter, "Async Data Writer cannot be null");

  this.log = loggerOptional.isPresent() ? loggerOptional.get() : LoggerFactory.getLogger(AsyncWriterManager.class);
  this.closer = Closer.create();
  State state = ConfigUtils.configToState(config);
  this.instrumentationEnabled = GobblinMetrics.isEnabled(state);
  this.metricContext = this.closer.register(Instrumented.getMetricContext(state, asyncDataWriter.getClass()));
  regenerateMetrics();

  this.commitTimeoutMillis = commitTimeoutMillis;
  this.commitStepWaitTimeMillis = commitStepWaitTimeMillis;
  this.failureAllowanceRatio = failureAllowanceRatio;
  this.minRetryIntervalMillis = minRetryIntervalMillis;
  if (retriesEnabled) {
    this.numRetries = numRetries;
    this.retryQueue = Optional.of(new LinkedBlockingQueue<Attempt>());
    this.retryThreadPool = Optional.of(new ScheduledThreadPoolExecutor(1,
        ExecutorsUtils.newDaemonThreadFactory(Optional.of(this.log), Optional.of("AsyncWriteManagerRetry-%d"))));
    this.retryThreadPool.get().execute(new RetryRunner());
  } else {
    this.numRetries = 0;
    this.retryQueue = Optional.absent();
    this.retryThreadPool = Optional.absent();
  }
  this.maxOutstandingWrites = maxOutstandingWrites;
  this.writePermits = new Semaphore(maxOutstandingWrites);
  this.asyncDataWriter = asyncDataWriter;
  this.closer.register(asyncDataWriter);
}
Example #29
Source File: SchedulerUtils.java From incubator-gobblin with Apache License 2.0
/**
 * Load a given job configuration file from a general file system.
 *
 * @param sysProps Gobblin framework configuration properties
 * @param jobConfigPath job configuration file to be loaded
 * @param jobConfigPathDir root job configuration file directory
 * @return a job configuration in the form of {@link java.util.Properties}
 */
public static Properties loadGenericJobConfig(Properties sysProps, Path jobConfigPath, Path jobConfigPathDir,
    JobSpecResolver resolver) throws ConfigurationException, IOException {
  PullFileLoader loader = new PullFileLoader(jobConfigPathDir, jobConfigPathDir.getFileSystem(new Configuration()),
      getJobConfigurationFileExtensions(sysProps), PullFileLoader.DEFAULT_HOCON_PULL_FILE_EXTENSIONS);
  Config sysConfig = ConfigUtils.propertiesToConfig(sysProps);
  Config config = loader.loadPullFile(jobConfigPath, sysConfig, true);
  return resolveTemplate(ConfigUtils.configToProperties(config), resolver);
}
Example #30
Source File: FlowGraphPath.java From incubator-gobblin with Apache License 2.0
/**
 * A method to convert a path of {@link FlowEdgeContext}s into a {@link Dag<JobExecutionPlan>}.
 * @param sysConfig containing environment config (e.g. metric/tracking event config) to be added to each {@link JobSpec}.
 * @return a {@link Dag<JobExecutionPlan>}
 * @throws SpecNotFoundException
 * @throws JobTemplate.TemplateException
 * @throws URISyntaxException
 */
public Dag<JobExecutionPlan> asDag(Config sysConfig)
    throws SpecNotFoundException, JobTemplate.TemplateException, URISyntaxException {
  Dag<JobExecutionPlan> flowDag = new Dag<>(new ArrayList<>());
  for (List<FlowEdgeContext> path : paths) {
    Dag<JobExecutionPlan> pathDag = new Dag<>(new ArrayList<>());
    Iterator<FlowEdgeContext> pathIterator = path.iterator();
    while (pathIterator.hasNext()) {
      Dag<JobExecutionPlan> flowEdgeDag = convertHopToDag(pathIterator.next(), sysConfig);
      pathDag = concatenate(pathDag, flowEdgeDag);
    }
    flowDag = flowDag.merge(pathDag);
  }
  return flowDag;
}