Java Code Examples for com.typesafe.config.ConfigFactory#parseProperties()

The following examples show how to use com.typesafe.config.ConfigFactory#parseProperties(). They are drawn from open-source projects; the source file and project for each example are noted above it.
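
Before the project examples below, here is a minimal, self-contained sketch of the method itself. The class name and property keys are illustrative, not taken from any of the projects that follow.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

import java.util.Properties;

public class ParsePropertiesDemo {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty("app.name", "demo");
    props.setProperty("app.timeout", "30");

    // Dotted keys become nested paths in the resulting Config,
    // so "app.name" is reachable as the path app.name.
    Config config = ConfigFactory.parseProperties(props);

    System.out.println(config.getString("app.name"));  // demo
    System.out.println(config.getInt("app.timeout"));  // 30
  }
}

Property values are read as plain strings rather than parsed as HOCON, but Config still converts them on access, as the getInt call above shows.
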
Example 1
Source File: TestValidator.java    From envelope with Apache License 2.0
@Test
public void testEmptyValue() {
  ProvidesValidations validee = new ProvidesValidations() {
    @Override
    public Validations getValidations() {
      return Validations.builder().mandatoryPath("hello").build();
    }
  };

  Properties configProps = new Properties();
  configProps.setProperty("hello", "");
  Config config = ConfigFactory.parseProperties(configProps);

  assertValidationFailures(validee, config);

  config = config.withValue("hello", ConfigValueFactory.fromAnyRef(null));
  assertValidationFailures(validee, config);
}
 
Example 2
Source File: TestSparkMLDeriver.java    From envelope with Apache License 2.0
@Test (expected = RuntimeException.class)
public void testFailOnMultipleDependencies() throws Exception {
  String modelPath = folder.newFolder().getAbsolutePath();
  generateAndSaveModel(modelPath);

  Properties configProps = new Properties();
  configProps.setProperty(SparkMLDeriver.MODEL_PATH_CONFIG, modelPath);
  Config config = ConfigFactory.parseProperties(configProps);

  SparkMLDeriver deriver = new SparkMLDeriver();
  assertNoValidationFailures(deriver, config);
  deriver.configure(config);

  Map<String, Dataset<Row>> dependencies = getDependencies();
  dependencies.put("step2", dependencies.get("step1"));

  derive(deriver, dependencies);
}
 
Example 3
Source File: GobblinServiceJobSchedulerTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testDisableFlowRunImmediatelyOnStart()
    throws Exception {
  Properties properties = new Properties();
  properties.setProperty(ConfigurationKeys.FLOW_RUN_IMMEDIATELY, "true");
  properties.setProperty(ConfigurationKeys.JOB_SCHEDULE_KEY, TEST_SCHEDULE);
  properties.setProperty(ConfigurationKeys.JOB_GROUP_KEY, TEST_GROUP_NAME);
  properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, TEST_FLOW_NAME);
  Config config = ConfigFactory.parseProperties(properties);
  FlowSpec spec = FlowSpec.builder().withTemplate(new URI(TEST_TEMPLATE_URI)).withVersion("version")
      .withConfigAsProperties(properties).withConfig(config).build();
  FlowSpec modifiedSpec = (FlowSpec) GobblinServiceJobScheduler.disableFlowRunImmediatelyOnStart(spec);
  for (URI templateURI : modifiedSpec.getTemplateURIs().get()) {
    Assert.assertEquals(templateURI.toString(), TEST_TEMPLATE_URI);
  }
  Assert.assertEquals(modifiedSpec.getVersion(), "version");
  Config modifiedConfig = modifiedSpec.getConfig();
  Assert.assertFalse(modifiedConfig.getBoolean(ConfigurationKeys.FLOW_RUN_IMMEDIATELY));
  Assert.assertEquals(modifiedConfig.getString(ConfigurationKeys.JOB_GROUP_KEY), TEST_GROUP_NAME);
  Assert.assertEquals(modifiedConfig.getString(ConfigurationKeys.JOB_NAME_KEY), TEST_FLOW_NAME);
}
 
Example 4
Source File: ElasticsearchWriterBaseTest.java    From incubator-gobblin with Apache License 2.0
private void assertConstructionExpectation(Properties props,
    String testScenario,
    Boolean constructionSuccess) {
  Config config = ConfigFactory.parseProperties(props);
  try {
    ElasticsearchWriterBase writer = getWriterBase(config);
    if (!constructionSuccess) {
      Assert.fail("Test Scenario: " + testScenario + ": Writer should not be constructed");
    }
  }
  catch (Exception e) {
    if (constructionSuccess) {
      Assert.fail("Test Scenario: " + testScenario + ": Writer should be constructed successfully");
    }
  }
}
 
Example 5
Source File: ErrorManagerTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testErrorEvery() {
  Properties props = new Properties();
  props.setProperty(ErrorManager.ERROR_TYPE_CONFIGURATION_KEY, "nth");
  props.setProperty(ErrorManager.FLAKY_ERROR_EVERY_CONFIGURATION_KEY, "5");
  Config config = ConfigFactory.parseProperties(props);
  ErrorManager errorManager = new ErrorManager(config);
  for (int j = 0; j < 5; ++j) {
    for (int i = 0; i < 4; ++i) {
      Assert.assertEquals(errorManager.nextError(null), false, "Failed on " + i);
    }
    Assert.assertEquals(errorManager.nextError(null), true, "Failed on the last one");
  }
}
 
Example 6
Source File: StateStoreBasedWatermarkStorage.java    From incubator-gobblin with Apache License 2.0
/**
 * Creates a state store config from the given state.
 * @param state the state to read watermark storage properties from.
 * @return a filled-out config that can be passed on to a state store.
 */
Config getStateStoreConfig(State state) {
  // Select and prefix-strip all properties prefixed by WATERMARK_STORAGE_CONFIG_PREFIX
  Properties properties = state.getProperties();
  for (String key : properties.stringPropertyNames())  {
    if (key.startsWith(WATERMARK_STORAGE_CONFIG_PREFIX)) {
      properties.setProperty(key.substring(WATERMARK_STORAGE_CONFIG_PREFIX.length()),
          (String) properties.get(key));
    }
  }

  Config config = ConfigFactory.parseProperties(properties);

  // Defaults
  if (!config.hasPath(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY)) {
    config = config.withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY,
        ConfigValueFactory.fromAnyRef("/streamingWatermarks"));
  }
  return config;
}
 
Example 7
Source File: ObservingFSFlowEdgeTemplateCatalogTest.java    From incubator-gobblin with Apache License 2.0
@BeforeClass
public void setUp() throws Exception {
  URI flowTemplateCatalogUri = this.getClass().getClassLoader().getResource("template_catalog").toURI();
  this.templateDir = Files.createTempDir();
  FileUtils.forceDeleteOnExit(templateDir);
  FileUtils.copyDirectory(new File(flowTemplateCatalogUri.getPath()), templateDir);
  Properties properties = new Properties();
  properties.put(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY, templateDir.toURI().toString());
  properties.put(ConfigurationKeys.JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL_KEY, "1000");
  Config config = ConfigFactory.parseProperties(properties);
  this.templateCatalogCfg = config.withValue(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
      config.getValue(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY));
}
 
Example 8
Source File: QuarantineMonitorTest.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void setup() {
	Properties properties = new Properties();
	properties.setProperty("akka.remote.watch-failure-detector.threshold", "0.00001");
	properties.setProperty("akka.remote.watch-failure-detector.heartbeat-interval", "1 ms");
	properties.setProperty("akka.remote.watch-failure-detector.acceptable-heartbeat-pause", "1 ms");
	Config deathWatch = ConfigFactory.parseProperties(properties);
	Config defaultConfig = AkkaUtils.getDefaultAkkaConfig();

	actorSystem1 = AkkaUtils.createActorSystem(deathWatch.withFallback(defaultConfig));
}
 
Example 9
Source File: HighLevelConsumerTest.java    From incubator-gobblin with Apache License 2.0
public static Config getSimpleConfig(Optional<String> prefix) {
  Properties properties = new Properties();
  properties.put(getConfigKey(prefix, ConfigurationKeys.KAFKA_BROKERS), "127.0.0.1:" + TestUtils.findFreePort());
  properties.put(getConfigKey(prefix, Kafka09ConsumerClient.GOBBLIN_CONFIG_VALUE_DESERIALIZER_CLASS_KEY), Kafka09ConsumerClient.KAFKA_09_DEFAULT_KEY_DESERIALIZER);
  properties.put(getConfigKey(prefix, "zookeeper.connect"), "zookeeper");
  properties.put(ConfigurationKeys.STATE_STORE_ENABLED, "true");
  File tmpDir = Files.createTempDir();
  tmpDir.deleteOnExit();
  properties.put(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, tmpDir.toString());

  return ConfigFactory.parseProperties(properties);
}
 
Example 10
Source File: TestValidator.java    From envelope with Apache License 2.0
@Test
public void testAllowUnrecognizedPaths() {
  ProvidesValidations validee = new ProvidesValidations() {
    @Override
    public Validations getValidations() {
      return Validations.builder().allowUnrecognizedPaths().build();
    }
  };

  Properties configProps = new Properties();
  configProps.setProperty("hello", "world");
  Config config = ConfigFactory.parseProperties(configProps);

  assertNoValidationFailures(validee, config);
}
 
Example 11
Source File: TestValidator.java    From envelope with Apache License 2.0
@Test
public void testUnrecognizedPaths() {
  ProvidesValidations validee = new ProvidesValidations() {
    @Override
    public Validations getValidations() {
      return Validations.builder().build();
    }
  };

  Properties configProps = new Properties();
  configProps.setProperty("hello", "world");
  Config config = ConfigFactory.parseProperties(configProps);

  assertValidationFailures(validee, config);
}
 
Example 12
Source File: TestContexts.java    From envelope with Apache License 2.0
@Test
public void testApplicationNameProvided() {
  Properties props = new Properties();
  props.setProperty("application.name", "test");
  Config config = ConfigFactory.parseProperties(props);
  Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST);
  SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf();
  assertEquals(sparkConf.get("spark.app.name"), "test");
}
 
Example 13
Source File: BaseFlowEdgeFactoryTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testCreateFlowEdge() throws Exception {
  Properties properties = new Properties();
  properties.put(FlowGraphConfigurationKeys.FLOW_EDGE_SOURCE_KEY,"node1");
  properties.put(FlowGraphConfigurationKeys.FLOW_EDGE_DESTINATION_KEY, "node2");
  properties.put(FlowGraphConfigurationKeys.FLOW_EDGE_NAME_KEY, "edge1");
  properties.put(FlowGraphConfigurationKeys.FLOW_EDGE_ID_KEY, "node1:node2:edge1");
  properties.put(FlowGraphConfigurationKeys.FLOW_EDGE_TEMPLATE_DIR_URI_KEY, "FS:///flowEdgeTemplate");

  List<SpecExecutor> specExecutorList = new ArrayList<>();
  Config config1 = ConfigFactory.empty().withValue("specStore.fs.dir", ConfigValueFactory.fromAnyRef("/tmp1")).
      withValue("specExecInstance.capabilities", ConfigValueFactory.fromAnyRef("s1:d1"));
  specExecutorList.add(new InMemorySpecExecutor(config1));
  Config config2 = ConfigFactory.empty().withValue("specStore.fs.dir", ConfigValueFactory.fromAnyRef("/tmp2")).
      withValue("specExecInstance.capabilities", ConfigValueFactory.fromAnyRef("s2:d2"));
  specExecutorList.add(new InMemorySpecExecutor(config2));

  FlowEdgeFactory flowEdgeFactory = new BaseFlowEdge.Factory();

  Properties props = new Properties();
  URI flowTemplateCatalogUri = this.getClass().getClassLoader().getResource("template_catalog").toURI();
  props.put(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY, flowTemplateCatalogUri.toString());
  Config config = ConfigFactory.parseProperties(props);
  Config templateCatalogCfg = config
      .withValue(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
          config.getValue(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY));
  FSFlowTemplateCatalog catalog = new FSFlowTemplateCatalog(templateCatalogCfg);
  Config edgeProps = ConfigUtils.propertiesToConfig(properties);
  FlowEdge flowEdge = flowEdgeFactory.createFlowEdge(edgeProps, catalog, specExecutorList);
  Assert.assertEquals(flowEdge.getSrc(), "node1");
  Assert.assertEquals(flowEdge.getDest(), "node2");
  Assert.assertEquals(flowEdge.getExecutors().get(0).getConfig().get().getString("specStore.fs.dir"),"/tmp1");
  Assert.assertEquals(flowEdge.getExecutors().get(0).getConfig().get().getString("specExecInstance.capabilities"),"s1:d1");
  Assert.assertEquals(flowEdge.getExecutors().get(1).getConfig().get().getString("specStore.fs.dir"),"/tmp2");
  Assert.assertEquals(flowEdge.getExecutors().get(1).getConfig().get().getString("specExecInstance.capabilities"),"s2:d2");
  Assert.assertEquals(flowEdge.getExecutors().get(0).getClass().getSimpleName(),"InMemorySpecExecutor");
  Assert.assertEquals(flowEdge.getExecutors().get(1).getClass().getSimpleName(),"InMemorySpecExecutor");
}
 
Example 14
Source File: TestSparkMLDeriver.java    From envelope with Apache License 2.0
@Test
public void testDeriveWithImplicitStep() throws Exception {
  String modelPath = folder.newFolder().getAbsolutePath();
  generateAndSaveModel(modelPath);

  Properties configProps = new Properties();
  configProps.setProperty(SparkMLDeriver.MODEL_PATH_CONFIG, modelPath);
  Config config = ConfigFactory.parseProperties(configProps);

  SparkMLDeriver deriver = new SparkMLDeriver();
  assertNoValidationFailures(deriver, config);
  deriver.configure(config);

  derive(deriver, getDependencies());
}
 
Example 15
Source File: ValidationJob.java    From incubator-gobblin with Apache License 2.0
public ValidationJob(String jobId, Properties props) throws IOException {
  super(jobId, log);

  // Set the conversion config prefix for Avro to ORC
  props.setProperty(HiveDatasetFinder.HIVE_DATASET_CONFIG_PREFIX_KEY, HIVE_DATASET_CONFIG_AVRO_PREFIX);

  Config config = ConfigFactory.parseProperties(props);
  this.props = props;
  this.metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config), ValidationJob.class);
  this.eventSubmitter = new EventSubmitter.Builder(this.metricContext, EventConstants.CONVERSION_NAMESPACE).build();
  this.updateProvider = UpdateProviderFactory.create(props);
  this.datasetFinder = new ConvertibleHiveDatasetFinder(getSourceFs(), props, this.eventSubmitter);
  this.fs = FileSystem.get(new Configuration());

  int maxLookBackDays = Integer.parseInt(props.getProperty(HiveSource.HIVE_SOURCE_MAXIMUM_LOOKBACK_DAYS_KEY, DEFAULT_HIVE_SOURCE_MAXIMUM_LOOKBACK_DAYS));
  int skipRecentThanDays = Integer.parseInt(props.getProperty(HIVE_SOURCE_SKIP_RECENT_THAN_DAYS_KEY, DEFAULT_HIVE_SOURCE_SKIP_RECENT_THAN_DAYS));
  this.maxLookBackTime = new DateTime().minusDays(maxLookBackDays).getMillis();
  this.skipRecentThanTime = new DateTime().minusDays(skipRecentThanDays).getMillis();

  int maxThreadCount = Integer.parseInt(props.getProperty(MAX_THREAD_COUNT, DEFAULT_MAX_THREAD_COUNT));
  this.exec =
      Executors.newFixedThreadPool(maxThreadCount,
          ExecutorsUtils.newThreadFactory(Optional.of(LoggerFactory.getLogger(ValidationJob.class)), Optional.of("getValidationOutputFromHive")));
  this.futures = Lists.newArrayList();
  EventSubmitter.submit(Optional.of(this.eventSubmitter), EventConstants.VALIDATION_SETUP_EVENT);

  this.pool = HiveMetastoreClientPool.get(props, Optional.fromNullable(props.getProperty(HiveDatasetFinder.HIVE_METASTORE_URI_KEY)));
  Preconditions.checkArgument(props.containsKey(VALIDATION_TYPE_KEY), "Missing property " + VALIDATION_TYPE_KEY);
  this.validationType = ValidationType.valueOf(props.getProperty(VALIDATION_TYPE_KEY));
  this.ignoreDataPathIdentifierList = COMMA_BASED_SPLITTER.splitToList(props
      .getProperty(HIVE_VALIDATION_IGNORE_DATA_PATH_IDENTIFIER_KEY,
          DEFAULT_HIVE_VALIDATION_IGNORE_DATA_PATH_IDENTIFIER));
  this.throwables = new ArrayList<>();
  this.isNestedORC = Boolean.parseBoolean(props.getProperty(IS_NESTED_ORC, DEFAULT_IS_NESTED_ORC));
  this.hiveSettings = Splitter.on(";").trimResults().omitEmptyStrings()
      .splitToList(props.getProperty(HIVE_SETTINGS, StringUtils.EMPTY));
}
 
Example 16
Source File: FSFlowTemplateCatalogTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testGetFlowTemplate() throws Exception {
  URI flowTemplateCatalogUri = this.getClass().getClassLoader().getResource("template_catalog").toURI();
  // Create a FSFlowTemplateCatalog instance
  Properties properties = new Properties();
  properties.put(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY, flowTemplateCatalogUri.toString());
  Config config = ConfigFactory.parseProperties(properties);
  Config templateCatalogCfg = config
      .withValue(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
          config.getValue(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY));
  FSFlowTemplateCatalog catalog = new FSFlowTemplateCatalog(templateCatalogCfg);
  FlowTemplate flowTemplate = catalog.getFlowTemplate(new URI(TEST_TEMPLATE_DIR_URI));

  //Basic sanity check for the FlowTemplate

  List<JobTemplate> jobTemplates = flowTemplate.getJobTemplates();
  Assert.assertEquals(jobTemplates.size(), 4);
  for (int i = 0; i < 4; i++) {
    String uri = new Path(jobTemplates.get(i).getUri()).getName().split("\\.")[0];
    String templateId = uri.substring(uri.length() - 1);
    for (int j = 0; j < 2; j++) {
      Config jobTemplateConfig = jobTemplates.get(i).getRawTemplateConfig();
      String suffix = templateId + Integer.toString(j + 1);
      Assert.assertEquals(jobTemplateConfig.getString("key" + suffix), "val" + suffix);
    }
  }

  Config flowConfig = ConfigFactory.empty().withValue("team.name", ConfigValueFactory.fromAnyRef("test-team"))
      .withValue("dataset.name", ConfigValueFactory.fromAnyRef("test-dataset"));

  List<Pair<DatasetDescriptor, DatasetDescriptor>> inputOutputDescriptors = flowTemplate.getDatasetDescriptors(flowConfig, true);
  Assert.assertTrue(inputOutputDescriptors.size() == 2);
  List<String> dirs = Lists.newArrayList("inbound", "outbound");
  for (int i = 0; i < 2; i++) {
    for (int j = 0; j < 2; j++) {
      FSDatasetDescriptor datasetDescriptor;
      if (j == 0) {
        datasetDescriptor = (FSDatasetDescriptor) inputOutputDescriptors.get(i).getLeft();
      } else {
        datasetDescriptor = (FSDatasetDescriptor) inputOutputDescriptors.get(i).getRight();
      }
      Assert.assertEquals(datasetDescriptor.getPlatform(), "hdfs");
      Assert.assertEquals(datasetDescriptor.getFormatConfig().getFormat(), "avro");
      Assert.assertEquals(datasetDescriptor.getPath(), "/data/" + dirs.get(i) + "/test-team/test-dataset");
    }
  }
  Config flowTemplateConfig = flowTemplate.getRawTemplateConfig();
  Assert.assertEquals(flowTemplateConfig.getString(DatasetDescriptorConfigKeys.FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX + ".0."
      + DatasetDescriptorConfigKeys.CLASS_KEY), FSDatasetDescriptor.class.getCanonicalName());
  Assert.assertEquals(flowTemplateConfig.getString(DatasetDescriptorConfigKeys.FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX
      + ".0." + DatasetDescriptorConfigKeys.CLASS_KEY), FSDatasetDescriptor.class.getCanonicalName());
}
 
Example 17
Source File: FlakyKafkaProducer.java    From incubator-gobblin with Apache License 2.0
public FlakyKafkaProducer(Properties properties) {
  super(properties);
  Config config = ConfigFactory.parseProperties(properties);
  errorManager = new ErrorManager(config);
}
 
Example 18
Source File: InMemorySpecExecutor.java    From incubator-gobblin with Apache License 2.0
/**
 * A factory method that creates a SpecExecutor, specifying only a URI for uniqueness.
 * @param uri the unique URI identifying the SpecExecutor
 */
public static SpecExecutor createDummySpecExecutor(URI uri) {
  Properties properties = new Properties();
  properties.setProperty(ConfigurationKeys.SPECEXECUTOR_INSTANCE_URI_KEY, uri.toString());
  return new InMemorySpecExecutor(ConfigFactory.parseProperties(properties));
}
 
Example 19
Source File: Kafka09DataWriter.java    From incubator-gobblin with Apache License 2.0
public Kafka09DataWriter(Properties props)
    throws ConfigurationException {
  this(getKafkaProducer(props), ConfigFactory.parseProperties(props));
}
 
Example 20
Source File: HadoopUtils.java    From incubator-gobblin with Apache License 2.0
/**
 * Provides a Hadoop Configuration from the given state.
 * It also supports decrypting values under "encryptedPath".
 * Note that the encryptedPath prefix is removed from each config key underneath it, leaving only the child path.
 * If the same config path already exists as a child path, the stripped one takes priority.
 *
 * e.g:
 * - encryptedPath: writer.fs.encrypted
 *   before: writer.fs.encrypted.secret
 *   after: secret
 *
 * Common use case for encryptedPath:
 *   When a job property holds an encrypted credential but the FileSystem needs the decrypted value.
 *
 * @param state source state.
 * @param encryptedPath Optional. If provided, config under this path will be decrypted. @see ConfigUtils.resolveEncrypted
 *                      Note that config under encryptedPath will be included in the end result even if it is not part of includeOnlyPath
 * @return Hadoop Configuration.
 */
public static Configuration getConfFromState(State state, Optional<String> encryptedPath) {
  Config config = ConfigFactory.parseProperties(state.getProperties());
  if (encryptedPath.isPresent()) {
    config = ConfigUtils.resolveEncrypted(config, encryptedPath);
  }
  Configuration conf = newConfiguration();

  for (Entry<String, ConfigValue> entry : config.entrySet()) {
    conf.set(entry.getKey(), entry.getValue().unwrapped().toString());
  }
  return conf;
}