com.typesafe.config.Config Java Examples

The following examples show how to use com.typesafe.config.Config. Each snippet is taken from an open-source project; the source file, project, and license are noted above each example, and the original source can be reached through the link in that header.
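Before the project-specific examples, here is a minimal, self-contained sketch of the typical Config workflow: loading a classpath config, layering defaults with withFallback, and reading typed values. The resource name and the app.* keys are illustrative placeholders, not taken from any project below.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ConfigQuickStart {

  public static void main(String[] args) {
    // Loads application.conf (plus reference.conf) from the classpath.
    Config loaded = ConfigFactory.load();

    // Inline defaults that the loaded configuration overrides via withFallback.
    Config defaults = ConfigFactory.parseString("app { name = demo, port = 8080 }");
    Config config = loaded.withFallback(defaults).resolve();

    // Typed lookups; guard optional paths with hasPath to avoid ConfigException.Missing.
    String name = config.getString("app.name");
    int port = config.getInt("app.port");
    boolean verbose = config.hasPath("app.verbose") && config.getBoolean("app.verbose");

    System.out.println(name + " listening on " + port + " (verbose=" + verbose + ")");
  }
}

ConfigFactory.parseMap, ConfigFactory.parseFile, and ConfigFactory.parseString, which appear throughout the examples below, all return Config objects that plug into the same withFallback chain.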
Example #1
Source File: ZipFileConfigStoreTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testGetOwnConfig() {
  Config config1 = this.store.getOwnConfig(this.rootPath, this.version);
  Assert.assertEquals(config1.getString("gobblin.property.test1"), "prop1");
  Assert.assertEquals(config1.getString("gobblin.property.test2"), "prop2");

  Config config2 = this.store.getOwnConfig(this.testPath, this.version);
  Assert.assertEquals(config2.getString("gobblin.test.property"), "string1");

  Config config3 = this.store.getOwnConfig(this.child1Path, this.version);
  Assert.assertEquals(config3.getString("gobblin.test.property"), "string2");

  Config config4 = this.store.getOwnConfig(this.child2Path, this.version);
  Assert.assertEquals(config4.getString("gobblin.test.property"), "string3");

}
 
Example #2
Source File: TestTranslateFunction.java    From envelope with Apache License 2.0
@Test
public void testAppendRaw() {
  Map<String, Object> configMap = Maps.newHashMap();
  configMap.put(ComponentFactory.TYPE_CONFIG_NAME, DummyTranslator.class.getName());
  configMap.put(TranslateFunction.APPEND_RAW_ENABLED_CONFIG, true);
  Config config = ConfigFactory.parseMap(configMap);

  TranslateFunction tf = new TranslateFunction(config);
  tf.receiveProvidedSchema(tf.getExpectingSchema());
  Dataset<Row> raw = Contexts.getSparkSession().createDataFrame(
      Lists.newArrayList(RowFactory.create("hello?")), tf.getExpectingSchema());
  Dataset<Row> translated = raw.flatMap(tf, RowEncoder.apply(tf.getProvidingSchema()));

  assertEquals(2, translated.schema().size());
  assertEquals("_value", translated.schema().fields()[1].name());
  assertEquals("hello?", translated.collectAsList().get(0).getString(1));
}
 
Example #3
Source File: FileEventSinkTest.java    From mewbase with MIT License
@Test
public void testMultiPublishWritesEvent() throws Exception {

  final Config cfg = createConfig();
  final EventSink es = new FileEventSink(cfg);
  final String eventPath = cfg.getString("mewbase.event.sink.file.basedir");

  final String channelName = "channel";
  final BsonObject evt = new BsonObject().put("key", "value");
  IntStream.range(0, 10).forEach(i -> es.publishSync(channelName, evt.put("evt", "" + i)));

  // check that each file has been written
  Set<Path> files = Files.list(Paths.get(eventPath, channelName)).collect(Collectors.toSet());
  IntStream.range(0, 10).forEach(i -> {
    final String eventFileName = FileEventUtils.pathFromEventNumber(i).toString();
    final Path path = Paths.get(eventPath, channelName, eventFileName);
    assertTrue(files.contains(path));
  });
}
 
Example #4
Source File: EventTimeHistoryPlanner.java    From envelope with Apache License 2.0
@Override
public Set<InstantiatedComponent> getComponents(Config config, boolean configure) {
  this.config = config;

  Set<InstantiatedComponent> components = Sets.newHashSet();

  components.add(new InstantiatedComponent(
      getEventTimeModel(configure), getEventTimeModelConfig(), "Event Time Model"));

  if (hasLastUpdatedField()) {
    components.add(new InstantiatedComponent(getLastUpdatedTimeModel(configure),
        getLastUpdatedTimeModelConfig(), "Last Updated Time Model"));
  }

  return components;
}
 
Example #5
Source File: HBaseUtils.java    From envelope with Apache License 2.0
public synchronized static Connection getConnection(Config config) throws IOException {
  LOG.info("Opening connection to HBase");
  LOG.debug("Creating connection object...");
  Configuration configuration = HBaseUtils.getHBaseConfiguration(config);

  // new Connection
  Connection connection = ConnectionFactory.createConnection(configuration);

  if (connection == null) {
    LOG.error("Could not open connection to HBase with {}", configuration.get(HBaseUtils.ZK_QUORUM_PROPERTY));
    throw new IllegalArgumentException("Could not connect to HBase with supplied ZK quorum");
  }

  JVMUtils.closeAtShutdown(connection);
  return connection;
}
 
Example #6
Source File: RemoteMiniClusterImpl.java    From beam with Apache License 2.0
@Override
protected RpcService createRpcService(
    AkkaRpcServiceConfiguration akkaRpcServiceConfig, boolean remoteEnabled, String bindAddress) {

  // Enable remote connections to the mini cluster which are disabled by default
  final Config akkaConfig =
      AkkaUtils.getAkkaConfig(akkaRpcServiceConfig.getConfiguration(), bindAddress, 0);

  final Config effectiveAkkaConfig = AkkaUtils.testDispatcherConfig().withFallback(akkaConfig);

  final ActorSystem actorSystem = AkkaUtils.createActorSystem(effectiveAkkaConfig);

  final AkkaRpcService akkaRpcService = new AkkaRpcService(actorSystem, akkaRpcServiceConfig);
  this.port = akkaRpcService.getPort();

  return akkaRpcService;
}
 
Example #7
Source File: XConfigTest.java    From xrpc with Apache License 2.0
@Test
void getClientRateLimitOverride() {
  Config rawConfig = ConfigFactory.load("test.conf");
  XConfig config = new XConfig(rawConfig.getConfig("xrpc"));

  Map<String, List<Double>> configTest = config.getClientRateLimitOverride();

  double expected1 = Double.parseDouble("550");
  double testVal1 = configTest.get("localhost").get(0);
  assertEquals(expected1, testVal1);

  double expected2 = Double.parseDouble("1");
  double testVal2 = configTest.get("1.2.3.4").get(0);

  double expected3 = Double.parseDouble("2");
  double testVal3 = configTest.get("1.2.3.4").get(1);

  assertEquals(expected2, testVal2);
  assertEquals(expected3, testVal3);
}
 
Example #8
Source File: DecisionStep.java    From envelope with Apache License 2.0
@Override
public void configure(Config config) {
  super.configure(config);
  
  this.ifTrueStepNames = config.getStringList(IF_TRUE_STEP_NAMES_PROPERTY);
  this.decisionMethod = DecisionMethod.valueOf(config.getString(DECISION_METHOD_PROPERTY).toUpperCase());
  
  switch (decisionMethod) {
    case LITERAL:
      this.literalResult = config.getBoolean(LITERAL_RESULT_PROPERTY);
      break;
    case STEP_BY_KEY:
      this.stepByKeyStepName = config.getString(STEP_BY_KEY_STEP_PROPERTY);
      this.stepByKeyKey = config.getString(STEP_BY_KEY_KEY_PROPERTY);
      break;
    case STEP_BY_VALUE:
      this.stepByValueStepName = config.getString(STEP_BY_VALUE_STEP_PROPERTY);
      break;
  }
}
 
Example #9
Source File: HyperParams.java    From ytk-learn with MIT License
public Hoag(Config config, String prefix) {
    init_step = config.getDouble(prefix + KEY + "init_step");
    step_decr_factor = config.getDouble(prefix + KEY + "step_decr_factor");
    test_loss_reduce_limit = config.getDouble(prefix + KEY + "test_loss_reduce_limit");
    outer_iter = config.getInt(prefix + KEY + "outer_iter");
    List<Double> l1List = config.getDoubleList(prefix + KEY + "l1");
    List<Double> l2List = config.getDoubleList(prefix + KEY + "l2");
    l1 = new double[l1List.size()];
    l2 = new double[l2List.size()];
    for (int i = 0; i < l2.length; i ++) {
        l1[i] = l1List.get(i);
        l2[i] = l2List.get(i);
    }

    CheckUtils.check(step_decr_factor < 1.0, "%sstep_decr_factor:%f must be < 1.0",
            prefix + KEY, step_decr_factor);
    CheckUtils.check(l1.length == l2.length,
            "%sl1 length must be equal to %sl2 length",
            prefix + KEY,
            prefix + KEY);
}
 
Example #10
Source File: MetadataDaoFactory.java    From eagle with Apache License 2.0
private MetadataDaoFactory() {
    Config config = ConfigFactory.load();
    if (!config.hasPath(MetadataUtils.META_DATA)) {
        LOG.warn("metadata is not configured, use in-memory store !!!");
        dao = new InMemMetadataDaoImpl(null);
    } else {
        Config metaDataConfig = config.getConfig(MetadataUtils.META_DATA);
        try {
            String clsName = metaDataConfig.getString(MetadataUtils.ALERT_META_DATA_DAO);
            Class<?> clz;
            clz = Thread.currentThread().getContextClassLoader().loadClass(clsName);
            if (IMetadataDao.class.isAssignableFrom(clz)) {
                Constructor<?> cotr = clz.getConstructor(Config.class);
                LOG.info("metadata.alertMetadataDao loaded: " + clsName);
                dao = (IMetadataDao) cotr.newInstance(metaDataConfig);
            } else {
                throw new Exception("metadata.metadataDao configuration need to be implementation of IMetadataDao! ");
            }
        } catch (Exception e) {
            LOG.error("error when initialize the dao, fall back to in memory mode!", e);
            dao = new InMemMetadataDaoImpl(metaDataConfig);
        }
    }
}
 
Example #11
Source File: DatePartitionHiveVersionFinder.java    From incubator-gobblin with Apache License 2.0
public DatePartitionHiveVersionFinder(FileSystem fs, Config config) {

    this.pattern =
        ConfigUtils.getString(config, PARTITION_VALUE_DATE_TIME_PATTERN_KEY, DEFAULT_PARTITION_VALUE_DATE_TIME_PATTERN);

    if (config.hasPath(PARTITION_VALUE_DATE_TIME_TIMEZONE_KEY)) {
      this.formatter = DateTimeFormat.forPattern(pattern)
          .withZone(DateTimeZone.forID(config.getString(PARTITION_VALUE_DATE_TIME_TIMEZONE_KEY)));
    } else {
      this.formatter =
          DateTimeFormat.forPattern(pattern).withZone(DateTimeZone.forID(DEFAULT_PARTITION_VALUE_DATE_TIME_TIMEZONE));
    }

    this.partitionKeyName = ConfigUtils.getString(config, PARTITION_KEY_NAME_KEY, DEFAULT_PARTITION_KEY_NAME);
    this.partitionKeyNamePredicate = new Predicate<FieldSchema>() {

      @Override
      public boolean apply(FieldSchema input) {
        return StringUtils.equalsIgnoreCase(input.getName(), DatePartitionHiveVersionFinder.this.partitionKeyName);
      }
    };
  }
 
Example #12
Source File: Rules.java    From Stargraph with MIT License
private Map<Language, List<Pattern>> loadStopPatterns(Config config) {
    Map<Language, List<Pattern>> rulesByLang = new LinkedHashMap<>();
    ConfigObject configObject = config.getObject("rules.stop-pattern");

    configObject.keySet().forEach(strLang -> {
        Language language = Language.valueOf(strLang.toUpperCase());
        List<String> patternStr = configObject.toConfig().getStringList(strLang);

        rulesByLang.compute(language,
                (lang, pattern) -> patternStr.stream().map(Pattern::compile).collect(Collectors.toList()));

        logger.info(marker, "Loaded {} Stop patterns for '{}'", rulesByLang.get(language).size(), language);

    });

    return rulesByLang;
}
 
Example #13
Source File: TestInputTranslatorCompatibilityValidation.java    From envelope with Apache License 2.0
@Test
public void testInputTranslatorCompatible() {
  Map<String, Object> translatorConfigMap = Maps.newHashMap();
  translatorConfigMap.put(
      ComponentFactory.TYPE_CONFIG_NAME, StringExpectingTranslator.class.getName());

  Map<String, Object> inputConfigMap = Maps.newHashMap();
  inputConfigMap.put(
      ComponentFactory.TYPE_CONFIG_NAME, StringProvidingStreamInput.class.getName());
  inputConfigMap.put(
      StreamingStep.TRANSLATOR_WITHIN_INPUT_PROPERTY, translatorConfigMap);

  Map<String, Object> stepConfigMap = Maps.newHashMap();
  stepConfigMap.put(DataStep.INPUT_TYPE, inputConfigMap);
  Config stepConfig = ConfigFactory.parseMap(stepConfigMap);

  StreamingStep step = new StreamingStep("to_validate");

  ValidationAssert.assertNoValidationFailures(step, stepConfig);
}
 
Example #14
Source File: JdbcMetadataHandler.java    From eagle with Apache License 2.0
public JdbcMetadataHandler(Config config) {
    try {
        //JdbcSchemaManager.getInstance().init(config);
        BasicDataSource bDatasource = new BasicDataSource();
        bDatasource.setDriverClassName(config.getString(MetadataUtils.JDBC_DRIVER_PATH));
        if (config.hasPath(MetadataUtils.JDBC_USERNAME_PATH)) {
            bDatasource.setUsername(config.getString(MetadataUtils.JDBC_USERNAME_PATH));
            bDatasource.setPassword(config.getString(MetadataUtils.JDBC_PASSWORD_PATH));
        }
        bDatasource.setUrl(config.getString(MetadataUtils.JDBC_CONNECTION_PATH));
        if (config.hasPath(MetadataUtils.JDBC_CONNECTION_PROPERTIES_PATH)) {
            bDatasource.setConnectionProperties(config.getString(MetadataUtils.JDBC_CONNECTION_PROPERTIES_PATH));
        }
        this.dataSource = bDatasource;
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
    }
}
 
Example #15
Source File: TestTokenStoreManager.java    From envelope with Apache License 2.0
@Test
public void testAddProvider() {
  try {
    Configuration hadoopConf = new Configuration();

    Config config = ConfigUtils.configFromResource("/security/security_manager_testrenewal.conf");
    Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST);
    TokenStoreManager manager = new TokenStoreManager(ConfigUtils.getOrElse(config,
        APPLICATION_SECTION_PREFIX + "." + SECURITY_PREFIX, ConfigFactory.empty()));
    manager.addTokenProvider(new TestTokenProvider());
    manager.start();

    List<Path> files = SecurityUtils.getExistingTokenStoreFiles(ConfigUtils.getOrElse(config,
        APPLICATION_SECTION_PREFIX + "." + SECURITY_PREFIX, ConfigFactory.empty()),
        hadoopConf, true);
    assertEquals(1, files.size());
    TokenStore wrapper = new TokenStore();
    wrapper.read(files.get(0).toString(), hadoopConf);
    assertTrue(wrapper.getTokenAliases().contains("test-provider"));

    manager.stop();
  } catch (Exception e) {
    fail(e.getMessage());
  }
}
 
Example #16
Source File: ConfigMapper.java    From atomix with Apache License 2.0
/**
 * Loads the given configuration file using the mapper, falling back to the given resources.
 *
 * @param type      the type to load
 * @param files     the files to load
 * @param resources the resources to which to fall back
 * @param <T>       the resulting type
 * @return the loaded configuration
 */
public <T> T loadFiles(Class<T> type, List<File> files, List<String> resources) {
  if (files == null) {
    return loadResources(type, resources);
  }

  Config config = ConfigFactory.systemProperties();
  for (File file : files) {
    config = config.withFallback(ConfigFactory.parseFile(file, ConfigParseOptions.defaults().setAllowMissing(false)));
  }

  for (String resource : resources) {
    config = config.withFallback(ConfigFactory.load(classLoader, resource));
  }
  return map(checkNotNull(config, "config cannot be null").resolve(), type);
}
 
Example #17
Source File: GravatarProfilesFetcher.java    From incubator-retired-wave with Apache License 2.0
@Inject
public GravatarProfilesFetcher(Config config) {
  if (config.getBoolean("security.enable_ssl")) {
    gravatarUrl = SECURE_GRAVATAR_URL;
  } else {
    gravatarUrl = NON_SECURE_GRAVATAR_URL;
  }
}
 
Example #18
Source File: TokenSecurityContextFilter.java    From openscoring with GNU Affero General Public License v3.0
@Inject
public TokenSecurityContextFilter(@Named("openscoring") Config config){
	Config filterConfig = config.getConfig("tokenSecurityContextFilter");

	this.userToken = prepareToken(filterConfig, "userToken");
	this.adminToken = prepareToken(filterConfig, "adminToken");

	logger.info("User token: {}", this.userToken);
	logger.info("Admin token: {}", this.adminToken);
}
 
Example #19
Source File: SharedResourcesBrokerFactoryTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testLoadingOfClasspath() {
  Config config =
      ConfigFactory.parseMap(ImmutableMap.of(SharedResourcesBrokerFactory.BROKER_CONF_FILE_KEY, "/broker/testBroker.conf"));
  SharedResourcesBrokerImpl<SimpleScopeType> broker =
      SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config, SimpleScopeType.GLOBAL.defaultScopeInstance());

  ConfigView configView = broker.getConfigView(null, null, "factory");
  Assert.assertTrue(configView.getConfig().hasPath("testKey"));
  Assert.assertEquals(configView.getConfig().getString("testKey"), "testValue");
}
 
Example #20
Source File: AkkaSource.java    From flink-learning with Apache License 2.0
/**
 * Creates {@link AkkaSource} for Streaming
 *
 * @param actorName      Receiver Actor name
 * @param urlOfPublisher tcp url of the publisher or feeder actor
 * @param configuration  custom Akka configuration
 */
public AkkaSource(String actorName, String urlOfPublisher, Config configuration) {
    super();
    this.classForActor = ReceiverActor.class;
    this.actorName = actorName;
    this.urlOfPublisher = urlOfPublisher;
    this.configuration = configuration;
}
 
Example #21
Source File: StormMetricConsumer.java    From eagle with Apache License 2.0
@SuppressWarnings( {"serial", "rawtypes"})
@Override
public void prepare(Map stormConf, Object registrationArgument, TopologyContext context, IErrorReporter errorReporter) {
    Config config = ConfigFactory.parseString((String) registrationArgument, ConfigParseOptions.defaults());
    topologyName = config.getString("appId");
    topologyId = context.getStormId();
    metricSystem = MetricSystem.load(config);
    metricSystem.tags(new HashMap<String, Object>() {
        {
            put("appId", topologyName);
            put("stormId", topologyId);
        }
    });
    metricSystem.start();
}
 
Example #22
Source File: LoadSolrBuilder.java    From kite with Apache License 2.0
private RetryPolicyFactory parseRetryPolicyFactory(Config retryPolicyConfig) {      
  if (retryPolicyConfig == null && !DISABLE_RETRY_POLICY_BY_DEFAULT) {
    // ask RetryPolicyFactoryParser to return a retry policy with reasonable defaults
    retryPolicyConfig = ConfigFactory.parseString(
        "{" + RetryPolicyFactoryParser.BOUNDED_EXPONENTIAL_BACKOFF_RETRY_NAME + "{}}");
  }
  if (retryPolicyConfig == null) {
    return null;
  } else {
    return new RetryPolicyFactoryParser().parse(retryPolicyConfig);
  }
}
 
Example #23
Source File: CountBasedLimiter.java    From incubator-gobblin with Apache License 2.0
@Override
public Limiter buildLimiter(Config config) {
  if (!config.hasPath(COUNT_KEY)) {
    throw new IllegalArgumentException("Missing key " + COUNT_KEY);
  }
  return new CountBasedLimiter(config.getLong(COUNT_KEY));
}
 
Example #24
Source File: YarnContainerSecurityManager.java    From incubator-gobblin with Apache License 2.0
public YarnContainerSecurityManager(Config config, FileSystem fs, EventBus eventBus, LogCopier logCopier) {
  this.fs = fs;
  this.tokenFilePath = new Path(this.fs.getHomeDirectory(),
      config.getString(GobblinYarnConfigurationKeys.APPLICATION_NAME_KEY) + Path.SEPARATOR
          + GobblinYarnConfigurationKeys.TOKEN_FILE_NAME);
  this.eventBus = eventBus;
  this.logCopier = logCopier;
}
 
Example #25
Source File: ServingLayerTest.java    From oryx with Apache License 2.0
@Test
public void testServingLayerSecure() throws Exception {
  Path keystoreFile = SecureAPIConfigIT.buildKeystoreFile();
  Map<String,Object> overlay = buildOverlay();
  overlay.put("oryx.serving.api.keystore-file", "\"" + keystoreFile + "\"");
  overlay.put("oryx.serving.api.keystore-password", "oryxpass");
  overlay.put("oryx.serving.api.key-alias", "oryxtest");
  Config config = ConfigUtils.overlayOn(overlay, ConfigUtils.getDefault());
  try {
    doTestServingLayer(config);
  } finally {
    Files.delete(Paths.get(config.getString("oryx.serving.api.keystore-file")));
  }
}
 
Example #26
Source File: ConfigurationTest.java    From gsc-core with GNU Lesser General Public License v3.0
@Test
public void getShouldReturnConfiguration() {
  Config config = Configuration.getByFileName(Constant.TEST_NET_CONF, Constant.TEST_NET_CONF);
  assertTrue(config.hasPath("storage"));
  assertTrue(config.hasPath("node.discovery.boot"));
  assertTrue(config.hasPath("genesis.block"));
}
 
Example #27
Source File: SprinklrBootstrapIT.java    From streams with Apache License 2.0
@BeforeClass(alwaysRun = true)
public void setup() throws Exception {
  File conf = new File(configfile);
  Assert.assertTrue(conf.exists());
  Assert.assertTrue(conf.canRead());
  Assert.assertTrue(conf.isFile());
  Config parsedConfig = ConfigFactory.parseFileAnySyntax(conf);
  StreamsConfigurator.addConfig(parsedConfig);
  config = new ComponentConfigurator<>(SprinklrConfiguration.class).detectConfiguration();
  testsconfig = StreamsConfigurator.getConfig().getConfig("org.apache.streams.sprinklr.config.SprinklrConfiguration");
}
 
Example #28
Source File: ExecutionRuntimeManagerTest.java    From eagle with Apache License 2.0
@Test
public void test() throws NoSuchFieldException, IllegalAccessException {
    Config config = ConfigFactory.load();
    ExecutionRuntimeManager manager = ExecutionRuntimeManager.getInstance();
    manager.getRuntime(StormEnvironment.class, config);
    manager.getRuntime(StormEnvironment.class, config);

    Field field = manager.getClass().getDeclaredField("executionRuntimeCache");
    field.setAccessible(true);
    Map<Environment, ExecutionRuntime> executionRuntimeCache = (Map<Environment, ExecutionRuntime>) field.get(manager);

    Assert.assertTrue(executionRuntimeCache.size() == 1);

}
 
Example #29
Source File: ExtractJsonPathsBuilder.java    From kite with Apache License 2.0
public ExtractJsonPaths(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
  super(builder, config, parent, child, context);
  ListMultimap<String, String> stepMultiMap = ArrayListMultimap.create();
  this.flatten = getConfigs().getBoolean(config, "flatten", true);
  Config paths = getConfigs().getConfig(config, "paths");
  for (Map.Entry<String, Object> entry : new Configs().getEntrySet(paths)) {
    String fieldName = entry.getKey();        
    String path = entry.getValue().toString().trim();
    if (path.contains("//")) {
      throw new MorphlineCompilationException("No support for descendant axis available yet", config);
    }
    if (path.startsWith("/")) {
      path = path.substring(1);
    }
    if (path.endsWith("/")) {
      path = path.substring(0, path.length() - 1);
    }
    path = path.trim();
    for (String step : path.split("/")) {
      step = step.trim();
      if (step.length() > ARRAY_TOKEN.length() && step.endsWith(ARRAY_TOKEN)) {
        step = step.substring(0,  step.length() - ARRAY_TOKEN.length());
        stepMultiMap.put(fieldName, normalize(step));
        stepMultiMap.put(fieldName, ARRAY_TOKEN);
      } else {
        stepMultiMap.put(fieldName, normalize(step));
      }
    }
  }
  this.stepMap = stepMultiMap.asMap();
  LOG.debug("stepMap: {}", stepMap);
  validateArguments();
}
 
Example #30
Source File: DatasetCleanerSource.java    From incubator-gobblin with Apache License 2.0
/**
 * Create a work unit for each configuration defined or a single work unit if no configurations are defined
 * @param state see {@link org.apache.gobblin.configuration.SourceState}
 * @return list of workunits
 */
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
  List<WorkUnit> workUnits = Lists.newArrayList();
  Config config = ConfigUtils.propertiesToConfig(state.getProperties());
  Config sourceConfig = ConfigUtils.getConfigOrEmpty(config, DATASET_CLEANER_SOURCE_PREFIX);
  List<String> configurationNames = ConfigUtils.getStringList(config, DATASET_CLEANER_CONFIGURATIONS);

  // use a dummy configuration name if none set
  if (configurationNames.isEmpty()) {
    configurationNames = ImmutableList.of("DummyConfig");
  }

  for (String configurationName: configurationNames) {
    WorkUnit workUnit = WorkUnit.createEmpty();

    // specific configuration prefixed by the configuration name has precedence over the source specific configuration
    // and the source specific configuration has precedence over the general configuration
    Config wuConfig = ConfigUtils.getConfigOrEmpty(sourceConfig, configurationName).withFallback(sourceConfig)
        .withFallback(config);

    workUnit.setProps(ConfigUtils.configToProperties(wuConfig), new Properties());
    TaskUtils.setTaskFactoryClass(workUnit, DatasetCleanerTaskFactory.class);
    workUnits.add(workUnit);
  }

  return workUnits;
}