com.typesafe.config.ConfigRenderOptions Java Examples

The following examples show how to use com.typesafe.config.ConfigRenderOptions. Each snippet is drawn from an open source project; the source file, project, and license are noted above each example.
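Before the project examples, here is a minimal, self-contained sketch of the two rendering styles that recur below: concise() produces compact single-line JSON (convenient for re-parsing or feeding to a JSON mapper), while defaults() with a few setters toggled produces human-readable HOCON. The config content in this sketch is made up purely for illustration.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigRenderOptions;

public class RenderOptionsDemo {
    public static void main(String[] args) {
        // Hypothetical config content, for illustration only.
        Config config = ConfigFactory.parseString("app { name = demo, threads = 4 }");

        // concise(): compact JSON with no comments or formatting -- this is
        // the style most examples below re-parse or hand to a JSON mapper.
        String json = config.root().render(ConfigRenderOptions.concise());

        // defaults() adjusted to emit readable HOCON instead of JSON, as in
        // the GobblinYarnAppLauncher example (#28) below.
        String hocon = config.root().render(ConfigRenderOptions.defaults()
                .setComments(false)
                .setOriginComments(false)
                .setFormatted(true)
                .setJson(false));

        System.out.println(json);
        System.out.println(hocon);
    }
}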
Example #1
Source File: AbstractBackgroundStreamingActorWithConfigWithStatusReport.java    From ditto with Eclipse Public License 2.0
@Override
public Config setConfig(final Config config) {
    final C previousConfig = this.config;
    // TODO Ditto issue #439: replace ConfigWithFallback - it breaks AbstractConfigValue.withFallback!
    // Workaround: re-parse my config
    final Config fallback = ConfigFactory.parseString(getConfig().root().render(ConfigRenderOptions.concise()));
    try {
        this.config = parseConfig(config.withFallback(fallback));
    } catch (final DittoConfigError | ConfigException e) {
        log.error(e, "Failed to set config");
    }
    if (!previousConfig.isEnabled() && this.config.isEnabled()) {
        scheduleWakeUp();
    }
    return this.config.getConfig();
}
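The round trip through render(ConfigRenderOptions.concise()) and ConfigFactory.parseString here is deliberate: rendering to concise JSON and re-parsing yields a plain Config detached from the ConfigWithFallback wrapper that, per the TODO above, breaks AbstractConfigValue.withFallback. Example #7 applies the same workaround.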
 
Example #2
Source File: BenchmarkTest.java    From sawmill with Apache License 2.0
private void testScenario(String scenarioName, double threshold) throws RunnerException, IOException {
    String scenario = Resources.toString(Resources.getResource(scenarioName), Charsets.UTF_8);
    scenario = scenario.replaceAll("DOCUMENTPLACEHOLDER", tempFolder.getRoot().getAbsolutePath());

    String json = ConfigFactory.parseString(scenario).root().render(ConfigRenderOptions.concise());
    Options opts = JsonUtils.fromJsonString(SawmillBenchmarkOptions.class, json).toJmhOptions();
    Iterator<RunResult> results = new Runner(opts).run().iterator();

    // Copy the artifacts
    File sourceResults = new File(tempFolder.getRoot(), "result.json");
    Files.copy(sourceResults, new File(targetDir(), "result.json"));

    logger.info("Test Output:");
    logger.info(FileUtils.readFileToString(sourceResults, Charsets.UTF_8));

    while (results.hasNext()) {
        RunResult runResults = results.next();
        assertThat(runResults.getPrimaryResult().getScore()).isGreaterThan(threshold);
    }
}
 
Example #3
Source File: AuditVerticleTest.java    From microtrader with MIT License
@Test
public void testStockTradesPersisted(TestContext context) throws ClassNotFoundException {
    Async async = context.async();
    JsonObject jdbcConfig = new JsonObject(config.getObject("jdbc").render(ConfigRenderOptions.concise()));
    JDBCClient jdbc = JDBCClient.createNonShared(vertx, jdbcConfig);
    Class.forName(jdbcConfig.getString("driverclass"));

    jdbc.getConnection(ar -> {
        if (ar.failed()) {
            context.fail(ar.cause());
        } else {
            SQLConnection connection = ar.result();
            connection.query(SELECT_STATEMENT, result -> {
                ResultSet set = result.result();
                List<JsonObject> operations = set.getRows().stream()
                        .map(json -> new JsonObject(json.getString("OPERATION")))
                        .collect(Collectors.toList());
                context.assertTrue(operations.size() >= 3);
                connection.close();
                async.complete();
            });
        }
    });
}
 
Example #4
Source File: AbstractIndexer.java    From samantha with MIT License
private void notifyDataSubscribers(JsonNode entities, RequestContext requestContext) {
    if (subscribers == null) {
        return;
    }
    ObjectNode reqBody = Json.newObject();
    IOUtilities.parseEntityFromJsonNode(requestContext.getRequestBody(), reqBody);
    ObjectNode daoConfig = Json.newObject();
    daoConfig.put(ConfigKey.ENTITY_DAO_NAME_KEY.get(), ConfigKey.REQUEST_ENTITY_DAO_NAME.get());
    daoConfig.set(ConfigKey.REQUEST_ENTITY_DAO_ENTITIES_KEY.get(), entities);
    reqBody.set(daoConfigKey, daoConfig);
    RequestContext pseudoReq = new RequestContext(reqBody, requestContext.getEngineName());
    for (Configuration configuration : subscribers) {
        String name = configuration.getString(ConfigKey.ENGINE_COMPONENT_NAME.get());
        String type = configuration.getString(ConfigKey.ENGINE_COMPONENT_TYPE.get());
        JsonNode configReq = Json.parse(configuration.getConfig(ConfigKey.REQUEST_CONTEXT.get())
                .underlying().root().render(ConfigRenderOptions.concise()));
        IOUtilities.parseEntityFromJsonNode(configReq, reqBody);
        EngineComponent.valueOf(type).getComponent(configService, name, pseudoReq);
    }
}
 
Example #5
Source File: StreamsConfigurator.java    From streams with Apache License 2.0
public static StreamsConfiguration detectConfiguration(Config typesafeConfig) {

    Config streamsConfigurationRoot = null;
    if( typesafeConfig.hasPath(StreamsConfiguration.class.getCanonicalName())) {
      streamsConfigurationRoot = typesafeConfig.getConfig(StreamsConfiguration.class.getCanonicalName());
    } else if (typesafeConfig.hasPath(StreamsConfiguration.class.getSimpleName())) {
      streamsConfigurationRoot = typesafeConfig.getConfig(StreamsConfiguration.class.getSimpleName());
    } else {
      streamsConfigurationRoot = typesafeConfig;
    }

    StreamsConfiguration pojoConfig = null;

    try {
      pojoConfig = mapper.readValue(streamsConfigurationRoot.resolve().root().render(ConfigRenderOptions.concise()), StreamsConfiguration.class);
    } catch (Exception e) {
      LOGGER.warn("Could not parse: {}", typesafeConfig, e);
    }

    return pojoConfig;
}
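Because concise() output is valid JSON, a Config can be round-tripped through Jackson into a typed configuration POJO, as mapper.readValue does here. The Facebook providers in examples #15, #17, and #23 rely on the same pattern. Note that resolve() is called before rendering, so any ${...} substitutions are expanded into concrete values before serialization.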
 
Example #6
Source File: HiveJobRunningMonitoringMain.java    From Eagle with Apache License 2.0
public static void main(String[] args) throws Exception {
    new ConfigOptionParser().load(args);
    System.setProperty("config.trace", "loads");
    Config config = ConfigFactory.load();

    LOG.info("Config class: " + config.getClass().getCanonicalName());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Config content: " + config.root().render(ConfigRenderOptions.concise()));
    }

    String spoutName = "msgConsumer";
    int parallelism = config.getInt("envContextConfig.parallelismConfig." + spoutName);
    StormExecutionEnvironment env = ExecutionEnvironmentFactory.getStorm(config);
    env.newSource(new HiveJobRunningSourcedStormSpoutProvider().getSpout(config, parallelism)).renameOutputFields(4).withName(spoutName).groupBy(Arrays.asList(0))
            .flatMap(new JobConfigurationAdaptorExecutor()).groupBy(Arrays.asList(0))
            .flatMap(new HiveQueryParserExecutor()).groupBy(Arrays.asList(0))
            .flatMap(new HiveResourceSensitivityDataJoinExecutor())
            .alertWithConsumer("hiveAccessLogStream", "hiveAccessAlertByRunningJob");
    env.execute();
}
 
Example #7
Source File: DefaultAkkaReplicatorConfig.java    From ditto with Eclipse Public License 2.0
/**
 * Returns an instance of {@code DefaultAkkaReplicatorConfig} based on the settings of the specified Config.
 *
 * @param name the name of the replicator.
 * @param role the cluster role of members with replicas of the distributed collection.
 * @param config is supposed to provide the settings of the Replicator config at {@value #CONFIG_PATH}.
 * @return the instance.
 * @throws org.eclipse.ditto.services.utils.config.DittoConfigError if {@code config} is invalid.
 */
public static DefaultAkkaReplicatorConfig of(final Config config, final CharSequence name,
        final CharSequence role) {
    final Map<String, Object> specificConfig = new HashMap<>(2);
    specificConfig.put(AkkaReplicatorConfigValue.NAME.getConfigPath(), checkNotNull(name, "name"));
    specificConfig.put(AkkaReplicatorConfigValue.ROLE.getConfigPath(), checkNotNull(role, "role"));

    // TODO Ditto issue #439: replace ConfigWithFallback - it breaks AbstractConfigValue.withFallback!
    // Workaround: re-parse my config
    final ConfigWithFallback configWithFallback = ConfigWithFallback.newInstance(config, CONFIG_PATH,
            AkkaReplicatorConfigValue.values());
    final Config fallback =
            ConfigFactory.parseString(configWithFallback.root().render(ConfigRenderOptions.concise()));

    return new DefaultAkkaReplicatorConfig(ConfigFactory.parseMap(specificConfig)
            .withFallback(fallback));
}
 
Example #8
Source File: HoconProcessor.java    From vertx-config with Apache License 2.0
@Override
public void process(Vertx vertx, JsonObject configuration, Buffer input, Handler<AsyncResult<JsonObject>> handler) {
  // Use executeBlocking even if the bytes are in memory
  // Indeed, HOCON resolution can read others files (includes).
  vertx.executeBlocking(
      future -> {
        try (Reader reader = new StringReader(input.toString("UTF-8"))) {
          Config conf = ConfigFactory.parseReader(reader);
          conf = conf.resolve();
          String output = conf.root().render(ConfigRenderOptions.concise()
            .setJson(true).setComments(false).setFormatted(false));
          JsonObject json = new JsonObject(output);
          future.complete(json);
        } catch (Exception e) {
          future.fail(e);
        }
      },
      handler
  );
}
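ConfigRenderOptions is immutable: each set* call returns a new options instance, so the chained style used here and the reassignment style in the GobblinYarnAppLauncher example (#28) are equivalent. This snippet also shows a common HOCON-to-JSON bridge: parse and resolve the HOCON (expanding any includes and substitutions), then render it as unformatted JSON that Vert.x's JsonObject can consume directly.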
 
Example #9
Source File: UserProfileDetectionStreamMain.java    From Eagle with Apache License 2.0
public static void main(String[] args) throws Exception {
    Config config = new ConfigOptionParser().load(args);

    LOG.info("Config class: " + config.getClass().getCanonicalName());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Config content: " + config.root().render(ConfigRenderOptions.concise()));
    }

    StormExecutionEnvironment env = ExecutionEnvironmentFactory.getStorm(config);
    env.newSource(new KafkaSourcedSpoutProvider().getSpout(config)).renameOutputFields(1).withName("kafkaMsgConsumer")
            .flatMap(new AuditLogTransformer()).withName("transformer")     // [user,map]
            .groupBy(Arrays.asList(0))                                      // group by [user]
            .flatMap(new UserProfileAggregatorExecutor()).withName("aggregator")
            .alertWithConsumer(Arrays.asList(UserProfileDetectionConstants.USER_ACTIVITY_AGGREGATION_STREAM),
                    UserProfileDetectionConstants.USER_PROFILE_ANOMALY_DETECTION_EXECUTOR); // alert
    env.execute();
}
 
Example #10
Source File: UserProfileDetectionBatchMain.java    From Eagle with Apache License 2.0
public static void main(String[] args) throws Exception {
    new ConfigOptionParser().load(args);
    System.setProperty("config.trace", "loads");
    Config config = ConfigFactory.load();

    LOG.info("Config class: " + config.getClass().getCanonicalName());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Config content: " + config.root().render(ConfigRenderOptions.concise()));
    }

    StormExecutionEnvironment env = ExecutionEnvironmentFactory.getStorm(config);
    env.newSource(new KafkaSourcedSpoutProvider().getSpout(config)).renameOutputFields(1)
            .flatMap(new UserActivityPartitionExecutor())
            .alertWithConsumer(UserProfileDetectionConstants.USER_ACTIVITY_AGGREGATION_STREAM,
                    UserProfileDetectionConstants.USER_PROFILE_ANOMALY_DETECTION_EXECUTOR);
    env.execute();
}
 
Example #11
Source File: TestMetastoreDatabaseFactory.java    From incubator-gobblin with Apache License 2.0
public static ITestMetastoreDatabase get(String version, Config dbConfig) throws Exception {
    try {
        synchronized (syncObject) {
            ensureDatabaseExists(dbConfig);
            TestMetadataDatabase instance = new TestMetadataDatabase(testMetastoreDatabaseServer, version);
            instances.add(instance);
            return instance;
        }
    } catch (Exception e) {
        throw new RuntimeException("Failed to create TestMetastoreDatabase with version " + version +
           " and config " + dbConfig.root().render(ConfigRenderOptions.defaults().setFormatted(true).setJson(true))
           + " cause: " + e, e);
    }
}
 
Example #12
Source File: SabotConfig.java    From dremio-oss with Apache License 2.0
@VisibleForTesting
public SabotConfig(Config config, boolean enableServerConfigs) {
  super(config);
  logger.debug("Setting up SabotConfig object.");
  logger.trace("Given Config object is:\n{}",
               config.root().render(ConfigRenderOptions.defaults()));
  RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean();
  this.startupArguments = ImmutableList.copyOf(bean.getInputArguments());
  logger.debug("SabotConfig object initialized.");
}
 
Example #13
Source File: HbaseAuditLogProcessorMain.java    From Eagle with Apache License 2.0
public static void main(String[] args) throws Exception {
    new ConfigOptionParser().load(args);
    System.setProperty("config.trace", "loads");
    Config config = ConfigFactory.load();

    LOG.info("Config class: " + config.getClass().getCanonicalName());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Config content: " + config.root().render(ConfigRenderOptions.concise()));
    }

    StormExecutionEnvironment env = ExecutionEnvironmentFactory.getStorm(config);
    env.newSource(new KafkaSourcedSpoutProvider().getSpout(config)).renameOutputFields(1).withName("kafkaMsgConsumer")
            .flatMap(new HbaseResourceSensitivityDataJoinExecutor())
            .alertWithConsumer("hbaseSecurityLogEventStream", "hbaseSecurityLogAlertExecutor");
    env.execute();
}
 
Example #14
Source File: HDFSSecurityLogProcessorMain.java    From Eagle with Apache License 2.0
public static void main(String[] args) throws Exception {
    new ConfigOptionParser().load(args);
    System.setProperty("config.trace", "loads");
    Config config = ConfigFactory.load();

    LOG.info("Config class: " + config.getClass().getCanonicalName());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Config content: " + config.root().render(ConfigRenderOptions.concise()));
    }

    StormExecutionEnvironment env = ExecutionEnvironmentFactory.getStorm(config);

    env.newSource(new KafkaSourcedSpoutProvider().getSpout(config)).renameOutputFields(1).withName("kafkaMsgConsumer")
            .alertWithConsumer("hdfsSecurityLogEventStream", "hdfsSecurityLogAlertExecutor");
    env.execute();
}
 
Example #15
Source File: FacebookUserstreamProvider.java    From streams with Apache License 2.0
/**
 * FacebookUserstreamProvider constructor.
 */
public FacebookUserstreamProvider() {
  Config config = StreamsConfigurator.getConfig().getConfig("facebook");
  FacebookUserInformationConfiguration facebookUserInformationConfiguration;
  try {
    facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
  } catch (IOException ex) {
    ex.printStackTrace();
  }
}
 
Example #16
Source File: ModifyConfigBehavior.java    From ditto with Eclipse Public License 2.0
/**
 * Injectable behavior to handle {@code ModifyConfig}.
 *
 * @return behavior to handle {@code ModifyConfig}.
 */
default AbstractActor.Receive modifyConfigBehavior() {
    return ReceiveBuilder.create()
            .match(ModifyConfig.class, cmd -> {
                final Config newConfig = setConfig(ConfigFactory.parseString(cmd.getConfig().toString()));
                final JsonObject newConfigJson =
                        JsonObject.of(newConfig.root().render(ConfigRenderOptions.concise()));
                final ModifyConfigResponse response =
                        ModifyConfigResponse.of(newConfigJson, cmd.getDittoHeaders());
                sender().tell(response, self());
            })
            .build();
}
 
Example #17
Source File: FacebookFriendFeedProvider.java    From streams with Apache License 2.0
/**
 * FacebookFriendFeedProvider constructor - output supplied Class.
 * @param klass Class
 */
public FacebookFriendFeedProvider(Class klass) {
  Config config = StreamsConfigurator.getConfig().getConfig("facebook");
  FacebookUserInformationConfiguration configuration;
  try {
    configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
  } catch (IOException ex) {
    ex.printStackTrace();
    return;
  }
  this.klass = klass;
}
 
Example #18
Source File: Controller.java    From J-Kinopoisk2IMDB with MIT License
boolean destroy() {
    if (clientExecutor.isRunning()) {
        if (!confirmStop()) {
            return false;
        }

        clientExecutor.terminate();
    }

    byte[] configuration = ConfigFactory.parseMap(configMap)
            .withFallback(config)
            .root()
            .render(ConfigRenderOptions.concise())
            .getBytes();

    try {
        Path parentDir = configPath.getParent();
        if (!Files.exists(parentDir)) {
            Files.createDirectories(parentDir);
        }
        Files.write(configPath, configuration);
    } catch (IOException ignore) {
        // Do nothing
    }

    return true;
}
 
Example #19
Source File: UnitTopologyRunner.java    From eagle with Apache License 2.0
private void run(String topologyId,
                 int numOfTotalWorkers,
                 int numOfSpoutTasks,
                 int numOfRouterBolts,
                 int numOfAlertBolts,
                 int numOfPublishExecutors,
                 int numOfPublishTasks,
                 Config config,
                 boolean localMode) {

    backtype.storm.Config stormConfig = givenStormConfig == null ? new backtype.storm.Config() : givenStormConfig;
    // TODO: Configurable metric consumer instance number

    int messageTimeoutSecs = config.hasPath(MESSAGE_TIMEOUT_SECS) ? config.getInt(MESSAGE_TIMEOUT_SECS) : DEFAULT_MESSAGE_TIMEOUT_SECS;
    LOG.info("Set topology.message.timeout.secs as {}", messageTimeoutSecs);
    stormConfig.setMessageTimeoutSecs(messageTimeoutSecs);

    if (config.hasPath("metric")) {
        stormConfig.registerMetricsConsumer(StormMetricTaggedConsumer.class, config.root().render(ConfigRenderOptions.concise()), 1);
    }

    stormConfig.setNumWorkers(numOfTotalWorkers);
    StormTopology topology = buildTopology(topologyId, numOfSpoutTasks, numOfRouterBolts, numOfAlertBolts, numOfPublishExecutors, numOfPublishTasks, config).createTopology();

    if (localMode) {
        LOG.info("Submitting as local mode");
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(topologyId, stormConfig, topology);
        Utils.sleep(Long.MAX_VALUE);
    } else {
        LOG.info("Submitting as cluster mode");
        try {
            StormSubmitter.submitTopologyWithProgressBar(topologyId, stormConfig, topology);
        } catch (Exception ex) {
            LOG.error("fail submitting topology {}", topology, ex);
            throw new IllegalStateException(ex);
        }
    }
}
 
Example #20
Source File: ConfigurableCleanableDataset.java    From incubator-gobblin with Apache License 2.0
private void initWithSelectionPolicy(Config config, Properties jobProps) {

    String selectionPolicyKey = StringUtils.substringAfter(SELECTION_POLICY_CLASS_KEY, CONFIGURATION_KEY_PREFIX);
    String versionFinderKey = StringUtils.substringAfter(VERSION_FINDER_CLASS_KEY, CONFIGURATION_KEY_PREFIX);
    Preconditions.checkArgument(
        config.hasPath(versionFinderKey),
        String.format("Version finder class is required at %s in config %s", versionFinderKey,
            config.root().render(ConfigRenderOptions.concise())));

    VersionFinderAndPolicyBuilder<T> builder = VersionFinderAndPolicy.builder();
    builder.config(config);
    builder.versionFinder(createVersionFinder(config.getString(versionFinderKey), config, jobProps));
    if (config.hasPath(selectionPolicyKey)) {
      builder.versionSelectionPolicy(createSelectionPolicy(
          ConfigUtils.getString(config, selectionPolicyKey, SelectNothingPolicy.class.getName()), config, jobProps));
    }

    for (Class<? extends RetentionActionFactory> factoryClass : RETENTION_ACTION_TYPES) {
      try {
        RetentionActionFactory factory = factoryClass.newInstance();
        if (factory.canCreateWithConfig(config)) {
          builder.retentionAction((RetentionAction) factory.createRetentionAction(config, this.fs,
              ConfigUtils.propertiesToConfig(jobProps)));
        }
      } catch (InstantiationException | IllegalAccessException e) {
        Throwables.propagate(e);
      }
    }

    this.versionFindersAndPolicies.add(builder.build());
}
 
Example #21
Source File: ConfigurableCleanableDataset.java    From incubator-gobblin with Apache License 2.0
@SuppressWarnings("unchecked")
private VersionSelectionPolicy<T> createSelectionPolicy(String className, Config config, Properties jobProps) {
  try {
    this.log.debug(String.format("Configuring selection policy %s for %s with %s", className, this.datasetRoot,
        config.root().render(ConfigRenderOptions.concise())));
    return (VersionSelectionPolicy<T>) GobblinConstructorUtils.invokeFirstConstructor(Class.forName(className),
        ImmutableList.<Object> of(config), ImmutableList.<Object> of(config, jobProps),
        ImmutableList.<Object> of(jobProps));
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    throw new IllegalArgumentException(e);
  }
}
 
Example #22
Source File: CleanableHiveDataset.java    From incubator-gobblin with Apache License 2.0
public CleanableHiveDataset(FileSystem fs, HiveMetastoreClientPool clientPool, Table table, Properties jobProps,
    Config config) throws IOException {
  super(fs, clientPool, table, jobProps, config);

  try {
    this.hiveSelectionPolicy =
        (VersionSelectionPolicy) GobblinConstructorUtils.invokeFirstConstructor(Class.forName(ConfigUtils.getString(
            this.datasetConfig, SELECTION_POLICY_CLASS_KEY, DEFAULT_SELECTION_POLICY_CLASS)), ImmutableList.<Object> of(
            this.datasetConfig, jobProps), ImmutableList.<Object> of(this.datasetConfig), ImmutableList.<Object> of(jobProps));

    log.info(String.format("Configured selection policy %s for dataset:%s with config %s",
        ConfigUtils.getString(this.datasetConfig, SELECTION_POLICY_CLASS_KEY, DEFAULT_SELECTION_POLICY_CLASS),
        datasetURN(), this.datasetConfig.root().render(ConfigRenderOptions.concise())));

    this.hiveDatasetVersionFinder =
        (AbstractHiveDatasetVersionFinder) GobblinConstructorUtils.invokeFirstConstructor(Class.forName(ConfigUtils
            .getString(this.datasetConfig, VERSION_FINDER_CLASS_KEY, DEFAULT_VERSION_FINDER_CLASS)), ImmutableList
            .<Object> of(this.fs, this.datasetConfig), ImmutableList.<Object> of(this.fs, jobProps));
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    log.error("Failed to instantiate CleanableHiveDataset", e);
    throw new IllegalArgumentException(e);
  }

  this.fsCleanableHelper = new FsCleanableHelper(fs, jobProps, this.datasetConfig, log);

  this.shouldDeleteData = Boolean.valueOf(jobProps.getProperty(SHOULD_DELETE_DATA_KEY, SHOULD_DELETE_DATA_DEFAULT));
  this.simulate = Boolean.valueOf(jobProps.getProperty(FsCleanableHelper.SIMULATE_KEY, FsCleanableHelper.SIMULATE_DEFAULT));
}
 
Example #23
Source File: FacebookFriendFeedProvider.java    From streams with Apache License 2.0
/**
 * FacebookFriendFeedProvider constructor - resolves FacebookUserInformationConfiguration from JVM 'facebook'.
 */
public FacebookFriendFeedProvider() {
  Config config = StreamsConfigurator.getConfig().getConfig("facebook");
  FacebookUserInformationConfiguration configuration;
  try {
    configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
  } catch (IOException ex) {
    ex.printStackTrace();
  }
}
 
Example #24
Source File: MultiAccessControlAction.java    From incubator-gobblin with Apache License 2.0
@Override
public MultiAccessControlAction createRetentionAction(Config config, FileSystem fs, Config jobConfig) {
  Preconditions.checkArgument(this.canCreateWithConfig(config),
      "Can not create MultiAccessControlAction with config " + config.root().render(ConfigRenderOptions.concise()));
  if (config.hasPath(LEGACY_ACCESS_CONTROL_KEY)) {
    return new MultiAccessControlAction(config.getConfig(LEGACY_ACCESS_CONTROL_KEY), fs, jobConfig);
  } else if (config.hasPath(ACCESS_CONTROL_KEY)) {
    return new MultiAccessControlAction(config.getConfig(ACCESS_CONTROL_KEY), fs, jobConfig);
  }
  throw new IllegalStateException(
      "RetentionActionFactory.canCreateWithConfig returned true but could not create MultiAccessControlAction");
}
 
Example #25
Source File: ConfigBasedMultiDatasets.java    From incubator-gobblin with Apache License 2.0
public ConfigBasedMultiDatasets(Config c, Properties props,
    Optional<List<String>> blacklistPatterns){
  this.props = props;
  blacklist = patternListInitHelper(blacklistPatterns);

  try {
    FileSystem executionCluster = FileSystem.get(new Configuration());
    URI executionClusterURI = executionCluster.getUri();

    ReplicationConfiguration rc = ReplicationConfiguration.buildFromConfig(c);

    // push mode
    if (this.props.containsKey(REPLICATION_PUSH_MODE) && Boolean.parseBoolean(this.props.getProperty(REPLICATION_PUSH_MODE))) {
      generateDatasetInPushMode(rc, executionClusterURI);
    }
    // default pull mode
    else {
      generateDatasetInPullMode(rc, executionClusterURI);
    }
  } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
    log.error("Can not create Replication Configuration from raw config "
        + c.root().render(ConfigRenderOptions.defaults().setComments(false).setOriginComments(false)), e);
  } catch (IOException ioe) {
    log.error("Can not decide current execution cluster ", ioe);
  }
}
 
Example #26
Source File: SimpleHDFSStoreMetadata.java    From incubator-gobblin with Apache License 2.0
/**
 * Writes the <code>config</code> to {@link #storeMetadataFilePath}. Creates a backup file at
 * <code>storeMetadataFilePath + ".bkp"</code> to recover old metadata in case of unexpected deployment failures
 *
 * @param config to be serialized
 * @throws IOException if there was any problem writing the <code>config</code> to the store metadata file.
 */
void writeMetadata(Config config) throws IOException {

  Path storeMetadataFileBkpPath =
      new Path(this.storeMetadataFilePath.getParent(), this.storeMetadataFilePath.getName() + ".bkp");

  // Delete old backup file if exists
  HadoopUtils.deleteIfExists(this.fs, storeMetadataFileBkpPath, true);

  // Move current storeMetadataFile to backup
  if (this.fs.exists(this.storeMetadataFilePath)) {
    HadoopUtils.renamePath(this.fs, this.storeMetadataFilePath, storeMetadataFileBkpPath);
  }

  // Write new storeMetadataFile
  try (FSDataOutputStream outputStream =
      FileSystem.create(this.fs, this.storeMetadataFilePath, FsDeploymentConfig.DEFAULT_STORE_PERMISSIONS)) {
    outputStream.write(config.root().render(ConfigRenderOptions.concise()).getBytes(Charsets.UTF_8));
  } catch (Exception e) {
    // Restore from backup
    HadoopUtils.deleteIfExists(this.fs, this.storeMetadataFilePath, true);
    HadoopUtils.renamePath(this.fs, storeMetadataFileBkpPath, this.storeMetadataFilePath);
    throw new IOException(
        String.format("Failed to write store metadata at %s. Restored existing store metadata file from backup",
            this.storeMetadataFilePath),
        e);
  }
}
 
Example #27
Source File: IntegrationBasicSuite.java    From incubator-gobblin with Apache License 2.0
private void writeJobConf(String jobName, Config jobConfig) throws IOException {
  String targetPath = this.jobConfigPath + "/" + jobName + ".conf";
  String renderedConfig = jobConfig.root().render(ConfigRenderOptions.defaults());
  try (DataOutputStream os = new DataOutputStream(new FileOutputStream(targetPath));
      Writer writer = new OutputStreamWriter(os, Charsets.UTF_8)) {
    writer.write(renderedConfig);
  }
}
 
Example #28
Source File: GobblinYarnAppLauncher.java    From incubator-gobblin with Apache License 2.0
/**
 * Write the config to the file specified with the config key {@value GOBBLIN_YARN_CONFIG_OUTPUT_PATH} if it
 * is configured.
 * @param config the config to output
 * @throws IOException
 */
@VisibleForTesting
static void outputConfigToFile(Config config)
    throws IOException {
  // If a file path is specified then write the Azkaban config to that path in HOCON format.
  // This can be used to generate an application.conf file to pass to the yarn app master and containers.
  if (config.hasPath(GOBBLIN_YARN_CONFIG_OUTPUT_PATH)) {
    File configFile = new File(config.getString(GOBBLIN_YARN_CONFIG_OUTPUT_PATH));
    File parentDir = configFile.getParentFile();

    if (parentDir != null && !parentDir.exists()) {
      if (!parentDir.mkdirs()) {
        throw new IOException("Error creating directories for " + parentDir);
      }
    }

    ConfigRenderOptions configRenderOptions = ConfigRenderOptions.defaults();
    configRenderOptions = configRenderOptions.setComments(false);
    configRenderOptions = configRenderOptions.setOriginComments(false);
    configRenderOptions = configRenderOptions.setFormatted(true);
    configRenderOptions = configRenderOptions.setJson(false);

    String renderedConfig = config.root().render(configRenderOptions);

    FileUtils.writeStringToFile(configFile, renderedConfig, Charsets.UTF_8);
  }
}
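Because setJson(false) keeps the output in HOCON, the generated file can later be loaded back with ConfigFactory.parseFile or shipped as an application.conf to the YARN app master and containers, exactly as the comment above intends.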
 
Example #29
Source File: FSJobCatalog.java    From incubator-gobblin with Apache License 2.0
/**
 * Used for shadow copying when updating an existing job configuration file, which requires
 * deleting the pre-existing copy of the file and creating a new one with the same name.
 * Steps:
 *  Create a new file in /tmp.
 *  Safely delete the old one.
 *  Copy the newly created configuration file to jobConfigDir.
 *  Delete the shadow file.
 */
synchronized void materializedJobSpec(Path jobSpecPath, JobSpec jobSpec, FileSystem fs)
    throws IOException, JobSpecNotFoundException {
  Path shadowDirectoryPath = new Path("/tmp");
  Path shadowFilePath = new Path(shadowDirectoryPath, UUID.randomUUID().toString());
  /* If a shadow file already exists, delete it */
  if (fs.exists(shadowFilePath)) {
    fs.delete(shadowFilePath, false);
  }

  ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
  mapBuilder.put(ImmutableFSJobCatalog.DESCRIPTION_KEY_IN_JOBSPEC, jobSpec.getDescription())
      .put(ImmutableFSJobCatalog.VERSION_KEY_IN_JOBSPEC, jobSpec.getVersion());

  if (jobSpec.getTemplateURI().isPresent()) {
    mapBuilder.put(ConfigurationKeys.JOB_TEMPLATE_PATH, jobSpec.getTemplateURI().get().toString());
  }

  Map<String, String> injectedKeys = mapBuilder.build();
  String renderedConfig = ConfigFactory.parseMap(injectedKeys).withFallback(jobSpec.getConfig())
      .root().render(ConfigRenderOptions.defaults());
  try (DataOutputStream os = fs.create(shadowFilePath);
      Writer writer = new OutputStreamWriter(os, Charsets.UTF_8)) {
    writer.write(renderedConfig);
  }

  /* (Optionally:Delete oldSpec) and copy the new one in. */
  if (fs.exists(jobSpecPath)) {
    if (! fs.delete(jobSpecPath, false)) {
      throw new IOException("Unable to delete existing job file: " + jobSpecPath);
    }
  }
  if (!fs.rename(shadowFilePath, jobSpecPath)) {
    throw new IOException("Unable to rename job file: " + shadowFilePath + " to " + jobSpecPath);
  }
}
 
Example #30
Source File: FlowSpecSerializer.java    From incubator-gobblin with Apache License 2.0
@Override
public JsonElement serialize(FlowSpec src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject flowSpecJson = new JsonObject();

  flowSpecJson.add(FLOW_SPEC_URI_KEY, context.serialize(src.getUri()));
  flowSpecJson.add(FLOW_SPEC_VERSION_KEY, context.serialize(src.getVersion()));
  flowSpecJson.add(FLOW_SPEC_DESCRIPTION_KEY, context.serialize(src.getDescription()));
  flowSpecJson.add(FLOW_SPEC_CONFIG_KEY, context.serialize(src.getConfig().root().render(ConfigRenderOptions.concise())));

  flowSpecJson.add(FLOW_SPEC_CONFIG_AS_PROPERTIES_KEY, context.serialize(src.getConfigAsProperties()));

  JsonArray templateURIs = new JsonArray();
  if (src.getTemplateURIs().isPresent()) {
    for (URI templateURI : src.getTemplateURIs().get()) {
      templateURIs.add(context.serialize(templateURI));
    }
  }
  flowSpecJson.add(FLOW_SPEC_TEMPLATE_URIS_KEY, templateURIs);

  JsonArray childSpecs = new JsonArray();
  if (src.getChildSpecs().isPresent()) {
    for (Spec spec : src.getChildSpecs().get()) {
      childSpecs.add(context.serialize(spec));
    }
  }
  flowSpecJson.add(FLOW_SPEC_CHILD_SPECS_KEY, childSpecs);

  return flowSpecJson;
}