Java Code Examples for org.apache.commons.configuration2.Configuration

The following examples show how to use org.apache.commons.configuration2.Configuration. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source Project: tinkerpop   Source File: TinkerGraphTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void shouldPersistToGraphSON() {
    // Writing happens on close(); reloading must reproduce the "modern" toy graph.
    final String graphLocation = TestHelper.makeTestDataFile(TinkerGraphTest.class, "shouldPersistToGraphSON.json");
    final File dataFile = new File(graphLocation);
    // Remove any leftover data file from a previous run.
    if (dataFile.exists() && dataFile.isFile()) dataFile.delete();

    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "graphson");
    configuration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);

    // Populate the graph and close it, which persists it to the configured location.
    final TinkerGraph original = TinkerGraph.open(configuration);
    TinkerFactory.generateModern(original);
    original.close();

    // Re-open from the same configuration and verify the persisted content.
    final TinkerGraph reloaded = TinkerGraph.open(configuration);
    IoTest.assertModernGraph(reloaded, true, false);
    reloaded.close();
}
 
Example 2
/**
 * Tests that resetting the definition builder invalidates cached results:
 * both the combined configuration and the named sub builder must be
 * created anew on the next request.
 */
@Test
public void testResetBuilder() throws ConfigurationException
{
    final Map<String, Object> attrs = new HashMap<>();
    final BasicConfigurationBuilder<? extends HierarchicalConfiguration<ImmutableNode>> defBuilder =
            prepareSubBuilderTest(attrs);
    final CombinedConfiguration before = builder.getConfiguration();
    final ConfigurationBuilder<? extends Configuration> childBefore =
            builder.getNamedBuilder(BUILDER_NAME);

    // Resetting the definition builder should discard all cached instances.
    defBuilder.reset();

    final CombinedConfiguration after = builder.getConfiguration();
    assertNotSame("No new configuration instance", before, after);
    final ConfigurationBuilder<? extends Configuration> childAfter =
            builder.getNamedBuilder(BUILDER_NAME);
    assertNotSame("No new sub builder instance", childBefore, childAfter);
}
 
Example 3
Source Project: bireme   Source File: Config.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Populates the given {@code SourceConfig} with Debezium settings read from
 * the subset of this config scoped to the source's name.
 *
 * @param debeziumConf An empty {@code SourceConfig}
 * @throws BiremeException if a required property (namespace or kafka.server) is missing
 */
protected void fetchDebeziumConfig(SourceConfig debeziumConf) throws BiremeException {
  Configuration subConfig = new SubsetConfiguration(config, debeziumConf.name, ".");

  String namespace = subConfig.getString("namespace");
  if (namespace == null) {
    String error = "Please designate your namespace.";
    logger.fatal(error);
    throw new BiremeException(error);
  }

  debeziumConf.type = SourceType.DEBEZIUM;
  debeziumConf.server = subConfig.getString("kafka.server");
  debeziumConf.topic = namespace;
  // Fall back to the "bireme" consumer group when none is configured.
  debeziumConf.groupID = subConfig.getString("kafka.groupid", "bireme");

  if (debeziumConf.server == null) {
    String error = "Please designate server for " + debeziumConf.name + ".";
    logger.fatal(error);
    throw new BiremeException(error);
  }
}
 
Example 4
Source Project: tinkerpop   Source File: SubgraphTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
@LoadGraphWith(CREW)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = FEATURE_ADD_VERTICES)
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = FEATURE_ADD_EDGES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = FEATURE_USER_SUPPLIED_IDS)
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = FEATURE_USER_SUPPLIED_IDS)
public void g_withSideEffectXsgX_V_hasXname_danielXout_capXsgX() throws Exception {
    // Create a fresh, empty target graph to receive the extracted subgraph.
    final Configuration config = graphProvider.newGraphConfiguration("subgraph", this.getClass(), name.getMethodName(), CREW);
    graphProvider.clear(config);
    final Graph subgraph = graphProvider.openTestGraph(config);
    /////
    // Execute the subgraph() traversal; iterate() runs it purely for its side effect on 'subgraph'.
    final Traversal<Vertex, Vertex> traversal = get_g_withSideEffectXsgX_V_hasXname_danielX_outE_subgraphXsgX_inV(subgraph);
    printTraversalForm(traversal);
    traversal.iterate();
    assertVertexEdgeCounts(subgraph, 3, 2);

    // daniel's "location" property carries three values; all must survive extraction.
    final List<String> locations = subgraph.traversal().V().has("name", "daniel").<String>values("location").toList();
    assertThat(locations, contains("spremberg", "kaiserslautern", "aachen"));

    // Clean up the test graph's resources.
    graphProvider.clear(subgraph, config);
}
 
Example 5
/**
 * Returns a new servlet request configuration that is backed by the passed
 * in configuration.
 *
 * @param base the configuration with the underlying values
 * @return the servlet request configuration
 */
private ServletRequestConfiguration createConfiguration(final Configuration base)
{
    // Mock request whose parameters are served from the backing configuration.
    final ServletRequest request = new MockHttpServletRequest()
    {
        @Override
        public String[] getParameterValues(final String key)
        {
            // Delegate directly to the backing configuration's array view of the key.
            return base.getStringArray(key);
        }

        @Override
        public Map<?, ?> getParameterMap()
        {
            // Expose the whole backing configuration as a Map.
            return new ConfigurationMap(base);
        }
    };

    final ServletRequestConfiguration config = new ServletRequestConfiguration(request);
    // Split parameter values on ',' so list-valued properties round-trip.
    config.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
    return config;
}
 
Example 6
Source Project: tinkerpop   Source File: DriverRemoteConnection.java    License: Apache License 2.0 6 votes vote down vote up
public DriverRemoteConnection(final Configuration conf) {
    // A cluster may be described either by a file reference or by inline
    // "clusterConfiguration.*" keys — supplying both is ambiguous and rejected.
    final boolean hasClusterConf = IteratorUtils.anyMatch(conf.getKeys(), k -> k.startsWith("clusterConfiguration"));
    if (conf.containsKey(GREMLIN_REMOTE_DRIVER_CLUSTERFILE) && hasClusterConf)
        throw new IllegalStateException(String.format("A configuration should not contain both '%s' and 'clusterConfiguration'", GREMLIN_REMOTE_DRIVER_CLUSTERFILE));

    remoteTraversalSourceName = conf.getString(GREMLIN_REMOTE_DRIVER_SOURCENAME, DEFAULT_TRAVERSAL_SOURCE);

    try {
        // Neither option present: open a cluster with default settings.
        // Otherwise build from the cluster file, falling back to the inline subset.
        final Cluster cluster;
        if (!conf.containsKey(GREMLIN_REMOTE_DRIVER_CLUSTERFILE) && !hasClusterConf)
            cluster = Cluster.open();
        else
            cluster = conf.containsKey(GREMLIN_REMOTE_DRIVER_CLUSTERFILE) ?
                    Cluster.open(conf.getString(GREMLIN_REMOTE_DRIVER_CLUSTERFILE)) : Cluster.open(conf.subset("clusterConfiguration"));

        client = cluster.connect(Client.Settings.build().create()).alias(remoteTraversalSourceName);
    } catch (Exception ex) {
        // Surface any cluster/client construction failure as an unchecked exception.
        throw new IllegalStateException(ex);
    }

    attachElements = false;

    // This connection created the cluster/client itself, so it owns their shutdown.
    tryCloseCluster = true;
    tryCloseClient = true;
    this.conf = Optional.of(conf);
}
 
Example 7
Source Project: tinkerpop   Source File: PersistedOutputRDD.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void writeGraphRDD(final Configuration configuration, final JavaPairRDD<Object, VertexWritable> graphRDD) {
    // Persisted RDDs only outlive the job when the SparkContext itself is persisted.
    if (!configuration.getBoolean(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, false))
        LOGGER.warn("The SparkContext should be persisted in order for the RDD to persist across jobs. To do so, set " + Constants.GREMLIN_SPARK_PERSIST_CONTEXT + " to true");
    // An output location is mandatory — it becomes the persisted RDD's name.
    if (!configuration.containsKey(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION))
        throw new IllegalArgumentException("There is no provided " + Constants.GREMLIN_HADOOP_OUTPUT_LOCATION + " to write the persisted RDD to");
    SparkContextStorage.open(configuration).rm(configuration.getString(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION));  // this might be bad cause it unpersists the job RDD
    // determine which storage level to persist the RDD as with MEMORY_ONLY being the default cache()
    final StorageLevel storageLevel = StorageLevel.fromString(configuration.getString(Constants.GREMLIN_SPARK_PERSIST_STORAGE_LEVEL, "MEMORY_ONLY"));
    // Optionally strip all edges from each vertex before persisting.
    if (!configuration.getBoolean(Constants.GREMLIN_HADOOP_GRAPH_WRITER_HAS_EDGES, true))
        graphRDD.mapValues(vertex -> {
            vertex.get().dropEdges(Direction.BOTH);
            return vertex;
        }).setName(Constants.getGraphLocation(configuration.getString(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION))).persist(storageLevel)
                // call action to eager store rdd
                .count();
    else
        graphRDD.setName(Constants.getGraphLocation(configuration.getString(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION))).persist(storageLevel)
                // call action to eager store rdd
                .count();
    Spark.refresh(); // necessary to do really fast so the Spark GC doesn't clear out the RDD
}
 
Example 8
Source Project: james-project   Source File: DNSRBLHandler.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void init(Configuration config) throws ConfigurationException {
    // At least one of the two server lists must be configured for this handler to be usable.
    boolean validConfig = false;
    HierarchicalConfiguration<ImmutableNode> handlerConfiguration = (HierarchicalConfiguration<ImmutableNode>) config;

    // getStringArray already yields String[]; no intermediate ArrayList round-trip is needed.
    String[] whitelist = handlerConfiguration.getStringArray("rblservers.whitelist");
    if (whitelist.length > 0) {
        setWhitelist(whitelist);
        validConfig = true;
    }
    String[] blacklist = handlerConfiguration.getStringArray("rblservers.blacklist");
    if (blacklist.length > 0) {
        setBlacklist(blacklist);
        validConfig = true;
    }

    // Throw a ConfigurationException on invalid config
    if (!validConfig) {
        throw new ConfigurationException("Please configure whitelist or blacklist");
    }

    setGetDetail(handlerConfiguration.getBoolean("getDetail", false));
}
 
Example 9
Source Project: tinkerpop   Source File: IoGraphTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
@LoadGraphWith(LoadGraphWith.GraphData.MODERN)
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = Graph.Features.EdgeFeatures.FEATURE_ADD_EDGES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES)
public void shouldReadWriteModernToFileWithHelpers() throws Exception {
    // Write the pre-loaded "modern" graph to a temp file, then read it into a fresh graph.
    final File f = TestHelper.generateTempFile(this.graph.getClass(), name.getMethodName(), fileExtension);
    try {
        graph.io(ioBuilderToTest).writeGraph(Storage.toPath(f));

        final Configuration configuration = graphProvider.newGraphConfiguration("readGraph", this.getClass(), name.getMethodName(), LoadGraphWith.GraphData.MODERN);
        final Graph g1 = graphProvider.openTestGraph(configuration);
        g1.io(ioBuilderToTest).readGraph(Storage.toPath(f));

        // modern uses double natively so always assert as such
        IoTest.assertModernGraph(g1, true, lossyForId);

        graphProvider.clear(g1, configuration);
    } catch (Exception ex) {
        // Best-effort cleanup of the temp file before re-throwing the failure.
        f.delete();
        throw ex;
    }
}
 
Example 10
Source Project: tinkerpop   Source File: TinkerIoRegistryV2d0.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public TinkerGraph deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
    // Target graph configured for list cardinality so multi-valued properties survive.
    final Configuration conf = new BaseConfiguration();
    conf.setProperty("gremlin.tinkergraph.defaultVertexPropertyCardinality", "list");
    final TinkerGraph graph = TinkerGraph.open(conf);

    // Walk the top-level object: a "vertices" array followed by an "edges" array.
    while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
        final String fieldName = jsonParser.getCurrentName();
        if (fieldName.equals("vertices")) {
            while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                if (jsonParser.currentToken() == JsonToken.START_OBJECT) {
                    // Materialize each detached vertex into the target graph.
                    final DetachedVertex vertex = (DetachedVertex) deserializationContext.readValue(jsonParser, Vertex.class);
                    vertex.attach(Attachable.Method.getOrCreate(graph));
                }
            }
        } else if (fieldName.equals("edges")) {
            while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                if (jsonParser.currentToken() == JsonToken.START_OBJECT) {
                    // Materialize each detached edge into the target graph.
                    final DetachedEdge edge = (DetachedEdge) deserializationContext.readValue(jsonParser, Edge.class);
                    edge.attach(Attachable.Method.getOrCreate(graph));
                }
            }
        }
    }

    return graph;
}
 
Example 11
Source Project: james-project   Source File: BlobExportMechanismModule.java    License: Apache License 2.0 6 votes vote down vote up
// Chooses the blob export implementation from configuration, defaulting to
// LOCAL_FILE when no mechanism is configured or the config file is absent.
@VisibleForTesting
@Provides
@Singleton
BlobExportImplChoice provideChoice(PropertiesProvider propertiesProvider) throws ConfigurationException {
    try {
        Configuration configuration = propertiesProvider.getConfigurations(ConfigurationComponent.NAMES);
        return BlobExportImplChoice.from(configuration)
            .orElseGet(() -> {
                // Config file exists but declares no mechanism: warn and use the default.
                LOGGER.warn("No blob export mechanism defined. Defaulting to " + BlobExportImplChoice.LOCAL_FILE.getImplName());
                return BlobExportImplChoice.LOCAL_FILE;
            });
    } catch (FileNotFoundException e) {
        // Missing config file is not fatal — fall back to local-file exporting.
        LOGGER.warn("Could not find " + ConfigurationComponent.NAME + " configuration file, using localFile blob exporting as the default");
        return BlobExportImplChoice.LOCAL_FILE;
    }
}
 
Example 12
Source Project: tinkerpop   Source File: IoTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = Graph.Features.EdgeFeatures.FEATURE_ADD_EDGES)
@FeatureRequirement(featureClass = EdgePropertyFeatures.class, feature = FEATURE_STRING_VALUES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES)
public void shouldReadWriteSelfLoopingEdges() throws Exception {
    // Round-trip a graph containing a self-loop through GraphSON V3 in memory.
    final GraphSONMapper mapper = graph.io(graphson).mapper().version(GraphSONVersion.V3_0).create();
    final Graph source = graph;
    final Vertex v1 = source.addVertex();
    final Vertex v2 = source.addVertex();
    v1.addEdge("CONTROL", v2);
    // The self-loop is the case under test.
    v1.addEdge("SELFLOOP", v1);

    final Configuration targetConf = graphProvider.newGraphConfiguration("target", this.getClass(), name.getMethodName(), null);
    final Graph target = graphProvider.openTestGraph(targetConf);
    // Write source to a byte buffer, then read that buffer into the target graph.
    try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
        source.io(IoCore.graphson()).writer().mapper(mapper).create().writeGraph(os, source);
        try (ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray())) {
            target.io(IoCore.graphson()).reader().mapper(mapper).create().readGraph(is, target);
        }
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }

    // Element counts must survive the round-trip, including the self-loop edge.
    assertEquals(IteratorUtils.count(source.vertices()), IteratorUtils.count(target.vertices()));
    assertEquals(IteratorUtils.count(source.edges()), IteratorUtils.count(target.edges()));
}
 
Example 13
Source Project: tinkerpop   Source File: GryoPoolTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void shouldConfigPoolOnConstructionWithCustomIoRegistryInstance() {
    // Register an instance-based custom IoRegistry and verify the pool honors it.
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(IoRegistry.IO_REGISTRY, IoXIoRegistry.InstanceBased.class.getName());
    final GryoPool pool = GryoPool.build()
            .ioRegistries(configuration.getList(IoRegistry.IO_REGISTRY, Collections.emptyList()))
            .create();
    assertReaderWriter(pool.takeWriter(), pool.takeReader(), new IoX("test"), IoX.class);
}
 
Example 14
/**
 * Tries to create an event about a newly created configuration without a
 * configuration instance. The constructor is expected to reject the null
 * configuration argument with an IllegalArgumentException.
 */
@Test(expected = IllegalArgumentException.class)
public void testResultCreatedEventNoConfiguration()
{
    // Passing null as the created configuration must trigger the exception.
    new ConfigurationBuilderResultCreatedEvent(
            new BasicConfigurationBuilder<>(
                    Configuration.class),
            ConfigurationBuilderResultCreatedEvent.RESULT_CREATED, null);
}
 
Example 15
Source Project: tinkerpop   Source File: GraphFactoryTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void shouldThrowExceptionOnNullConfiguration() {
    try {
        GraphFactory.open((Configuration) null);
        fail("Should have thrown an exception since configuration is null");
    } catch (Exception ex) {
        final Exception expected = Graph.Exceptions.argumentCanNotBeNull("configuration");
        assertEquals(expected.getClass(), ex.getClass());
        assertEquals(expected.getMessage(), ex.getMessage());
    }
}
 
Example 16
/**
 * {@inheritDoc} This implementation lets the super class create a fully
 * configured builder. Then it returns a special wrapper around it.
 */
@Override
public ConfigurationBuilder<? extends Configuration> getConfigurationBuilder(
        final ConfigurationDeclaration decl) throws ConfigurationException
{
    // The base class does the heavy lifting of building; we only wrap the result.
    final ConfigurationBuilder<? extends Configuration> delegate =
            super.getConfigurationBuilder(decl);
    return createWrapperBuilder(delegate, createWrapperConfiguration(delegate));
}
 
Example 17
Source Project: engine   Source File: SiteProperties.java    License: GNU General Public License v3.0 5 votes vote down vote up
/**
 * Returns true if the sub items of folders with the same family of target IDs should be merged (e.g. "en_US" and
 * "en" are of the same family).
 */
public static boolean isMergeFolders() {
    final Configuration config = ConfigUtils.getCurrentConfig();
    // Without a current configuration, merging is disabled by definition.
    return config != null && config.getBoolean(MERGE_FOLDERS_CONFIG_KEY, false);
}
 
Example 18
// Builds a CassandraMailQueueViewConfiguration from the given configuration,
// falling back to the documented defaults for each absent property.
public static CassandraMailQueueViewConfiguration from(Configuration configuration) {
    return builder()
        .bucketCount(configuration.getInteger(BUCKET_COUNT_PROPERTY, DEFAULT_BUCKET_COUNT))
        .updateBrowseStartPace(configuration.getInteger(UPDATE_BROWSE_START_PACE_PROPERTY, DEFAULT_UPDATE_BROWSE_START_PACE))
        // The slice window is parsed from its string form when present.
        .sliceWindow(Optional.ofNullable(configuration.getString(SLICE_WINDOW_PROPERTY, null))
            .map(DurationParser::parse)
            .orElse(DEFAULT_SLICE_WINDOW))
        .build();
}
 
Example 19
Source Project: dsworkbench   Source File: PropertyHelper.java    License: Apache License 2.0 5 votes vote down vote up
// Persists per-column width/visibility plus the table's sort and scroll state
// under the given prefix in the configuration.
public static void storeTableProperties(JXTable pTable, Configuration pConfig, String pPrefix) {
  // Include hidden columns (getColumns(true)) so their state is preserved too.
  for (TableColumn column : ((TableColumnModelExt) pTable.getColumnModel()).getColumns(true)) {
    TableColumnExt extColumn = (TableColumnExt) column;
    String title = extColumn.getTitle();
    pConfig.setProperty(pPrefix + ".table.col." + title + ".width", extColumn.getWidth());
    pConfig.setProperty(pPrefix + ".table.col." + title + ".visible", extColumn.isVisible());
  }

  int sortedCol = pTable.getSortedColumnIndex();
  if (sortedCol < 0) {
    // No sorted column: sort and scroll settings are not written at all.
    return;
  }
  pConfig.setProperty(pPrefix + ".table.sort.col", sortedCol);

  // Encode sort order as 1 (ascending), -1 (descending) or 0 (unsorted).
  int sortOrder;
  switch (pTable.getSortOrder(sortedCol)) {
    case ASCENDING:
      sortOrder = 1;
      break;
    case DESCENDING:
      sortOrder = -1;
      break;
    default:
      sortOrder = 0;
      break;
  }
  pConfig.setProperty(pPrefix + ".table.sort.order", sortOrder);
  pConfig.setProperty(pPrefix + ".table.horizontal.scroll", pTable.isHorizontalScrollEnabled());
}
 
Example 20
Source Project: tinkerpop   Source File: GryoSerializer.java    License: Apache License 2.0 5 votes vote down vote up
private static Configuration makeApacheConfiguration(final SparkConf sparkConfiguration) {
    final BaseConfiguration apacheConfiguration = new BaseConfiguration();
    for (final Tuple2<String, String> tuple : sparkConfiguration.getAll()) {
        apacheConfiguration.setProperty(tuple._1(), tuple._2());
    }
    return apacheConfiguration;
}
 
Example 21
Source Project: batfish   Source File: BaseSettings.java    License: Apache License 2.0 5 votes vote down vote up
private static Configuration loadFileConfiguration(File configFile) {
  try {
    return new Configurations().properties(configFile);
  } catch (ConfigurationException e) {
    throw new BatfishException(
        "Error loading configuration from " + configFile.getAbsolutePath(), e);
  }
}
 
Example 22
Source Project: james-project   Source File: ElasticSearchConfiguration.java    License: Apache License 2.0 5 votes vote down vote up
private static Optional<SSLTrustStore> getSSLTrustStore(Configuration configuration) {
    String trustStorePath = configuration.getString(ELASTICSEARCH_HTTPS_TRUST_STORE_PATH);
    String trustStorePassword = configuration.getString(ELASTICSEARCH_HTTPS_TRUST_STORE_PASSWORD);

    if (trustStorePath == null && trustStorePassword == null) {
        return Optional.empty();
    }

    return Optional.of(SSLTrustStore.of(trustStorePath, trustStorePassword));
}
 
Example 23
/**
 * Tests whether all child builders can be obtained. The test file is
 * expected to declare exactly three child configuration sources.
 */
@Test
public void testGetChildBuilders() throws ConfigurationException
{
    builder.configure(createParameters()
            .setFile(TEST_FILE));
    // Trigger the build so child builders are actually created.
    builder.getConfiguration();
    final Collection<ConfigurationBuilder<? extends Configuration>> childBuilders =
            builder.getChildBuilders();
    assertEquals("Wrong number of child builders", 3, childBuilders.size());
}
 
Example 24
// Resolves the health-check period from configuration, falling back to the
// default when the property is absent or blank.
private static Duration getDurationFromConfiguration(Configuration configuration) {
    // Read the property once — the previous code looked it up twice.
    String period = configuration.getString(HEALTH_CHECK_PERIOD);
    if (StringUtils.isEmpty(period)) {
        return DEFAULT_HEALTH_CHECK_PERIOD;
    }

    return DurationParser.parse(period);
}
 
Example 25
Source Project: tinkerpop   Source File: HadoopPools.java    License: Apache License 2.0 5 votes vote down vote up
public synchronized static void initialize(final Configuration configuration) {
    // Build the shared GryoPool exactly once per JVM; later calls — even with a
    // different configuration — are no-ops because INITIALIZED stays true.
    if (!INITIALIZED) {
        INITIALIZED = true;
        GRYO_POOL = GryoPool.build().
                // Pool size defaults to 256 when not configured.
                poolSize(configuration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE, 256)).
                version(GryoVersion.valueOf(configuration.getString(GryoPool.CONFIG_IO_GRYO_VERSION, GryoPool.CONFIG_IO_GRYO_POOL_VERSION_DEFAULT.name()))).
                ioRegistries(configuration.getList(IoRegistry.IO_REGISTRY, Collections.emptyList())).
                // Allow serializing classes that were not explicitly registered.
                initializeMapper(m -> m.registrationRequired(false)).
                create();
    }
}
 
Example 26
Source Project: engine   Source File: SiteProperties.java    License: GNU General Public License v3.0 5 votes vote down vote up
/**
 * Returns the list of available target IDs, or {@code null} when there is
 * no current configuration.
 */
public static String[] getAvailableTargetIds() {
    final Configuration config = ConfigUtils.getCurrentConfig();
    // Callers must handle the null returned when no configuration is active.
    return config == null ? null : config.getStringArray(AVAILABLE_TARGET_IDS_CONFIG_KEY);
}
 
Example 27
Source Project: tinkerpop   Source File: GraphFactory.java    License: Apache License 2.0 5 votes vote down vote up
private static org.apache.commons.configuration2.Configuration getConfiguration(final File configurationFile) {
    if (!configurationFile.isFile())
        throw new IllegalArgumentException(String.format("The location configuration must resolve to a file and [%s] does not", configurationFile));

    try {
        final String fileName = configurationFile.getName();
        final String fileExtension = fileName.substring(fileName.lastIndexOf('.') + 1);

        final Configuration conf;
        final Configurations configs = new Configurations();

        switch (fileExtension) {
            case "yml":
            case "yaml":
                final Parameters params = new Parameters();
                final FileBasedConfigurationBuilder<FileBasedConfiguration> builder =
                        new FileBasedConfigurationBuilder<FileBasedConfiguration>(YAMLConfiguration.class).
                                configure(params.fileBased().setFile(configurationFile));

                final org.apache.commons.configuration2.Configuration copy = new org.apache.commons.configuration2.BaseConfiguration();
                ConfigurationUtils.copy(builder.configure(params.fileBased().setFile(configurationFile)).getConfiguration(), copy);
                conf = copy;
                break;
            case "xml":
                conf = configs.xml(configurationFile);
                break;
            default:
                conf = configs.properties(configurationFile);
        }
        return conf;
    } catch (ConfigurationException e) {
        throw new IllegalArgumentException(String.format("Could not load configuration at: %s", configurationFile), e);
    }
}
 
Example 28
Source Project: tinkerpop   Source File: AbstractSparkTest.java    License: Apache License 2.0 5 votes vote down vote up
protected Configuration getBaseConfiguration() {
    final BaseConfiguration configuration = new BaseConfiguration();
    configuration.setProperty(SparkLauncher.SPARK_MASTER, "local[4]");
    configuration.setProperty(Constants.SPARK_SERIALIZER, GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Constants.SPARK_KRYO_REGISTRATION_REQUIRED, true);
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    return configuration;
}
 
Example 29
Source Project: swagger2markup   Source File: Schema2MarkupConfigBuilder.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Loads the default properties from the classpath.
 *
 * @return the default properties
 * @throws RuntimeException if the default properties resource cannot be loaded
 */
public static Configuration getDefaultConfiguration() {
    try {
        return new Configurations().properties(PROPERTIES_DEFAULT);
    } catch (ConfigurationException e) {
        // A missing/broken defaults file is a packaging error — fail loudly.
        throw new RuntimeException(String.format("Can't load default properties '%s'", PROPERTIES_DEFAULT), e);
    }
}
 
Example 30
Source Project: tinkerpop   Source File: SparkExecutor.java    License: Apache License 2.0 5 votes vote down vote up
// Runs the MapReduce combine stage over each partition of the map-stage RDD.
// The lambda executes on Spark workers, so the Kryo shim configuration must be
// (re)applied inside the closure before any MapReduce object is created.
public static <K, V, OK, OV> JavaPairRDD<OK, OV> executeCombine(final JavaPairRDD<K, V> mapRDD,
                                                                final Configuration graphComputerConfiguration) {
    return mapRDD.mapPartitionsToPair(partitionIterator -> {
        KryoShimServiceLoader.applyConfiguration(graphComputerConfiguration);
        // Instantiate a fresh MapReduce per partition and combine its key/value stream.
        return new CombineIterator<>(MapReduce.<MapReduce<K, V, OK, OV, ?>>createMapReduce(HadoopGraph.open(graphComputerConfiguration), graphComputerConfiguration), partitionIterator);
    });
}