Java Code Examples for org.apache.hadoop.conf.Configuration#getResource()
The following examples show how to use org.apache.hadoop.conf.Configuration#getResource().
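For orientation: Configuration#getResource(String name) looks the named resource up on the Configuration's classpath and returns its URL, or null when no resource by that name can be found. That null return is what the examples below rely on, either as a guard against adding the same resource twice or as a fail-fast check for a required file. A minimal sketch of the pattern, assuming a hypothetical resource name my-site.xml:

import java.net.URL;
import org.apache.hadoop.conf.Configuration;

public class GetResourceCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // getResource() returns the URL of the first matching classpath
    // resource, or null when nothing by that name can be located.
    URL url = conf.getResource("my-site.xml"); // hypothetical resource name
    if (url != null) {
      // Safe to load: the resource is known to exist on the classpath.
      conf.addResource("my-site.xml");
      System.out.println("Loaded configuration from " + url);
    } else {
      System.out.println("my-site.xml not found on the classpath");
    }
  }
}

Example 1 uses this check to skip resources that are already loaded; Examples 2, 4, 6, and 10 use it to throw when a required file is missing.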
Example 1
Source File: AbstractKiteProcessor.java From localization_nifi with Apache License 2.0
protected static Configuration getConfiguration(String configFiles) {
  Configuration conf = DefaultConfiguration.get();
  if (configFiles == null || configFiles.isEmpty()) {
    return conf;
  }
  for (String file : COMMA.split(configFiles)) {
    // process each resource only once
    if (conf.getResource(file) == null) {
      // use Path instead of String to get the file from the FS
      conf.addResource(new Path(file));
    }
  }
  return conf;
}
Example 2
Source File: JobClient.java From hadoop with Apache License 2.0
static Configuration getConfiguration(String jobTrackerSpec) {
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    if (jobTrackerSpec.indexOf(":") >= 0) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
Example 3
Source File: TestRefreshUserMappings.java From hadoop with Apache License 2.0
private void addNewConfigResource(String rsrcName, String keyGroup, String groups,
    String keyHosts, String hosts)
    throws FileNotFoundException, UnsupportedEncodingException {
  // location for temp resource should be in CLASSPATH
  Configuration conf = new Configuration();
  URL url = conf.getResource("hdfs-site.xml");
  String urlPath = URLDecoder.decode(url.getPath().toString(), "UTF-8");
  Path p = new Path(urlPath);
  Path dir = p.getParent();
  tempResource = dir.toString() + "/" + rsrcName;

  String newResource = "<configuration>"
      + "<property><name>" + keyGroup + "</name><value>" + groups + "</value></property>"
      + "<property><name>" + keyHosts + "</name><value>" + hosts + "</value></property>"
      + "</configuration>";
  PrintWriter writer = new PrintWriter(new FileOutputStream(tempResource));
  writer.println(newResource);
  writer.close();

  Configuration.addDefaultResource(rsrcName);
}
Example 4
Source File: JobClient.java From big-c with Apache License 2.0
static Configuration getConfiguration(String jobTrackerSpec) {
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    if (jobTrackerSpec.indexOf(":") >= 0) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
Example 5
Source File: TestRefreshUserMappings.java From big-c with Apache License 2.0
private void addNewConfigResource(String rsrcName, String keyGroup, String groups,
    String keyHosts, String hosts)
    throws FileNotFoundException, UnsupportedEncodingException {
  // location for temp resource should be in CLASSPATH
  Configuration conf = new Configuration();
  URL url = conf.getResource("hdfs-site.xml");
  String urlPath = URLDecoder.decode(url.getPath().toString(), "UTF-8");
  Path p = new Path(urlPath);
  Path dir = p.getParent();
  tempResource = dir.toString() + "/" + rsrcName;

  String newResource = "<configuration>"
      + "<property><name>" + keyGroup + "</name><value>" + groups + "</value></property>"
      + "<property><name>" + keyHosts + "</name><value>" + hosts + "</value></property>"
      + "</configuration>";
  PrintWriter writer = new PrintWriter(new FileOutputStream(tempResource));
  writer.println(newResource);
  writer.close();

  Configuration.addDefaultResource(rsrcName);
}
Example 6
Source File: JobClient.java From RDFS with Apache License 2.0
static Configuration getConfiguration(String jobTrackerSpec) {
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    if (jobTrackerSpec.indexOf(":") >= 0) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
Example 7
Source File: ProxyFilter.java From RDFS with Apache License 2.0
private static Map<String, Set<Path>> getPermMap(Configuration conf) {
  String permLoc = conf.get("hdfsproxy.user.permissions.file.location",
      "user-permissions.xml");
  if (conf.getResource(permLoc) == null) {
    LOG.warn("HdfsProxy user permissions file not found");
    return null;
  }
  Configuration permConf = new Configuration(false);
  permConf.addResource(permLoc);
  Map<String, Set<Path>> map = new HashMap<String, Set<Path>>();
  for (Map.Entry<String, String> e : permConf) {
    String k = e.getKey();
    String v = e.getValue();
    if (k != null && k.length() != 0 && v != null && v.length() != 0) {
      Set<Path> pathSet = new HashSet<Path>();
      String[] paths = v.split(",\\s*");
      for (String p : paths) {
        if (p.length() != 0) {
          pathSet.add(new Path(p));
        }
      }
      map.put(k, pathSet);
    }
  }
  return map;
}
Example 8
Source File: ProxyFilter.java From RDFS with Apache License 2.0
private static Map<String, Set<BigInteger>> getCertsMap(Configuration conf) {
  String certsLoc = conf.get("hdfsproxy.user.certs.file.location",
      "user-certs.xml");
  if (conf.getResource(certsLoc) == null) {
    LOG.warn("HdfsProxy user certs file not found");
    return null;
  }
  Configuration certsConf = new Configuration(false);
  certsConf.addResource(certsLoc);
  Map<String, Set<BigInteger>> map = new HashMap<String, Set<BigInteger>>();
  for (Map.Entry<String, String> e : certsConf) {
    String k = e.getKey();
    String v = e.getValue().trim();
    if (k != null && k.length() != 0 && v != null && v.length() != 0) {
      Set<BigInteger> numSet = new HashSet<BigInteger>();
      String[] serialnumbers = v.split("\\s*,\\s*");
      for (String num : serialnumbers) {
        if (num.length() != 0) {
          numSet.add(new BigInteger(num, 16));
        }
      }
      map.put(k, numSet);
    }
  }
  return map;
}
Example 9
Source File: AbstractKiteProcessor.java From nifi with Apache License 2.0
protected static Configuration getConfiguration(String configFiles) {
  Configuration conf = DefaultConfiguration.get();
  if (configFiles == null || configFiles.isEmpty()) {
    return conf;
  }
  for (String file : COMMA.split(configFiles)) {
    // process each resource only once
    if (conf.getResource(file) == null) {
      // use Path instead of String to get the file from the FS
      conf.addResource(new Path(file));
    }
  }
  return conf;
}
Example 10
Source File: JobClient.java From hadoop-gpu with Apache License 2.0
static Configuration getConfiguration(String jobTrackerSpec) {
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    if (jobTrackerSpec.indexOf(":") >= 0) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
Example 11
Source File: ProxyFilter.java From hadoop-gpu with Apache License 2.0
private static Map<String, Set<Path>> getPermMap(Configuration conf) {
  String permLoc = conf.get("hdfsproxy.user.permissions.file.location",
      "user-permissions.xml");
  if (conf.getResource(permLoc) == null) {
    LOG.warn("HdfsProxy user permissions file not found");
    return null;
  }
  Configuration permConf = new Configuration(false);
  permConf.addResource(permLoc);
  Map<String, Set<Path>> map = new HashMap<String, Set<Path>>();
  for (Map.Entry<String, String> e : permConf) {
    String k = e.getKey();
    String v = e.getValue();
    if (k != null && k.length() != 0 && v != null && v.length() != 0) {
      Set<Path> pathSet = new HashSet<Path>();
      String[] paths = v.split(",\\s*");
      for (String p : paths) {
        if (p.length() != 0) {
          pathSet.add(new Path(p));
        }
      }
      map.put(k, pathSet);
    }
  }
  return map;
}
Example 12
Source File: ProxyFilter.java From hadoop-gpu with Apache License 2.0
private static Map<String, Set<BigInteger>> getCertsMap(Configuration conf) {
  String certsLoc = conf.get("hdfsproxy.user.certs.file.location",
      "user-certs.xml");
  if (conf.getResource(certsLoc) == null) {
    LOG.warn("HdfsProxy user certs file not found");
    return null;
  }
  Configuration certsConf = new Configuration(false);
  certsConf.addResource(certsLoc);
  Map<String, Set<BigInteger>> map = new HashMap<String, Set<BigInteger>>();
  for (Map.Entry<String, String> e : certsConf) {
    String k = e.getKey();
    String v = e.getValue().trim();
    if (k != null && k.length() != 0 && v != null && v.length() != 0) {
      Set<BigInteger> numSet = new HashSet<BigInteger>();
      String[] serialnumbers = v.split("\\s*,\\s*");
      for (String num : serialnumbers) {
        if (num.length() != 0) {
          numSet.add(new BigInteger(num, 16));
        }
      }
      map.put(k, numSet);
    }
  }
  return map;
}
Example 13
Source File: HadoopUtils.java From mr4c with Apache License 2.0
/**
 * Generates human readable string with resource name and URI
 */
public static String describeResource(Configuration conf, String name) {
  URL url = conf.getResource(name);
  return String.format("Resource %s found at %s", name, url);
}
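Note that getResource returns null for a missing resource, and String.format renders a null argument as the literal text "null". So a hypothetical call like describeResource(conf, "core-site.xml") yields "Resource core-site.xml found at file:/etc/hadoop/conf/core-site.xml" when the file is on the classpath (the path shown here is illustrative), and "Resource core-site.xml found at null" when it is not.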
Example 14
Source File: BatchUpdateFunction.java From oryx with Apache License 2.0
@Override
public void call(JavaPairRDD<K,M> newData, Time timestamp)
    throws IOException, InterruptedException {
  if (newData.isEmpty()) {
    log.info("No data in current generation's RDD; nothing to do");
    return;
  }

  log.info("Beginning update at {}", timestamp);

  Configuration hadoopConf = sparkContext.hadoopConfiguration();
  if (hadoopConf.getResource("core-site.xml") == null) {
    log.warn("Hadoop config like core-site.xml was not found; " +
        "is the Hadoop config directory on the classpath?");
  }

  JavaPairRDD<K,M> pastData;
  Path inputPathPattern = new Path(dataDirString + "/*/part-*");
  FileSystem fs = FileSystem.get(inputPathPattern.toUri(), hadoopConf);
  FileStatus[] inputPathStatuses = fs.globStatus(inputPathPattern);
  if (inputPathStatuses == null || inputPathStatuses.length == 0) {
    log.info("No past data at path(s) {}", inputPathPattern);
    pastData = null;
  } else {
    log.info("Found past data at path(s) like {}", inputPathStatuses[0].getPath());
    Configuration updatedConf = new Configuration(hadoopConf);
    updatedConf.set(FileInputFormat.INPUT_DIR, joinFSPaths(fs, inputPathStatuses));
    @SuppressWarnings("unchecked")
    JavaPairRDD<Writable,Writable> pastWritableData = (JavaPairRDD<Writable,Writable>)
        sparkContext.newAPIHadoopRDD(updatedConf,
                                     SequenceFileInputFormat.class,
                                     keyWritableClass,
                                     messageWritableClass);
    pastData = pastWritableData.mapToPair(
        new WritableToValueFunction<>(keyClass,
                                      messageClass,
                                      keyWritableClass,
                                      messageWritableClass));
  }

  if (updateTopic == null || updateBroker == null) {
    log.info("Not producing updates to update topic since none was configured");
    updateInstance.runUpdate(sparkContext,
                             timestamp.milliseconds(),
                             newData,
                             pastData,
                             modelDirString,
                             null);
  } else {
    // This TopicProducer should not be async; sends one big model generally and
    // needs to occur before other updates reliably rather than be buffered
    try (TopicProducer<String,U> producer =
        new TopicProducerImpl<>(updateBroker, updateTopic, false)) {
      updateInstance.runUpdate(sparkContext,
                               timestamp.milliseconds(),
                               newData,
                               pastData,
                               modelDirString,
                               producer);
    }
  }
}