Java Code Examples for org.apache.hadoop.conf.Configuration#getResource()

The following examples show how to use org.apache.hadoop.conf.Configuration#getResource() . These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
protected static Configuration getConfiguration(String configFiles) {
    // Start from the shared default configuration.
    Configuration conf = DefaultConfiguration.get();

    // Nothing extra requested: hand back the defaults untouched.
    if (configFiles == null || configFiles.isEmpty()) {
        return conf;
    }

    for (String file : COMMA.split(configFiles)) {
        // Skip files that are already registered so each resource is
        // processed only once.
        if (conf.getResource(file) != null) {
            continue;
        }
        // Wrap in a Path (rather than a plain String) so Configuration
        // loads the file from the filesystem instead of the classpath.
        conf.addResource(new Path(file));
    }

    return conf;
}
 
Example 2
Source Project: hadoop   File: JobClient.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a {@link Configuration} for the given job tracker specification.
 * A spec containing a colon is treated as a "host:port" address; anything
 * else is treated as a profile name resolved to "hadoop-&lt;spec&gt;.xml"
 * on the classpath.
 *
 * @param jobTrackerSpec "host:port" address or profile name; may be null
 * @return configuration with the job tracker settings applied
 * @throws RuntimeException if the profile's XML file is not on the classpath
 */
static Configuration getConfiguration(String jobTrackerSpec)
{
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    // "host:port" form: use the address directly. (contains() replaces the
    // less readable indexOf(":") >= 0 idiom.)
    if (jobTrackerSpec.contains(":")) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      // Named profile, e.g. "local" -> hadoop-local.xml; validate that the
      // file actually exists before registering it as a resource.
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
 
Example 3
Source Project: hadoop   File: TestRefreshUserMappings.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Writes a temporary configuration resource containing the given
 * group/host mappings and registers it as a default Configuration
 * resource.
 *
 * @param rsrcName file name for the new resource
 * @param keyGroup property name for the groups value
 * @param groups   value for the groups property
 * @param keyHosts property name for the hosts value
 * @param hosts    value for the hosts property
 * @throws FileNotFoundException if the resource file cannot be created
 * @throws UnsupportedEncodingException if UTF-8 decoding is unavailable
 */
private void addNewConfigResource(String rsrcName, String keyGroup,
    String groups, String keyHosts, String hosts)
        throws FileNotFoundException, UnsupportedEncodingException {
  // The temp resource must live on the CLASSPATH so that
  // Configuration.addDefaultResource can find it; place it next to the
  // existing hdfs-site.xml. NOTE(review): getResource may return null if
  // hdfs-site.xml is absent — this would NPE below; confirm test setup.
  Configuration conf = new Configuration();
  URL url = conf.getResource("hdfs-site.xml");

  // getPath() already returns a String (the original's extra toString()
  // was redundant); decode %-escapes such as spaces in the path.
  String urlPath = URLDecoder.decode(url.getPath(), "UTF-8");
  Path p = new Path(urlPath);
  Path dir = p.getParent();
  tempResource = dir.toString() + "/" + rsrcName;

  String newResource =
  "<configuration>"+
  "<property><name>" + keyGroup + "</name><value>"+groups+"</value></property>" +
  "<property><name>" + keyHosts + "</name><value>"+hosts+"</value></property>" +
  "</configuration>";
  // try-with-resources guarantees the writer is closed (and the file
  // flushed) even if println throws; the original leaked on exception.
  try (PrintWriter writer = new PrintWriter(new FileOutputStream(tempResource))) {
    writer.println(newResource);
  }

  Configuration.addDefaultResource(rsrcName);
}
 
Example 4
Source Project: big-c   File: JobClient.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a {@link Configuration} for the given job tracker specification.
 * A spec containing a colon is treated as a "host:port" address; anything
 * else is treated as a profile name resolved to "hadoop-&lt;spec&gt;.xml"
 * on the classpath.
 *
 * @param jobTrackerSpec "host:port" address or profile name; may be null
 * @return configuration with the job tracker settings applied
 * @throws RuntimeException if the profile's XML file is not on the classpath
 */
static Configuration getConfiguration(String jobTrackerSpec)
{
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    // "host:port" form: use the address directly. (contains() replaces the
    // less readable indexOf(":") >= 0 idiom.)
    if (jobTrackerSpec.contains(":")) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      // Named profile, e.g. "local" -> hadoop-local.xml; validate that the
      // file actually exists before registering it as a resource.
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
 
Example 5
Source Project: big-c   File: TestRefreshUserMappings.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Writes a temporary configuration resource containing the given
 * group/host mappings and registers it as a default Configuration
 * resource.
 *
 * @param rsrcName file name for the new resource
 * @param keyGroup property name for the groups value
 * @param groups   value for the groups property
 * @param keyHosts property name for the hosts value
 * @param hosts    value for the hosts property
 * @throws FileNotFoundException if the resource file cannot be created
 * @throws UnsupportedEncodingException if UTF-8 decoding is unavailable
 */
private void addNewConfigResource(String rsrcName, String keyGroup,
    String groups, String keyHosts, String hosts)
        throws FileNotFoundException, UnsupportedEncodingException {
  // The temp resource must live on the CLASSPATH so that
  // Configuration.addDefaultResource can find it; place it next to the
  // existing hdfs-site.xml. NOTE(review): getResource may return null if
  // hdfs-site.xml is absent — this would NPE below; confirm test setup.
  Configuration conf = new Configuration();
  URL url = conf.getResource("hdfs-site.xml");

  // getPath() already returns a String (the original's extra toString()
  // was redundant); decode %-escapes such as spaces in the path.
  String urlPath = URLDecoder.decode(url.getPath(), "UTF-8");
  Path p = new Path(urlPath);
  Path dir = p.getParent();
  tempResource = dir.toString() + "/" + rsrcName;

  String newResource =
  "<configuration>"+
  "<property><name>" + keyGroup + "</name><value>"+groups+"</value></property>" +
  "<property><name>" + keyHosts + "</name><value>"+hosts+"</value></property>" +
  "</configuration>";
  // try-with-resources guarantees the writer is closed (and the file
  // flushed) even if println throws; the original leaked on exception.
  try (PrintWriter writer = new PrintWriter(new FileOutputStream(tempResource))) {
    writer.println(newResource);
  }

  Configuration.addDefaultResource(rsrcName);
}
 
Example 6
Source Project: RDFS   File: JobClient.java    License: Apache License 2.0 6 votes vote down vote up
static Configuration getConfiguration(String jobTrackerSpec)
{
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    if (jobTrackerSpec.indexOf(":") >= 0) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
 
Example 7
Source Project: RDFS   File: ProxyFilter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads the per-user path permissions from the configured XML resource.
 * Returns null (after logging a warning) when the resource is not on the
 * classpath, so callers can distinguish "missing file" from "empty file".
 */
private static Map<String, Set<Path>> getPermMap(Configuration conf) {
  String location = conf.get("hdfsproxy.user.permissions.file.location",
      "user-permissions.xml");
  if (conf.getResource(location) == null) {
    LOG.warn("HdfsProxy user permissions file not found");
    return null;
  }
  // Load the permissions file standalone (no default Hadoop resources).
  Configuration permConf = new Configuration(false);
  permConf.addResource(location);

  Map<String, Set<Path>> permMap = new HashMap<String, Set<Path>>();
  for (Map.Entry<String, String> entry : permConf) {
    String user = entry.getKey();
    String pathList = entry.getValue();
    // Ignore entries with an empty or missing key or value.
    if (user == null || user.length() == 0
        || pathList == null || pathList.length() == 0) {
      continue;
    }
    // Property values are comma-separated path lists.
    Set<Path> allowed = new HashSet<Path>();
    for (String candidate : pathList.split(",\\s*")) {
      if (candidate.length() != 0) {
        allowed.add(new Path(candidate));
      }
    }
    permMap.put(user, allowed);
  }
  return permMap;
}
 
Example 8
Source Project: RDFS   File: ProxyFilter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads the per-user certificate serial numbers (hex, comma-separated)
 * from the configured XML resource.
 *
 * @param conf configuration used to locate the user-certs file
 * @return map from user name to the set of permitted serial numbers, or
 *         null (after logging a warning) when the resource is missing
 */
private static Map<String, Set<BigInteger>> getCertsMap(Configuration conf) {
  String certsLoc = conf.get("hdfsproxy.user.certs.file.location",
      "user-certs.xml");
  if (conf.getResource(certsLoc) == null) {
    LOG.warn("HdfsProxy user certs file not found");
    return null;
  }
  // Load the certs file standalone (no default Hadoop resources).
  Configuration certsConf = new Configuration(false);
  certsConf.addResource(certsLoc);
  Map<String, Set<BigInteger>> map = new HashMap<String, Set<BigInteger>>();
  for (Map.Entry<String, String> e : certsConf) {
    String k = e.getKey();
    String rawValue = e.getValue();
    // BUG FIX: the original called trim() before the null check, so a null
    // property value threw NPE and the subsequent "v != null" test was dead.
    if (k == null || k.length() == 0 || rawValue == null) {
      continue;
    }
    String v = rawValue.trim();
    if (v.length() == 0) {
      continue;
    }
    Set<BigInteger> numSet = new HashSet<BigInteger>();
    // Serial numbers are comma-separated hexadecimal strings.
    String[] serialnumbers = v.split("\\s*,\\s*");
    for (String num : serialnumbers) {
      if (num.length() != 0) {
        numSet.add(new BigInteger(num, 16));
      }
    }
    map.put(k, numSet);
  }
  return map;
}
 
Example 9
Source Project: nifi   File: AbstractKiteProcessor.java    License: Apache License 2.0 6 votes vote down vote up
protected static Configuration getConfiguration(String configFiles) {
    // Start from the shared default configuration.
    Configuration conf = DefaultConfiguration.get();

    // Nothing extra requested: hand back the defaults untouched.
    if (configFiles == null || configFiles.isEmpty()) {
        return conf;
    }

    for (String file : COMMA.split(configFiles)) {
        // Skip files that are already registered so each resource is
        // processed only once.
        if (conf.getResource(file) != null) {
            continue;
        }
        // Wrap in a Path (rather than a plain String) so Configuration
        // loads the file from the filesystem instead of the classpath.
        conf.addResource(new Path(file));
    }

    return conf;
}
 
Example 10
Source Project: hadoop-gpu   File: JobClient.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a {@link Configuration} for the given job tracker specification.
 * A spec containing a colon is treated as a "host:port" address; anything
 * else is treated as a profile name resolved to "hadoop-&lt;spec&gt;.xml"
 * on the classpath.
 *
 * @param jobTrackerSpec "host:port" address or profile name; may be null
 * @return configuration with the job tracker settings applied
 * @throws RuntimeException if the profile's XML file is not on the classpath
 */
static Configuration getConfiguration(String jobTrackerSpec)
{
  Configuration conf = new Configuration();
  if (jobTrackerSpec != null) {
    // "host:port" form: use the address directly. (contains() replaces the
    // less readable indexOf(":") >= 0 idiom.)
    if (jobTrackerSpec.contains(":")) {
      conf.set("mapred.job.tracker", jobTrackerSpec);
    } else {
      // Named profile, e.g. "local" -> hadoop-local.xml; validate that the
      // file actually exists before registering it as a resource.
      String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
      URL validate = conf.getResource(classpathFile);
      if (validate == null) {
        throw new RuntimeException(classpathFile + " not found on CLASSPATH");
      }
      conf.addResource(classpathFile);
    }
  }
  return conf;
}
 
Example 11
Source Project: hadoop-gpu   File: ProxyFilter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads the per-user path permissions from the configured XML resource.
 * Returns null (after logging a warning) when the resource is not on the
 * classpath, so callers can distinguish "missing file" from "empty file".
 */
private static Map<String, Set<Path>> getPermMap(Configuration conf) {
  String location = conf.get("hdfsproxy.user.permissions.file.location",
      "user-permissions.xml");
  if (conf.getResource(location) == null) {
    LOG.warn("HdfsProxy user permissions file not found");
    return null;
  }
  // Load the permissions file standalone (no default Hadoop resources).
  Configuration permConf = new Configuration(false);
  permConf.addResource(location);

  Map<String, Set<Path>> permMap = new HashMap<String, Set<Path>>();
  for (Map.Entry<String, String> entry : permConf) {
    String user = entry.getKey();
    String pathList = entry.getValue();
    // Ignore entries with an empty or missing key or value.
    if (user == null || user.length() == 0
        || pathList == null || pathList.length() == 0) {
      continue;
    }
    // Property values are comma-separated path lists.
    Set<Path> allowed = new HashSet<Path>();
    for (String candidate : pathList.split(",\\s*")) {
      if (candidate.length() != 0) {
        allowed.add(new Path(candidate));
      }
    }
    permMap.put(user, allowed);
  }
  return permMap;
}
 
Example 12
Source Project: hadoop-gpu   File: ProxyFilter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads the per-user certificate serial numbers (hex, comma-separated)
 * from the configured XML resource.
 *
 * @param conf configuration used to locate the user-certs file
 * @return map from user name to the set of permitted serial numbers, or
 *         null (after logging a warning) when the resource is missing
 */
private static Map<String, Set<BigInteger>> getCertsMap(Configuration conf) {
  String certsLoc = conf.get("hdfsproxy.user.certs.file.location",
      "user-certs.xml");
  if (conf.getResource(certsLoc) == null) {
    LOG.warn("HdfsProxy user certs file not found");
    return null;
  }
  // Load the certs file standalone (no default Hadoop resources).
  Configuration certsConf = new Configuration(false);
  certsConf.addResource(certsLoc);
  Map<String, Set<BigInteger>> map = new HashMap<String, Set<BigInteger>>();
  for (Map.Entry<String, String> e : certsConf) {
    String k = e.getKey();
    String rawValue = e.getValue();
    // BUG FIX: the original called trim() before the null check, so a null
    // property value threw NPE and the subsequent "v != null" test was dead.
    if (k == null || k.length() == 0 || rawValue == null) {
      continue;
    }
    String v = rawValue.trim();
    if (v.length() == 0) {
      continue;
    }
    Set<BigInteger> numSet = new HashSet<BigInteger>();
    // Serial numbers are comma-separated hexadecimal strings.
    String[] serialnumbers = v.split("\\s*,\\s*");
    for (String num : serialnumbers) {
      if (num.length() != 0) {
        numSet.add(new BigInteger(num, 16));
      }
    }
    map.put(k, numSet);
  }
  return map;
}
 
Example 13
Source Project: mr4c   File: HadoopUtils.java    License: Apache License 2.0 4 votes vote down vote up
/**
  * Generates a human readable string with resource name and the URI it
  * resolves to on the classpath.
  *
  * @param conf configuration used to resolve the resource
  * @param name classpath resource name
  * @return description of where the resource was found, or a "not found"
  *         message when the resource is absent
*/
public static String describeResource(Configuration conf, String name) {
	URL url = conf.getResource(name);
	// BUG FIX: the original reported "Resource x found at null" for a
	// missing resource, which reads as if it WAS found.
	if (url == null) {
		return String.format("Resource %s not found", name);
	}
	return String.format("Resource %s found at %s", name, url);
}
 
Example 14
Source Project: oryx   File: BatchUpdateFunction.java    License: Apache License 2.0 4 votes vote down vote up
@Override
public void call(JavaPairRDD<K,M> newData, Time timestamp)
    throws IOException, InterruptedException {

  // Empty micro-batch: skip the entire update cycle.
  if (newData.isEmpty()) {
    log.info("No data in current generation's RDD; nothing to do");
    return;
  }

  log.info("Beginning update at {}", timestamp);

  Configuration hadoopConf = sparkContext.hadoopConfiguration();
  // Sanity check: a missing core-site.xml usually means the Hadoop config
  // directory is not on the classpath; warn but continue with defaults.
  if (hadoopConf.getResource("core-site.xml") == null) {
    log.warn("Hadoop config like core-site.xml was not found; " +
             "is the Hadoop config directory on the classpath?");
  }

  // Load previously persisted data (part-* files under per-generation
  // subdirectories of dataDirString), if any exists.
  JavaPairRDD<K,M> pastData;
  Path inputPathPattern = new Path(dataDirString + "/*/part-*");
  FileSystem fs = FileSystem.get(inputPathPattern.toUri(), hadoopConf);
  FileStatus[] inputPathStatuses = fs.globStatus(inputPathPattern);
  if (inputPathStatuses == null || inputPathStatuses.length == 0) {

    // No prior generations: signal "no past data" with null.
    log.info("No past data at path(s) {}", inputPathPattern);
    pastData = null;

  } else {

    log.info("Found past data at path(s) like {}", inputPathStatuses[0].getPath());
    // Copy the config so the INPUT_DIR override does not leak into the
    // shared Hadoop configuration used elsewhere.
    Configuration updatedConf = new Configuration(hadoopConf);
    updatedConf.set(FileInputFormat.INPUT_DIR, joinFSPaths(fs, inputPathStatuses));

    // Cast is safe by construction: the RDD is read with the configured
    // key/message Writable classes.
    @SuppressWarnings("unchecked")
    JavaPairRDD<Writable,Writable> pastWritableData = (JavaPairRDD<Writable,Writable>)
        sparkContext.newAPIHadoopRDD(updatedConf,
                                     SequenceFileInputFormat.class,
                                     keyWritableClass,
                                     messageWritableClass);

    // Convert raw Writable pairs back into the domain key/message types.
    pastData = pastWritableData.mapToPair(
        new WritableToValueFunction<>(keyClass,
                                      messageClass,
                                      keyWritableClass,
                                      messageWritableClass));
  }

  if (updateTopic == null || updateBroker == null) {
    // No update topic configured: run the update without publishing.
    log.info("Not producing updates to update topic since none was configured");
    updateInstance.runUpdate(sparkContext,
                             timestamp.milliseconds(),
                             newData,
                             pastData,
                             modelDirString,
                             null);
  } else {
    // This TopicProducer should not be async; sends one big model generally and
    // needs to occur before other updates reliably rather than be buffered.
    // try-with-resources closes the producer after the update completes.
    try (TopicProducer<String,U> producer =
             new TopicProducerImpl<>(updateBroker, updateTopic, false)) {
      updateInstance.runUpdate(sparkContext,
                               timestamp.milliseconds(),
                               newData,
                               pastData,
                               modelDirString,
                               producer);
    }
  }
}