org.apache.hadoop.fs.FsUrlStreamHandlerFactory Java Examples

The following examples show how to use org.apache.hadoop.fs.FsUrlStreamHandlerFactory. Each example is taken from an open source project; the source file, project, and license are noted above the code.
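Before the project examples, here is a minimal sketch of the basic pattern: once FsUrlStreamHandlerFactory is registered, plain java.net.URL objects can open hdfs:// streams. The namenode host, port, and file path below are hypothetical and for illustration only.

import java.io.InputStream;
import java.net.URL;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public class HdfsUrlQuickStart {
  public static void main(String[] args) throws Exception {
    // Register Hadoop's factory so java.net.URL understands hdfs:// (and other
    // Hadoop FileSystem) schemes. The JVM allows this call at most once.
    URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

    // Hypothetical namenode address and file path.
    try (InputStream in = new URL("hdfs://namenode:8020/user/demo/data.txt").openStream()) {
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        System.out.write(buf, 0, n);
      }
    }
  }
}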
Example #1
Source File: HadoopFileSystem.java    From rheem with Apache License 2.0
/**
 * Make sure that this instance is initialized. This is particularly required to use HDFS {@link URL}s.
 */
public void ensureInitialized() {
    if (this.isInitialized) return;

    // Add handler for HDFS URL for java.net.URL
    LoggerFactory.getLogger(HadoopFileSystem.class).info("Adding handler for HDFS URLs.");
    try {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    } catch (Throwable t) {
        LoggerFactory.getLogger(HadoopFileSystem.class).error(
                "Could not set URL stream handler factory.", t
        );
    } finally {
        this.isInitialized = true;
    }
}
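The catch-all and the finally block are deliberate: java.net.URL.setURLStreamHandlerFactory may be called at most once per JVM and throws an Error if another factory is already installed (for example by an application server), so the method logs the failure and still marks the instance as initialized instead of failing hard. A minimal usage sketch, assuming a HadoopFileSystem instance named hadoopFs and a hypothetical namenode address:

hadoopFs.ensureInitialized();
try (InputStream in = new URL("hdfs://namenode:8020/data/input.csv").openStream()) {
    // read the HDFS file through the plain java.net.URL API
}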
 
Example #2
Source File: SparkIngestDriver.java    From geowave with Apache License 2.0
public static void setHdfsURLStreamHandlerFactory() throws NoSuchFieldException,
    SecurityException, IllegalArgumentException, IllegalAccessException {
  final Field factoryField = URL.class.getDeclaredField("factory");
  factoryField.setAccessible(true);
  // HP Fortify "Access Control" false positive
  // Changing the accessibility here is necessary;
  // it has been reviewed and judged to be safe

  final URLStreamHandlerFactory urlStreamHandlerFactory =
      (URLStreamHandlerFactory) factoryField.get(null);

  if (urlStreamHandlerFactory == null) {
    URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
  } else {
    try {
      factoryField.setAccessible(true);
      // HP Fortify "Access Control" false positive
      // Changing the accessibility here is necessary;
      // it has been reviewed and judged to be safe
      factoryField.set(null, new FsUrlStreamHandlerFactory());
    } catch (final IllegalAccessException e1) {
      LOGGER.error("Could not access URLStreamHandler factory field on URL class", e1);
      throw new RuntimeException(
          "Could not access URLStreamHandler factory field on URL class",
          e1);
    }
  }
}
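This variant works around the once-per-JVM restriction by reflecting on the private static factory field of java.net.URL: if no factory is installed yet, it registers FsUrlStreamHandlerFactory the normal way; otherwise it overwrites the field so the Hadoop handler still takes effect. Such deep reflection into java.net.URL may be restricted on newer JVMs, so treat the sketch below as illustrative only (the namenode address is hypothetical):

// Install the handler defensively before building any hdfs:// URLs.
SparkIngestDriver.setHdfsURLStreamHandlerFactory();
try (InputStream in = new URL("hdfs://namenode:8020/ingest/batch-001.avro").openStream()) {
    // consume the ingest data
}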
 
Example #3
Source File: TestUrlStreamHandler.java    From RDFS with Apache License 2.0
/**
 * Test opening and reading from an InputStream through an hdfs:// URL.
 * <p>
 * First generate a file with some content through the FileSystem API, then
 * try to open and read the file through the URL stream API.
 * 
 * @throws IOException
 */
public void testDfsUrls() throws IOException {

  Configuration conf = new Configuration();
  MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
  FileSystem fs = cluster.getFileSystem();

  // Set up our own factory.
  // setURLStreamHandlerFactory can be called at most once per JVM;
  // the new URLStreamHandler is valid for all test cases
  // in TestUrlStreamHandler.
  FsUrlStreamHandlerFactory factory =
      new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
  java.net.URL.setURLStreamHandlerFactory(factory);

  Path filePath = new Path("/thefile");

  try {
    byte[] fileContent = new byte[1024];
    for (int i = 0; i < fileContent.length; ++i)
      fileContent[i] = (byte) i;

    // First create the file through the FileSystem API
    OutputStream os = fs.create(filePath);
    os.write(fileContent);
    os.close();

    // Second, open and read the file content through the URL API
    URI uri = fs.getUri();
    URL fileURL =
        new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath
            .toString());

    InputStream is = fileURL.openStream();
    assertNotNull(is);

    byte[] bytes = new byte[4096];
    assertEquals(1024, is.read(bytes));
    is.close();

    for (int i = 0; i < fileContent.length; ++i)
      assertEquals(fileContent[i], bytes[i]);

    // Cleanup: delete the file
    fs.delete(filePath, false);

  } finally {
    fs.close();
    cluster.shutdown();
  }

}
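For comparison, the same bytes can be read back without any URL handler by going through the FileSystem API directly; a minimal sketch reusing fs and filePath from the test above:

// Read the file straight through the Hadoop FileSystem API (no URL handler involved).
InputStream direct = fs.open(filePath);
byte[] buf = new byte[1024];
assertEquals(1024, direct.read(buf));
direct.close();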
 
Example #4
Source File: TestUrlStreamHandler.java    From hadoop-gpu with Apache License 2.0
/**
 * Test opening and reading from an InputStream through an hdfs:// URL.
 * <p>
 * First generate a file with some content through the FileSystem API, then
 * try to open and read the file through the URL stream API.
 * 
 * @throws IOException
 */
public void testDfsUrls() throws IOException {

  Configuration conf = new Configuration();
  MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
  FileSystem fs = cluster.getFileSystem();

  // Set up our own factory.
  // setURLStreamHandlerFactory can be called at most once per JVM;
  // the new URLStreamHandler is valid for all test cases
  // in TestUrlStreamHandler.
  FsUrlStreamHandlerFactory factory =
      new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
  java.net.URL.setURLStreamHandlerFactory(factory);

  Path filePath = new Path("/thefile");

  try {
    byte[] fileContent = new byte[1024];
    for (int i = 0; i < fileContent.length; ++i)
      fileContent[i] = (byte) i;

    // First create the file through the FileSystem API
    OutputStream os = fs.create(filePath);
    os.write(fileContent);
    os.close();

    // Second, open and read the file content through the URL API
    URI uri = fs.getUri();
    URL fileURL =
        new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath
            .toString());

    InputStream is = fileURL.openStream();
    assertNotNull(is);

    byte[] bytes = new byte[4096];
    assertEquals(1024, is.read(bytes));
    is.close();

    for (int i = 0; i < fileContent.length; ++i)
      assertEquals(fileContent[i], bytes[i]);

    // Cleanup: delete the file
    fs.delete(filePath, false);

  } finally {
    fs.close();
    cluster.shutdown();
  }

}