org.apache.hadoop.util.NativeCodeLoader — Java Examples

The following examples show how to use org.apache.hadoop.util.NativeCodeLoader. Each example is taken from a real open-source project; follow the link above an example to see the original source file and its surrounding context, and check the sidebar for related API usage.
Example #1
Source File: TestFileJournalManager.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Tests that internal renames are done using native code on platforms that
 * have it.  The native rename includes more detailed information about the
 * failure, which can be useful for troubleshooting.
 *
 * Strategy: make the storage directory read-only so FileJournalManager's
 * pre-upgrade rename of "current" to "previous.tmp" must fail, then verify
 * that an IOException is raised — with the native-rename message when
 * libhadoop is loaded.
 */
@Test
public void testDoPreUpgradeIOError() throws IOException {
  File storageDir = new File(TestEditLog.TEST_DIR, "preupgradeioerror");
  List<URI> editUris = Collections.singletonList(storageDir.toURI());
  NNStorage storage = setupEdits(editUris, 5);
  StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();
  assertNotNull(sd);
  // Change storage directory so that renaming current to previous.tmp fails.
  FileUtil.setWritable(storageDir, false);
  FileJournalManager jm = null;
  try {
    jm = new FileJournalManager(conf, sd, storage);
    // The ExpectedException rule must be armed BEFORE doPreUpgrade() runs.
    exception.expect(IOException.class);
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      // Only the native rename path includes this detailed message.
      exception.expectMessage("failure in native rename");
    }
    jm.doPreUpgrade();
  } finally {
    IOUtils.cleanup(LOG, jm);
    // Restore permissions on storage directory and make sure we can delete.
    FileUtil.setWritable(storageDir, true);
    FileUtil.fullyDelete(storageDir);
  }
}
 
Example #2
Source File: TestCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through BZip2Codec using first the system-native and
 * then the pure-Java bzip2 implementation, provided libhadoop and native
 * bzip2 are both available; otherwise logs a warning or skips silently.
 */
@Test(timeout=20000)
public void testBZip2NativeCodec() throws IOException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // libhadoop itself is absent; nothing to exercise.
  }
  if (!Bzip2Factory.isNativeBzip2Loaded(conf)) {
    LOG.warn("Native hadoop library available but native bzip2 is not");
    return;
  }
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  // Exercise the system-native implementation first...
  codecTest(conf, seed, 0, codecClass);
  codecTest(conf, seed, count, codecClass);
  // ...then switch to the pure-Java implementation and repeat.
  conf.set("io.compression.codec.bzip2.library", "java-builtin");
  codecTest(conf, seed, 0, codecClass);
  codecTest(conf, seed, count, codecClass);
}
 
Example #3
Source File: TestCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies the LZ4 codec round-trips data with the lz4hc variant disabled
 * and then enabled. Fails outright if libhadoop is loaded without lz4
 * support; skips silently when no native library is present at all.
 */
@Test
public void testLz4Codec() throws IOException {
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // no native hadoop library; nothing to verify.
  }
  if (!Lz4Codec.isNativeCodeLoaded()) {
    Assert.fail("Native hadoop library available but lz4 not");
  }
  final String codecClass = "org.apache.hadoop.io.compress.Lz4Codec";
  // Run once with the high-compression variant off, once with it on.
  for (boolean useLz4Hc : new boolean[] { false, true }) {
    conf.setBoolean(
        CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
        useLz4Hc);
    codecTest(conf, seed, 0, codecClass);
    codecTest(conf, seed, count, codecClass);
  }
}
 
Example #4
Source File: TestCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Writes and reads SequenceFiles compressed with the native bzip2 codec at
 * several record counts, when libhadoop and native bzip2 are both present;
 * otherwise warns or skips silently.
 */
@Test(timeout=20000)
public void testSequenceFileBZip2NativeCodec() throws IOException, 
                      ClassNotFoundException, InstantiationException, 
                      IllegalAccessException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // libhadoop is not loaded; skip.
  }
  if (!Bzip2Factory.isNativeBzip2Loaded(conf)) {
    LOG.warn("Native hadoop library available but native bzip2 is not");
    return;
  }
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  // Same (records, lines) coverage as before: empty, small, and large.
  sequenceFileCodecTest(conf, 0, codecClass, 100);
  sequenceFileCodecTest(conf, 100, codecClass, 100);
  sequenceFileCodecTest(conf, 200000, codecClass, 1000000);
}
 
Example #5
Source File: TestCompressionTest.java    From hbase with Apache License 2.0 6 votes vote down vote up
/**
 * Checks CompressionTest: NONE and GZ must always work. When libhadoop is
 * loaded, each native-backed codec is probed individually; when it is not,
 * every native-backed algorithm must report itself unusable.
 */
@Test
public void testTestCompression() {
  assertTrue(CompressionTest.testCompression("NONE"));
  assertTrue(CompressionTest.testCompression("GZ"));

  if (NativeCodeLoader.isNativeCodeLoaded()) {
    nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
    nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
    nativeCodecTest("SNAPPY", "snappy", "org.apache.hadoop.io.compress.SnappyCodec");
    nativeCodecTest("BZIP2", "bzip2", "org.apache.hadoop.io.compress.BZip2Codec");
    nativeCodecTest("ZSTD", "zstd", "org.apache.hadoop.io.compress.ZStandardCodec");
  } else {
    // Hadoop nativelib is not available; all native algorithms must fail.
    LOG.debug("Native code not loaded");
    for (String algo : new String[] { "LZO", "LZ4", "SNAPPY", "BZIP2", "ZSTD" }) {
      assertFalse(CompressionTest.testCompression(algo));
    }
  }
}
 
Example #6
Source File: SequenceFile.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param conf The configuration.
 * @param out The stream on top which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param codec The compression codec (may be null when compress is false).
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 * @throws IllegalArgumentException if GzipCodec is requested but neither
 *         native-hadoop nor native zlib is available.
 */
private static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, boolean compress, boolean blockCompress,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  // instanceof is null-safe, so the former explicit "codec != null" test
  // was redundant.
  if (codec instanceof GzipCodec && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  final Writer writer;
  if (!compress) {
    writer = new Writer(conf, out, keyClass, valClass, metadata);
  } else if (!blockCompress) {
    // compress is known true here: record-level compression.
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  } else {
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  }
  return writer;
}
 
Example #7
Source File: SequenceFile.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param conf The configuration.
 * @param out The stream on top which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
public static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, CompressionType compressionType,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  // Gzip needs a zlib implementation; fail fast when neither native-hadoop
  // nor native zlib is loaded.
  if ((codec instanceof GzipCodec) && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  Writer writer = null;
  switch (compressionType) {
  case NONE:
    writer = new Writer(conf, out, keyClass, valClass, metadata);
    break;
  case RECORD:
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
    break;
  case BLOCK:
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
    break;
  }
  return writer;
}
 
Example #8
Source File: TestFileJournalManager.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Tests that internal renames are done using native code on platforms that
 * have it.  The native rename includes more detailed information about the
 * failure, which can be useful for troubleshooting.
 *
 * Strategy: make the storage directory read-only so FileJournalManager's
 * pre-upgrade rename of "current" to "previous.tmp" must fail, then verify
 * that an IOException is raised — with the native-rename message when
 * libhadoop is loaded.
 */
@Test
public void testDoPreUpgradeIOError() throws IOException {
  File storageDir = new File(TestEditLog.TEST_DIR, "preupgradeioerror");
  List<URI> editUris = Collections.singletonList(storageDir.toURI());
  NNStorage storage = setupEdits(editUris, 5);
  StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();
  assertNotNull(sd);
  // Change storage directory so that renaming current to previous.tmp fails.
  FileUtil.setWritable(storageDir, false);
  FileJournalManager jm = null;
  try {
    jm = new FileJournalManager(conf, sd, storage);
    // The ExpectedException rule must be armed BEFORE doPreUpgrade() runs.
    exception.expect(IOException.class);
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      // Only the native rename path includes this detailed message.
      exception.expectMessage("failure in native rename");
    }
    jm.doPreUpgrade();
  } finally {
    IOUtils.cleanup(LOG, jm);
    // Restore permissions on storage directory and make sure we can delete.
    FileUtil.setWritable(storageDir, true);
    FileUtil.fullyDelete(storageDir);
  }
}
 
Example #9
Source File: SequenceFile.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param conf The configuration.
 * @param out The stream on top which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param codec The compression codec (may be null when compress is false).
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 * @throws IllegalArgumentException if GzipCodec is requested but neither
 *         native-hadoop nor native zlib is available.
 */
private static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, boolean compress, boolean blockCompress,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  // instanceof is null-safe, so the former explicit "codec != null" test
  // was redundant.
  if (codec instanceof GzipCodec && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  final Writer writer;
  if (!compress) {
    writer = new Writer(conf, out, keyClass, valClass, metadata);
  } else if (!blockCompress) {
    // compress is known true here: record-level compression.
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  } else {
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  }
  return writer;
}
 
Example #10
Source File: SnappyCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Are the native snappy libraries loaded & initialized?
 *
 * @throws RuntimeException if libhadoop is missing or built without snappy,
 *         or if either the compressor or decompressor side failed to load
 *         its native bindings.
 */
public static void checkNativeCodeLoaded() {
    // Shared prefix of every failure message below.
    final String unavailable = "native snappy library not available: ";
    if (!NativeCodeLoader.isNativeCodeLoaded() ||
        !NativeCodeLoader.buildSupportsSnappy()) {
      throw new RuntimeException(unavailable +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException(unavailable +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException(unavailable +
          "SnappyDecompressor has not been loaded.");
    }
}
 
Example #11
Source File: CompressDecompressTester.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Method for compressor availability check.
 *
 * Decides whether the pair's compressor can actually run in this JVM:
 * lz4 and the built-in zlib deflater require libhadoop to be loaded, the
 * native zlib compressor requires a loaded native zlib, and snappy
 * requires both the snappy and hadoop native libraries.
 *
 * Fix: the original used compressor.getClass().isAssignableFrom(X.class),
 * which is true only for the exact class X (or a supertype) — instanceof
 * expresses the intended "is an X" check and also covers subclasses.
 */
private static <T extends Compressor, E extends Decompressor> boolean isAvailable(TesterPair<T, E> pair) {
  Compressor compressor = pair.compressor;

  if (compressor instanceof Lz4Compressor) {
    return NativeCodeLoader.isNativeCodeLoaded();
  }
  if (compressor instanceof BuiltInZlibDeflater) {
    return NativeCodeLoader.isNativeCodeLoaded();
  }
  if (compressor instanceof ZlibCompressor) {
    return ZlibFactory.isNativeZlibLoaded(new Configuration());
  }
  if (compressor instanceof SnappyCompressor) {
    return isNativeSnappyLoadable();
  }
  return false;
}
 
Example #12
Source File: TestNativeCodeLoader.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that libhadoop.so is loaded whenever the environment requires
 * it, and that each native-backed component reports a non-empty library
 * name once loaded.
 */
@Test
public void testNativeCodeLoaded() {
  // Environments that did not build native code skip this test entirely.
  if (requireTestJni() == false) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names are depended on platform and build envs
  // so just check names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  // snappy and openssl are optional build features; only check their
  // library names when the build advertises support for them.
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  if (NativeCodeLoader.buildSupportsOpenssl()) {
    assertFalse(OpensslCipher.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Example #13
Source File: TestCryptoCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through the OpenSSL AES-CTR codec, pairing it with
 * itself and with the JCE codec, including an IV counter-overflow case.
 * Skipped (via Assume) unless run with -Pnative and openssl support.
 */
@Test(timeout=120000)
public void testOpensslAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
  // Force the low 8 bytes of the IV to 0xff so encryption crosses the
  // counter-overflow boundary: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  int idx = 8;
  while (idx < 16) {
    iv[idx++] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
}
 
Example #14
Source File: SequenceFile.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param conf The configuration.
 * @param out The stream on top which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
public static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, CompressionType compressionType,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  // Gzip needs a zlib implementation; fail fast when neither native-hadoop
  // nor native zlib is loaded.
  if ((codec instanceof GzipCodec) && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  Writer writer = null;
  switch (compressionType) {
  case NONE:
    writer = new Writer(conf, out, keyClass, valClass, metadata);
    break;
  case RECORD:
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
    break;
  case BLOCK:
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
    break;
  }
  return writer;
}
 
Example #15
Source File: TestCryptoCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through the OpenSSL AES-CTR codec, pairing it with
 * itself and with the JCE codec, including an IV counter-overflow case.
 * Skipped (via Assume) unless run with -Pnative and openssl support.
 */
@Test(timeout=120000)
public void testOpensslAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
  // Force the low 8 bytes of the IV to 0xff so encryption crosses the
  // counter-overflow boundary: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  int idx = 8;
  while (idx < 16) {
    iv[idx++] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
}
 
Example #16
Source File: CompressDecompressTester.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Probes whether native snappy is usable: the standalone "snappy" library
 * must load via System.loadLibrary AND libhadoop must be loaded. Any
 * failure to load is caught and reported as "not loadable".
 */
private static boolean isNativeSnappyLoadable() {
  try {
    System.loadLibrary("snappy");
    logger.warn("Snappy native library is available");
    // snappy itself loaded; usability still requires libhadoop.
    final boolean loaded = NativeCodeLoader.isNativeCodeLoaded();
    if (loaded) {
      logger.info("Snappy native library loaded");
    } else {
      logger.warn("Snappy native library not loaded");
    }
    return loaded;
  } catch (Throwable t) {
    logger.warn("Failed to load snappy: ", t);
    return false;
  }
}
 
Example #17
Source File: CompressDecompressTester.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Method for compressor availability check.
 *
 * Decides whether the pair's compressor can actually run in this JVM:
 * lz4 and the built-in zlib deflater require libhadoop to be loaded, the
 * native zlib compressor requires a loaded native zlib, and snappy
 * requires both the snappy and hadoop native libraries.
 *
 * Fix: the original used compressor.getClass().isAssignableFrom(X.class),
 * which is true only for the exact class X (or a supertype) — instanceof
 * expresses the intended "is an X" check and also covers subclasses.
 */
private static <T extends Compressor, E extends Decompressor> boolean isAvailable(TesterPair<T, E> pair) {
  Compressor compressor = pair.compressor;

  if (compressor instanceof Lz4Compressor) {
    return NativeCodeLoader.isNativeCodeLoaded();
  }
  if (compressor instanceof BuiltInZlibDeflater) {
    return NativeCodeLoader.isNativeCodeLoaded();
  }
  if (compressor instanceof ZlibCompressor) {
    return ZlibFactory.isNativeZlibLoaded(new Configuration());
  }
  if (compressor instanceof SnappyCompressor) {
    return isNativeSnappyLoadable();
  }
  return false;
}
 
Example #18
Source File: TestCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through BZip2Codec using first the system-native and
 * then the pure-Java bzip2 implementation, provided libhadoop and native
 * bzip2 are both available; otherwise logs a warning or skips silently.
 */
@Test(timeout=20000)
public void testBZip2NativeCodec() throws IOException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // libhadoop itself is absent; nothing to exercise.
  }
  if (!Bzip2Factory.isNativeBzip2Loaded(conf)) {
    LOG.warn("Native hadoop library available but native bzip2 is not");
    return;
  }
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  // Exercise the system-native implementation first...
  codecTest(conf, seed, 0, codecClass);
  codecTest(conf, seed, count, codecClass);
  // ...then switch to the pure-Java implementation and repeat.
  conf.set("io.compression.codec.bzip2.library", "java-builtin");
  codecTest(conf, seed, 0, codecClass);
  codecTest(conf, seed, count, codecClass);
}
 
Example #19
Source File: TestCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies the LZ4 codec round-trips data with the lz4hc variant disabled
 * and then enabled. Fails outright if libhadoop is loaded without lz4
 * support; skips silently when no native library is present at all.
 */
@Test
public void testLz4Codec() throws IOException {
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // no native hadoop library; nothing to verify.
  }
  if (!Lz4Codec.isNativeCodeLoaded()) {
    Assert.fail("Native hadoop library available but lz4 not");
  }
  final String codecClass = "org.apache.hadoop.io.compress.Lz4Codec";
  // Run once with the high-compression variant off, once with it on.
  for (boolean useLz4Hc : new boolean[] { false, true }) {
    conf.setBoolean(
        CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
        useLz4Hc);
    codecTest(conf, seed, 0, codecClass);
    codecTest(conf, seed, count, codecClass);
  }
}
 
Example #20
Source File: TestCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Writes and reads SequenceFiles compressed with the native bzip2 codec at
 * several record counts, when libhadoop and native bzip2 are both present;
 * otherwise warns or skips silently.
 */
@Test(timeout=20000)
public void testSequenceFileBZip2NativeCodec() throws IOException, 
                      ClassNotFoundException, InstantiationException, 
                      IllegalAccessException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // libhadoop is not loaded; skip.
  }
  if (!Bzip2Factory.isNativeBzip2Loaded(conf)) {
    LOG.warn("Native hadoop library available but native bzip2 is not");
    return;
  }
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  // Same (records, lines) coverage as before: empty, small, and large.
  sequenceFileCodecTest(conf, 0, codecClass, 100);
  sequenceFileCodecTest(conf, 100, codecClass, 100);
  sequenceFileCodecTest(conf, 200000, codecClass, 1000000);
}
 
Example #21
Source File: TestCryptoCodec.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through the JCE AES-CTR codec, pairing it with itself
 * and with the OpenSSL codec, including an IV counter-overflow case.
 * Skipped (via Assume) unless run with -Pnative and openssl support.
 */
@Test(timeout=120000)
public void testJceAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
  // Force the low 8 bytes of the IV to 0xff so encryption crosses the
  // counter-overflow boundary: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  int idx = 8;
  while (idx < 16) {
    iv[idx++] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
}
 
Example #22
Source File: CompressDecompressTester.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Probes whether native snappy is usable: the standalone "snappy" library
 * must load via System.loadLibrary AND libhadoop must be loaded. Any
 * failure to load is caught and reported as "not loadable".
 */
private static boolean isNativeSnappyLoadable() {
  try {
    System.loadLibrary("snappy");
    logger.warn("Snappy native library is available");
    // snappy itself loaded; usability still requires libhadoop.
    final boolean loaded = NativeCodeLoader.isNativeCodeLoaded();
    if (loaded) {
      logger.info("Snappy native library loaded");
    } else {
      logger.warn("Snappy native library not loaded");
    }
    return loaded;
  } catch (Throwable t) {
    logger.warn("Failed to load snappy: ", t);
    return false;
  }
}
 
Example #23
Source File: TestCryptoCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips data through the JCE AES-CTR codec, pairing it with itself
 * and with the OpenSSL codec, including an IV counter-overflow case.
 * Skipped (via Assume) unless run with -Pnative and openssl support.
 */
@Test(timeout=120000)
public void testJceAesCtrCryptoCodec() throws Exception {
  if (!"true".equalsIgnoreCase(System.getProperty("runningWithNative"))) {
    LOG.warn("Skipping since test was not run with -Pnative flag");
    Assume.assumeTrue(false);
  }
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
  // Force the low 8 bytes of the IV to 0xff so encryption crosses the
  // counter-overflow boundary: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff
  int idx = 8;
  while (idx < 16) {
    iv[idx++] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
}
 
Example #24
Source File: SnappyCodec.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Are the native snappy libraries loaded & initialized?
 *
 * @throws RuntimeException if libhadoop is missing or built without snappy,
 *         or if either the compressor or decompressor side failed to load
 *         its native bindings.
 */
public static void checkNativeCodeLoaded() {
    // Shared prefix of every failure message below.
    final String unavailable = "native snappy library not available: ";
    if (!NativeCodeLoader.isNativeCodeLoaded() ||
        !NativeCodeLoader.buildSupportsSnappy()) {
      throw new RuntimeException(unavailable +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException(unavailable +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException(unavailable +
          "SnappyDecompressor has not been loaded.");
    }
}
 
Example #25
Source File: SequenceFile.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param file The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param codec The compression codec (may be null when compress is false).
 * @param progress Progress reporter, passed through to the writer.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 * @throws IllegalArgumentException if GzipCodec is requested but neither
 *         native-hadoop nor native zlib is available.
 */
private static Writer
createWriter(FileSystem fs, Configuration conf, Path file, 
             Class keyClass, Class valClass, 
             boolean compress, boolean blockCompress,
             CompressionCodec codec, Progressable progress, Metadata metadata)
throws IOException {
  // instanceof is null-safe, so the former explicit "codec != null" test
  // was redundant.
  if (codec instanceof GzipCodec && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  final Writer writer;
  if (!compress) {
    writer = new Writer(fs, conf, file, keyClass, valClass, progress, metadata);
  } else if (!blockCompress) {
    // compress is known true here: record-level compression.
    writer = new RecordCompressWriter(fs, conf, file, keyClass, valClass, 
                                      codec, progress, metadata);
  } else {
    writer = new BlockCompressWriter(fs, conf, file, keyClass, valClass, 
                                     codec, progress, metadata);
  }
  return writer;
}
 
Example #26
Source File: Bzip2Factory.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Check if native-bzip2 code is loaded & initialized correctly and 
 * can be loaded for this job.
 * 
 * The result is cached per configured library name: the probe reruns only
 * when "io.compression.codec.bzip2.library" changes.
 * 
 * @param conf configuration
 * @return <code>true</code> if native-bzip2 is loaded & initialized 
 *         and can be loaded for this job, else <code>false</code>
 */
public static boolean isNativeBzip2Loaded(Configuration conf) {
  String libname = conf.get("io.compression.codec.bzip2.library", 
                            "system-native");
  if (!bzip2LibraryName.equals(libname)) {
    nativeBzip2Loaded = false;
    bzip2LibraryName = libname;
    if (libname.equals("java-builtin")) {
      LOG.info("Using pure-Java version of bzip2 library");
    } else if (conf.getBoolean(
              CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, 
              CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT) &&
        NativeCodeLoader.isNativeCodeLoaded()) {
      try {
        // Initialize the native library.
        Bzip2Compressor.initSymbols(libname);
        Bzip2Decompressor.initSymbols(libname);
        nativeBzip2Loaded = true;
        LOG.info("Successfully loaded & initialized native-bzip2 library " +
                 libname);
      } catch (Throwable t) {
        // Fix: log the cause instead of discarding it, so load failures
        // are diagnosable from the logs.
        LOG.warn("Failed to load/initialize native-bzip2 library " + 
                 libname + ", will use pure-Java version", t);
      }
    }
  }
  return nativeBzip2Loaded;
}
 
Example #27
Source File: JniBasedUnixGroupsNetgroupMappingWithFallback.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Chooses a netgroup mapping implementation at construction time: the
 * JNI-based mapping when libhadoop is loaded, otherwise the slower
 * shell-based fallback.
 */
public JniBasedUnixGroupsNetgroupMappingWithFallback() {
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    this.impl = new JniBasedUnixGroupsNetgroupMapping();
  } else {
    LOG.info("Falling back to shell based");
    this.impl = new ShellBasedUnixGroupsNetgroupMapping();
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Group mapping impl=" + impl.getClass().getName());
  }
}
 
Example #28
Source File: JniBasedUnixGroupsMappingWithFallback.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Chooses a group mapping implementation at construction time: the
 * JNI-based mapping when libhadoop is loaded, otherwise the slower
 * shell-based fallback.
 */
public JniBasedUnixGroupsMappingWithFallback() {
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    this.impl = new JniBasedUnixGroupsMapping();
  } else {
    PerformanceAdvisory.LOG.debug("Falling back to shell based");
    this.impl = new ShellBasedUnixGroupsMapping();
  }
  if (LOG.isDebugEnabled()){
    LOG.debug("Group mapping impl=" + impl.getClass().getName());
  }
}
 
Example #29
Source File: SequenceFile.java    From hadoop-gpu with Apache License 2.0 5 votes vote down vote up
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param file The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param codec The compression codec (may be null when compress is false).
 * @param progress Progress reporter, passed through to the writer.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 * @throws IllegalArgumentException if GzipCodec is requested but neither
 *         native-hadoop nor native zlib is available.
 */
private static Writer
createWriter(FileSystem fs, Configuration conf, Path file, 
             Class keyClass, Class valClass, 
             boolean compress, boolean blockCompress,
             CompressionCodec codec, Progressable progress, Metadata metadata)
throws IOException {
  // instanceof is null-safe, so the former explicit "codec != null" test
  // was redundant.
  if (codec instanceof GzipCodec && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  final Writer writer;
  if (!compress) {
    writer = new Writer(fs, conf, file, keyClass, valClass, progress, metadata);
  } else if (!blockCompress) {
    // compress is known true here: record-level compression.
    writer = new RecordCompressWriter(fs, conf, file, keyClass, valClass, 
                                      codec, progress, metadata);
  } else {
    writer = new BlockCompressWriter(fs, conf, file, keyClass, valClass, 
                                     codec, progress, metadata);
  }
  return writer;
}
 
Example #30
Source File: SequenceFile.java    From hadoop-gpu with Apache License 2.0 5 votes vote down vote up
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlaying outputstream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param progress The Progressable object to track progress.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
public static Writer
  createWriter(FileSystem fs, Configuration conf, Path name,
               Class keyClass, Class valClass, int bufferSize,
               short replication, long blockSize,
               CompressionType compressionType, CompressionCodec codec,
               Progressable progress, Metadata metadata) throws IOException {
  // Gzip needs a zlib implementation; fail fast when neither native-hadoop
  // nor native zlib is loaded.
  if ((codec instanceof GzipCodec) &&
      !NativeCodeLoader.isNativeCodeLoaded() &&
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  Writer writer = null;
  switch (compressionType) {
  case NONE:
    writer = new Writer(fs, conf, name, keyClass, valClass,
                        bufferSize, replication, blockSize,
                        progress, metadata);
    break;
  case RECORD:
    writer = new RecordCompressWriter(fs, conf, name, keyClass, valClass,
                                      bufferSize, replication, blockSize,
                                      codec, progress, metadata);
    break;
  case BLOCK:
    writer = new BlockCompressWriter(fs, conf, name, keyClass, valClass,
                                     bufferSize, replication, blockSize,
                                     codec, progress, metadata);
    break;
  }

  return writer;
}