Java Code Examples for org.apache.hadoop.util.NativeCodeLoader#isNativeCodeLoaded()

The following examples show how to use org.apache.hadoop.util.NativeCodeLoader#isNativeCodeLoaded(). They are drawn from open source projects; the source file, originating project, and license are noted above each example.
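Before the individual examples, here is a minimal, self-contained sketch of the guard pattern most of them share: call NativeCodeLoader.isNativeCodeLoaded() once and branch to a native or pure-Java code path accordingly. The class name NativeGuardExample and the log messages are illustrative only and do not come from any of the projects below.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.NativeCodeLoader;

public class NativeGuardExample {
  private static final Log LOG = LogFactory.getLog(NativeGuardExample.class);

  public static void main(String[] args) {
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      // libhadoop was found on java.library.path, so native codecs and
      // JNI-backed helpers may be used.
      LOG.info("Native hadoop library loaded: " + NativeCodeLoader.getLibraryName());
    } else {
      // Fall back to pure-Java implementations; slower, but always available.
      LOG.info("Native hadoop library not loaded; using pure-Java fallbacks.");
    }
  }
}
 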
Example 1
Source File: TestNativeCodeLoader.java    From big-c with Apache License 2.0
@Test
public void testNativeCodeLoaded() {
  if (!requireTestJni()) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names depend on the platform and build environment,
  // so just check that the names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  if (NativeCodeLoader.buildSupportsOpenssl()) {
    assertFalse(OpensslCipher.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Example 2
Source File: TestFileJournalManager.java    From big-c with Apache License 2.0
/**
 * Tests that internal renames are done using native code on platforms that
 * have it.  The native rename includes more detailed information about the
 * failure, which can be useful for troubleshooting.
 */
@Test
public void testDoPreUpgradeIOError() throws IOException {
  File storageDir = new File(TestEditLog.TEST_DIR, "preupgradeioerror");
  List<URI> editUris = Collections.singletonList(storageDir.toURI());
  NNStorage storage = setupEdits(editUris, 5);
  StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();
  assertNotNull(sd);
  // Change storage directory so that renaming current to previous.tmp fails.
  FileUtil.setWritable(storageDir, false);
  FileJournalManager jm = null;
  try {
    jm = new FileJournalManager(conf, sd, storage);
    exception.expect(IOException.class);
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      exception.expectMessage("failure in native rename");
    }
    jm.doPreUpgrade();
  } finally {
    IOUtils.cleanup(LOG, jm);
    // Restore permissions on storage directory and make sure we can delete.
    FileUtil.setWritable(storageDir, true);
    FileUtil.fullyDelete(storageDir);
  }
}
 
Example 3
Source File: TestCodec.java    From hadoop with Apache License 2.0
@Test(timeout=20000)
public void testBZip2NativeCodec() throws IOException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
      codecTest(conf, seed, count, 
                "org.apache.hadoop.io.compress.BZip2Codec");
      conf.set("io.compression.codec.bzip2.library", "java-builtin");
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
      codecTest(conf, seed, count, 
                "org.apache.hadoop.io.compress.BZip2Codec");
    } else {
      LOG.warn("Native hadoop library available but native bzip2 is not");
    }
  }
}
 
Example 4
Source File: SequenceFile.java    From hadoop-gpu with Apache License 2.0
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param out The stream on top of which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
private static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, boolean compress, boolean blockCompress,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  if (codec != null && (codec instanceof GzipCodec) && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  Writer writer = null;

  if (!compress) {
    writer = new Writer(conf, out, keyClass, valClass, metadata);
  } else if (compress && !blockCompress) {
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  } else {
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  }
  
  return writer;
}
 
Example 5
Source File: TestCodec.java    From big-c with Apache License 2.0
@Test
public void testLz4Codec() throws IOException {
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    if (Lz4Codec.isNativeCodeLoaded()) {
      conf.setBoolean(
          CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
          false);
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
      conf.setBoolean(
          CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
          true);
      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
    } else {
      Assert.fail("Native hadoop library available but lz4 not");
    }
  }
}
 
Example 6
Source File: SnappyCodec.java    From big-c with Apache License 2.0
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
    if (!NativeCodeLoader.isNativeCodeLoaded() ||
        !NativeCodeLoader.buildSupportsSnappy()) {
      throw new RuntimeException("native snappy library not available: " +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyDecompressor has not been loaded.");
    }
}
 
Example 7
Source File: SequenceFile.java    From RDFS with Apache License 2.0
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlying output stream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param createParent create parent directory if non-existent
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param progress The Progressable object to track progress.
 * @param metadata The metadata of the file.
 * @param forceSync set the forceSync flag for this file
 * @param doParallelWrites write replicas in parallel
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
public static Writer
  createWriter(FileSystem fs, Configuration conf, Path name,
               Class keyClass, Class valClass, int bufferSize,
               short replication, long blockSize, boolean createParent,
               CompressionType compressionType, CompressionCodec codec,
               Metadata metadata, boolean forceSync,
               boolean doParallelWrites) throws IOException {
  if ((codec instanceof GzipCodec) &&
      !NativeCodeLoader.isNativeCodeLoaded() &&
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  switch (compressionType) {
  case NONE:
    return new Writer(conf,
        fs.createNonRecursive(name, FsPermission.getDefault(), true,
            bufferSize, replication, blockSize, null, forceSync, doParallelWrites),
        keyClass, valClass, metadata).ownStream();
  case RECORD:
    return new RecordCompressWriter(conf,
        fs.createNonRecursive(name, FsPermission.getDefault(), true,
            bufferSize, replication, blockSize, null, forceSync, doParallelWrites),
        keyClass, valClass, codec, metadata).ownStream();
  case BLOCK:
    return new BlockCompressWriter(conf,
        fs.createNonRecursive(name, FsPermission.getDefault(), true,
            bufferSize, replication, blockSize, null, forceSync, doParallelWrites),
        keyClass, valClass, codec, metadata).ownStream();
  default:
    return null;
  }
}
 
Example 8
Source File: TestHdfsNativeCodeLoader.java    From big-c with Apache License 2.0
@Test
@Ignore
public void testNativeCodeLoaded() {
  if (!requireTestJni()) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    String LD_LIBRARY_PATH = System.getenv().get("LD_LIBRARY_PATH");
    if (LD_LIBRARY_PATH == null) LD_LIBRARY_PATH = "";
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.  LD_LIBRARY_PATH = " + LD_LIBRARY_PATH);
  }
  LOG.info("TestHdfsNativeCodeLoader: libhadoop.so is loaded.");
}
 
Example 9
Source File: Bzip2Factory.java    From big-c with Apache License 2.0
/**
 * Check if native-bzip2 code is loaded & initialized correctly and 
 * can be loaded for this job.
 * 
 * @param conf configuration
 * @return <code>true</code> if native-bzip2 is loaded & initialized 
 *         and can be loaded for this job, else <code>false</code>
 */
public static boolean isNativeBzip2Loaded(Configuration conf) {
  String libname = conf.get("io.compression.codec.bzip2.library", 
                            "system-native");
  if (!bzip2LibraryName.equals(libname)) {
    nativeBzip2Loaded = false;
    bzip2LibraryName = libname;
    if (libname.equals("java-builtin")) {
      LOG.info("Using pure-Java version of bzip2 library");
    } else if (conf.getBoolean(
              CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, 
              CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT) &&
        NativeCodeLoader.isNativeCodeLoaded()) {
      try {
        // Initialize the native library.
        Bzip2Compressor.initSymbols(libname);
        Bzip2Decompressor.initSymbols(libname);
        nativeBzip2Loaded = true;
        LOG.info("Successfully loaded & initialized native-bzip2 library " +
                 libname);
      } catch (Throwable t) {
        LOG.warn("Failed to load/initialize native-bzip2 library " + 
                 libname + ", will use pure-Java version");
      }
    }
  }
  return nativeBzip2Loaded;
}
 
Example 10
Source File: JniBasedUnixGroupsMappingWithFallback.java    From hadoop with Apache License 2.0
public JniBasedUnixGroupsMappingWithFallback() {
  if (NativeCodeLoader.isNativeCodeLoaded()) {
    this.impl = new JniBasedUnixGroupsMapping();
  } else {
    PerformanceAdvisory.LOG.debug("Falling back to shell based");
    this.impl = new ShellBasedUnixGroupsMapping();
  }
  if (LOG.isDebugEnabled()){
    LOG.debug("Group mapping impl=" + impl.getClass().getName());
  }
}
 
Example 11
Source File: NativeIO.java    From yuzhouwan with Apache License 2.0
/**
 * Return true if the JNI-based native IO extensions are available.
 */
public static boolean isAvailable() {
    return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
}
 
Example 12
Source File: NativeIO.java    From big-c with Apache License 2.0
/**
 * Return true if the JNI-based native IO extensions are available.
 */
public static boolean isAvailable() {
  return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
}
 
Example 13
Source File: SequenceFile.java    From gemfirexd-oss with Apache License 2.0
/**
 * Construct an uncompressed writer from a set of options.
 * @param conf the configuration to use
 * @param opts the options used when creating the writer
 * @throws IOException if it fails
 */
Writer(Configuration conf, 
       Option... opts) throws IOException {
  BlockSizeOption blockSizeOption = 
    Options.getOption(BlockSizeOption.class, opts);
  BufferSizeOption bufferSizeOption = 
    Options.getOption(BufferSizeOption.class, opts);
  ReplicationOption replicationOption = 
    Options.getOption(ReplicationOption.class, opts);
  ProgressableOption progressOption = 
    Options.getOption(ProgressableOption.class, opts);
  FileOption fileOption = Options.getOption(FileOption.class, opts);
  FileSystemOption fsOption = Options.getOption(FileSystemOption.class, opts);
  StreamOption streamOption = Options.getOption(StreamOption.class, opts);
  KeyClassOption keyClassOption = 
    Options.getOption(KeyClassOption.class, opts);
  ValueClassOption valueClassOption = 
    Options.getOption(ValueClassOption.class, opts);
  MetadataOption metadataOption = 
    Options.getOption(MetadataOption.class, opts);
  CompressionOption compressionTypeOption =
    Options.getOption(CompressionOption.class, opts);
  // check consistency of options
  if ((fileOption == null) == (streamOption == null)) {
    throw new IllegalArgumentException("file or stream must be specified");
  }
  if (fileOption == null && (blockSizeOption != null ||
                             bufferSizeOption != null ||
                             replicationOption != null ||
                             progressOption != null)) {
    throw new IllegalArgumentException("file modifier options not " +
                                       "compatible with stream");
  }

  FSDataOutputStream out;
  boolean ownStream = fileOption != null;
  if (ownStream) {
    Path p = fileOption.getValue();
    FileSystem fs;
    if (fsOption != null) {
      fs = fsOption.getValue();
    } else {
      fs = p.getFileSystem(conf);
    }
    int bufferSize = bufferSizeOption == null ? getBufferSize(conf) :
      bufferSizeOption.getValue();
    short replication = replicationOption == null ? 
      fs.getDefaultReplication(p) :
      (short) replicationOption.getValue();
    long blockSize = blockSizeOption == null ? fs.getDefaultBlockSize(p) :
      blockSizeOption.getValue();
    Progressable progress = progressOption == null ? null :
      progressOption.getValue();
    out = fs.create(p, true, bufferSize, replication, blockSize, progress);
  } else {
    out = streamOption.getValue();
  }
  Class<?> keyClass = keyClassOption == null ?
      Object.class : keyClassOption.getValue();
  Class<?> valueClass = valueClassOption == null ?
      Object.class : valueClassOption.getValue();
  Metadata metadata = metadataOption == null ?
      new Metadata() : metadataOption.getValue();
  this.compress = compressionTypeOption.getValue();
  final CompressionCodec codec = compressionTypeOption.getCodec();
  if (codec != null &&
      (codec instanceof GzipCodec) &&
      !NativeCodeLoader.isNativeCodeLoaded() &&
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop " +
                                       "code!");
  }
  init(conf, out, ownStream, keyClass, valueClass, codec, metadata);
}
 
Example 14
Source File: TestAccessControlList.java    From hadoop with Apache License 2.0
/**
 * Test the netgroups (groups in ACL rules that start with @)
 *
 * This is a manual test because it requires:
 *   - host setup
 *   - native code compiled
 *   - specify the group mapping class
 *
 * Host setup:
 *
 * /etc/nsswitch.conf should have a line like this:
 * netgroup: files
 *
 * /etc/netgroup should be (the whole file):
 * lasVegas (,elvis,)
 * memphis (,elvis,) (,jerryLeeLewis,)
 *
 * To run this test:
 *
 * export JAVA_HOME='path/to/java'
 * ant \
 *   -Dtestcase=TestAccessControlList \
 *   -Dtest.output=yes \
 *   -DTestAccessControlListGroupMapping=$className \
 *   compile-native test
 *
 * where $className is one of the classes that provide group
 * mapping services, i.e. classes that implement
 * GroupMappingServiceProvider interface, at this time:
 *   - org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping
 *   - org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping
 *
 */
@Test
public void testNetgroups() throws Exception {

  if(!NativeCodeLoader.isNativeCodeLoaded()) {
    LOG.info("Not testing netgroups, " +
      "this test only runs when native code is compiled");
    return;
  }

  String groupMappingClassName =
    System.getProperty("TestAccessControlListGroupMapping");

  if(groupMappingClassName == null) {
    LOG.info("Not testing netgroups, no group mapping class specified, " +
      "use -DTestAccessControlListGroupMapping=$className to specify " +
      "group mapping class (must implement GroupMappingServiceProvider " +
      "interface and support netgroups)");
    return;
  }

  LOG.info("Testing netgroups using: " + groupMappingClassName);

  Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING,
    groupMappingClassName);

  Groups groups = Groups.getUserToGroupsMappingService(conf);

  AccessControlList acl;

  // create these ACLs to populate groups cache
  acl = new AccessControlList("ja my"); // plain
  acl = new AccessControlList("sinatra ratpack,@lasVegas"); // netgroup
  acl = new AccessControlList(" somegroup,@someNetgroup"); // no user

  // this ACL will be used for testing ACLs
  acl = new AccessControlList("carlPerkins ratpack,@lasVegas");
  acl.addGroup("@memphis");

  // validate the netgroups before and after refresh to make
  // sure refresh works correctly
  validateNetgroups(groups, acl);
  groups.refresh();
  validateNetgroups(groups, acl);

}
 
Example 15
Source File: NativeLibraryChecker.java    From big-c with Apache License 2.0
/**
 * A tool to test native library availability.
 */
public static void main(String[] args) {
  String usage = "NativeLibraryChecker [-a|-h]\n"
      + "  -a  use -a to check all libraries are available\n"
      + "      by default just check hadoop library (and\n"
      + "      winutils.exe on Windows OS) is available\n"
      + "      exit with error code 1 if check failed\n"
      + "  -h  print this message\n";
  if (args.length > 1 ||
      (args.length == 1 &&
          !(args[0].equals("-a") || args[0].equals("-h")))) {
    System.err.println(usage);
    ExitUtil.terminate(1);
  }
  boolean checkAll = false;
  if (args.length == 1) {
    if (args[0].equals("-h")) {
      System.out.println(usage);
      return;
    }
    checkAll = true;
  }
  Configuration conf = new Configuration();
  boolean nativeHadoopLoaded = NativeCodeLoader.isNativeCodeLoaded();
  boolean zlibLoaded = false;
  boolean snappyLoaded = false;
  // lz4 is linked within libhadoop
  boolean lz4Loaded = nativeHadoopLoaded;
  boolean bzip2Loaded = Bzip2Factory.isNativeBzip2Loaded(conf);
  boolean openSslLoaded = false;
  boolean winutilsExists = false;

  String openSslDetail = "";
  String hadoopLibraryName = "";
  String zlibLibraryName = "";
  String snappyLibraryName = "";
  String lz4LibraryName = "";
  String bzip2LibraryName = "";
  String winutilsPath = null;

  if (nativeHadoopLoaded) {
    hadoopLibraryName = NativeCodeLoader.getLibraryName();
    zlibLoaded = ZlibFactory.isNativeZlibLoaded(conf);
    if (zlibLoaded) {
      zlibLibraryName = ZlibFactory.getLibraryName();
    }
    snappyLoaded = NativeCodeLoader.buildSupportsSnappy() &&
        SnappyCodec.isNativeCodeLoaded();
    if (snappyLoaded && NativeCodeLoader.buildSupportsSnappy()) {
      snappyLibraryName = SnappyCodec.getLibraryName();
    }
    if (OpensslCipher.getLoadingFailureReason() != null) {
      openSslDetail = OpensslCipher.getLoadingFailureReason();
      openSslLoaded = false;
    } else {
      openSslDetail = OpensslCipher.getLibraryName();
      openSslLoaded = true;
    }
    if (lz4Loaded) {
      lz4LibraryName = Lz4Codec.getLibraryName();
    }
    if (bzip2Loaded) {
      bzip2LibraryName = Bzip2Factory.getLibraryName(conf);
    }
  }

  // winutils.exe is required on Windows
  winutilsPath = Shell.getWinUtilsPath();
  if (winutilsPath != null) {
    winutilsExists = true;
  } else {
    winutilsPath = "";
  }

  System.out.println("Native library checking:");
  System.out.printf("hadoop:  %b %s%n", nativeHadoopLoaded, hadoopLibraryName);
  System.out.printf("zlib:    %b %s%n", zlibLoaded, zlibLibraryName);
  System.out.printf("snappy:  %b %s%n", snappyLoaded, snappyLibraryName);
  System.out.printf("lz4:     %b %s%n", lz4Loaded, lz4LibraryName);
  System.out.printf("bzip2:   %b %s%n", bzip2Loaded, bzip2LibraryName);
  System.out.printf("openssl: %b %s%n", openSslLoaded, openSslDetail);
  if (Shell.WINDOWS) {
    System.out.printf("winutils: %b %s%n", winutilsExists, winutilsPath);
  }

  if ((!nativeHadoopLoaded) || (Shell.WINDOWS && (!winutilsExists)) ||
      (checkAll && !(zlibLoaded && snappyLoaded && lz4Loaded && bzip2Loaded))) {
    // return 1 to indicate the check failed
    ExitUtil.terminate(1);
  }
}
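A usage note on the checker above: in a stock Hadoop installation this class is, to the best of my knowledge, what the "hadoop checknative" command runs, and it can also be invoked programmatically, for example from a test. The wrapper class below is a hypothetical sketch; be aware that main() calls ExitUtil.terminate(1) when a required library is missing, which by default exits the JVM.

import org.apache.hadoop.util.NativeLibraryChecker;

public class CheckNativeSketch {
  public static void main(String[] args) {
    // Roughly equivalent to "hadoop checknative -a": prints whether libhadoop,
    // zlib, snappy, lz4, bzip2 and openssl (plus winutils.exe on Windows)
    // are available, and exits with status 1 if any required library is missing.
    NativeLibraryChecker.main(new String[] { "-a" });
  }
}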
 
Example 16
Source File: TestCompressionStorages.java    From incubator-tajo with Apache License 2.0
@Test
public void testLz4CodecCompressionData() throws IOException {
  if (NativeCodeLoader.isNativeCodeLoaded() && Lz4Codec.isNativeCodeLoaded()) {
    storageCompressionTest(storeType, Lz4Codec.class);
  }
}
 
Example 17
Source File: NativeLibraryChecker.java    From hadoop with Apache License 2.0
/**
 * A tool to test native library availability.
 */
public static void main(String[] args) {
  String usage = "NativeLibraryChecker [-a|-h]\n"
      + "  -a  use -a to check all libraries are available\n"
      + "      by default just check hadoop library (and\n"
      + "      winutils.exe on Windows OS) is available\n"
      + "      exit with error code 1 if check failed\n"
      + "  -h  print this message\n";
  if (args.length > 1 ||
      (args.length == 1 &&
          !(args[0].equals("-a") || args[0].equals("-h")))) {
    System.err.println(usage);
    ExitUtil.terminate(1);
  }
  boolean checkAll = false;
  if (args.length == 1) {
    if (args[0].equals("-h")) {
      System.out.println(usage);
      return;
    }
    checkAll = true;
  }
  Configuration conf = new Configuration();
  boolean nativeHadoopLoaded = NativeCodeLoader.isNativeCodeLoaded();
  boolean zlibLoaded = false;
  boolean snappyLoaded = false;
  // lz4 is linked within libhadoop
  boolean lz4Loaded = nativeHadoopLoaded;
  boolean bzip2Loaded = Bzip2Factory.isNativeBzip2Loaded(conf);
  boolean openSslLoaded = false;
  boolean winutilsExists = false;

  String openSslDetail = "";
  String hadoopLibraryName = "";
  String zlibLibraryName = "";
  String snappyLibraryName = "";
  String lz4LibraryName = "";
  String bzip2LibraryName = "";
  String winutilsPath = null;

  if (nativeHadoopLoaded) {
    hadoopLibraryName = NativeCodeLoader.getLibraryName();
    zlibLoaded = ZlibFactory.isNativeZlibLoaded(conf);
    if (zlibLoaded) {
      zlibLibraryName = ZlibFactory.getLibraryName();
    }
    snappyLoaded = NativeCodeLoader.buildSupportsSnappy() &&
        SnappyCodec.isNativeCodeLoaded();
    if (snappyLoaded && NativeCodeLoader.buildSupportsSnappy()) {
      snappyLibraryName = SnappyCodec.getLibraryName();
    }
    if (OpensslCipher.getLoadingFailureReason() != null) {
      openSslDetail = OpensslCipher.getLoadingFailureReason();
      openSslLoaded = false;
    } else {
      openSslDetail = OpensslCipher.getLibraryName();
      openSslLoaded = true;
    }
    if (lz4Loaded) {
      lz4LibraryName = Lz4Codec.getLibraryName();
    }
    if (bzip2Loaded) {
      bzip2LibraryName = Bzip2Factory.getLibraryName(conf);
    }
  }

  // winutils.exe is required on Windows
  winutilsPath = Shell.getWinUtilsPath();
  if (winutilsPath != null) {
    winutilsExists = true;
  } else {
    winutilsPath = "";
  }

  System.out.println("Native library checking:");
  System.out.printf("hadoop:  %b %s%n", nativeHadoopLoaded, hadoopLibraryName);
  System.out.printf("zlib:    %b %s%n", zlibLoaded, zlibLibraryName);
  System.out.printf("snappy:  %b %s%n", snappyLoaded, snappyLibraryName);
  System.out.printf("lz4:     %b %s%n", lz4Loaded, lz4LibraryName);
  System.out.printf("bzip2:   %b %s%n", bzip2Loaded, bzip2LibraryName);
  System.out.printf("openssl: %b %s%n", openSslLoaded, openSslDetail);
  if (Shell.WINDOWS) {
    System.out.printf("winutils: %b %s%n", winutilsExists, winutilsPath);
  }

  if ((!nativeHadoopLoaded) || (Shell.WINDOWS && (!winutilsExists)) ||
      (checkAll && !(zlibLoaded && snappyLoaded && lz4Loaded && bzip2Loaded))) {
    // return 1 to indicate the check failed
    ExitUtil.terminate(1);
  }
}
 
Example 18
Source File: TestCompressionStorages.java    From tajo with Apache License 2.0
@Test
public void testLz4CodecCompressionData() throws IOException {
  if (NativeCodeLoader.isNativeCodeLoaded() && Lz4Codec.isNativeCodeLoaded()) {
    storageCompressionTest(dataFormat, Lz4Codec.class);
  }
}
 
Example 19
Source File: Lz4Codec.java    From big-c with Apache License 2.0
/**
 * Are the native lz4 libraries loaded & initialized?
 *
 * @return true if loaded & initialized, otherwise false
 */
public static boolean isNativeCodeLoaded() {
  return NativeCodeLoader.isNativeCodeLoaded();
}