Java Code Examples for org.apache.hadoop.io.compress.snappy.SnappyDecompressor

The following examples show how to use org.apache.hadoop.io.compress.snappy.SnappyDecompressor. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: hadoop   Source File: SnappyCodec.java    License: Apache License 2.0    (6 votes)
/**
 * Verifies that the native snappy libraries are loaded and initialized.
 *
 * @throws RuntimeException if libhadoop lacks snappy support or either
 *         codec class failed to load its native bindings
 */
public static void checkNativeCodeLoaded() {
    // libhadoop must be present AND compiled with snappy support.
    boolean buildSupportsSnappy = NativeCodeLoader.isNativeCodeLoaded()
        && NativeCodeLoader.buildSupportsSnappy();
    if (!buildSupportsSnappy) {
      throw new RuntimeException("native snappy library not available: " +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    // Both sides of the codec must have bound to the native library.
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyDecompressor has not been loaded.");
    }
}
 
Example 2
Source Project: hadoop   Source File: TestCompressorDecompressor.java    License: Apache License 2.0    (6 votes)
@Test
public void testCompressorDecompressor() {
  // Run three codec pairs against the same 44 KiB generated payload.
  final int dataSize = 44 * 1024;
  byte[] input = generate(dataSize);

  ImmutableSet<CompressionTestStrategy> strategies = ImmutableSet.of(
      CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM);

  try {
    CompressDecompressTester.of(input)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(strategies)
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
 
Example 3
Source Project: hadoop   Source File: TestCompressorDecompressor.java    License: Apache License 2.0    (6 votes)
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  // Oversized internal buffers (1.5x the payload) must still round-trip.
  final int dataSize = 100 * 1024;
  final int oversizedBuffer = dataSize + dataSize / 2;
  byte[] input = generate(dataSize);

  ImmutableSet<CompressionTestStrategy> strategies = ImmutableSet.of(
      CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM);

  try {
    CompressDecompressTester.of(input)
        .withCompressDecompressPair(
            new SnappyCompressor(oversizedBuffer),
            new SnappyDecompressor(oversizedBuffer))
        .withCompressDecompressPair(new Lz4Compressor(dataSize),
            new Lz4Decompressor(dataSize))
        .withTestCases(strategies)
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
  }
}
 
Example 4
Source Project: big-c   Source File: SnappyCodec.java    License: Apache License 2.0    (6 votes)
/**
 * Fails fast unless the native snappy libraries are fully loaded and
 * initialized, raising a {@link RuntimeException} naming the missing piece.
 */
public static void checkNativeCodeLoaded() {
    // Reject a libhadoop that is absent or was built without snappy.
    if (!(NativeCodeLoader.isNativeCodeLoaded()
        && NativeCodeLoader.buildSupportsSnappy())) {
      throw new RuntimeException("native snappy library not available: " +
          "this version of libhadoop was built without " +
          "snappy support.");
    }
    // Compressor and decompressor each bind natively on their own.
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyCompressor has not been loaded.");
    }
    if (!SnappyDecompressor.isNativeCodeLoaded()) {
      throw new RuntimeException("native snappy library not available: " +
          "SnappyDecompressor has not been loaded.");
    }
}
 
Example 5
Source Project: big-c   Source File: TestCompressorDecompressor.java    License: Apache License 2.0    (6 votes)
@Test
public void testCompressorDecompressor() {
  // Same 44 KiB payload exercised through snappy, lz4 and zlib pairs.
  final int payloadBytes = 44 * 1024;
  byte[] payload = generate(payloadBytes);

  ImmutableSet<CompressionTestStrategy> caseSet = ImmutableSet.of(
      CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM);

  try {
    CompressDecompressTester.of(payload)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(caseSet)
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
 
Example 6
Source Project: big-c   Source File: TestCompressorDecompressor.java    License: Apache License 2.0    (6 votes)
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  // Buffers sized 1.5x the payload must not break round-tripping.
  final int payloadBytes = 100 * 1024;
  final int largeBuffer = payloadBytes + payloadBytes / 2;
  byte[] payload = generate(payloadBytes);

  ImmutableSet<CompressionTestStrategy> caseSet = ImmutableSet.of(
      CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
      CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM);

  try {
    CompressDecompressTester.of(payload)
        .withCompressDecompressPair(
            new SnappyCompressor(largeBuffer),
            new SnappyDecompressor(largeBuffer))
        .withCompressDecompressPair(new Lz4Compressor(payloadBytes),
            new Lz4Decompressor(payloadBytes))
        .withTestCases(caseSet)
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
  }
}
 
Example 7
Source Project: hadoop   Source File: SnappyCodec.java    License: Apache License 2.0    (5 votes)
/**
 * Creates a new {@link Decompressor} for use by this {@link CompressionCodec}.
 *
 * @return a freshly constructed snappy decompressor
 * @throws RuntimeException if the native snappy library is unavailable
 */
@Override
public Decompressor createDecompressor() {
  checkNativeCodeLoaded();
  // Buffer size is configurable per job; fall back to the codec default.
  final int directBufferSize = conf.getInt(
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(directBufferSize);
}
 
Example 8
Source Project: big-c   Source File: SnappyCodec.java    License: Apache License 2.0    (5 votes)
/**
 * Builds a {@link Decompressor} instance for this {@link CompressionCodec},
 * sized from the job configuration.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  // Native availability is a hard requirement; this throws otherwise.
  checkNativeCodeLoaded();
  final int bufSize = conf.getInt(
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufSize);
}
 
Example 9
Source Project: RDFS   Source File: SnappyCodec.java    License: Apache License 2.0    (5 votes)
/**
 * Reports the {@link Decompressor} implementation this
 * {@link CompressionCodec} requires.
 *
 * @return the type of decompressor needed by this codec.
 * @throws RuntimeException if the native snappy library is unavailable
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  if (isNativeSnappyLoaded(conf)) {
    return SnappyDecompressor.class;
  }
  throw new RuntimeException("native snappy library not available");
}
 
Example 10
Source Project: RDFS   Source File: SnappyCodec.java    License: Apache License 2.0    (5 votes)
/**
 * Constructs a new {@link Decompressor} for this {@link CompressionCodec},
 * with its buffer size taken from configuration.
 *
 * @return a new decompressor for use by this codec
 * @throws RuntimeException if the native snappy library is unavailable
 */
@Override
public Decompressor createDecompressor() {
  if (!isNativeSnappyLoaded(conf)) {
    throw new RuntimeException("native snappy library not available");
  }
  final int directBufferSize = conf.getInt(
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(directBufferSize);
}
 
Example 11
Source Project: hadoop   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
/**
 * Reports whether both halves of the snappy codec have loaded their
 * native bindings.
 *
 * @return true only when compressor and decompressor natives are loaded
 */
public static boolean isNativeCodeLoaded() {
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    return false;
  }
  return SnappyDecompressor.isNativeCodeLoaded();
}
 
Example 12
Source Project: hadoop   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
/**
 * Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 * @throws RuntimeException if the native snappy library is not available
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  // Fail fast before handing out a type the caller could not instantiate.
  checkNativeCodeLoaded();
  return SnappyDecompressor.class;
}
 
Example 13
Source Project: dremio-oss   Source File: ScanWithHiveReader.java    License: Apache License 2.0    (4 votes)
/**
 * Selects the reader implementation for the given Hive input format.
 * Falls back to {@code HiveDefaultReader} for unknown or absent formats;
 * for ORC with zero-copy enabled, verifies the required native libraries.
 */
private static Class<? extends HiveAbstractReader> getNativeReaderClass(Optional<String> formatName,
                                                                        OptionManager options, Configuration configuration, boolean mixedSchema, boolean isTransactional) {
  // No format name at all: use the generic reader.
  if (!formatName.isPresent()) {
    return HiveDefaultReader.class;
  }

  final Class<? extends HiveAbstractReader> mapped = readerMap.get(formatName.get());
  if (mapped == HiveOrcReader.class) {
    final boolean zeroCopyEnabled = OrcConf.USE_ZEROCOPY.getBoolean(configuration);
    if (zeroCopyEnabled) {
      // Zero-copy ORC reads depend on the hadoop, snappy and zlib natives.
      if (!NativeCodeLoader.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Hadoop native library is required for Hive ORC data, but is not loaded").build(logger);
      }
      // TODO: find a way to access compression codec information?
      if (!SnappyDecompressor.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Snappy native library is required for Hive ORC data, but is not loaded").build(logger);
      }
      if (!isNativeZlibLoaded) {
        throw UserException.dataReadError()
            .message("Zlib native library is required for Hive ORC data, but is not loaded").build(logger);
      }
    }

    // Vectorized ORC only when the schema is uniform across partitions and
    // the table is not transactional.
    final boolean vectorize = new HiveSettings(options).vectorizeOrcReaders();
    if (vectorize && !mixedSchema && !isTransactional) {
      return HiveORCVectorizedReader.class;
    }
  }

  return mapped == null ? HiveDefaultReader.class : mapped;
}
 
Example 14
Source Project: dremio-oss   Source File: ScanWithHiveReader.java    License: Apache License 2.0    (4 votes)
/**
 * Resolves which {@code HiveAbstractReader} subclass should scan data of the
 * given format. Unknown formats map to {@code HiveDefaultReader}; ORC with
 * zero-copy requires native hadoop/snappy/zlib support and may upgrade to the
 * vectorized reader.
 */
private static Class<? extends HiveAbstractReader> getNativeReaderClass(Optional<String> formatName,
    OptionManager options, Configuration configuration, boolean mixedSchema, boolean isTransactional) {
  if (!formatName.isPresent()) {
    return HiveDefaultReader.class;
  }

  final Class<? extends HiveAbstractReader> candidate = readerMap.get(formatName.get());
  if (candidate == HiveOrcReader.class) {
    if (OrcConf.USE_ZEROCOPY.getBoolean(configuration)) {
      // Each native dependency is reported individually so the failure
      // message names the exact missing library.
      if (!NativeCodeLoader.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Hadoop native library is required for Hive ORC data, but is not loaded").build(logger);
      }
      // TODO: find a way to access compression codec information?
      if (!SnappyDecompressor.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Snappy native library is required for Hive ORC data, but is not loaded").build(logger);
      }
      if (!isNativeZlibLoaded) {
        throw UserException.dataReadError()
            .message("Zlib native library is required for Hive ORC data, but is not loaded").build(logger);
      }
    }

    if (new HiveSettings(options).vectorizeOrcReaders() && !mixedSchema && !isTransactional) {
      // Schema changes between table and partitions, or transactional
      // tables, rule out the vectorized ORC reader.
      return HiveORCVectorizedReader.class;
    }
  }

  if (candidate == null) {
    return HiveDefaultReader.class;
  }
  return candidate;
}
 
Example 15
Source Project: big-c   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
/**
 * Checks that the native snappy bindings are available for both the
 * compressor and the decompressor.
 *
 * @return true when both native code paths are loaded
 */
public static boolean isNativeCodeLoaded() {
  final boolean compressorReady = SnappyCompressor.isNativeCodeLoaded();
  final boolean decompressorReady = SnappyDecompressor.isNativeCodeLoaded();
  return compressorReady && decompressorReady;
}
 
Example 16
Source Project: big-c   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
/**
 * Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 * @throws RuntimeException if the native snappy library is not available
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  // Verify native availability up front rather than at instantiation time.
  checkNativeCodeLoaded();
  return SnappyDecompressor.class;
}
 
Example 17
Source Project: datacollector   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
// Wires the snappy compressor/decompressor pair and the ".snappy" file
// extension into the superclass; presumably the parent is a generic codec
// base class — TODO confirm against its declaration.
public SnappyCodec() {
  super(SnappyCompressor.class, SnappyDecompressor.class, ".snappy");
}
 
Example 18
Source Project: datacollector   Source File: SnappyCodec.java    License: Apache License 2.0    (4 votes)
/**
 * Creates a fresh snappy decompressor sized by this codec's configured
 * buffer size.
 *
 * @return a new {@code SnappyDecompressor}
 */
@Override
public Decompressor createDecompressor() {
  final int bufferSize = getBufferSize();
  return new SnappyDecompressor(bufferSize);
}