Java Code Examples for org.apache.hadoop.io.compress.Decompressor#reset()

The following examples show how to use org.apache.hadoop.io.compress.Decompressor#reset(). Each example is taken from an open-source project; the source file and project it comes from are noted above the code.
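All of the examples share the same life-cycle: borrow a Decompressor from CodecPool, call reset() so that no state from a previous borrower leaks into the new stream, wrap an InputStream with it, and hand it back to the pool when finished. The minimal sketch below is not taken from any of the projects that follow; the input file name and the choice of DefaultCodec are placeholders.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class DecompressorResetExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // DefaultCodec (zlib/deflate) is used purely as a placeholder codec.
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);

    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try {
      if (decompressor != null) {
        // Defensive reset, mirroring the getDecompressor() examples below.
        decompressor.reset();
      }
      try (InputStream raw = Files.newInputStream(Paths.get("data.deflate"));
           InputStream in = decompressor != null
               ? codec.createInputStream(raw, decompressor)
               : codec.createInputStream(raw)) {
        byte[] buf = new byte[4096];
        int n;
        while ((n = in.read(buf)) != -1) {
          // process decompressed bytes...
        }
      }
    } finally {
      if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }
}

Note that CodecPool.returnDecompressor() itself resets the instance and refuses to pool classes annotated with @DoNotPool (see Examples 6 and 7), which is why the explicit reset() calls in the examples are best read as defensive.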
Example 1
Source File: Compression.java    From hadoop with Apache License 2.0
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Got a decompressor: " + decompressor.hashCode());
        }
      }
      /**
       * The following statement is necessary to work around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example 2
Source File: Compression.java    From big-c with Apache License 2.0
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Got a decompressor: " + decompressor.hashCode());
        }
      }
      /**
       * The following statement is necessary to work around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example 3
Source File: Compression.java    From hbase with Apache License 2.0
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example 4
Source File: Compression.java    From RDFS with Apache License 2.0
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        LOG.debug("Got a decompressor: " + decompressor.hashCode());
      }
      /**
       * The following statement is necessary to work around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example 5
Source File: Compression.java    From hadoop-gpu with Apache License 2.0
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        LOG.debug("Got a decompressor: " + decompressor.hashCode());
      }
      /**
       * The following statement is necessary to work around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
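Examples 1 through 5 warn when a pooled decompressor already reports finished() and then call reset() defensively. The short, self-contained sketch below is not taken from any of those projects; it decompresses a small in-memory payload and simply prints the finished() flag before and after reset(), since whether the flag is already set at that point depends on the codec.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class FinishedStateDemo {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);

    // Compress a small payload in memory so the demo is self-contained.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (OutputStream out = codec.createOutputStream(compressed)) {
      out.write("hello decompressor".getBytes(StandardCharsets.UTF_8));
    }

    Decompressor decompressor = codec.createDecompressor();
    try (CompressionInputStream in = codec.createInputStream(
        new ByteArrayInputStream(compressed.toByteArray()), decompressor)) {
      ByteArrayOutputStream plain = new ByteArrayOutputStream();
      IOUtils.copyBytes(in, plain, 4096, false);
      System.out.println("decompressed: " + plain.toString(StandardCharsets.UTF_8.name()));
    }

    // reset() clears any end-of-stream state so the same instance can be reused.
    System.out.println("finished() before reset: " + decompressor.finished());
    decompressor.reset();
    System.out.println("finished() after reset:  " + decompressor.finished());
    decompressor.end();  // release any native resources held by the decompressor
  }
}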
 
Example 6
Source File: CodecPool.java    From tajo with Apache License 2.0
/**
 * Return the {@link Decompressor} to the pool.
 *
 * @param decompressor
 *          the <code>Decompressor</code> to be returned to the pool
 */
public static void returnDecompressor(Decompressor decompressor) {
  if (decompressor == null) {
    return;
  }
  // if the decompressor can't be reused, don't pool it.
  if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) {
    return;
  }
  decompressor.reset();
  payback(DECOMPRESSOR_POOL, decompressor);
}
 
Example 7
Source File: CodecPool.java    From incubator-tajo with Apache License 2.0
/**
 * Return the {@link Decompressor} to the pool.
 *
 * @param decompressor
 *          the <code>Decompressor</code> to be returned to the pool
 */
public static void returnDecompressor(Decompressor decompressor) {
  if (decompressor == null) {
    return;
  }
  // if the decompressor can't be reused, don't pool it.
  if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) {
    return;
  }
  decompressor.reset();
  payback(DECOMPRESSOR_POOL, decompressor);
}
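The DoNotPool check above exists because some decompressors cannot safely be reused even after reset(), so pooling them would be unsafe. The sketch below is my own and assumes only the public Hadoop annotation org.apache.hadoop.io.compress.DoNotPool; it performs the same class-level test the pool does, on a freshly created gzip decompressor (whether that class carries the annotation depends on whether native zlib is available at runtime).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DoNotPool;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class DoNotPoolCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    Decompressor decompressor = codec.createDecompressor();

    // Same test returnDecompressor() performs: annotated classes are never pooled.
    boolean poolable = !decompressor.getClass().isAnnotationPresent(DoNotPool.class);
    System.out.println(decompressor.getClass().getName()
        + (poolable ? " can be pooled and reset()" : " is annotated @DoNotPool"));
  }
}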
 
Example 8
Source File: RasterWritable.java    From mrgeo with Apache License 2.0
public static MrGeoRaster toMrGeoRaster(RasterWritable writable,
    CompressionCodec codec, Decompressor decompressor) throws IOException
{
  decompressor.reset();
  ByteArrayInputStream bis = new ByteArrayInputStream(writable.bytes, 0, writable.getSize());
  CompressionInputStream gis = codec.createInputStream(bis, decompressor);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  IOUtils.copyBytes(gis, baos, 1024 * 1024 * 2, true);

  return toMrGeoRaster(new RasterWritable(baos.toByteArray()));
}
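Here the decompressor comes in as a parameter, so the method only resets it before use; borrowing it from and returning it to a pool is the caller's responsibility. The generic helper below restates the same reset-then-drain idea using only Hadoop types; the class and method names are my own, and it is not MrGeo code.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

public final class DecompressToBytes {
  /** Decompresses {@code compressed} using a caller-supplied decompressor. */
  public static byte[] decompress(byte[] compressed, CompressionCodec codec,
      Decompressor decompressor) throws IOException {
    // The caller owns the decompressor (for example, borrowed from CodecPool),
    // so this helper only resets it, exactly as toMrGeoRaster() does above.
    decompressor.reset();
    InputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed), decompressor);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // close = true: copyBytes closes both streams once the copy completes.
    IOUtils.copyBytes(in, out, 4096, true);
    return out.toByteArray();
  }
}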
 
Example 9
Source File: IFile.java    From incubator-tez with Apache License 2.0
/**
 * Read entire ifile content to memory.
 *
 * @param buffer destination buffer for the ifile contents
 * @param in stream to read the ifile data from
 * @param compressedLength length of the (possibly compressed) ifile data
 * @param codec codec used to decompress the data; may be null
 * @param ifileReadAhead whether read-ahead is enabled for the ifile stream
 * @param ifileReadAheadLength number of bytes to read ahead when enabled
 * @throws IOException if the stream cannot be read or decompressed
 */
public static void readToMemory(byte[] buffer, InputStream in, int compressedLength,
    CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength)
    throws IOException {
  boolean isCompressed = IFile.Reader.isCompressedFlagEnabled(in);
  IFileInputStream checksumIn = new IFileInputStream(in,
      compressedLength - IFile.HEADER.length, ifileReadAhead,
      ifileReadAheadLength);
  in = checksumIn;
  Decompressor decompressor = null;
  if (isCompressed && codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      decompressor.reset();
      in = codec.createInputStream(checksumIn, decompressor);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      in = checksumIn;
    }
  }
  try {
    IOUtils.readFully(in, buffer, 0, buffer.length - IFile.HEADER.length);
  } catch (IOException ioe) {
    IOUtils.cleanup(LOG, in);
    throw ioe;
  } finally {
    if (decompressor != null) {
      decompressor.reset();
      CodecPool.returnDecompressor(decompressor);
    }
  }
}
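Stripped of the IFile-specific bookkeeping, this example reduces to: borrow from the pool, reset before use, fall back to the raw stream if the pool returns null, and reset again in a finally block before returning the instance. The generic helper below is a sketch of that shape, not Tez code; the class and method names are my own.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

public final class CompressedReads {
  /** Fills {@code buffer} with decompressed bytes from {@code in}. */
  public static void readToBuffer(byte[] buffer, InputStream in, CompressionCodec codec)
      throws IOException {
    Decompressor decompressor = null;
    try {
      if (codec != null) {
        decompressor = CodecPool.getDecompressor(codec);
        if (decompressor != null) {
          // Make sure no state from a previous borrower leaks into this read.
          decompressor.reset();
          in = codec.createInputStream(in, decompressor);
        }
        // If the pool returned null, the raw stream is read as-is, mirroring
        // the warning-and-fall-back behaviour in the example above.
      }
      IOUtils.readFully(in, buffer, 0, buffer.length);
    } finally {
      if (decompressor != null) {
        // Reset again before handing the instance back, as the Tez code does;
        // CodecPool.returnDecompressor() resets it once more regardless.
        decompressor.reset();
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }
}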
 
Example 10
Source File: DataSegmentReader.java    From RDFS with Apache License 2.0
/**
 * May throw EOFException if InputStream does not have a
 * complete data segment.
 *
 * NOTE: This class holds a reference to the Decompressor in
 * the decompressorCache until the return value of
 * getInputStream() is closed.
 *
 * @param decompressorCache reusable decompressors keyed by codec name; may be null
 * @throws EmptyDataSegmentException  if there is nothing to read.
 * @throws EOFException  if the data segment is not complete.
 */
DataSegmentReader(DataInputStream in, Configuration conf,
    HashMap<Text, Decompressor> decompressorCache)
    throws EmptyDataSegmentException, EOFException,
    ClassNotFoundException, IOException {

  // Read from DataInputStream
  // 1. Read length
  int length = 0;
  try {
    length = in.readInt();
  } catch (EOFException e) {
    throw new EmptyDataSegmentException();
  }

  // 2. Read codec
  int codecNameUTF8Length = in.readShort();
  byte[] codecNameUTF8 = new byte[codecNameUTF8Length];
  in.readFully(codecNameUTF8);
  Text codecNameText = new Text(codecNameUTF8);
  // 3. read CRC32 (only present when uncompressed)
  boolean hasCrc32 = (codecNameUTF8Length == 0);
  long crc32Value = 0;
  if (hasCrc32) {
    crc32Value = in.readLong();
  }
  // 4. read data
  byte[] storedData
      = new byte[length - (hasCrc32 ? 8 : 0)/*crc32*/
                 - 2/*codec length*/ - codecNameUTF8Length];
  in.readFully(storedData);

  // Verify the checksum
  if (hasCrc32) {
    CRC32 crc32 = new CRC32();
    crc32.update(storedData);
    if (crc32.getValue() != crc32Value) {
      throw new CorruptedDataException("Corrupted data segment with length " + length
          + " crc32 expected " + crc32Value + " but got " + crc32.getValue());
    }
  }

  // Uncompress the data if needed
  if (codecNameUTF8Length == 0) {
    // no compression
    uncompressedData = new ByteArrayInputStream(storedData);
  } else {
    CompressionCodec codec = getCodecFromName(codecNameText, conf);
    Decompressor decompressor = null;
    if (decompressorCache != null) {
      // Create decompressor and add to cache if needed.
      decompressor = decompressorCache.get(codecNameText);
      if (decompressor == null) {
        decompressor = codec.createDecompressor();
      } else {
        decompressor.reset();
      }
    }
    if (decompressor == null) {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData));
    } else {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData),
          decompressor);
    }
  }
}
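Instead of CodecPool, this reader keeps its own per-codec cache, which is why it must call reset() itself whenever it reuses a cached instance. The small class below isolates that caching idiom; it is illustrative only, and since the original never shows where entries are inserted into the cache, the put() call here is an assumption.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

public class DecompressorCache {
  private final Map<Text, Decompressor> cache = new HashMap<>();

  /**
   * Returns a decompressor for the given codec, creating one on the first
   * request and resetting it on every subsequent reuse. May return null if the
   * codec does not provide a Decompressor implementation.
   */
  public Decompressor getFor(Text codecName, CompressionCodec codec) {
    Decompressor decompressor = cache.get(codecName);
    if (decompressor == null) {
      decompressor = codec.createDecompressor();
      if (decompressor != null) {
        cache.put(codecName, decompressor);  // assumption: cache newly created instances
      }
    } else {
      // Reusing a cached instance: clear any state left by the previous stream.
      decompressor.reset();
    }
    return decompressor;
  }
}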
 
Example 11
Source File: IFile.java    From tez with Apache License 2.0
/**
 * Read entire ifile content to memory.
 *
 * @param buffer destination buffer for the ifile contents
 * @param in stream to read the ifile data from
 * @param compressedLength length of the (possibly compressed) ifile data
 * @param codec codec used to decompress the data; may be null
 * @param ifileReadAhead whether read-ahead is enabled for the ifile stream
 * @param ifileReadAheadLength number of bytes to read ahead when enabled
 * @throws IOException if the stream cannot be read or decompressed
 */
public static void readToMemory(byte[] buffer, InputStream in, int compressedLength,
    CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength)
    throws IOException {
  boolean isCompressed = IFile.Reader.isCompressedFlagEnabled(in);
  IFileInputStream checksumIn = new IFileInputStream(in,
      compressedLength - IFile.HEADER.length, ifileReadAhead,
      ifileReadAheadLength);
  in = checksumIn;
  Decompressor decompressor = null;
  if (isCompressed && codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      decompressor.reset();
      in = getDecompressedInputStreamWithBufferSize(codec, checksumIn, decompressor, compressedLength);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      in = checksumIn;
    }
  }
  try {
    IOUtils.readFully(in, buffer, 0, buffer.length - IFile.HEADER.length);
    /*
     * We've gotten the amount of data we were expecting. Verify the
     * decompressor has nothing more to offer. This action also forces the
     * decompressor to read any trailing bytes that weren't critical for
     * decompression, which is necessary to keep the stream in sync.
     */
    if (in.read() >= 0) {
      throw new IOException("Unexpected extra bytes from input stream");
    }
  } catch (IOException ioe) {
    if (in != null) {
      try {
        in.close();
      } catch (IOException e) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Exception in closing " + in, e);
        }
      }
    }
    throw ioe;
  } finally {
    if (decompressor != null) {
      decompressor.reset();
      CodecPool.returnDecompressor(decompressor);
    }
  }
}