Java Code Examples for org.apache.hadoop.io.compress.CodecPool#returnDecompressor()
The following examples show how to use org.apache.hadoop.io.compress.CodecPool#returnDecompressor().
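All of the examples share the same borrow-and-return discipline: obtain a pooled Decompressor with CodecPool#getDecompressor(), use it to wrap the raw input stream, and hand it back in a finally block so the pool can reuse it. The sketch below is a minimal illustration of that pattern, not code from any of the projects listed; the GzipCodec choice, the class name, and the readCompressed method are assumptions made for this example.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecPoolUsageSketch {

  // Hypothetical helper: reads a gzip-compressed stream using a pooled
  // decompressor and always returns the decompressor to the pool.
  public static void readCompressed(InputStream rawIn) throws IOException {
    CompressionCodec codec =
        ReflectionUtils.newInstance(GzipCodec.class, new Configuration());
    // Borrow a decompressor from the pool (a new one is created if the
    // pool has none available).
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try (InputStream in = codec.createInputStream(rawIn, decompressor)) {
      byte[] buf = new byte[4096];
      while (in.read(buf) != -1) {
        // ... process decompressed bytes ...
      }
    } finally {
      // Return the instance so later callers can reuse it, mirroring the
      // null checks used throughout the examples below.
      if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }
}

Note how every example below, whatever the stream type, performs the return in a finally block or a teardown method and usually nulls the field afterwards, so that a second call to close() cannot return the same instance twice.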
Example 1
Source File: IFile.java From hadoop with Apache License 2.0
public void close() throws IOException {
  // Close the underlying stream
  in.close();

  // Release the buffer
  dataIn = null;
  buffer = null;
  if (readRecordsCounter != null) {
    readRecordsCounter.increment(numRecordsRead);
  }

  // Return the decompressor
  if (decompressor != null) {
    decompressor.reset();
    CodecPool.returnDecompressor(decompressor);
    decompressor = null;
  }
}
Example 2
Source File: JSONFileRecordReader.java From ojai with Apache License 2.0
@Override
public void close() throws IOException {
  try {
    documentStream.close();
  } catch (Exception e) {
    throw new IOException(
        "Error closing document Stream in JsonFileRecordReader");
  }
  try {
    if (inputStream != null) {
      inputStream.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
Example 3
Source File: AbstractSpreadSheetDocumentRecordReader.java From hadoopoffice with Apache License 2.0
@Override
public synchronized void close() throws IOException {
  try {
    if (officeReader != null) {
      officeReader.close();
    }
  } finally {
    if (decompressor != null) { // return this decompressor
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
    // return decompressor of linked workbooks
    if (this.currentHFR != null) {
      currentHFR.close();
    }
  }
  // do not close the filesystem! will cause exceptions in Spark
}
Example 4
Source File: Compression.java From hadoop with Apache License 2.0
public void returnDecompressor(Decompressor decompressor) {
  if (decompressor != null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Returned a decompressor: " + decompressor.hashCode());
    }
    CodecPool.returnDecompressor(decompressor);
  }
}
Example 5
Source File: HadoopUtils.java From incubator-hivemall with Apache License 2.0
@Override
public void close() throws IOException {
  super.close();
  if (decompressor != null) {
    CodecPool.returnDecompressor(decompressor);
    this.decompressor = null;
  }
}
Example 6
Source File: HadoopLogsAnalyzer.java From big-c with Apache License 2.0
private boolean setNextDirectoryInputStream() throws FileNotFoundException,
    IOException {
  if (input != null) {
    input.close();
    LOG.info("File closed: " + currentFileName);
    input = null;
  }
  if (inputCodec != null) {
    CodecPool.returnDecompressor(inputDecompressor);
    inputDecompressor = null;
    inputCodec = null;
  }
  ++inputDirectoryCursor;
  if (inputDirectoryCursor >= inputDirectoryFiles.length) {
    return false;
  }
  fileFirstLine = true;
  currentFileName = inputDirectoryFiles[inputDirectoryCursor];
  LOG.info("\nOpening file " + currentFileName
      + " *************************** .");
  LOG.info("This file, " + (inputDirectoryCursor + 1) + "/"
      + inputDirectoryFiles.length + ", starts with line " + lineNumber + ".");
  input = maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));
  return true;
}
Example 7
Source File: MainframeVBRecordReader.java From Cobol-to-Hive with Apache License 2.0
@Override
public synchronized void close() throws IOException {
  try {
    if (inputStream != null) {
      inputStream.close();
      inputStream = null;
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
Example 8
Source File: FixedLengthRecordReader.java From big-c with Apache License 2.0
@Override
public synchronized void close() throws IOException {
  try {
    if (inputStream != null) {
      inputStream.close();
      inputStream = null;
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
Example 9
Source File: MapReduceBitcoinBlockIntegrationTest.java From hadoopcryptoledger with Apache License 2.0
@AfterEach
public void tearDown() throws IOException {
  // Remove input and output directory
  dfsCluster.getFileSystem().delete(DFS_INPUT_DIR, true);
  dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR, true);
  // close any open decompressor
  for (Decompressor currentDecompressor : this.openDecompressors) {
    if (currentDecompressor != null) {
      CodecPool.returnDecompressor(currentDecompressor);
    }
  }
}
Example 10
Source File: SparkBitcoinBlockCounterSparkMasterIntegrationTest.java From hadoopcryptoledger with Apache License 2.0
@AfterEach
public void tearDown() throws IOException {
  // Remove input and output directory
  dfsCluster.getFileSystem().delete(DFS_INPUT_DIR, true);
  dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR, true);
  // close any open decompressor
  for (Decompressor currentDecompressor : this.openDecompressors) {
    if (currentDecompressor != null) {
      CodecPool.returnDecompressor(currentDecompressor);
    }
  }
}
Example 11
Source File: HadoopDecompressor.java From presto with Apache License 2.0
@Override
public void destroy() {
  if (destroyed) {
    return;
  }
  destroyed = true;
  CodecPool.returnDecompressor(decompressor);
}
Example 12
Source File: IFile.java From incubator-tez with Apache License 2.0
/**
 * Read entire ifile content to memory.
 *
 * @param buffer
 * @param in
 * @param compressedLength
 * @param codec
 * @param ifileReadAhead
 * @param ifileReadAheadLength
 * @throws IOException
 */
public static void readToMemory(byte[] buffer, InputStream in,
    int compressedLength, CompressionCodec codec, boolean ifileReadAhead,
    int ifileReadAheadLength) throws IOException {
  boolean isCompressed = IFile.Reader.isCompressedFlagEnabled(in);
  IFileInputStream checksumIn = new IFileInputStream(in,
      compressedLength - IFile.HEADER.length, ifileReadAhead,
      ifileReadAheadLength);
  in = checksumIn;
  Decompressor decompressor = null;
  if (isCompressed && codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      decompressor.reset();
      in = codec.createInputStream(checksumIn, decompressor);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      in = checksumIn;
    }
  }
  try {
    IOUtils.readFully(in, buffer, 0, buffer.length - IFile.HEADER.length);
  } catch (IOException ioe) {
    IOUtils.cleanup(LOG, in);
    throw ioe;
  } finally {
    if (decompressor != null) {
      decompressor.reset();
      CodecPool.returnDecompressor(decompressor);
    }
  }
}
Example 13
Source File: Excel2007FileRecordReader.java From components with Apache License 2.0
private void closeResource() throws IOException {
  try {
    if (stream_workbook != null) {
      stream_workbook.close();
      stream_workbook = null;
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
Example 14
Source File: LineRecordReader.java From hadoop with Apache License 2.0
public synchronized void close() throws IOException {
  try {
    if (in != null) {
      in.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
Example 15
Source File: HadoopLogsAnalyzer.java From RDFS with Apache License 2.0
private boolean setNextDirectoryInputStream() throws FileNotFoundException,
    IOException {
  if (input != null) {
    input.close();
    LOG.info("File closed: " + currentFileName);
    input = null;
  }
  if (inputCodec != null) {
    CodecPool.returnDecompressor(inputDecompressor);
    inputDecompressor = null;
    inputCodec = null;
  }
  ++inputDirectoryCursor;
  if (inputDirectoryCursor >= inputDirectoryFiles.length) {
    return false;
  }
  fileFirstLine = true;
  currentFileName = inputDirectoryFiles[inputDirectoryCursor];
  LOG.info("\nOpening file " + currentFileName
      + " *************************** .");
  LOG.info("This file, " + (inputDirectoryCursor + 1) + "/"
      + inputDirectoryFiles.length + ", starts with line " + lineNumber + ".");
  input = maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));
  return input != null;
}
Example 16
Source File: ChunkRecordReader.java From pxf with Apache License 2.0
/**
 * Closes the input stream.
 */
@Override
public synchronized void close() throws IOException {
  try {
    if (in != null) {
      in.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
    }
  }
}
Example 17
Source File: DelimitedTextFileReaderWriterFactory.java From secor with Apache License 2.0
@Override
public void close() throws IOException {
  this.mReader.close();
  CodecPool.returnDecompressor(mDecompressor);
  mDecompressor = null;
}
Example 18
Source File: Compression.java From RDFS with Apache License 2.0
public void returnDecompressor(Decompressor decompressor) {
  if (decompressor != null) {
    LOG.debug("Returned a decompressor: " + decompressor.hashCode());
    CodecPool.returnDecompressor(decompressor);
  }
}
Example 19
Source File: IFile.java From tez with Apache License 2.0
/**
 * Read entire ifile content to memory.
 *
 * @param buffer
 * @param in
 * @param compressedLength
 * @param codec
 * @param ifileReadAhead
 * @param ifileReadAheadLength
 * @throws IOException
 */
public static void readToMemory(byte[] buffer, InputStream in,
    int compressedLength, CompressionCodec codec, boolean ifileReadAhead,
    int ifileReadAheadLength) throws IOException {
  boolean isCompressed = IFile.Reader.isCompressedFlagEnabled(in);
  IFileInputStream checksumIn = new IFileInputStream(in,
      compressedLength - IFile.HEADER.length, ifileReadAhead,
      ifileReadAheadLength);
  in = checksumIn;
  Decompressor decompressor = null;
  if (isCompressed && codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      decompressor.reset();
      in = getDecompressedInputStreamWithBufferSize(codec, checksumIn,
          decompressor, compressedLength);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      in = checksumIn;
    }
  }
  try {
    IOUtils.readFully(in, buffer, 0, buffer.length - IFile.HEADER.length);
    /*
     * We've gotten the amount of data we were expecting. Verify the
     * decompressor has nothing more to offer. This action also forces the
     * decompressor to read any trailing bytes that weren't critical for
     * decompression, which is necessary to keep the stream in sync.
     */
    if (in.read() >= 0) {
      throw new IOException("Unexpected extra bytes from input stream");
    }
  } catch (IOException ioe) {
    if (in != null) {
      try {
        in.close();
      } catch (IOException e) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Exception in closing " + in, e);
        }
      }
    }
    throw ioe;
  } finally {
    if (decompressor != null) {
      decompressor.reset();
      CodecPool.returnDecompressor(decompressor);
    }
  }
}