Java Code Examples for org.apache.hadoop.io.compress.CodecPool

The following examples show how to use org.apache.hadoop.io.compress.CodecPool, a global pool of reusable Compressor and Decompressor instances that saves codecs from repeatedly allocating (possibly native) resources. The examples are extracted from open source projects; the source project, author, file, and license are listed above each one.
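Almost every snippet below follows the same borrow/return discipline: obtain a Compressor or Decompressor from the pool, use it to wrap a raw stream, and hand it back in a finally block once the stream is done. Here is a minimal, self-contained sketch of that pattern; the class name CodecPoolUsage and the method countBytes are invented for illustration, and the sketch assumes the codec can be resolved from the file extension via CompressionCodecFactory.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class CodecPoolUsage {

  /** Counts the uncompressed bytes of a possibly compressed file. */
  public static long countBytes(Configuration conf, Path path) throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    // null if no registered codec matches the file extension
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
    Decompressor decompressor = null;
    try (InputStream raw = fs.open(path)) {
      InputStream in = raw;
      if (codec != null) {
        // borrow from the pool; may be null for codecs that need no decompressor
        decompressor = CodecPool.getDecompressor(codec);
        in = (decompressor != null)
            ? codec.createInputStream(raw, decompressor)
            : codec.createInputStream(raw);
      }
      long total = 0;
      byte[] buf = new byte[8192];
      for (int n; (n = in.read(buf)) > 0; ) {
        total += n;
      }
      return total;
    } finally {
      if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor); // always hand it back
      }
    }
  }
}

Returning the instance in a finally block matters because pooled compressors and decompressors may hold native resources; recycling them through the pool is what makes them cheap to obtain the next time.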
Example #1
Source Project: ojai   Author: ojai   File: JSONFileRecordReader.java    License: Apache License 2.0
@Override
public void close() throws IOException {
  try {
    documentStream.close();
  } catch (Exception e) {
    throw new IOException(
        "Error closing document stream in JSONFileRecordReader", e);
  }
  try {
    if (inputStream != null) {
      inputStream.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
 
Example #2
Source Project: gemfirexd-oss   Author: gemxd   File: SequenceFile.java    License: Apache License 2.0
/** Close the file. */
@Override
public synchronized void close() throws IOException {
  // Return the decompressors to the pool
  CodecPool.returnDecompressor(keyLenDecompressor);
  CodecPool.returnDecompressor(keyDecompressor);
  CodecPool.returnDecompressor(valLenDecompressor);
  CodecPool.returnDecompressor(valDecompressor);
  keyLenDecompressor = keyDecompressor = null;
  valLenDecompressor = valDecompressor = null;
  
  if (keyDeserializer != null) {
    keyDeserializer.close();
  }
  if (valDeserializer != null) {
    valDeserializer.close();
  }
  
  // Close the input-stream
  in.close();
}
 
Example #3
Source Project: hadoop   Author: naver   File: IFile.java    License: Apache License 2.0
/**
 * Construct an IFile Reader.
 * 
 * @param conf Configuration File 
 * @param in   The input stream
 * @param length Length of the data in the stream, including the checksum
 *               bytes.
 * @param codec codec
 * @param readsCounter Counter for records read from disk
 * @throws IOException
 */
public Reader(Configuration conf, FSDataInputStream in, long length, 
              CompressionCodec codec,
              Counters.Counter readsCounter) throws IOException {
  readRecordsCounter = readsCounter;
  checksumIn = new IFileInputStream(in,length, conf);
  if (codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      this.in = codec.createInputStream(checksumIn, decompressor);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      this.in = checksumIn;
    }
  } else {
    this.in = checksumIn;
  }
  this.dataIn = new DataInputStream(this.in);
  this.fileLength = length;
  
  if (conf != null) {
    bufferSize = conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE);
  }
}
 
Example #4
Source Project: hbase   Author: apache   File: Compression.java    License: Apache License 2.0
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using it.
        LOG.warn("Deompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example #5
Source Project: hadoop   Author: naver   File: InMemoryMapOutput.java    License: Apache License 2.0
public InMemoryMapOutput(Configuration conf, TaskAttemptID mapId,
                         MergeManagerImpl<K, V> merger,
                         int size, CompressionCodec codec,
                         boolean primaryMapOutput) {
  super(mapId, (long)size, primaryMapOutput);
  this.conf = conf;
  this.merger = merger;
  this.codec = codec;
  byteStream = new BoundedByteArrayOutputStream(size);
  memory = byteStream.getBuffer();
  if (codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
  } else {
    decompressor = null;
  }
}
 
Example #6
Source Project: Hive-XML-SerDe   Author: dvasilen   File: SplittableXmlInputFormat.java    License: Apache License 2.0
private InputStream getInputStream(JobConf jobConf, FileSplit split) throws IOException, ClassNotFoundException {
    FSDataInputStream fsin = null;

    // open the file and seek to the start of the split
    long splitStart = split.getStart();
    long splitEnd = splitStart + split.getLength();
    Path file = split.getPath();
    FileSystem fs = file.getFileSystem(jobConf);
    fsin = fs.open(split.getPath());
    fsin.seek(splitStart);

    Configuration conf = new Configuration();
    CompressionCodecFactory compressionCodecFactory = new CompressionCodecFactory(conf);
    CompressionCodec codec = compressionCodecFactory.getCodec(split.getPath());
    Decompressor decompressor = CodecPool.getDecompressor(codec);
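    // A splittable codec can decompress just this split's byte range;
    // a non-splittable codec has to be read from the start of the stream.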
    if (codec instanceof SplittableCompressionCodec) {
        return ((SplittableCompressionCodec) codec).createInputStream(fsin,
            decompressor,
            splitStart,
            splitEnd,
            SplittableCompressionCodec.READ_MODE.BYBLOCK);
    } else {
        return codec.createInputStream(fsin, decompressor);
    }
}
 
Example #7
Source Project: hadoop   Author: naver   File: Anonymizer.java    License: Apache License 2.0
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example #8
Source Project: hadoop   Author: naver   File: SequenceFile.java    License: Apache License 2.0
/** Close the file. */
@Override
public synchronized void close() throws IOException {
  keySerializer.close();
  uncompressedValSerializer.close();
  if (compressedValSerializer != null) {
    compressedValSerializer.close();
  }

  CodecPool.returnCompressor(compressor);
  compressor = null;
  
  if (out != null) {
    
    // Close the underlying stream iff we own it...
    if (ownOutputStream) {
      out.close();
    } else {
      out.flush();
    }
    out = null;
  }
}
 
Example #9
Source Project: incubator-hivemall   Author: apache   File: HadoopUtils.java    License: Apache License 2.0
public static BufferedReader getBufferedReader(File file, MapredContext context)
        throws IOException {
    URI fileuri = file.toURI();
    Path path = new Path(fileuri);

    Configuration conf = context.getJobConf();
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(path);

    if (codec == null) {
        return new BufferedReader(new FileReader(file));
    } else {
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        FileInputStream fis = new FileInputStream(file);
        CompressionInputStream cis = codec.createInputStream(fis, decompressor);
        BufferedReader br = new BufferedReaderExt(new InputStreamReader(cis), decompressor);
        return br;
    }
}
 
Example #10
Source Project: hadoop   Author: naver   File: Compression.java    License: Apache License 2.0
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Deompressor obtained from CodecPool already finished()");
      } else {
        if(LOG.isDebugEnabled()) {
          LOG.debug("Got a decompressor: " + decompressor.hashCode());
        }
      }
      /**
       * Following statement is necessary to get around bugs in 0.18 where a
       * decompressor is referenced after returned back to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Example #11
Source Project: big-c   Author: yncxcw   File: IFile.java    License: Apache License 2.0
/**
 * Construct an IFile Reader.
 * 
 * @param conf Configuration File 
 * @param in   The input stream
 * @param length Length of the data in the stream, including the checksum
 *               bytes.
 * @param codec codec
 * @param readsCounter Counter for records read from disk
 * @throws IOException
 */
public Reader(Configuration conf, FSDataInputStream in, long length, 
              CompressionCodec codec,
              Counters.Counter readsCounter) throws IOException {
  readRecordsCounter = readsCounter;
  checksumIn = new IFileInputStream(in,length, conf);
  if (codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      this.in = codec.createInputStream(checksumIn, decompressor);
    } else {
      LOG.warn("Could not obtain decompressor from CodecPool");
      this.in = checksumIn;
    }
  } else {
    this.in = checksumIn;
  }
  this.dataIn = new DataInputStream(this.in);
  this.fileLength = length;
  
  if (conf != null) {
    bufferSize = conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE);
  }
}
 
Example #12
Source Project: big-c   Author: yncxcw   File: IFile.java    License: Apache License 2.0
public void close() throws IOException {
  // Close the underlying stream
  in.close();
  
  // Release the buffer
  dataIn = null;
  buffer = null;
  if(readRecordsCounter != null) {
    readRecordsCounter.increment(numRecordsRead);
  }

  // Return the decompressor
  if (decompressor != null) {
    decompressor.reset();
    CodecPool.returnDecompressor(decompressor);
    decompressor = null;
  }
}
 
Example #13
Source Project: big-c   Author: yncxcw   File: InMemoryMapOutput.java    License: Apache License 2.0
public InMemoryMapOutput(Configuration conf, TaskAttemptID mapId,
                         MergeManagerImpl<K, V> merger,
                         int size, CompressionCodec codec,
                         boolean primaryMapOutput) {
  super(mapId, (long)size, primaryMapOutput);
  this.conf = conf;
  this.merger = merger;
  this.codec = codec;
  byteStream = new BoundedByteArrayOutputStream(size);
  memory = byteStream.getBuffer();
  if (codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
  } else {
    decompressor = null;
  }
}
 
Example #14
Source Project: big-c   Author: yncxcw   File: Anonymizer.java    License: Apache License 2.0
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example #15
Source Project: big-c   Author: yncxcw   File: SequenceFile.java    License: Apache License 2.0
/** Close the file. */
@Override
public synchronized void close() throws IOException {
  keySerializer.close();
  uncompressedValSerializer.close();
  if (compressedValSerializer != null) {
    compressedValSerializer.close();
  }

  CodecPool.returnCompressor(compressor);
  compressor = null;
  
  if (out != null) {
    
    // Close the underlying stream iff we own it...
    if (ownOutputStream) {
      out.close();
    } else {
      out.flush();
    }
    out = null;
  }
}
 
Example #16
Source Project: big-c   Author: yncxcw   File: SequenceFile.java    License: Apache License 2.0
/** Close the file. */
@Override
public synchronized void close() throws IOException {
  // Return the decompressors to the pool
  CodecPool.returnDecompressor(keyLenDecompressor);
  CodecPool.returnDecompressor(keyDecompressor);
  CodecPool.returnDecompressor(valLenDecompressor);
  CodecPool.returnDecompressor(valDecompressor);
  keyLenDecompressor = keyDecompressor = null;
  valLenDecompressor = valDecompressor = null;
  
  if (keyDeserializer != null) {
    keyDeserializer.close();
  }
  if (valDeserializer != null) {
    valDeserializer.close();
  }
  
  // Close the input-stream
  in.close();
}
 
Example #17
Source Project: big-c   Author: yncxcw   File: Compression.java    License: Apache License 2.0
public Compressor getCompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Compressor compressor = CodecPool.getCompressor(codec);
    if (compressor != null) {
      if (compressor.finished()) {
        // Somebody returns the compressor to CodecPool but is still using
        // it.
        LOG.warn("Compressor obtained from CodecPool already finished()");
      } else {
        if(LOG.isDebugEnabled()) {
          LOG.debug("Got a compressor: " + compressor.hashCode());
        }
      }
      /**
       * Following statement is necessary to get around bugs in 0.18 where a
       * compressor is referenced after returned back to the codec pool.
       */
      compressor.reset();
    }
    return compressor;
  }
  return null;
}
 
Example #18
Source Project: hadoopoffice   Author: ZuInnoTe   File: MapReduceExcelOutputIntegrationTest.java    License: Apache License 2.0
private InputStream openFile(Path path) throws IOException {
  CompressionCodec codec = new CompressionCodecFactory(miniCluster.getConfig()).getCodec(path);
  FSDataInputStream fileIn = dfsCluster.getFileSystem().open(path);
  // check if compressed
  if (codec == null) { // uncompressed
    return fileIn;
  } else { // compressed
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    this.openDecompressors.add(decompressor); // to be returned later using close
    if (codec instanceof SplittableCompressionCodec) {
      long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
      final SplitCompressionInputStream cIn = ((SplittableCompressionCodec) codec)
          .createInputStream(fileIn, decompressor, 0, end, SplittableCompressionCodec.READ_MODE.CONTINUOUS);
      return cIn;
    } else {
      return codec.createInputStream(fileIn, decompressor);
    }
  }
}
 
Example #19
Source Project: hadoopoffice   Author: ZuInnoTe   File: AbstractSpreadSheetDocumentRecordReader.java    License: Apache License 2.0
@Override
public synchronized void close() throws IOException {
  try {
    if (officeReader != null) {
      officeReader.close();
    }
  } finally {
    if (decompressor != null) { // return this decompressor
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
    // return decompressor of linked workbooks
    if (this.currentHFR != null) {
      currentHFR.close();
    }
  }
  // do not close the filesystem! will cause exceptions in Spark
}
 
Example #20
Source Project: RDFS   Author: iVCE   File: IFile.java    License: Apache License 2.0
public void close() throws IOException {
  // Return the decompressor
  if (decompressor != null) {
    decompressor.reset();
    CodecPool.returnDecompressor(decompressor);
    decompressor = null;
  }
  
  // Close the underlying stream
  in.close();
  
  // Release the buffer
  dataIn = null;
  buffer = null;
  if(readRecordsCounter != null) {
    readRecordsCounter.increment(numRecordsRead);
  }
}
 
Example #21
Source Project: tez   Author: apache   File: IFile.java    License: Apache License 2.0
void setupOutputStream(CompressionCodec codec) throws IOException {
  this.checksumOut = new IFileOutputStream(this.rawOut);
  if (codec != null) {
    this.compressor = CodecPool.getCompressor(codec);
    if (this.compressor != null) {
      this.compressor.reset();
      this.compressedOut = codec.createOutputStream(checksumOut, compressor);
      this.out = new FSDataOutputStream(this.compressedOut,  null);
      this.compressOutput = true;
    } else {
      LOG.warn("Could not obtain compressor from CodecPool");
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  } else {
    this.out = new FSDataOutputStream(checksumOut,null);
  }
}
 
Example #22
Source Project: hbase   Author: apache   File: CellBlockBuilder.java    License: Apache License 2.0
private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec,
    CompressionCodec compressor) throws IOException {
  Compressor poolCompressor = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
      }
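      // borrow a compressor from the pool; it is returned in the finally block below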
      poolCompressor = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, poolCompressor);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
    }
    encoder.flush();
  } catch (BufferOverflowException | IndexOutOfBoundsException e) {
    throw new DoNotRetryIOException(e);
  } finally {
    os.close();
    if (poolCompressor != null) {
      CodecPool.returnCompressor(poolCompressor);
    }
  }
}
 
Example #23
Source Project: hadoop-gpu   Author: koichi626   File: ReduceTask.java    License: Apache License 2.0
public MapOutputCopier(JobConf job, Reporter reporter) {
  setName("MapOutputCopier " + reduceTask.getTaskID() + "." + id);
  LOG.debug(getName() + " created");
  this.reporter = reporter;
  
  if (job.getCompressMapOutput()) {
    Class<? extends CompressionCodec> codecClass =
      job.getMapOutputCompressorClass(DefaultCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, job);
    decompressor = CodecPool.getDecompressor(codec);
  }
}
 
Example #24
Source Project: presto   Author: prestosql   File: HadoopCompressor.java    License: Apache License 2.0
@Override
public CompressedSliceOutput get()
{
    try {
        compressor.reset();
        bufferedOutput.reset();
        CompressionOutputStream compressionStream = codec.createOutputStream(bufferedOutput, compressor);
        return new CompressedSliceOutput(compressionStream, bufferedOutput, this, () -> CodecPool.returnCompressor(compressor));
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
 
Example #25
Source Project: components   Author: Talend   File: Excel2007FileRecordReader.java    License: Apache License 2.0
private void closeResource() throws IOException {
  try {
    if(stream_workbook != null) {
      stream_workbook.close();
      stream_workbook = null;
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
 
Example #26
Source Project: hadoopcryptoledger   Author: ZuInnoTe   File: AbstractBitcoinRecordReader.java    License: Apache License 2.0
/**
* Initializes reader
* @param split Split to use (assumed to be a file split)
* @param context context of the job
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
* @throws java.lang.InterruptedException in case of thread interruption
*
*/
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
  FileSplit fSplit = (FileSplit) split;
  // Initialize start and end of split
  start = fSplit.getStart();
  end = start + fSplit.getLength();
  final Path file = fSplit.getPath();
  codec = new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
  final FileSystem fs = file.getFileSystem(context.getConfiguration());
  FSDataInputStream fileIn = fs.open(file);
  // open stream
  if (isCompressedInput()) { // decompress
    decompressor = CodecPool.getDecompressor(codec);
    if (codec instanceof SplittableCompressionCodec) {
      final SplitCompressionInputStream cIn = ((SplittableCompressionCodec) codec)
          .createInputStream(fileIn, decompressor, start, end, SplittableCompressionCodec.READ_MODE.CONTINUOUS);
      bbr = new BitcoinBlockReader(cIn, this.maxSizeBitcoinBlock, this.bufferSize,
          this.specificMagicByteArray, this.useDirectBuffer, this.readAuxPOW);
      start = cIn.getAdjustedStart();
      end = cIn.getAdjustedEnd();
      filePosition = cIn; // take pos from compressed stream
    } else {
      bbr = new BitcoinBlockReader(codec.createInputStream(fileIn, decompressor),
          this.maxSizeBitcoinBlock, this.bufferSize, this.specificMagicByteArray,
          this.useDirectBuffer, readAuxPOW);
      filePosition = fileIn;
    }
  } else {
    fileIn.seek(start);
    bbr = new BitcoinBlockReader(fileIn, this.maxSizeBitcoinBlock, this.bufferSize,
        this.specificMagicByteArray, this.useDirectBuffer, readAuxPOW);
    filePosition = fileIn;
  }
  // seek to block start (for the case a block overlaps a split)
  try {
    bbr.seekBlockStart();
  } catch (BitcoinBlockReadException bbre) {
    LOG.error("Error reading Bitcoin blockchain data");
    LOG.error(bbre);
  }
}
 
Example #27
Source Project: ojai   Author: ojai   File: JSONFileRecordReader.java    License: Apache License 2.0
@Override
public void initialize(InputSplit arg0, TaskAttemptContext taskContext)
    throws IOException, InterruptedException {

  documentStream = null;
  it = null;
  documentCount = 0;
  key = new LongWritable();
  document = null;
  currentPos = 0;

  /* get the split */
  FileSplit split = (FileSplit) arg0;

  /* get configuration object */
  Configuration job = taskContext.getConfiguration();

  /* initialize file /input stream */
  final Path path = split.getPath();
  FileSystem fs = path.getFileSystem(job);
  inputStream = fs.open(path);

  CompressionCodec codec = new CompressionCodecFactory(job).getCodec(path);

  if (codec != null) {
    decompressor = CodecPool.getDecompressor(codec);
    inputStream = codec.createInputStream(inputStream, decompressor);
  }

  start = split.getStart();
  end = start + split.getLength();

  /* Initialize a stream reader so that it can read multiple documents from the file */

  documentStream = (JsonDocumentStream)Json.newDocumentStream(inputStream);
  it = documentStream.iterator();

}
 
Example #28
Source Project: hadoop   Author: naver   File: LineRecordReader.java    License: Apache License 2.0
public synchronized void close() throws IOException {
  try {
    if (in != null) {
      in.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }
}
 
Example #29
Source Project: RDFS   Author: iVCE   File: HadoopLogsAnalyzer.java    License: Apache License 2.0
private boolean setNextDirectoryInputStream() throws FileNotFoundException,
    IOException {
  if (input != null) {
    input.close();
    LOG.info("File closed: "+currentFileName);
    input = null;
  }

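  // return the previous file's decompressor to the pool before opening the next file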
  if (inputCodec != null) {
    CodecPool.returnDecompressor(inputDecompressor);
    inputDecompressor = null;
    inputCodec = null;
  }

  ++inputDirectoryCursor;

  if (inputDirectoryCursor >= inputDirectoryFiles.length) {
    return false;
  }

  fileFirstLine = true;

  currentFileName = inputDirectoryFiles[inputDirectoryCursor];

  LOG.info("\nOpening file " + currentFileName
      + "  *************************** .");
  LOG
      .info("This file, " + (inputDirectoryCursor + 1) + "/"
          + inputDirectoryFiles.length + ", starts with line " + lineNumber
          + ".");

  input = maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));

  return input != null;
}