Java Code Examples for org.apache.hadoop.io.BytesWritable#setCapacity()

The following examples show how to use org.apache.hadoop.io.BytesWritable#setCapacity(). Each example is taken from an open-source project; the source file and license are noted above the code.
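Before looking at the project code, here is a minimal, self-contained sketch (not taken from any of the projects below) of what setCapacity() does: it resizes the backing byte array while preserving the existing contents, and it only changes the logical length reported by getLength() when the new capacity is smaller than that length. The class name SetCapacityDemo is purely illustrative.

import org.apache.hadoop.io.BytesWritable;

public class SetCapacityDemo {
  public static void main(String[] args) {
    BytesWritable buf = new BytesWritable(new byte[]{1, 2, 3});
    System.out.println(buf.getLength());   // 3    (logical size)
    System.out.println(buf.getCapacity()); // 3    (backing array length)

    // Growing the capacity reallocates the backing array and copies the
    // existing bytes; the logical length stays at 3.
    buf.setCapacity(1024);
    System.out.println(buf.getLength());   // 3
    System.out.println(buf.getCapacity()); // 1024

    // Shrinking the capacity below the current length truncates the data.
    buf.setCapacity(2);
    System.out.println(buf.getLength());   // 2
  }
}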
Example 1
Source File: BCFile.java    From hadoop with Apache License 2.0
/**
 * @param compressionAlgo
 *          The compression algorithm to be used for compression.
 * @throws IOException
 */
public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
    BytesWritable fsOutputBuffer, Configuration conf) throws IOException {
  this.compressAlgo = compressionAlgo;
  this.fsOut = fsOut;
  this.posStart = fsOut.getPos();

  fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));

  this.fsBufferedOutput =
      new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
  this.compressor = compressAlgo.getCompressor();

  try {
    this.out =
        compressionAlgo.createCompressionStream(fsBufferedOutput,
            compressor, 0);
  } catch (IOException e) {
    compressAlgo.returnCompressor(compressor);
    throw e;
  }
}
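Examples 1 through 4 and 6 below are variants of the same pattern from BCFile (and DTBCFile): setCapacity() pre-sizes a single reusable BytesWritable so that its backing array, obtained via getBytes() (or the older get()), can be handed directly to SimpleBufferedOutputStream as its buffer. The sketch below isolates that idea; the 256 KB size, the println, and the class name are illustrative stand-ins, since TFile.getFSOutputBufferSize() may not be accessible outside its own package.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;

public class OutputBufferSizing {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Stand-in for TFile.getFSOutputBufferSize(conf), which returns the configured
    // TFile filesystem output buffer size.
    int outputBufferSize = 256 * 1024;

    // The same BytesWritable instance is passed in for every block, so the backing
    // array is allocated once here and then reused for each block's buffered output.
    BytesWritable fsOutputBuffer = new BytesWritable();
    fsOutputBuffer.setCapacity(outputBufferSize);

    // getBytes() returns the backing array itself; its length now matches the
    // requested capacity, so it can serve as the buffer for a buffered stream.
    byte[] buffer = fsOutputBuffer.getBytes();
    System.out.println("buffer length = " + buffer.length); // 262144
  }
}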
 
Example 2
Source File: BCFile.java    From big-c with Apache License 2.0
/**
 * @param compressionAlgo
 *          The compression algorithm to be used for compression.
 * @throws IOException
 */
public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
    BytesWritable fsOutputBuffer, Configuration conf) throws IOException {
  this.compressAlgo = compressionAlgo;
  this.fsOut = fsOut;
  this.posStart = fsOut.getPos();

  fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));

  this.fsBufferedOutput =
      new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
  this.compressor = compressAlgo.getCompressor();

  try {
    this.out =
        compressionAlgo.createCompressionStream(fsBufferedOutput,
            compressor, 0);
  } catch (IOException e) {
    compressAlgo.returnCompressor(compressor);
    throw e;
  }
}
 
Example 3
Source File: DTBCFile.java    From attic-apex-malhar with Apache License 2.0
/**
 * @param compressionAlgo
 *          The compression algorithm to be used for compression.
 * @throws IOException
 */
public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
    BytesWritable fsOutputBuffer, Configuration conf) throws IOException {
  this.compressAlgo = compressionAlgo;
  this.fsOut = fsOut;
  this.posStart = fsOut.getPos();

  fsOutputBuffer.setCapacity(DTFile.getFSOutputBufferSize(conf));

  this.fsBufferedOutput =
      new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
  this.compressor = compressAlgo.getCompressor();

  try {
    this.out =
        compressionAlgo.createCompressionStream(fsBufferedOutput,
            compressor, 0);
  } catch (IOException e) {
    compressAlgo.returnCompressor(compressor);
    throw e;
  }
}
 
Example 4
Source File: BCFile.java    From RDFS with Apache License 2.0
/**
 * @param compressionAlgo
 *          The compression algorithm to be used for compression.
 * @throws IOException
 */
public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
    BytesWritable fsOutputBuffer, Configuration conf) throws IOException {
  this.compressAlgo = compressionAlgo;
  this.fsOut = fsOut;
  this.posStart = fsOut.getPos();

  fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));

  this.fsBufferedOutput =
      new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.get());
  this.compressor = compressAlgo.getCompressor();

  try {
    this.out =
        compressionAlgo.createCompressionStream(fsBufferedOutput,
            compressor, 0);
  } catch (IOException e) {
    compressAlgo.returnCompressor(compressor);
    throw e;
  }
}
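Examples 4 and 6 come from older Hadoop-derived codebases and call fsOutputBuffer.get() instead of getBytes(); in later Hadoop releases get() is deprecated and simply delegates to getBytes(). A quick equivalence check, assuming a Hadoop version that still ships both accessors (class name is illustrative):

import org.apache.hadoop.io.BytesWritable;

public class AccessorEquivalence {
  @SuppressWarnings("deprecation")
  public static void main(String[] args) {
    BytesWritable buf = new BytesWritable();
    buf.setCapacity(256 * 1024);

    // get() is the pre-deprecation accessor; getBytes() replaced it.
    // Both return the same backing array reference.
    System.out.println(buf.get() == buf.getBytes()); // true
  }
}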
 
Example 5
Source File: WholeFileRecordReader.java    From zephyr with Apache License 2.0
@Override
public boolean next(NullWritable key, BytesWritable value) throws IOException {
    if (!processed) {
        int size = (int) fileSplit.getLength();
        byte[] contents = new byte[size];
        Path file = fileSplit.getPath();
        FileSystem fs = file.getFileSystem(conf);
        FSDataInputStream in = null;
        try {
            in = fs.open(file);
            // BytesWritable grows its backing array by 1.5x, so reset value before
            // setCapacity when the file is larger than 2/3 of Integer.MAX_VALUE to
            // avoid a huge array copy. Written as Integer.MAX_VALUE / 3 * 2 because
            // (2 * Integer.MAX_VALUE) / 3 overflows int arithmetic.
            if (size > Integer.MAX_VALUE / 3 * 2) {
                // Shrink the current contents so setCapacity only copies one byte.
                value.set(new byte[]{0x00}, 0, 1);
                value.setCapacity(size);
            }
            IOUtils.readFully(in, contents, 0, contents.length);
            value.set(contents, 0, contents.length);
        } finally {
            IOUtils.closeStream(in);
        }
        processed = true;
        return true;
    }
    return false;
}
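The reset-before-setCapacity trick above works because setCapacity() copies the existing contents (up to the current length) into the newly allocated array; shrinking the value to a single byte first makes that copy trivially cheap. A small standalone sketch of the effect, not part of the zephyr source (sizes and class name are illustrative):

import org.apache.hadoop.io.BytesWritable;

public class SetCapacityCopyDemo {
  public static void main(String[] args) {
    // 16 MB of existing content.
    BytesWritable value = new BytesWritable(new byte[16 * 1024 * 1024]);

    // Shrink the logical length to one byte so the next setCapacity() call only
    // copies a single byte instead of the full 16 MB.
    value.set(new byte[]{0x00}, 0, 1);

    // Allocates a new backing array; only getLength() bytes (here, 1) are copied.
    value.setCapacity(32 * 1024 * 1024);

    System.out.println(value.getLength());   // 1
    System.out.println(value.getCapacity()); // 33554432
  }
}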
 
Example 6
Source File: BCFile.java    From hadoop-gpu with Apache License 2.0
/**
 * @param compressionAlgo
 *          The compression algorithm to be used for compression.
 * @throws IOException
 */
public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
    BytesWritable fsOutputBuffer, Configuration conf) throws IOException {
  this.compressAlgo = compressionAlgo;
  this.fsOut = fsOut;
  this.posStart = fsOut.getPos();

  fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));

  this.fsBufferedOutput =
      new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.get());
  this.compressor = compressAlgo.getCompressor();

  try {
    this.out =
        compressionAlgo.createCompressionStream(fsBufferedOutput,
            compressor, 0);
  } catch (IOException e) {
    compressAlgo.returnCompressor(compressor);
    throw e;
  }
}
 
Example 7
Source File: CustomWritable.java    From pxf with Apache License 2.0
@Override
public void write(DataOutput out) throws IOException {
    // 0. Timestamp
    Text tms_text = new Text(tms);
    tms_text.write(out);

    // 1. num, int1, int2
    IntWritable intw = new IntWritable();

    for (int i = 0; i < num.length; i++) {
        intw.set(num[i]);
        intw.write(out);
    }

    intw.set(int1);
    intw.write(out);

    intw.set(int2);
    intw.write(out);

    // 2. st1
    Text txt = new Text();

    for (int i = 0; i < strings.length; i++) {
        txt.set(strings[i]);
        txt.write(out);
    }

    txt.set(st1);
    txt.write(out);

    // 3. doubles
    DoubleWritable dw = new DoubleWritable();
    for (int i = 0; i < dubs.length; i++) {
        dw.set(dubs[i]);
        dw.write(out);
    }

    dw.set(db);
    dw.write(out);

    // 4. floats
    FloatWritable fw = new FloatWritable();
    for (int i = 0; i < fts.length; i++) {
        fw.set(fts[i]);
        fw.write(out);
    }

    fw.set(ft);
    fw.write(out);

    // 5. longs
    LongWritable lw = new LongWritable();
    for (int i = 0; i < lngs.length; i++) {
        lw.set(lngs[i]);
        lw.write(out);
    }
    lw.set(lng);
    lw.write(out);

    // 6. booleans
    BooleanWritable bw = new BooleanWritable();
    for (int i = 0; i < bools.length; ++i) {
        bw.set(bools[i]);
        bw.write(out);
    }
    bw.set(bool);
    bw.write(out);

    // 7. shorts
    ShortWritable sw = new ShortWritable();
    for (int i = 0; i < shrts.length; ++i) {
        sw.set(shrts[i]);
        sw.write(out);
    }
    sw.set(shrt);
    sw.write(out);

    // 8. bytes
    // BytesWritable btsw = new BytesWritable(bts);
    // btsw.write(out);
    BytesWritable btsw = new BytesWritable();
    btsw.setCapacity(bts.length);
    btsw.setSize(bts.length);
    btsw.set(bts, 0, bts.length);
    btsw.write(out);
}
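In Example 7 the explicit setCapacity() and setSize() calls are defensive: set() grows the backing array on its own (via setSize(), which calls setCapacity() when needed), and the commented-out constructor form achieves the same result in one step. The sketch below, not part of the pxf source, shows the shorter form and a round trip of just the bytes field, using Hadoop's DataOutputBuffer and DataInputBuffer purely for demonstration.

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class BytesFieldRoundTrip {
  public static void main(String[] args) throws Exception {
    byte[] bts = {10, 20, 30, 40};

    // Equivalent to the setCapacity()/setSize()/set() sequence in Example 7:
    // set() resizes the backing array by itself.
    BytesWritable btsw = new BytesWritable();
    btsw.set(bts, 0, bts.length);

    DataOutputBuffer out = new DataOutputBuffer();
    btsw.write(out); // writes the length as an int, then the bytes

    // Read it back into a fresh BytesWritable.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    BytesWritable copy = new BytesWritable();
    copy.readFields(in);

    System.out.println(copy.getLength()); // 4
  }
}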