Java Code Examples for org.apache.hadoop.io.Writable#write()

The following examples show how to use org.apache.hadoop.io.Writable#write(). Each example is drawn from an open-source project; the source file, originating project, and license are listed above the code.
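Before looking at the project-specific examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the basic contract behind Writable#write(): the method serializes the object's fields to a DataOutput, and readFields() restores them from a DataInput. The class and value used here are arbitrary and chosen only for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class WritableWriteDemo {
    public static void main(String[] args) throws IOException {
        Text original = new Text("hello writable");

        // write(DataOutput) serializes the Writable's fields
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(baos)) {
            original.write(out);
        }

        // readFields(DataInput) restores them into an existing instance
        Text copy = new Text();
        try (DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(baos.toByteArray()))) {
            copy.readFields(in);
        }

        System.out.println(copy); // prints "hello writable"
    }
}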
Example 1
Source File: HFilesystemAdmin.java    From spliceengine with GNU Affero General Public License v3.0
private static byte[] toByteArray(Writable... writables) {
    final DataOutputBuffer out = new DataOutputBuffer();
    try {
        for(Writable w : writables) {
            w.write(out);
        }
        out.close();
    } catch (IOException e) {
        throw new RuntimeException("Fail to convert writables to a byte array",e);
    }
    byte[] bytes = out.getData();
    if (bytes.length == out.getLength()) {
        return bytes;
    }
    byte[] result = new byte[out.getLength()];
    System.arraycopy(bytes, 0, result, 0, out.getLength());
    return result;
}
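
The method above only covers the write side. As a hedged sketch that is not part of the spliceengine source, the reverse direction can feed those bytes back through org.apache.hadoop.io.DataInputBuffer and Writable#readFields():

// Hypothetical counterpart to toByteArray(): the caller supplies empty
// Writable instances in the same order they were originally written.
private static void fromByteArray(byte[] bytes, Writable... writables) {
    final DataInputBuffer in = new DataInputBuffer();
    in.reset(bytes, bytes.length);
    try {
        for (Writable w : writables) {
            w.readFields(in);
        }
    } catch (IOException e) {
        throw new RuntimeException("Failed to restore writables from a byte array", e);
    }
}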
 
Example 2
Source File: ServerRpcConnection.java    From hbase with Apache License 2.0
/**
 * No protobuf encoding of raw sasl messages
 */
protected final void doRawSaslReply(SaslStatus status, Writable rv,
    String errorClass, String error) throws IOException {
  BufferChain bc;
  // In my testing, have noticed that sasl messages are usually
  // in the ballpark of 100-200. That's why the initial capacity is 256.
  try (ByteBufferOutputStream saslResponse = new ByteBufferOutputStream(256);
      DataOutputStream  out = new DataOutputStream(saslResponse)) {
    out.writeInt(status.state); // write status
    if (status == SaslStatus.SUCCESS) {
      rv.write(out);
    } else {
      WritableUtils.writeString(out, errorClass);
      WritableUtils.writeString(out, error);
    }
    bc = new BufferChain(saslResponse.getByteBuffer());
  }
  doRespond(() -> bc);
}
 
Example 3
Source File: Server.java    From hadoop-gpu with Apache License 2.0
/**
 * Setup response for the IPC Call.
 * 
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, 
                           Call call, Status status, 
                           Writable rv, String errorClass, String error) 
throws IOException {
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.id);                // write call id
  out.writeInt(status.state);           // write status

  if (status == Status.SUCCESS) {
    rv.write(out);
  } else {
    WritableUtils.writeString(out, errorClass);
    WritableUtils.writeString(out, error);
  }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
Example 4
Source File: ReflectionUtils.java    From hadoop-gpu with Apache License 2.0
@Deprecated
public static void cloneWritableInto(Writable dst, 
                                     Writable src) throws IOException {
  CopyInCopyOutBuffer buffer = cloneBuffers.get();
  buffer.outBuffer.reset();
  src.write(buffer.outBuffer);
  buffer.moveData();
  dst.readFields(buffer.inBuffer);
}
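
cloneWritableInto is marked @Deprecated. As a hedged alternative (an assumption about a suitable replacement, not a statement from this source), the public ReflectionUtils.copy(conf, src, dst) API performs the same serialize-then-deserialize copy for any type the configured serialization framework understands:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.util.ReflectionUtils;

public class CopyDemo {
    public static void main(String[] args) throws Exception {
        IntWritable src = new IntWritable(42);
        IntWritable dst = new IntWritable();
        // Serializes src and deserializes the bytes into dst, using
        // WritableSerialization for Writable types.
        ReflectionUtils.copy(new Configuration(), src, dst);
        System.out.println(dst.get()); // 42
    }
}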
 
Example 5
Source File: RubixFileKeyData.java    From Cubert with Apache License 2.0
@Override
public void write(DataOutput out) throws IOException
{
    if (!(key instanceof Writable))
        throw new RuntimeException("Key Class is not Writable");
    Writable keyWritable = (Writable) key;

    keyWritable.write(out);
    out.writeLong(blockId);
    out.writeLong(offset);
    out.writeLong(numRecords);

}
 
Example 6
Source File: LocalFileUtils.java    From systemds with Apache License 2.0
/**
 * Writes an arbitrary writable to local file system, using a fused buffered writer
 * with special support for matrix blocks.
 * 
 * @param fname file name to write
 * @param mb Hadoop writable
 * @throws IOException if IOException occurs
 */
public static void writeWritableToLocal(String fname, Writable mb)
	throws IOException
{	
	FileOutputStream fos = new FileOutputStream( fname );
	FastBufferedDataOutputStream out = new FastBufferedDataOutputStream(fos, BUFFER_SIZE);
	
	try {
		mb.write(out);
	}
	finally {
		IOUtilFunctions.closeSilently(out);
		IOUtilFunctions.closeSilently(fos);
	}	
}
 
Example 7
Source File: HadoopUtil.java    From kylin with Apache License 2.0
public static byte[] toBytes(Writable writable) {
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bout);
        writable.write(out);
        out.close();
        bout.close();
        return bout.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
 
Example 8
Source File: WritableList.java    From eagle with Apache License 2.0
/**
 * Serialize the fields of this object to <code>out</code>.
 *
 * @param out <code>DataOutput</code> to serialize this object into.
 * @throws java.io.IOException
 */
@Override
public void write(DataOutput out) throws IOException {
    this.check();
    out.writeInt(this.size());
    for (Writable item: this) {
        item.write(out);
    }
}
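
A hedged sketch of the matching read side, which is not shown in the Eagle source here: read the element count written above, then materialize each element with readFields(). The itemClass parameter is an assumption made for illustration, since the real WritableList tracks its element type internally.

// Hypothetical helper mirroring WritableList#write(DataOutput).
public static <T extends Writable> List<T> readList(DataInput in, Class<T> itemClass)
        throws IOException {
    int size = in.readInt();
    List<T> items = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        T item = ReflectionUtils.newInstance(itemClass, null); // a null Configuration is accepted
        item.readFields(in);
        items.add(item);
    }
    return items;
}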
 
Example 9
Source File: WritableList.java    From Eagle with Apache License 2.0
/**
 * Serialize the fields of this object to <code>out</code>.
 *
 * @param out <code>DataOutput</code> to serialize this object into.
 * @throws java.io.IOException
 */
@Override
public void write(DataOutput out) throws IOException {
	this.check();
	out.writeInt(this.size());
	for(Writable item: this){
		item.write(out);
	}
}
 
Example 10
Source File: HadoopUtil.java    From kylin-on-parquet-v2 with Apache License 2.0
public static byte[] toBytes(Writable writable) {
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bout);
        writable.write(out);
        out.close();
        bout.close();
        return bout.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
 
Example 11
Source File: ReadRCFileTest.java    From kite with Apache License 2.0
private void createRCFile(final String fileName, final int numRecords,
    final int maxColumns, boolean addNullValue) throws IOException {
  // Write the sequence file
  SequenceFile.Metadata metadata = getMetadataForRCFile();
  Configuration conf = new Configuration();
  conf.set(RCFile.COLUMN_NUMBER_CONF_STR, String.valueOf(maxColumns));
  Path inputFile = dfs.makeQualified(new Path(testDirectory, fileName));
  RCFile.Writer rcFileWriter = new RCFile.Writer(dfs, conf, inputFile, null,
      metadata, null);
  for (int row = 0; row < numRecords; row++) {
    BytesRefArrayWritable dataWrite = new BytesRefArrayWritable(maxColumns);
    dataWrite.resetValid(maxColumns);
    for (int column = 0; column < maxColumns; column++) {
      Writable sampleText = new Text(
          "ROW-NUM:" + row + ", COLUMN-NUM:" + column);
      // Set the last column of the last row as null
      if (addNullValue && column == maxColumns - 1 && row == numRecords - 1) {
        sampleText = NullWritable.get();
      }
      ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput();
      sampleText.write(dataOutput);
      dataWrite.set(column, new BytesRefWritable(dataOutput.toByteArray()));
    }
    rcFileWriter.append(dataWrite);
  }
  rcFileWriter.close();
}
 
Example 12
Source File: EdgeValueWritable.java    From hgraphdb with Apache License 2.0
@Override
public void write(final DataOutput output) throws IOException {
    Kryo kryo = new Kryo();
    kryo.register(HBaseEdge.class, new HBaseEdgeSerializer());
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Output out = new Output(baos);
    kryo.writeObject(out, this.edge);
    out.close();
    final byte[] serialized = baos.toByteArray();
    WritableUtils.writeCompressedByteArray(output, serialized);
    Writable writable = value != null ? value : NullWritable.get();
    Text.writeString(output, writable.getClass().getName());
    writable.write(output);
}
 
Example 13
Source File: WritableUtils2.java    From accumulo-recipes with Apache License 2.0
public static byte[] serialize(Writable writable) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dataOut = null;
    try {
        dataOut = new DataOutputStream(out);
        writable.write(dataOut);
        return out.toByteArray();
    } finally {
        IOUtils.closeQuietly(dataOut);
    }
}
 
Example 14
Source File: ReflectionUtils.java    From big-c with Apache License 2.0
@Deprecated
public static void cloneWritableInto(Writable dst, 
                                     Writable src) throws IOException {
  CopyInCopyOutBuffer buffer = cloneBuffers.get();
  buffer.outBuffer.reset();
  src.write(buffer.outBuffer);
  buffer.moveData();
  dst.readFields(buffer.inBuffer);
}
 
Example 15
Source File: TypedBytesWritableOutput.java    From big-c with Apache License 2.0
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  WritableUtils.writeString(dos, w.getClass().getName());
  w.write(dos);
  dos.close();
  out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}
 
Example 16
Source File: AvroBytesRecord.java    From hiped2 with Apache License 2.0
public static GenericRecord toGenericRecord(Writable writable)
    throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dao = new DataOutputStream(baos);
  writable.write(dao);
  dao.close();
  return toGenericRecord(baos.toByteArray());
}
 
Example 17
Source File: ReflectionUtils.java    From hadoop with Apache License 2.0
@Deprecated
public static void cloneWritableInto(Writable dst, 
                                     Writable src) throws IOException {
  CopyInCopyOutBuffer buffer = cloneBuffers.get();
  buffer.outBuffer.reset();
  src.write(buffer.outBuffer);
  buffer.moveData();
  dst.readFields(buffer.inBuffer);
}
 
Example 18
Source File: TypedBytesWritableOutput.java    From hadoop with Apache License 2.0
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  WritableUtils.writeString(dos, w.getClass().getName());
  w.write(dos);
  dos.close();
  out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}
 
Example 19
Source File: ProtoBufConverter.java    From eagle with Apache License 2.0
public static ByteString writableToByteString(Writable writable) throws IOException {
    ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput();
    writable.write(dataOutput);
    return ByteString.copyFrom(dataOutput.toByteArray());
}