Java Code Examples for org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status#SUCCESS

The following examples show how to use org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status#SUCCESS. They are taken from open source projects; the source file and project each example comes from are noted above it.
Example 1
Source File: DataTransferProtoUtil.java    From hadoop with Apache License 2.0
public static void checkBlockOpStatus(
        BlockOpResponseProto response,
        String logInfo) throws IOException {
  if (response.getStatus() != Status.SUCCESS) {
    if (response.getStatus() == Status.ERROR_ACCESS_TOKEN) {
      throw new InvalidBlockTokenException(
        "Got access token error"
        + ", status message " + response.getMessage()
        + ", " + logInfo
      );
    } else {
      throw new IOException(
        "Got error"
        + ", status message " + response.getMessage()
        + ", " + logInfo
      );
    }
  }
}
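In Hadoop's data transfer code, this helper is typically invoked right after a BlockOpResponseProto has been read off the wire, so that any status other than Status.SUCCESS surfaces as an exception. A minimal caller-side sketch of that pattern, assuming the Hadoop 2.x PBHelper.vintPrefixed helper and an already established DataInputStream (the helper name expectSuccess is hypothetical):

// Hypothetical caller-side helper: parse the vint-delimited response and let
// checkBlockOpStatus() throw unless the datanode reported Status.SUCCESS.
static void expectSuccess(DataInputStream in, String logInfo) throws IOException {
  BlockOpResponseProto response =
      BlockOpResponseProto.parseFrom(PBHelper.vintPrefixed(in));
  DataTransferProtoUtil.checkBlockOpStatus(response, logInfo);
  // Reaching this point implies response.getStatus() == Status.SUCCESS.
}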
 
Example 2
Source File: DataTransferProtoUtil.java    From big-c with Apache License 2.0
public static void checkBlockOpStatus(
        BlockOpResponseProto response,
        String logInfo) throws IOException {
  if (response.getStatus() != Status.SUCCESS) {
    if (response.getStatus() == Status.ERROR_ACCESS_TOKEN) {
      throw new InvalidBlockTokenException(
        "Got access token error"
        + ", status message " + response.getMessage()
        + ", " + logInfo
      );
    } else {
      throw new IOException(
        "Got error"
        + ", status message " + response.getMessage()
        + ", " + logInfo
      );
    }
  }
}
 
Example 3
Source File: FanOutOneBlockAsyncDFSOutput.java    From hbase with Apache License 2.0
@Override
protected void channelRead0(ChannelHandlerContext ctx, PipelineAckProto ack) throws Exception {
  Status reply = getStatus(ack);
  if (reply != Status.SUCCESS) {
    failed(ctx.channel(), () -> new IOException("Bad response " + reply + " for block " +
      block + " from datanode " + ctx.channel().remoteAddress()));
    return;
  }
  if (PipelineAck.isRestartOOBStatus(reply)) {
    failed(ctx.channel(), () -> new IOException("Restart response " + reply + " for block " +
      block + " from datanode " + ctx.channel().remoteAddress()));
    return;
  }
  if (ack.getSeqno() == HEART_BEAT_SEQNO) {
    return;
  }
  completed(ctx.channel());
}
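The handler fails the channel on any reply other than Status.SUCCESS, and likewise on a datanode-restart OOB status, since this fan-out output does not attempt pipeline recovery. Heartbeat acks (seqno equal to HEART_BEAT_SEQNO) carry nothing to acknowledge and are simply ignored; every other ack is recorded through completed(ctx.channel()) as an acknowledgement from that datanode.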
 
Example 4
Source File: PipelineAck.java    From hadoop with Apache License 2.0
/**
 * Check whether every reply in this ack is SUCCESS.
 * @return true if all statuses are SUCCESS
 */
public boolean isSuccess() {
  for (Status s : proto.getReplyList()) {
    if (s != Status.SUCCESS) {
      return false;
    }
  }
  return true;
}
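Callers typically wrap the reply stream in a PipelineAck and consult isSuccess() before treating a packet as fully acknowledged by the pipeline. A minimal usage sketch, assuming PipelineAck's no-arg constructor, readFields(InputStream), and the HEART_BEAT_SEQNO constant as found in Hadoop 2.x:

// Hypothetical ack-processing step: parse one delimited PipelineAckProto from
// the reply stream and require every datanode in the pipeline to report SUCCESS.
PipelineAck ack = new PipelineAck();
ack.readFields(blockReplyStream);
if (ack.getSeqno() != PipelineAck.HEART_BEAT_SEQNO && !ack.isSuccess()) {
  throw new IOException("Pipeline ack with non-SUCCESS status: " + ack);
}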
 
Example 5
Source File: TestBlockReplacement.java    From hadoop with Apache License 2.0
private boolean replaceBlock(
    ExtendedBlock block,
    DatanodeInfo source,
    DatanodeInfo sourceProxy,
    DatanodeInfo destination,
    StorageType targetStorageType) throws IOException, SocketException {
  Socket sock = new Socket();
  try {
    sock.connect(NetUtils.createSocketAddr(destination.getXferAddr()),
        HdfsServerConstants.READ_TIMEOUT);
    sock.setKeepAlive(true);
    // sendRequest
    DataOutputStream out = new DataOutputStream(sock.getOutputStream());
    new Sender(out).replaceBlock(block, targetStorageType,
        BlockTokenSecretManager.DUMMY_TOKEN, source.getDatanodeUuid(),
        sourceProxy);
    out.flush();
    // receiveResponse
    DataInputStream reply = new DataInputStream(sock.getInputStream());

    BlockOpResponseProto proto =
        BlockOpResponseProto.parseDelimitedFrom(reply);
    while (proto.getStatus() == Status.IN_PROGRESS) {
      proto = BlockOpResponseProto.parseDelimitedFrom(reply);
    }
    return proto.getStatus() == Status.SUCCESS;
  } finally {
    sock.close();
  }
}
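The test helper drives the OP_REPLACE_BLOCK exchange by hand: it sends the request with Sender#replaceBlock, keeps re-reading delimited BlockOpResponseProto messages for as long as the datanode reports Status.IN_PROGRESS, and treats only a final Status.SUCCESS as a successful replacement.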
 
Example 6
Source File: PipelineAck.java    From big-c with Apache License 2.0
/**
 * Check whether every reply in this ack is SUCCESS.
 * @return true if all statuses are SUCCESS
 */
public boolean isSuccess() {
  for (Status s : proto.getReplyList()) {
    if (s != Status.SUCCESS) {
      return false;
    }
  }
  return true;
}
 
Example 7
Source File: TestBlockReplacement.java    From big-c with Apache License 2.0
private boolean replaceBlock(
    ExtendedBlock block,
    DatanodeInfo source,
    DatanodeInfo sourceProxy,
    DatanodeInfo destination,
    StorageType targetStorageType) throws IOException, SocketException {
  Socket sock = new Socket();
  try {
    sock.connect(NetUtils.createSocketAddr(destination.getXferAddr()),
        HdfsServerConstants.READ_TIMEOUT);
    sock.setKeepAlive(true);
    // sendRequest
    DataOutputStream out = new DataOutputStream(sock.getOutputStream());
    new Sender(out).replaceBlock(block, targetStorageType,
        BlockTokenSecretManager.DUMMY_TOKEN, source.getDatanodeUuid(),
        sourceProxy);
    out.flush();
    // receiveResponse
    DataInputStream reply = new DataInputStream(sock.getInputStream());

    BlockOpResponseProto proto =
        BlockOpResponseProto.parseDelimitedFrom(reply);
    while (proto.getStatus() == Status.IN_PROGRESS) {
      proto = BlockOpResponseProto.parseDelimitedFrom(reply);
    }
    return proto.getStatus() == Status.SUCCESS;
  } finally {
    sock.close();
  }
}