org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations Java Examples

The following examples show how to use org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations. You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: NameNodeRpcServer.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Override // NamenodeProtocol
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
throws IOException {
  // Reject non-positive sizes before touching any namesystem state.
  if (size < 1) {
    throw new IllegalArgumentException(
        "Unexpected not positive size: " + size);
  }
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  // Delegate to the block manager, which owns the block-to-location maps.
  return namesystem.getBlockManager().getBlocks(datanode, size);
}
 
Example #2
Source File: TestPBHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Test
public void testConvertBlocksWithLocations() {
  // Round-trip a two-element BlocksWithLocations through its protobuf form
  // and verify each contained block survives unchanged.
  BlockWithLocations[] original = new BlockWithLocations[] {
      getBlockWithLocations(1), getBlockWithLocations(2) };
  BlocksWithLocations wrapped = new BlocksWithLocations(original);
  BlocksWithLocations roundTripped =
      PBHelper.convert(PBHelper.convert(wrapped));
  BlockWithLocations[] expected = wrapped.getBlocks();
  BlockWithLocations[] actual = roundTripped.getBlocks();
  assertEquals(expected.length, actual.length);
  for (int idx = 0; idx < expected.length; idx++) {
    compare(expected[idx], actual[idx]);
  }
}
 
Example #3
Source File: NamenodeProtocolTranslatorPB.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Override
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
    throws IOException {
  // Build the wire request; the datanode is narrowed to its DatanodeID form.
  GetBlocksRequestProto request = GetBlocksRequestProto.newBuilder()
      .setDatanode(PBHelper.convert((DatanodeID) datanode))
      .setSize(size)
      .build();
  try {
    GetBlocksResponseProto response =
        rpcProxy.getBlocks(NULL_CONTROLLER, request);
    return PBHelper.convert(response.getBlocks());
  } catch (ServiceException e) {
    // Unwrap the remote IOException carried inside the ServiceException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example #4
Source File: NamenodeProtocolServerSideTranslatorPB.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Override
public GetBlocksResponseProto getBlocks(RpcController unused,
    GetBlocksRequestProto request) throws ServiceException {
  // Translate the wire-level datanode descriptor back into a DatanodeInfo.
  DatanodeInfo dnInfo = new DatanodeInfo(
      PBHelper.convert(request.getDatanode()));
  final BlocksWithLocations result;
  try {
    result = impl.getBlocks(dnInfo, request.getSize());
  } catch (IOException e) {
    // The RPC layer expects ServiceException; keep the original as the cause.
    throw new ServiceException(e);
  }
  return GetBlocksResponseProto.newBuilder()
      .setBlocks(PBHelper.convert(result))
      .build();
}
 
Example #5
Source File: PBHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
public static BlocksWithLocations convert(BlocksWithLocationsProto blocks) {
  // Convert each proto entry into its native counterpart, preserving order.
  List<BlockWithLocationsProto> protoList = blocks.getBlocksList();
  BlockWithLocations[] converted = new BlockWithLocations[protoList.size()];
  for (int i = 0; i < converted.length; i++) {
    converted[i] = convert(protoList.get(i));
  }
  return new BlocksWithLocations(converted);
}
 
Example #6
Source File: PBHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
public static BlocksWithLocationsProto convert(BlocksWithLocations blks) {
  // Accumulate every native block into the repeated proto field.
  BlocksWithLocationsProto.Builder builder =
      BlocksWithLocationsProto.newBuilder();
  for (BlockWithLocations block : blks.getBlocks()) {
    builder.addBlocks(convert(block));
  }
  return builder.build();
}
 
Example #7
Source File: NameNode.java    From RDFS with Apache License 2.0 5 votes vote down vote up
/**
 * Return a list of blocks and their locations on <code>datanode</code>
 * whose total size is <code>size</code>.
 *
 * @param datanode the datanode on which the blocks are located
 * @param size total size (in bytes) of blocks to return
 */
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
throws IOException {
  // Reject non-positive sizes up front.
  if (size < 1) {
    throw new IllegalArgumentException(
        "Unexpected not positive size: " + size);
  }

  return namesystem.getBlocks(datanode, size);
}
 
Example #8
Source File: BlockManager.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * return a list of blocks & their locations on <code>datanode</code> whose
 * total size is <code>size</code>
 * 
 * @param datanode on which blocks are located
 * @param size total size of blocks
 */
public BlocksWithLocations getBlocks(DatanodeID datanode, long size
    ) throws IOException {
  // Fail fast before acquiring the lock if READ operations are disallowed.
  namesystem.checkOperation(OperationCategory.READ);
  namesystem.readLock();
  try {
    // Re-check under the read lock: the operation category may have
    // changed between the first check and lock acquisition.
    namesystem.checkOperation(OperationCategory.READ);
    return getBlocksWithLocations(datanode, size);  
  } finally {
    namesystem.readUnlock();
  }
}
 
Example #9
Source File: Dispatcher.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Fetch new blocks of this source from namenode and update this source's
 * block list & {@link Dispatcher#globalBlocks}.
 * 
 * @return the total size of the received blocks in the number of bytes.
 */
private long getBlockList() throws IOException {
  // Cap each fetch so a single RPC never requests more than
  // MAX_BLOCKS_SIZE_TO_FETCH bytes worth of blocks.
  final long size = Math.min(MAX_BLOCKS_SIZE_TO_FETCH, blocksToReceive);
  final BlocksWithLocations newBlocks = nnc.getBlocks(getDatanodeInfo(), size);

  long bytesReceived = 0;
  for (BlockWithLocations blk : newBlocks.getBlocks()) {
    bytesReceived += blk.getBlock().getNumBytes();
    // Lock ordering: globalBlocks first, then the individual block.
    synchronized (globalBlocks) {
      final DBlock block = globalBlocks.get(blk.getBlock());
      synchronized (block) {
        // Replace any stale locations with the ones just reported.
        block.clearLocations();

        // update locations
        final String[] datanodeUuids = blk.getDatanodeUuids();
        final StorageType[] storageTypes = blk.getStorageTypes();
        for (int i = 0; i < datanodeUuids.length; i++) {
          final StorageGroup g = storageGroupMap.get(
              datanodeUuids[i], storageTypes[i]);
          if (g != null) { // not unknown
            block.addLocation(g);
          }
        }
      }
      // Only track blocks not already queued that pass the candidate check.
      if (!srcBlocks.contains(block) && isGoodBlockCandidate(block)) {
        // filter bad candidates
        srcBlocks.add(block);
      }
    }
  }
  return bytesReceived;
}
 
Example #10
Source File: NameNodeRpcServer.java    From hadoop with Apache License 2.0 5 votes vote down vote up
@Override // NamenodeProtocol
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
throws IOException {
  if(size <= 0) {
    throw new IllegalArgumentException(
      "Unexpected not positive size: "+size);
  }
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  return namesystem.getBlockManager().getBlocks(datanode, size); 
}
 
Example #11
Source File: TestPBHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testConvertBlocksWithLocations() {
  // Round-trip a two-element BlocksWithLocations through its protobuf form
  // and verify each contained block survives unchanged.
  BlockWithLocations[] original = new BlockWithLocations[] {
      getBlockWithLocations(1), getBlockWithLocations(2) };
  BlocksWithLocations wrapped = new BlocksWithLocations(original);
  BlocksWithLocations roundTripped =
      PBHelper.convert(PBHelper.convert(wrapped));
  BlockWithLocations[] expected = wrapped.getBlocks();
  BlockWithLocations[] actual = roundTripped.getBlocks();
  assertEquals(expected.length, actual.length);
  for (int idx = 0; idx < expected.length; idx++) {
    compare(expected[idx], actual[idx]);
  }
}
 
Example #12
Source File: NamenodeProtocolTranslatorPB.java    From hadoop with Apache License 2.0 5 votes vote down vote up
@Override
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
    throws IOException {
  // Build the wire request; the datanode is narrowed to its DatanodeID form.
  GetBlocksRequestProto request = GetBlocksRequestProto.newBuilder()
      .setDatanode(PBHelper.convert((DatanodeID) datanode))
      .setSize(size)
      .build();
  try {
    GetBlocksResponseProto response =
        rpcProxy.getBlocks(NULL_CONTROLLER, request);
    return PBHelper.convert(response.getBlocks());
  } catch (ServiceException e) {
    // Unwrap the remote IOException carried inside the ServiceException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example #13
Source File: NamenodeProtocolServerSideTranslatorPB.java    From hadoop with Apache License 2.0 5 votes vote down vote up
@Override
public GetBlocksResponseProto getBlocks(RpcController unused,
    GetBlocksRequestProto request) throws ServiceException {
  // Translate the wire-level datanode descriptor back into a DatanodeInfo.
  DatanodeInfo dnInfo = new DatanodeInfo(
      PBHelper.convert(request.getDatanode()));
  final BlocksWithLocations result;
  try {
    result = impl.getBlocks(dnInfo, request.getSize());
  } catch (IOException e) {
    // The RPC layer expects ServiceException; keep the original as the cause.
    throw new ServiceException(e);
  }
  return GetBlocksResponseProto.newBuilder()
      .setBlocks(PBHelper.convert(result))
      .build();
}
 
Example #14
Source File: PBHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
public static BlocksWithLocations convert(BlocksWithLocationsProto blocks) {
  List<BlockWithLocationsProto> b = blocks.getBlocksList();
  BlockWithLocations[] ret = new BlockWithLocations[b.size()];
  int i = 0;
  for (BlockWithLocationsProto entry : b) {
    ret[i++] = convert(entry);
  }
  return new BlocksWithLocations(ret);
}
 
Example #15
Source File: PBHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
public static BlocksWithLocationsProto convert(BlocksWithLocations blks) {
  BlocksWithLocationsProto.Builder builder = BlocksWithLocationsProto
      .newBuilder();
  for (BlockWithLocations b : blks.getBlocks()) {
    builder.addBlocks(convert(b));
  }
  return builder.build();
}
 
Example #16
Source File: NameNode.java    From hadoop-gpu with Apache License 2.0 5 votes vote down vote up
/**
 * return a list of blocks & their locations on <code>datanode</code> whose
 * total size is <code>size</code>
 * 
 * @param datanode on which blocks are located
 * @param size total size of blocks
 */
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
throws IOException {
  if(size <= 0) {
    throw new IllegalArgumentException(
      "Unexpected not positive size: "+size);
  }

  return namesystem.getBlocks(datanode, size); 
}
 
Example #17
Source File: BlockManager.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * return a list of blocks & their locations on <code>datanode</code> whose
 * total size is <code>size</code>
 * 
 * @param datanode on which blocks are located
 * @param size total size of blocks
 */
public BlocksWithLocations getBlocks(DatanodeID datanode, long size
    ) throws IOException {
  namesystem.checkOperation(OperationCategory.READ);
  namesystem.readLock();
  try {
    namesystem.checkOperation(OperationCategory.READ);
    return getBlocksWithLocations(datanode, size);  
  } finally {
    namesystem.readUnlock();
  }
}
 
Example #18
Source File: Dispatcher.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Fetch new blocks of this source from namenode and update this source's
 * block list & {@link Dispatcher#globalBlocks}.
 * 
 * @return the total size of the received blocks in the number of bytes.
 */
private long getBlockList() throws IOException {
  final long size = Math.min(MAX_BLOCKS_SIZE_TO_FETCH, blocksToReceive);
  final BlocksWithLocations newBlocks = nnc.getBlocks(getDatanodeInfo(), size);

  long bytesReceived = 0;
  for (BlockWithLocations blk : newBlocks.getBlocks()) {
    bytesReceived += blk.getBlock().getNumBytes();
    synchronized (globalBlocks) {
      final DBlock block = globalBlocks.get(blk.getBlock());
      synchronized (block) {
        block.clearLocations();

        // update locations
        final String[] datanodeUuids = blk.getDatanodeUuids();
        final StorageType[] storageTypes = blk.getStorageTypes();
        for (int i = 0; i < datanodeUuids.length; i++) {
          final StorageGroup g = storageGroupMap.get(
              datanodeUuids[i], storageTypes[i]);
          if (g != null) { // not unknown
            block.addLocation(g);
          }
        }
      }
      if (!srcBlocks.contains(block) && isGoodBlockCandidate(block)) {
        // filter bad candidates
        srcBlocks.add(block);
      }
    }
  }
  return bytesReceived;
}
 
Example #19
Source File: FSNamesystem.java    From hadoop-gpu with Apache License 2.0 5 votes vote down vote up
/**
 * return a list of blocks & their locations on <code>datanode</code> whose
 * total size is <code>size</code>
 * 
 * @param datanode on which blocks are located
 * @param size total size of blocks
 */
synchronized BlocksWithLocations getBlocks(DatanodeID datanode, long size)
    throws IOException {
  checkSuperuserPrivilege();

  DatanodeDescriptor node = getDatanode(datanode);
  if (node == null) {
    NameNode.stateChangeLog.warn("BLOCK* NameSystem.getBlocks: "
        + "Asking for blocks from an unrecorded node " + datanode.getName());
    throw new IllegalArgumentException(
        "Unexpected exception.  Got getBlocks message for datanode " + 
        datanode.getName() + ", but there is no info for it");
  }

  int numBlocks = node.numBlocks();
  if(numBlocks == 0) {
    return new BlocksWithLocations(new BlockWithLocations[0]);
  }
  Iterator<Block> iter = node.getBlockIterator();
  int startBlock = r.nextInt(numBlocks); // starting from a random block
  // skip blocks
  for(int i=0; i<startBlock; i++) {
    iter.next();
  }
  List<BlockWithLocations> results = new ArrayList<BlockWithLocations>();
  long totalSize = 0;
  while(totalSize<size && iter.hasNext()) {
    totalSize += addBlock(iter.next(), results);
  }
  if(totalSize<size) {
    iter = node.getBlockIterator(); // start from the beginning
    for(int i=0; i<startBlock&&totalSize<size; i++) {
      totalSize += addBlock(iter.next(), results);
    }
  }
  
  return new BlocksWithLocations(
      results.toArray(new BlockWithLocations[results.size()]));
}
 
Example #20
Source File: NameNodeConnector.java    From big-c with Apache License 2.0 4 votes vote down vote up
/** @return blocks with locations. */
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
    throws IOException {
  return namenode.getBlocks(datanode, size);
}
 
Example #21
Source File: BlockManager.java    From big-c with Apache License 2.0 4 votes vote down vote up
/** Get all blocks with location information from a datanode. */
private BlocksWithLocations getBlocksWithLocations(final DatanodeID datanode,
    final long size) throws UnregisteredNodeException {
  final DatanodeDescriptor node = getDatanodeManager().getDatanode(datanode);
  if (node == null) {
    blockLog.warn("BLOCK* getBlocks: Asking for blocks from an" +
        " unrecorded node {}", datanode);
    throw new HadoopIllegalArgumentException(
        "Datanode " + datanode + " not found.");
  }

  int numBlocks = node.numBlocks();
  if(numBlocks == 0) {
    return new BlocksWithLocations(new BlockWithLocations[0]);
  }
  Iterator<BlockInfoContiguous> iter = node.getBlockIterator();
  int startBlock = DFSUtil.getRandom().nextInt(numBlocks); // starting from a random block
  // skip blocks
  for(int i=0; i<startBlock; i++) {
    iter.next();
  }
  List<BlockWithLocations> results = new ArrayList<BlockWithLocations>();
  long totalSize = 0;
  BlockInfoContiguous curBlock;
  while(totalSize<size && iter.hasNext()) {
    curBlock = iter.next();
    if(!curBlock.isComplete())  continue;
    totalSize += addBlock(curBlock, results);
  }
  if(totalSize<size) {
    iter = node.getBlockIterator(); // start from the beginning
    for(int i=0; i<startBlock&&totalSize<size; i++) {
      curBlock = iter.next();
      if(!curBlock.isComplete())  continue;
      totalSize += addBlock(curBlock, results);
    }
  }

  return new BlocksWithLocations(
      results.toArray(new BlockWithLocations[results.size()]));
}
 
Example #22
Source File: BlockManager.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/** Get all blocks with location information from a datanode. */
private BlocksWithLocations getBlocksWithLocations(final DatanodeID datanode,
    final long size) throws UnregisteredNodeException {
  final DatanodeDescriptor node = getDatanodeManager().getDatanode(datanode);
  if (node == null) {
    blockLog.warn("BLOCK* getBlocks: Asking for blocks from an" +
        " unrecorded node {}", datanode);
    throw new HadoopIllegalArgumentException(
        "Datanode " + datanode + " not found.");
  }

  int numBlocks = node.numBlocks();
  if(numBlocks == 0) {
    return new BlocksWithLocations(new BlockWithLocations[0]);
  }
  Iterator<BlockInfoContiguous> iter = node.getBlockIterator();
  int startBlock = DFSUtil.getRandom().nextInt(numBlocks); // starting from a random block
  // skip blocks
  for(int i=0; i<startBlock; i++) {
    iter.next();
  }
  List<BlockWithLocations> results = new ArrayList<BlockWithLocations>();
  long totalSize = 0;
  BlockInfoContiguous curBlock;
  while(totalSize<size && iter.hasNext()) {
    curBlock = iter.next();
    if(!curBlock.isComplete())  continue;
    totalSize += addBlock(curBlock, results);
  }
  if(totalSize<size) {
    iter = node.getBlockIterator(); // start from the beginning
    for(int i=0; i<startBlock&&totalSize<size; i++) {
      curBlock = iter.next();
      if(!curBlock.isComplete())  continue;
      totalSize += addBlock(curBlock, results);
    }
  }

  return new BlocksWithLocations(
      results.toArray(new BlockWithLocations[results.size()]));
}
 
Example #23
Source File: NameNodeConnector.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/** @return blocks with locations. */
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
    throws IOException {
  return namenode.getBlocks(datanode, size);
}