org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo Java Examples

The following examples show how to use org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo. You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: FSImageSerialization.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Serializes a CacheDirectiveInfo to a binary stream as: id, a presence
 * bitmask for the optional fields, then each present field in a fixed
 * order (path, replication, pool, expiration).
 *
 * @param out stream to write to.
 * @param directive directive to serialize; id must be set.
 * @throws IOException on any write failure.
 */
public static void writeCacheDirectiveInfo(DataOutputStream out,
    CacheDirectiveInfo directive) throws IOException {
  writeLong(directive.getId(), out);
  // Build the presence bitmask incrementally; bit layout must match
  // readCacheDirectiveInfo(DataInput).
  int flags = 0;
  if (directive.getPath() != null) {
    flags |= 0x1;
  }
  if (directive.getReplication() != null) {
    flags |= 0x2;
  }
  if (directive.getPool() != null) {
    flags |= 0x4;
  }
  if (directive.getExpiration() != null) {
    flags |= 0x8;
  }
  out.writeInt(flags);
  if ((flags & 0x1) != 0) {
    writeString(directive.getPath().toUri().getPath(), out);
  }
  if ((flags & 0x2) != 0) {
    writeShort(directive.getReplication(), out);
  }
  if ((flags & 0x4) != 0) {
    writeString(directive.getPool(), out);
  }
  if ((flags & 0x8) != 0) {
    writeLong(directive.getExpiration().getMillis(), out);
  }
}
 
Example #2
Source File: FSImageSerialization.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Rebuilds a CacheDirectiveInfo from its XML stanza. Only the ID element
 * is mandatory; the remaining elements are copied into the builder when
 * present in the stanza.
 *
 * @param st parsed XML stanza.
 * @return the reconstructed directive.
 * @throws InvalidXmlException if a required element is missing.
 */
public static CacheDirectiveInfo readCacheDirectiveInfo(Stanza st)
    throws InvalidXmlException {
  final CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
  builder.setId(Long.parseLong(st.getValue("ID")));
  final String path = st.getValueOrNull("PATH");
  if (path != null) {
    builder.setPath(new Path(path));
  }
  final String replication = st.getValueOrNull("REPLICATION");
  if (replication != null) {
    builder.setReplication(Short.parseShort(replication));
  }
  final String pool = st.getValueOrNull("POOL");
  if (pool != null) {
    builder.setPool(pool);
  }
  final String expiry = st.getValueOrNull("EXPIRATION");
  if (expiry != null) {
    // Edit-log XML stores expirations as absolute epoch millis.
    builder.setExpiration(
        CacheDirectiveInfo.Expiration.newAbsolute(Long.parseLong(expiry)));
  }
  return builder.build();
}
 
Example #3
Source File: NameNodeRpcServer.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Override // ClientProtocol
public void modifyCacheDirective(
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  // Consult the retry cache: if this exact client call already completed
  // successfully (a retry after a lost response), return without
  // re-applying the modification.
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return;
  }

  boolean success = false;
  try {
    // cacheEntry != null tells the namesystem to mark the edit-log record
    // as retry-cacheable.
    namesystem.modifyCacheDirective(directive, flags, cacheEntry != null);
    success = true;
  } finally {
    // Record the outcome so a later retry of the same call short-circuits.
    RetryCache.setState(cacheEntry, success);
  }
}
 
Example #4
Source File: ClientNamenodeProtocolTranslatorPB.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Lists cache directives after {@code prevId} over the protobuf RPC proxy.
 * A null filter is widened to an empty filter (which matches everything)
 * because the request proto requires the filter field.
 */
@Override
public BatchedEntries<CacheDirectiveEntry>
    listCacheDirectives(long prevId,
        CacheDirectiveInfo filter) throws IOException {
  if (filter == null) {
    filter = new CacheDirectiveInfo.Builder().build();
  }
  // Build the request up front; only the RPC itself can raise
  // ServiceException, which carries the remote IOException.
  ListCacheDirectivesRequestProto req =
      ListCacheDirectivesRequestProto.newBuilder()
          .setPrevId(prevId)
          .setFilter(PBHelper.convert(filter))
          .build();
  try {
    return new BatchedCacheEntries(rpcProxy.listCacheDirectives(null, req));
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example #5
Source File: NameNodeRpcServer.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Override // ClientProtocol
public long addCacheDirective(
    CacheDirectiveInfo path, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  // Retry cache with payload: a retried call that already succeeded must
  // return the directive ID assigned the first time, not mint a new one.
  CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion
    (retryCache, null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (Long) cacheEntry.getPayload();
  }

  boolean success = false;
  long ret = 0;
  try {
    // cacheEntry != null marks the edit-log record as retry-cacheable.
    ret = namesystem.addCacheDirective(path, flags, cacheEntry != null);
    success = true;
  } finally {
    // Store outcome plus ID so later retries return the same ID.
    RetryCache.setState(cacheEntry, success, ret);
  }
  return ret;
}
 
Example #6
Source File: ClientNamenodeProtocolTranslatorPB.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Override
public BatchedEntries<CacheDirectiveEntry>
    listCacheDirectives(long prevId,
        CacheDirectiveInfo filter) throws IOException {
  // A null filter is replaced with an empty one, which matches every
  // directive, because the request proto requires the filter field.
  if (filter == null) {
    filter = new CacheDirectiveInfo.Builder().build();
  }
  try {
    return new BatchedCacheEntries(
      rpcProxy.listCacheDirectives(null,
        ListCacheDirectivesRequestProto.newBuilder().
          setPrevId(prevId).
          setFilter(PBHelper.convert(filter)).
          build()));
  } catch (ServiceException e) {
    // Unwrap the remote IOException carried inside the ServiceException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example #7
Source File: ClientNamenodeProtocolServerSideTranslatorPB.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Server-side protobuf translator: proto request -> native
 * listCacheDirectives call -> proto response. IOExceptions are wrapped in
 * ServiceException per the protobuf RPC contract.
 */
@Override
public ListCacheDirectivesResponseProto listCacheDirectives(
    RpcController controller, ListCacheDirectivesRequestProto request)
        throws ServiceException {
  try {
    CacheDirectiveInfo filter = PBHelper.convert(request.getFilter());
    BatchedEntries<CacheDirectiveEntry> entries =
        server.listCacheDirectives(request.getPrevId(), filter);
    ListCacheDirectivesResponseProto.Builder response =
        ListCacheDirectivesResponseProto.newBuilder();
    response.setHasMore(entries.hasMore());
    final int count = entries.size();
    for (int i = 0; i < count; i++) {
      response.addElements(PBHelper.convert(entries.get(i)));
    }
    return response.build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Example #8
Source File: TestRetryCacheWithHA.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Add a list of cache directives, list cache directives,
 * switch active NN, and list cache directives again.
 */
@Test (timeout=60000)
public void testListCacheDirectives() throws Exception {
  final int poolCount = 7;
  HashSet<String> poolNames = new HashSet<String>(poolCount);
  Path path = new Path("/p");
  // One pool per directive; all directives share the same path.
  for (int i=0; i<poolCount; i++) {
    String poolName = "testListCacheDirectives-" + i;
    CacheDirectiveInfo directiveInfo =
      new CacheDirectiveInfo.Builder().setPool(poolName).setPath(path).build();
    dfs.addCachePool(new CachePoolInfo(poolName));
    // FORCE skips the free-space check when adding the directive.
    dfs.addCacheDirective(directiveInfo, EnumSet.of(CacheFlag.FORCE));
    poolNames.add(poolName);
  }
  // Verify the listing against NN 0 while it is still active.
  listCacheDirectives(poolNames, 0);

  // Fail over to NN 1 and verify the same directives are listed there.
  cluster.transitionToStandby(0);
  cluster.transitionToActive(1);
  cluster.waitActive(1);
  listCacheDirectives(poolNames, 1);
}
 
Example #9
Source File: CacheRegistry.java    From nnproxy with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches every cache directive visible through the given upstream by
 * paging through the namenode's batched listing API.
 *
 * @param upstream upstream whose ClientProtocol proxy is queried.
 * @return all cache directive entries, in listing order.
 * @throws IOException if any listing RPC fails.
 */
List<CacheDirectiveEntry> getAllCacheDirectives(UpstreamManager.Upstream upstream) throws IOException {
    // An empty filter matches every directive.
    CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder().build();
    List<CacheDirectiveEntry> directives = new ArrayList<>();
    long prevId = -1;
    while (true) {
        BatchedRemoteIterator.BatchedEntries<CacheDirectiveEntry> batch =
                upstream.protocol.listCacheDirectives(prevId, filter);
        if (batch.size() == 0) {
            break;
        }
        for (int i = 0; i < batch.size(); i++) {
            CacheDirectiveEntry entry = batch.get(i);
            // Advance the cursor so the next call resumes after this entry.
            prevId = entry.getInfo().getId();
            directives.add(entry);
        }
        // Stop via hasMore() instead of waiting for an empty batch; this
        // saves one final round-trip per full listing.
        if (!batch.hasMore()) {
            break;
        }
    }
    return directives;
}
 
Example #10
Source File: TestRetryCacheWithHA.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Override
boolean checkNamenodeBeforeReturn() throws Exception {
  // Poll up to CHECKTIMES times, one second apart, listing directives that
  // match the op's pool and path.
  // NOTE(review): the negated hasNext() suggests this op verifies the
  // directive's REMOVAL became visible — confirm against the enclosing op.
  for (int i = 0; i < CHECKTIMES; i++) {
    RemoteIterator<CacheDirectiveEntry> iter =
        dfs.listCacheDirectives(
            new CacheDirectiveInfo.Builder().
              setPool(directive.getPool()).
              setPath(directive.getPath()).
              build());
    if (!iter.hasNext()) {
      return true;
    }
    Thread.sleep(1000);
  }
  // Condition never observed within the polling window.
  return false;
}
 
Example #11
Source File: FSImageSerialization.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Rebuilds a CacheDirectiveInfo from its edit-log XML stanza. Only the ID
 * element is required; PATH, REPLICATION, POOL, and EXPIRATION are copied
 * into the builder when present.
 *
 * @param st parsed XML stanza.
 * @return the reconstructed directive.
 * @throws InvalidXmlException if a required element is missing.
 */
public static CacheDirectiveInfo readCacheDirectiveInfo(Stanza st)
    throws InvalidXmlException {
  CacheDirectiveInfo.Builder builder =
      new CacheDirectiveInfo.Builder();
  builder.setId(Long.parseLong(st.getValue("ID")));
  String path = st.getValueOrNull("PATH");
  if (path != null) {
    builder.setPath(new Path(path));
  }
  String replicationString = st.getValueOrNull("REPLICATION");
  if (replicationString != null) {
    builder.setReplication(Short.parseShort(replicationString));
  }
  String pool = st.getValueOrNull("POOL");
  if (pool != null) {
    builder.setPool(pool);
  }
  String expiryTime = st.getValueOrNull("EXPIRATION");
  if (expiryTime != null) {
    // EXPIRATION is stored as absolute epoch milliseconds.
    builder.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(
        Long.parseLong(expiryTime)));
  }
  return builder.build();
}
 
Example #12
Source File: FSImageSerialization.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Deserializes a CacheDirectiveInfo written by
 * writeCacheDirectiveInfo(DataOutputStream, CacheDirectiveInfo): id, a
 * presence bitmask, then each present optional field in order.
 *
 * @param in stream to read from.
 * @return the reconstructed directive.
 * @throws IOException on read failure or unknown flag bits.
 */
public static CacheDirectiveInfo readCacheDirectiveInfo(DataInput in)
    throws IOException {
  CacheDirectiveInfo.Builder builder =
      new CacheDirectiveInfo.Builder();
  builder.setId(readLong(in));
  int flags = in.readInt();
  // Reject unknown flag bits before consuming any optional field so a
  // corrupt record fails fast instead of after misaligned reads.
  if ((flags & ~0xF) != 0) {
    throw new IOException("unknown flags set in " +
        "ModifyCacheDirectiveInfoOp: " + flags);
  }
  if ((flags & 0x1) != 0) {
    builder.setPath(new Path(readString(in)));
  }
  if ((flags & 0x2) != 0) {
    builder.setReplication(readShort(in));
  }
  if ((flags & 0x4) != 0) {
    builder.setPool(readString(in));
  }
  if ((flags & 0x8) != 0) {
    // Persisted expirations are absolute epoch milliseconds.
    builder.setExpiration(
        CacheDirectiveInfo.Expiration.newAbsolute(readLong(in)));
  }
  return builder.build();
}
 
Example #13
Source File: TestRetryCacheWithHA.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Override
boolean checkNamenodeBeforeReturn() throws Exception {
  // Poll up to CHECKTIMES times, one second apart, listing directives that
  // match the op's pool and path; succeed once a match is visible.
  for (int i = 0; i < CHECKTIMES; i++) {
    RemoteIterator<CacheDirectiveEntry> iter =
        dfs.listCacheDirectives(
            new CacheDirectiveInfo.Builder().
                setPool(directive.getPool()).
                setPath(directive.getPath()).
                build());
    if (iter.hasNext()) {
      return true;
    }
    Thread.sleep(1000);
  }
  // The directive never became visible within the polling window.
  return false;
}
 
Example #14
Source File: CacheManager.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Factory method that makes a new CacheDirectiveInfo by applying fields in a
 * CacheDirectiveInfo to an existing CacheDirective.
 *
 * @param info with some or all fields set.
 * @param defaults directive providing default values for unset fields in
 *          info.
 *
 * @return new CacheDirectiveInfo of the info applied to the defaults.
 */
private static CacheDirectiveInfo createFromInfoAndDefaults(
    CacheDirectiveInfo info, CacheDirective defaults) {
  // Start from the defaults, then overlay every field explicitly set in info.
  CacheDirectiveInfo.Builder merged =
      new CacheDirectiveInfo.Builder(defaults.toInfo());
  Path path = info.getPath();
  if (path != null) {
    merged.setPath(path);
  }
  Short replication = info.getReplication();
  if (replication != null) {
    merged.setReplication(replication);
  }
  String pool = info.getPool();
  if (pool != null) {
    merged.setPool(pool);
  }
  CacheDirectiveInfo.Expiration expiration = info.getExpiration();
  if (expiration != null) {
    merged.setExpiration(expiration);
  }
  return merged.build();
}
 
Example #15
Source File: PBHelper.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a CacheDirectiveInfoProto into a CacheDirectiveInfo, copying
 * each optional proto field into the builder only when it is present.
 */
public static CacheDirectiveInfo convert(CacheDirectiveInfoProto proto) {
  final CacheDirectiveInfo.Builder builder =
      new CacheDirectiveInfo.Builder();
  if (proto.hasId()) {
    builder.setId(proto.getId());
  }
  if (proto.hasPath()) {
    builder.setPath(new Path(proto.getPath()));
  }
  if (proto.hasReplication()) {
    // The proto carries replication as an int; narrow to short with an
    // overflow check.
    builder.setReplication(Shorts.checkedCast(proto.getReplication()));
  }
  if (proto.hasPool()) {
    builder.setPool(proto.getPool());
  }
  if (proto.hasExpiration()) {
    builder.setExpiration(convert(proto.getExpiration()));
  }
  return builder.build();
}
 
Example #16
Source File: FSNDNCacheOp.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a cache directive on behalf of an RPC caller: checks permissions,
 * delegates to the CacheManager, and logs the resulting directive to the
 * edit log.
 */
static CacheDirectiveInfo addCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager,
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags,
    boolean logRetryCache)
    throws IOException {

  final FSPermissionChecker pc = getFsPermissionChecker(fsn);

  // IDs are assigned by the CacheManager; a caller-supplied ID is an error.
  if (directive.getId() != null) {
    throw new IOException("addDirective: you cannot specify an ID " +
        "for this operation.");
  }
  CacheDirectiveInfo effectiveDirective =
      cacheManager.addDirective(directive, pc, flags);
  // Log the fully-resolved directive (defaults and new ID filled in) so
  // edit-log replay is deterministic.
  fsn.getEditLog().logAddCacheDirectiveInfo(effectiveDirective,
      logRetryCache);
  return effectiveDirective;
}
 
Example #17
Source File: CacheManager.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Load cache directives from the fsimage
 */
private void loadDirectives(DataInput in) throws IOException {
  // Report progress through the NameNode startup-progress framework.
  StartupProgress prog = NameNode.getStartupProgress();
  Step step = new Step(StepType.CACHE_ENTRIES);
  prog.beginStep(Phase.LOADING_FSIMAGE, step);
  int numDirectives = in.readInt();
  prog.setTotal(Phase.LOADING_FSIMAGE, step, numDirectives);
  Counter counter = prog.getCounter(Phase.LOADING_FSIMAGE, step);
  for (int i = 0; i < numDirectives; i++) {
    CacheDirectiveInfo info = FSImageSerialization.readCacheDirectiveInfo(in);
    // Get pool reference by looking it up in the map
    final String poolName = info.getPool();
    // NOTE(review): id/path/replication/expiration are dereferenced without
    // null checks — assumes every field was persisted in the fsimage
    // record; confirm against the fsimage writer.
    CacheDirective directive =
        new CacheDirective(info.getId(), info.getPath().toUri().getPath(),
            info.getReplication(), info.getExpiration().getAbsoluteMillis());
    addCacheDirective(poolName, directive);
    counter.increment();
  }
  prog.endStep(Phase.LOADING_FSIMAGE, step);
}
 
Example #18
Source File: TestCacheDirectives.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verify that caching a file larger than total cache capacity caches what
 * fits and leaves no replicas stuck in the pending-cached queue, including
 * the case where a single block exceeds capacity.
 */
@Test(timeout=60000)
public void testExceedsCapacity() throws Exception {
  // Create a giant file
  final Path fileName = new Path("/exceeds");
  final long fileLen = CACHE_CAPACITY * (NUM_DATANODES*2);
  int numCachedReplicas = (int) ((CACHE_CAPACITY*NUM_DATANODES)/BLOCK_SIZE);
  DFSTestUtil.createFile(dfs, fileName, fileLen, (short) NUM_DATANODES,
      0xFADED);
  dfs.addCachePool(new CachePoolInfo("pool"));
  dfs.addCacheDirective(new CacheDirectiveInfo.Builder().setPool("pool")
      .setPath(fileName).setReplication((short) 1).build());
  waitForCachedBlocks(namenode, -1, numCachedReplicas,
      "testExceeds:1");
  // Check twice with a pause to catch pending-cached entries that would
  // only appear after another cache-replication monitor pass.
  checkPendingCachedEmpty(cluster);
  Thread.sleep(1000);
  checkPendingCachedEmpty(cluster);

  // Try creating a file with giant-sized blocks that exceed cache capacity
  dfs.delete(fileName, false);
  DFSTestUtil.createFile(dfs, fileName, 4096, fileLen, CACHE_CAPACITY * 2,
      (short) 1, 0xFADED);
  checkPendingCachedEmpty(cluster);
  Thread.sleep(1000);
  checkPendingCachedEmpty(cluster);
}
 
Example #19
Source File: PBHelper.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a CacheDirectiveInfo into its protobuf form, setting only the
 * fields that are non-null on the source object.
 */
public static CacheDirectiveInfoProto convert(CacheDirectiveInfo info) {
  CacheDirectiveInfoProto.Builder b = CacheDirectiveInfoProto.newBuilder();
  Long id = info.getId();
  if (id != null) {
    b.setId(id);
  }
  Path path = info.getPath();
  if (path != null) {
    b.setPath(path.toUri().getPath());
  }
  Short replication = info.getReplication();
  if (replication != null) {
    b.setReplication(replication);
  }
  String pool = info.getPool();
  if (pool != null) {
    b.setPool(pool);
  }
  CacheDirectiveInfo.Expiration expiration = info.getExpiration();
  if (expiration != null) {
    b.setExpiration(convert(expiration));
  }
  return b.build();
}
 
Example #20
Source File: ClientNamenodeProtocolServerSideTranslatorPB.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Override
public ListCacheDirectivesResponseProto listCacheDirectives(
    RpcController controller, ListCacheDirectivesRequestProto request)
        throws ServiceException {
  try {
    CacheDirectiveInfo filter =
        PBHelper.convert(request.getFilter());
    BatchedEntries<CacheDirectiveEntry> entries =
      server.listCacheDirectives(request.getPrevId(), filter);
    ListCacheDirectivesResponseProto.Builder builder =
        ListCacheDirectivesResponseProto.newBuilder();
    builder.setHasMore(entries.hasMore());
    for (int i=0, n=entries.size(); i<n; i++) {
      builder.addElements(PBHelper.convert(entries.get(i)));
    }
    return builder.build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Example #21
Source File: FSImageSerialization.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Serializes a CacheDirectiveInfo as XML via SAX events. The ID element is
 * always written; the other elements are emitted only when the
 * corresponding field is set.
 *
 * @param contentHandler SAX sink to emit elements to.
 * @param directive directive to serialize; id must be set.
 * @throws SAXException propagated from the content handler.
 */
public static void writeCacheDirectiveInfo(ContentHandler contentHandler,
    CacheDirectiveInfo directive) throws SAXException {
  XMLUtils.addSaxString(contentHandler, "ID",
      Long.toString(directive.getId()));
  Path path = directive.getPath();
  if (path != null) {
    XMLUtils.addSaxString(contentHandler, "PATH", path.toUri().getPath());
  }
  Short replication = directive.getReplication();
  if (replication != null) {
    XMLUtils.addSaxString(contentHandler, "REPLICATION",
        Short.toString(replication));
  }
  String pool = directive.getPool();
  if (pool != null) {
    XMLUtils.addSaxString(contentHandler, "POOL", pool);
  }
  CacheDirectiveInfo.Expiration expiration = directive.getExpiration();
  if (expiration != null) {
    XMLUtils.addSaxString(contentHandler, "EXPIRATION",
        Long.toString(expiration.getMillis()));
  }
}
 
Example #22
Source File: CacheManager.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Modifies a directive, skipping most error checking. This is for careful
 * internal use only. modifyDirective can be non-deterministic since its error
 * checking depends on current system time, which poses a problem for edit log
 * replay.
 */
void modifyDirectiveFromEditLog(CacheDirectiveInfo info)
    throws InvalidRequestException {
  // Check for invalid IDs.
  Long id = info.getId();
  if (id == null) {
    throw new InvalidRequestException("Must supply an ID.");
  }
  CacheDirective prevEntry = getById(id);
  // Merge the logged changes over the existing directive's values.
  CacheDirectiveInfo newInfo = createFromInfoAndDefaults(info, prevEntry);
  // Modification is implemented as remove-then-add of the merged directive.
  removeInternal(prevEntry);
  addInternal(new CacheDirective(newInfo), getCachePool(newInfo.getPool()));
}
 
Example #23
Source File: TestRetryCacheWithHA.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Verify that a client retry of addCacheDirective across an HA failover is
 * served with at-most-once semantics by the retry cache.
 */
@Test (timeout=60000)
public void testAddCacheDirectiveInfo() throws Exception {
  DFSClient client = genClientWithDummyHandler();
  AtMostOnceOp op = new AddCacheDirectiveInfoOp(client, 
      new CacheDirectiveInfo.Builder().
          setPool("pool").
          setPath(new Path("/path")).
          build());
  testClientRetryWithFailover(op);
}
 
Example #24
Source File: PBHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Converts an expiration proto into the client-side Expiration type.
 * Relative expirations count from "now"; absolute ones are epoch millis.
 */
public static CacheDirectiveInfo.Expiration convert(
    CacheDirectiveInfoExpirationProto proto) {
  final long millis = proto.getMillis();
  return proto.getIsRelative()
      ? CacheDirectiveInfo.Expiration.newRelative(millis)
      : CacheDirectiveInfo.Expiration.newAbsolute(millis);
}
 
Example #25
Source File: CacheManager.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Adds a directive, skipping most error checking. This should only be called
 * internally in special scenarios like edit log replay.
 */
CacheDirectiveInfo addDirectiveFromEditLog(CacheDirectiveInfo directive)
    throws InvalidRequestException {
  // A directive restored from the edit log always carries its original ID.
  long id = directive.getId();
  CacheDirective entry = new CacheDirective(directive);
  CachePool pool = cachePools.get(directive.getPool());
  addInternal(entry, pool);
  // Keep the ID generator ahead of every replayed ID so freshly assigned
  // IDs never collide with ones restored from the log.
  if (nextDirectiveId <= id) {
    nextDirectiveId = id + 1;
  }
  return entry.toInfo();
}
 
Example #26
Source File: CacheAdmin.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Parses a TTL string into an Expiration. A null input yields null;
 * "never" (any case) yields Expiration.NEVER; anything else is parsed as a
 * relative TTL via DFSUtil.parseRelativeTime.
 *
 * @param ttlString TTL string, or null.
 * @return the parsed expiration, or null for null input.
 * @throws IOException if the TTL string cannot be parsed.
 */
private static CacheDirectiveInfo.Expiration parseExpirationString(String ttlString)
    throws IOException {
  if (ttlString == null) {
    return null;
  }
  if (ttlString.equalsIgnoreCase("never")) {
    return CacheDirectiveInfo.Expiration.NEVER;
  }
  return CacheDirectiveInfo.Expiration.newRelative(
      DFSUtil.parseRelativeTime(ttlString));
}
 
Example #27
Source File: CacheManager.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the effective replication factor for a directive, falling back
 * to the supplied default when the directive leaves it unset, and rejects
 * non-positive values.
 *
 * @param directive directive whose replication is being validated.
 * @param defaultValue replication to use when the directive has none.
 * @return the validated, positive replication factor.
 * @throws InvalidRequestException if the resolved value is not positive.
 */
private static short validateReplication(CacheDirectiveInfo directive,
    short defaultValue) throws InvalidRequestException {
  final Short requested = directive.getReplication();
  final short repl = (requested != null) ? requested : defaultValue;
  if (repl <= 0) {
    throw new InvalidRequestException("Invalid replication factor " + repl
        + " <= 0");
  }
  return repl;
}
 
Example #28
Source File: PBHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Converts an expiration proto into the client-side Expiration type,
 * preserving whether the millis value is relative to "now" or an absolute
 * epoch timestamp.
 */
public static CacheDirectiveInfo.Expiration convert(
    CacheDirectiveInfoExpirationProto proto) {
  if (proto.getIsRelative()) {
    return CacheDirectiveInfo.Expiration.newRelative(proto.getMillis());
  }
  return CacheDirectiveInfo.Expiration.newAbsolute(proto.getMillis());
}
 
Example #29
Source File: PBHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Converts an Expiration into its protobuf form; both the relative flag
 * and the millis value round-trip through the proto.
 */
public static CacheDirectiveInfoExpirationProto convert(
    CacheDirectiveInfo.Expiration expiration) {
  CacheDirectiveInfoExpirationProto.Builder builder =
      CacheDirectiveInfoExpirationProto.newBuilder();
  builder.setIsRelative(expiration.isRelative());
  builder.setMillis(expiration.getMillis());
  return builder.build();
}
 
Example #30
Source File: CacheManager.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Extracts and validates the path component of a directive.
 *
 * @param directive directive whose path is being validated.
 * @return the path string of the directive's URI.
 * @throws InvalidRequestException if no path is set or it is not a valid
 *           HDFS name.
 */
private static String validatePath(CacheDirectiveInfo directive)
    throws InvalidRequestException {
  final Path p = directive.getPath();
  if (p == null) {
    throw new InvalidRequestException("No path specified.");
  }
  final String path = p.toUri().getPath();
  if (!DFSUtil.isValidName(path)) {
    throw new InvalidRequestException("Invalid path '" + path + "'.");
  }
  return path;
}