Java Code Examples for org.apache.hadoop.yarn.server.utils.LeveldbIterator#next()

The following examples show how to use org.apache.hadoop.yarn.server.utils.LeveldbIterator#next(). Each example is taken from an open source project; the source file, originating project, and license are listed above the code, so you can look up the surrounding context in the original source.
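
All of the examples below share the same prefix-scan pattern: seek the iterator to a key prefix, call peekNext() to inspect the current entry without advancing, stop once the key no longer carries the prefix, and only then call next() to move forward. The sketch below condenses that pattern; the db handle, LOG, and the startsWith() and handleValue() helpers are hypothetical placeholders added for illustration, not part of the Hadoop API.

private int scanPrefix(DB db, byte[] prefix) throws IOException {
  int count = 0;
  LeveldbIterator iterator = null;
  try {
    iterator = new LeveldbIterator(db);
    // Position the iterator at the first key >= prefix.
    for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
      // peekNext() returns the current entry without advancing the iterator.
      Entry<byte[], byte[]> entry = iterator.peekNext();
      if (!startsWith(entry.getKey(), prefix)) {
        break;  // left the range of keys sharing the prefix
      }
      handleValue(entry.getValue());  // hypothetical per-entry processing
      ++count;
    }
  } catch (DBException e) {
    // LeveldbIterator reports storage errors as the unchecked DBException;
    // several of the examples rewrap it as a checked IOException.
    throw new IOException(e);
  } finally {
    // Always release the underlying native iterator.
    IOUtils.cleanup(LOG, iterator);
  }
  return count;
}

Calling peekNext() for the key check and deferring next() to the loop update keeps the current entry available for the whole loop body, which is why the examples read both the key and the value from peekNext() before advancing.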
Example 1
Source File: NMLeveldbStateStoreService.java    From hadoop with Apache License 2.0
private List<LocalizedResourceProto> loadCompletedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  List<LocalizedResourceProto> rsrcs =
      new ArrayList<LocalizedResourceProto>();
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading completed resource from " + key);
    }
    rsrcs.add(LocalizedResourceProto.parseFrom(entry.getValue()));
    iter.next();
  }

  return rsrcs;
}
 
Example 2
Source File: LeveldbTimelineStateStore.java    From big-c with Apache License 2.0
private int loadTokens(TimelineServiceState state) throws IOException {
  byte[] base = KeyBuilder.newInstance().add(TOKEN_ENTRY_PREFIX)
      .getBytesForLookup();
  int numTokens = 0;
  LeveldbIterator iterator = null;
  try {
    for (iterator = new LeveldbIterator(db), iterator.seek(base);
        iterator.hasNext(); iterator.next()) {
      byte[] k = iterator.peekNext().getKey();
      if (!prefixMatches(base, base.length, k)) {
        break;
      }
      byte[] v = iterator.peekNext().getValue();
      loadTokenData(state, v);
      ++numTokens;
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return numTokens;
}
 
Example 3
Source File: LeveldbTimelineStateStore.java    From big-c with Apache License 2.0
private int loadTokenMasterKeys(TimelineServiceState state)
    throws IOException {
  byte[] base = KeyBuilder.newInstance().add(TOKEN_MASTER_KEY_ENTRY_PREFIX)
      .getBytesForLookup();
  int numKeys = 0;
  LeveldbIterator iterator = null;
  try {
    for (iterator = new LeveldbIterator(db), iterator.seek(base);
        iterator.hasNext(); iterator.next()) {
      byte[] k = iterator.peekNext().getKey();
      if (!prefixMatches(base, base.length, k)) {
        break;
      }
      byte[] v = iterator.peekNext().getValue();
      loadTokenMasterKeyData(state, v);
      ++numKeys;
    }
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return numKeys;
}
 
Example 4
Source File: NMLeveldbStateStoreService.java    From big-c with Apache License 2.0
private Map<LocalResourceProto, Path> loadStartedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  Map<LocalResourceProto, Path> rsrcs =
      new HashMap<LocalResourceProto, Path>();
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }

    Path localPath = new Path(key.substring(keyPrefix.length()));
    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading in-progress resource at " + localPath);
    }
    rsrcs.put(LocalResourceProto.parseFrom(entry.getValue()), localPath);
    iter.next();
  }

  return rsrcs;
}
 
Example 5
Source File: NMLeveldbStateStoreService.java    From big-c with Apache License 2.0
private List<LocalizedResourceProto> loadCompletedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  List<LocalizedResourceProto> rsrcs =
      new ArrayList<LocalizedResourceProto>();
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading completed resource from " + key);
    }
    rsrcs.add(LocalizedResourceProto.parseFrom(entry.getValue()));
    iter.next();
  }

  return rsrcs;
}
 
Example 6
Source File: LeveldbTimelineStateStore.java    From hadoop with Apache License 2.0
private int loadTokens(TimelineServiceState state) throws IOException {
  byte[] base = KeyBuilder.newInstance().add(TOKEN_ENTRY_PREFIX)
      .getBytesForLookup();
  int numTokens = 0;
  LeveldbIterator iterator = null;
  try {
    for (iterator = new LeveldbIterator(db), iterator.seek(base);
        iterator.hasNext(); iterator.next()) {
      byte[] k = iterator.peekNext().getKey();
      if (!prefixMatches(base, base.length, k)) {
        break;
      }
      byte[] v = iterator.peekNext().getValue();
      loadTokenData(state, v);
      ++numTokens;
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return numTokens;
}
 
Example 7
Source File: LeveldbTimelineStateStore.java    From hadoop with Apache License 2.0
private int loadTokenMasterKeys(TimelineServiceState state)
    throws IOException {
  byte[] base = KeyBuilder.newInstance().add(TOKEN_MASTER_KEY_ENTRY_PREFIX)
      .getBytesForLookup();
  int numKeys = 0;
  LeveldbIterator iterator = null;
  try {
    for (iterator = new LeveldbIterator(db), iterator.seek(base);
        iterator.hasNext(); iterator.next()) {
      byte[] k = iterator.peekNext().getKey();
      if (!prefixMatches(base, base.length, k)) {
        break;
      }
      byte[] v = iterator.peekNext().getValue();
      loadTokenMasterKeyData(state, v);
      ++numKeys;
    }
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return numKeys;
}
 
Example 8
Source File: NMLeveldbStateStoreService.java    From hadoop with Apache License 2.0
private Map<LocalResourceProto, Path> loadStartedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  Map<LocalResourceProto, Path> rsrcs =
      new HashMap<LocalResourceProto, Path>();
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }

    Path localPath = new Path(key.substring(keyPrefix.length()));
    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading in-progress resource at " + localPath);
    }
    rsrcs.put(LocalResourceProto.parseFrom(entry.getValue()), localPath);
    iter.next();
  }

  return rsrcs;
}
 
Example 9
Source File: LeveldbTimelineStore.java    From hadoop with Apache License 2.0
private static TimelineDomain getTimelineDomain(
    LeveldbIterator iterator, String domainId, byte[] prefix) throws IOException {
  // Iterate over all the rows whose key starts with prefix to retrieve the
  // domain information.
  TimelineDomain domain = new TimelineDomain();
  domain.setId(domainId);
  boolean noRows = true;
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefix.length, key)) {
      break;
    }
    if (noRows) {
      noRows = false;
    }
    byte[] value = iterator.peekNext().getValue();
    if (value != null && value.length > 0) {
      if (key[prefix.length] == DESCRIPTION_COLUMN[0]) {
        domain.setDescription(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == OWNER_COLUMN[0]) {
        domain.setOwner(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == READER_COLUMN[0]) {
        domain.setReaders(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == WRITER_COLUMN[0]) {
        domain.setWriters(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == TIMESTAMP_COLUMN[0]) {
        domain.setCreatedTime(readReverseOrderedLong(value, 0));
        domain.setModifiedTime(readReverseOrderedLong(value, 8));
      } else {
        LOG.error("Unrecognized domain column: " + key[prefix.length]);
      }
    }
  }
  if (noRows) {
    return null;
  } else {
    return domain;
  }
}
 
Example 10
Source File: LeveldbTimelineStore.java    From hadoop with Apache License 2.0
/**
 * Finds all keys in the db that have a given prefix and deletes them on
 * the given write batch.
 */
private void deleteKeysWithPrefix(WriteBatch writeBatch, byte[] prefix,
    LeveldbIterator iterator) {
  for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefix.length, key)) {
      break;
    }
    writeBatch.delete(key);
  }
}
 
Example 11
Source File: NMLeveldbStateStoreService.java    From big-c with Apache License 2.0
private RecoveredContainerState loadContainerState(ContainerId containerId,
    LeveldbIterator iter, String keyPrefix) throws IOException {
  RecoveredContainerState rcs = new RecoveredContainerState();
  rcs.status = RecoveredContainerStatus.REQUESTED;
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    iter.next();

    String suffix = key.substring(keyPrefix.length()-1);  // suffix starts with '/'
    if (suffix.equals(CONTAINER_REQUEST_KEY_SUFFIX)) {
      rcs.startRequest = new StartContainerRequestPBImpl(
          StartContainerRequestProto.parseFrom(entry.getValue()));
    } else if (suffix.equals(CONTAINER_DIAGS_KEY_SUFFIX)) {
      rcs.diagnostics = asString(entry.getValue());
    } else if (suffix.equals(CONTAINER_LAUNCHED_KEY_SUFFIX)) {
      if (rcs.status == RecoveredContainerStatus.REQUESTED) {
        rcs.status = RecoveredContainerStatus.LAUNCHED;
      }
    } else if (suffix.equals(CONTAINER_KILLED_KEY_SUFFIX)) {
      rcs.killed = true;
    } else if (suffix.equals(CONTAINER_EXIT_CODE_KEY_SUFFIX)) {
      rcs.status = RecoveredContainerStatus.COMPLETED;
      rcs.exitCode = Integer.parseInt(asString(entry.getValue()));
    } else {
      throw new IOException("Unexpected container state key: " + key);
    }
  }
  return rcs;
}
 
Example 12
Source File: LeveldbRMStateStore.java    From hadoop with Apache License 2.0
private int loadRMApp(RMState rmState, LeveldbIterator iter, String appIdStr,
    byte[] appData) throws IOException {
  ApplicationStateData appState = createApplicationState(appIdStr, appData);
  ApplicationId appId =
      appState.getApplicationSubmissionContext().getApplicationId();
  rmState.appState.put(appId, appState);
  String attemptNodePrefix = getApplicationNodeKey(appId) + SEPARATOR;
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(attemptNodePrefix)) {
      break;
    }

    String attemptId = key.substring(attemptNodePrefix.length());
    if (attemptId.startsWith(ApplicationAttemptId.appAttemptIdStrPrefix)) {
      ApplicationAttemptStateData attemptState =
          createAttemptState(attemptId, entry.getValue());
      appState.attempts.put(attemptState.getAttemptId(), attemptState);
    } else {
      LOG.warn("Ignoring unknown application key: " + key);
    }
    iter.next();
  }
  int numAttempts = appState.attempts.size();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Loaded application " + appId + " with " + numAttempts
        + " attempts");
  }
  return numAttempts;
}
 
Example 13
Source File: LeveldbRMStateStore.java    From big-c with Apache License 2.0
private int loadRMApp(RMState rmState, LeveldbIterator iter, String appIdStr,
    byte[] appData) throws IOException {
  ApplicationStateData appState = createApplicationState(appIdStr, appData);
  ApplicationId appId =
      appState.getApplicationSubmissionContext().getApplicationId();
  rmState.appState.put(appId, appState);
  String attemptNodePrefix = getApplicationNodeKey(appId) + SEPARATOR;
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(attemptNodePrefix)) {
      break;
    }

    String attemptId = key.substring(attemptNodePrefix.length());
    if (attemptId.startsWith(ApplicationAttemptId.appAttemptIdStrPrefix)) {
      ApplicationAttemptStateData attemptState =
          createAttemptState(attemptId, entry.getValue());
      appState.attempts.put(attemptState.getAttemptId(), attemptState);
    } else {
      LOG.warn("Ignoring unknown application key: " + key);
    }
    iter.next();
  }
  int numAttempts = appState.attempts.size();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Loaded application " + appId + " with " + numAttempts
        + " attempts");
  }
  return numAttempts;
}
 
Example 14
Source File: LeveldbTimelineStore.java    From big-c with Apache License 2.0
/**
 * Finds all keys in the db that have a given prefix and deletes them on
 * the given write batch.
 */
private void deleteKeysWithPrefix(WriteBatch writeBatch, byte[] prefix,
    LeveldbIterator iterator) {
  for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefix.length, key)) {
      break;
    }
    writeBatch.delete(key);
  }
}
 
Example 15
Source File: LeveldbTimelineStore.java    From big-c with Apache License 2.0
private static TimelineDomain getTimelineDomain(
    LeveldbIterator iterator, String domainId, byte[] prefix) throws IOException {
  // Iterate over all the rows whose key starts with prefix to retrieve the
  // domain information.
  TimelineDomain domain = new TimelineDomain();
  domain.setId(domainId);
  boolean noRows = true;
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefix.length, key)) {
      break;
    }
    if (noRows) {
      noRows = false;
    }
    byte[] value = iterator.peekNext().getValue();
    if (value != null && value.length > 0) {
      if (key[prefix.length] == DESCRIPTION_COLUMN[0]) {
        domain.setDescription(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == OWNER_COLUMN[0]) {
        domain.setOwner(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == READER_COLUMN[0]) {
        domain.setReaders(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == WRITER_COLUMN[0]) {
        domain.setWriters(new String(value, Charset.forName("UTF-8")));
      } else if (key[prefix.length] == TIMESTAMP_COLUMN[0]) {
        domain.setCreatedTime(readReverseOrderedLong(value, 0));
        domain.setModifiedTime(readReverseOrderedLong(value, 8));
      } else {
        LOG.error("Unrecognized domain column: " + key[prefix.length]);
      }
    }
  }
  if (noRows) {
    return null;
  } else {
    return domain;
  }
}
 
Example 16
Source File: NMLeveldbStateStoreService.java    From hadoop with Apache License 2.0
private RecoveredContainerState loadContainerState(ContainerId containerId,
    LeveldbIterator iter, String keyPrefix) throws IOException {
  RecoveredContainerState rcs = new RecoveredContainerState();
  rcs.status = RecoveredContainerStatus.REQUESTED;
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    iter.next();

    String suffix = key.substring(keyPrefix.length()-1);  // suffix starts with '/'
    if (suffix.equals(CONTAINER_REQUEST_KEY_SUFFIX)) {
      rcs.startRequest = new StartContainerRequestPBImpl(
          StartContainerRequestProto.parseFrom(entry.getValue()));
    } else if (suffix.equals(CONTAINER_DIAGS_KEY_SUFFIX)) {
      rcs.diagnostics = asString(entry.getValue());
    } else if (suffix.equals(CONTAINER_LAUNCHED_KEY_SUFFIX)) {
      if (rcs.status == RecoveredContainerStatus.REQUESTED) {
        rcs.status = RecoveredContainerStatus.LAUNCHED;
      }
    } else if (suffix.equals(CONTAINER_KILLED_KEY_SUFFIX)) {
      rcs.killed = true;
    } else if (suffix.equals(CONTAINER_EXIT_CODE_KEY_SUFFIX)) {
      rcs.status = RecoveredContainerStatus.COMPLETED;
      rcs.exitCode = Integer.parseInt(asString(entry.getValue()));
    } else {
      throw new IOException("Unexpected container state key: " + key);
    }
  }
  return rcs;
}
 
Example 17
Source File: LeveldbTimelineStore.java    From hadoop with Apache License 2.0
/**
 * Read entity from a db iterator.  If no information is found in the
 * specified fields for this entity, return null.
 */
private static TimelineEntity getEntity(String entityId, String entityType,
    Long startTime, EnumSet<Field> fields, LeveldbIterator iterator,
    byte[] prefix, int prefixlen) throws IOException {
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }

  TimelineEntity entity = new TimelineEntity();
  boolean events = false;
  boolean lastEvent = false;
  if (fields.contains(Field.EVENTS)) {
    events = true;
  } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
    lastEvent = true;
  } else {
    entity.setEvents(null);
  }
  boolean relatedEntities = false;
  if (fields.contains(Field.RELATED_ENTITIES)) {
    relatedEntities = true;
  } else {
    entity.setRelatedEntities(null);
  }
  boolean primaryFilters = false;
  if (fields.contains(Field.PRIMARY_FILTERS)) {
    primaryFilters = true;
  } else {
    entity.setPrimaryFilters(null);
  }
  boolean otherInfo = false;
  if (fields.contains(Field.OTHER_INFO)) {
    otherInfo = true;
  } else {
    entity.setOtherInfo(null);
  }

  // iterate through the entity's entry, parsing information if it is part
  // of a requested field
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefixlen, key)) {
      break;
    }
    if (key.length == prefixlen) {
      continue;
    }
    if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
      if (primaryFilters) {
        addPrimaryFilter(entity, key,
            prefixlen + PRIMARY_FILTERS_COLUMN.length);
      }
    } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
      if (otherInfo) {
        entity.addOtherInfo(parseRemainingKey(key,
            prefixlen + OTHER_INFO_COLUMN.length),
            GenericObjectMapper.read(iterator.peekNext().getValue()));
      }
    } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
      if (relatedEntities) {
        addRelatedEntity(entity, key,
            prefixlen + RELATED_ENTITIES_COLUMN.length);
      }
    } else if (key[prefixlen] == EVENTS_COLUMN[0]) {
      if (events || (lastEvent &&
          entity.getEvents().size() == 0)) {
        TimelineEvent event = getEntityEvent(null, key, prefixlen +
            EVENTS_COLUMN.length, iterator.peekNext().getValue());
        if (event != null) {
          entity.addEvent(event);
        }
      }
    } else if (key[prefixlen] == DOMAIN_ID_COLUMN[0]) {
      byte[] v = iterator.peekNext().getValue();
      String domainId = new String(v, Charset.forName("UTF-8"));
      entity.setDomainId(domainId);
    } else {
      if (key[prefixlen] !=
          INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
        LOG.warn(String.format("Found unexpected column for entity %s of " +
            "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
      }
    }
  }

  entity.setEntityId(entityId);
  entity.setEntityType(entityType);
  entity.setStartTime(startTime);

  return entity;
}
 
Example 18
Source File: LeveldbTimelineStore.java    From big-c with Apache License 2.0
/**
 * Read entity from a db iterator.  If no information is found in the
 * specified fields for this entity, return null.
 */
private static TimelineEntity getEntity(String entityId, String entityType,
    Long startTime, EnumSet<Field> fields, LeveldbIterator iterator,
    byte[] prefix, int prefixlen) throws IOException {
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }

  TimelineEntity entity = new TimelineEntity();
  boolean events = false;
  boolean lastEvent = false;
  if (fields.contains(Field.EVENTS)) {
    events = true;
  } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
    lastEvent = true;
  } else {
    entity.setEvents(null);
  }
  boolean relatedEntities = false;
  if (fields.contains(Field.RELATED_ENTITIES)) {
    relatedEntities = true;
  } else {
    entity.setRelatedEntities(null);
  }
  boolean primaryFilters = false;
  if (fields.contains(Field.PRIMARY_FILTERS)) {
    primaryFilters = true;
  } else {
    entity.setPrimaryFilters(null);
  }
  boolean otherInfo = false;
  if (fields.contains(Field.OTHER_INFO)) {
    otherInfo = true;
  } else {
    entity.setOtherInfo(null);
  }

  // iterate through the entity's entry, parsing information if it is part
  // of a requested field
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefixlen, key)) {
      break;
    }
    if (key.length == prefixlen) {
      continue;
    }
    if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
      if (primaryFilters) {
        addPrimaryFilter(entity, key,
            prefixlen + PRIMARY_FILTERS_COLUMN.length);
      }
    } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
      if (otherInfo) {
        entity.addOtherInfo(parseRemainingKey(key,
            prefixlen + OTHER_INFO_COLUMN.length),
            GenericObjectMapper.read(iterator.peekNext().getValue()));
      }
    } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
      if (relatedEntities) {
        addRelatedEntity(entity, key,
            prefixlen + RELATED_ENTITIES_COLUMN.length);
      }
    } else if (key[prefixlen] == EVENTS_COLUMN[0]) {
      if (events || (lastEvent &&
          entity.getEvents().size() == 0)) {
        TimelineEvent event = getEntityEvent(null, key, prefixlen +
            EVENTS_COLUMN.length, iterator.peekNext().getValue());
        if (event != null) {
          entity.addEvent(event);
        }
      }
    } else if (key[prefixlen] == DOMAIN_ID_COLUMN[0]) {
      byte[] v = iterator.peekNext().getValue();
      String domainId = new String(v, Charset.forName("UTF-8"));
      entity.setDomainId(domainId);
    } else {
      if (key[prefixlen] !=
          INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
        LOG.warn(String.format("Found unexpected column for entity %s of " +
            "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
      }
    }
  }

  entity.setEntityId(entityId);
  entity.setEntityType(entityType);
  entity.setStartTime(startTime);

  return entity;
}