Java Code Examples for org.apache.hadoop.yarn.api.records.timeline.TimelineEntities#setEntities()

The following examples show how to use org.apache.hadoop.yarn.api.records.timeline.TimelineEntities#setEntities(). These examples are extracted from open-source projects. You can vote up the examples you find useful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: hadoop   File: MemoryTimelineStore.java    License: Apache License 2.0 4 votes vote down vote up
// Queries the in-memory store for entities of one type, applying the optional
// window/filter/ACL constraints, and returns up to 'limit' matches wrapped in
// a TimelineEntities object, sorted via Collections.sort (TimelineEntity's
// natural ordering).
@Override
public synchronized TimelineEntities getEntities(String entityType, Long limit,
    Long windowStart, Long windowEnd, String fromId, Long fromTs,
    NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
    EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
  // Null query parameters mean "no restriction": default limit, unbounded
  // time window, and all fields retained.
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }

  Iterator<TimelineEntity> entityIterator = null;
  if (fromId != null) {
    // Pagination: start iterating from the entity identified by fromId
    // (inclusive, via tailSet(firstEntity, true)). An unknown fromId yields
    // an empty result rather than an error.
    TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId,
        entityType));
    if (firstEntity == null) {
      return new TimelineEntities();
    } else {
      entityIterator = new TreeSet<TimelineEntity>(entities.values())
          .tailSet(firstEntity, true).iterator();
    }
  }
  if (entityIterator == null) {
    // NOTE(review): PriorityQueue's iterator does NOT traverse in priority
    // order (per java.util.PriorityQueue javadoc), so with no fromId the
    // 'limit' cutoff below is applied in an unspecified order before the
    // final sort — confirm this is intended.
    entityIterator = new PriorityQueue<TimelineEntity>(entities.values())
        .iterator();
  }

  List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
  while (entityIterator.hasNext()) {
    TimelineEntity entity = entityIterator.next();
    // Stop once enough entities have been accepted.
    if (entitiesSelected.size() >= limit) {
      break;
    }
    if (!entity.getEntityType().equals(entityType)) {
      continue;
    }
    // Window is exclusive at the start, inclusive at the end:
    // windowStart < startTime <= windowEnd.
    if (entity.getStartTime() <= windowStart) {
      continue;
    }
    if (entity.getStartTime() > windowEnd) {
      continue;
    }
    // Skip entities inserted into the store after fromTs.
    if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
        entity.getEntityId(), entity.getEntityType())) > fromTs) {
      continue;
    }
    if (primaryFilter != null &&
        !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
      continue;
    }
    if (secondaryFilters != null) { // AND logic
      // Every secondary filter must match either the entity's primary
      // filters or its otherInfo map; one miss rejects the entity.
      boolean flag = true;
      for (NameValuePair secondaryFilter : secondaryFilters) {
        if (secondaryFilter != null && !matchPrimaryFilter(
            entity.getPrimaryFilters(), secondaryFilter) &&
            !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
          flag = false;
          break;
        }
      }
      if (!flag) {
        continue;
      }
    }
    // Backfill the default domain on legacy entities; note this mutates the
    // stored entity, not a copy.
    if (entity.getDomainId() == null) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    // A null checkAcl means no access control is enforced.
    if (checkAcl == null || checkAcl.check(entity)) {
      entitiesSelected.add(entity);
    }
  }
  // Strip fields the caller did not request, then sort for a stable result.
  List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
  for (TimelineEntity entitySelected : entitiesSelected) {
    entitiesToReturn.add(maskFields(entitySelected, fields));
  }
  Collections.sort(entitiesToReturn);
  TimelineEntities entitiesWrapper = new TimelineEntities();
  entitiesWrapper.setEntities(entitiesToReturn);
  return entitiesWrapper;
}
 
Example 2
Source Project: hadoop   File: TestLeveldbTimelineStore.java    License: Apache License 2.0 4 votes vote down vote up
// Verifies that discardOldEntities deletes entities (and their primary-filter
// index entries) whose insert time is older than the given timestamp.
@Test
public void testDeleteEntitiesPrimaryFilters()
    throws IOException, InterruptedException {
  // Add one extra entity of type_1 carrying a distinct primary filter
  // (user=otheruser) so deletion can be observed per filter.
  Map<String, Set<Object>> primaryFilter =
      Collections.singletonMap("user", Collections.singleton(
          (Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
  // Use an uppercase 'L' suffix for long literals — lowercase 'l' is easily
  // misread as the digit '1'.
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789L, Collections.singletonList(ev2), null, primaryFilter,
      null, domainId1)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());

  // The new entity is visible under its own primary filter...
  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
      pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0),
      domainId1);

  // ...and alongside the pre-loaded entities under the shared user filter.
  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(3, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0), domainId1);
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1), domainId1);
  verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(2), domainId2);

  // A cutoff in the past deletes nothing.
  ((LeveldbTimelineStore)store).discardOldEntities(-123L);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());

  // A cutoff newer than every insert time deletes everything, including the
  // entity-type and primary-filter index entries.
  ((LeveldbTimelineStore)store).discardOldEntities(123L);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());

  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
 
Example 3
Source Project: hadoop   File: TimelineStoreTestUtils.java    License: Apache License 2.0 4 votes vote down vote up
// Seeds the store with three timeline domains (domain2 is updated once after
// a one-second pause so its modified time differs from its created time) and,
// for leveldb stores only, two ACL test entities bound to those domains.
protected void loadTestDomainData() throws IOException {
  domain1 = new TimelineDomain();
  domain1.setId("domain_id_1");
  domain1.setDescription("description_1");
  domain1.setOwner("owner_1");
  domain1.setReaders("reader_user_1 reader_group_1");
  domain1.setWriters("writer_user_1 writer_group_1");
  store.put(domain1);

  domain2 = new TimelineDomain();
  domain2.setId("domain_id_2");
  domain2.setDescription("description_2");
  domain2.setOwner("owner_2");
  domain2.setReaders("reader_user_2 reader_group_2");
  domain2.setWriters("writer_user_2 writer_group_2");
  store.put(domain2);

  // Wait a second before updating the domain information
  elapsedTime = 1000;
  try {
    Thread.sleep(elapsedTime);
  } catch (InterruptedException e) {
    throw new IOException(e);
  }

  // Second put under the same id updates domain_id_2 in place.
  domain2.setDescription("description_3");
  domain2.setOwner("owner_3");
  domain2.setReaders("reader_user_3 reader_group_3");
  domain2.setWriters("writer_user_3 writer_group_3");
  store.put(domain2);

  domain3 = new TimelineDomain();
  domain3.setId("domain_id_4");
  domain3.setDescription("description_4");
  domain3.setOwner("owner_1");
  domain3.setReaders("reader_user_4 reader_group_4");
  domain3.setWriters("writer_user_4 writer_group_4");
  store.put(domain3);

  TimelineEntities entities = new TimelineEntities();
  if (store instanceof LeveldbTimelineStore) {
    LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
    // Uppercase 'L' suffix on long literals — lowercase 'l' reads as '1'.
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_11", "ACL_ENTITY_TYPE_1", 63L, null, null, null, null,
            "domain_id_4")));
    leveldb.put(entities);
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_22", "ACL_ENTITY_TYPE_1", 64L, null, null, null, null,
            "domain_id_2")));
    leveldb.put(entities);
  }
}
 
Example 4
Source Project: big-c   File: MemoryTimelineStore.java    License: Apache License 2.0 4 votes vote down vote up
// Queries the in-memory store for entities of one type, applying the optional
// window/filter/ACL constraints, and returns up to 'limit' matches wrapped in
// a TimelineEntities object, sorted via Collections.sort (TimelineEntity's
// natural ordering).
@Override
public synchronized TimelineEntities getEntities(String entityType, Long limit,
    Long windowStart, Long windowEnd, String fromId, Long fromTs,
    NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
    EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
  // Null query parameters mean "no restriction": default limit, unbounded
  // time window, and all fields retained.
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }

  Iterator<TimelineEntity> entityIterator = null;
  if (fromId != null) {
    // Pagination: start iterating from the entity identified by fromId
    // (inclusive, via tailSet(firstEntity, true)). An unknown fromId yields
    // an empty result rather than an error.
    TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId,
        entityType));
    if (firstEntity == null) {
      return new TimelineEntities();
    } else {
      entityIterator = new TreeSet<TimelineEntity>(entities.values())
          .tailSet(firstEntity, true).iterator();
    }
  }
  if (entityIterator == null) {
    // NOTE(review): PriorityQueue's iterator does NOT traverse in priority
    // order (per java.util.PriorityQueue javadoc), so with no fromId the
    // 'limit' cutoff below is applied in an unspecified order before the
    // final sort — confirm this is intended.
    entityIterator = new PriorityQueue<TimelineEntity>(entities.values())
        .iterator();
  }

  List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
  while (entityIterator.hasNext()) {
    TimelineEntity entity = entityIterator.next();
    // Stop once enough entities have been accepted.
    if (entitiesSelected.size() >= limit) {
      break;
    }
    if (!entity.getEntityType().equals(entityType)) {
      continue;
    }
    // Window is exclusive at the start, inclusive at the end:
    // windowStart < startTime <= windowEnd.
    if (entity.getStartTime() <= windowStart) {
      continue;
    }
    if (entity.getStartTime() > windowEnd) {
      continue;
    }
    // Skip entities inserted into the store after fromTs.
    if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
        entity.getEntityId(), entity.getEntityType())) > fromTs) {
      continue;
    }
    if (primaryFilter != null &&
        !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
      continue;
    }
    if (secondaryFilters != null) { // AND logic
      // Every secondary filter must match either the entity's primary
      // filters or its otherInfo map; one miss rejects the entity.
      boolean flag = true;
      for (NameValuePair secondaryFilter : secondaryFilters) {
        if (secondaryFilter != null && !matchPrimaryFilter(
            entity.getPrimaryFilters(), secondaryFilter) &&
            !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
          flag = false;
          break;
        }
      }
      if (!flag) {
        continue;
      }
    }
    // Backfill the default domain on legacy entities; note this mutates the
    // stored entity, not a copy.
    if (entity.getDomainId() == null) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    // A null checkAcl means no access control is enforced.
    if (checkAcl == null || checkAcl.check(entity)) {
      entitiesSelected.add(entity);
    }
  }
  // Strip fields the caller did not request, then sort for a stable result.
  List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
  for (TimelineEntity entitySelected : entitiesSelected) {
    entitiesToReturn.add(maskFields(entitySelected, fields));
  }
  Collections.sort(entitiesToReturn);
  TimelineEntities entitiesWrapper = new TimelineEntities();
  entitiesWrapper.setEntities(entitiesToReturn);
  return entitiesWrapper;
}
 
Example 5
Source Project: big-c   File: TestLeveldbTimelineStore.java    License: Apache License 2.0 4 votes vote down vote up
// Verifies that discardOldEntities deletes entities (and their primary-filter
// index entries) whose insert time is older than the given timestamp.
@Test
public void testDeleteEntitiesPrimaryFilters()
    throws IOException, InterruptedException {
  // Add one extra entity of type_1 carrying a distinct primary filter
  // (user=otheruser) so deletion can be observed per filter.
  Map<String, Set<Object>> primaryFilter =
      Collections.singletonMap("user", Collections.singleton(
          (Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
  // Use an uppercase 'L' suffix for long literals — lowercase 'l' is easily
  // misread as the digit '1'.
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789L, Collections.singletonList(ev2), null, primaryFilter,
      null, domainId1)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());

  // The new entity is visible under its own primary filter...
  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
      pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0),
      domainId1);

  // ...and alongside the pre-loaded entities under the shared user filter.
  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(3, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0), domainId1);
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1), domainId1);
  verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(2), domainId2);

  // A cutoff in the past deletes nothing.
  ((LeveldbTimelineStore)store).discardOldEntities(-123L);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());

  // A cutoff newer than every insert time deletes everything, including the
  // entity-type and primary-filter index entries.
  ((LeveldbTimelineStore)store).discardOldEntities(123L);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());

  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
 
Example 6
Source Project: big-c   File: TimelineStoreTestUtils.java    License: Apache License 2.0 4 votes vote down vote up
// Seeds the store with three timeline domains (domain2 is updated once after
// a one-second pause so its modified time differs from its created time) and,
// for leveldb stores only, two ACL test entities bound to those domains.
protected void loadTestDomainData() throws IOException {
  domain1 = new TimelineDomain();
  domain1.setId("domain_id_1");
  domain1.setDescription("description_1");
  domain1.setOwner("owner_1");
  domain1.setReaders("reader_user_1 reader_group_1");
  domain1.setWriters("writer_user_1 writer_group_1");
  store.put(domain1);

  domain2 = new TimelineDomain();
  domain2.setId("domain_id_2");
  domain2.setDescription("description_2");
  domain2.setOwner("owner_2");
  domain2.setReaders("reader_user_2 reader_group_2");
  domain2.setWriters("writer_user_2 writer_group_2");
  store.put(domain2);

  // Wait a second before updating the domain information
  elapsedTime = 1000;
  try {
    Thread.sleep(elapsedTime);
  } catch (InterruptedException e) {
    throw new IOException(e);
  }

  // Second put under the same id updates domain_id_2 in place.
  domain2.setDescription("description_3");
  domain2.setOwner("owner_3");
  domain2.setReaders("reader_user_3 reader_group_3");
  domain2.setWriters("writer_user_3 writer_group_3");
  store.put(domain2);

  domain3 = new TimelineDomain();
  domain3.setId("domain_id_4");
  domain3.setDescription("description_4");
  domain3.setOwner("owner_1");
  domain3.setReaders("reader_user_4 reader_group_4");
  domain3.setWriters("writer_user_4 writer_group_4");
  store.put(domain3);

  TimelineEntities entities = new TimelineEntities();
  if (store instanceof LeveldbTimelineStore) {
    LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
    // Uppercase 'L' suffix on long literals — lowercase 'l' reads as '1'.
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_11", "ACL_ENTITY_TYPE_1", 63L, null, null, null, null,
            "domain_id_4")));
    leveldb.put(entities);
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_22", "ACL_ENTITY_TYPE_1", 64L, null, null, null, null,
            "domain_id_2")));
    leveldb.put(entities);
  }
}