org.apache.hadoop.yarn.util.timeline.TimelineUtils Java Examples

The following examples show how to use org.apache.hadoop.yarn.util.timeline.TimelineUtils. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TimelineWebServices.java    From ambari-metrics with Apache License 2.0 6 votes vote down vote up
/**
 * Store the given aggregated metrics into the timeline store, and return
 * any errors that happened during storing.
 */
@Path("/metrics/aggregated")
@POST
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
public TimelinePutResponse postAggregatedMetrics(
  @Context HttpServletRequest req,
  @Context HttpServletResponse res,
  AggregationResult metrics) {
  init(res);

  // Nothing to persist: answer with an empty (success) response.
  if (metrics == null) {
    return new TimelinePutResponse();
  }

  try {
    if (LOG.isTraceEnabled()) {
      String json = TimelineUtils.dumpTimelineRecordtoJSON(metrics, true);
      LOG.trace("Storing aggregated metrics: " + json);
    }
    return timelineMetricStore.putHostAggregatedMetrics(metrics);
  } catch (Exception e) {
    // Surface storage failures to the client as HTTP 500.
    LOG.error("Error saving metrics.", e);
    throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
  }
}
 
Example #2
Source File: TimelineWebServices.java    From ambari-metrics with Apache License 2.0 6 votes vote down vote up
/**
 * Store the given container metrics into the timeline store, and return
 * any errors that happened during storing.
 */
@Path("/containermetrics")
@POST
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
public TimelinePutResponse postContainerMetrics(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    List<ContainerMetric> metrics) {
  init(res);

  // An absent or empty batch is not an error; reply with an empty response.
  if (metrics == null || metrics.isEmpty()) {
    return new TimelinePutResponse();
  }

  try {
    if (LOG.isTraceEnabled()) {
      String json = TimelineUtils.dumpTimelineRecordtoJSON(metrics, true);
      LOG.trace("Storing container metrics: " + json);
    }
    return timelineMetricStore.putContainerMetrics(metrics);
  } catch (Exception e) {
    // Surface storage failures to the client as HTTP 500.
    LOG.error("Error saving metrics.", e);
    throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
  }
}
 
Example #3
Source File: JstormOnYarn.java    From jstorm with Apache License 2.0 5 votes vote down vote up
/**
 * Publish the client's timeline domain (id plus view/modify ACLs) to the
 * YARN timeline server. No-op (with a warning) when the timeline service
 * is disabled in the configuration.
 */
private void prepareTimelineDomain() {
    TimelineClient timelineClient = null;
    // Only create and start a client when the timeline service is enabled.
    if (jstormClientContext.conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
            YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
        timelineClient = TimelineClient.createTimelineClient();
        timelineClient.init(jstormClientContext.conf);
        timelineClient.start();
    } else {
        LOG.warn("Cannot put the domain " + jstormClientContext.domainId +
                " because the timeline service is not enabled");
        return;
    }
    try {
        // Build the domain; fall back to a blank ACL string when no
        // view/modify ACLs were configured.
        TimelineDomain domain = new TimelineDomain();
        domain.setId(jstormClientContext.domainId);
        domain.setReaders(
                jstormClientContext.viewACLs != null && jstormClientContext.viewACLs.length() > 0 ? jstormClientContext.viewACLs : JOYConstants.BLANK);
        domain.setWriters(
                jstormClientContext.modifyACLs != null && jstormClientContext.modifyACLs.length() > 0 ? jstormClientContext.modifyACLs : JOYConstants.BLANK);
        timelineClient.putDomain(domain);
        LOG.info("Put the timeline domain: " +
                TimelineUtils.dumpTimelineRecordtoJSON(domain));
    } catch (Exception e) {
        // Best-effort: a failed domain put is logged, not propagated.
        LOG.error("Error when putting the timeline domain", e);
    } finally {
        timelineClient.stop();
    }
}
 
Example #4
Source File: TimelineWebServices.java    From ambari-metrics with Apache License 2.0 5 votes vote down vote up
/**
 * Store the given metrics into the timeline store, and return errors that
 * happened during storing.
 */
@Path("/metrics")
@POST
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
public TimelinePutResponse postMetrics(
  @Context HttpServletRequest req,
  @Context HttpServletResponse res,
  TimelineMetrics metrics) {

  init(res);
  if (metrics == null) {
    return new TimelinePutResponse();
  }

  try {

    // TODO: Check ACLs for MetricEntity using the TimelineACLManager.
    // TODO: Save owner of the MetricEntity.

    if (LOG.isTraceEnabled()) {
      LOG.trace("Storing metrics: " +
        TimelineUtils.dumpTimelineRecordtoJSON(metrics, true));
    }

    // Constant-first equals() avoids an NPE when the first metric carries a
    // null appId (previously that surfaced to the client as an HTTP 500).
    if (CollectionUtils.isNotEmpty(metrics.getMetrics())
        && SMOKETEST_METRIC_APP_ID.equals(metrics.getMetrics().get(0).getAppId())) {
      // Smoke-test metrics bypass the cache so they are immediately visible.
      return timelineMetricStore.putMetricsSkipCache(metrics);
    } else {
      return timelineMetricStore.putMetrics(metrics);
    }

  } catch (Exception e) {
    LOG.error("Error saving metrics.", e);
    throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
  }
}
 
Example #5
Source File: Client.java    From metron with Apache License 2.0 5 votes vote down vote up
/**
 * Publish the configured timeline domain (id plus view/modify ACLs) to the
 * YARN timeline server; warns and returns when the service is disabled.
 */
private void prepareTimelineDomain() {
  boolean timelineEnabled = conf.getBoolean(
      YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED);
  if (!timelineEnabled) {
    LOG.warn("Cannot put the domain " + domainId +
            " because the timeline service is not enabled");
    return;
  }
  TimelineClient timelineClient = TimelineClient.createTimelineClient();
  timelineClient.init(conf);
  timelineClient.start();
  try {
    TimelineDomain domain = new TimelineDomain();
    domain.setId(domainId);
    // Fall back to a single-space ACL string when none was configured.
    String readers = (viewACLs != null && viewACLs.length() > 0) ? viewACLs : " ";
    domain.setReaders(readers);
    String writers = (modifyACLs != null && modifyACLs.length() > 0) ? modifyACLs : " ";
    domain.setWriters(writers);
    timelineClient.putDomain(domain);
    LOG.info("Put the timeline domain: " +
            TimelineUtils.dumpTimelineRecordtoJSON(domain));
  } catch (Exception e) {
    // Best-effort: a failed domain put is logged, not propagated.
    LOG.error("Error when putting the timeline domain", e);
  } finally {
    timelineClient.stop();
  }
}
 
Example #6
Source File: Client.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Publish the configured timeline domain (id plus view/modify ACLs) to the
 * YARN timeline server; warns and returns when the service is disabled.
 */
private void prepareTimelineDomain() {
  boolean timelineEnabled = conf.getBoolean(
      YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED);
  if (!timelineEnabled) {
    LOG.warn("Cannot put the domain " + domainId +
        " because the timeline service is not enabled");
    return;
  }
  TimelineClient timelineClient = TimelineClient.createTimelineClient();
  timelineClient.init(conf);
  timelineClient.start();
  try {
    //TODO: we need to check and combine the existing timeline domain ACLs,
    //but let's do it once we have client java library to query domains.
    TimelineDomain domain = new TimelineDomain();
    domain.setId(domainId);
    // Fall back to a single-space ACL string when none was configured.
    String readers = (viewACLs != null && viewACLs.length() > 0) ? viewACLs : " ";
    domain.setReaders(readers);
    String writers = (modifyACLs != null && modifyACLs.length() > 0) ? modifyACLs : " ";
    domain.setWriters(writers);
    timelineClient.putDomain(domain);
    LOG.info("Put the timeline domain: " +
        TimelineUtils.dumpTimelineRecordtoJSON(domain));
  } catch (Exception e) {
    // Best-effort: a failed domain put is logged, not propagated.
    LOG.error("Error when putting the timeline domain", e);
  } finally {
    timelineClient.stop();
  }
}
 
Example #7
Source File: YarnClientImpl.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Initialize the YARN client: read poll intervals/timeouts from the
 * configuration and conditionally set up the application-history and
 * timeline clients. Order matters: the optional clients must be created
 * before super.serviceInit(conf) completes the service lifecycle step.
 */
@SuppressWarnings("deprecation")
@Override
protected void serviceInit(Configuration conf) throws Exception {
  // Poll interval/timeout for the async application-client protocol APIs.
  asyncApiPollIntervalMillis =
      conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS,
        YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
  asyncApiPollTimeoutMillis =
      conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS,
          YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS);
  // Submission polling defaults to the async interval unless explicitly set.
  submitPollIntervalMillis = asyncApiPollIntervalMillis;
  if (conf.get(YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS)
      != null) {
    submitPollIntervalMillis = conf.getLong(
      YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS,
      YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
  }

  // Application-history client is created only when history is enabled.
  if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
    YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) {
    historyServiceEnabled = true;
    historyClient = AHSClient.createAHSClient();
    historyClient.init(conf);
  }

  // Timeline client plus its delegation-token renewer and token service
  // are created only when the timeline service is enabled.
  if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
    timelineServiceEnabled = true;
    timelineClient = createTimelineClient();
    timelineClient.init(conf);
    timelineDTRenewer = getTimelineDelegationTokenRenewer(conf);
    timelineService = TimelineUtils.buildTimelineTokenService(conf);
  }

  // Best-effort mode: timeline failures will not fail client operations.
  timelineServiceBestEffort = conf.getBoolean(
      YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_CLIENT_BEST_EFFORT);
  super.serviceInit(conf);
}
 
Example #8
Source File: SystemMetricsPublisher.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Publish a single timeline entity, logging (never propagating) failures.
 */
private void putEntity(TimelineEntity entity) {
  try {
    if (LOG.isDebugEnabled()) {
      String json = TimelineUtils.dumpTimelineRecordtoJSON(entity);
      LOG.debug("Publishing the entity " + entity.getEntityId() +
          ", JSON-style content: " + json);
    }
    client.putEntities(entity);
  } catch (Exception e) {
    // Publishing is best-effort; a failure must not break the caller.
    LOG.error("Error when publishing entity [" + entity.getEntityType() + ","
        + entity.getEntityId() + "]", e);
  }
}
 
Example #9
Source File: TestTimelineRecords.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Round-trips two TimelineDomain records through a TimelineDomains
 * container and verifies every field is preserved.
 */
@Test
public void testTimelineDomain() throws Exception {
  TimelineDomains domains = new TimelineDomains();

  TimelineDomain domain = null;
  for (int i = 0; i < 2; ++i) {
    domain = new TimelineDomain();
    domain.setId("test id " + (i + 1));
    domain.setDescription("test description " + (i + 1));
    domain.setOwner("test owner " + (i + 1));
    domain.setReaders("test_reader_user_" + (i + 1) +
        " test_reader_group+" + (i + 1));
    domain.setWriters("test_writer_user_" + (i + 1) +
        " test_writer_group+" + (i + 1));
    domain.setCreatedTime(0L);
    domain.setModifiedTime(1L);
    domains.addDomain(domain);
  }
  LOG.info("Domain in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(domains, true));

  Assert.assertEquals(2, domains.getDomains().size());

  for (int i = 0; i < domains.getDomains().size(); ++i) {
    domain = domains.getDomains().get(i);
    Assert.assertEquals("test id " + (i + 1), domain.getId());
    Assert.assertEquals("test description " + (i + 1),
        domain.getDescription());
    Assert.assertEquals("test owner " + (i + 1), domain.getOwner());
    Assert.assertEquals("test_reader_user_" + (i + 1) +
        " test_reader_group+" + (i + 1), domain.getReaders());
    Assert.assertEquals("test_writer_user_" + (i + 1) +
        " test_writer_group+" + (i + 1), domain.getWriters());
    // Long.valueOf replaces the deprecated new Long(...) constructor.
    Assert.assertEquals(Long.valueOf(0L), domain.getCreatedTime());
    Assert.assertEquals(Long.valueOf(1L), domain.getModifiedTime());
  }
}
 
Example #10
Source File: TestTimelineRecords.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Builds three TimelinePutError records (one added directly, two via a
 * list, with error1 appearing twice) and verifies the response preserves
 * their id, type, and error code.
 */
@Test
public void testTimelinePutErrors() throws Exception {
  // Renamed local from TimelinePutErrors to follow lowerCamelCase.
  TimelinePutResponse timelinePutErrors = new TimelinePutResponse();
  TimelinePutError error1 = new TimelinePutError();
  error1.setEntityId("entity id 1");
  // Bug fix: was a second setEntityId(...) call, which overwrote the id
  // and left the entity type unset; the type must be set here instead.
  error1.setEntityType("entity type 1");
  error1.setErrorCode(TimelinePutError.NO_START_TIME);
  timelinePutErrors.addError(error1);
  List<TimelinePutError> response = new ArrayList<TimelinePutError>();
  response.add(error1);
  TimelinePutError error2 = new TimelinePutError();
  error2.setEntityId("entity id 2");
  // Bug fix: same setEntityId/setEntityType mix-up as error1 above.
  error2.setEntityType("entity type 2");
  error2.setErrorCode(TimelinePutError.IO_EXCEPTION);
  response.add(error2);
  timelinePutErrors.addErrors(response);
  LOG.info("Errors in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(timelinePutErrors, true));

  Assert.assertEquals(3, timelinePutErrors.getErrors().size());
  TimelinePutError e = timelinePutErrors.getErrors().get(0);
  Assert.assertEquals(error1.getEntityId(), e.getEntityId());
  Assert.assertEquals(error1.getEntityType(), e.getEntityType());
  Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
  e = timelinePutErrors.getErrors().get(1);
  Assert.assertEquals(error1.getEntityId(), e.getEntityId());
  Assert.assertEquals(error1.getEntityType(), e.getEntityType());
  Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
  e = timelinePutErrors.getErrors().get(2);
  Assert.assertEquals(error2.getEntityId(), e.getEntityId());
  Assert.assertEquals(error2.getEntityType(), e.getEntityType());
  Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
}
 
Example #11
Source File: Client.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Publish the configured timeline domain (id plus view/modify ACLs) to the
 * YARN timeline server; warns and returns when the service is disabled.
 */
private void prepareTimelineDomain() {
  boolean timelineEnabled = conf.getBoolean(
      YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED);
  if (!timelineEnabled) {
    LOG.warn("Cannot put the domain " + domainId +
        " because the timeline service is not enabled");
    return;
  }
  TimelineClient timelineClient = TimelineClient.createTimelineClient();
  timelineClient.init(conf);
  timelineClient.start();
  try {
    //TODO: we need to check and combine the existing timeline domain ACLs,
    //but let's do it once we have client java library to query domains.
    TimelineDomain domain = new TimelineDomain();
    domain.setId(domainId);
    // Fall back to a single-space ACL string when none was configured.
    String readers = (viewACLs != null && viewACLs.length() > 0) ? viewACLs : " ";
    domain.setReaders(readers);
    String writers = (modifyACLs != null && modifyACLs.length() > 0) ? modifyACLs : " ";
    domain.setWriters(writers);
    timelineClient.putDomain(domain);
    LOG.info("Put the timeline domain: " +
        TimelineUtils.dumpTimelineRecordtoJSON(domain));
  } catch (Exception e) {
    // Best-effort: a failed domain put is logged, not propagated.
    LOG.error("Error when putting the timeline domain", e);
  } finally {
    timelineClient.stop();
  }
}
 
Example #12
Source File: YarnClientImpl.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Initialize the YARN client: read poll intervals/timeouts from the
 * configuration and conditionally set up the application-history and
 * timeline clients. Order matters: the optional clients must be created
 * before super.serviceInit(conf) completes the service lifecycle step.
 */
@SuppressWarnings("deprecation")
@Override
protected void serviceInit(Configuration conf) throws Exception {
  // Poll interval/timeout for the async application-client protocol APIs.
  asyncApiPollIntervalMillis =
      conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS,
        YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
  asyncApiPollTimeoutMillis =
      conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS,
          YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS);
  // Submission polling defaults to the async interval unless explicitly set.
  submitPollIntervalMillis = asyncApiPollIntervalMillis;
  if (conf.get(YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS)
      != null) {
    submitPollIntervalMillis = conf.getLong(
      YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS,
      YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
  }

  // Application-history client is created only when history is enabled.
  if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
    YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) {
    historyServiceEnabled = true;
    historyClient = AHSClient.createAHSClient();
    historyClient.init(conf);
  }

  // Timeline client plus its delegation-token renewer and token service
  // are created only when the timeline service is enabled.
  if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
    timelineServiceEnabled = true;
    timelineClient = createTimelineClient();
    timelineClient.init(conf);
    timelineDTRenewer = getTimelineDelegationTokenRenewer(conf);
    timelineService = TimelineUtils.buildTimelineTokenService(conf);
  }

  // Best-effort mode: timeline failures will not fail client operations.
  timelineServiceBestEffort = conf.getBoolean(
      YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_CLIENT_BEST_EFFORT);
  super.serviceInit(conf);
}
 
Example #13
Source File: SystemMetricsPublisher.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Publish a single timeline entity, logging (never propagating) failures.
 */
private void putEntity(TimelineEntity entity) {
  try {
    if (LOG.isDebugEnabled()) {
      String json = TimelineUtils.dumpTimelineRecordtoJSON(entity);
      LOG.debug("Publishing the entity " + entity.getEntityId() +
          ", JSON-style content: " + json);
    }
    client.putEntities(entity);
  } catch (Exception e) {
    // Publishing is best-effort; a failure must not break the caller.
    LOG.error("Error when publishing entity [" + entity.getEntityType() + ","
        + entity.getEntityId() + "]", e);
  }
}
 
Example #14
Source File: TestTimelineRecords.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Round-trips two TimelineDomain records through a TimelineDomains
 * container and verifies every field is preserved.
 */
@Test
public void testTimelineDomain() throws Exception {
  TimelineDomains domains = new TimelineDomains();

  TimelineDomain domain = null;
  for (int i = 0; i < 2; ++i) {
    domain = new TimelineDomain();
    domain.setId("test id " + (i + 1));
    domain.setDescription("test description " + (i + 1));
    domain.setOwner("test owner " + (i + 1));
    domain.setReaders("test_reader_user_" + (i + 1) +
        " test_reader_group+" + (i + 1));
    domain.setWriters("test_writer_user_" + (i + 1) +
        " test_writer_group+" + (i + 1));
    domain.setCreatedTime(0L);
    domain.setModifiedTime(1L);
    domains.addDomain(domain);
  }
  LOG.info("Domain in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(domains, true));

  Assert.assertEquals(2, domains.getDomains().size());

  for (int i = 0; i < domains.getDomains().size(); ++i) {
    domain = domains.getDomains().get(i);
    Assert.assertEquals("test id " + (i + 1), domain.getId());
    Assert.assertEquals("test description " + (i + 1),
        domain.getDescription());
    Assert.assertEquals("test owner " + (i + 1), domain.getOwner());
    Assert.assertEquals("test_reader_user_" + (i + 1) +
        " test_reader_group+" + (i + 1), domain.getReaders());
    Assert.assertEquals("test_writer_user_" + (i + 1) +
        " test_writer_group+" + (i + 1), domain.getWriters());
    // Long.valueOf replaces the deprecated new Long(...) constructor.
    Assert.assertEquals(Long.valueOf(0L), domain.getCreatedTime());
    Assert.assertEquals(Long.valueOf(1L), domain.getModifiedTime());
  }
}
 
Example #15
Source File: TestTimelineRecords.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Builds three TimelinePutError records (one added directly, two via a
 * list, with error1 appearing twice) and verifies the response preserves
 * their id, type, and error code.
 */
@Test
public void testTimelinePutErrors() throws Exception {
  // Renamed local from TimelinePutErrors to follow lowerCamelCase.
  TimelinePutResponse timelinePutErrors = new TimelinePutResponse();
  TimelinePutError error1 = new TimelinePutError();
  error1.setEntityId("entity id 1");
  // Bug fix: was a second setEntityId(...) call, which overwrote the id
  // and left the entity type unset; the type must be set here instead.
  error1.setEntityType("entity type 1");
  error1.setErrorCode(TimelinePutError.NO_START_TIME);
  timelinePutErrors.addError(error1);
  List<TimelinePutError> response = new ArrayList<TimelinePutError>();
  response.add(error1);
  TimelinePutError error2 = new TimelinePutError();
  error2.setEntityId("entity id 2");
  // Bug fix: same setEntityId/setEntityType mix-up as error1 above.
  error2.setEntityType("entity type 2");
  error2.setErrorCode(TimelinePutError.IO_EXCEPTION);
  response.add(error2);
  timelinePutErrors.addErrors(response);
  LOG.info("Errors in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(timelinePutErrors, true));

  Assert.assertEquals(3, timelinePutErrors.getErrors().size());
  TimelinePutError e = timelinePutErrors.getErrors().get(0);
  Assert.assertEquals(error1.getEntityId(), e.getEntityId());
  Assert.assertEquals(error1.getEntityType(), e.getEntityType());
  Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
  e = timelinePutErrors.getErrors().get(1);
  Assert.assertEquals(error1.getEntityId(), e.getEntityId());
  Assert.assertEquals(error1.getEntityType(), e.getEntityType());
  Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
  e = timelinePutErrors.getErrors().get(2);
  Assert.assertEquals(error2.getEntityId(), e.getEntityId());
  Assert.assertEquals(error2.getEntityType(), e.getEntityType());
  Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
}
 
Example #16
Source File: PhoenixHBaseAccessor.java    From ambari-metrics with Apache License 2.0 5 votes vote down vote up
/**
 * Persist transient metrics into the transient-metric table, one upsert per
 * metric. Individual insert failures are logged and skipped so one bad
 * metric does not abort the batch.
 */
private void commitTransientMetrics(Connection conn, Collection<TimelineMetric> transientMetrics) throws SQLException, IOException {
  LOG.debug("Committing transient metrics to store");

  // try-with-resources guarantees the statement is closed even when an
  // exception escapes the loop (the original leaked the statement).
  try (PreparedStatement metricTransientRecordStmt = conn.prepareStatement(String.format(
      UPSERT_TRANSIENT_METRICS_SQL, METRIC_TRANSIENT_TABLE_NAME))) {
    for (TimelineMetric metric : transientMetrics) {

      metricTransientRecordStmt.clearParameters();

      if (LOG.isTraceEnabled()) {
        LOG.trace("host: " + metric.getHostName() + ", " +
          "metricName = " + metric.getMetricName() + ", " +
          "values: " + metric.getMetricValues());
      }
      // aggregates: [0]=sum-like, [1], [2], [3] per AggregatorUtils' layout
      // — TODO confirm exact semantics against AggregatorUtils.
      double[] aggregates = AggregatorUtils.calculateAggregates(
        metric.getMetricValues());

      metricTransientRecordStmt.setString(1, metric.getMetricName());
      metricTransientRecordStmt.setString(2, metric.getHostName());
      metricTransientRecordStmt.setString(3, metric.getAppId());
      metricTransientRecordStmt.setString(4, metric.getInstanceId());
      metricTransientRecordStmt.setLong(5, metric.getStartTime());
      metricTransientRecordStmt.setString(6, metric.getUnits());
      metricTransientRecordStmt.setDouble(7, aggregates[0]);
      metricTransientRecordStmt.setDouble(8, aggregates[1]);
      metricTransientRecordStmt.setDouble(9, aggregates[2]);
      metricTransientRecordStmt.setLong(10, (long) aggregates[3]);
      // Raw metric values are stored as a JSON blob alongside aggregates.
      String json = TimelineUtils.dumpTimelineRecordtoJSON(metric.getMetricValues());
      metricTransientRecordStmt.setString(11, json);

      try {
        metricTransientRecordStmt.executeUpdate();
      } catch (SQLException sql) {
        // Best-effort per-row insert: log and continue with the next metric.
        LOG.error("Failed on inserting transient metric records to store.", sql);
      }
    }
  }
}
 
Example #17
Source File: TestTimelineRecords.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Builds two fully-populated TimelineEntity records (events, related
 * entities, primary filters, other info, domain id), serializes the batch
 * to JSON for inspection, and verifies every field round-trips.
 */
@Test
public void testEntities() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  // Two entities, each carrying two events and two of every metadata kind.
  for (int j = 0; j < 2; ++j) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity id " + j);
    entity.setEntityType("entity type " + j);
    entity.setStartTime(System.currentTimeMillis());
    for (int i = 0; i < 2; ++i) {
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(System.currentTimeMillis());
      event.setEventType("event type " + i);
      event.addEventInfo("key1", "val1");
      event.addEventInfo("key2", "val2");
      entity.addEvent(event);
    }
    entity.addRelatedEntity("test ref type 1", "test ref id 1");
    entity.addRelatedEntity("test ref type 2", "test ref id 2");
    entity.addPrimaryFilter("pkey1", "pval1");
    entity.addPrimaryFilter("pkey2", "pval2");
    entity.addOtherInfo("okey1", "oval1");
    entity.addOtherInfo("okey2", "oval2");
    entity.setDomainId("domain id " + j);
    entities.addEntity(entity);
  }
  LOG.info("Entities in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));

  Assert.assertEquals(2, entities.getEntities().size());
  // Entities must come back in insertion order with all fields intact.
  TimelineEntity entity1 = entities.getEntities().get(0);
  Assert.assertEquals("entity id 0", entity1.getEntityId());
  Assert.assertEquals("entity type 0", entity1.getEntityType());
  Assert.assertEquals(2, entity1.getRelatedEntities().size());
  Assert.assertEquals(2, entity1.getEvents().size());
  Assert.assertEquals(2, entity1.getPrimaryFilters().size());
  Assert.assertEquals(2, entity1.getOtherInfo().size());
  Assert.assertEquals("domain id 0", entity1.getDomainId());
  TimelineEntity entity2 = entities.getEntities().get(1);
  Assert.assertEquals("entity id 1", entity2.getEntityId());
  Assert.assertEquals("entity type 1", entity2.getEntityType());
  Assert.assertEquals(2, entity2.getRelatedEntities().size());
  Assert.assertEquals(2, entity2.getEvents().size());
  Assert.assertEquals(2, entity2.getPrimaryFilters().size());
  Assert.assertEquals(2, entity2.getOtherInfo().size());
  Assert.assertEquals("domain id 1", entity2.getDomainId());
}
 
Example #18
Source File: TestTimelineRecords.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a TimelineEvents batch of two per-entity groups, each with two
 * events carrying two info entries, serializes it to JSON for inspection,
 * and verifies grouping, ordering, and event fields round-trip.
 */
@Test
public void testEvents() throws Exception {
  TimelineEvents events = new TimelineEvents();
  // Two entity groups, each with two events carrying two info entries.
  for (int j = 0; j < 2; ++j) {
    TimelineEvents.EventsOfOneEntity partEvents =
        new TimelineEvents.EventsOfOneEntity();
    partEvents.setEntityId("entity id " + j);
    partEvents.setEntityType("entity type " + j);
    for (int i = 0; i < 2; ++i) {
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(System.currentTimeMillis());
      event.setEventType("event type " + i);
      event.addEventInfo("key1", "val1");
      event.addEventInfo("key2", "val2");
      partEvents.addEvent(event);
    }
    events.addEvent(partEvents);
  }
  LOG.info("Events in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true));

  Assert.assertEquals(2, events.getAllEvents().size());
  // Groups and their events must come back in insertion order.
  TimelineEvents.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0);
  Assert.assertEquals("entity id 0", partEvents1.getEntityId());
  Assert.assertEquals("entity type 0", partEvents1.getEntityType());
  Assert.assertEquals(2, partEvents1.getEvents().size());
  TimelineEvent event11 = partEvents1.getEvents().get(0);
  Assert.assertEquals("event type 0", event11.getEventType());
  Assert.assertEquals(2, event11.getEventInfo().size());
  TimelineEvent event12 = partEvents1.getEvents().get(1);
  Assert.assertEquals("event type 1", event12.getEventType());
  Assert.assertEquals(2, event12.getEventInfo().size());
  TimelineEvents.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1);
  Assert.assertEquals("entity id 1", partEvents2.getEntityId());
  Assert.assertEquals("entity type 1", partEvents2.getEntityType());
  Assert.assertEquals(2, partEvents2.getEvents().size());
  TimelineEvent event21 = partEvents2.getEvents().get(0);
  Assert.assertEquals("event type 0", event21.getEventType());
  Assert.assertEquals(2, event21.getEventInfo().size());
  TimelineEvent event22 = partEvents2.getEvents().get(1);
  Assert.assertEquals("event type 1", event22.getEventType());
  Assert.assertEquals(2, event22.getEventInfo().size());
}
 
Example #19
Source File: TestTimelineRecords.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a TimelineEvents batch of two per-entity groups, each with two
 * events carrying two info entries, serializes it to JSON for inspection,
 * and verifies grouping, ordering, and event fields round-trip.
 */
@Test
public void testEvents() throws Exception {
  TimelineEvents events = new TimelineEvents();
  // Two entity groups, each with two events carrying two info entries.
  for (int j = 0; j < 2; ++j) {
    TimelineEvents.EventsOfOneEntity partEvents =
        new TimelineEvents.EventsOfOneEntity();
    partEvents.setEntityId("entity id " + j);
    partEvents.setEntityType("entity type " + j);
    for (int i = 0; i < 2; ++i) {
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(System.currentTimeMillis());
      event.setEventType("event type " + i);
      event.addEventInfo("key1", "val1");
      event.addEventInfo("key2", "val2");
      partEvents.addEvent(event);
    }
    events.addEvent(partEvents);
  }
  LOG.info("Events in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true));

  Assert.assertEquals(2, events.getAllEvents().size());
  // Groups and their events must come back in insertion order.
  TimelineEvents.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0);
  Assert.assertEquals("entity id 0", partEvents1.getEntityId());
  Assert.assertEquals("entity type 0", partEvents1.getEntityType());
  Assert.assertEquals(2, partEvents1.getEvents().size());
  TimelineEvent event11 = partEvents1.getEvents().get(0);
  Assert.assertEquals("event type 0", event11.getEventType());
  Assert.assertEquals(2, event11.getEventInfo().size());
  TimelineEvent event12 = partEvents1.getEvents().get(1);
  Assert.assertEquals("event type 1", event12.getEventType());
  Assert.assertEquals(2, event12.getEventInfo().size());
  TimelineEvents.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1);
  Assert.assertEquals("entity id 1", partEvents2.getEntityId());
  Assert.assertEquals("entity type 1", partEvents2.getEntityType());
  Assert.assertEquals(2, partEvents2.getEvents().size());
  TimelineEvent event21 = partEvents2.getEvents().get(0);
  Assert.assertEquals("event type 0", event21.getEventType());
  Assert.assertEquals(2, event21.getEventInfo().size());
  TimelineEvent event22 = partEvents2.getEvents().get(1);
  Assert.assertEquals("event type 1", event22.getEventType());
  Assert.assertEquals(2, event22.getEventInfo().size());
}
 
Example #20
Source File: TestTimelineRecords.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Builds two fully-populated TimelineEntity records (events, related
 * entities, primary filters, other info, domain id), serializes the batch
 * to JSON for inspection, and verifies every field round-trips.
 */
@Test
public void testEntities() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  // Two entities, each carrying two events and two of every metadata kind.
  for (int j = 0; j < 2; ++j) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity id " + j);
    entity.setEntityType("entity type " + j);
    entity.setStartTime(System.currentTimeMillis());
    for (int i = 0; i < 2; ++i) {
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(System.currentTimeMillis());
      event.setEventType("event type " + i);
      event.addEventInfo("key1", "val1");
      event.addEventInfo("key2", "val2");
      entity.addEvent(event);
    }
    entity.addRelatedEntity("test ref type 1", "test ref id 1");
    entity.addRelatedEntity("test ref type 2", "test ref id 2");
    entity.addPrimaryFilter("pkey1", "pval1");
    entity.addPrimaryFilter("pkey2", "pval2");
    entity.addOtherInfo("okey1", "oval1");
    entity.addOtherInfo("okey2", "oval2");
    entity.setDomainId("domain id " + j);
    entities.addEntity(entity);
  }
  LOG.info("Entities in JSON:");
  LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));

  Assert.assertEquals(2, entities.getEntities().size());
  // Entities must come back in insertion order with all fields intact.
  TimelineEntity entity1 = entities.getEntities().get(0);
  Assert.assertEquals("entity id 0", entity1.getEntityId());
  Assert.assertEquals("entity type 0", entity1.getEntityType());
  Assert.assertEquals(2, entity1.getRelatedEntities().size());
  Assert.assertEquals(2, entity1.getEvents().size());
  Assert.assertEquals(2, entity1.getPrimaryFilters().size());
  Assert.assertEquals(2, entity1.getOtherInfo().size());
  Assert.assertEquals("domain id 0", entity1.getDomainId());
  TimelineEntity entity2 = entities.getEntities().get(1);
  Assert.assertEquals("entity id 1", entity2.getEntityId());
  Assert.assertEquals("entity type 1", entity2.getEntityType());
  Assert.assertEquals(2, entity2.getRelatedEntities().size());
  Assert.assertEquals(2, entity2.getEvents().size());
  Assert.assertEquals(2, entity2.getPrimaryFilters().size());
  Assert.assertEquals(2, entity2.getOtherInfo().size());
  Assert.assertEquals("domain id 1", entity2.getDomainId());
}
 
Example #21
Source File: AbstractMiniHBaseClusterTest.java    From ambari-metrics with Apache License 2.0 4 votes vote down vote up
/**
 * Test helper: upserts each metric in the given batch into the metric
 * record table and commits the connection. Metrics whose UUID cannot be
 * computed are logged and skipped; the PreparedStatement is always closed.
 */
protected void insertMetricRecords(Connection conn, TimelineMetrics metrics)
  throws SQLException, IOException {

  List<TimelineMetric> timelineMetrics = metrics.getMetrics();
  // Empty batch is a no-op, not an error.
  if (timelineMetrics == null || timelineMetrics.isEmpty()) {
    LOG.debug("Empty metrics insert request.");
    return;
  }

  PreparedStatement metricRecordStmt = null;

  try {
    metricRecordStmt = conn.prepareStatement(String.format(
      UPSERT_METRICS_SQL, METRICS_RECORD_TABLE_NAME));

    for (TimelineMetric metric : timelineMetrics) {
      metricRecordStmt.clearParameters();

      if (LOG.isTraceEnabled()) {
        LOG.trace("host: " + metric.getHostName() + ", " +
          "metricName = " + metric.getMetricName() + ", " +
          "values: " + metric.getMetricValues());
      }
      // aggregates layout comes from AggregatorUtils; indexes 0-2 are
      // doubles, index 3 is a count-like value cast to int below —
      // TODO confirm exact semantics against AggregatorUtils.
      double[] aggregates =  AggregatorUtils.calculateAggregates(
        metric.getMetricValues());

      // UUID is the row key; without it the metric cannot be written.
      byte[] uuid = metadataManager.getUuid(metric, true);
      if (uuid == null) {
        LOG.error("Error computing UUID for metric. Cannot write metrics : " + metric.toString());
        continue;
      }
      metricRecordStmt.setBytes(1, uuid);
      metricRecordStmt.setLong(2, metric.getStartTime());
      metricRecordStmt.setDouble(3, aggregates[0]);
      metricRecordStmt.setDouble(4, aggregates[1]);
      metricRecordStmt.setDouble(5, aggregates[2]);
      metricRecordStmt.setInt(6, (int) aggregates[3]);
      // Raw metric values are stored as a JSON blob alongside aggregates.
      String json = TimelineUtils.dumpTimelineRecordtoJSON(metric.getMetricValues());
      metricRecordStmt.setString(7, json);

      try {
        int row = metricRecordStmt.executeUpdate();
        LOG.info("Inserted " + row + " rows.");
      } catch (SQLException sql) {
        // Best-effort per-row insert: log and continue with the next metric.
        LOG.error(sql);
      }
    }

    conn.commit();

  } finally {
    if (metricRecordStmt != null) {
      try {
        metricRecordStmt.close();
      } catch (SQLException e) {
        // Ignore
      }
    }
  }
}
 
Example #22
Source File: PhoenixHBaseAccessor.java    From ambari-metrics with Apache License 2.0 4 votes vote down vote up
/**
 * Persists a batch of timeline metrics to the metric record table.
 * Transient metrics (per metadata manager) are diverted to
 * {@link #commitTransientMetrics} in a single pass; metrics whose aggregate
 * count is zero are discarded; metrics without a computable UUID are skipped
 * with an error log. Rows are committed periodically so Phoenix's client-side
 * mutation state stays bounded, with a final commit at the end. All failures
 * are logged rather than propagated (best-effort write path).
 *
 * @param timelineMetricsCollection batches of metrics to persist
 */
public void commitMetrics(Collection<TimelineMetrics> timelineMetricsCollection) {
  LOG.debug("Committing metrics to store");
  Connection conn = null;
  PreparedStatement metricRecordStmt = null;
  List<TimelineMetric> transientMetrics = new ArrayList<>();
  int rowCount = 0;

  try {
    conn = getConnection();
    metricRecordStmt = conn.prepareStatement(String.format(
            UPSERT_METRICS_SQL, METRICS_RECORD_TABLE_NAME));
    for (TimelineMetrics timelineMetrics : timelineMetricsCollection) {
      for (TimelineMetric metric : timelineMetrics.getMetrics()) {

        // Transient metrics go to a separate store in one batch below.
        if (metadataManagerInstance.isTransientMetric(metric.getMetricName(), metric.getAppId())) {
          transientMetrics.add(metric);
          continue;
        }
        metricRecordStmt.clearParameters();

        if (LOG.isTraceEnabled()) {
          LOG.trace("host: " + metric.getHostName() + ", " +
                  "metricName = " + metric.getMetricName() + ", " +
                  "values: " + metric.getMetricValues());
        }
        // NOTE(review): assumes index layout of calculateAggregates() is
        // [sum, max, min, count] to match the bind order below — TODO confirm.
        double[] aggregates = AggregatorUtils.calculateAggregates(
                metric.getMetricValues());

        if (aggregates[3] != 0.0) {
          byte[] uuid = metadataManagerInstance.getUuid(metric, true);
          if (uuid == null) {
            LOG.error("Error computing UUID for metric. Cannot write metrics : " + metric.toString());
            continue;
          }
          // Count only rows actually bound for execution; previously the
          // counter also advanced for skipped metrics, causing early flushes.
          rowCount++;
          metricRecordStmt.setBytes(1, uuid);
          metricRecordStmt.setLong(2, metric.getStartTime());
          metricRecordStmt.setDouble(3, aggregates[0]);
          metricRecordStmt.setDouble(4, aggregates[1]);
          metricRecordStmt.setDouble(5, aggregates[2]);
          metricRecordStmt.setLong(6, (long) aggregates[3]);
          String json = TimelineUtils.dumpTimelineRecordtoJSON(metric.getMetricValues());
          metricRecordStmt.setString(7, json);

          try {
            metricRecordStmt.executeUpdate();
          } catch (SQLException | NumberFormatException ex) {
            LOG.warn("Failed on insert records to store : " + ex.getMessage());
            LOG.warn("Metric that cannot be stored : [" + metric.getMetricName() + "," + metric.getAppId() + "]" +
              metric.getMetricValues().toString());
            continue;
          }

          // Flush before Phoenix's client-side mutation state overflows.
          if (rowCount >= PHOENIX_MAX_MUTATION_STATE_SIZE - 1) {
            conn.commit();
            rowCount = 0;
          }

        } else {
          LOG.debug("Discarding empty metric record for : [" + metric.getMetricName() + "," +
            metric.getAppId() + "," +
            metric.getHostName() + "," +
            metric.getInstanceId() + "]");
        }

      }
    }
    if (CollectionUtils.isNotEmpty(transientMetrics)) {
      commitTransientMetrics(conn, transientMetrics);
    }

    // commit() blocked if HBase unavailable
    conn.commit();
  } catch (Exception exception){
    // Route through the logger (with cause) instead of printStackTrace(),
    // so failures show up in the service log rather than stderr.
    LOG.error("Failed to commit metrics to store", exception);
  }
  finally {
    if (metricRecordStmt != null) {
      try {
        metricRecordStmt.close();
      } catch (SQLException e) {
        // Ignore: nothing actionable on statement-close failure.
      }
    }
    if (conn != null) {
      try {
        conn.close();
      } catch (SQLException sql) {
        // Ignore: nothing actionable on connection-close failure.
      }
    }
  }
}