com.google.api.services.monitoring.v3.model.TimeSeries Java Examples

The following examples show how to use com.google.api.services.monitoring.v3.model.TimeSeries. Each example is taken from an open-source project; the source file and license are noted above each listing.
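Before the project examples, here is a minimal, self-contained sketch of how a TimeSeries is typically assembled and written with this model API. It is illustrative only: the metric type, label, and project name are placeholders, and the Monitoring client is assumed to be already configured; the setter and create() calls mirror the ones used in the examples below.

import com.google.api.services.monitoring.v3.Monitoring;
import com.google.api.services.monitoring.v3.model.CreateTimeSeriesRequest;
import com.google.api.services.monitoring.v3.model.Metric;
import com.google.api.services.monitoring.v3.model.Point;
import com.google.api.services.monitoring.v3.model.TimeInterval;
import com.google.api.services.monitoring.v3.model.TimeSeries;
import com.google.api.services.monitoring.v3.model.TypedValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;

public class TimeSeriesSketch {

  /** Builds a single GAUGE/DOUBLE TimeSeries for a hypothetical custom metric. */
  static TimeSeries buildTimeSeries(double value, String endTimeRfc3339) {
    Metric metric = new Metric()
        .setType("custom.googleapis.com/example/metric")  // placeholder metric type
        .setLabels(ImmutableMap.of("label1", "foo"));

    Point point = new Point()
        .setValue(new TypedValue().setDoubleValue(value))
        .setInterval(new TimeInterval().setEndTime(endTimeRfc3339));

    return new TimeSeries()
        .setMetric(metric)
        .setMetricKind("GAUGE")
        .setValueType("DOUBLE")
        .setPoints(ImmutableList.of(point));
  }

  /** Sends the time series; "client" is an already-configured Monitoring service. */
  static void writeTimeSeries(Monitoring client, String project, TimeSeries ts) throws IOException {
    CreateTimeSeriesRequest request = new CreateTimeSeriesRequest();
    request.setTimeSeries(ImmutableList.of(ts));
    client.projects().timeSeries().create("projects/" + project, request).execute();
  }
}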
Example #1
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_nullLabels_encodes() throws Exception {
  ByteArrayInputStream inputStream = new ByteArrayInputStream("".getBytes(UTF_8));
  HttpResponse response = GoogleJsonResponseExceptionHelper.createHttpResponse(400, inputStream);
  HttpResponseException.Builder httpResponseExceptionBuilder =
      new HttpResponseException.Builder(response);
  httpResponseExceptionBuilder.setStatusCode(400);
  httpResponseExceptionBuilder.setStatusMessage("ALREADY_EXISTS");
  GoogleJsonResponseException exception =
      new GoogleJsonResponseException(httpResponseExceptionBuilder, null);
  when(metricDescriptorCreate.execute()).thenThrow(exception);
  when(metricDescriptorGet.execute())
      .thenReturn(new MetricDescriptor().setName("foo").setLabels(null));
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);
  writer.registerMetric(metric);

  TimeSeries timeSeries =
      writer.getEncodedTimeSeries(
          MetricPoint.create(metric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), 10L));

  assertThat(timeSeries.getMetric().getLabels()).isEmpty();
}
 
Example #2
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_cumulativeMetricPoint_ZeroInterval_encodesGreaterEndTime()
    throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);
  MetricPoint<Long> nativePoint =
      MetricPoint.create(
          metric,
          ImmutableList.of("foo"),
          Instant.ofEpochMilli(1337),
          Instant.ofEpochMilli(1337),
          10L);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("INT64");
  assertThat(timeSeries.getMetricKind()).isEqualTo("CUMULATIVE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getInt64Value()).isEqualTo(10L);
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.338Z");
}
 
Example #3
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_cumulativeMetricPoint_nonZeroInterval_encodesSameInterval()
    throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);
  MetricPoint<Long> nativePoint =
      MetricPoint.create(
          metric,
          ImmutableList.of("foo"),
          Instant.ofEpochMilli(1337),
          Instant.ofEpochMilli(1339),
          10L);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("INT64");
  assertThat(timeSeries.getMetricKind()).isEqualTo("CUMULATIVE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getInt64Value()).isEqualTo(10L);
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.339Z");
}
 
Example #4
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_booleanMetric_encodes() throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);

  MetricDescriptor boolDescriptor = StackdriverWriter.encodeMetricDescriptor(boolMetric);
  when(metricDescriptorCreate.execute()).thenReturn(boolDescriptor);
  MetricPoint<Boolean> nativePoint =
      MetricPoint.create(boolMetric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), true);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("BOOL");
  assertThat(timeSeries.getMetricKind()).isEqualTo("GAUGE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getBoolValue()).isEqualTo(true);
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
}
 
Example #5
Source File: StackdriverWriter.java    From kork with Apache License 2.0
/**
 * Helper function for logging time series errors in more detail.
 *
 * @see #findProblematicTimeSeriesElement
 */
private void handleTimeSeriesResponseException(
    HttpResponseException rex, String msg, List<TimeSeries> nextN) {
  Matcher matcher = INVALID_LABEL_REGEX.matcher(rex.getContent());
  TimeSeries ts = null;
  String label = null;
  if (matcher.find()) {
    int tsIndex = Integer.parseInt(matcher.group(1));
    ts = nextN.get(tsIndex);
    label = matcher.group(2);
    log.error("{}:  time series element: {}", rex.getMessage(), ts.toString());
    cache.addLabel(ts.getMetric().getType(), label);
    try {
      log.info("Retrying individual time series element");
      CreateTimeSeriesRequest tsRequest = new CreateTimeSeriesRequest();
      tsRequest.setTimeSeries(nextN.subList(tsIndex, tsIndex + 1));
      service.projects().timeSeries().create(projectResourceName, tsRequest).execute();
    } catch (IOException ioex) {
      log.error("Retry failed with " + ioex);
    }
  } else {
    log.error("Caught HttpResponseException {}", msg, rex);
  }
}
 
Example #6
Source File: StackdriverWriter.java    From kork with Apache License 2.0
/** Add a TimeSeries for each appropriate meter measurement. */
void addMeterToTimeSeries(Registry registry, Meter meter, List<TimeSeries> tsList) {
  Iterable<Measurement> measurements = meter.measure();
  boolean applyFilter = true;

  if (cache.meterIsTimer(registry, meter)) {
    measurements = transformTimerMeasurements(measurements);
    applyFilter = false;
  }
  for (Measurement measurement : measurements) {
    if (applyFilter && !measurementFilter.test(measurement)) {
      continue;
    }

    String descriptorType = cache.idToDescriptorType(measurement.id());
    tsList.add(measurementToTimeSeries(descriptorType, registry, meter, measurement));
  }
}
 
Example #7
Source File: StackdriverWriter.java    From java-monitoring-client-library with Apache License 2.0
/** Encodes and writes a metric point to Stackdriver. The point may be buffered. */
@Override
public <V> void write(com.google.monitoring.metrics.MetricPoint<V> point) throws IOException {
  checkNotNull(point);

  TimeSeries timeSeries = getEncodedTimeSeries(point);
  timeSeriesBuffer.add(timeSeries);

  logger.fine(String.format("Enqueued metric %s for writing", timeSeries.getMetric().getType()));
  if (timeSeriesBuffer.size() == maxPointsPerRequest) {
    flush();
  }
}
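
The flush() method referenced above is not included in this excerpt. A rough, hypothetical sketch of flushing a buffer of encoded TimeSeries values, reusing the batched create call shown in the other examples, could look like the following; the field and parameter names are assumptions, not the library's actual implementation.

// Hypothetical sketch only: the names (monitoringClient, projectResource, timeSeriesBuffer)
// are assumptions; the library's real flush() is not shown in this excerpt.
private void flushBuffer(Monitoring monitoringClient, String projectResource,
    List<TimeSeries> timeSeriesBuffer) throws IOException {
  if (timeSeriesBuffer.isEmpty()) {
    return;
  }
  CreateTimeSeriesRequest request = new CreateTimeSeriesRequest();
  request.setTimeSeries(new ArrayList<>(timeSeriesBuffer));  // copy the current batch
  monitoringClient.projects().timeSeries().create(projectResource, request).execute();
  timeSeriesBuffer.clear();
}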
 
Example #8
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_distributionMetricCustomFitter_encodes() throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);

  MetricDescriptor descriptor = StackdriverWriter.encodeMetricDescriptor(distributionMetric);
  when(metricDescriptorCreate.execute()).thenReturn(descriptor);
  MutableDistribution distribution =
      new MutableDistribution(CustomFitter.create(ImmutableSet.of(5.0)));
  distribution.add(10.0, 5L);
  distribution.add(0.0, 5L);
  MetricPoint<Distribution> nativePoint =
      MetricPoint.create(
          distributionMetric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), distribution);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("DISTRIBUTION");
  assertThat(timeSeries.getMetricKind()).isEqualTo("GAUGE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getDistributionValue())
      .isEqualTo(
          new com.google.api.services.monitoring.v3.model.Distribution()
              .setMean(5.0)
              .setSumOfSquaredDeviation(250.0)
              .setCount(10L)
              .setBucketCounts(ImmutableList.of(5L, 5L))
              .setBucketOptions(
                  new BucketOptions()
                      .setExplicitBuckets(new Explicit().setBounds(ImmutableList.of(5.0)))));
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
}
 
Example #9
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_distributionMetricLinearFitter_encodes() throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);

  MetricDescriptor descriptor = StackdriverWriter.encodeMetricDescriptor(distributionMetric);
  when(metricDescriptorCreate.execute()).thenReturn(descriptor);
  MutableDistribution distribution = new MutableDistribution(LinearFitter.create(2, 5.0, 3.0));
  distribution.add(0.0, 1L);
  distribution.add(3.0, 2L);
  distribution.add(10.0, 5L);
  distribution.add(20.0, 5L);
  MetricPoint<Distribution> nativePoint =
      MetricPoint.create(
          distributionMetric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), distribution);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("DISTRIBUTION");
  assertThat(timeSeries.getMetricKind()).isEqualTo("GAUGE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getDistributionValue())
      .isEqualTo(
          new com.google.api.services.monitoring.v3.model.Distribution()
              .setMean(12.0)
              .setSumOfSquaredDeviation(646.0)
              .setCount(13L)
              .setBucketCounts(ImmutableList.of(1L, 2L, 5L, 5L))
              .setBucketOptions(
                  new BucketOptions()
                      .setLinearBuckets(
                          new Linear().setNumFiniteBuckets(2).setWidth(5.0).setOffset(3.0))));
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
}
 
Example #10
Source File: StackdriverWriter.java    From kork with Apache License 2.0
/**
 * Convert a Spectator metric Meter into a Stackdriver TimeSeries entry.
 *
 * @param descriptorType The Stackdriver MetricDescriptorType name for the measurement.
 * @param measurement The Spectator Measurement to encode.
 * @return The Stackdriver TimeSeries equivalent for the measurement.
 */
public TimeSeries measurementToTimeSeries(
    String descriptorType, Registry registry, Meter meter, Measurement measurement) {
  Map<String, String> labels =
      cache.tagsToTimeSeriesLabels(descriptorType, measurement.id().tags());

  long millis = measurement.timestamp();
  double value = measurement.value();

  TimeInterval timeInterval = new TimeInterval();
  Date date = new Date(millis);
  timeInterval.setEndTime(rfc3339.format(date));

  String descriptorKind = cache.descriptorTypeToKind(descriptorType, registry, meter);
  if ("CUMULATIVE".equals(descriptorKind)) {
    timeInterval.setStartTime(counterStartTimeRfc3339);
  }

  TypedValue typedValue = new TypedValue();
  typedValue.setDoubleValue(value);

  Point point = new Point();
  point.setValue(typedValue);
  point.setInterval(timeInterval);

  Metric metric = new Metric();
  metric.setType(descriptorType);
  metric.setLabels(labels);

  TimeSeries ts = new TimeSeries();
  ts.setResource(monitoredResource);
  ts.setMetric(metric);
  ts.setMetricKind(descriptorKind);
  ts.setValueType("DOUBLE");
  ts.setPoints(Lists.<Point>newArrayList(point));

  return ts;
}
 
Example #11
Source File: StackdriverWriter.java    From kork with Apache License 2.0
/** Produce a TimeSeries for each appropriate measurement in the registry. */
public List<TimeSeries> registryToTimeSeries(Registry registry) {
  log.debug("Collecting metrics...");
  ArrayList<TimeSeries> tsList = new ArrayList<TimeSeries>();
  Iterator<Meter> iterator = registry.iterator();

  while (iterator.hasNext()) {
    addMeterToTimeSeries(registry, iterator.next(), tsList);
  }
  return tsList;
}
 
Example #12
Source File: StackdriverWriter.java    From kork with Apache License 2.0
/** Implementation of writeRegistry wrapped for timing. */
private void writeRegistryHelper(Registry registry) {
  MonitoredResource resource = determineMonitoredResource();
  if (resource == null) {
    log.warn("Cannot determine the managed resource - not flushing metrics.");
    return;
  }
  List<TimeSeries> tsList = registryToTimeSeries(registry);
  if (tsList.isEmpty()) {
    log.debug("No metric data points.");
    return;
  }

  CreateTimeSeriesRequest tsRequest = new CreateTimeSeriesRequest();
  int offset = 0;
  int failed = 0;
  List<TimeSeries> nextN;

  log.debug("Writing metrics...");
  while (offset < tsList.size()) {
    if (offset + MAX_TS_PER_REQUEST < tsList.size()) {
      nextN = tsList.subList(offset, offset + MAX_TS_PER_REQUEST);
      offset += MAX_TS_PER_REQUEST;
    } else {
      nextN = tsList.subList(offset, tsList.size());
      offset = tsList.size();
    }
    tsRequest.setTimeSeries(nextN);
    try {
      service.projects().timeSeries().create(projectResourceName, tsRequest).execute();
    } catch (HttpResponseException rex) {
      handleTimeSeriesResponseException(rex, "creating time series", nextN);
      failed += nextN.size();
    } catch (IOException ioex) {
      log.error("Caught Exception creating time series " + ioex);
      failed += nextN.size();
    }
  }
  log.debug("Wrote {} values", tsList.size() - failed);
}
 
Example #13
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_gaugeMetricPoint_zeroInterval_encodesSameInterval()
    throws Exception {
  when(metric.getMetricSchema())
      .thenReturn(
          MetricSchema.create(
              "/name",
              "desc",
              "vdn",
              Kind.GAUGE,
              ImmutableSet.of(LabelDescriptor.create("label1", "desc1"))));
  // Store in an intermediate value, because Mockito hates when mocks are evaluated inside of
  // thenReturn() methods.
  MetricPoint<Long> testPoint =
      MetricPoint.create(metric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), 10L);
  when(metric.getTimestampedValues()).thenReturn(ImmutableList.of(testPoint));
  // Store in an intermediate value, because Mockito hates when mocks are evaluated inside of
  // thenReturn() methods.
  MetricDescriptor descriptor = StackdriverWriter.encodeMetricDescriptor(metric);
  when(metricDescriptorCreate.execute()).thenReturn(descriptor);
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);
  MetricPoint<Long> nativePoint =
      MetricPoint.create(
          metric,
          ImmutableList.of("foo"),
          Instant.ofEpochMilli(1337),
          Instant.ofEpochMilli(1337),
          10L);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("INT64");
  assertThat(timeSeries.getMetricKind()).isEqualTo("GAUGE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getInt64Value()).isEqualTo(10L);
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.337Z");
}
 
Example #14
Source File: StackdriverWriterTest.java    From java-monitoring-client-library with Apache License 2.0
@Test
public void getEncodedTimeSeries_distributionMetricExponentialFitter_encodes() throws Exception {
  StackdriverWriter writer =
      new StackdriverWriter(client, PROJECT, MONITORED_RESOURCE, MAX_QPS, MAX_POINTS_PER_REQUEST);

  MetricDescriptor descriptor = StackdriverWriter.encodeMetricDescriptor(distributionMetric);
  when(metricDescriptorCreate.execute()).thenReturn(descriptor);
  MutableDistribution distribution =
      new MutableDistribution(ExponentialFitter.create(2, 3.0, 0.5));
  distribution.add(0.0, 1L);
  distribution.add(3.0, 2L);
  distribution.add(10.0, 5L);
  distribution.add(20.0, 5L);
  MetricPoint<Distribution> nativePoint =
      MetricPoint.create(
          distributionMetric, ImmutableList.of("foo"), Instant.ofEpochMilli(1337), distribution);

  TimeSeries timeSeries = writer.getEncodedTimeSeries(nativePoint);

  assertThat(timeSeries.getValueType()).isEqualTo("DISTRIBUTION");
  assertThat(timeSeries.getMetricKind()).isEqualTo("GAUGE");
  List<Point> points = timeSeries.getPoints();
  assertThat(points).hasSize(1);
  Point point = points.get(0);
  assertThat(point.getValue().getDistributionValue())
      .isEqualTo(
          new com.google.api.services.monitoring.v3.model.Distribution()
              .setMean(12.0)
              .setSumOfSquaredDeviation(646.0)
              .setCount(13L)
              .setBucketCounts(ImmutableList.of(1L, 0L, 2L, 10L))
              .setBucketOptions(
                  new BucketOptions()
                      .setExponentialBuckets(
                          new Exponential()
                              .setNumFiniteBuckets(2)
                              .setGrowthFactor(3.0)
                              .setScale(0.5))));
  assertThat(point.getInterval().getEndTime()).isEqualTo("1970-01-01T00:00:01.337Z");
  assertThat(point.getInterval().getStartTime()).isEqualTo("1970-01-01T00:00:01.337Z");
}
 
Example #15
Source File: GoogleMonitoringIngester.java    From macrobase with Apache License 2.0
@Override
public MBStream<Datum> getStream() throws Exception {
    if (!loaded) {
        QueryConf queryConf = getQueries(conf.getString(GOOGLE_MONITORING_QUERIES));
        String queryStart = conf.getString(GOOGLE_MONITORING_START_TIME);
        String queryEnd = conf.getString(GOOGLE_MONITORING_END_TIME);

        if (metrics.size() == 0) {
            throw new IllegalArgumentException("No metrics selected.");
        }

        // Record the attribute names.
        int idx = 1;
        Set<String> sortedAttributes = new TreeSet<>(attributes);
        for (String a : sortedAttributes) {
            conf.getEncoder().recordAttributeName(idx, a);
            ++idx;
        }

        // Each TimeSeries returned has a unique set of metric/resource labels and a stream
        // of values. Restructure the data to correlate by time so that we can ensure each Datum
        // contains the requested metrics and attributes. To ensure that the streams returned
        // by the API can be correlated by time, supply a per-series aligner.
        //
        // {timestamp, {attr_key, record}}
        Map<String, Map<String, Record>> byTime = new TreeMap<>();

        Monitoring client = buildClient();
        for (QueryConf.Query query : queryConf.getQueries()) {
            String pageToken = "";

            do {
                Projects.TimeSeries.List request = client.projects().timeSeries()
                    .list("projects/" + query.getProject())
                    .setFilter(query.getFilter())
                    .setIntervalStartTime(queryStart)
                    .setIntervalEndTime(queryEnd)
                    .setPageToken(pageToken)
                    .setAggregationAlignmentPeriod(query.getAlignmentPeriod())
                    .setAggregationPerSeriesAligner(query.getPerSeriesAligner())
                    .setAggregationCrossSeriesReducer(query.getCrossSeriesReducer())
                    .setAggregationGroupByFields(query.getGroupByFields());
                log.trace("Request: {}", request.toString());

                ListTimeSeriesResponse response = request.execute();
                log.trace("Response: {}", response.toPrettyString());

                processResponse(response, metrics, byTime);
                pageToken = response.getNextPageToken();
            } while (pageToken != null && !pageToken.isEmpty());
        }

        dataStream = convertToStream(byTime);

        log.info("Loaded {} points. Skipped {} TimeSeries and {} partial records.",
                 pointsAdded, skippedTimeSeries, skippedPoints);
        loaded = true;
    }

    return dataStream;
}
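
The processResponse helper invoked above is not part of this excerpt. As a hedged sketch of how a ListTimeSeriesResponse is commonly unpacked with this model API (the method name and the DOUBLE value assumption are illustrative, not macrobase's actual implementation):

// Hypothetical sketch: walk each returned TimeSeries and its points. The real
// processResponse(...) in GoogleMonitoringIngester is not shown in this excerpt.
static void walkResponse(ListTimeSeriesResponse response) {
  List<TimeSeries> seriesList = response.getTimeSeries();
  if (seriesList == null) {
    return;  // the field may be absent when the query matches nothing
  }
  for (TimeSeries series : seriesList) {
    String metricType = series.getMetric().getType();
    Map<String, String> metricLabels = series.getMetric().getLabels();
    Map<String, String> resourceLabels = series.getResource().getLabels();
    for (Point point : series.getPoints()) {
      String endTime = point.getInterval().getEndTime();
      Double value = point.getValue().getDoubleValue();  // assumes a DOUBLE-valued series
      // ... correlate (endTime, metricType, metricLabels, resourceLabels, value) by time here
    }
  }
}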