Java Code Examples for org.influxdb.dto.QueryResult#Series

The following examples show how to use org.influxdb.dto.QueryResult#Series. They are extracted from open source projects; the source file, project, and license are listed above each example.
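Before the examples, here is a minimal sketch of the class itself: a QueryResult.Series carries a measurement name, optional tags, a list of column names, and rows of values. The sketch below is not taken from any of the projects listed; the SeriesSketch class name and the sample data are purely illustrative, and it only uses the setters and getters that also appear in the examples (setName, setColumns, setValues, getName, getColumns, getValues).

import java.util.Arrays;
import java.util.List;

import org.influxdb.dto.QueryResult;

public class SeriesSketch {
    public static void main(String[] args) {
        // A series is essentially a name plus column names plus rows of values.
        QueryResult.Series series = new QueryResult.Series();
        series.setName("cpu_load");
        series.setColumns(Arrays.asList("time", "value"));
        series.setValues(Arrays.asList(
            Arrays.asList((Object) "2015-08-18T00:00:00Z", 0.64),
            Arrays.asList((Object) "2015-08-18T00:00:10Z", 0.71)));

        // Read it back by locating the column position and indexing each row.
        int valueIndex = series.getColumns().indexOf("value");
        for (List<Object> row : series.getValues()) {
            System.out.println(series.getName() + " -> " + row.get(valueIndex));
        }
    }
}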
Example 1
Source File: UITemplateManagementDAOImpl.java    From skywalking with Apache License 2.0
@Override
public TemplateChangeStatus changeTemplate(final DashboardSetting setting) throws IOException {
    final UITemplate.Builder builder = new UITemplate.Builder();
    final UITemplate uiTemplate = setting.toEntity();

    WhereQueryImpl<SelectQueryImpl> query = select().all()
                                                    .from(client.getDatabase(), UITemplate.INDEX_NAME)
                                                    .where(eq(InfluxConstants.TagName.ID_COLUMN, uiTemplate.id()));

    QueryResult.Series series = client.queryForSingleSeries(query);
    if (Objects.nonNull(series)) {
        Point point = Point.measurement(UITemplate.INDEX_NAME)
                           .fields(builder.data2Map(uiTemplate))
                           .tag(InfluxConstants.TagName.ID_COLUMN, uiTemplate.id())
                           .time(1L, TimeUnit.NANOSECONDS)
                           .build();
        client.write(point);
        return TemplateChangeStatus.builder().status(true).build();
    } else {
        return TemplateChangeStatus.builder().status(false).message("Can't find the template").build();
    }
}
 
Example 2
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
public void testToPOJO_HappyPath() {
  // Given...
  List<String> columnList = Arrays.asList("time", "uuid");
  List<Object> firstSeriesResult = Arrays.asList(Instant.now().toEpochMilli(), UUID.randomUUID().toString());

  QueryResult.Series series = new QueryResult.Series();
  series.setColumns(columnList);
  series.setName("CustomMeasurement");
  series.setValues(Arrays.asList(firstSeriesResult));

  QueryResult.Result internalResult = new QueryResult.Result();
  internalResult.setSeries(Arrays.asList(series));

  QueryResult queryResult = new QueryResult();
  queryResult.setResults(Arrays.asList(internalResult));

  //When...
  List<MyCustomMeasurement> myList = mapper.toPOJO(queryResult, MyCustomMeasurement.class);

  // Then...
  Assertions.assertEquals(1, myList.size(), "there must be one entry in the result list");
}
 
Example 3
Source File: MetadataQuery.java    From skywalking with Apache License 2.0
@Override
public List<Database> getAllDatabases() throws IOException {
    SelectSubQueryImpl<SelectQueryImpl> subQuery = select()
        .fromSubQuery(client.getDatabase())
        .column(ID_COLUMN).column(NAME)
        .from(ServiceTraffic.INDEX_NAME)
        .where(eq(InfluxConstants.TagName.NODE_TYPE, NodeType.Database.value()))
        .groupBy(TagName.NAME, TagName.NODE_TYPE);
    SelectQueryImpl query = select(ID_COLUMN, NAME).from(client.getDatabase());
    query.setSubQuery(subQuery);
    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }

    List<Database> databases = Lists.newArrayList();
    if (Objects.nonNull(series)) {
        for (List<Object> values : series.getValues()) {
            Database database = new Database();
            database.setId((String) values.get(1));
            database.setName((String) values.get(2));
            databases.add(database);
        }
    }
    return databases;
}
 
Example 4
Source File: MetadataQuery.java    From skywalking with Apache License 2.0
private List<Service> buildServices(Query query) throws IOException {
    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }

    ArrayList<Service> services = Lists.newArrayList();
    if (Objects.nonNull(series)) {
        for (List<Object> values : series.getValues()) {
            Service service = new Service();
            service.setId((String) values.get(1));
            service.setName((String) values.get(2));
            services.add(service);
        }
    }
    return services;
}
 
Example 5
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
public void testUnsupportedField() {
  // Given...
  mapper.cacheMeasurementClass(MyPojoWithUnsupportedField.class);

  List<String> columnList = Arrays.asList("bar");
  List<Object> firstSeriesResult = Arrays.asList("content representing a Date");

  QueryResult.Series series = new QueryResult.Series();
  series.setColumns(columnList);
  series.setValues(Arrays.asList(firstSeriesResult));

  // When...
  List<MyPojoWithUnsupportedField> result = new LinkedList<>();
  Assertions.assertThrows(InfluxDBMapperException.class, () -> {
    mapper.parseSeriesAs(series, MyPojoWithUnsupportedField.class, result);
  });
}
 
Example 6
Source File: InfluxClient.java    From skywalking with Apache License 2.0
/**
 * Execute a query against InfluxDB with a `select count(*)` statement and return the count only.
 *
 * @throws IOException if there is an error on the InfluxDB server or communication error
 */
public int getCounter(Query query) throws IOException {
    QueryResult.Series series = queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }
    if (Objects.isNull(series)) {
        return 0;
    }
    return ((Number) series.getValues().get(0).get(1)).intValue();
}
 
Example 7
Source File: MetricsQuery.java    From skywalking with Apache License 2.0
@Override
public HeatMap readHeatMap(final MetricsCondition condition,
                           final String valueColumnName,
                           final Duration duration) throws IOException {
    final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
    List<String> ids = new ArrayList<>(pointOfTimes.size());
    pointOfTimes.forEach(pointOfTime -> {
        ids.add(pointOfTime.id(condition.getEntity().buildId()));
    });

    WhereQueryImpl<SelectQueryImpl> query = select()
        .column(ID_COLUMN)
        .column(valueColumnName)
        .from(client.getDatabase(), condition.getName())
        .where(contains(ID_COLUMN, Joiner.on("|").join(ids)));
    Map<String, List<Long>> thermodynamicValueMatrix = new HashMap<>();

    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result set: {}", query.getCommand(), series);
    }

    final int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());

    HeatMap heatMap = new HeatMap();
    if (series != null) {
        for (List<Object> values : series.getValues()) {
            heatMap.buildColumn(values.get(1).toString(), values.get(2).toString(), defaultValue);
        }
    }

    heatMap.fixMissingColumns(ids, defaultValue);

    return heatMap;
}
 
Example 8
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
public void testToPOJO_ticket573() {
  // Given...
  mapper.cacheMeasurementClass(MyCustomMeasurement.class);

  List<String> columnList = Arrays.asList("time");
  List<List<Object>> valuesList = Arrays.asList(
    Arrays.asList("2015-08-17T19:00:00-05:00"), // Chicago (UTC-5)
    Arrays.asList("2015-08-17T19:00:00.000000001-05:00"), // Chicago (UTC-5)
    Arrays.asList("2000-01-01T00:00:00-00:00"),
    Arrays.asList("2000-01-02T00:00:00+00:00")
  );

  QueryResult.Series series = new QueryResult.Series();
  series.setColumns(columnList);
  series.setValues(valuesList);

  // When...
  List<MyCustomMeasurement> result = new LinkedList<>();
  mapper.parseSeriesAs(series, MyCustomMeasurement.class, result);

  // Then...
  Assertions.assertEquals(4, result.size(), "incorrect number of elements");
  // Note: RFC3339 timestamps with TZ from InfluxDB are parsed into an Instant (UTC)
  Assertions.assertTrue(result.get(0).time.equals(Instant.parse("2015-08-18T00:00:00Z")));
  Assertions.assertTrue(result.get(1).time.equals(Instant.parse("2015-08-18T00:00:00.000000001Z")));
  // RFC3339 section 4.3 https://tools.ietf.org/html/rfc3339#section-4.3
  Assertions.assertTrue(result.get(2).time.equals(Instant.parse("2000-01-01T00:00:00Z")));
  Assertions.assertTrue(result.get(3).time.equals(Instant.parse("2000-01-02T00:00:00Z")));
}
 
Example 9
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
void testToPOJOInheritance() {
  // Given...
  mapper.cacheMeasurementClass(MySubMeasurement.class);

  String superValue = UUID.randomUUID().toString();
  String subValue = "my sub value";
  List<String> columnList = Arrays.asList("superValue", "subValue");

  List<Object> firstSeriesResult = Arrays.asList(superValue, subValue);

  QueryResult.Series series = new QueryResult.Series();
  series.setName("MySeriesName");
  series.setColumns(columnList);
  series.setValues(Arrays.asList(firstSeriesResult));

  QueryResult.Result internalResult = new QueryResult.Result();
  internalResult.setSeries(Arrays.asList(series));

  QueryResult queryResult = new QueryResult();
  queryResult.setResults(Arrays.asList(internalResult));

  //When...
  List<MySubMeasurement> result =
      mapper.toPOJO(queryResult, MySubMeasurement.class, "MySeriesName");

  //Then...
  Assertions.assertTrue(result.size() == 1);
  Assertions.assertEquals(superValue, result.get(0).superValue);
  Assertions.assertEquals(subValue, result.get(0).subValue);
}
 
Example 10
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
public void testToPOJO_HasTimeColumn() {
  // Given...
  mapper.cacheMeasurementClass(HasTimeColumnMeasurement.class);

  List<String> columnList = Arrays.asList("time");
  List<List<Object>> valuesList = Arrays.asList(
    Arrays.asList("2015-08-17T19:00:00-05:00"), // Chicago (UTC-5)
    Arrays.asList("2015-08-17T19:00:00.000000001-05:00"), // Chicago (UTC-5)
    Arrays.asList("2000-01-01T00:00:00-00:00"),
    Arrays.asList("2000-01-02T00:00:00+00:00")
  );

  QueryResult.Series series = new QueryResult.Series();
  series.setColumns(columnList);
  series.setValues(valuesList);

  // When...
  List<HasTimeColumnMeasurement> result = new LinkedList<>();
  mapper.parseSeriesAs(series, HasTimeColumnMeasurement.class, result);

  // Then...
  Assertions.assertEquals(4, result.size(), "incorrect number of elements");
  // Note: RFC3339 timestamps with TZ from InfluxDB are parsed into an Instant (UTC)
  Assertions.assertTrue(result.get(0).time.equals(Instant.parse("2015-08-18T00:00:00Z")));
  Assertions.assertTrue(result.get(1).time.equals(Instant.parse("2015-08-18T00:00:00.000000001Z")));
  // RFC3339 section 4.3 https://tools.ietf.org/html/rfc3339#section-4.3
  Assertions.assertTrue(result.get(2).time.equals(Instant.parse("2000-01-01T00:00:00Z")));
  Assertions.assertTrue(result.get(3).time.equals(Instant.parse("2000-01-02T00:00:00Z")));
}
 
Example 11
Source File: ProfileTaskQuery.java    From skywalking with Apache License 2.0
@Override
public ProfileTask getById(final String id) throws IOException {
    if (StringUtil.isEmpty(id)) {
        return null;
    }
    SelectQueryImpl query = select(
        InfluxConstants.ID_COLUMN,
        ProfileTaskRecord.SERVICE_ID,
        ProfileTaskRecord.ENDPOINT_NAME,
        ProfileTaskRecord.START_TIME,
        ProfileTaskRecord.CREATE_TIME,
        InfluxConstants.DURATION,
        ProfileTaskRecord.MIN_DURATION_THRESHOLD,
        ProfileTaskRecord.DUMP_PERIOD,
        ProfileTaskRecord.MAX_SAMPLING_COUNT
    )
        .from(client.getDatabase(), ProfileTaskRecord.INDEX_NAME)
        .where()
        .and(eq(InfluxConstants.ID_COLUMN, id))
        .limit(1);

    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }
    if (Objects.nonNull(series)) {
        return profileTaskBuilder(series.getValues().get(0));
    }
    return null;
}
 
Example 12
Source File: InfluxDBResultMapperTest.java    From influxdb-java with MIT License
@Test
public void testParseSeriesAs_testTwoValidSeries() {
  // Given...
  mapper.cacheMeasurementClass(MyCustomMeasurement.class);

  List<String> columnList = Arrays.asList("time", "uuid");

  List<Object> firstSeriesResult = Arrays.asList(Instant.now().toEpochMilli(), UUID.randomUUID().toString());
  List<Object> secondSeriesResult = Arrays.asList(Instant.now().plusSeconds(1).toEpochMilli(), UUID.randomUUID().toString());

  QueryResult.Series series = new QueryResult.Series();
  series.setColumns(columnList);
  series.setValues(Arrays.asList(firstSeriesResult, secondSeriesResult));

  // When...
  List<MyCustomMeasurement> result = new LinkedList<>();
  mapper.parseSeriesAs(series, MyCustomMeasurement.class, result);

  // Then...
  Assertions.assertTrue(result.size() == 2, "there must be two series in the result list");

  Assertions.assertEquals(firstSeriesResult.get(0), result.get(0).time.toEpochMilli(), "Field 'time' (1st series) is not valid");
  Assertions.assertEquals(firstSeriesResult.get(1), result.get(0).uuid, "Field 'uuid' (1st series) is not valid");

  Assertions.assertEquals(secondSeriesResult.get(0), result.get(1).time.toEpochMilli(), "Field 'time' (2nd series) is not valid");
  Assertions.assertEquals(secondSeriesResult.get(1), result.get(1).uuid, "Field 'uuid' (2nd series) is not valid");
}
 
Example 13
Source File: SeriesHandler.java    From monsoon with BSD 3-Clause "New" or "Revised" License
private static GroupName seriesToGroupName(QueryResult.Series series) {
    final SimpleGroupPath groupPath = pathStrToGroupPath(series.getName());
    final Tags tags;
    if (series.getTags() == null) {
        tags = Tags.EMPTY;
    } else {
        tags = Tags.valueOf(series.getTags().entrySet().stream()
                .filter(tagEntry -> !Objects.equals(tagEntry.getKey(), InfluxUtil.MONSOON_RANGE_TAG))
                .filter(tagEntry -> tagEntry.getValue() != null)
                .map(tagEntry -> SimpleMapEntry.create(tagEntry.getKey(), tagValueToMetricValue(tagEntry.getValue()))));
    }
    return GroupName.valueOf(groupPath, tags);
}
 
Example 14
Source File: MetadataQuery.java    From skywalking with Apache License 2.0
@Override
public List<ServiceInstance> getServiceInstances(final long startTimestamp,
                                                 final long endTimestamp,
                                                 final String serviceId) throws IOException {
    final long minuteTimeBucket = TimeBucket.getMinuteTimeBucket(startTimestamp);

    SelectSubQueryImpl<SelectQueryImpl> subQuery = select()
        .fromSubQuery(client.getDatabase())
        .column(ID_COLUMN).column(NAME).column(InstanceTraffic.PROPERTIES)
        .from(InstanceTraffic.INDEX_NAME)
        .where()
        .and(gte(InstanceTraffic.LAST_PING_TIME_BUCKET, minuteTimeBucket))
        .and(eq(InfluxConstants.TagName.SERVICE_ID, serviceId))
        .groupBy(TagName.NAME, TagName.SERVICE_ID);

    SelectQueryImpl query = select().column(ID_COLUMN)
                                    .column(NAME)
                                    .column(InstanceTraffic.PROPERTIES)
                                    .from(client.getDatabase(), InstanceTraffic.INDEX_NAME);
    query.setSubQuery(subQuery);

    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }

    if (Objects.isNull(series)) {
        return Collections.EMPTY_LIST;
    }

    List<List<Object>> result = series.getValues();
    List<ServiceInstance> instances = Lists.newArrayList();
    for (List<Object> values : result) {
        ServiceInstance serviceInstance = new ServiceInstance();

        serviceInstance.setId((String) values.get(1));
        serviceInstance.setName((String) values.get(2));
        serviceInstance.setInstanceUUID(serviceInstance.getId());

        String propertiesString = (String) values.get(3);
        if (!Strings.isNullOrEmpty(propertiesString)) {
            JsonObject properties = GSON.fromJson(propertiesString, JsonObject.class);
            for (Map.Entry<String, JsonElement> property : properties.entrySet()) {
                String key = property.getKey();
                String value = property.getValue().getAsString();
                if (key.equals(InstanceTraffic.PropertyUtil.LANGUAGE)) {
                    serviceInstance.setLanguage(Language.value(value));
                } else {
                    serviceInstance.getAttributes().add(new Attribute(key, value));
                }

            }
        } else {
            serviceInstance.setLanguage(Language.UNKNOWN);
        }
        instances.add(serviceInstance);
    }
    return instances;
}
 
Example 15
Source File: ProfileThreadSnapshotQuery.java    From skywalking with Apache License 2.0
@Override
public List<BasicTrace> queryProfiledSegments(String taskId) throws IOException {
    WhereQueryImpl query = select(ProfileThreadSnapshotRecord.SEGMENT_ID)
        .from(client.getDatabase(), ProfileThreadSnapshotRecord.INDEX_NAME)
        .where()
        .and(eq(ProfileThreadSnapshotRecord.TASK_ID, taskId))
        .and(eq(ProfileThreadSnapshotRecord.SEQUENCE, 0));

    final LinkedList<String> segments = new LinkedList<>();
    QueryResult.Series series = client.queryForSingleSeries(query);
    if (Objects.isNull(series)) {
        return Collections.emptyList();
    }
    series.getValues().forEach(values -> {
        segments.add((String) values.get(1));
    });

    if (segments.isEmpty()) {
        return Collections.emptyList();
    }

    query = select()
        .function(InfluxConstants.SORT_ASC, SegmentRecord.START_TIME, segments.size())
        .column(SegmentRecord.SEGMENT_ID)
        .column(SegmentRecord.START_TIME)
        .column(SegmentRecord.ENDPOINT_NAME)
        .column(SegmentRecord.LATENCY)
        .column(SegmentRecord.IS_ERROR)
        .column(SegmentRecord.TRACE_ID)
        .from(client.getDatabase(), SegmentRecord.INDEX_NAME)
        .where()
        .and(contains(SegmentRecord.SEGMENT_ID, Joiner.on("|").join(segments)));

    ArrayList<BasicTrace> result = Lists.newArrayListWithCapacity(segments.size());
    client.queryForSingleSeries(query)
          .getValues()
          .stream()
          .sorted((a, b) -> Long.compare(((Number) b.get(1)).longValue(), ((Number) a.get(1)).longValue()))
          .forEach(values -> {
              BasicTrace basicTrace = new BasicTrace();

              basicTrace.setSegmentId((String) values.get(2));
              basicTrace.setStart(String.valueOf(values.get(3)));
              basicTrace.getEndpointNames().add((String) values.get(4));
              basicTrace.setDuration(((Number) values.get(5)).intValue());
              basicTrace.setError(BooleanUtils.valueToBoolean(((Number) values.get(6)).intValue()));
              String traceIds = (String) values.get(7);
              basicTrace.getTraceIds().add(traceIds);

              result.add(basicTrace);
          });

    return result;
}
 
Example 16
Source File: ProfileThreadSnapshotQuery.java    From skywalking with Apache License 2.0
@Override
public SegmentRecord getProfiledSegment(String segmentId) throws IOException {
    WhereQueryImpl query = select().column(SegmentRecord.SEGMENT_ID)
            .column(SegmentRecord.TRACE_ID)
            .column(SegmentRecord.SERVICE_ID)
            .column(SegmentRecord.ENDPOINT_NAME)
            .column(SegmentRecord.START_TIME)
            .column(SegmentRecord.END_TIME)
            .column(SegmentRecord.LATENCY)
            .column(SegmentRecord.IS_ERROR)
            .column(SegmentRecord.DATA_BINARY)
            .column(SegmentRecord.VERSION)
            .from(client.getDatabase(), SegmentRecord.INDEX_NAME)
            .where()
            .and(eq(SegmentRecord.SEGMENT_ID, segmentId));
    List<QueryResult.Series> series = client.queryForSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result set: {}", query.getCommand(), series);
    }
    if (Objects.isNull(series) || series.isEmpty()) {
        return null;
    }

    List<Object> values = series.get(0).getValues().get(0);
    SegmentRecord segmentRecord = new SegmentRecord();

    segmentRecord.setSegmentId((String) values.get(1));
    segmentRecord.setTraceId((String) values.get(2));
    segmentRecord.setServiceId((String) values.get(3));
    segmentRecord.setEndpointName((String) values.get(4));
    segmentRecord.setStartTime((long) values.get(5));
    segmentRecord.setEndTime((long) values.get(6));
    segmentRecord.setLatency((int) values.get(7));
    segmentRecord.setIsError((int) values.get(8));
    segmentRecord.setVersion((int) values.get(10));

    String base64 = (String) values.get(9);
    if (!Strings.isNullOrEmpty(base64)) {
        segmentRecord.setDataBinary(Base64.getDecoder().decode(base64));
    }

    return segmentRecord;
}
 
Example 17
Source File: AlarmQuery.java    From skywalking with Apache License 2.0
@Override
public Alarms getAlarm(Integer scopeId, String keyword, int limit, int from, long startTB,
                       long endTB) throws IOException {

    WhereQueryImpl<SelectQueryImpl> recallQuery = select()
        .function("top", AlarmRecord.START_TIME, limit + from).as(AlarmRecord.START_TIME)
        .column(AlarmRecord.ID0)
        .column(AlarmRecord.ALARM_MESSAGE)
        .column(AlarmRecord.SCOPE)
        .from(client.getDatabase(), AlarmRecord.INDEX_NAME)
        .where();
    if (startTB > 0 && endTB > 0) {
        recallQuery.and(gte(InfluxClient.TIME, InfluxClient.timeInterval(startTB)))
                   .and(lte(InfluxClient.TIME, InfluxClient.timeInterval(endTB)));
    }
    if (!Strings.isNullOrEmpty(keyword)) {
        recallQuery.and(contains(AlarmRecord.ALARM_MESSAGE, keyword.replaceAll("/", "\\\\/")));
    }
    if (Objects.nonNull(scopeId)) {
        recallQuery.and(eq(AlarmRecord.SCOPE, scopeId));
    }

    WhereQueryImpl<SelectQueryImpl> countQuery = select().count(AlarmRecord.ID0)
                                                         .from(client.getDatabase(), AlarmRecord.INDEX_NAME)
                                                         .where();
    recallQuery.getClauses().forEach(clause -> {
        countQuery.where(clause);
    });

    Query query = new Query(countQuery.getCommand() + recallQuery.getCommand());
    List<QueryResult.Result> results = client.query(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result set: {}", query.getCommand(), results);
    }
    if (results.size() != 2) {
        throw new IOException("Expecting to get 2 Results, but it is " + results.size());
    }
    List<QueryResult.Series> series = results.get(1).getSeries();
    if (series == null || series.isEmpty()) {
        return new Alarms();
    }
    List<QueryResult.Series> counter = results.get(0).getSeries();
    Alarms alarms = new Alarms();
    alarms.setTotal(((Number) counter.get(0).getValues().get(0).get(1)).intValue());

    series.get(0).getValues()
          .stream()
          // Re-sort here, because the result set is ordered by time.
          .sorted((a, b) -> Long.compare((long) b.get(1), (long) a.get(1)))
          .skip(from)
          .forEach(values -> {
              final int sid = ((Number) values.get(4)).intValue();
              Scope scope = Scope.Finder.valueOf(sid);

              AlarmMessage message = new AlarmMessage();
              message.setStartTime((long) values.get(1));
              message.setId((String) values.get(2));
              message.setMessage((String) values.get(3));
              message.setScope(scope);
              message.setScopeId(sid);

              alarms.getMsgs().add(message);
          });
    return alarms;
}
 
Example 18
Source File: MetricsQuery.java    From skywalking with Apache License 2.0
@Override
public MetricsValues readMetricsValues(final MetricsCondition condition,
                                       final String valueColumnName,
                                       final Duration duration) throws IOException {
    final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
    List<String> ids = new ArrayList<>(pointOfTimes.size());
    pointOfTimes.forEach(pointOfTime -> {
        ids.add(pointOfTime.id(condition.getEntity().buildId()));
    });

    WhereQueryImpl<SelectQueryImpl> query = select()
        .column(ID_COLUMN)
        .column(valueColumnName)
        .from(client.getDatabase(), condition.getName())
        .where();

    if (CollectionUtils.isNotEmpty(ids)) {
        if (ids.size() == 1) {
            query.where(eq(ID_COLUMN, ids.get(0)));
        } else {
            query.where(contains(ID_COLUMN, Joiner.on("|").join(ids)));
        }
    }
    List<QueryResult.Series> seriesList = client.queryForSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result set: {}", query.getCommand(), seriesList);
    }

    MetricsValues metricsValues = new MetricsValues();
    // Label is null because readMetricsValues takes no label parameter.
    final IntValues intValues = metricsValues.getValues();

    if (CollectionUtils.isNotEmpty(seriesList)) {
        seriesList.get(0).getValues().forEach(values -> {
            KVInt kv = new KVInt();
            kv.setValue(((Number) values.get(2)).longValue());
            kv.setId((String) values.get(1));
            intValues.addKVInt(kv);
        });
    }
    metricsValues.setValues(
        Util.sortValues(intValues, ids, ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName()))
    );
    return metricsValues;
}
 
Example 19
Source File: AggregationQuery.java    From skywalking with Apache License 2.0
@Override
public List<SelectedRecord> sortMetrics(final TopNCondition condition,
                                        final String valueColumnName,
                                        final Duration duration,
                                        final List<KeyValue> additionalConditions) throws IOException {
    String measurement = condition.getName();

    // Have to re-sort here, because the top()/bottom() functions return results ordered by `time`.
    Comparator<SelectedRecord> comparator = DESCENDING;
    String functionName = InfluxConstants.SORT_DES;
    if (condition.getOrder().equals(Order.ASC)) {
        functionName = InfluxConstants.SORT_ASC;
        comparator = ASCENDING;
    }

    SelectQueryImpl query = select().function(functionName, "mean", condition.getTopN()).as("value")
                                    .column(InfluxConstants.TagName.ENTITY_ID)
                                    .from(client.getDatabase(), measurement);

    WhereSubQueryImpl<SelectSubQueryImpl<SelectQueryImpl>, SelectQueryImpl> where = select()
        .fromSubQuery(client.getDatabase())
        .mean(valueColumnName)
        .from(condition.getName()).where();
    if (additionalConditions != null) {
        additionalConditions.forEach(moreCondition -> {
            where.and(eq(moreCondition.getKey(), moreCondition.getValue()));
        });
    }
    final SelectSubQueryImpl<SelectQueryImpl> subQuery = where
        .and(gte(InfluxClient.TIME, InfluxClient.timeInterval(duration.getStartTimeBucket())))
        .and(lte(InfluxClient.TIME, InfluxClient.timeInterval(duration.getEndTimeBucket())))
        .groupBy(InfluxConstants.TagName.ENTITY_ID);

    query.setSubQuery(subQuery);

    List<QueryResult.Series> series = client.queryForSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result set: {}", query.getCommand(), series);
    }
    if (series == null || series.isEmpty()) {
        return Collections.emptyList();
    }

    List<List<Object>> dataset = series.get(0).getValues();
    List<SelectedRecord> entities = Lists.newArrayListWithCapacity(dataset.size());
    dataset.forEach(values -> {
        final SelectedRecord entity = new SelectedRecord();
        entity.setId((String) values.get(2));
        entity.setValue(((Double) values.get(1)).longValue() + "");
        entities.add(entity);
    });

    Collections.sort(entities, comparator); // Re-sort here, because the result is ordered by time.
    return entities;
}
 
Example 20
Source File: InfluxDBResultMapper.java    From influxdb-java with MIT License
<T> List<T> parseSeriesAs(final QueryResult.Series series, final Class<T> clazz, final List<T> result) {
  return parseSeriesAs(series, clazz, result, TimeUnit.MILLISECONDS);
}