org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram Java Examples

The following examples show how to use org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram. The examples are drawn from open source projects; the originating project and source file are noted in the header above each example.
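Before the examples, here is a minimal sketch of the usual pattern with the Elasticsearch 1.x Java client: build a DateHistogramBuilder on the request side, then read the resulting DateHistogram and its buckets from the SearchResponse. The index name "events", the field name "timestamp" and the aggregation name "per_minute" are placeholders chosen for this sketch, and an existing org.elasticsearch.client.Client instance named client is assumed; none of these come from the examples below.

// Minimal sketch; "events", "timestamp" and "per_minute" are placeholder names.
DateHistogramBuilder histogram = AggregationBuilders
        .dateHistogram("per_minute")
        .field("timestamp")
        .interval(DateHistogram.Interval.MINUTE)
        .minDocCount(0);

SearchResponse response = client.prepareSearch("events")
        .setSearchType(SearchType.COUNT)   // only aggregations are needed, not hits
        .addAggregation(histogram)
        .get();

DateHistogram perMinute = response.getAggregations().get("per_minute");
for (DateHistogram.Bucket bucket : perMinute.getBuckets()) {
    System.out.println(bucket.getKey() + " -> " + bucket.getDocCount());
}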
Example #1
Source File: AnalyticsServiceElasticsearch.java    From hawkular-apm with Apache License 2.0
@Override
public List<NodeTimeseriesStatistics> getNodeTimeseriesStatistics(String tenantId, Criteria criteria, long interval) {
    String index = client.getIndex(tenantId);
    if (!refresh(index)) {
        return null;
    }

    // Average the node's actual duration within each group
    AvgBuilder avgBuilder = AggregationBuilders
            .avg("avg")
            .field(ElasticsearchUtil.ACTUAL_FIELD);

    // Break each time bucket down by component type
    TermsBuilder componentsBuilder = AggregationBuilders
            .terms("components")
            .field("componentType")
            .size(criteria.getMaxResponseSize())
            .subAggregation(avgBuilder);

    // Bucket the node details by timestamp at the requested interval
    DateHistogramBuilder histogramBuilder = AggregationBuilders
            .dateHistogram("histogram")
            .interval(interval)
            .field(ElasticsearchUtil.TIMESTAMP_FIELD)
            .subAggregation(componentsBuilder);

    BoolQueryBuilder query = buildQuery(criteria, ElasticsearchUtil.TRANSACTION_FIELD, NodeDetails.class);
    SearchRequestBuilder request = getNodeDetailsRequest(index, criteria, query, 0)
            .addAggregation(histogramBuilder);

    SearchResponse response = getSearchResponse(request);
    DateHistogram histogram = response.getAggregations().get("histogram");

    return histogram.getBuckets().stream()
            .map(AnalyticsServiceElasticsearch::toNodeTimeseriesStatistics)
            .collect(Collectors.toList());
}
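The toNodeTimeseriesStatistics mapper itself is not part of this example. Purely as an illustration of how the sub-aggregations built above come back per bucket (and not the project's actual implementation), the histogram could be walked like this, assuming imports for org.elasticsearch.search.aggregations.bucket.terms.Terms and org.elasticsearch.search.aggregations.metrics.avg.Avg:

// Illustrative only: each time bucket carries a "components" terms sub-aggregation,
// and each component type carries an "avg" metric over the actual duration.
for (DateHistogram.Bucket bucket : histogram.getBuckets()) {
    long timestamp = bucket.getKeyAsNumber().longValue();
    Terms components = bucket.getAggregations().get("components");
    for (Terms.Bucket component : components.getBuckets()) {
        Avg avg = component.getAggregations().get("avg");
        System.out.printf("%d %s avg=%.2f%n", timestamp, component.getKey(), avg.getValue());
    }
}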
 
Example #2
Source File: Search.java    From elasticsearch-rest-command with The Unlicense
public static void buildTimeline(XContentBuilder builder,
		SearchResponse response, ESLogger logger) throws IOException {
	logger.info("Report took in millseconds:" + response.getTookInMillis());
	DateHistogram timeline = response.getAggregations().get(
			"data_over_time");

	// Format the aggregation results and write them out

	builder.startObject();
	builder.field("took", response.getTookInMillis());
	builder.field("total", timeline.getBuckets().size());

	builder.startArray("fields");
	builder.value("_bucket_timevalue");
	builder.value("_doc_count");
	builder.endArray();

	builder.startArray("rows");
	for (Bucket bucket : timeline.getBuckets()) {
		builder.startArray();
		builder.value(bucket.getKey());
		builder.value(bucket.getDocCount());
		builder.endArray();
	}
	builder.endArray().endObject();

}
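This method only consumes the response; it assumes the search request already attached a date histogram aggregation named "data_over_time". A request-side counterpart could look like the following sketch, where the field name "@timestamp" and the searchRequestBuilder variable are assumptions rather than code from the project:

// Sketch of the aggregation the timeline code above expects on the response;
// "@timestamp" is a placeholder field name.
DateHistogramBuilder dataOverTime = AggregationBuilders
        .dateHistogram("data_over_time")
        .field("@timestamp")
        .interval(DateHistogram.Interval.MINUTE)
        .minDocCount(0);
searchRequestBuilder.addAggregation(dataOverTime);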
 
Example #3
Source File: ProcessInstanceHistogramResource.java    From camunda-bpm-elasticsearch with Apache License 2.0
protected List<DateHistogramBucketPair> parseDateHistogramAggregation(DateHistogram dateHistogram) {
  ArrayList<DateHistogramBucketPair> dateHistogramBuckets = new ArrayList<>();

  for (DateHistogram.Bucket bucket : dateHistogram.getBuckets()) {
    dateHistogramBuckets.add(new DateHistogramBucketPair(bucket.getKeyAsNumber(), bucket.getDocCount()));
  }

  return dateHistogramBuckets;
}
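The DateHistogramBucketPair type used above is project-specific and not shown in this example. A minimal sketch of what such a value object could look like (the real class may well differ) is:

// Hypothetical value object pairing a bucket key (epoch millis) with its document count.
public class DateHistogramBucketPair {

    private final Number key;
    private final long docCount;

    public DateHistogramBucketPair(Number key, long docCount) {
        this.key = key;
        this.docCount = docCount;
    }

    public Number getKey() { return key; }

    public long getDocCount() { return docCount; }
}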
 
Example #4
Source File: AnalyticsServiceElasticsearch.java    From hawkular-apm with Apache License 2.0
@Override
public List<TimeseriesStatistics> getTraceCompletionTimeseriesStatistics(String tenantId, Criteria criteria, long interval) {
    String index = client.getIndex(tenantId);
    if (!refresh(index)) {
        return null;
    }

    StatsBuilder statsBuilder = AggregationBuilders
            .stats("stats")
            .field(ElasticsearchUtil.DURATION_FIELD);

    // TODO: HWKAPM-679 (related to HWKAPM-675): faults are now recorded as properties. However, this
    // currently results in the fault count being an actual count of fault properties, whereas
    // the original intention of the fault count was the number of txns that have been affected
    // by a fault.
    FilterAggregationBuilder faultCountBuilder = AggregationBuilders
            .filter("faults")
            .filter(FilterBuilders.queryFilter(QueryBuilders.boolQuery()
                    .must(QueryBuilders.matchQuery(ElasticsearchUtil.PROPERTIES_NAME_FIELD, Constants.PROP_FAULT))));

    NestedBuilder nestedFaultCountBuilder = AggregationBuilders
            .nested("nested")
            .path(ElasticsearchUtil.PROPERTIES_FIELD)
            .subAggregation(faultCountBuilder);

    DateHistogramBuilder histogramBuilder = AggregationBuilders
            .dateHistogram("histogram")
            .interval(interval)
            .field(ElasticsearchUtil.TIMESTAMP_FIELD)
            .subAggregation(statsBuilder)
            .subAggregation(nestedFaultCountBuilder);

    BoolQueryBuilder query = buildQuery(criteria, ElasticsearchUtil.TRANSACTION_FIELD, CompletionTime.class);
    SearchRequestBuilder request = getTraceCompletionRequest(index, criteria, query, 0)
            .addAggregation(histogramBuilder);

    SearchResponse response = getSearchResponse(request);
    DateHistogram histogram = response.getAggregations().get("histogram");

    return histogram.getBuckets().stream()
            .map(AnalyticsServiceElasticsearch::toTimeseriesStatistics)
            .collect(Collectors.toList());
}
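As with Example #1, the toTimeseriesStatistics mapper is not shown. Purely as an illustration of how the "stats", "nested" and "faults" sub-aggregations built above are read back per bucket (not the project's actual mapper), assuming imports for org.elasticsearch.search.aggregations.metrics.stats.Stats, org.elasticsearch.search.aggregations.bucket.nested.Nested and org.elasticsearch.search.aggregations.bucket.filter.Filter:

// Illustrative only: duration stats plus nested fault-property count per time bucket.
for (DateHistogram.Bucket bucket : histogram.getBuckets()) {
    long timestamp = bucket.getKeyAsNumber().longValue();
    Stats stats = bucket.getAggregations().get("stats");
    Nested nested = bucket.getAggregations().get("nested");
    Filter faults = nested.getAggregations().get("faults");
    System.out.printf("%d count=%d min=%.0f avg=%.0f max=%.0f faults=%d%n",
            timestamp, stats.getCount(), stats.getMin(), stats.getAvg(), stats.getMax(),
            faults.getDocCount());
}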
 
Example #5
Source File: AnalyticsServiceElasticsearch.java    From hawkular-apm with Apache License 2.0
@Override
public List<TimeseriesStatistics> getEndpointResponseTimeseriesStatistics(String tenantId, Criteria criteria, long interval) {
    String index = client.getIndex(tenantId);
    if (!refresh(index)) {
        return null;
    }

    StatsBuilder statsBuilder = AggregationBuilders
            .stats("stats")
            .field(ElasticsearchUtil.ELAPSED_FIELD);

    // TODO: HWKAPM-679 (related to HWKAPM-675): faults are now recorded as properties. However, this
    // currently results in the fault count being an actual count of fault properties, whereas
    // the original intention of the fault count was the number of txns that have been affected
    // by a fault.
    FilterAggregationBuilder faultCountBuilder = AggregationBuilders
            .filter("faults")
            .filter(FilterBuilders.queryFilter(QueryBuilders.boolQuery()
                    .must(QueryBuilders.matchQuery(ElasticsearchUtil.PROPERTIES_NAME_FIELD, Constants.PROP_FAULT))));

    NestedBuilder nestedFaultCountBuilder = AggregationBuilders
            .nested("nested")
            .path(ElasticsearchUtil.PROPERTIES_FIELD)
            .subAggregation(faultCountBuilder);

    DateHistogramBuilder histogramBuilder = AggregationBuilders
            .dateHistogram("histogram")
            .interval(interval)
            .field(ElasticsearchUtil.TIMESTAMP_FIELD)
            .subAggregation(statsBuilder)
            .subAggregation(nestedFaultCountBuilder);

    BoolQueryBuilder query = buildQuery(criteria, ElasticsearchUtil.TRANSACTION_FIELD, NodeDetails.class);
    // Only interested in service endpoints, so just Consumer nodes
    query.must(QueryBuilders.termQuery(ElasticsearchUtil.TYPE_FIELD, "Consumer"));

    SearchRequestBuilder request = getNodeDetailsRequest(index, criteria, query, 0)
            .addAggregation(histogramBuilder);

    SearchResponse response = getSearchResponse(request);
    DateHistogram histogram = response.getAggregations().get("histogram");

    return histogram.getBuckets().stream()
            .map(AnalyticsServiceElasticsearch::toTimeseriesStatistics)
            .collect(Collectors.toList());
}
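This example differs from Example #4 only in the field being aggregated (the node's elapsed time rather than the trace completion duration) and in the extra term clause restricting results to Consumer nodes. A possible alternative, sketched below and not taken from the project, would be to express that restriction as a filter aggregation wrapping the histogram, leaving the shared query untouched; the response side would then unwrap the "consumers" filter before fetching the histogram.

// Sketch: restrict to Consumer nodes via a filter aggregation instead of a query clause.
FilterAggregationBuilder consumersOnly = AggregationBuilders
        .filter("consumers")
        .filter(FilterBuilders.termFilter(ElasticsearchUtil.TYPE_FIELD, "Consumer"))
        .subAggregation(histogramBuilder);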
 
Example #6
Source File: ProcessInstanceHistogramResource.java    From camunda-bpm-elasticsearch with Apache License 2.0
  @GET
  public AggregationsResult getDateHistogramAggregrations(
      @QueryParam("interval") String interval,
      @QueryParam("timeframe") String timeframe
  ) {

    Client client = ElasticSearchClientProvider.getClient(getProcessEngine());

    // Map the interval shorthand from the query parameter onto a date histogram interval,
    // falling back to seconds for unknown values
    DateHistogram.Interval dateInterval;
    switch (interval) {
      case "m": dateInterval = DateHistogram.Interval.MINUTE;  break;
      case "h": dateInterval = DateHistogram.Interval.HOUR;    break;
      case "d": dateInterval = DateHistogram.Interval.DAY;     break;
      case "w": dateInterval = DateHistogram.Interval.WEEK;    break;
      case "M": dateInterval = DateHistogram.Interval.MONTH;   break;
      case "q": dateInterval = DateHistogram.Interval.QUARTER; break;
      case "y": dateInterval = DateHistogram.Interval.YEAR;    break;
      case "s":
      default:  dateInterval = DateHistogram.Interval.SECOND;  break;
    }

    // create buckets based on startTime
    DateHistogramBuilder histogramStartTime = AggregationBuilders.dateHistogram("dateHistogram")
        .minDocCount(0)
        .interval(dateInterval)
        .field("startTime");
    // only get the running process instances
    FilterAggregationBuilder runningPIsAgg = AggregationBuilders.filter("running")
        .filter(FilterBuilders.missingFilter("endTime"));
    runningPIsAgg.subAggregation(histogramStartTime);

    // create buckets based on endTime
    DateHistogramBuilder histogramEndTime = AggregationBuilders.dateHistogram("dateHistogram")
        .minDocCount(0)
        .interval(dateInterval)
        .field("endTime");
    // only get the ended process instances
    FilterAggregationBuilder endedPIsAgg = AggregationBuilders.filter("ended")
        .filter(FilterBuilders.existsFilter("endTime"));
    endedPIsAgg.subAggregation(histogramEndTime);


    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(ES_DEFAULT_INDEX_NAME_CAMUNDA_BPM)
        .setQuery(QueryBuilders.matchAllQuery())
        .addAggregation(runningPIsAgg)
        .addAggregation(endedPIsAgg)
        .setSearchType(SearchType.COUNT);

    System.out.println(searchRequestBuilder);

    SearchResponse searchResponse = searchRequestBuilder.get();

    long totalHits = searchResponse.getHits().getTotalHits();

    Filter running = searchResponse.getAggregations().get("running");
//    long runningTotal = running.getDocCount();

    DateHistogram runningDateHistogram = running.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> runningDateHistogramBuckets = parseDateHistogramAggregation(runningDateHistogram);


    Filter ended = searchResponse.getAggregations().get("ended");
//    long endedTotal = ended.getDocCount();

    DateHistogram endedDateHistogram = ended.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> endedDateHistogramBuckets = parseDateHistogramAggregation(endedDateHistogram);

    HashMap<String, List<DateHistogramBucketPair>> dateHistogramBucketPairs = new HashMap<>();
    dateHistogramBucketPairs.put("running", runningDateHistogramBuckets);
    dateHistogramBucketPairs.put("ended", endedDateHistogramBuckets);

    AggregationsResult aggregationsResult = new AggregationsResult();
    aggregationsResult.setDateHistogramBuckets(dateHistogramBucketPairs);
    aggregationsResult.setTotalHits(totalHits);

    return aggregationsResult;
  }
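The AggregationsResult type returned above is specific to the project and not shown in this example. A minimal sketch of what such a DTO could look like (the actual class may differ), assuming imports for java.util.List and java.util.Map:

// Hypothetical result DTO: total hit count plus the named lists of histogram buckets.
public class AggregationsResult {

    private long totalHits;
    private Map<String, List<DateHistogramBucketPair>> dateHistogramBuckets;

    public long getTotalHits() { return totalHits; }

    public void setTotalHits(long totalHits) { this.totalHits = totalHits; }

    public Map<String, List<DateHistogramBucketPair>> getDateHistogramBuckets() {
        return dateHistogramBuckets;
    }

    public void setDateHistogramBuckets(Map<String, List<DateHistogramBucketPair>> buckets) {
        this.dateHistogramBuckets = buckets;
    }
}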