org.elasticsearch.search.aggregations.bucket.filter.Filter Java Examples

The following examples show how to use org.elasticsearch.search.aggregations.bucket.filter.Filter. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TotalSalesQueryAdapter.java — from micronaut-microservices-poc (Apache License 2.0)
@Override
TotalSalesQuery.Result extractResult(SearchResponse searchResponse) {
    // Aggregates the filtered per-product sales buckets ("agg_filter" ->
    // "count_by_product") into a Result: one entry per product plus an
    // overall total (document count + summed premium amount).
    TotalSalesQuery.Result.ResultBuilder result = TotalSalesQuery.Result.builder();
    long count = 0;
    BigDecimal amount = BigDecimal.ZERO;
    Filter filterAgg = searchResponse.getAggregations().get("agg_filter");
    Terms products = filterAgg.getAggregations().get("count_by_product");
    for (Terms.Bucket b : products.getBuckets()) {
        count += b.getDocCount();
        Sum sum = b.getAggregations().get("total_premium");
        // Evaluate the sub-aggregation value once instead of twice.
        BigDecimal premium = BigDecimal.valueOf(sum.getValue());
        // RoundingMode.HALF_UP replaces the deprecated BigDecimal.ROUND_HALF_UP int constant.
        amount = amount.add(premium.setScale(2, java.math.RoundingMode.HALF_UP));
        // NOTE(review): the per-product figure is left unscaled (as in the
        // original); only the grand total is rounded to 2 decimals — confirm
        // whether that asymmetry is intentional.
        result.productTotal(b.getKeyAsString(), SalesResult.of(b.getDocCount(), premium));
    }
    result.total(SalesResult.of(count, amount));

    return result.build();
}
 
Example #2
Source File: SalesTrendsQueryAdapter.java — from micronaut-microservices-poc (Apache License 2.0)
@Override
SalesTrendsQuery.Result extractResult(SearchResponse searchResponse) {
    // Builds a Result with one PeriodSales entry per bucket of the filtered
    // "sales" date-histogram aggregation.
    SalesTrendsQuery.Result.ResultBuilder result = SalesTrendsQuery.Result.builder();

    Filter filterAgg = searchResponse.getAggregations().get("agg_filter");
    Histogram agg = filterAgg.getAggregations().get("sales");
    for (Histogram.Bucket b : agg.getBuckets()) {
        // Date-histogram bucket keys are Joda DateTime instances in this client version.
        DateTime key = (DateTime) b.getKey();
        Sum sum = b.getAggregations().get("total_premium");
        result.periodSale(
                new SalesTrendsQuery.PeriodSales(
                        LocalDate.of(key.getYear(), key.getMonthOfYear(), key.getDayOfMonth()),
                        b.getKeyAsString(),
                        // RoundingMode.HALF_UP replaces the deprecated
                        // BigDecimal.ROUND_HALF_UP int constant.
                        SalesResult.of(b.getDocCount(),
                                BigDecimal.valueOf(sum.getValue()).setScale(2, java.math.RoundingMode.HALF_UP))
                )
        );
    }

    return result.build();
}
 
Example #3
Source File: AgentSalesQueryAdapter.java — from micronaut-microservices-poc (Apache License 2.0)
@Override
AgentSalesQuery.Result extractResult(SearchResponse searchResponse) {
    // Collects per-agent sales figures (doc count + summed premium) from the
    // filtered "count_by_agent" terms aggregation.
    AgentSalesQuery.Result.ResultBuilder builder = AgentSalesQuery.Result.builder();
    Filter filtered = searchResponse.getAggregations().get("agg_filter");
    Terms byAgent = filtered.getAggregations().get("count_by_agent");

    for (Terms.Bucket agentBucket : byAgent.getBuckets()) {
        Sum premium = agentBucket.getAggregations().get("total_premium");
        BigDecimal premiumAmount = BigDecimal.valueOf(premium.getValue());
        builder.agentTotal(
                agentBucket.getKeyAsString(),
                SalesResult.of(agentBucket.getDocCount(), premiumAmount)
        );
    }

    return builder.build();
}
 
Example #4
Source File: AnalyticsServiceElasticsearch.java — from hawkular-apm (Apache License 2.0)
private static TimeseriesStatistics toTimeseriesStatistics(Bucket bucket) {
    // Converts one date-histogram bucket into a TimeseriesStatistics entry.
    Stats stats = bucket.getAggregations().get("stats");

    // The fault count lives under nested -> faults (a filter sub-aggregation).
    Nested nested = bucket.getAggregations().get("nested");
    Filter faults = nested.getAggregations().get("faults");

    TimeseriesStatistics statistics = new TimeseriesStatistics();
    statistics.setTimestamp(bucket.getKeyAsDate().getMillis());
    statistics.setAverage((long) stats.getAvg());
    statistics.setMin((long) stats.getMin());
    statistics.setMax((long) stats.getMax());
    statistics.setCount(stats.getCount());
    statistics.setFaultCount(faults.getDocCount());
    return statistics;
}
 
Example #5
Source File: FacetResponse.java — from fess (Apache License 2.0)
public FacetResponse(final Aggregations aggregations) {
    // Partitions the returned aggregations into field facets (terms
    // aggregations) and query facets (filter aggregations whose names carry a
    // base64-encoded query after the prefix).
    aggregations.forEach(agg -> {
        final String name = agg.getName();
        if (name.startsWith(Constants.FACET_FIELD_PREFIX)) {
            fieldList.add(new Field((Terms) agg));
        } else if (name.startsWith(Constants.FACET_QUERY_PREFIX)) {
            final Filter queryFacet = (Filter) agg;
            final String encodedQuery = queryFacet.getName().substring(Constants.FACET_QUERY_PREFIX.length());
            final String query = new String(BaseEncoding.base64().decode(encodedQuery), StandardCharsets.UTF_8);
            queryCountMap.put(query, queryFacet.getDocCount());
        }
    });
}
 
Example #6
Source File: EsResponseParser.java — from occurrence (Apache License 2.0)
/**
 * Extract the buckets of an {@link Aggregation}.
 *
 * <p>A {@link Terms} aggregation yields its buckets directly; a {@link Filter}
 * aggregation yields the concatenated buckets of all its (terms)
 * sub-aggregations.
 *
 * @throws IllegalArgumentException if the aggregation is neither Terms nor Filter
 */
private static List<? extends Terms.Bucket> getBuckets(Aggregation aggregation) {
  if (aggregation instanceof Terms) {
    return ((Terms) aggregation).getBuckets();
  }
  if (aggregation instanceof Filter) {
    return ((Filter) aggregation)
      .getAggregations().asList()
        .stream()
        .map(agg -> (Terms) agg)
        .flatMap(terms -> terms.getBuckets().stream())
        .collect(Collectors.toList());
  }
  throw new IllegalArgumentException(aggregation.getClass() + " aggregation not supported");
}
 
Example #7
Source File: ProcessInstanceHistogramResource.java — from camunda-bpm-elasticsearch (Apache License 2.0)
@GET
  public AggregationsResult getDateHistogramAggregrations(
      @QueryParam("interval") String interval,
      @QueryParam("timeframe") String timeframe
  ) {
    // Returns two date histograms over process instances: one bucketed by
    // startTime for still-running instances (no endTime), one bucketed by
    // endTime for ended instances, plus the total hit count.
    // NOTE(review): the "timeframe" query parameter is accepted but currently
    // unused — confirm whether a range filter was intended here.

    Client client = ElasticSearchClientProvider.getClient(getProcessEngine());

    // BUG FIX: every case previously fell through (no break, no assignment) to
    // the default branch, so the "interval" parameter was silently ignored and
    // SECOND was always used. Each supported letter now maps to its histogram
    // interval; unknown or missing values keep the former SECOND default.
    DateHistogram.Interval dateInterval;
    switch (interval == null ? "" : interval) {
      case "s":
        dateInterval = DateHistogram.Interval.SECOND;
        break;
      case "m":
        dateInterval = DateHistogram.Interval.MINUTE;
        break;
      case "h":
        dateInterval = DateHistogram.Interval.HOUR;
        break;
      case "d":
        dateInterval = DateHistogram.Interval.DAY;
        break;
      case "w":
        dateInterval = DateHistogram.Interval.WEEK;
        break;
      case "M":
        dateInterval = DateHistogram.Interval.MONTH;
        break;
      case "q":
        dateInterval = DateHistogram.Interval.QUARTER;
        break;
      case "y":
        dateInterval = DateHistogram.Interval.YEAR;
        break;
      default:
        dateInterval = DateHistogram.Interval.SECOND;
        break;
    }

    // create buckets based on startTime
    DateHistogramBuilder histogramStartTime = AggregationBuilders.dateHistogram("dateHistogram")
        .minDocCount(0)
        .interval(dateInterval)
        .field("startTime");
    // only get the running process instances (documents missing endTime)
    FilterAggregationBuilder runningPIsAgg = AggregationBuilders.filter("running")
        .filter(FilterBuilders.missingFilter("endTime"));
    runningPIsAgg.subAggregation(histogramStartTime);

    // create buckets based on endTime
    DateHistogramBuilder histogramEndTime = AggregationBuilders.dateHistogram("dateHistogram")
        .minDocCount(0)
        .interval(dateInterval)
        .field("endTime");
    // only get the ended process instances (documents that have an endTime)
    FilterAggregationBuilder endedPIsAgg = AggregationBuilders.filter("ended")
        .filter(FilterBuilders.existsFilter("endTime"));
    endedPIsAgg.subAggregation(histogramEndTime);

    // COUNT search type: only aggregations are needed, no hits payload
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(ES_DEFAULT_INDEX_NAME_CAMUNDA_BPM)
        .setQuery(QueryBuilders.matchAllQuery())
        .addAggregation(runningPIsAgg)
        .addAggregation(endedPIsAgg)
        .setSearchType(SearchType.COUNT);

    System.out.println(searchRequestBuilder); // debug: dump the generated request

    SearchResponse searchResponse = searchRequestBuilder.get();

    long totalHits = searchResponse.getHits().getTotalHits();

    Filter running = searchResponse.getAggregations().get("running");
    DateHistogram runningDateHistogram = running.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> runningDateHistogramBuckets = parseDateHistogramAggregation(runningDateHistogram);

    Filter ended = searchResponse.getAggregations().get("ended");
    DateHistogram endedDateHistogram = ended.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> endedDateHistogramBuckets = parseDateHistogramAggregation(endedDateHistogram);

    HashMap<String, List<DateHistogramBucketPair>> dateHistogramBucketPairs = new HashMap<>();
    dateHistogramBucketPairs.put("running", runningDateHistogramBuckets);
    dateHistogramBucketPairs.put("ended", endedDateHistogramBuckets);

    AggregationsResult aggregationsResult = new AggregationsResult();
    aggregationsResult.setDateHistogramBuckets(dateHistogramBucketPairs);
    aggregationsResult.setTotalHits(totalHits);

    return aggregationsResult;
  }