Java Code Examples for org.apache.lucene.search.TotalHits

The following examples show how to use org.apache.lucene.search.TotalHits. These examples are extracted from open source projects. Each example notes the project, source file, and license it was taken from.
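Before working through the examples, a minimal sketch of the class itself may help: a TotalHits pairs a hit count (the public value field) with a Relation that states whether the count is exact (EQUAL_TO) or only a lower bound (GREATER_THAN_OR_EQUAL_TO). The class name TotalHitsBasics below is purely illustrative.

import org.apache.lucene.search.TotalHits;

public class TotalHitsBasics {
    public static void main(String[] args) {
        // An exact count: exactly 42 documents matched the query.
        TotalHits exact = new TotalHits(42, TotalHits.Relation.EQUAL_TO);

        // A lower bound: at least 1000 documents matched; counting stopped early.
        TotalHits lowerBound = new TotalHits(1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);

        // TotalHits exposes two public final fields: the count and how to interpret it.
        System.out.println(exact.value + " " + exact.relation);            // 42 EQUAL_TO
        System.out.println(lowerBound.value + " " + lowerBound.relation);  // 1000 GREATER_THAN_OR_EQUAL_TO
    }
}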
Example 1
Source Project: anomaly-detection   Source File: SearchFeatureDaoTests.java    License: Apache License 2.0
@Test
public void getFeaturesForPeriod_returnNonEmpty_givenDefaultValue() throws Exception {
    long start = 100L;
    long end = 200L;

    // pre-conditions
    when(ParseUtils.generateInternalFeatureQuery(eq(detector), eq(start), eq(end), eq(xContent))).thenReturn(searchSourceBuilder);
    when(searchResponse.getHits()).thenReturn(new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 1f));

    List<Aggregation> aggList = new ArrayList<>(1);

    NumericMetricsAggregation.SingleValue agg = mock(NumericMetricsAggregation.SingleValue.class);
    when(agg.getName()).thenReturn("deny_max");
    when(agg.value()).thenReturn(0d);

    aggList.add(agg);

    Aggregations aggregations = new Aggregations(aggList);
    when(searchResponse.getAggregations()).thenReturn(aggregations);

    // test
    Optional<double[]> result = searchFeatureDao.getFeaturesForPeriod(detector, start, end);

    // verify
    assertTrue(result.isPresent());
}
 
Example 2
Source Project: anomaly-detection   Source File: TestHelpers.java    License: Apache License 2.0
public static SearchResponse createSearchResponse(ToXContentObject o) throws IOException {
    XContentBuilder content = o.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS);

    SearchHit[] hits = new SearchHit[1];
    hits[0] = new SearchHit(0).sourceRef(BytesReference.bytes(content));

    return new SearchResponse(
        new InternalSearchResponse(
            new SearchHits(hits, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f),
            new InternalAggregations(Collections.emptyList()),
            new Suggest(Collections.emptyList()),
            new SearchProfileShardResults(Collections.emptyMap()),
            false,
            false,
            1
        ),
        "",
        5,
        5,
        0,
        100,
        ShardSearchFailure.EMPTY_ARRAY,
        SearchResponse.Clusters.EMPTY
    );
}
 
Example 3
Source Project: anomaly-detection   Source File: TestHelpers.java    License: Apache License 2.0
public static SearchResponse createEmptySearchResponse() throws IOException {
    return new SearchResponse(
        new InternalSearchResponse(
            new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f),
            new InternalAggregations(Collections.emptyList()),
            new Suggest(Collections.emptyList()),
            new SearchProfileShardResults(Collections.emptyMap()),
            false,
            false,
            1
        ),
        "",
        5,
        5,
        0,
        100,
        ShardSearchFailure.EMPTY_ARRAY,
        SearchResponse.Clusters.EMPTY
    );
}
 
Example 4
Source Project: lucene-solr   Source File: SearchPanelProvider.java    License: Apache License 2.0
private void populateResults(SearchResults res) {
  totalHitsLbl.setText(String.valueOf(res.getTotalHits()));
  if (res.getTotalHits().value > 0) {
    startLbl.setText(String.valueOf(res.getOffset() + 1));
    endLbl.setText(String.valueOf(res.getOffset() + res.size()));

    prevBtn.setEnabled(res.getOffset() > 0);
    nextBtn.setEnabled(res.getTotalHits().relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO || res.getTotalHits().value > res.getOffset() + res.size());

    if (!indexHandler.getState().readOnly() && indexHandler.getState().hasDirectoryReader()) {
      delBtn.setEnabled(true);
    }

    resultsTable.setModel(new SearchResultsTableModel(res));
    resultsTable.getColumnModel().getColumn(SearchResultsTableModel.Column.DOCID.getIndex()).setPreferredWidth(SearchResultsTableModel.Column.DOCID.getColumnWidth());
    resultsTable.getColumnModel().getColumn(SearchResultsTableModel.Column.SCORE.getIndex()).setPreferredWidth(SearchResultsTableModel.Column.SCORE.getColumnWidth());
    resultsTable.getColumnModel().getColumn(SearchResultsTableModel.Column.VALUE.getIndex()).setPreferredWidth(SearchResultsTableModel.Column.VALUE.getColumnWidth());
  } else {
    startLbl.setText("0");
    endLbl.setText("0");
    prevBtn.setEnabled(false);
    nextBtn.setEnabled(false);
    delBtn.setEnabled(false);
  }
}
 
Example 5
Source Project: lucene-solr   Source File: SearchResults.java    License: Apache License 2.0
/**
 * Creates a search result page for the given raw Lucene hits.
 *
 * @param totalHits - total number of hits for this query
 * @param docs - array of hits
 * @param offset - offset of the current page
 * @param searcher - index searcher
 * @param fieldsToLoad - fields to load
 * @return the search result page
 * @throws IOException - if there is a low level IO error.
 */
static SearchResults of(TotalHits totalHits, ScoreDoc[] docs, int offset,
                        IndexSearcher searcher, Set<String> fieldsToLoad)
    throws IOException {
  SearchResults res = new SearchResults();

  res.totalHits = Objects.requireNonNull(totalHits);
  Objects.requireNonNull(docs);
  Objects.requireNonNull(searcher);

  for (ScoreDoc sd : docs) {
    Document luceneDoc = (fieldsToLoad == null) ?
        searcher.doc(sd.doc) : searcher.doc(sd.doc, fieldsToLoad);
    res.hits.add(Doc.of(sd.doc, sd.score, luceneDoc));
    res.offset = offset;
  }

  return res;
}
 
Example 6
Source Project: lucene-solr   Source File: TopSuggestDocs.java    License: Apache License 2.0
/**
 * Returns a new TopSuggestDocs, containing topN results across
 * the provided TopSuggestDocs, sorting by score. Each {@link TopSuggestDocs}
 * instance must be sorted.
 * Analogous to {@link org.apache.lucene.search.TopDocs#merge(int, org.apache.lucene.search.TopDocs[])}
 * for {@link TopSuggestDocs}
 *
 * NOTE: assumes every <code>shardHit</code> is already sorted by score
 */
public static TopSuggestDocs merge(int topN, TopSuggestDocs[] shardHits) {
  SuggestScoreDocPriorityQueue priorityQueue = new SuggestScoreDocPriorityQueue(topN);
  for (TopSuggestDocs shardHit : shardHits) {
    for (SuggestScoreDoc scoreDoc : shardHit.scoreLookupDocs()) {
      if (scoreDoc == priorityQueue.insertWithOverflow(scoreDoc)) {
        break;
      }
    }
  }
  SuggestScoreDoc[] topNResults = priorityQueue.getResults();
  if (topNResults.length > 0) {
    return new TopSuggestDocs(new TotalHits(topNResults.length, TotalHits.Relation.EQUAL_TO), topNResults);
  } else {
    return TopSuggestDocs.EMPTY;
  }
}
 
Example 7
Source Project: lucene-solr   Source File: BinaryResponseWriter.java    License: Apache License 2.0
public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOException {
  codec.writeTag(JavaBinCodec.SOLRDOCLST);
  List<Object> l = new ArrayList<>(4);
  l.add( ctx.getDocList().matches());
  l.add((long) ctx.getDocList().offset());
  
  Float maxScore = null;
  if (ctx.wantsScores()) {
    maxScore = ctx.getDocList().maxScore();
  }
  l.add(maxScore);
  l.add(ctx.getDocList().hitCountRelation() == TotalHits.Relation.EQUAL_TO);
  codec.writeArray(l);
  
  // this is a separate function so that streaming responses can use just that part
  writeResultsBody( ctx, codec );
}
 
Example 8
Source Project: lucene-solr   Source File: TaggerRequestHandler.java    License: Apache License 2.0
private DocList getDocList(int rows, FixedBitSet matchDocIdsBS) throws IOException {
  //Now we must supply a Solr DocList and add it to the response.
  //  Typically this is gotten via a SolrIndexSearcher.search(), but in this case we
  //  know exactly what documents to return, the order doesn't matter nor does
  //  scoring.
  //  Ideally an implementation of DocList could be directly implemented off
  //  of a BitSet, but there are way too many methods to implement for a minor
  //  payoff.
  int matchDocs = matchDocIdsBS.cardinality();
  int[] docIds = new int[ Math.min(rows, matchDocs) ];
  DocIdSetIterator docIdIter = new BitSetIterator(matchDocIdsBS, 1);
  for (int i = 0; i < docIds.length; i++) {
    docIds[i] = docIdIter.nextDoc();
  }
  return new DocSlice(0, docIds.length, docIds, null, matchDocs, 1f, TotalHits.Relation.EQUAL_TO);
}
 
Example 9
Source Project: lucene-solr   Source File: RankQueryTestPlugin.java    License: Apache License 2.0
@SuppressWarnings({"unchecked", "rawtypes"})
public TopDocs topDocs() {
  Collections.sort(list, new Comparator() {
    public int compare(Object o1, Object o2) {
      ScoreDoc s1 = (ScoreDoc) o1;
      ScoreDoc s2 = (ScoreDoc) o2;
      if (s1.score == s2.score) {
        return 0;
      } else if (s1.score < s2.score) {
        return 1;
      } else {
        return -1;
      }
    }
  });
  ScoreDoc[] scoreDocs = list.toArray(new ScoreDoc[list.size()]);
  return new TopDocs(new TotalHits(list.size(), TotalHits.Relation.EQUAL_TO), scoreDocs);
}
 
Example 10
Source Project: lucene-solr   Source File: RankQueryTestPlugin.java    License: Apache License 2.0
@SuppressWarnings({"unchecked", "rawtypes"})
public TopDocs topDocs() {
  Collections.sort(list, new Comparator() {
    public int compare(Object o1, Object o2) {
      ScoreDoc s1 = (ScoreDoc) o1;
      ScoreDoc s2 = (ScoreDoc) o2;
      if (s1.score == s2.score) {
        return 0;
      } else if (s1.score > s2.score) {
        return 1;
      } else {
        return -1;
      }
    }
  });
  ScoreDoc[] scoreDocs = list.toArray(new ScoreDoc[list.size()]);
  return new TopDocs(new TotalHits(list.size(), TotalHits.Relation.EQUAL_TO), scoreDocs);
}
 
Example 11
Source Project: elasticsearch-sql   Source File: MinusExecutor.java    License: Apache License 2.0
private void fillMinusHitsFromOneField(String fieldName, Set<Object> fieldValues, SearchHit someHit) {
    List<SearchHit> minusHitsList = new ArrayList<>();
    int currentId = 1;
    for(Object result : fieldValues){
        Map<String,DocumentField> fields = new HashMap<>();
        ArrayList<Object> values = new ArrayList<Object>();
        values.add(result);
        fields.put(fieldName,new DocumentField(fieldName, values));
        SearchHit searchHit = new SearchHit(currentId,currentId+"", new Text(someHit.getType()), fields, null);
        searchHit.sourceRef(someHit.getSourceRef());
        searchHit.getSourceAsMap().clear();
        Map<String, Object> sourceAsMap = new HashMap<>();
        sourceAsMap.put(fieldName,result);
        searchHit.getSourceAsMap().putAll(sourceAsMap);
        currentId++;
        minusHitsList.add(searchHit);
    }
    int totalSize = currentId - 1;
    SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]);
    this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, TotalHits.Relation.EQUAL_TO), 1.0f);
}
 
Example 12
Source Project: elasticsearch-sql   Source File: IntersectExecutor.java    License: Apache License 2.0
private void fillIntersectHitsFromResults(Set<ComperableHitResult> comparableHitResults) {
    int currentId = 1;
    List<SearchHit> intersectHitsList = new ArrayList<>(comparableHitResults.size());
    Set<Map.Entry<String, String>> firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias().entrySet();
    for (ComperableHitResult result : comparableHitResults) {
        SearchHit originalHit = result.getOriginalHit();
        SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), new Text(originalHit.getType()), originalHit.getFields(), null);
        searchHit.sourceRef(originalHit.getSourceRef());
        searchHit.getSourceAsMap().clear();
        Map<String, Object> sourceAsMap = result.getFlattenMap();
        for (Map.Entry<String, String> entry : firstTableFieldToAlias) {
            if (sourceAsMap.containsKey(entry.getKey())) {
                Object value = sourceAsMap.get(entry.getKey());
                sourceAsMap.remove(entry.getKey());
                sourceAsMap.put(entry.getValue(), value);
            }
        }

        searchHit.getSourceAsMap().putAll(sourceAsMap);
        currentId++;
        intersectHitsList.add(searchHit);
    }
    int totalSize = currentId - 1;
    SearchHit[] unionHitsArr = intersectHitsList.toArray(new SearchHit[totalSize]);
    this.intersectHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, TotalHits.Relation.EQUAL_TO), 1.0f);
}
 
Example 13
Source Project: lucene-solr   Source File: SearchImpl.java    License: Apache License 2.0
@Override
public Optional<SearchResults> nextPage() {
  if (currentPage < 0 || query == null) {
    throw new LukeException(new IllegalStateException("Search session not started."));
  }

  // proceed to next page
  currentPage += 1;

  if (totalHits.value == 0 ||
      (totalHits.relation == TotalHits.Relation.EQUAL_TO && currentPage * pageSize >= totalHits.value)) {
    log.warn("No more next search results are available.");
    return Optional.empty();
  }

  try {

    if (currentPage * pageSize < docs.length) {
      // if cached results exist, return that.
      int from = currentPage * pageSize;
      int to = Math.min(from + pageSize, docs.length);
      ScoreDoc[] part = ArrayUtil.copyOfSubArray(docs, from, to);
      return Optional.of(SearchResults.of(totalHits, part, from, searcher, fieldsToLoad));
    } else {
      return Optional.of(search());
    }

  } catch (IOException e) {
    throw new LukeException("Search Failed.", e);
  }
}
 
Example 14
Source Project: lucene-solr   Source File: GroupDocs.java    License: Apache License 2.0
public GroupDocs(float score,
                 float maxScore,
                 TotalHits totalHits,
                 ScoreDoc[] scoreDocs,
                 T groupValue,
                 Object[] groupSortValues) {
  this.score = score;
  this.maxScore = maxScore;
  this.totalHits = totalHits;
  this.scoreDocs = scoreDocs;
  this.groupValue = groupValue;
  this.groupSortValues = groupSortValues;
}
 
Example 15
Source Project: lucene-solr   Source File: TopGroupsTest.java    License: Apache License 2.0
private static GroupDocs<String> createEmptyGroupDocs(String groupValue, Object[] groupSortValues) {
  return new GroupDocs<String>(
    Float.NaN /* score */,
    Float.NaN /* maxScore */,
    new TotalHits(0, TotalHits.Relation.EQUAL_TO),
    new ScoreDoc[0],
    groupValue,
    groupSortValues);
}
 
Example 16
Source Project: lucene-solr   Source File: TopGroupsTest.java    License: Apache License 2.0
private static GroupDocs<String> createSingletonGroupDocs(String groupValue, Object[] groupSortValues,
  int docId, float docScore, int shardIndex) {
  return new GroupDocs<String>(
    Float.NaN /* score */,
    docScore /* maxScore */,
    new TotalHits(1, TotalHits.Relation.EQUAL_TO),
    new ScoreDoc[] { new ScoreDoc(docId, docScore, shardIndex) },
    groupValue,
    groupSortValues);
}
 
Example 17
Source Project: lucene-solr   Source File: FloatPointNearestNeighbor.java    License: Apache License 2.0
public static TopFieldDocs nearest(IndexSearcher searcher, String field, int topN, float... origin) throws IOException {
  if (topN < 1) {
    throw new IllegalArgumentException("topN must be at least 1; got " + topN);
  }
  if (field == null) {
    throw new IllegalArgumentException("field must not be null");
  }
  if (searcher == null) {
    throw new IllegalArgumentException("searcher must not be null");
  }
  List<BKDReader> readers = new ArrayList<>();
  List<Integer> docBases = new ArrayList<>();
  List<Bits> liveDocs = new ArrayList<>();
  int totalHits = 0;
  for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) {
    PointValues points = leaf.reader().getPointValues(field);
    if (points != null) {
      if (points instanceof BKDReader == false) {
        throw new IllegalArgumentException("can only run on Lucene60PointsReader points implementation, but got " + points);
      }
      totalHits += points.getDocCount();
      readers.add((BKDReader)points);
      docBases.add(leaf.docBase);
      liveDocs.add(leaf.reader().getLiveDocs());
    }
  }

  NearestHit[] hits = nearest(readers, liveDocs, docBases, topN, origin);

  // Convert to TopFieldDocs:
  ScoreDoc[] scoreDocs = new ScoreDoc[hits.length];
  for(int i=0;i<hits.length;i++) {
    NearestHit hit = hits[i];
    scoreDocs[i] = new FieldDoc(hit.docID, 0.0f, new Object[] { (float)Math.sqrt(hit.distanceSquared) });
  }
  return new TopFieldDocs(new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), scoreDocs, null);
}
 
Example 18
Source Project: lucene-solr   Source File: TestUtil.java    License: Apache License 2.0
/**
 * Assert that the given {@link TopDocs} have the same top docs and consistent hit counts.
 */
public static void assertConsistent(TopDocs expected, TopDocs actual) {
  Assert.assertEquals("wrong total hits", expected.totalHits.value == 0, actual.totalHits.value == 0);
  if (expected.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
    if (actual.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
      Assert.assertEquals("wrong total hits", expected.totalHits.value, actual.totalHits.value);
    } else {
      Assert.assertTrue("wrong total hits", expected.totalHits.value >= actual.totalHits.value);
    }
  } else if (actual.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
    Assert.assertTrue("wrong total hits", expected.totalHits.value <= actual.totalHits.value);
  }
  Assert.assertEquals("wrong hit count", expected.scoreDocs.length, actual.scoreDocs.length);
  for(int hitIDX=0;hitIDX<expected.scoreDocs.length;hitIDX++) {
    final ScoreDoc expectedSD = expected.scoreDocs[hitIDX];
    final ScoreDoc actualSD = actual.scoreDocs[hitIDX];
    Assert.assertEquals("wrong hit docID", expectedSD.doc, actualSD.doc);
    Assert.assertEquals("wrong hit score", expectedSD.score, actualSD.score, 0.0);
    if (expectedSD instanceof FieldDoc) {
      Assert.assertTrue(actualSD instanceof FieldDoc);
      Assert.assertArrayEquals("wrong sort field values",
                          ((FieldDoc) expectedSD).fields,
                          ((FieldDoc) actualSD).fields);
    } else {
      Assert.assertFalse(actualSD instanceof FieldDoc);
    }
  }
}
 
Example 19
Source Project: lucene-solr   Source File: SubQueryAugmenterFactory.java    License: Apache License 2.0
@Override
public DocList getDocList() {
  return new DocSlice((int)docList.getStart(), 
      docList.size(), new int[0], new float[docList.size()],
      (int) docList.getNumFound(), 
      docList.getMaxScore() == null ?  Float.NaN : docList.getMaxScore(),
          docList.getNumFoundExact() ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
}
 
Example 20
Source Project: lucene-solr   Source File: TextResponseWriter.java    License: Apache License 2.0
public final void writeDocuments(String name, ResultContext res) throws IOException {
  DocList ids = res.getDocList();
  Iterator<SolrDocument> docsStreamer = res.getProcessedDocuments();
  writeStartDocumentList(name, ids.offset(), ids.size(), ids.matches(),
      res.wantsScores() ? ids.maxScore() : null, ids.hitCountRelation() == TotalHits.Relation.EQUAL_TO);

  int idx = 0;
  while (docsStreamer.hasNext()) {
    writeSolrDocument(null, docsStreamer.next(), res.getReturnFields(), idx);
    idx++;
  }
  writeEndDocumentList();
}
 
Example 21
Source Project: lucene-solr   Source File: Grouping.java    License: Apache License 2.0
protected DocList createSimpleResponse() {
  @SuppressWarnings({"rawtypes"})
  GroupDocs[] groups = result != null ? result.groups : new GroupDocs[0];

  List<Integer> ids = new ArrayList<>();
  List<Float> scores = new ArrayList<>();
  int docsToGather = getMax(offset, numGroups, maxDoc);
  int docsGathered = 0;
  float maxScore = Float.NaN;

  outer:
  for (@SuppressWarnings({"rawtypes"})GroupDocs group : groups) {
    maxScore = maxAvoidNaN(maxScore, group.maxScore);

    for (ScoreDoc scoreDoc : group.scoreDocs) {
      if (docsGathered >= docsToGather) {
        break outer;
      }

      ids.add(scoreDoc.doc);
      scores.add(scoreDoc.score);
      docsGathered++;
    }
  }

  int len = docsGathered > offset ? docsGathered - offset : 0;
  int[] docs = ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()]));
  float[] docScores = ArrayUtils.toPrimitive(scores.toArray(new Float[scores.size()]));
  DocSlice docSlice = new DocSlice(offset, len, docs, docScores, getMatches(), maxScore, TotalHits.Relation.EQUAL_TO);

  if (getDocList) {
    for (int i = offset; i < docs.length; i++) {
      idSet.add(docs[i]);
    }
  }

  return docSlice;
}
 
Example 22
Source Project: lucene-solr   Source File: DocSlice.java    License: Apache License 2.0
/**
 * Primary constructor for a DocSlice instance.
 *
 * @param offset  starting offset for this range of docs
 * @param len     length of results
 * @param docs    array of docids starting at position 0
 * @param scores  array of scores that corresponds to docs, may be null
 * @param matches total number of matches for the query
 * @param matchesRelation Indicates if {@code matches} is exact or an approximation
 */
public DocSlice(int offset, int len, int[] docs, float[] scores, long matches, float maxScore, TotalHits.Relation matchesRelation) {
  this.offset=offset;
  this.len=len;
  this.docs=docs;
  this.scores=scores;
  this.matches=matches;
  this.maxScore=maxScore;
  this.ramBytesUsed = BASE_RAM_BYTES_USED + (docs == null ? 0 : ((long)docs.length << 2)) + (scores == null ? 0 : ((long)scores.length<<2)+RamUsageEstimator.NUM_BYTES_ARRAY_HEADER);
  this.matchesRelation = matchesRelation;
}
 
Example 23
Source Project: lucene-solr   Source File: TestDocSet.java    License: Apache License 2.0
public DocSlice getDocSlice(FixedBitSet bs) {
  int len = bs.cardinality();
  int[] arr = new int[len+5];
  arr[0]=10; arr[1]=20; arr[2]=30; arr[arr.length-1]=1; arr[arr.length-2]=2;
  int offset = 3;
  int end = offset + len;

  BitSetIterator iter = new BitSetIterator(bs, 0);
  // put in opposite order... DocLists are not ordered.
  for (int i=end-1; i>=offset; i--) {
    arr[i] = iter.nextDoc();
  }

  return new DocSlice(offset, len, arr, null, len*2, 100.0f, TotalHits.Relation.EQUAL_TO);
}
 
Example 24
Source Project: lucene-solr   Source File: SolrIndexSearcherTest.java    License: Apache License 2.0
private QueryResult assertMatchesGreaterThan(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException {
  QueryResult qr = new QueryResult();
  searcher.search(qr, cmd);
  assertTrue("Expecting returned matches to be greater than " + expectedCount + " but got " + qr.getDocList().matches(),
      expectedCount >= qr.getDocList().matches());
  assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, qr.getDocList().hitCountRelation());
  return qr;
}
 
Example 25
Source Project: core-ng-project   Source File: ElasticSearchTypeImpl.java    License: Apache License 2.0
private SearchResponse<T> searchResponse(org.elasticsearch.action.search.SearchResponse response) {
    SearchHit[] hits = response.getHits().getHits();
    List<T> items = new ArrayList<>(hits.length);
    for (SearchHit hit : hits) {
        items.add(mapper.fromJSON(BytesReference.toBytes(hit.getSourceRef())));
    }
    Aggregations aggregationResponse = response.getAggregations();
    Map<String, Aggregation> aggregations = aggregationResponse == null ? Map.of() : aggregationResponse.asMap();
    TotalHits totalHits = response.getHits().getTotalHits();
    long total = totalHits == null ? -1 : totalHits.value;
    return new SearchResponse<>(items, total, aggregations);
}
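In the example above, response.getHits().getTotalHits() can be null, which is why the code falls back to -1: Elasticsearch omits the total when hit counting is disabled on the request. Below is a minimal, hypothetical sketch of building such a request, assuming the standard org.elasticsearch SearchSourceBuilder, QueryBuilders, and SearchRequest classes; the index name "my-index" is a placeholder.

// Hypothetical request setup; with trackTotalHits(false) the response's
// getHits().getTotalHits() comes back as null instead of a TotalHits value.
SearchSourceBuilder source = new SearchSourceBuilder()
    .query(QueryBuilders.matchAllQuery())
    .trackTotalHits(false);
SearchRequest request = new SearchRequest("my-index").source(source);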
 
Example 26
Source Project: elasticsearch-sql   Source File: UnionExecutor.java    License: Apache License 2.0
@Override
public void run() throws IOException, SqlParseException {
    SearchResponse firstResponse = this.multiQueryBuilder.getFirstSearchRequest().get();
    SearchHit[] hits = firstResponse.getHits().getHits();
    List<SearchHit> unionHits = new ArrayList<>(hits.length);
    fillInternalSearchHits(unionHits,hits,this.multiQueryBuilder.getFirstTableFieldToAlias());
    SearchResponse secondResponse = this.multiQueryBuilder.getSecondSearchRequest().get();
    fillInternalSearchHits(unionHits,secondResponse.getHits().getHits(),this.multiQueryBuilder.getSecondTableFieldToAlias());
    int totalSize = unionHits.size();
    SearchHit[] unionHitsArr = unionHits.toArray(new SearchHit[totalSize]);
    this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, TotalHits.Relation.EQUAL_TO), 1.0f);
}
 
Example 27
Source Project: elasticsearch-sql   Source File: ElasticJoinExecutor.java    License: Apache License 2.0
public void run() throws IOException, SqlParseException {
    long timeBefore = System.currentTimeMillis();
    List<SearchHit> combinedSearchHits =  innerRun();
    int resultsSize = combinedSearchHits.size();
    SearchHit[] hits = combinedSearchHits.toArray(new SearchHit[resultsSize]);
    this.results = new SearchHits(hits, new TotalHits(resultsSize, TotalHits.Relation.EQUAL_TO), 1.0f);
    long joinTimeInMilli = System.currentTimeMillis() - timeBefore;
    this.metaResults.setTookImMilli(joinTimeInMilli);
}
 
Example 28
Source Project: elasticsearch-sql   Source File: MinusExecutor.java    License: Apache License 2.0
private void fillMinusHitsFromResults(Set<ComperableHitResult> comperableHitResults) {
    int currentId = 1;
    List<SearchHit> minusHitsList = new ArrayList<>();
    for(ComperableHitResult result : comperableHitResults){
        ArrayList<Object> values = new ArrayList<Object>();
        values.add(result);
        SearchHit originalHit = result.getOriginalHit();
        SearchHit searchHit = new SearchHit(currentId,originalHit.getId(), new Text(originalHit.getType()), originalHit.getFields(), null);
        searchHit.sourceRef(originalHit.getSourceRef());
        searchHit.getSourceAsMap().clear();
        Map<String, Object> sourceAsMap = result.getFlattenMap();
        for(Map.Entry<String,String> entry : this.builder.getFirstTableFieldToAlias().entrySet()){
            if(sourceAsMap.containsKey(entry.getKey())){
                Object value = sourceAsMap.get(entry.getKey());
                sourceAsMap.remove(entry.getKey());
                sourceAsMap.put(entry.getValue(),value);
            }
        }

        searchHit.getSourceAsMap().putAll(sourceAsMap);
        currentId++;
        minusHitsList.add(searchHit);
    }
    int totalSize = currentId - 1;
    SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]);
    this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, TotalHits.Relation.EQUAL_TO), 1.0f);
}
 
Example 29
Source Project: anomaly-detection   Source File: SearchFeatureDaoTests.java    License: Apache License 2.0
@Before
public void setup() throws Exception {
    MockitoAnnotations.initMocks(this);
    PowerMockito.mockStatic(ParseUtils.class);

    Interpolator interpolator = new LinearUniformInterpolator(new SingleFeatureLinearUniformInterpolator());
    searchFeatureDao = spy(new SearchFeatureDao(client, xContent, interpolator, clientUtil));

    detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES);
    when(detector.getTimeField()).thenReturn("testTimeField");
    when(detector.getIndices()).thenReturn(Arrays.asList("testIndices"));
    when(detector.generateFeatureQuery()).thenReturn(featureQuery);
    when(detector.getDetectionInterval()).thenReturn(detectionInterval);

    searchSourceBuilder = SearchSourceBuilder
        .fromXContent(XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, "{}"));
    searchRequestParams = new HashMap<>();
    searchRequest = new SearchRequest(detector.getIndices().toArray(new String[0]));
    aggsMap = new HashMap<>();
    aggsList = new ArrayList<>();

    when(max.getName()).thenReturn(SearchFeatureDao.AGG_NAME_MAX);
    List<Aggregation> list = new ArrayList<>();
    list.add(max);
    Aggregations aggregations = new Aggregations(list);
    SearchHits hits = new SearchHits(new SearchHit[0], new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f);
    when(searchResponse.getHits()).thenReturn(hits);

    doReturn(Optional.of(searchResponse))
        .when(clientUtil)
        .timedRequest(eq(searchRequest), anyObject(), Matchers.<BiConsumer<SearchRequest, ActionListener<SearchResponse>>>anyObject());
    when(searchResponse.getAggregations()).thenReturn(aggregations);

    doReturn(Optional.of(searchResponse))
        .when(clientUtil)
        .throttledTimedRequest(
            eq(searchRequest),
            anyObject(),
            Matchers.<BiConsumer<SearchRequest, ActionListener<SearchResponse>>>anyObject(),
            anyObject()
        );

    multiSearchRequest = new MultiSearchRequest();
    SearchRequest request = new SearchRequest(detector.getIndices().toArray(new String[0]));
    multiSearchRequest.add(request);
    doReturn(Optional.of(multiSearchResponse))
        .when(clientUtil)
        .timedRequest(
            eq(multiSearchRequest),
            anyObject(),
            Matchers.<BiConsumer<MultiSearchRequest, ActionListener<MultiSearchResponse>>>anyObject()
        );
    when(multiSearchResponse.getResponses()).thenReturn(new Item[] { multiSearchResponseItem });
    when(multiSearchResponseItem.getResponse()).thenReturn(searchResponse);
}
 
Example 30
Source Project: lucene-solr   Source File: SearchResults.java    License: Apache License 2.0
/**
 * Returns the total number of hits for this query.
 */
public TotalHits getTotalHits() {
  return totalHits;
}