org.apache.solr.client.solrj.SolrQuery Java Examples
The following examples show how to use
org.apache.solr.client.solrj.SolrQuery.
The originating project, source file, and license are noted above each example.
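Before the project-specific examples, here is a minimal sketch of the typical SolrQuery workflow: build the query with the fluent setters, run it through a SolrClient, and read the results. The Solr URL, collection name, and field names below are placeholders chosen for illustration, not taken from any of the projects listed here.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

public class SolrQueryBasics {

  public static void main(String[] args) throws Exception {
    // Placeholder URL and collection name; adjust for your environment.
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {

      // Build the query with the fluent setters.
      SolrQuery query = new SolrQuery("*:*");      // q=*:*
      query.setFields("id", "name", "price");      // fl
      query.addFilterQuery("inStock:true");        // fq
      query.setSort("price", SolrQuery.ORDER.asc); // sort
      query.setStart(0);                           // pagination offset
      query.setRows(10);                           // page size

      // Execute against a collection and read the results.
      QueryResponse response = client.query("techproducts", query);
      SolrDocumentList results = response.getResults();
      System.out.println("numFound=" + results.getNumFound());
      for (SolrDocument doc : results) {
        System.out.println(doc.getFieldValue("id") + " -> " + doc.getFieldValue("name"));
      }
    }
  }
}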
Example #1
Source File: AbstractFullDistribZkTestBase.java From lucene-solr with Apache License 2.0
public static void waitForNon403or404or503(HttpSolrClient collectionClient) throws Exception {
  SolrException exp = null;
  final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);

  while (! timeout.hasTimedOut()) {
    boolean missing = false;

    try {
      collectionClient.query(new SolrQuery("*:*"));
    } catch (SolrException e) {
      if (!(e.code() == 403 || e.code() == 503 || e.code() == 404)) {
        throw e;
      }
      exp = e;
      missing = true;
    }
    if (!missing) {
      return;
    }
    Thread.sleep(50);
  }

  fail("Could not find the new collection - " + exp.code() + " : " + collectionClient.getBaseURL());
}
Example #2
Source File: ShardSplitTest.java From lucene-solr with Apache License 2.0
protected void checkSubShardConsistency(String shard) throws SolrServerException, IOException {
  SolrQuery query = new SolrQuery("*:*").setRows(1000).setFields("id", "_version_");
  query.set("distrib", false);

  ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
  Slice slice = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(shard);
  long[] numFound = new long[slice.getReplicasMap().size()];
  int c = 0;
  for (Replica replica : slice.getReplicas()) {
    String coreUrl = new ZkCoreNodeProps(replica).getCoreUrl();
    QueryResponse response;
    try (HttpSolrClient client = getHttpSolrClient(coreUrl)) {
      response = client.query(query);
    }
    numFound[c++] = response.getResults().getNumFound();
    if (log.isInfoEnabled()) {
      log.info("Shard: {} Replica: {} has {} docs", shard, coreUrl, String.valueOf(response.getResults().getNumFound()));
    }
    assertTrue("Shard: " + shard + " Replica: " + coreUrl + " has 0 docs", response.getResults().getNumFound() > 0);
  }
  for (int i = 0; i < slice.getReplicasMap().size(); i++) {
    assertEquals(shard + " is not consistent", numFound[0], numFound[i]);
  }
}
Example #3
Source File: DistributedQueryComponentOptimizationTest.java From lucene-solr with Apache License 2.0
@Test
public void testDistribSinglePass() throws Exception {
  QueryResponse rsp = cluster.getSolrClient().query(COLLECTION,
      new SolrQuery("q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true"));
  assertFieldValues(rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "19", "11");
  assertFieldValues(rsp.getResults(), "test_sS", "27", "21", "26", "24", "22", "30", "32", "23", "25", "29", "28", "33", null, "31");

  QueryResponse nonDistribRsp = cluster.getSolrClient().query(COLLECTION,
      new SolrQuery("q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20"));
  compareResponses(rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing

  nonDistribRsp = cluster.getSolrClient().query(COLLECTION,
      new SolrQuery("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20"));
  rsp = cluster.getSolrClient().query(COLLECTION,
      new SolrQuery("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true"));
  compareResponses(rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing
}
Example #4
Source File: BasicDistributedZkTest.java From lucene-solr with Apache License 2.0
private long checkSlicesSameCounts(DocCollection dColl) throws SolrServerException, IOException {
  long docTotal = 0; // total number of documents found counting only one replica per slice.
  for (Slice slice : dColl.getActiveSlices()) {
    long sliceDocCount = -1;
    for (Replica rep : slice.getReplicas()) {
      try (HttpSolrClient one = getHttpSolrClient(rep.getCoreUrl())) {
        SolrQuery query = new SolrQuery("*:*");
        query.setDistrib(false);
        QueryResponse resp = one.query(query);
        long hits = resp.getResults().getNumFound();
        if (sliceDocCount == -1) {
          sliceDocCount = hits;
          docTotal += hits;
        } else {
          if (hits != sliceDocCount) {
            return -1;
          }
        }
      }
    }
  }
  return docTotal;
}
Example #5
Source File: ItemSearchServiceLiveTest.java From tutorials with MIT License
@Test
public void whenSearchingWithKeywordWithMistake_thenSpellingSuggestionsShouldBeReturned() throws Exception {
  itemSearchService.index("hm0001", "Brand1 Washing Machine", "Home Appliances", 100f);
  itemSearchService.index("hm0002", "Brand1 Refrigerator", "Home Appliances", 300f);
  itemSearchService.index("hm0003", "Brand2 Ceiling Fan", "Home Appliances", 200f);
  itemSearchService.index("hm0004", "Brand2 Dishwasher", "Washing equipments", 250f);

  SolrQuery query = new SolrQuery();
  query.setQuery("hme");
  query.set("spellcheck", "on");
  QueryResponse response = solrClient.query(query);

  SpellCheckResponse spellCheckResponse = response.getSpellCheckResponse();
  assertEquals(false, spellCheckResponse.isCorrectlySpelled());

  Suggestion suggestion = spellCheckResponse.getSuggestions().get(0);
  assertEquals("hme", suggestion.getToken());

  List<String> alternatives = suggestion.getAlternatives();
  String alternative = alternatives.get(0);
  assertEquals("home", alternative);
}
Example #6
Source File: TestFieldLengthFeature.java From lucene-solr with Apache License 2.0
@Test
public void testIfFieldIsMissingInDocumentLengthIsZero() throws Exception {
  // add a document without the field 'description'
  assertU(adoc("id", "42", "title", "w10"));
  assertU(commit());

  loadFeature("description-length2", FieldLengthFeature.class.getName(), "{\"field\":\"description\"}");
  loadModel("description-model2", LinearModel.class.getName(),
      new String[] {"description-length2"}, "{\"weights\":{\"description-length2\":1.0}}");

  final SolrQuery query = new SolrQuery();
  query.setQuery("title:w10");
  query.add("fl", "*, score");
  query.add("rows", "4");
  query.add("rq", "{!ltr model=description-model2 reRankDocs=8}");

  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
}
Example #7
Source File: ItemSearchServiceLiveTest.java From tutorials with MIT License
@Test
public void whenSearchingWithFacetRange_thenAllMatchingFacetsShouldAvialble() throws Exception {
  itemSearchService.index("hm0001", "Brand1 Washing Machine", "CategoryA", 100f);
  itemSearchService.index("hm0002", "Brand1 Refrigerator", "CategoryA", 125f);
  itemSearchService.index("hm0003", "Brand2 Ceiling Fan", "CategoryB", 150f);
  itemSearchService.index("hm0004", "Brand2 Dishwasher", "CategoryB", 250f);

  SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.addNumericRangeFacet("price", 100, 275, 25);
  QueryResponse response = solrClient.query(query);

  // getCounts() returns the per-range bucket counts, so the element type is RangeFacet.Count
  List<RangeFacet.Count> rangeFacets = response.getFacetRanges().get(0).getCounts();
  assertEquals(7, rangeFacets.size());
}
Example #8
Source File: TestJaegerConfigurator.java From lucene-solr with Apache License 2.0
@Test
public void testInjected() throws Exception {
  MiniSolrCloudCluster cluster = new SolrCloudTestCase.Builder(2, createTempDir())
      .addConfig("config", TEST_PATH().resolve("collection1").resolve("conf"))
      .withSolrXml(getFile("solr/solr.xml").toPath())
      .build();
  CollectionAdminRequest.setClusterProperty(ZkStateReader.SAMPLE_PERCENTAGE, "100.0")
      .process(cluster.getSolrClient());
  try {
    TimeOut timeOut = new TimeOut(2, TimeUnit.MINUTES, TimeSource.NANO_TIME);
    timeOut.waitFor("Waiting for GlobalTracer is registered",
        () -> GlobalTracer.getTracer() instanceof io.jaegertracing.internal.JaegerTracer);

    //TODO add run Jaeger through Docker and verify spans available after run these commands
    CollectionAdminRequest.createCollection("test", 2, 1).process(cluster.getSolrClient());
    new UpdateRequest()
        .add("id", "1")
        .add("id", "2")
        .process(cluster.getSolrClient(), "test");
    cluster.getSolrClient().query("test", new SolrQuery("*:*"));
  } finally {
    cluster.shutdown();
  }
}
Example #9
Source File: Solr6Index.java From atlas with Apache License 2.0
private SolrQuery runCommonQuery(RawQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx,
                                 String collection, String keyIdField) throws BackendException {
  final SolrQuery solrQuery = new SolrQuery(query.getQuery())
      .addField(keyIdField)
      .setIncludeScore(true)
      .setStart(query.getOffset());

  if (query.hasLimit()) {
    solrQuery.setRows(Math.min(query.getLimit(), batchSize));
  } else {
    solrQuery.setRows(batchSize);
  }

  if (!query.getOrders().isEmpty()) {
    addOrderToQuery(solrQuery, query.getOrders());
  }

  for (final Parameter parameter : query.getParameters()) {
    if (parameter.value() instanceof String[]) {
      solrQuery.setParam(parameter.key(), (String[]) parameter.value());
    } else if (parameter.value() instanceof String) {
      solrQuery.setParam(parameter.key(), (String) parameter.value());
    }
  }
  return solrQuery;
}
Example #10
Source File: ItemSearchServiceLiveTest.java From tutorials with MIT License
@Test
public void whenSearchingWithFacetFields_thenAllMatchingFacetsShouldAvialble() throws Exception {
  itemSearchService.index("hm0001", "Brand1 Washing Machine", "CategoryA", 100f);
  itemSearchService.index("hm0002", "Brand1 Refrigerator", "CategoryA", 300f);
  itemSearchService.index("hm0003", "Brand2 Ceiling Fan", "CategoryB", 200f);
  itemSearchService.index("hm0004", "Brand2 Dishwasher", "CategoryB", 250f);

  SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.addFacetField("category");
  QueryResponse response = solrClient.query(query);

  List<Count> facetResults = response.getFacetField("category").getValues();
  assertEquals(2, facetResults.size());

  for (Count count : facetResults) {
    if ("categorya".equalsIgnoreCase(count.getName())) {
      assertEquals(2, count.getCount());
    } else if ("categoryb".equalsIgnoreCase(count.getName())) {
      assertEquals(2, count.getCount());
    } else {
      fail("unexpected category");
    }
  }
}
Example #11
Source File: TestLTRQParserExplain.java From lucene-solr with Apache License 2.0
@Test
public void testRerankedExplain() throws Exception {
  loadModel("linear2", LinearModel.class.getName(),
      new String[] {"constant1", "constant2", "pop"},
      "{\"weights\":{\"pop\":1.0,\"constant1\":1.5,\"constant2\":3.5}}");

  final SolrQuery query = new SolrQuery();
  query.setQuery("title:bloomberg");
  query.setParam("debugQuery", "on");
  query.add("rows", "2");
  query.add("rq", "{!ltr reRankDocs=2 model=linear2}");
  query.add("fl", "*,score");

  assertJQ(
      "/query" + query.toQueryString(),
      "/debug/explain/9=='\n13.5 = LinearModel(name=linear2,featureWeights=[constant1=1.5,constant2=3.5,pop=1.0]) model applied to features, sum of:\n 1.5 = prod of:\n 1.5 = weight on feature\n 1.0 = ValueFeature [name=constant1, params={value=1}]\n 7.0 = prod of:\n 3.5 = weight on feature\n 2.0 = ValueFeature [name=constant2, params={value=2}]\n 5.0 = prod of:\n 1.0 = weight on feature\n 5.0 = FieldValueFeature [name=pop, params={field=popularity}]\n'");
}
Example #12
Source File: CustomCollectionTest.java From lucene-solr with Apache License 2.0
@Test
public void testRouteFieldForImplicitRouter() throws Exception {
  int numShards = 4;
  int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
  int maxShardsPerNode = ((numShards * replicationFactor) / NODE_COUNT) + 1;
  String shard_fld = "shard_s";

  final String collection = "withShardField";

  CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c,d", replicationFactor)
      .setMaxShardsPerNode(maxShardsPerNode)
      .setRouterField(shard_fld)
      .process(cluster.getSolrClient());

  new UpdateRequest()
      .add("id", "6", shard_fld, "a")
      .add("id", "7", shard_fld, "a")
      .add("id", "8", shard_fld, "b")
      .commit(cluster.getSolrClient(), collection);

  assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound());
  assertEquals(1, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound());
  assertEquals(2, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
}
Example #13
Source File: SolrTemplate.java From dubbox with Apache License 2.0
@Override
public long count(final SolrDataQuery query) {
  Assert.notNull(query, "Query must not be 'null'.");

  return execute(new SolrCallback<Long>() {

    @Override
    public Long doInSolr(SolrClient solrClient) throws SolrServerException, IOException {
      SolrQuery solrQuery = queryParsers.getForClass(query.getClass()).constructSolrQuery(query);
      solrQuery.setStart(0);
      solrQuery.setRows(0);

      return solrClient.query(solrQuery).getResults().getNumFound();
    }
  });
}
Example #14
Source File: SolrConnector.java From TagRec with GNU Affero General Public License v3.0
@SuppressWarnings("unchecked")
public Map<String, Set<String>> getTweets() {
  Map<String, Set<String>> tweets = new LinkedHashMap<String, Set<String>>();
  SolrQuery solrParams = new SolrQuery();
  solrParams.set("q", "*:*");
  solrParams.set("fl", "text,hashtags");
  solrParams.set("rows", Integer.MAX_VALUE);
  QueryResponse r = null;
  try {
    r = this.server.query(solrParams);
    SolrDocumentList docs = r.getResults();
    for (SolrDocument d : docs) {
      tweets.put((String) d.get("text"), new LinkedHashSet<String>((List<String>) d.get("hashtags")));
    }
  } catch (SolrServerException e) {
    e.printStackTrace();
  }
  return tweets;
}
Example #15
Source File: SolrLookingBlurServerTest.java From incubator-retired-blur with Apache License 2.0
@Test
public void weShouldBeAbleToPageResults() throws SolrServerException, IOException, BlurException, TException {
  String table = "weShouldBeAbleToPageResults";
  SolrServer server = createServerAndTableWithSimpleTestDoc(table);

  SolrQuery query = new SolrQuery("123");
  query.setFields("fam.value");
  QueryResponse response = server.query(query);

  assertEquals("We should get our doc back for a valid test.", 1l, response.getResults().getNumFound());

  SolrDocument docResult = response.getResults().get(0);

  assertEquals("123", docResult.getFieldValue("fam.value"));
  assertNull("We shouldn't get this one back since it wasnt in our fields.", docResult.getFieldValues("fam.mvf"));

  removeTable(table);
}
Example #16
Source File: ServiceLogTruncatedRequestQueryConverterTest.java From ambari-logsearch with Apache License 2.0
@Test
public void testConvert() {
  // GIVEN
  ServiceLogTruncatedRequest request = new ServiceLogTruncatedQueryRequest();
  fillBaseLogRequestWithTestData(request);
  request.setScrollType("0");
  request.setNumberRows(10);
  request.setId("id");
  // WHEN
  SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request));
  // THEN
  assertEquals("?q=*%3A*&start=0&rows=10&fq=type%3A%28logsearch_app+%22OR%22+secure_log%29" +
      "&fq=-type%3A%28hst_agent+%22OR%22+system_message%29&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage" +
      "&fq=cluster%3Acl1&sort=logtime+desc%2Cseq_num+desc",
      query.toQueryString());
}
Example #17
Source File: TestBulkSchemaAPI.java From lucene-solr with Apache License 2.0
@Test
public void testAddNewFieldAndQuery() throws Exception {
  getSolrClient().add(Arrays.asList(
      sdoc("id", "1", "term_s", "tux")));
  getSolrClient().commit(true, true);

  Map<String,Object> attrs = new HashMap<>();
  attrs.put("name", "newstringtestfield");
  attrs.put("type", "string");

  new SchemaRequest.AddField(attrs).process(getSolrClient());

  SolrQuery query = new SolrQuery("*:*");
  query.addFacetField("newstringtestfield");
  int size = getSolrClient().query(query).getResults().size();
  assertEquals(1, size);
}
Example #18
Source File: DocumentShrinker.java From thoth with BSD 3-Clause Clear License
/**
 * Tag slower documents and add them to the shrank core
 */
private void tagAndAddSlowThothDocuments() throws IOException, SolrServerException {
  // Query to return top MAX_NUMBER_SLOW_THOTH_DOCS slower thoth documents
  QueryResponse qr = realTimeServer.query(
      new SolrQuery()
          .setQuery(createThothDocsAggregationQuery())
          .addSort(QTIME, SolrQuery.ORDER.desc)
          .setRows(MAX_NUMBER_SLOW_THOTH_DOCS)
  );
  for (SolrDocument solrDocument : qr.getResults()) {
    SolrInputDocument si = ClientUtils.toSolrInputDocument(solrDocument);
    // Remove old ID and version
    si.removeField(ID);
    si.removeField("_version_");
    // Tag document as slow
    si.addField(SLOW_QUERY_DOCUMENT, true);
    LOG.debug("Adding slow query document for server " + serverDetail.getName());
    shrankServer.add(si);
  }
}
Example #19
Source File: SolrFilterVisitor.java From Knowage-Server with GNU Affero General Public License v3.0
public void apply(SolrQuery solrQuery, Filter filter) {
  visit(filter);
  solrQuery.addFilterQuery(queryBuilder.toString());

  List<LikeFilter> likeFilters = extractLikeFilters(filter);
  if (!likeFilters.isEmpty()) {
    solrQuery.addField("id");
    solrQuery.setHighlight(true);
    solrQuery.setHighlightFragsize(0);
    solrQuery.add("hl.q", "*" + likeFilters.get(0).getValue() + "*");
    for (LikeFilter likeFilter : likeFilters) {
      String fieldName = likeFilter.getProjection().getName();
      if (highlightFields.contains(fieldName)) {
        solrQuery.addHighlightField(fieldName);
      }
    }
  }
}
Example #20
Source File: AbstractSolrMorphlineTest.java From kite with Apache License 2.0
protected int queryResultSetSize(String query) {
  // return collector.getRecords().size();
  try {
    testServer.commitTransaction();
    solrClient.commit(false, true, true);
    QueryResponse rsp = solrClient.query(new SolrQuery(query).setRows(Integer.MAX_VALUE));
    LOGGER.debug("rsp: {}", rsp);
    int i = 0;
    for (SolrDocument doc : rsp.getResults()) {
      LOGGER.debug("rspDoc #{}: {}", i++, doc);
    }
    int size = rsp.getResults().size();
    return size;
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Example #21
Source File: CheckHdfsIndexTest.java From lucene-solr with Apache License 2.0
@Test
public void doTest() throws Exception {
  waitForRecoveriesToFinish(false);

  indexr(id, 1);
  commit();
  waitForRecoveriesToFinish(false);

  String[] args;
  {
    SolrClient client = clients.get(0);
    NamedList<Object> response = client.query(new SolrQuery().setRequestHandler("/admin/system")).getResponse();
    @SuppressWarnings({"unchecked"})
    NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
    @SuppressWarnings({"unchecked"})
    String indexDir = ((NamedList<Object>) coreInfo.get("directory")).get("data") + "/index";

    args = new String[] {indexDir};
  }

  assertEquals("CheckHdfsIndex return status", 0, CheckHdfsIndex.doMain(args));
}
Example #22
Source File: SolrQueryUtil.java From scipio-erp with Apache License 2.0
public static void addFilterQueries(SolrQuery solrQuery, Collection<String> queryFilters) {
  if (queryFilters != null) {
    for (String filter : queryFilters) {
      solrQuery.addFilterQuery(filter);
    }
  }
}
Example #23
Source File: TestLTRQParserExplain.java From lucene-solr with Apache License 2.0
@Test
public void testRerankedExplainSameBetweenDifferentDocsWithSameFeatures() throws Exception {
  loadFeatures("features-linear.json");
  loadModels("linear-model.json");

  final SolrQuery query = new SolrQuery();
  query.setQuery("title:bloomberg");
  query.setParam("debugQuery", "on");
  query.add("rows", "4");
  query.add("rq", "{!ltr reRankDocs=4 model=6029760550880411648}");
  query.add("fl", "*,score");
  query.add("wt", "json");
  final String expectedExplainNormalizer = "normalized using MinMaxNormalizer(min=0.0,max=10.0)";
  final String expectedExplain = "\n3.5116758 = LinearModel(name=6029760550880411648,featureWeights=["
      + "title=0.0,"
      + "description=0.1,"
      + "keywords=0.2,"
      + "popularity=0.3,"
      + "text=0.4,"
      + "queryIntentPerson=0.1231231,"
      + "queryIntentCompany=0.12121211"
      + "]) model applied to features, sum of:\n 0.0 = prod of:\n 0.0 = weight on feature\n 1.0 = ValueFeature [name=title, params={value=1}]\n 0.2 = prod of:\n 0.1 = weight on feature\n 2.0 = ValueFeature [name=description, params={value=2}]\n 0.4 = prod of:\n 0.2 = weight on feature\n 2.0 = ValueFeature [name=keywords, params={value=2}]\n 0.09 = prod of:\n 0.3 = weight on feature\n 0.3 = "+expectedExplainNormalizer+"\n 3.0 = ValueFeature [name=popularity, params={value=3}]\n 1.6 = prod of:\n 0.4 = weight on feature\n 4.0 = ValueFeature [name=text, params={value=4}]\n 0.6156155 = prod of:\n 0.1231231 = weight on feature\n 5.0 = ValueFeature [name=queryIntentPerson, params={value=5}]\n 0.60606056 = prod of:\n 0.12121211 = weight on feature\n 5.0 = ValueFeature [name=queryIntentCompany, params={value=5}]\n";

  assertJQ(
      "/query" + query.toQueryString(),
      "/debug/explain/7=='"+expectedExplain+"'}");
  assertJQ(
      "/query" + query.toQueryString(),
      "/debug/explain/9=='"+expectedExplain+"'}");
}
Example #24
Source File: TestCoreAdmin.java From lucene-solr with Apache License 2.0
@Test
public void testReloadCoreAfterFailure() throws Exception {
  cores.shutdown();
  useFactory(null); // use FS factory
  try {
    cores = CoreContainer.createAndLoad(SOLR_HOME, getSolrXml());
    String ddir = CoreAdminRequest.getCoreStatus("core0", getSolrCore0()).getDataDirectory();
    Path data = Paths.get(ddir, "index");
    assumeTrue("test can't handle relative data directory paths (yet?)", data.isAbsolute());

    getSolrCore0().add(new SolrInputDocument("id", "core0-1"));
    getSolrCore0().commit();
    cores.shutdown();

    // destroy the index
    Files.move(data.resolve("_0.si"), data.resolve("backup"));
    cores = CoreContainer.createAndLoad(SOLR_HOME, getSolrXml());

    // Need to run a query to confirm that the core couldn't load
    expectThrows(SolrException.class, () -> getSolrCore0().query(new SolrQuery("*:*")));

    // We didn't fix anything, so should still throw
    expectThrows(SolrException.class, () -> CoreAdminRequest.reloadCore("core0", getSolrCore0()));

    Files.move(data.resolve("backup"), data.resolve("_0.si"));
    CoreAdminRequest.reloadCore("core0", getSolrCore0());
    assertEquals(1, getSolrCore0().query(new SolrQuery("*:*")).getResults().getNumFound());
  } finally {
    resetFactory();
  }
}
Example #25
Source File: TermsQueryParser.java From dubbox with Apache License 2.0
@Override
public SolrQuery doConstructSolrQuery(TermsQuery query) {
  Assert.notNull(query, "Cannot construct solrQuery from null value.");

  SolrQuery solrQuery = new SolrQuery();

  String queryString = getQueryString(query);
  if (StringUtils.hasText(queryString)) {
    solrQuery.setParam(CommonParams.Q, queryString);
  }

  appendTermsOptionsToSolrQuery(query.getTermsOptions(), solrQuery);
  processTermsFields(solrQuery, query);
  appendRequestHandler(solrQuery, query.getRequestHandler());

  return solrQuery;
}
Example #26
Source File: RangeFacetCloudTest.java From lucene-solr with Apache License 2.0
public void testInclude_Lower() throws Exception {
  for (boolean doSubFacet : Arrays.asList(false, true)) {
    final Integer subFacetLimit = pickSubFacetLimit(doSubFacet);
    final CharSequence subFacet = makeSubFacet(subFacetLimit);
    for (EnumSet<FacetRangeOther> other : OTHERS) {
      final String otherStr = formatFacetRangeOther(other);
      for (String include : Arrays.asList(", include:lower", "")) { // same behavior
        final SolrQuery solrQuery = new SolrQuery
            ("q", "*:*", "rows", "0", "json.facet",
             // exclude a single low value from our ranges
             "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:5, gap:1"+otherStr+include+subFacet+" } }");

        final QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
        try {
          @SuppressWarnings({"unchecked"})
          final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo");
          @SuppressWarnings({"unchecked"})
          final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");

          assertEquals("num buckets", 4, buckets.size());
          for (int i = 0; i < 4; i++) {
            int expectedVal = i+1;
            assertBucket("bucket#" + i, expectedVal, modelVals(expectedVal), subFacetLimit, buckets.get(i));
          }

          assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo);
        } catch (AssertionError|RuntimeException ae) {
          throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
        }
      }
    }
  }
}
Example #27
Source File: AbstractOperationHolderConverter.java From ambari-logsearch with Apache License 2.0
public SolrQuery addInFiltersIfNotNullAndEnabled(SolrQuery query, String value, String field, boolean condition) {
  if (condition) {
    List<String> valuesList = value.length() == 0 ? singletonList("\\-1") : splitValueAsList(value, ",");
    if (valuesList.size() > 1) {
      query.addFilterQuery(String.format("%s:(%s)", field, StringUtils.join(valuesList, " OR ")));
    } else {
      query.addFilterQuery(String.format("%s:%s", field, valuesList.get(0)));
    }
  }
  return query;
}
Example #28
Source File: Solr07TestUtil.java From datacollector with Apache License 2.0
@SuppressWarnings("unchecked")
public List<Map<String,Object>> query(Map<String, String> q) throws Exception {
  SolrQuery parameters = new SolrQuery();
  for (Map.Entry<String, String> entry : q.entrySet()) {
    parameters.set(entry.getKey(), entry.getValue());
  }

  QueryResponse response = solrServer.query(parameters);
  List<SolrDocument> solrDocumentList = response.getResults();
  List<Map<String, Object>> result = new ArrayList();
  for (SolrDocument document : solrDocumentList) {
    result.add(document);
  }
  return result;
}
Example #29
Source File: SolrStreamingService.java From chronix.spark with Apache License 2.0
private void streamNextDocumentsFromSolr() {
  SolrQuery solrQuery = query.getCopy();
  solrQuery.setRows(nrOfTimeSeriesPerBatch);
  solrQuery.setStart(currentDocumentCount);

  solrStreamingHandler.init(nrOfTimeSeriesPerBatch, currentDocumentCount);

  try {
    connection.queryAndStreamResponse(solrQuery, solrStreamingHandler);
    convertStream();
  } catch (SolrServerException | IOException e) {
    LOGGER.warn("Exception while streaming the data points from Solr", e);
  }
}
Example #30
Source File: ServiceLogsManager.java From ambari-logsearch with Apache License 2.0
public GraphDataListResponse getAggregatedInfo(ServiceLogAggregatedInfoRequest request) {
  SimpleQuery solrDataQuery = new BaseServiceLogRequestQueryConverter().convert(request);
  SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(solrDataQuery);
  String hierarchy = String.format("%s,%s,%s", HOST, COMPONENT, LEVEL);
  solrQuery.setQuery("*:*");
  SolrUtil.setFacetPivot(solrQuery, 1, hierarchy);
  QueryResponse response = serviceLogsSolrDao.process(solrQuery);
  return responseDataGenerator.generateSimpleGraphResponse(response, hierarchy);
}