Java Code Examples for org.apache.solr.common.util.SimpleOrderedMap

The following examples show how to use org.apache.solr.common.util.SimpleOrderedMap. These examples are extracted from open source projects; the originating project and source file are noted above each example.
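Before the examples, here is a minimal usage sketch (not taken from any of the projects below) of the basic API that the examples rely on: SimpleOrderedMap is a NamedList variant that preserves insertion order, permits repeated names, supports name-based lookup, and iterates as Map.Entry pairs. The demo class name is hypothetical; the SimpleOrderedMap calls reflect the standard SolrJ API.

import java.util.Map;

import org.apache.solr.common.util.SimpleOrderedMap;

public class SimpleOrderedMapDemo {
  public static void main(String[] args) {
    // Entries keep their insertion order, unlike a plain HashMap.
    SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
    info.add("status", "OK");   // add(name, value) appends a new entry
    info.add("numFound", 42);

    // get(name) returns the value of the first entry with that name, or null.
    System.out.println("status = " + info.get("status"));

    // SimpleOrderedMap (via NamedList) is Iterable<Map.Entry<String, T>>.
    for (Map.Entry<String, Object> entry : info) {
      System.out.println(entry.getKey() + " -> " + entry.getValue());
    }
  }
}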
Example 1
Source Project: lucene-solr   Source File: SystemInfoHandler.java    License: Apache License 2.0
/**
 * Get system info
 */
public static SimpleOrderedMap<Object> getSystemInfo() {
  SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
  
  OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
  info.add(NAME, os.getName()); // add at least this one
  // add remaining ones dynamically using Java Beans API
  // also those from JVM implementation-specific classes
  MetricUtils.addMXBeanMetrics(os, MetricUtils.OS_MXBEAN_CLASSES, null, (name, metric) -> {
    if (info.get(name) == null) {
      info.add(name, ((Gauge) metric).getValue());
    }
  });

  return info;
}
 
Example 2
Source Project: lucene-solr   Source File: UniqueSlotAcc.java    License: Apache License 2.0
@SuppressWarnings({"unchecked", "rawtypes"})
private Object getShardHLL(int slot) throws IOException {
  FixedBitSet ords = arr[slot];
  if (ords == null) return HLLAgg.NO_VALUES;

  HLL hll = factory.getHLL();
  long maxOrd = ords.length();
  Hash.LongPair hashResult = new Hash.LongPair();
  for(int ord=-1; ++ord < maxOrd;) {
    ord = ords.nextSetBit(ord);
    if (ord == DocIdSetIterator.NO_MORE_DOCS) break;
    BytesRef val = lookupOrd(ord);
    // way to avoid recomputing hash across slots?  Prob not worth space
    Hash.murmurhash3_x64_128(val.bytes, val.offset, val.length, 0, hashResult);
    // idea: if the set is small enough, just send the hashes?  We can add at the top
    // level or even just do a hash table at the top level.
    hll.addRaw(hashResult.val1);
  }

  SimpleOrderedMap map = new SimpleOrderedMap();
  map.add("hll", hll.toBytes());
  return map;
}
 
Example 3
Source Project: lucene-solr   Source File: RangeFacetRequest.java    License: Apache License 2.0
/**
 * Helper method to merge range facet values from a shard's response to already accumulated
 * values for each range.
 *
 * @param rangeCounts a {@link LinkedHashMap} containing the accumulated values for each range
 *                    keyed by the 'key' of the facet.range. Must not be null.
 * @param shardRanges the facet_ranges response from a shard. Must not be null.
 */
public static void mergeFacetRangesFromShardResponse(LinkedHashMap<String, DistribRangeFacet> rangeCounts,
                                                     SimpleOrderedMap<SimpleOrderedMap<Object>> shardRanges) {
  assert shardRanges != null;
  assert rangeCounts != null;
  for (Map.Entry<String, SimpleOrderedMap<Object>> entry : shardRanges) {
    String rangeKey = entry.getKey();

    RangeFacetRequest.DistribRangeFacet existing = rangeCounts.get(rangeKey);
    if (existing == null) {
      rangeCounts.put(rangeKey, new RangeFacetRequest.DistribRangeFacet(entry.getValue()));
    } else {
      existing.mergeContributionFromShard(entry.getValue());
    }
  }
}
 
Example 4
Source Project: lucene-solr   Source File: TestToleratedUpdateError.java    License: Apache License 2.0
@SuppressWarnings({"unchecked"})
public void testParseMap() {
  // trivial
  @SuppressWarnings({"rawtypes"})
  SimpleOrderedMap valid = new SimpleOrderedMap<String>();
  valid.add("type", CmdType.ADD.toString());
  valid.add("id", "some id");
  valid.add("message", "some message");
  
  ToleratedUpdateError in = ToleratedUpdateError.parseMap(valid);
  compare(in, MAP_COPPIER);
  compare(in, METADATA_COPPIER);

  // randomized
  int numIters = atLeast(5000);
  for (int i = 0; i < numIters; i++) {
    valid = new SimpleOrderedMap<String>();
    valid.add("type", ALL_TYPES[TestUtil.nextInt(random(), 0, ALL_TYPES.length-1)].toString());
    valid.add("id", TestUtil.randomUnicodeString(random()));
    valid.add("message", TestUtil.randomUnicodeString(random()));
    
    in = ToleratedUpdateError.parseMap(valid);
    compare(in, MAP_COPPIER);
    compare(in, METADATA_COPPIER);
  }
}
 
Example 5
Source Project: lucene-solr   Source File: FacetDebugInfo.java    License: Apache License 2.0
public SimpleOrderedMap<Object> getFacetDebugInfo() {
  SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
  
  if (filter != null) info.add("filter", filter);
  if (processor != null) info.add("processor", processor);
  if (elapse != -1) info.add("elapse", elapse);
  if (reqDescription != null) {
    info.addAll(reqDescription);
  } 
  info.addAll(this.info);
  
  if (children != null && children.size() > 0) {
    List<Object> subfacet = new ArrayList<Object>();
    info.add("sub-facet", subfacet);
    for (FacetDebugInfo child : children) {
      subfacet.add(child.getFacetDebugInfo());
    }
  }     
  return info;
}
 
Example 6
static NamedList<Object> buildTxReport(TrackerRegistry trackerRegistry, InformationServer srv, String coreName, MetadataTracker tracker, Long txid) throws JSONException
{
    NamedList<Object> nr = new SimpleOrderedMap<>();
    nr.add("TXID", txid);
    nr.add("transaction", buildTrackerReport(trackerRegistry, srv, coreName, txid, txid, 0L, 0L, null, null));
    NamedList<Object> nodes = new SimpleOrderedMap<>();

    // add node reports ....
    List<Node> dbNodes = tracker.getFullNodesForDbTransaction(txid);
    for (Node node : dbNodes)
    {
        nodes.add("DBID " + node.getId(), buildNodeReport(tracker, node));
    }

    nr.add("txDbNodeCount", dbNodes.size());
    nr.add("nodes", nodes);
    return nr;
}
 
Example 7
Source Project: solr-redis   Source File: TaggedQueryHighlighter.java    License: Apache License 2.0
/**
 * Merges all partial results into a single response for Solr highlighting.
 *
 * @param results Partial results from default highlighting and tagged queries highlighting.
 * @return Returns merged results of default highlighting and tagged queries highlighting.
 */
private SimpleOrderedMap mergeResults(final Map<String, SimpleOrderedMap> results) {
  final SimpleOrderedMap mergedResult = new SimpleOrderedMap();
  for (final Map.Entry<String, SimpleOrderedMap> partialResultEntry : results.entrySet()) {
    for (final Object subResultEntryObject : partialResultEntry.getValue()) {
      final Map.Entry<String, Object> subResultEntry = (Map.Entry<String, Object>) subResultEntryObject;
      for (final Object docEntryObject : (Iterable<? extends Object>) subResultEntry.getValue()) {
        final Map.Entry<String, Object> docEntry = (Map.Entry<String, Object>) docEntryObject;
        String fieldName = partialResultEntry.getKey();
        //If results are from main highlight we should add original field name. In other case we should use
        //field alias which comes from tagged query
        if (MAIN_HIGHLIGHT.equals(fieldName)) {
          fieldName = docEntry.getKey();
        }
        addFragmentToDoc(mergedResult, subResultEntry.getKey(), fieldName, (String[]) docEntry.getValue());
      }
    }
  }
  return mergedResult;
}
 
Example 8
Source Project: BioSolr   Source File: AbstractXJoinTestCase.java    License: Apache License 2.0
@SuppressWarnings("rawtypes")
protected NamedList test(ModifiableSolrParams params, String componentName) {
  SolrCore core = h.getCore();

  SearchComponent sc = core.getSearchComponent(componentName);
  assertTrue("XJoinSearchComponent not found in solrconfig", sc != null);
    
  QParserPlugin qp = core.getQueryPlugin("xjoin");
  assertTrue("XJoinQParserPlugin not found in solrconfig", qp != null);
  
  params.add("q", "*:*");
  params.add("fq", "{!xjoin}" + componentName);

  SolrQueryResponse rsp = new SolrQueryResponse();
  rsp.add("responseHeader", new SimpleOrderedMap<>());
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);

  SolrRequestHandler handler = core.getRequestHandler("standard");
  handler.handleRequest(req, rsp);
  req.close();
  assertNull(rsp.getException());
    
  return rsp.getValues();
}
 
Example 9
/**
 * Add a nodeid, txid, acltxid, aclid or SOLR query to be reindexed on the
 * next maintenance operation performed by MetadataTracker and AclTracker.
 *
 * Asynchronous execution
 *
 * @param params Query Request with following parameters:
 * - core, mandatory: The name of the SOLR Core
 * - txid, optional, the number of the Transaction to reindex
 * - acltxid, optional, the number of the ACL Transaction to reindex
 * - nodeId, optional, the number of the node to reindex
 * - aclid, optional, the number of the ACL to reindex
 * - query, optional, SOLR Query to reindex results
 * @return Response including the action result:
 * - action.status: scheduled, as it will be executed by Trackers on the next maintenance operation
 */
private NamedList<Object> actionREINDEX(SolrParams params)
{
    Consumer<String> reindexOnSpecificCore = coreName -> {
        final MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
        final AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);

        apply(params, ARG_TXID, metadataTracker::addTransactionToReindex);
        apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToReindex);
        apply(params, ARG_NODEID, metadataTracker::addNodeToReindex);
        apply(params, ARG_ACLID, aclTracker::addAclToReindex);

        ofNullable(params.get(ARG_QUERY)).ifPresent(metadataTracker::addQueryToReindex);
    };

    String requestedCoreName = coreName(params);

    coreNames().stream()
            .filter(coreName -> requestedCoreName == null || coreName.equals(requestedCoreName))
            .filter(this::isMasterOrStandalone)
            .forEach(reindexOnSpecificCore);

    NamedList<Object> response = new SimpleOrderedMap<>();
    response.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
    return response;
}
 
Example 10
NamedList<Object> manageTransactionsToBeFixed(
        IOpenBitSet transactions,
        LongToIntFunction nodesCounter,
        Consumer<Long> scheduler,
        AtomicInteger limit)
{
    final NamedList<Object> transactionsList = new SimpleOrderedMap<>();

    long txid = -1;
    while ((txid = transactions.nextSetBit(txid + 1)) != -1 && limit.decrementAndGet() >= 0)
    {
        transactionsList.add(String.valueOf(txid), nodesCounter.applyAsInt(txid));
        scheduler.accept(txid);
    }

    return transactionsList;
}
 
Example 11
Source Project: mtas   Source File: MtasSolrResultUtil.java    License: Apache License 2.0
/**
 * Rewrite merge data.
 *
 * @param key the key
 * @param subKey the sub key
 * @param snl the snl
 * @param tnl the tnl
 */
@SuppressWarnings({ "unused", "unchecked" })
private static void rewriteMergeData(String key, String subKey,
    NamedList<Object> snl, NamedList<Object> tnl) {
  if (snl != null) {
    Object o = tnl.get(key);
    NamedList<Object> tnnnl;
    if (o != null && o instanceof NamedList) {
      tnnnl = (NamedList<Object>) o;
    } else {
      tnnnl = new SimpleOrderedMap<>();
      tnl.add(key, tnnnl);
    }
    tnnnl.add(subKey, snl);
  }
}
 
Example 12
Source Project: lucene-solr   Source File: TestJsonFacetRefinement.java    License: Apache License 2.0
/**
 * Use SimpleOrderedMap rather than Map to match responses from shards
 */
public static Object fromJSON(String json) throws IOException {
  JSONParser parser = new JSONParser(json);
  ObjectBuilder ob = new ObjectBuilder(parser) {
    @Override
    @SuppressWarnings({"rawtypes"})
    public Object newObject() throws IOException {
      return new SimpleOrderedMap();
    }

    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void addKeyVal(Object map, Object key, Object val) throws IOException {
      ((SimpleOrderedMap) map).add(key.toString(), val);
    }
  };

  return ob.getObject();
}
 
Example 13
Source Project: lucene-solr   Source File: SolrPluginUtils.java    License: Apache License 2.0
/**
 * Generates a NamedList of Explanations for each item in a list of docs.
 *
 * @param query The Query you want explanations in the context of
 * @param docs The Documents you want explained relative that query
 */
public static NamedList<Explanation> getExplanations
  (Query query,
   DocList docs,
   SolrIndexSearcher searcher,
   IndexSchema schema) throws IOException {

  NamedList<Explanation> explainList = new SimpleOrderedMap<>();
  DocIterator iterator = docs.iterator();
  for (int i=0; i<docs.size(); i++) {
    int id = iterator.nextDoc();

    Document doc = searcher.doc(id);
    String strid = schema.printableUniqueKey(doc);

    explainList.add(strid, searcher.explain(query, id) );
  }
  return explainList;
}
 
Example 14
Source Project: semantic-knowledge-graph   Source File: NodeNormalizer.java    License: Apache License 2.0
private void populateNorms(AggregationWaitable runner,
                           String requestValue,
                           LinkedList<String> normalizedStrings,
                           LinkedList<SimpleOrderedMap<String>> normalizedMaps) {
    for(SimpleOrderedMap<Object> bucket : runner.buckets)
    {
        SimpleOrderedMap<String> facetResult = runner.adapter.getMapValue(bucket);
        if(MapUtility.mapContainsValue(requestValue.toLowerCase(), facetResult))
        {
            normalizedStrings.add(runner.adapter.getStringValue(bucket));
            normalizedMaps.add(runner.adapter.getMapValue(bucket));
            return;
        }
    }
    normalizedStrings.add(requestValue);
    normalizedMaps.add(null);
}
 
Example 15
@Test
public void coreNamesAreTrimmed_oneCoreNameAtTime() {
    AlfrescoCoreAdminHandler spy = spy(new AlfrescoCoreAdminHandler() {
        @Override
        protected NamedList<Object> newCore(String coreName, int numShards, StoreRef storeRef, String templateName, int replicationFactor, int nodeInstance, int numNodes, String shardIds, Properties extraProperties)
        {
            // Do nothing here otherwise we cannot spy it
            return new SimpleOrderedMap<>();
        }
    });

    // First let's try a list of names, one by one
    final List<String> coreNames =
            asList(
                    ARCHIVE_CORE_NAME + "  ", // whitespace char at the end
                    "\t " + ALFRESCO_CORE_NAME, // whitespace chars at the beginning
                    "   " + VERSION_CORE_NAME + "  \t", // beginning and end
                    "   \t"); // empty name

    coreNames.forEach(spy::setupNewDefaultCores);

    verify(spy).newCore(eq(ARCHIVE_CORE_NAME), eq(1), eq(STORE_REF_MAP.get(ARCHIVE_CORE_NAME)), anyString(), eq(1), eq(1), eq(1), eq(null), eq(null));
    verify(spy).newCore(eq(ALFRESCO_CORE_NAME), eq(1), eq(STORE_REF_MAP.get(ALFRESCO_CORE_NAME)), anyString(), eq(1), eq(1), eq(1), eq(null), eq(null));
    verify(spy).newCore(eq(VERSION_CORE_NAME), eq(1), eq(STORE_REF_MAP.get(VERSION_CORE_NAME)), anyString(), eq(1), eq(1), eq(1), eq(null), eq(null));
}
 
Example 16
Source Project: BioSolr   Source File: TreeFacetField.java    License: Apache License 2.0
/**
 * Convert this object to a SimpleOrderedMap, making it easier to serialize.
 * @return the equivalent SimpleOrderedMap for this object.
 */
public SimpleOrderedMap<Object> toMap() {
	SimpleOrderedMap<Object> map = new SimpleOrderedMap<>();
	
	if (label != null) {
		map.add(LABEL_KEY, label);
	}
	map.add(VALUE_KEY, value);
	map.add(COUNT_KEY, count);
	map.add(TOTAL_KEY, getTotal());
	if (hierarchy != null && hierarchy.size() > 0) {
		// Recurse through the child nodes, converting each to a map
		List<NamedList<Object>> hierarchyList = 
				hierarchy.stream().map(TreeFacetField::toMap).collect(Collectors.toList());
		map.add(HIERARCHY_KEY, hierarchyList);
	}
	
	return map;
}
 
Example 17
Source Project: lucene-solr   Source File: SuggesterResponse.java    License: Apache License 2.0
@SuppressWarnings({"unchecked", "rawtypes"})
public SuggesterResponse(Map<String, NamedList<Object>> suggestInfo) {
  for (Map.Entry<String, NamedList<Object>> entry : suggestInfo.entrySet()) {
    SimpleOrderedMap suggestionsNode = (SimpleOrderedMap) entry.getValue().getVal(0);
    List<SimpleOrderedMap> suggestionListToParse;
    List<Suggestion> suggestionList = new LinkedList<>();
    if (suggestionsNode != null) {

      suggestionListToParse = (List<SimpleOrderedMap>) suggestionsNode.get(SUGGESTIONS_NODE_NAME);
      for (SimpleOrderedMap suggestion : suggestionListToParse) {
        String term = (String) suggestion.get(TERM_NODE_NAME);
        long weight = (long) suggestion.get(WEIGHT_NODE_NAME);
        String payload = (String) suggestion.get(PAYLOAD_NODE_NAME);

        Suggestion parsedSuggestion = new Suggestion(term, weight, payload);
        suggestionList.add(parsedSuggestion);
      }
      suggestionsPerDictionary.put(entry.getKey(), suggestionList);
    }
  }
}
 
Example 18
Source Project: lucene-solr   Source File: DebugComponent.java    License: Apache License 2.0
private NamedList<String> getTrackResponse(ShardResponse shardResponse) {
  NamedList<String> namedList = new SimpleOrderedMap<>();
  if (shardResponse.getException() != null) {
    namedList.add("Exception", shardResponse.getException().getMessage());
    return namedList;
  }
  NamedList<Object> responseNL = shardResponse.getSolrResponse().getResponse();
  @SuppressWarnings("unchecked")
  NamedList<Object> responseHeader = (NamedList<Object>)responseNL.get("responseHeader");
  if(responseHeader != null) {
    namedList.add("QTime", responseHeader.get("QTime").toString());
  }
  namedList.add("ElapsedTime", String.valueOf(shardResponse.getSolrResponse().getElapsedTime()));
  namedList.add("RequestPurpose", shardResponse.getShardRequest().params.get(CommonParams.REQUEST_PURPOSE));
  SolrDocumentList docList = (SolrDocumentList)shardResponse.getSolrResponse().getResponse().get("response");
  if(docList != null) {
    namedList.add("NumFound", String.valueOf(docList.getNumFound()));
  }
  namedList.add("Response", String.valueOf(responseNL));
  return namedList;
}
 
Example 19
@Override
public void process(ResponseBuilder rb) throws IOException {
  final PhrasesContextData contextData = (PhrasesContextData) rb.req.getContext().get(this.getClass());
  if (null == contextData) {
    // if prepare didn't give us anything to work with, then we should do nothing
    return;
  }

  // regardless of single node / shard, we need local stats...
  Phrase.populateStats(contextData.allPhrases, contextData.fieldWeights.keySet(), rb.req.getSearcher());

  if ( rb.req.getParams().getBool(ShardParams.IS_SHARD, false) ) {
    // shard request, return stats for all phrases (in original order)
    SimpleOrderedMap<Object> output = new SimpleOrderedMap<>();
    output.add("_all", Phrase.formatShardResponse(contextData.allPhrases));
    // TODO: might want to add numDocs() & getSumTotalTermFreq(f)/getDocCount(f) stats from each field...
    // so that we can sum/merge them for use in scoring?
    rb.rsp.add("phrases", output);
  } else {
    // full single node request...
    scoreAndAddResultsToResponse(rb, contextData);
  }
}
 
Example 20
Source Project: lucene-solr   Source File: SolrInfoMBeanHandler.java    License: Apache License 2.0
protected NamedList<NamedList<NamedList<Object>>> getMBeanInfo(SolrQueryRequest req) {

    NamedList<NamedList<NamedList<Object>>> cats = new NamedList<>();
    
    String[] requestedCats = req.getParams().getParams("cat");
    if (null == requestedCats || 0 == requestedCats.length) {
      for (SolrInfoBean.Category cat : SolrInfoBean.Category.values()) {
        cats.add(cat.name(), new SimpleOrderedMap<NamedList<Object>>());
      }
    } else {
      for (String catName : requestedCats) {
        cats.add(catName,new SimpleOrderedMap<NamedList<Object>>());
      }
    }
         
    Set<String> requestedKeys = arrayToSet(req.getParams().getParams("key"));
    
    Map<String, SolrInfoBean> reg = req.getCore().getInfoRegistry();
    for (Map.Entry<String, SolrInfoBean> entry : reg.entrySet()) {
      addMBean(req, cats, requestedKeys, entry.getKey(),entry.getValue());
    }

    for (SolrInfoBean infoMBean : req.getCore().getCoreContainer().getResourceLoader().getInfoMBeans()) {
      addMBean(req,cats,requestedKeys,infoMBean.getName(),infoMBean);
    }
    return cats;
  }
 
Example 21
Source Project: ltr4l   Source File: FeaturesExtractorManager.java    License: Apache License 2.0
SimpleOrderedMap<Object> parseQ(Map q){
  SimpleOrderedMap<Object> result = new SimpleOrderedMap<Object>();
  result.add("qid", (Integer)q.get("qid"));
  result.add("query", (String)q.get("query"));
  result.add("docs", parseDocs((List<Map>)q.get("docs")));
  return result;
}
 
Example 22
Source Project: solr-researcher   Source File: ReSearcherUtils.java    License: Apache License 2.0
/**
 * Returns spellchecker's suggestions from original response. In case there are no suggestions, returns null.
 * 
 * @param rb the response builder holding the original response
 * @return the spellchecker suggestions, or null if there are none
 */
public static NamedList extractSpellcheckerSuggestions(ResponseBuilder rb) {
  if (rb.rsp.getValues().get("spellcheck") == null) {
    return null;
  }
  
  return (NamedList) ((SimpleOrderedMap) rb.rsp.getValues().get("spellcheck")).get("suggestions");
}
 
Example 23
Source Project: mtas   Source File: MtasSolrCollectionResult.java    License: Apache License 2.0
/**
 * Sets the post.
 *
 * @param now the now
 * @param status the status
 * @throws IOException Signals that an I/O exception has occurred.
 */
public void setPost(long now, SimpleOrderedMap<Object> status)
    throws IOException {
  if (action.equals(ComponentCollection.ACTION_POST)) {
    this.now = now;
    this.status = status;
  } else {
    throw new IOException("not allowed with action '" + action + "'");
  }
}
 
Example 24
Source Project: vind   Source File: SuggestionResultFactory.java    License: Apache License 2.0
/**
 * create a multi suggestion result
 * @param core
 * @param rsp
 * @param fields
 * @param query
 * @param df
 * @param limit
 * @return a multi suggestion result
 */
public static SuggestionResult createMultiValueResult(SolrCore core, SolrQueryResponse rsp, String[] fields, String query, String df, int termLimit, int limit, SuggestionRequestHandler.LimitType limitType) {
    SuggestionResultMulti result = new SuggestionResultMulti(limit, limitType);

    SimpleOrderedMap facets = (SimpleOrderedMap)((SimpleOrderedMap)rsp.getValues().get("facet_counts")).get("facet_fields");

    //for each word
    String[] qps = query.split("( |\\+)");
    LinkedList< List<Facet>> list_of_facet_lists = new LinkedList<>();
    for(int i=0; i<qps.length; i++) {
        LinkedList<Facet> l = new LinkedList<>();
        list_of_facet_lists.addLast(l);
        for(String field : fields) {
            Iterator<Map.Entry> iter = ((NamedList)facets.get(field)).iterator();
            while(iter.hasNext()) {
                Map.Entry<String, NamedList<Object>> entry = iter.next();
                String s = " "+FieldAnalyzerService.analyzeString(core, df, entry.getKey());
                //try if it maps to current fields
                if(s.toLowerCase().contains(" "+qps[i].toLowerCase())) {
                    Object o = entry.getValue();
                    l.addLast(new Facet(field,entry.getKey(),(Integer)o));
                }
            }
        }
    }

    if(list_of_facet_lists.isEmpty()) return result;

    getMultiSuggestions(result,list_of_facet_lists,0,new ArrayList<Facet>());

    //SuggestionResultMulti.MultiFacet facet = result.createMultiFacet();
    //facet.add("who","Sebastian Vettel",2);
    //facet.add("who","Mark Webber",1);

    return result;
}
 
Example 25
Source Project: lucene-solr   Source File: FacetHeatmap.java    License: Apache License 2.0
@Override
@SuppressWarnings({"unchecked"})
public void process() throws IOException {
  super.process(); // handles domain changes

  //Compute!
  final HeatmapFacetCounter.Heatmap heatmap;
  try {
    heatmap = HeatmapFacetCounter.calcFacets(
        strategy,
        fcontext.searcher.getTopReaderContext(),
        getTopAcceptDocs(fcontext.base, fcontext.searcher), // turn DocSet into Bits
        boundsShape,
        gridLevel,
        maxCells);
  } catch (IllegalArgumentException e) {//e.g. too many cells
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.toString(), e);
  }

  //Populate response
  response = new SimpleOrderedMap<>();
  response.add("gridLevel", gridLevel);
  response.add("columns", heatmap.columns);
  response.add("rows", heatmap.rows);
  response.add("minX", heatmap.region.getMinX());
  response.add("maxX", heatmap.region.getMaxX());
  response.add("minY", heatmap.region.getMinY());
  response.add("maxY", heatmap.region.getMaxY());

  //A shard request will always be a PNG
  String format = fcontext.isShard() ? FORMAT_PNG : FacetHeatmap.this.format;

  response.add("counts_" + format, formatCountsVal(format, heatmap.columns, heatmap.rows, heatmap.counts, fcontext.getDebugInfo()));

  // note: we do not call processStats or processSubs as it's not supported yet
}
 
Example 26
Source Project: lucene-solr   Source File: SpatialHeatmapFacets.java    License: Apache License 2.0
/** Called by FacetComponent's impl of
 * {@link org.apache.solr.handler.component.SearchComponent#finishStage(ResponseBuilder)}. */
@SuppressWarnings({"unchecked", "rawtypes"})
public static NamedList distribFinish(LinkedHashMap<String, HeatmapFacet> heatmapInfos, ResponseBuilder rb) {
  NamedList<NamedList<Object>> result = new SimpleOrderedMap<>();
  for (Map.Entry<String, HeatmapFacet> entry : heatmapInfos.entrySet()) {
    final HeatmapFacet facet = entry.getValue();
    result.add(entry.getKey(), (NamedList<Object>) facet.jsonFacetMerger.getMergedResult());
  }
  return result;
}
 
Example 27
@SuppressWarnings("unchecked")
private static void addSuccess(NamedList<Object> results, String key, Object value) {
  SimpleOrderedMap<Object> success = (SimpleOrderedMap<Object>) results.get("success");
  if (success == null) {
    success = new SimpleOrderedMap<>();
    results.add("success", success);
  }
  success.add(key, value);
}
 
Example 28
@SuppressWarnings({"rawtypes"})
public NamedList getDetails() {
  SimpleOrderedMap<Object> out = new SimpleOrderedMap<Object>();
  out.add("text", subSequence);
  out.add("offset_start", getOffsetStart());
  out.add("offset_end", getOffsetEnd());
  out.add("score", getTotalScore());
  out.add("field_scores", fieldScores);
  return out;
}
 
Example 29
Source Project: lucene-solr   Source File: HdfsBackupRepositoryTest.java    License: Apache License 2.0
@Test
public void testCopyBufferSet() throws IOException {
  try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) {
    NamedList<Object> namedList = new SimpleOrderedMap<>();
    namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost");
    namedList.add("solr.hdfs.buffer.size", 32768);
    hdfsBackupRepository.init(namedList);
    assertEquals(hdfsBackupRepository.copyBufferSize, 32768);
  }
}