Java Code Examples for com.carrotsearch.hppc.IntArrayList#size()

The following examples show how to use com.carrotsearch.hppc.IntArrayList#size(). You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: GallicSemiring.java    From jopenfst with MIT License 6 votes vote down vote up
/**
 * Factorize a gallic weight into the (head x weight, rest x One); the contract of factorize is that:
 * val (factor1, factor2) = factorize(weight) implies weight = times(factor1, factor2)
 * (see openfst's GallicFactor)
 * @param weight gallic weight to factorize; must not be zero
 * @return pair of (first label with the original weight, remaining labels with One)
 */
public Pair<GallicWeight, GallicWeight> factorize(GallicWeight weight) {
  Preconditions.checkArgument(isNotZero(weight), "cannot factorize a zero weight");
  IntArrayList labels = weight.getLabels();
  int labelCount = labels.size();
  // No labels: the whole weight goes into the first factor, One into the second
  if (labelCount == 0) {
    return Pair.of(GallicWeight.createEmptyLabels(weight.getWeight()), one());
  }
  // Exactly one label: nothing remains for the second factor
  if (labelCount == 1) {
    return Pair.of(GallicWeight.createSingleLabel(labels.get(0), weight.getWeight()), one());
  }
  // Split into head (first label, original weight) and tail (remaining labels, One)
  IntArrayList head = new IntArrayList(1);
  head.add(labels.get(0));
  IntArrayList tail = new IntArrayList(labelCount - 1);
  for (int idx = 1; idx < labelCount; idx++) {
    tail.add(labels.get(idx));
  }
  return Pair.of(GallicWeight.create(head, weight.getWeight()), GallicWeight.create(tail, weightSemiring.one()));
}
 
Example 2
Source File: FLASHAlgorithmImpl.java    From arx with Apache License 2.0 6 votes vote down vote up
/**
 * Returns all transformations that do not have the given property and sorts the resulting array
 * according to the strategy.
 *
 * @param level The level which is to be sorted
 * @param triggerSkip The trigger to be used for limiting the number of nodes to be sorted
 * @return A sorted array of nodes remaining on this level
 */
/**
 * Returns all transformations on the given level that do not trigger the skip action,
 * sorted according to the strategy.
 *
 * @param level The level which is to be sorted
 * @param triggerSkip The trigger to be used for limiting the number of nodes to be sorted
 * @return A sorted array of nodes remaining on this level
 */
private int[] getSortedUnprocessedNodes(int level, DependentAction triggerSkip) {

    // Collect the ids of all transformations on this level that are not skipped
    IntArrayList list = new IntArrayList();
    for (ObjectIterator<Long> iter = ((SolutionSpaceLong)solutionSpace).unsafeGetLevel(level); iter.hasNext();) {
        long id = iter.next();
        if (!skip(triggerSkip, ((SolutionSpaceLong)solutionSpace).getTransformation(id))) {
            // NOTE(review): narrowing cast assumes ids fit into an int — TODO confirm upstream guarantee
            list.add((int)id);
        }
    }

    // Copy & sort: toArray() returns an exact-size copy, avoiding direct access
    // to the list's internal buffer/elementsCount fields
    int[] array = list.toArray();
    sort(array);
    return array;
}
 
Example 3
Source File: ShardResponse.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Merges the outcome of another {@code ShardResponse} into this one: each item's
 * success or failure is recorded at its location, and any result rows are appended.
 */
public void update(ShardResponse response) {
    List<Failure> failures = response.failures();
    IntArrayList itemIndices = response.itemIndices();
    int count = itemIndices.size();
    for (int idx = 0; idx < count; idx++) {
        int location = itemIndices.get(idx);
        if (failures.get(idx) == null) {
            successfulWrites.set(location, true);
        } else {
            failureLocations.set(location, true);
        }
    }
    List<Object[]> rows = response.getResultRows();
    if (rows != null) {
        this.resultRows.addAll(rows);
    }
}
 
Example 4
Source File: ShardFetchRequest.java    From Elasticsearch with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a fetch request for a scroll search.
 *
 * @param request the originating scroll request, forwarded to the parent constructor
 * @param id the id of the search context to fetch from
 * @param list the doc ids to fetch; its backing buffer is stored directly, so it may be
 *             longer than {@code size} — only the first {@code size} entries are valid
 * @param lastEmittedDoc the last emitted doc — NOTE(review): presumably nullable; confirm against callers
 */
public ShardFetchRequest(SearchScrollRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
    super(request);
    this.id = id;
    // Stores the list's internal buffer without copying; the caller must not mutate
    // the list afterwards — TODO confirm this ownership convention
    this.docIds = list.buffer;
    this.size = list.size();
    this.lastEmittedDoc = lastEmittedDoc;
}
 
Example 5
Source File: ShardFetchRequest.java    From Elasticsearch with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a fetch request derived from an original transport request.
 *
 * @param originalRequest the request this fetch is derived from, forwarded to the parent constructor
 * @param id the id of the search context to fetch from
 * @param list the doc ids to fetch; its backing buffer is stored directly, so it may be
 *             longer than {@code size} — only the first {@code size} entries are valid
 * @param lastEmittedDoc the last emitted doc — NOTE(review): presumably nullable; confirm against callers
 */
protected ShardFetchRequest(TransportRequest originalRequest, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
    super(originalRequest);
    this.id = id;
    // Stores the list's internal buffer without copying; the caller must not mutate
    // the list afterwards — TODO confirm this ownership convention
    this.docIds = list.buffer;
    this.size = list.size();
    this.lastEmittedDoc = lastEmittedDoc;
}
 
Example 6
Source File: SolrInformationServer.java    From SearchServices with GNU Lesser General Public License v3.0 5 votes vote down vote up
/**
 * Collects the ids of all documents indexed as error nodes, stripping the
 * error prefix from each id before parsing it as a {@code Long}.
 */
@Override
public Set<Long> getErrorDocIds() throws IOException
{
    Set<Long> result = new HashSet<>();
    RefCounted<SolrIndexSearcher> refCounted = null;
    try
    {
        refCounted = this.core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();

        // Find every document whose doc type marks it as an error node
        DocListCollector collector = new DocListCollector();
        searcher.search(new TermQuery(new Term(FIELD_DOC_TYPE, DOC_TYPE_ERROR_NODE)), collector);

        IntArrayList docs = collector.getDocs();
        for (int i = 0, n = docs.size(); i < n; ++i)
        {
            // Only the id field is needed from each matching document
            Document document = searcher.doc(docs.get(i), REQUEST_ONLY_ID_FIELD);
            String idString = document.getField(FIELD_SOLR4_ID).stringValue();

            if (idString.startsWith(PREFIX_ERROR))
            {
                idString = idString.substring(PREFIX_ERROR.length());
            }

            result.add(Long.valueOf(idString));
        }
    }
    finally
    {
        // Release the searcher reference even if the search throws
        ofNullable(refCounted).ifPresent(RefCounted::decref);
    }
    return result;
}
 
Example 7
Source File: Classifier.java    From SFA with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Copies the contents of an hppc {@code IntArrayList} into a plain int array.
 *
 * @param trainSet the list of values to convert
 * @return a new array containing the list's elements in order
 */
protected static int[] convertToInt(IntArrayList trainSet) {
  // hppc already provides an exact-size copy; the manual cursor loop is unnecessary
  return trainSet.toArray();
}
 
Example 8
Source File: SolrInformationServer.java    From SearchServices with GNU Lesser General Public License v3.0 4 votes vote down vote up
/**
 * Returns the metadata of all nodes affected by cascade updates from the given transactions.
 * <p>
 * First searches the index for documents whose cascade-tx field matches any of the supplied
 * transaction ids, collects the distinct db ids from those documents, then fetches the
 * metadata for each node from the repository, one call per node.
 *
 * @param txnIds transaction ids to match against the indexed cascade-tx field
 * @return metadata for each matching node (acl id, txn id, paths and child ids included;
 *         properties and associations excluded)
 * @throws IOException if the index search fails
 * @throws JSONException presumably from repository metadata parsing — TODO confirm origin
 */
@Override
public List<NodeMetaData> getCascadeNodes(List<Long> txnIds) throws IOException, JSONException
{
    // Resolve the indexed field that stores cascade transaction ids; the first
    // field instance is used — NOTE(review): assumes the list is non-empty
    List<FieldInstance> list = dataModel.getIndexedFieldNamesForProperty(ContentModel.PROP_CASCADE_TX).getFields();
    FieldInstance fieldInstance = list.get(0);

    RefCounted<SolrIndexSearcher> refCounted = null;
    IntArrayList docList;
    Set<Long> parentNodesId = new HashSet<>();

    try
    {
        refCounted = core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();
        String field = fieldInstance.getField();
        SchemaField schemaField = searcher.getSchema().getField(field);
        FieldType fieldType = schemaField.getType();
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        BooleanQuery booleanQuery;

        // OR together one term per transaction id, encoding each id the way
        // the schema field type indexes it
        for(Long l : txnIds)
        {
            BytesRefBuilder bytesRefBuilder = new BytesRefBuilder();
            fieldType.readableToIndexed(l.toString(), bytesRefBuilder);
            TermQuery termQuery = new TermQuery(new Term(field, bytesRefBuilder.toBytesRef()));
            BooleanClause booleanClause = new BooleanClause(termQuery, BooleanClause.Occur.SHOULD);
            builder.add(booleanClause);
        }

        booleanQuery = builder.build();

        // Run the query and extract the distinct node db ids from the matches;
        // only the id field is fetched from each document
        DocListCollector collector = new DocListCollector();
        searcher.search(booleanQuery, collector);
        docList = collector.getDocs();
        int size = docList.size();
        for(int i=0; i<size; i++)
        {
            int docId = docList.get(i);
            Document document = searcher.doc(docId, REQUEST_ONLY_ID_FIELD);
            IndexableField indexableField = document.getField(FIELD_SOLR4_ID);
            String id = indexableField.stringValue();
            TenantDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
            parentNodesId.add(ids.dbId);
        }
    }
    finally
    {
        // Release the searcher reference even if the search throws
        ofNullable(refCounted).ifPresent(RefCounted::decref);
    }

    List<NodeMetaData> allNodeMetaDatas = new ArrayList<>();

    // Fetch metadata for each collected node id; a range of [id, id] with
    // maxResults 1 yields at most one result per call
    for (Long parentNodeId : parentNodesId)
    {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        nmdp.setFromNodeId(parentNodeId);
        nmdp.setToNodeId(parentNodeId);
        nmdp.setIncludeAclId(true);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeTxnId(true);
        nmdp.setMaxResults(1);
        // Gets only one
        Optional<Collection<NodeMetaData>> nodeMetaDatas = getNodesMetaDataFromRepository(nmdp);
        allNodeMetaDatas.addAll(nodeMetaDatas.orElse(Collections.emptyList()));
    }

    return allNodeMetaDatas;
}
 
Example 9
Source File: CFSA2Serializer.java    From morfologik-stemming with BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
/**
 * Linearize all states, putting <code>states</code> in front of the automaton
 * and calculating stable state offsets.
 *
 * @param fsa the automaton whose states are linearized
 * @param states states to place at the front (callers pass those with most inlinks)
 * @param linearized output list receiving the linearization order; cleared first
 * @param offsets map from state to offset, filled in by this method
 * @return the last non-zero value returned by emitNodes (0 if it returns 0 immediately)
 *         — presumably the converged serialized size; confirm against emitNodes
 * @throws IOException propagated from emitNodes
 */
private int linearizeAndCalculateOffsets(FSA fsa, IntArrayList states, IntArrayList linearized,
    IntIntHashMap offsets) throws IOException {
  final BitSet visited = new BitSet();
  final IntStack nodes = new IntStack();
  linearized.clear();

  /*
   * Linearize states with most inlinks first.
   */
  for (int i = 0; i < states.size(); i++) {
    linearizeState(fsa, nodes, linearized, visited, states.get(i));
  }

  /*
   * Linearize the remaining states by chaining them one after another, in depth-order.
   * The visited set prevents re-emitting states already placed above.
   */
  nodes.push(fsa.getRootNode());
  while (!nodes.isEmpty()) {
    final int node = nodes.pop();
    if (visited.get(node))
      continue;

    linearizeState(fsa, nodes, linearized, visited, node);
  }

  /*
   * Calculate new state offsets. This is iterative. We start with 
   * maximum potential offsets and recalculate until converged.
   */
  int MAX_OFFSET = Integer.MAX_VALUE;
  for (IntCursor c : linearized) {
    offsets.put(c.value, MAX_OFFSET);
  }

  // Repeated (dry-run, null output) emission passes refine the offsets until
  // emitNodes reports convergence by returning 0; j keeps the last non-zero result.
  int i, j = 0;
  while ((i = emitNodes(fsa, null, linearized)) > 0) {
    j = i;
  }
  return j;
}
 
Example 10
Source File: InsertFromValues.java    From crate with Apache License 2.0 4 votes vote down vote up
/**
 * Create bulk-response depending on number of bulk responses
 * <pre>
 *     compressedResult
 *          success: [1, 1, 1, 1]
 *          failure: []
 *
 *     insert into t (x) values (?), (?)   -- bulkParams: [[1, 2], [3, 4]]
 *     Response:
 *      [2, 2]
 *
 *     insert into t (x) values (?)        -- bulkParams: [[1], [2], [3], [4]]
 *     Response:
 *      [1, 1, 1, 1]
 * </pre>
 *
 * @param result per-item success/failure flags, indexed by item position
 * @param bulkResponseSize number of slots in the returned array (one per bulk param set)
 * @param bulkIndices maps each item position to the bulk response slot it belongs to
 * @return row count per bulk slot; a slot is set to {@code Row1.ERROR} when any of its items failed
 */
private static long[] createBulkResponse(ShardResponse.CompressedResult result,
                                         int bulkResponseSize,
                                         IntArrayList bulkIndices) {
    // Java arrays of long are zero-initialized, so the previous explicit
    // Arrays.fill(resultRowCount, 0L) was redundant and has been removed
    long[] resultRowCount = new long[bulkResponseSize];
    for (int i = 0; i < bulkIndices.size(); i++) {
        int resultIdx = bulkIndices.get(i);
        if (result.successfulWrites(i)) {
            resultRowCount[resultIdx]++;
        } else if (result.failed(i)) {
            resultRowCount[resultIdx] = Row1.ERROR;
        }
    }
    return resultRowCount;
}