Java Code Examples for org.apache.lucene.util.ArrayUtil#oversize()

The following examples show how to use org.apache.lucene.util.ArrayUtil#oversize(). The original project and source file for each example are listed above it.
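
Before the project examples, here is a minimal sketch of the pattern most of them follow: ArrayUtil.oversize() takes the minimum length you need plus the per-element size in bytes and returns a slightly larger capacity, so repeated growth stays amortized; the caller then copies the old contents into the new array. The append helper below is hypothetical and only illustrates that pattern, it is not taken from any of the projects listed here.

import org.apache.lucene.util.ArrayUtil;

// Hypothetical helper: append a value, growing the array via oversize() when full.
static long[] append(long[] values, int size, long value) {
  if (size == values.length) {
    // Ask for at least size + 1 slots; the element size (Long.BYTES) lets
    // oversize() decide how much extra headroom to add.
    long[] next = new long[ArrayUtil.oversize(size + 1, Long.BYTES)];
    System.arraycopy(values, 0, next, 0, size);
    values = next;
  }
  values[size] = value;
  return values;
}
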
Example 1
Source File: SortingLeafReader.java    From lucene-solr with Apache License 2.0
@Override
public int nextPosition() throws IOException {
  final int token = postingInput.readVInt();
  pos += token >>> 1; // the upper bits carry the delta-encoded position
  if (storeOffsets) {
    startOffset = endOffset + postingInput.readVInt();
    endOffset = startOffset + postingInput.readVInt();
  }
  if ((token & 1) != 0) { // the low bit flags a stored payload
    payload.offset = 0;
    payload.length = postingInput.readVInt();
    if (payload.length > payload.bytes.length) {
      // grow the payload buffer with headroom so repeated resizes stay cheap
      payload.bytes = new byte[ArrayUtil.oversize(payload.length, 1)];
    }
    postingInput.readBytes(payload.bytes, 0, payload.length);
  } else {
    payload.length = 0;
  }
  return pos;
}
 
Example 2
Source File: CompressingTermVectorsWriter.java    From lucene-solr with Apache License 2.0
void addPosition(int position, int startOffset, int length, int payloadLength) {
  if (hasPositions) {
    if (posStart + totalPositions == positionsBuf.length) {
      positionsBuf = ArrayUtil.grow(positionsBuf);
    }
    positionsBuf[posStart + totalPositions] = position;
  }
  if (hasOffsets) {
    if (offStart + totalPositions == startOffsetsBuf.length) {
      final int newLength = ArrayUtil.oversize(offStart + totalPositions, 4);
      startOffsetsBuf = ArrayUtil.growExact(startOffsetsBuf, newLength);
      lengthsBuf = ArrayUtil.growExact(lengthsBuf, newLength);
    }
    startOffsetsBuf[offStart + totalPositions] = startOffset;
    lengthsBuf[offStart + totalPositions] = length;
  }
  if (hasPayloads) {
    if (payStart + totalPositions == payloadLengthsBuf.length) {
      payloadLengthsBuf = ArrayUtil.grow(payloadLengthsBuf);
    }
    payloadLengthsBuf[payStart + totalPositions] = payloadLength;
  }
  ++totalPositions;
}
 
Example 3
Source File: JapaneseTokenizer.java    From lucene-solr with Apache License 2.0
private void reserve(int n) {
  if (capacity < n) {
    int oversize = ArrayUtil.oversize(n, Integer.BYTES);
    nodeDicType = new Type[oversize];
    nodeWordID = new int[oversize];
    nodeMark = new int[oversize];
    nodeLeftID = new int[oversize];
    nodeRightID = new int[oversize];
    nodeWordCost = new int[oversize];
    nodeLeftCost = new int[oversize];
    nodeRightCost = new int[oversize];
    nodeLeftNode = new int[oversize];
    nodeRightNode = new int[oversize];
    nodeLeft = new int[oversize];
    nodeRight = new int[oversize];
    nodeLeftChain = new int[oversize];
    nodeRightChain = new int[oversize];
    capacity = oversize;
  }
}
 
Example 4
Source File: IDVersionSegmentTermsEnum.java    From lucene-solr with Apache License 2.0
private FST.Arc<Pair<BytesRef,Long>> getArc(int ord) {
  if (ord >= arcs.length) {
    @SuppressWarnings({"rawtypes","unchecked"}) final FST.Arc<Pair<BytesRef,Long>>[] next =
    new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(arcs, 0, next, 0, arcs.length);
    for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
      next[arcOrd] = new FST.Arc<>();
    }
    arcs = next;
  }
  return arcs[ord];
}
 
Example 5
Source File: SegmentTermsEnumFrame.java    From lucene-solr with Apache License 2.0
public void setFloorData(ByteArrayDataInput in, BytesRef source) {
  final int numBytes = source.length - (in.getPosition() - source.offset);
  if (numBytes > floorData.length) {
    floorData = new byte[ArrayUtil.oversize(numBytes, 1)];
  }
  System.arraycopy(source.bytes, source.offset+in.getPosition(), floorData, 0, numBytes);
  floorDataReader.reset(floorData, 0, numBytes);
  numFollowFloorBlocks = floorDataReader.readVInt();
  nextFloorLabel = floorDataReader.readByte() & 0xff;
  //if (DEBUG) {
  //System.out.println("    setFloorData fpOrig=" + fpOrig + " bytes=" + new BytesRef(source.bytes, source.offset + in.getPosition(), numBytes) + " numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + toHex(nextFloorLabel));
  //}
}
 
Example 6
Source File: Lucene80DocValuesConsumer.java    From lucene-solr with Apache License 2.0
private long writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException {
  long[] offsets = new long[ArrayUtil.oversize(1, Long.BYTES)];
  int offsetsIndex = 0;
  final long[] buffer = new long[NUMERIC_BLOCK_SIZE];
  final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance();
  int upTo = 0;
  for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
    for (int i = 0, count = values.docValueCount(); i < count; ++i) {
      buffer[upTo++] = values.nextValue();
      if (upTo == NUMERIC_BLOCK_SIZE) {
        offsets = ArrayUtil.grow(offsets, offsetsIndex+1);
        offsets[offsetsIndex++] = data.getFilePointer();
        writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer);
        upTo = 0;
      }
    }
  }
  if (upTo > 0) {
    offsets = ArrayUtil.grow(offsets, offsetsIndex+1);
    offsets[offsetsIndex++] = data.getFilePointer();
    writeBlock(buffer, upTo, gcd, encodeBuffer);
  }

  // All blocks have been written. Flush the offset jump-table
  final long offsetsOrigo = data.getFilePointer();
  for (int i = 0 ; i < offsetsIndex ; i++) {
    data.writeLong(offsets[i]);
  }
  data.writeLong(offsetsOrigo);
  return offsetsOrigo;
}
 
Example 7
Source File: WordDelimiterGraphFilter.java    From lucene-solr with Apache License 2.0
/**
 * Saves the existing attribute states
 */
private void saveState() {
  savedTermLength = termAttribute.length();
  savedStartOffset = offsetAttribute.startOffset();
  savedEndOffset = offsetAttribute.endOffset();
  savedState = captureState();

  if (savedTermBuffer.length < savedTermLength) {
    savedTermBuffer = new char[ArrayUtil.oversize(savedTermLength, Character.BYTES)];
  }

  System.arraycopy(termAttribute.buffer(), 0, savedTermBuffer, 0, savedTermLength);
}
 
Example 8
Source File: BlockTreeTermsReader.java    From incubator-retired-blur with Apache License 2.0
public void setFloorData(ByteArrayDataInput in, BytesRef source) {
  final int numBytes = source.length - (in.getPosition() - source.offset);
  if (numBytes > floorData.length) {
    floorData = new byte[ArrayUtil.oversize(numBytes, 1)];
  }
  System.arraycopy(source.bytes, source.offset+in.getPosition(), floorData, 0, numBytes);
  floorDataReader.reset(floorData, 0, numBytes);
  numFollowFloorBlocks = floorDataReader.readVInt();
  nextFloorLabel = floorDataReader.readByte() & 0xff;
  //if (DEBUG) {
  //System.out.println("    setFloorData fpOrig=" + fpOrig + " bytes=" + new BytesRef(source.bytes, source.offset + in.getPosition(), numBytes) + " numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + toHex(nextFloorLabel));
  //}
}
 
Example 9
Source File: Operations.java    From lucene-solr with Apache License 2.0
private PointTransitions next(int point) {
  // 1st time we are seeing this point
  if (count == points.length) {
    final PointTransitions[] newArray = new PointTransitions[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(points, 0, newArray, 0, count);
    points = newArray;
  }
  PointTransitions points0 = points[count];
  if (points0 == null) {
    points0 = points[count] = new PointTransitions();
  }
  points0.reset(point);
  count++;
  return points0;
}
 
Example 10
Source File: StreamOutput.java    From crate with Apache License 2.0
public void writeString(String str) throws IOException {
    final int charCount = str.length();
    final int bufferSize = Math.min(3 * charCount, 1024); // at most 3 bytes per character are needed here
    if (convertStringBuffer.length < bufferSize) { // we don't use ArrayUtil.grow since copying the bytes is unnecessary
        convertStringBuffer = new byte[ArrayUtil.oversize(bufferSize, Byte.BYTES)];
    }
    byte[] buffer = convertStringBuffer;
    int offset = 0;
    writeVInt(charCount);
    for (int i = 0; i < charCount; i++) {
        final int c = str.charAt(i);
        if (c <= 0x007F) {
            buffer[offset++] = ((byte) c);
        } else if (c > 0x07FF) {
            buffer[offset++] = ((byte) (0xE0 | c >> 12 & 0x0F));
            buffer[offset++] = ((byte) (0x80 | c >> 6 & 0x3F));
            buffer[offset++] = ((byte) (0x80 | c >> 0 & 0x3F));
        } else {
            buffer[offset++] = ((byte) (0xC0 | c >> 6 & 0x1F));
            buffer[offset++] = ((byte) (0x80 | c >> 0 & 0x3F));
        }
        // make sure any possible char can fit into the buffer in any possible iteration
        // we need at most 3 bytes so we flush the buffer once we have less than 3 bytes
        // left before we start another iteration
        if (offset > buffer.length - 3) {
            writeBytes(buffer, offset);
            offset = 0;
        }
    }
    writeBytes(buffer, offset);
}
 
Example 11
Source File: IntersectTermsEnum.java    From lucene-solr with Apache License 2.0
private FST.Arc<BytesRef> getArc(int ord) {
  if (ord >= arcs.length) {
    @SuppressWarnings({"rawtypes","unchecked"}) final FST.Arc<BytesRef>[] next =
    new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(arcs, 0, next, 0, arcs.length);
    for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
      next[arcOrd] = new FST.Arc<>();
    }
    arcs = next;
  }
  return arcs[ord];
}
 
Example 12
Source File: TermVectorsConsumer.java    From lucene-solr with Apache License 2.0
void addFieldToFlush(TermVectorsConsumerPerField fieldToFlush) {
  if (numVectorFields == perFields.length) {
    int newSize = ArrayUtil.oversize(numVectorFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
    TermVectorsConsumerPerField[] newArray = new TermVectorsConsumerPerField[newSize];
    System.arraycopy(perFields, 0, newArray, 0, numVectorFields);
    perFields = newArray;
  }

  perFields[numVectorFields++] = fieldToFlush;
}
 
Example 13
Source File: SegmentTermsEnum.java    From lucene-solr with Apache License 2.0
private FST.Arc<BytesRef> getArc(int ord) {
  if (ord >= arcs.length) {
    @SuppressWarnings({"rawtypes","unchecked"}) final FST.Arc<BytesRef>[] next =
    new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(arcs, 0, next, 0, arcs.length);
    for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
      next[arcOrd] = new FST.Arc<>();
    }
    arcs = next;
  }
  return arcs[ord];
}
 
Example 14
Source File: Lucene84ScoreSkipReader.java    From lucene-solr with Apache License 2.0
@Override
protected void readImpacts(int level, IndexInput skipStream) throws IOException {
  int length = skipStream.readVInt();
  if (impactData[level].length < length) {
    impactData[level] = new byte[ArrayUtil.oversize(length, Byte.BYTES)];
  }
  skipStream.readBytes(impactData[level], 0, length);
  impactDataLength[level] = length;
}
 
Example 15
Source File: OrdsIntersectTermsEnum.java    From lucene-solr with Apache License 2.0
private FST.Arc<Output> getArc(int ord) {
  if (ord >= arcs.length) {
    @SuppressWarnings({"rawtypes","unchecked"}) final FST.Arc<Output>[] next =
    new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(arcs, 0, next, 0, arcs.length);
    for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
      next[arcOrd] = new FST.Arc<>();
    }
    arcs = next;
  }
  return arcs[ord];
}
 
Example 16
Source File: CharTermAttributeImpl.java    From lucene-solr with Apache License 2.0
@Override
public final char[] resizeBuffer(int newSize) {
  if(termBuffer.length < newSize){
    // Not big enough; create a new array with slight
    // over allocation and preserve content
    final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)];
    System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length);
    termBuffer = newCharBuffer;
  }
  return termBuffer;   
}
 
Example 17
Source File: FSTTermsReader.java    From lucene-solr with Apache License 2.0
Frame newFrame() {
  if (level+1 == stack.length) {
    final Frame[] temp = new Frame[ArrayUtil.oversize(level+2, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(stack, 0, temp, 0, stack.length);
    for (int i = stack.length; i < temp.length; i++) {
      temp[i] = new Frame();
    }
    stack = temp;
  }
  return stack[level+1];
}
 
Example 18
Source File: FiniteStringsIterator.java    From lucene-solr with Apache License 2.0
/**
 * Grow path stack, if required.
 */
private void growStack(int depth) {
  if (nodes.length == depth) {
    PathNode[] newNodes = new PathNode[ArrayUtil.oversize(nodes.length + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
    System.arraycopy(nodes, 0, newNodes, 0, nodes.length);
    for (int i = depth, end = newNodes.length; i < end; i++) {
      newNodes[i] = new PathNode();
    }
    nodes = newNodes;
  }
}
 
Example 19
Source File: CompressingStoredFieldsWriter.java    From lucene-solr with Apache License 2.0
@Override
public void finishDocument() throws IOException {
  if (numBufferedDocs == this.numStoredFields.length) {
    final int newLength = ArrayUtil.oversize(numBufferedDocs + 1, 4);
    this.numStoredFields = ArrayUtil.growExact(this.numStoredFields, newLength);
    endOffsets = ArrayUtil.growExact(endOffsets, newLength);
  }
  this.numStoredFields[numBufferedDocs] = numStoredFieldsInDoc;
  numStoredFieldsInDoc = 0;
  endOffsets[numBufferedDocs] = Math.toIntExact(bufferedDocs.size());
  ++numBufferedDocs;
  if (triggerFlush()) {
    flush();
  }
}
 
Example 20
Source File: GlobalOrdinalsStringTermsAggregator.java    From Elasticsearch with Apache License 2.0
protected static void copy(BytesRef from, BytesRef to) {
    if (to.bytes.length < from.length) {
        to.bytes = new byte[ArrayUtil.oversize(from.length, RamUsageEstimator.NUM_BYTES_BYTE)];
    }
    to.offset = 0;
    to.length = from.length;
    System.arraycopy(from.bytes, from.offset, to.bytes, 0, from.length);
}