Java Code Examples for android.media.Image#getHeight()

The following examples show how to use android.media.Image#getHeight(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: DngCreator.java    From android_9.0.0_r45 with Apache License 2.0 6 votes vote down vote up
/**
 * Sets the thumbnail for the DNG being built.
 *
 * <p>
 * The pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
 * Thumbnails whose width or height exceeds {@link #MAX_THUMBNAIL_DIMENSION} are rejected.
 * </p>
 *
 * @param pixels an {@link android.media.Image} in the
 *               {@link android.graphics.ImageFormat#YUV_420_888} format.
 * @return this {@link #DngCreator} object, for call chaining.
 * @throws java.lang.IllegalArgumentException if the image is null, has an unsupported
 *      format, or has a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION}.
 */
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
    if (pixels == null) {
        throw new IllegalArgumentException("Null argument to setThumbnail");
    }

    final int format = pixels.getFormat();
    if (format != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("Unsupported Image format " + format);
    }

    final int width = pixels.getWidth();
    final int height = pixels.getHeight();
    if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
        throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
                "," + height + ") too large, dimensions must be smaller than " +
                MAX_THUMBNAIL_DIMENSION);
    }

    // Convert YUV to a direct RGB buffer before handing off to native code.
    nativeSetThumbnail(convertToRGB(pixels), width, height);
    return this;
}
 
Example 2
Source File: ImageScreenCast.java    From DeviceConnect-Android with MIT License 6 votes vote down vote up
/**
 * Converts a captured {@link Image} into an ARGB_8888 {@link Bitmap}.
 *
 * <p>The source rows may carry stride padding, so a padded-width bitmap is
 * filled first and then cropped down to the visible width. Returns
 * {@code null} when the first plane exposes no buffer.</p>
 */
private Bitmap decodeToBitmap(final Image img) {
    final Image.Plane[] planes = img.getPlanes();
    if (planes[0].getBuffer() == null) {
        return null;
    }

    final int width = img.getWidth();
    final int height = img.getHeight();
    final int pixelStride = planes[0].getPixelStride();
    final int rowStride = planes[0].getRowStride();
    // Extra bytes at the end of each row; converted to pixels below.
    final int padding = rowStride - pixelStride * width;

    final Bitmap padded = Bitmap.createBitmap(width + padding / pixelStride, height,
            Bitmap.Config.ARGB_8888);
    padded.copyPixelsFromBuffer(planes[0].getBuffer());
    img.close();

    // Crop away the padding column(s) on the right.
    return Bitmap.createBitmap(padded, 0, 0, width, height, null, true);
}
 
Example 3
Source File: ImageCapture.java    From DoraemonKit with Apache License 2.0 6 votes vote down vote up
/**
 * Grabs the latest frame from {@code mImageReader} and stores a cropped
 * ARGB_8888 copy in {@code mBitmap}.
 *
 * <p>Re-entrancy is guarded by {@code isCapturing}; the flag is now reset on
 * every exit path. Previously, an early return when {@code acquireLatestImage()}
 * produced {@code null} left the flag stuck at {@code true}, permanently
 * blocking all further captures. The {@link Image} is also now released even
 * if bitmap creation throws.</p>
 */
void capture() {
    if (isCapturing) {
        return;
    }
    if (mImageReader == null) {
        return;
    }
    isCapturing = true;
    Image image = null;
    try {
        image = mImageReader.acquireLatestImage();
        if (image == null) {
            return;
        }
        int width = image.getWidth();
        int height = image.getHeight();
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        // Convert the per-row byte padding into a padding width in pixels.
        int rowPaddingStride = rowStride - pixelStride * width;
        int rowPadding = rowPaddingStride / pixelStride;
        // Rows may be padded, so copy into a wider bitmap first...
        Bitmap recordBitmap = Bitmap.createBitmap(width + rowPadding, height, Bitmap.Config.ARGB_8888);
        recordBitmap.copyPixelsFromBuffer(buffer);
        // ...then crop to the true frame size.
        mBitmap = Bitmap.createBitmap(recordBitmap, 0, 0, width, height);
        // createBitmap may return the source itself when no crop is needed.
        if (mBitmap != recordBitmap) {
            recordBitmap.recycle();
        }
    } finally {
        // BUG FIX: always release the image and clear the guard flag.
        if (image != null) {
            image.close();
        }
        isCapturing = false;
    }
}
 
Example 4
Source File: GPUImageChain.java    From CameraCompat with MIT License 6 votes vote down vote up
/**
 * Receives one camera frame and hands it to the GL renderer.
 *
 * <p>Lazily allocates the RGBA and YUV staging buffers on the first frame,
 * converts the image to RGBA, and schedules it for drawing. If the renderer
 * is still busy with the previous frame, the frame is dropped and only the
 * post-processing callback runs.</p>
 *
 * @param image             the incoming camera frame, converted via
 *                          {@code RgbYuvConverter.image2rgba}.
 * @param postProcessedTask invoked exactly once: either after the frame has
 *                          been drawn, or immediately when the frame is dropped.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onFrameData(final Image image, final Runnable postProcessedTask) {
    final int width = image.getWidth();
    final int height = image.getHeight();
    if (mGLRgbaBuffer == null) {
        // 4 bytes per pixel (RGBA8888).
        mGLRgbaBuffer = ByteBuffer.allocateDirect(width * height * 4);
    }
    if (mGLYuvBuffer == null) {
        // 16 bytes alignment
        // NOTE(review): bufHeight is derived from the renderer's frame aspect
        // ratio and rounded down to a multiple of 16 -- presumably to match
        // the YUV consumer's alignment requirement; confirm against callers.
        int bufHeight = (width * mGLRender.getFrameWidth() / mGLRender.getFrameHeight())
                        & 0xfffffff0;
        // 3/2 bytes per pixel for 4:2:0 YUV data.
        mGLYuvBuffer = ByteBuffer.allocateDirect(width * bufHeight * 3 / 2);
    }
    if (!mGLRender.isBusyDrawing()) {
        RgbYuvConverter.image2rgba(image, mGLRgbaBuffer.array());
        mGLRender.scheduleDrawFrame(mGLRgbaBuffer, width, height, () -> {
            // Forward the unfiltered frame only when no filter is active and
            // rendering is not paused.
            if (!mGLRender.isEnableFilter() && !mGLRender.isPaused()) {
                sendNormalImage(image);
            }
            postProcessedTask.run();
        });
    } else {
        // Renderer busy: drop this frame but still release the caller.
        postProcessedTask.run();
    }
}
 
Example 5
Source File: ImageUtils.java    From ScreenCapture with MIT License 6 votes vote down vote up
/**
 * Converts a screen-capture {@link Image} into a {@link Bitmap}.
 *
 * <p>The plane's rows may carry stride padding (e.g. a 1080x2160 capture can
 * produce a 1088-pixel-wide buffer), so the pixels are first copied into a
 * padded-width bitmap which is then cropped to the real image size.</p>
 *
 * @param image  source image whose first plane holds packed pixel data.
 * @param config pixel config for the created bitmaps.
 * @return a bitmap of exactly {@code image.getWidth() x image.getHeight()}.
 */
public static Bitmap image_2_bitmap(Image image, Bitmap.Config config) {

    int width = image.getWidth();
    int height = image.getHeight();

    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Log.d("WOW",
            "pixelStride:" + pixelStride + ". rowStride:" + rowStride + ". rowPadding" + rowPadding);

    // Wide enough to swallow the row padding: width + padding/pixelStride == rowStride/pixelStride.
    Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, config);
    padded.copyPixelsFromBuffer(buffer);

    Bitmap cropped = Bitmap.createBitmap(padded, 0, 0, width, height);
    // BUG FIX: recycle the padded intermediate to release its pixel memory.
    // createBitmap may return the source itself when no crop is needed, so
    // guard against identity before recycling.
    if (cropped != padded) {
        padded.recycle();
    }
    return cropped;
}
 
Example 6
Source File: SRManager.java    From VMLibrary with Apache License 2.0 6 votes vote down vote up
/**
 * Grabs the latest frame from the {@link android.media.ImageReader} via the
 * low-level API and converts it to a cropped ARGB_8888 bitmap.
 *
 * @return the captured frame, or {@code null} if no frame is available.
 */
public Bitmap cutoutFrame() {
    Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return null;
    }
    try {
        int width = image.getWidth();
        int height = image.getHeight();
        final Image.Plane[] planes = image.getPlanes();
        final ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * width;
        // Rows may be padded, so build a wider bitmap first and crop afterwards.
        Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height,
                Bitmap.Config.ARGB_8888);
        padded.copyPixelsFromBuffer(buffer);
        Bitmap result = Bitmap.createBitmap(padded, 0, 0, width, height);
        // createBitmap may return the source itself when no crop is needed.
        if (result != padded) {
            padded.recycle();
        }
        return result;
    } finally {
        // BUG FIX: the image was never closed, which exhausts the reader's
        // maxImages quota and makes subsequent acquireLatestImage() calls fail.
        image.close();
    }
}
 
Example 7
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0 5 votes vote down vote up
/**
 * Packs the three planes of a YUV_420_888 {@link Image} into {@code dest}
 * in NV21 order: all Y bytes, then the V buffer, then the U buffer.
 * Plane sizes are validated against the image dimensions first.
 */
private static void getNV21(Image src, byte[] dest)
{
    final Image.Plane[] planes = src.getPlanes();
    final ByteBuffer yBuffer = planes[0].getBuffer();
    final ByteBuffer uBuffer = planes[1].getBuffer();
    final ByteBuffer vBuffer = planes[2].getBuffer();

    final int lumaBytes = yBuffer.capacity();
    final int chromaUBytes = uBuffer.capacity();
    final int chromaVBytes = vBuffer.capacity();

    if (lumaBytes != src.getWidth() * src.getHeight()) {
        throw new RuntimeException("Y-plane in planar YUV_420_888 is expected to be width*height bytes");
    }
    if (lumaBytes != 2 * (chromaUBytes + 1)) {
        throw new RuntimeException("U-plane in planar YUV_420_888 is expected to be (width*height/2 - 1) bytes");
    }
    if (lumaBytes != 2 * (chromaVBytes + 1)) {
        throw new RuntimeException("V-plane in planar YUV_420_888 is expected to be (width*height/2 - 1) bytes");
    }

    // Copy Y, then V, then U back-to-back into dest (NV21 byte order).
    int offset = 0;
    offset += getNonInterleaved(yBuffer, dest, offset);
    offset += getNonInterleaved(vBuffer, dest, offset);
    offset += getNonInterleaved(uBuffer, dest, offset);
}
 
Example 8
Source File: SRManager.java    From VMLibrary with Apache License 2.0 5 votes vote down vote up
/**
 * Screen-capture callback. Throttled to at most one bitmap every 100 ms:
 * frames arriving sooner are acquired and closed without processing.
 * Delivered bitmaps are cropped to the real frame size before being handed
 * to {@code screenShortCallback}.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    long now = System.currentTimeMillis();
    VMLog.d("捕获图片有效回调 %d", now - oldTime);
    if (now - oldTime > 100) {
        oldTime = now;
        if (image != null) {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int width = image.getWidth();
            int height = image.getHeight();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            int rowPadding = rowStride - pixelStride * width;
            // Rows may be padded; copy into a wider bitmap, then crop it down.
            Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(buffer);
            Bitmap cropped = Bitmap.createBitmap(padded, 0, 0, width, height);
            if (screenShortCallback != null) {
                screenShortCallback.onBitmap(cropped);
            }
        }
    }
    if (image != null) {
        image.close();
    }
}
 
Example 9
Source File: ScreenCaptureSocket.java    From pc-android-controller-android with Apache License 2.0 5 votes vote down vote up
/**
 * Converts the supplied screen-capture {@link Image} into a cropped
 * ARGB_8888 bitmap and hands it to {@code compressAndWrite(...)}.
 *
 * <p>BUG FIX: the image used to be closed <em>before</em>
 * {@code copyPixelsFromBuffer(...)} consumed its plane buffer (and was then
 * closed a second time afterwards). Closing an {@link Image} invalidates its
 * plane buffers, so the copy read from released memory. The single close now
 * happens only after the buffer has been fully consumed.</p>
 *
 * @param params a single-element array containing the image to convert.
 * @return always {@code null}; the result is delivered via compressAndWrite.
 */
@Override
protected Bitmap doInBackground(Image... params) {

    if (params == null || params.length < 1 || params[0] == null) {

        L.e(" params is null ...");
        return null;
    }

    Image image = params[0];

    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    // Distance in bytes between adjacent pixels.
    int pixelStride = planes[0].getPixelStride();
    // Distance in bytes between the starts of adjacent rows.
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    // Rows may be padded, so copy into a wider bitmap and crop afterwards.
    Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    padded.copyPixelsFromBuffer(buffer);
    // Safe to release the image now that its buffer has been consumed.
    image.close();
    Bitmap bitmap = Bitmap.createBitmap(padded, 0, 0, width, height);
    // createBitmap may return the source itself when no crop is needed.
    if (bitmap != padded) {
        padded.recycle();
    }

    compressAndWrite(bitmap);

    return null;
}
 
Example 10
Source File: CameraHelp2.java    From WeiXinRecordedDemo with MIT License 5 votes vote down vote up
/**
 * Repacks a YUV_420_888 {@link Image} into a planar I420 byte array
 * (all Y bytes, then all U bytes, then all V bytes).
 *
 * <p>NOTE(review): the chroma loops index the U/V plane data with
 * {@code r * width + c} while stepping {@code c} by 2, which assumes each
 * chroma plane has rowStride == width and pixelStride == 2 (semi-planar,
 * interleaved chroma) -- confirm against the camera's actual plane layout
 * before reusing this on other devices.</p>
 */
private byte[] getYUVI420(Image image){

        int width = image.getWidth();
        int height = image.getHeight();

        // 4:2:0 layout: full-resolution Y plus quarter-resolution U and V.
        byte[] yuvI420 = new byte[image.getWidth()*image.getHeight()*3/2];

        // Snapshot each plane's buffer into a byte array.
        byte[] yData = new byte[image.getPlanes()[0].getBuffer().remaining()];
        byte[] uData = new byte[image.getPlanes()[1].getBuffer().remaining()];
        byte[] vData = new byte[image.getPlanes()[2].getBuffer().remaining()];
        image.getPlanes()[0].getBuffer().get(yData);
        image.getPlanes()[1].getBuffer().get(uData);
        image.getPlanes()[2].getBuffer().get(vData);

        // The Y plane is copied through unchanged.
        System.arraycopy(yData, 0, yuvI420, 0, yData.length);
        int index = yData.length;

        for (int r = 0; r < height / 2; ++r) {
            for (int c = 0; c < width; c += 2) { // every other byte holds a U value (interleaved with V)
                yuvI420[index++] = uData[r * width + c];
            }
        }
        for (int r = 0; r < height / 2; ++r) {
            for (int c = 0; c < width; c += 2) { // every other byte holds a V value (interleaved with U)
                yuvI420[index++] = vData[r * width + c];
            }
        }
        return yuvI420;
    }
 
Example 11
Source File: HyperionScreenEncoder.java    From hyperion-android-grabber with MIT License 5 votes vote down vote up
/**
 * Converts one captured frame into pixel data for the listener, optionally
 * trimming detected letterbox borders or reducing the whole frame to a
 * single averaged color.
 */
private void sendImage(Image img) {
    final Image.Plane plane = img.getPlanes()[0];
    final ByteBuffer buffer = plane.getBuffer();

    final int width = img.getWidth();
    final int height = img.getHeight();
    final int pixelStride = plane.getPixelStride();
    final int rowStride = plane.getRowStride();

    int firstX = 0;
    int firstY = 0;
    if (mRemoveBorders || mAvgColor) {
        // Let the border processor inspect this frame, then use its latest result.
        mBorderProcessor.parseBorder(buffer, width, height, rowStride, pixelStride);
        BorderProcessor.BorderObject border = mBorderProcessor.getCurrentBorder();
        if (border != null && border.isKnown()) {
            firstX = border.getHorizontalBorderIndex();
            firstY = border.getVerticalBorderIndex();
        }
    }

    if (mAvgColor) {
        // A single 1x1 averaged pixel represents the whole frame.
        mListener.sendFrame(
                getAverageColor(buffer, width, height, rowStride, pixelStride, firstX, firstY),
                1,
                1
        );
    } else {
        // Borders are symmetric, so each trimmed edge is removed from both sides.
        mListener.sendFrame(
                getPixels(buffer, width, height, rowStride, pixelStride, firstX, firstY),
                width - firstX * 2,
                height - firstY * 2
        );
    }
}
 
Example 12
Source File: WindowCaptureFragment.java    From ViewCapture with Apache License 2.0 5 votes vote down vote up
/**
 * Captures the most recent frame from {@code mImageReader} as a cropped
 * ARGB_8888 bitmap.
 *
 * @return the captured frame, or {@code null} when no frame is available.
 */
private Bitmap createBitmap() {
    Image image = mImageReader.acquireLatestImage();
    // BUG FIX: acquireLatestImage() returns null when no new frame is queued;
    // the previous code dereferenced it unconditionally and crashed with an NPE.
    if (image == null) {
        return null;
    }
    try {
        int width = image.getWidth();
        int height = image.getHeight();
        final Image.Plane[] planes = image.getPlanes();
        final ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * width;
        // Rows may be padded; copy into a wider bitmap, then crop to size.
        Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
        padded.copyPixelsFromBuffer(buffer);
        Bitmap bitmap = Bitmap.createBitmap(padded, 0, 0, width, height);
        // createBitmap may return the source itself when no crop is needed.
        if (bitmap != padded) {
            padded.recycle();
        }
        return bitmap;
    } finally {
        // Release the image even if bitmap creation throws.
        image.close();
    }
}
 
Example 13
Source File: TensorflowImageListener.java    From AndroidDemoProjects with Apache License 2.0 4 votes vote down vote up
/**
 * Camera callback: converts the newest YUV frame to RGB, resizes it to the
 * network's input size, and posts classification work to {@code handler}.
 *
 * <p>Frames that arrive while a classification is still running are dropped;
 * the {@code computing} flag acts as the guard and is cleared by the posted
 * runnable once recognition finishes.</p>
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      // One reusable byte array per plane, sized to that plane's buffer.
      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    // Snapshot the plane data so the Image can be released promptly.
    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    // Strides are read from the U plane only; the converter assumes the V
    // plane matches -- NOTE(review): confirm for the target devices.
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  // Run recognition off this callback; clear the guard flag when done.
  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
 
Example 14
Source File: Screenshotter.java    From RelaxFinger with GNU General Public License v2.0 4 votes vote down vote up
/**
 * One-shot screenshot callback: grabs the most recent frame, converts it to
 * a cropped ARGB_8888 bitmap, delivers it via {@code cb.onScreenshot(...)},
 * and then tears down the virtual display and media projection.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onImageAvailable(ImageReader reader) {

    Image image = null;

    try {
        image = reader.acquireLatestImage();

    }catch (UnsupportedOperationException e){

        // Some readers cannot service acquireLatestImage(); give up on this shot.
        e.printStackTrace();
        return;
    }

    if(image == null){
        return;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    // Bytes of padding at the end of each buffer row.
    int rowPadding = rowStride - pixelStride * width;
    // Rows may be padded, so fill a wider bitmap first...
    Bitmap bitmap = Bitmap.createBitmap(width+rowPadding/pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    // ...then crop it down to the real screen size.
    bitmap = Bitmap.createBitmap(bitmap, 0, 0,width, height);
    cb.onScreenshot(bitmap);
    // Single-capture listener: release the display and projection now.
    if(virtualDisplay!= null){

        virtualDisplay.release();
        virtualDisplay = null;
    }

    if (mMediaProjection != null) {
        mMediaProjection.stop();
        mMediaProjection = null;
    }
    image.close();
    mImageReader = null;
}
 
Example 15
Source File: TensorflowImageListener.java    From Paideia with MIT License 4 votes vote down vote up
/**
 * Camera callback: converts the newest YUV frame to RGB, resizes it to the
 * network's input size, and posts classification work to {@code handler}.
 *
 * <p>Frames that arrive while a classification is still running are dropped;
 * the {@code computing} flag acts as the guard and is cleared by the posted
 * runnable once recognition finishes.</p>
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      // One reusable byte array per plane, sized to that plane's buffer.
      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    // Snapshot the plane data so the Image can be released promptly.
    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    // Strides are read from the U plane only; the converter assumes the V
    // plane matches -- NOTE(review): confirm for the target devices.
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  // Run recognition off this callback; clear the guard flag when done.
  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
 
Example 16
Source File: ResultProcessor.java    From libsoftwaresync with Apache License 2.0 4 votes vote down vote up
/**
 * Wraps the planes of an NV21-formatted {@link Image} into a {@link YuvImage}
 * by concatenating the (possibly row-padded) Y data with the interleaved VU
 * chroma data into a single packed buffer.
 */
private static YuvImage yuvImageFromNv21Image(Image src) {
  long t0 = System.nanoTime();

  Image.Plane[] planes = src.getPlanes();
  Image.Plane luma = planes[0];
  Image.Plane chromaU = planes[1];
  Image.Plane chromaV = planes[2];

  int width = src.getWidth();
  int height = src.getHeight();

  // Luma should be tightly packed and chroma should be tightly interleaved.
  assert (luma.getPixelStride() == 1);
  assert (chromaU.getPixelStride() == 2);
  assert (chromaV.getPixelStride() == 2);

  // Duplicate (shallow copy) each buffer so as to not disturb the underlying position/limit/etc.
  ByteBuffer lumaBuffer = luma.getBuffer().duplicate();
  ByteBuffer chromaUBuffer = chromaU.getBuffer().duplicate();
  ByteBuffer chromaVBuffer = chromaV.getBuffer().duplicate();

  // Yes, y, v, then u since it's NV21.
  int[] yvuRowStrides =
      new int[] {luma.getRowStride(), chromaV.getRowStride(), chromaU.getRowStride()};

  // Compute bytes needed to concatenate all the (potentially padded) YUV data in one buffer.
  int lumaBytes = height * luma.getRowStride();
  int interleavedChromaBytes = (height / 2) * chromaV.getRowStride();
  assert (lumaBuffer.capacity() == lumaBytes);
  int packedYVUBytes = lumaBytes + interleavedChromaBytes;
  byte[] packedYVU = new byte[packedYVUBytes];

  int packedYVUOffset = 0;
  lumaBuffer.get(
      packedYVU,
      packedYVUOffset,
      lumaBuffer.capacity()); // packedYVU[0..lumaBytes) <-- lumaBuffer.
  packedYVUOffset += lumaBuffer.capacity();

  // Write the V buffer. Since the V buffer contains U data, write all of V and then check how
  // much U data is left over. There will be at most 1 byte plus padding.
  chromaVBuffer.get(packedYVU, packedYVUOffset, /*length=*/ chromaVBuffer.capacity());
  packedYVUOffset += chromaVBuffer.capacity();

  // Write the remaining portion of the U buffer (if any).
  int chromaUPosition = chromaVBuffer.capacity() - 1;
  if (chromaUPosition < chromaUBuffer.capacity()) {
    chromaUBuffer.position(chromaUPosition);

    // BUG FIX: the remaining space must be measured against the full packed
    // buffer size (packedYVUBytes). The previous "lumaBytes - packedYVUOffset"
    // is always negative at this point (the offset is already past the luma
    // region), so the trailing chroma byte was never copied.
    int remainingBytes = Math.min(chromaUBuffer.remaining(), packedYVUBytes - packedYVUOffset);

    if (remainingBytes > 0) {
      chromaUBuffer.get(packedYVU, packedYVUOffset, remainingBytes);
    }
  }
  YuvImage yuvImage = new YuvImage(packedYVU, ImageFormat.NV21, width, height, yvuRowStrides);

  long t1 = System.nanoTime();
  Log.i(TAG, String.format("yuvImageFromNv212Image took %f ms.", (t1 - t0) * 1e-6f));

  return yuvImage;
}
 
Example 17
Source File: ImageUtils.java    From FastBarcodeScanner with Apache License 2.0 4 votes vote down vote up
/**
 * Flattens the planes of a YUV_420_888 {@link Image} into one packed byte
 * array (Y plane first, then the two quarter-resolution chroma planes),
 * removing row padding and pixel strides.
 *
 * @param image   source image in the YUV_420_888 format.
 * @param planeNo NOTE(review): currently unused -- all planes are always
 *                copied; confirm the intent with the call sites.
 * @return packed pixel bytes, width*height*3/2 in size.
 */
public static byte[] getPlane(Image image, int planeNo)
{
    ByteBuffer buffer;
    int rowStride;
    int pixelStride;
    int pixelWidth = image.getWidth();
    int pixelHeight = image.getHeight();
    int encodedRowStart = 0;

    Image.Plane[] planes = image.getPlanes();
    int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
    byte[] pixels = new byte[image.getWidth() * image.getHeight() * bytesPerPixel];
    byte[] rowData = new byte[planes[0].getRowStride()];

    for (int i = 0; i < planes.length; i++) {
        buffer = planes[i].getBuffer();
        rowStride = planes[i].getRowStride();
        pixelStride = planes[i].getPixelStride();
        // Chroma planes are half resolution in both dimensions.
        int encodedWidthInPixels = (i == 0) ? pixelWidth : pixelWidth / 2;
        int encodedHeightInPixels = (i == 0) ? pixelHeight : pixelHeight / 2;
        for (int row = 0; row < encodedHeightInPixels; row++) {
            if (pixelStride == bytesPerPixel) {
                // Tightly packed row: bulk-copy it straight into the output.
                int encodedWidthInBytes = encodedWidthInPixels * bytesPerPixel;
                buffer.get(pixels, encodedRowStart, encodedWidthInBytes);

                // Advance buffer the remainder of the row stride, unless on the last row.
                // Otherwise, this will throw an IllegalArgumentException because the buffer
                // doesn't include the last padding.
                if (encodedHeightInPixels - row != 1) {
                    int padding = rowStride - encodedWidthInBytes;
                    buffer.position(buffer.position() + padding);
                }
                encodedRowStart += encodedWidthInBytes;
            } else {

                // On the last row only read the width of the image minus the pixel stride
                // plus one. Otherwise, this will throw a BufferUnderflowException because the
                // buffer doesn't include the last padding.
                if (encodedHeightInPixels - row == 1) {
                    buffer.get(rowData, 0, pixelWidth - pixelStride + 1);
                } else {
                    buffer.get(rowData, 0, rowStride);
                }

                // De-stride the row one pixel at a time.
                for (int col = 0; col < encodedWidthInPixels; col++) {
                    pixels[encodedRowStart + col] = rowData[col * pixelStride];
                }
                // BUG FIX: advance the output offset after each de-strided row.
                // It was never advanced in this branch (the increment was
                // commented out), so every strided row -- and every subsequent
                // plane -- overwrote the same output bytes. The sibling
                // imageToMat advances one pixel per write; this is equivalent.
                encodedRowStart += encodedWidthInPixels;
            }
        }
    }

    return pixels;
}
 
Example 18
Source File: ImageUtils.java    From FastBarcodeScanner with Apache License 2.0 4 votes vote down vote up
/**
 * Flattens an Android {@link Image} in the YUV_420_888 format into a single
 * packed byte array (Y plane, then the two quarter-resolution chroma planes),
 * with row padding and pixel strides removed.
 *
 * <p>Despite the name, no OpenCV Mat is created here -- the commented-out
 * code at the bottom shows the intended follow-up conversion.</p>
 *
 * @param image Image in the YUV_420_888 format.
 * @return packed pixel bytes, width*height*3/2 in size.
 */
public static byte[] imageToMat(Image image) {
    ByteBuffer buffer;
    int rowStride;
    int pixelStride;
    int pixelWidth = image.getWidth();
    int pixelHeight = image.getHeight();
    int encodedRowStart = 0;

    Image.Plane[] planes = image.getPlanes();
    int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
    byte[] pixels = new byte[image.getWidth() * image.getHeight() * bytesPerPixel];
    // Staging array for de-striding rows; sized from plane 0's row stride --
    // NOTE(review): assumes chroma row strides never exceed the luma stride.
    byte[] rowData = new byte[planes[0].getRowStride()];

    for (int i = 0; i < planes.length; i++) {
        buffer = planes[i].getBuffer();
        rowStride = planes[i].getRowStride();
        pixelStride = planes[i].getPixelStride();
        // Chroma planes are half resolution in both dimensions.
        int encodedWidthInPixels = (i == 0) ? pixelWidth : pixelWidth / 2;
        int encodedHeightInPixels = (i == 0) ? pixelHeight : pixelHeight / 2;
        for (int row = 0; row < encodedHeightInPixels; row++) {
            if (pixelStride == bytesPerPixel) {
                // Tightly packed row: bulk-copy it straight into the output.
                int encodedWidthInBytes = encodedWidthInPixels * bytesPerPixel;
                buffer.get(pixels, encodedRowStart, encodedWidthInBytes);

                // Advance buffer the remainder of the row stride, unless on the last row.
                // Otherwise, this will throw an IllegalArgumentException because the buffer
                // doesn't include the last padding.
                if (encodedHeightInPixels - row != 1) {
                    int padding = rowStride - encodedWidthInBytes;
                    buffer.position(buffer.position() + padding);
                }
                encodedRowStart += encodedWidthInBytes;
            } else {

                // On the last row only read the width of the image minus the pixel stride
                // plus one. Otherwise, this will throw a BufferUnderflowException because the
                // buffer doesn't include the last padding.
                if (encodedHeightInPixels - row == 1) {
                    buffer.get(rowData, 0, pixelWidth - pixelStride + 1);
                } else {
                    buffer.get(rowData, 0, rowStride);
                }

                // De-stride the row, advancing the output offset pixel by pixel.
                for (int col = 0; col < encodedWidthInPixels; col++) {
                    pixels[encodedRowStart++] = rowData[col * pixelStride];
                }
            }
        }
    }

    // Finally, create the Mat.
    //Mat mat = new Mat(pixelHeight + pixelHeight / 2, pixelWidth, CvType.CV_8UC1);
    //mat.put(0, 0, pixels);

    return pixels;
}
 
Example 19
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the byte-array size to allocate for an NV21 copy of {@code src}.
 *
 * <p>NOTE(review): a tightly packed NV21 frame needs width*height*1.5 bytes
 * (the commented-out line); this deliberately(?) allocates 2x instead --
 * presumably headroom for padded row strides. Confirm with the callers
 * before shrinking.</p>
 */
private static int getNV21Size(Image src)
{
    //return (int)(src.getHeight() * src.getWidth() * 1.5);
    return (int)(src.getHeight() * src.getWidth() * 2);
}
 
Example 20
Source File: DngCreator.java    From android_9.0.0_r45 with Apache License 2.0 4 votes vote down vote up
/**
 * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
 *
 * <p>Converts row by row: each Y/U/V row is read into a reusable staging
 * array (honoring the plane's row and pixel strides), converted pixel by
 * pixel via {@code yuvToRgb}, and appended to the output buffer.</p>
 */
private static ByteBuffer convertToRGB(Image yuvImage) {
    // TODO: Optimize this with renderscript intrinsic.
    int width = yuvImage.getWidth();
    int height = yuvImage.getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);

    Image.Plane yPlane = yuvImage.getPlanes()[0];
    Image.Plane uPlane = yuvImage.getPlanes()[1];
    Image.Plane vPlane = yuvImage.getPlanes()[2];

    ByteBuffer yBuf = yPlane.getBuffer();
    ByteBuffer uBuf = uPlane.getBuffer();
    ByteBuffer vBuf = vPlane.getBuffer();

    // Start each plane read from the beginning of its buffer.
    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();

    int yRowStride = yPlane.getRowStride();
    int vRowStride = vPlane.getRowStride();
    int uRowStride = uPlane.getRowStride();

    int yPixStride = yPlane.getPixelStride();
    int vPixStride = vPlane.getPixelStride();
    int uPixStride = uPlane.getPixelStride();

    byte[] yuvPixel = { 0, 0, 0 };
    // Row staging arrays sized as stride * (pixels - 1) + 1: the last pixel
    // in a row only needs its first byte, which avoids reading past the end
    // of the plane buffer on the final row.
    byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
    byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
    byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
    byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
    for (int i = 0; i < height; i++) {
        // Chroma is subsampled 2x vertically and horizontally (4:2:0).
        int halfH = i / 2;
        yBuf.position(yRowStride * i);
        yBuf.get(yFullRow);
        uBuf.position(uRowStride * halfH);
        uBuf.get(uFullRow);
        vBuf.position(vRowStride * halfH);
        vBuf.get(vFullRow);
        for (int j = 0; j < width; j++) {
            int halfW = j / 2;
            yuvPixel[0] = yFullRow[yPixStride * j];
            yuvPixel[1] = uFullRow[uPixStride * halfW];
            yuvPixel[2] = vFullRow[vPixStride * halfW];
            yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
        }
        buf.put(finalRow);
    }

    // Reset positions so callers see freshly rewound buffers.
    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();
    buf.rewind();
    return buf;
}