android.media.Image.Plane Java Examples

The following examples show how to use android.media.Image.Plane. They are drawn from open-source Android projects; the project, source file, and license are noted above each example.
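Image.Plane exposes three accessors that every example below leans on: getBuffer() for the raw pixel data, getRowStride() for the byte distance between the starts of consecutive rows, and getPixelStride() for the byte distance between adjacent pixels in a row. A minimal sketch, assuming `image` is a YUV_420_888 Image obtained from ImageReader.acquireLatestImage() (the method name logPlaneLayout is illustrative):

import android.media.Image;
import android.media.Image.Plane;
import android.util.Log;
import java.nio.ByteBuffer;

// Sketch: log the memory layout of each plane of a YUV_420_888 Image.
static void logPlaneLayout(final Image image) {
    final Plane[] planes = image.getPlanes(); // three planes for YUV_420_888: Y, U, V
    for (int i = 0; i < planes.length; ++i) {
        final ByteBuffer buffer = planes[i].getBuffer(); // raw pixel data for this plane
        Log.d("PlaneDemo", "plane " + i
                + " capacity=" + buffer.capacity()
                + " rowStride=" + planes[i].getRowStride()       // may exceed the image width
                + " pixelStride=" + planes[i].getPixelStride()); // often 2 for the U/V planes
    }
}

Because rows can be padded and the chroma planes can interleave their samples, code that consumes these buffers must carry the stride values along, which is exactly what the examples below do.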
Example #1
Source File: CameraActivity.java    From dbclf with Apache License 2.0
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
    // Because of the variable row stride it's not possible to know in
    // advance the actual necessary dimensions of the yuv planes.
    for (int i = 0; i < planes.length; ++i) {
        final ByteBuffer buffer = planes[i].getBuffer();
        if (yuvBytes[i] == null) {
            yuvBytes[i] = new byte[buffer.capacity()];
        }
        buffer.get(yuvBytes[i]);
    }
}
 
Example #2
Source File: CameraActivity.java    From tensorflow-classifier-android with Apache License 2.0
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  // Because of the variable row stride it's not possible to know in
  // advance the actual necessary dimensions of the yuv planes.
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    if (yuvBytes[i] == null) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
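Both versions assume the caller owns a reusable byte[][] with one slot per plane. A minimal sketch of such a caller, assuming a YUV_420_888 ImageReader (the onFrame name is illustrative; only yuvBytes and fillBytes come from the examples above):

import android.media.Image;
import android.media.ImageReader;

private final byte[][] yuvBytes = new byte[3][]; // one slot per Y/U/V plane, sized lazily by fillBytes()

void onFrame(final ImageReader reader) {
  final Image image = reader.acquireLatestImage();
  if (image == null) {
    return; // no frame ready yet
  }
  try {
    fillBytes(image.getPlanes(), yuvBytes); // copy each plane into its byte array
    // ... pass yuvBytes plus the stride values to a YUV -> RGB converter ...
  } finally {
    image.close(); // always release the Image so the reader can reuse its buffer
  }
}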
 
Example #3
Source File: CameraActivity.java    From dbclf with Apache License 2.0
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    // We need to wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        final Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);
        yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);

        postInferenceCallback = () -> {
            image.close();
            isProcessingFrame = false;
        };

        processImage();
    } catch (final Exception ignored) {
        // Swallow any failure and drop this frame.
    }
}
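Note that the lambda only captures the conversion; nothing has been converted yet when onImageAvailable returns. processImage() is expected to run imageConverter and, once inference is done, invoke postInferenceCallback to close the Image and clear the busy flag. A sketch of that contract (the body is an assumption, not the project's actual code; only the field names come from the example):

protected void processImage() {
    imageConverter.run();        // perform the deferred YUV -> ARGB conversion into rgbBytes
    // ... run the classifier on rgbBytes, typically on a background handler ...
    postInferenceCallback.run(); // closes the Image and resets isProcessingFrame
}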
 
Example #4
Source File: CameraActivity.java    From tensorflow-classifier-android with Apache License 2.0
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  // We need to wait until we have some size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  try {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    if (isProcessingFrame) {
      image.close();
      return;
    }
    isProcessingFrame = true;
    Trace.beginSection("imageAvailable");
    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);
    yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();

    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);
          }
        };

    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            image.close();
            isProcessingFrame = false;
          }
        };

    processImage();
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }
  Trace.endSection();
}
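Unlike Example #3, this version pairs Trace.beginSection("imageAvailable") with a Trace.endSection() on both the exception path and the normal path. A try/finally keeps that pairing automatic; a sketch of the alternative shape (an illustrative restructuring, not the project's code):

Trace.beginSection("imageAvailable");
try {
  // ... fillBytes(), stride bookkeeping, processImage() ...
} catch (final Exception e) {
  LOGGER.e(e, "Exception!");
} finally {
  Trace.endSection(); // runs exactly once on every exit path
}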
 
Example #5
Source File: TensorflowImageListener.java    From Paideia with MIT License
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
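The stride values threaded through convertYUV420ToARGB8888 are what make Plane handling non-trivial: rows may be padded past the image width, and the U/V planes may interleave their samples. As a smaller illustration of the same indexing, a sketch that copies just the luminance (Y) plane into a tightly packed grayscale array (extractLuminance is an illustrative name, not part of the project):

import android.media.Image;
import android.media.Image.Plane;
import java.nio.ByteBuffer;

// Sketch: extract a width*height grayscale array from the Y plane of a
// YUV_420_888 Image, honoring rowStride and pixelStride.
static byte[] extractLuminance(final Image image) {
  final Plane yPlane = image.getPlanes()[0];
  final ByteBuffer buffer = yPlane.getBuffer();
  final int width = image.getWidth();
  final int height = image.getHeight();
  final int rowStride = yPlane.getRowStride();     // >= width when rows are padded
  final int pixelStride = yPlane.getPixelStride(); // 1 for the Y plane in practice
  final byte[] out = new byte[width * height];
  for (int row = 0; row < height; ++row) {
    for (int col = 0; col < width; ++col) {
      out[row * width + col] = buffer.get(row * rowStride + col * pixelStride);
    }
  }
  return out;
}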
 
Example #6
Source File: TensorflowImageListener.java    From AndroidDemoProjects with Apache License 2.0
The implementation is identical, line for line, to Example #5 above.