Java Code Examples for android.media.ImageReader#acquireLatestImage()

The following examples show how to use android.media.ImageReader#acquireLatestImage(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: ImageUtils.java    From SimpleSmsRemote with MIT License 8 votes vote down vote up
/**
 * Retrieve a Bitmap with ARGB_8888 format from the given ImageReader.
 * <p>
 * Expects single-plane RGBA-style frames (e.g. from a VirtualDisplay); YUV camera
 * frames are not handled by this conversion.
 *
 * @param imageReader the image reader to acquire the latest frame from
 * @return the decoded bitmap, or {@code null} if no frame is currently available
 */
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap GetBitmapFromImageReader(ImageReader imageReader) {
    // acquireLatestImage() returns null when no new frame is queued; the old
    // code dereferenced it unconditionally and could NPE.
    Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return null;
    }
    try {
        final Image.Plane[] planes = image.getPlanes();
        final ByteBuffer buffer = planes[0].getBuffer();

        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        // Rows may be padded beyond width * pixelStride; widen the bitmap to absorb it.
        int rowPadding = rowStride - pixelStride * image.getWidth();
        Bitmap bitmap = Bitmap.createBitmap(image.getWidth() + rowPadding / pixelStride,
                image.getHeight(), Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);
        return bitmap;
    } finally {
        // Always release the frame, even if bitmap creation throws, so the
        // reader's buffer is returned for reuse.
        image.close();
    }
}
 
Example 2
Source File: Screenshotter.java    From loco-answers with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Converts the latest frame to an ARGB_8888 bitmap and delivers it to the
 * screenshot callback. The frame is always released, even on failure.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    // Null when no new frame is queued yet.
    Image image = reader.acquireLatestImage();
    if (image == null) {
        Log.d(TAG, "onImageAvailable: image is null");
        return;
    }

    Bitmap bitmap;
    try {
        final Image.Plane[] planes = image.getPlanes();
        final Buffer buffer = planes[0].getBuffer().rewind();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        // Rows can be padded past width * pixelStride; widen the bitmap to cover it.
        int rowPadding = rowStride - pixelStride * width;
        bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);
    } finally {
        // Previously the image leaked if createBitmap/copyPixelsFromBuffer threw.
        image.close();
    }
    tearDown();
    cb.onScreenshot(bitmap);
}
 
Example 3
Source File: Camera2BasicFragment.java    From Cam2Caption with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
/**
 * Decodes the latest JPEG frame, runs the captioning model on it, and posts
 * the resulting text to the UI thread.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = null;
    try {
        // acquireLatestImage() may return null when no frame is pending; the
        // original code would NPE on getPlanes() in that case.
        image = reader.acquireLatestImage();
        if (image == null) {
            return;
        }
        // A JPEG frame is delivered entirely in plane 0.
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
        final String text = runModel(bitmap);
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                textView.setText(text);
            }
        });
    } finally {
        if (image != null) {
            image.close();
        }
    }
}
 
Example 4
Source File: BaseLiveGPUActivity.java    From fritz-examples with MIT License 6 votes vote down vote up
/**
 * Runs inference synchronously on the latest frame, dropping frames while a
 * previous inference is still in progress.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Drop the frame if the previous one is still being processed.
    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }
    try {
        fritzVisionImage = FritzVisionImage.fromMediaImage(image, orientation);
        runInference(fritzVisionImage);
    } finally {
        // Previously, an exception in runInference leaked the image and left
        // `computing` stuck at true, permanently stalling the pipeline.
        image.close();
        computing.set(false);
    }
}
 
Example 5
Source File: BaseLiveVideoActivity.java    From fritz-examples with MIT License 6 votes vote down vote up
/**
 * Copies the latest frame into a FritzVisionImage, releases it, and runs
 * inference on a background thread. Frames arriving mid-inference are dropped.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Drop this frame if inference on the previous one hasn't finished.
    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }
    // Copy out what we need, then release the frame immediately.
    fritzVisionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    runInBackground(
            new Runnable() {
                @Override
                public void run() {
                    try {
                        runInference(fritzVisionImage);
                        requestRender();
                    } finally {
                        // Previously an exception in runInference left `computing`
                        // stuck at true, dropping every subsequent frame forever.
                        computing.set(false);
                    }
                }
            });
}
 
Example 6
Source File: MainActivity.java    From fritz-examples with MIT License 6 votes vote down vote up
// Frame callback: samples frames for on-device label prediction.
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    // acquireLatestImage() returns null when no frame is queued.
    if (image == null) {
        return;
    }

    // Sampling is currently disabled: drop the frame immediately.
    if (!shouldSample.get()) {
        image.close();
        return;
    }

    // Claim the busy flag; drop the frame if a prediction is already running.
    if (!isComputing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    // Copy what's needed, then release the frame right away.
    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    // NOTE(review): isComputing is never reset to false in this callback or the
    // background task below — presumably another part of the class clears it
    // (e.g. after rendering). If not, only one frame would ever be processed;
    // verify against the rest of the class.
    runInBackground(() -> {
        labelResult = predictor.predict(visionImage);
        requestRender();
    });
}
 
Example 7
Source File: FragmentDecoder.java    From camera2QRcodeReader with MIT License 5 votes vote down vote up
/**
 * Attempts to decode a QR code from the latest camera frame.
 * <p>
 * The null frame case is now handled with an explicit check instead of the
 * previous exception-as-control-flow (throwing and catching a
 * NullPointerException), which obscured intent and paid for a stack trace on
 * every empty poll.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Log.e(TAG, "onImageAvailable: " + count++);
    // acquireLatestImage() returns null when no frame is pending.
    Image img = reader.acquireLatestImage();
    if (img == null) {
        Log.d(TAG, "No QR code found…");
        return;
    }
    Result rawResult = null;
    try {
        // For YUV camera frames the luminance (Y) data lives in plane 0,
        // which is all ZXing needs for decoding.
        ByteBuffer buffer = img.getPlanes()[0].getBuffer();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        int width = img.getWidth();
        int height = img.getHeight();
        PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height);
        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));

        rawResult = mQrReader.decode(bitmap);
        onQRCodeRead(rawResult.getText());
    } catch (ReaderException ignored) {
        // Expected whenever the frame contains no decodable QR code.
        Log.e(TAG, "Reader shows an exception! ", ignored);
    } finally {
        // The reader keeps internal state between decode attempts; reset it each frame.
        mQrReader.reset();
        Log.e(TAG, "in the finally! ------------");
        img.close();
    }
    if (rawResult != null) {
        Log.e(TAG, "Decoding successful!");
    } else {
        Log.d(TAG, "No QR code found…");
    }
}
 
Example 8
Source File: ClassifierActivity.java    From android-yolo-v2 with Do What The F*ck You Want To Public License 5 votes vote down vote up
/**
 * Crops the latest frame into {@code croppedBitmap} and runs recognition on a
 * background thread. Frames arriving mid-recognition are dropped.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = null;

    try {
        image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        // Drop frames while the recognizer is busy.
        if (computing) {
            image.close();
            return;
        }

        computing = true;
        fillCroppedBitmap(image);
        image.close();
    } catch (final Exception ex) {
        if (image != null) {
            image.close();
        }
        // Log the full stack trace (ex.getMessage() can be null, which itself
        // crashed Log.e), reset the busy flag, and skip inference: the old code
        // fell through and ran recognition on a stale/partially-filled bitmap.
        Log.e(LOGGING_TAG, "onImageAvailable failed", ex);
        computing = false;
        return;
    }

    runInBackground(() -> {
        final long startTime = SystemClock.uptimeMillis();
        final List<Recognition> results = recognizer.recognizeImage(croppedBitmap);
        lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
        overlayView.setResults(results);
        speak(results);
        requestRender();
        computing = false;
    });
}
 
Example 9
Source File: MainActivity.java    From androidthings-cameraCar with Apache License 2.0 5 votes vote down vote up
/**
 * Copies the latest frame's bytes out of the reader and forwards them to
 * {@code onPictureTaken}.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Log.d(TAG, "PhotoCamera OnImageAvailableListener");

    // acquireLatestImage() can return null if no frame is available yet;
    // the original code dereferenced it unconditionally and could NPE.
    Image image = reader.acquireLatestImage();
    if (image == null) {
        Log.d(TAG, "onImageAvailable: no image available");
        return;
    }
    // get image bytes (a JPEG frame is delivered entirely in plane 0)
    ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
    final byte[] imageBytes = new byte[imageBuf.remaining()];
    imageBuf.get(imageBytes);
    image.close();

    onPictureTaken(imageBytes);
}
 
Example 10
Source File: SRManager.java    From VMLibrary with Apache License 2.0 5 votes vote down vote up
/**
 * Converts the latest screen-capture frame to a cropped ARGB_8888 bitmap and
 * delivers it to the screenshot callback, throttled to at most one frame per
 * 100 ms. The frame is always released at the end.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    long currTime = System.currentTimeMillis();
    VMLog.d("捕获图片有效回调 %d", currTime - oldTime);
    // Throttle: only convert when more than 100 ms elapsed since the last capture.
    boolean shouldCapture = currTime - oldTime > 100;
    if (shouldCapture) {
        oldTime = currTime;
        if (image != null) {
            Image.Plane plane = image.getPlanes()[0];
            ByteBuffer pixelBuffer = plane.getBuffer();
            int width = image.getWidth();
            int height = image.getHeight();
            int stridePixels = plane.getPixelStride();
            int strideRow = plane.getRowStride();
            int padding = strideRow - stridePixels * width;
            // Widen the bitmap to absorb row padding, then crop back to the real size.
            Bitmap padded = Bitmap.createBitmap(width + padding / stridePixels, height, Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(pixelBuffer);
            Bitmap cropped = Bitmap.createBitmap(padded, 0, 0, width, height);
            if (screenShortCallback != null) {
                screenShortCallback.onBitmap(cropped);
            }
        }
    }
    if (image != null) {
        image.close();
    }
}
 
Example 11
Source File: LiveCameraActivity.java    From fritz-examples with MIT License 5 votes vote down vote up
/**
 * Prepares the latest frame for prediction, releases it, and runs inference on
 * a background thread. Frames arriving mid-inference are dropped.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Drop this frame if the previous inference is still running.
    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    setupImageForPrediction(image);

    image.close();

    runInBackground(
            new Runnable() {
                @Override
                public void run() {
                    try {
                        runInference();
                        // Fire callback to change the OverlayView
                        requestRender();
                    } finally {
                        // Previously an exception in runInference left `computing`
                        // stuck at true, dropping every subsequent frame forever.
                        computing.set(false);
                    }
                }
            });
}
 
Example 12
Source File: ImageLabelingActivity.java    From fritz-examples with MIT License 5 votes vote down vote up
/**
 * Copies the latest frame into a FritzVisionImage, releases it, and runs label
 * prediction on a background thread, updating the results view when done.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Drop the frame if the previous prediction is still in flight.
    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    // Copy what we need, then release the frame right away.
    final FritzVisionImage fritzImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    runInBackground(
            new Runnable() {
                @Override
                public void run() {
                    try {
                        final long startTime = SystemClock.uptimeMillis();
                        labelResult = predictor.predict(fritzImage);
                        labelResult.logResult();

                        // Lazily resolve the view the first time results arrive.
                        if (resultsView == null) {
                            resultsView = findViewById(R.id.results);
                        }
                        resultsView.setResult(labelResult.getVisionLabels());
                        Log.d(TAG, "INFERENCE TIME:" + (SystemClock.uptimeMillis() - startTime));
                        requestRender();
                    } finally {
                        // Previously an exception here left `computing` stuck at
                        // true, dropping every subsequent frame forever.
                        computing.set(false);
                    }
                }
            });
}
 
Example 13
Source File: MainActivity.java    From AndroidPlayground with MIT License 5 votes vote down vote up
/**
 * Counts incoming frames and dumps the 100th one as raw YUV data for offline
 * inspection. Every acquired frame is released.
 */
@Override
public void onImageAvailable(ImageReader imageReader) {
    Log.d("MainActivity", "onImageAvailable");
    // acquireLatestImage() may return null when no frame is queued; the
    // original code dereferenced it unconditionally.
    final Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return;
    }
    try {
        count++;
        if (count == 100) {
            // YUV420 frame size: width * height luma plus half that for chroma.
            byte[] yuv = new byte[image.getWidth() * image.getHeight() * 3 / 2];
            image2yuv(image, yuv);
            saveRawYuvData(yuv, image.getWidth(), image.getHeight(), "org");
        }
    } finally {
        // Previously the image leaked if image2yuv/saveRawYuvData threw.
        image.close();
    }
}
 
Example 14
Source File: DoorbellActivity.java    From doorbell with Apache License 2.0 5 votes vote down vote up
/**
 * Copies the latest frame's bytes out of the reader and forwards them to
 * {@code onPictureTaken}.
 */
@Override
public void onImageAvailable(ImageReader reader) {
    // acquireLatestImage() returns null when no frame is available; guard to
    // avoid the NPE the original unconditional dereference could throw.
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    // get image bytes (a JPEG frame is delivered entirely in plane 0)
    ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
    final byte[] imageBytes = new byte[imageBuf.remaining()];
    imageBuf.get(imageBytes);
    image.close();

    onPictureTaken(imageBytes);
}
 
Example 15
Source File: TensorflowImageListener.java    From AndroidDemoProjects with Apache License 2.0 4 votes vote down vote up
/**
 * Processes the newest camera frame: copies its YUV420 planes out, converts
 * them to ARGB, and hands the resulting cropped bitmap to the TensorFlow
 * classifier on a handler thread. Frames arriving mid-recognition are dropped.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    // Null when no frame is currently queued.
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      // One byte array per YUV plane, sized to that plane's buffer capacity.
      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    // Copy each plane's pixels out so the Image can be released promptly.
    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    // Strides come from the Y plane (index 0) and the U/V planes (index 1).
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    // NOTE(review): a failure after `computing = true` leaves the flag set,
    // which would drop all subsequent frames — verify whether something else
    // resets it, or whether this is intended to halt on error.
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  // Recognition runs on the handler thread; `computing` is cleared when done.
  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
 
Example 16
Source File: TensorflowImageListener.java    From Paideia with MIT License 4 votes vote down vote up
/**
 * Processes the newest camera frame: copies its YUV420 planes out, converts
 * them to ARGB, and hands the resulting cropped bitmap to the TensorFlow
 * classifier on a handler thread. Frames arriving mid-recognition are dropped.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    // Null when no frame is currently queued.
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      // One byte array per YUV plane, sized to that plane's buffer capacity.
      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    // Copy each plane's pixels out so the Image can be released promptly.
    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    // Strides come from the Y plane (index 0) and the U/V planes (index 1).
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    // NOTE(review): a failure after `computing = true` leaves the flag set,
    // which would drop all subsequent frames — verify whether something else
    // resets it, or whether this is intended to halt on error.
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  // Recognition runs on the handler thread; `computing` is cleared when done.
  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
 
Example 17
Source File: CameraActivity.java    From next18-ai-in-motion with Apache License 2.0 4 votes vote down vote up
/**
 * Callback for Camera2 API. Claims the latest frame, captures its YUV planes
 * into reusable buffers, and schedules conversion + inference via
 * {@code processImage()}. The frame is released by {@code postInferenceCallback}
 * on success, or immediately here if setup fails.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    //We need wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        // Drop frames while a previous one is still being processed.
        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        try {
            Trace.beginSection("imageAvailable");
            final Image.Plane[] planes = image.getPlanes();
            fillBytes(planes, yuvBytes);
            yRowStride = planes[0].getRowStride();
            final int uvRowStride = planes[1].getRowStride();
            final int uvPixelStride = planes[1].getPixelStride();

            imageConverter =
                    new Runnable() {
                        @Override
                        public void run() {
                            ImageUtils.convertYUV420ToARGB8888(
                                    yuvBytes[0],
                                    yuvBytes[1],
                                    yuvBytes[2],
                                    previewWidth,
                                    previewHeight,
                                    yRowStride,
                                    uvRowStride,
                                    uvPixelStride,
                                    rgbBytes);
                        }
                    };

            postInferenceCallback =
                    new Runnable() {
                        @Override
                        public void run() {
                            image.close();
                            isProcessingFrame = false;
                        }
                    };

            processImage();
        } catch (final Exception e) {
            // Previously a failure after claiming the frame leaked the Image and
            // left isProcessingFrame stuck at true, stalling the whole pipeline.
            image.close();
            isProcessingFrame = false;
            throw e;
        }
    } catch (final Exception e) {
        Log.e("Exception!", e.toString());
        Trace.endSection();
        return;
    }
    Trace.endSection();
}
 
Example 18
Source File: CameraImageActivity.java    From AndroidDemo with MIT License 4 votes vote down vote up
/**
 * Frame callback demonstrating how a Camera2 Image could be converted to a
 * byte array (the conversion code is intentionally kept commented out). The
 * acquired frame is always released.
 */
@Override
public void onImageAvailable(ImageReader reader) {

    // acquireLatestImage() may return null when no frame is queued; without
    // this guard the unconditional image.close() in the finally block threw an NPE.
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    // This frame can be converted to a byte array, similar to the preview
    // frame data delivered by Camera1's PreviewCallback:
//            int len = image.getPlanes().length;
//            if (buffer == null) {
//                buffer = ByteBuffer.allocate(reader.getWidth() * reader.getHeight());
//            }
//            for (int i = 0; i < len; i++) {
//                ByteBuffer _buffer = image.getPlanes()[i].getBuffer();
//                byte[] data = new byte[_buffer.remaining()];
//                _buffer.get(data);
//                buffer.put(data);
//            }
//            buffer.flip();
//            byte[] data = buffer.array();
//            buffer.flip();
//            buffer.clear();
//
//            byte[] nv21Data = new byte[data.length];
//            YuvUtil.yuvI420ToNV21(data, nv21Data, reader.getWidth(), reader.getHeight());
//
//            // YuvImage is used here to turn I420 into an image; libyuv could do the
//            // same — libyuv mainly provides crop, rotate, scale and mirror operations.
//            YuvImage yuvImage = new YuvImage(nv21Data, ImageFormat.NV21, reader.getWidth(), reader.getHeight(), null);
//            ByteArrayOutputStream fOut = new ByteArrayOutputStream();
//            yuvImage.compressToJpeg(new Rect(0, 0, reader.getWidth(), reader.getHeight()), 100, fOut);
//            byte[] bytes = fOut.toByteArray();

    try {
//                ByteBuffer buffer = image.getPlanes()[0].getBuffer();
//                byte[] bytes = new byte[buffer.remaining()];
//                buffer.get(bytes);
//                show(bytes);
//                Log.d(TAG, "format = " + reader.getImageFormat() + ", planes length =  " + image.getPlanes().length);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        image.close();
    }
}
 
Example 19
Source File: CameraActivity.java    From tensorflow-classifier-android with Apache License 2.0 4 votes vote down vote up
/**
 * Callback for Camera2 API. Claims the latest frame, captures its YUV planes
 * into reusable buffers, and schedules conversion + inference via
 * {@code processImage()}. The frame is released by {@code postInferenceCallback}
 * on success, or immediately here if setup fails.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  //We need wait until we have some size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  try {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // Drop frames while a previous one is still being processed.
    if (isProcessingFrame) {
      image.close();
      return;
    }
    isProcessingFrame = true;
    try {
      Trace.beginSection("imageAvailable");
      final Plane[] planes = image.getPlanes();
      fillBytes(planes, yuvBytes);
      yRowStride = planes[0].getRowStride();
      final int uvRowStride = planes[1].getRowStride();
      final int uvPixelStride = planes[1].getPixelStride();

      imageConverter =
          new Runnable() {
            @Override
            public void run() {
              ImageUtils.convertYUV420ToARGB8888(
                  yuvBytes[0],
                  yuvBytes[1],
                  yuvBytes[2],
                  previewWidth,
                  previewHeight,
                  yRowStride,
                  uvRowStride,
                  uvPixelStride,
                  rgbBytes);
            }
          };

      postInferenceCallback =
          new Runnable() {
            @Override
            public void run() {
              image.close();
              isProcessingFrame = false;
            }
          };

      processImage();
    } catch (final Exception e) {
      // Previously a failure after claiming the frame leaked the Image and left
      // isProcessingFrame stuck at true, stalling the whole pipeline.
      image.close();
      isProcessingFrame = false;
      throw e;
    }
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }
  Trace.endSection();
}
 
Example 20
Source File: CameraActivity.java    From dbclf with Apache License 2.0 4 votes vote down vote up
/**
 * Callback for Camera2 API. Claims the latest frame, captures its YUV planes
 * into reusable buffers, and schedules conversion + inference via
 * {@code processImage()}. The frame is released by {@code postInferenceCallback}
 * on success, or immediately here if setup fails.
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    //We need to wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        // Drop frames while a previous one is still being processed.
        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        try {
            final Plane[] planes = image.getPlanes();
            fillBytes(planes, yuvBytes);
            yRowStride = planes[0].getRowStride();
            final int uvRowStride = planes[1].getRowStride();
            final int uvPixelStride = planes[1].getPixelStride();

            imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(
                    yuvBytes[0],
                    yuvBytes[1],
                    yuvBytes[2],
                    previewWidth,
                    previewHeight,
                    yRowStride,
                    uvRowStride,
                    uvPixelStride,
                    rgbBytes);

            postInferenceCallback = () -> {
                image.close();
                isProcessingFrame = false;
            };

            processImage();
        } catch (final Exception e) {
            // Previously swallowed silently: that leaked the Image and left
            // isProcessingFrame stuck at true, permanently stalling the preview.
            // Clean up before dropping the error (still best-effort).
            image.close();
            isProcessingFrame = false;
        }
    } catch (final Exception ignored) {
        // Best-effort: a frame that cannot even be acquired is simply dropped.
    }
}