Java Code Examples for android.media.Image#close()

The following examples show how to use android.media.Image#close(). You can go to the original project or source file by following the link above each example. Note the pattern they all share: every Image acquired from an ImageReader must be closed on every code path, otherwise the reader's buffer queue fills up and new frames can no longer be acquired.
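
Because android.media.Image implements AutoCloseable (API 19 and up), the same contract can also be expressed with try-with-resources instead of explicit close() calls. Here is a minimal sketch of the pattern, where processFrame() is a hypothetical placeholder for whatever work is done with the frame; everything else is the real android.media API:

import android.media.Image;
import android.media.ImageReader;

ImageReader.OnImageAvailableListener listener = reader -> {
    // acquireLatestImage() returns null when no new frame is ready;
    // try-with-resources simply skips close() for a null resource.
    try (Image image = reader.acquireLatestImage()) {
        if (image == null) {
            return;
        }
        processFrame(image); // hypothetical; must not retain the Image past this block
    } // image.close() runs here, even if processFrame() throws
};
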
Example 1
Source File: BaseRecordingActivity.java    From fritz-examples with MIT License
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Only process frames while we're recording
    if (!isRecording.get()) {
        image.close();
        return;
    }

    // Throttle: only grab a frame every TIME_BETWEEN_FRAMES_MS milliseconds
    if (System.currentTimeMillis() - lastRecordedFrameAt.get() < TIME_BETWEEN_FRAMES_MS) {
        image.close();
        return;
    }

    // Add the frame to a queue to process
    lastRecordedFrameAt.set(System.currentTimeMillis());
    final FritzVisionImage fritzImage = FritzVisionImage.fromMediaImage(image, orientation);
    videoProcessingQueue.addVisionImage(fritzImage);
    image.close();
}
 
Example 2
Source File: ImagePreprocessor.java    From androidthings-imageclassifier with Apache License 2.0
public Bitmap preprocessImage(final Image image) {
    if (image == null) {
        return null;
    }

    Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
    Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());

    if (croppedBitmap != null && rgbFrameBitmap != null) {
        ByteBuffer bb = image.getPlanes()[0].getBuffer();
        rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
        cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
    }

    image.close();

    // For debugging
    if (SAVE_PREVIEW_BITMAP) {
        saveBitmap(croppedBitmap);
    }
    return croppedBitmap;
}
 
Example 3
Source File: Screenshotter.java    From loco-answers with GNU General Public License v3.0
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        Log.d(TAG, "onImageAvailable: image is null");
        return;
    }

    final Image.Plane[] planes = image.getPlanes();
    final Buffer buffer = planes[0].getBuffer().rewind();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    tearDown();
    image.close();
    cb.onScreenshot(bitmap);
}
 
Example 4
Source File: BaseLiveGPUActivity.java    From fritz-examples with MIT License
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }
    fritzVisionImage = FritzVisionImage.fromMediaImage(image, orientation);
    runInference(fritzVisionImage);
    image.close();
    computing.set(false);
}
 
Example 5
Source File: MainActivity.java    From fritz-examples with MIT License
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!shouldSample.get()) {
        image.close();
        return;
    }

    if (!isComputing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    runInBackground(() -> {
        poseResult = predictor.predict(visionImage);
        requestRender();
    });
}
 
Example 6
Source File: HyperionScreenEncoder.java    From hyperion-android-grabber with MIT License
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
@Override
public void onImageAvailable(ImageReader reader) {
    if (mListener != null && isCapturing()) {
        try {
            long now = System.nanoTime();
            Image img = reader.acquireLatestImage();
            if (img != null) {
                try {
                    // Only send a frame if enough time has passed since the last one.
                    if (now - lastFrame >= min_nano_time) {
                        sendImage(img);
                        lastFrame = now;
                    }
                } finally {
                    // Always return the buffer, even if sendImage() throws.
                    img.close();
                }
            }
        } catch (final Exception e) {
            if (DEBUG) Log.w(TAG, "sendImage exception:", e);
        }
    }
}
 
Example 7
Source File: FragmentDecoder.java    From camera2QRcodeReader with MIT License
@Override
public void onImageAvailable(ImageReader reader) {
    Log.e(TAG, "onImageAvailable: " + count++);
    Image img = reader.acquireLatestImage();
    Result rawResult = null;
    try {
        if (img == null) throw new NullPointerException("cannot be null");
        ByteBuffer buffer = img.getPlanes()[0].getBuffer();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        int width = img.getWidth();
        int height = img.getHeight();
        PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height);
        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));

        rawResult = mQrReader.decode(bitmap);
        onQRCodeRead(rawResult.getText());
    } catch (ReaderException ignored) {
        Log.e(TAG, "Reader shows an exception! ", ignored);
        /* Ignored */
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    } finally {
        mQrReader.reset();
        Log.e(TAG, "in the finally! ------------");
        if (img != null)
            img.close();

    }
    if (rawResult != null) {
        Log.e(TAG, "Decoding successful!");
    } else {
        Log.d(TAG, "No QR code found…");
    }
}
 
Example 8
Source File: Frame.java    From libsoftwaresync with Apache License 2.0
@Override
public void close() {
  if (closed) {
    throw new IllegalStateException("This Frame is already closed");
  }
  for (Image image : output.images) {
    image.close();
  }
  output.close();
  closed = true;
}
 
Example 9
Source File: WindowCaptureFragment.java    From ViewCapture with Apache License 2.0
private Bitmap createBitmap() {
    Image image = mImageReader.acquireLatestImage();
    if (image == null) {
        // acquireLatestImage() returns null when no frame is ready
        return null;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    image.close();
    return bitmap;
}
 
Example 10
Source File: Camera2Source.java    From Camera2Vision with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    Image mImage = reader.acquireNextImage();
    if (mImage == null) {
        return;
    }
    mFrameProcessor.setNextFrame(convertYUV420888ToNV21(mImage));
    mImage.close();
}
 
Example 11
Source File: DoorbellActivity.java    From doorbell with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    // get image bytes
    ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
    final byte[] imageBytes = new byte[imageBuf.remaining()];
    imageBuf.get(imageBytes);
    image.close();

    onPictureTaken(imageBytes);
}
 
Example 12
Source File: OneCameraImpl.java    From Camera2 with Apache License 2.0
/**
 * Given an image reader, this extracts the final image. If the image in the
 * reader is JPEG, we extract and return it as is. If the image is YUV, we
 * convert it to JPEG and return the result.
 *
 * @param image the image we got from the image reader.
 * @return A valid JPEG image.
 */
private static byte[] acquireJpegBytesAndClose(Image image)
{
    ByteBuffer buffer;
    if (image.getFormat() == ImageFormat.JPEG)
    {
        Image.Plane plane0 = image.getPlanes()[0];
        buffer = plane0.getBuffer();
    } else if (image.getFormat() == ImageFormat.YUV_420_888)
    {
        buffer = ByteBuffer.allocateDirect(image.getWidth() * image.getHeight() * 3);

        Log.v(TAG, "Compressing JPEG with software encoder.");
        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(new AndroidImageProxy(image), buffer,
                JPEG_QUALITY);

        if (numBytes < 0)
        {
            throw new RuntimeException("Error compressing jpeg.");
        }
        buffer.limit(numBytes);
    } else
    {
        throw new RuntimeException("Unsupported image format.");
    }

    byte[] imageBytes = new byte[buffer.remaining()];
    buffer.get(imageBytes);
    buffer.rewind();
    image.close();
    return imageBytes;
}
 
Example 13
Source File: ScreenCapturer.java    From habpanelviewer with GNU General Public License v3.0
public synchronized Bitmap captureScreen() throws IllegalStateException {
    AtomicReference<Image> imageHolder = new AtomicReference<>();
    final CountDownLatch latch = new CountDownLatch(1);

    ImageReader mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
    mImageReader.setOnImageAvailableListener(imageReader -> {
        imageHolder.set(imageReader.acquireLatestImage());
        latch.countDown();
    }, mHandler);

    VirtualDisplay display = mProjection.createVirtualDisplay("screen-mirror", mWidth, mHeight, mDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mImageReader.getSurface(),
            null, null);

    try {
        latch.await(1, TimeUnit.SECONDS);

        if (latch.getCount() == 1) {
            throw new IllegalStateException("Screen capturing timed out");
        }

        final Image image = imageHolder.get();
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * mWidth;

        // create bitmap
        Bitmap bmp = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(buffer);
        image.close();

        return bmp;
    } catch (InterruptedException e) {
        throw new IllegalStateException("Got interrupt while capturing screen");
    } finally {
        display.release();
        // Also release the reader created for this capture.
        mImageReader.close();
    }
}
 
Example 14
Source File: MainActivity.java    From androidthings-cameraCar with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    Log.d(TAG, "PhotoCamera OnImageAvailableListener");

    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    // get image bytes
    ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
    final byte[] imageBytes = new byte[imageBuf.remaining()];
    imageBuf.get(imageBytes);
    image.close();

    onPictureTaken(imageBytes);
}
 
Example 15
Source File: TensorflowImageListener.java    From AndroidDemoProjects with Apache License 2.0
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
 
Example 16
Source File: Screenshotter.java    From RelaxFinger with GNU General Public License v2.0
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = null;
    try {
        image = reader.acquireLatestImage();
    } catch (UnsupportedOperationException e) {
        e.printStackTrace();
        return;
    }

    if (image == null) {
        return;
    }

    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    cb.onScreenshot(bitmap);

    if (virtualDisplay != null) {
        virtualDisplay.release();
        virtualDisplay = null;
    }

    if (mMediaProjection != null) {
        mMediaProjection.stop();
        mMediaProjection = null;
    }

    image.close();
    mImageReader = null;
}
 
Example 17
Source File: ImageCaptureManager.java    From Camera2 with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader)
{
    long startTime = SystemClock.currentThreadTimeMillis();

    final Image img = reader.acquireLatestImage();

    if (img != null)
    {
        int numOpenImages = mNumOpenImages.incrementAndGet();
        if (DEBUG_PRINT_OPEN_IMAGE_COUNT)
        {
            Log.v(TAG, "Acquired an image. Number of open images = " + numOpenImages);
        }

        long timestamp = img.getTimestamp();
        // Try to place the newly-acquired image into the ring buffer.
        boolean swapSuccess = doImageSwap(img);
        if (!swapSuccess)
        {
            // If we were unable to save the image to the ring buffer, we
            // must close it now.
            // We should only get here if the ring buffer is closed.
            img.close();
            numOpenImages = mNumOpenImages.decrementAndGet();
            if (DEBUG_PRINT_OPEN_IMAGE_COUNT)
            {
                Log.v(TAG, "Closed an image. Number of open images = " + numOpenImages);
            }
        }

        tryExecutePendingCaptureRequest(timestamp);

        long endTime = SystemClock.currentThreadTimeMillis();
        long totTime = endTime - startTime;
        if (totTime > DEBUG_MAX_IMAGE_CALLBACK_DUR)
        {
            // If it takes too long to swap elements, we will start skipping
            // preview frames, resulting in visible jank.
            Log.v(TAG, "onImageAvailable() took " + totTime + "ms");
        }
    }
}
 
Example 18
Source File: CameraActivity.java    From dbclf with Apache License 2.0
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    //We need to wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        final Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);
        yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);

        postInferenceCallback = () -> {
            image.close();
            isProcessingFrame = false;
        };

        processImage();
    } catch (final Exception ignored) {
    }
}
 
Example 19
Source File: CameraImageActivity.java    From AndroidDemo with MIT License
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    // This frame's data could be converted to a byte array, similar to the
    // preview frame data delivered by Camera1's PreviewCallback:
//    int len = image.getPlanes().length;
//    if (buffer == null) {
//        buffer = ByteBuffer.allocate(reader.getWidth() * reader.getHeight());
//    }
//    for (int i = 0; i < len; i++) {
//        ByteBuffer _buffer = image.getPlanes()[i].getBuffer();
//        byte[] data = new byte[_buffer.remaining()];
//        _buffer.get(data);
//        buffer.put(data);
//    }
//    buffer.flip();
//    byte[] data = buffer.array();
//    buffer.flip();
//    buffer.clear();
//
//    byte[] nv21Data = new byte[data.length];
//    YuvUtil.yuvI420ToNV21(data, nv21Data, reader.getWidth(), reader.getHeight());
//
//    // YuvImage is used here to convert the YUV I420 data into a JPEG; libyuv could
//    // do this as well. The point of this example is to demonstrate libyuv's crop,
//    // rotate, scale, and mirror operations.
//    YuvImage yuvImage = new YuvImage(nv21Data, ImageFormat.NV21, reader.getWidth(), reader.getHeight(), null);
//    ByteArrayOutputStream fOut = new ByteArrayOutputStream();
//    yuvImage.compressToJpeg(new Rect(0, 0, reader.getWidth(), reader.getHeight()), 100, fOut);
//    byte[] bytes = fOut.toByteArray();

    try {
//        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
//        byte[] bytes = new byte[buffer.remaining()];
//        buffer.get(bytes);
//        show(bytes);
//        Log.d(TAG, "format = " + reader.getImageFormat() + ", planes length = " + image.getPlanes().length);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        image.close();
    }
}
 
Example 20
Source File: TensorflowImageListener.java    From Paideia with MIT License
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}