Java Code Examples for android.graphics.ImageFormat

The following are examples showing how to use android.graphics.ImageFormat. They are extracted from open source projects.
Example 1
Project: androidthings-imageclassifier   File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
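Below is a minimal sketch of how this handler might be driven from an activity; the mCameraHandler field and the listener body are assumptions, but a background HandlerThread is the standard way to keep camera callbacks off the UI thread.

HandlerThread cameraThread = new HandlerThread("CameraBackground");
cameraThread.start();
Handler backgroundHandler = new Handler(cameraThread.getLooper());
mCameraHandler.initializeCamera(this, backgroundHandler,
        new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                // Hypothetical consumer: grab the latest JPEG frame and release it.
                Image image = reader.acquireLatestImage();
                if (image != null) {
                    // The JPEG bytes live in image.getPlanes()[0].getBuffer()
                    image.close();
                }
            }
        });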
 
Example 2
Project: PeSanKita-android   File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
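A plausible call site for this helper, assuming the preview was configured for NV21 as in the other examples on this page; TAG is a hypothetical log tag. Note that the cropping rectangle is expressed in rotated coordinates, so width and height swap for a 90-degree rotation.

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    try {
        // After a 90-degree rotation the frame is size.height wide and size.width tall.
        byte[] jpeg = BitmapUtil.createFromNV21(data, size.width, size.height,
                90, new Rect(0, 0, size.height, size.width), false);
        // jpeg now holds the rotated, cropped, JPEG-compressed frame
    } catch (IOException e) {
        Log.w(TAG, "Could not encode preview frame", e);
    }
}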
 
Example 3
Project: FaceDetectDemo   File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
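For reference, mPreviewFormat mirrors whatever format was applied to the camera; a hedged sketch of that wiring, reusing this class's field names:

Camera.Parameters params = mCamera.getParameters();
params.setPreviewFormat(ImageFormat.NV21);  // or ImageFormat.YV12
mCamera.setParameters(params);
mPreviewFormat = params.getPreviewFormat(); // what rgba() switches on later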
 
Example 4
Project: humaniq-android   File: PhotoFragment.java
private void configureCamera() {
    final Camera.Parameters parameters = camera.getParameters();
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);

        // set focus for video if present
        List<String> focusModes = parameters.getSupportedFocusModes();

        if (null != focusModes && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }

        // check if torch is present
        List<String> flashModes = parameters.getSupportedFlashModes();

        cameraFlashIsSupported = null != flashModes && flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH);

        final Camera.Size bestPreviewSize = getBestPreviewSize();
        photoProcessor.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        camera.setParameters(parameters);
    } catch (RuntimeException exception) {
        Toast.makeText(getContext(), R.string.camera_configuration_failed, Toast.LENGTH_SHORT).show();
    }
}
 
Example 5
Project: Amazing   File: CameraActivity.java
private void setParams() {
    //LogUtil.e("preview set size=" + width + " : " + height);
    Camera.Parameters parameters = camera.getParameters();
    //        parameters.setPreviewSize(width, height);
    //        parameters.setPictureSize(width, height);
    parameters.setPreviewFormat(ImageFormat.NV21);
    camera.setDisplayOrientation(90);
    parameters.setRotation(90);

    List<Integer> supportedPreviewFormats = parameters.getSupportedPreviewFormats();
    for (Integer integer : supportedPreviewFormats) {
        //LogUtil.e("preview format=" + integer);
    }

    List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    for (Camera.Size size : supportedPreviewSizes) {
        //LogUtil.e("preview size=" + size.width + " : " + size.height);
    }
    camera.setParameters(parameters);
}
 
Example 6
Project: seeta4Android   File: FaceDetector.java
private void saveFace(final int x, final int y, final int r, final int b) {
    if (DEBUG) Log.d(TAG, "[saveFace()]");
    new Thread(new Runnable() {
        @Override
        public void run() {
            synchronized (mVideoSource) {
                mImageYuv = new YuvImage(mVideoSource, ImageFormat.NV21, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT, null);
            }
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            mImageYuv.compressToJpeg(new Rect(0, 0, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT), 100, stream);
            Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

            int left = Math.max(x, 0);
            int top = Math.max(y, 0);
            // Clamp the crop to the frame bounds: width against IMAGE_WIDTH,
            // height against IMAGE_HEIGHT.
            int cropW = (r < CameraWrapper.IMAGE_WIDTH) ? (r - x) : (CameraWrapper.IMAGE_WIDTH - x - 1);
            int cropH = (b < CameraWrapper.IMAGE_HEIGHT) ? (b - y) : (CameraWrapper.IMAGE_HEIGHT - y - 1);

            mImage = Bitmap.createBitmap(bitmap, left, top, cropW, cropH, null, false);
            if (DEBUG) Log.d(TAG, "[saveFace()] x:" + x + "  y:" + y + "\n" +
                    "[saveFace()] h:" + mImage.getHeight() + "  w:" + mImage.getWidth());
            FaceUtil.saveBitmapToFile(mImage);
        }
    }).start();
}
 
Example 7
Project: BuddyBook   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
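To make the size computation concrete, here is the arithmetic for a typical 640x480 NV21 preview; ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12:

int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21); // 12
long sizeInBits = 480L * 640L * bitsPerPixel;                     // 3,686,400
int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;          // 460,801 bytes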
 
Example 8
Project: WithYou   File: VideoVerify.java
private Bitmap decodeToBitMap(byte[] data) {
	try {
		YuvImage image = new YuvImage(data, ImageFormat.NV21, PREVIEW_WIDTH,
				PREVIEW_HEIGHT, null);
		if (image != null) {
			ByteArrayOutputStream stream = new ByteArrayOutputStream();
			image.compressToJpeg(new Rect(0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT),
					80, stream);
			Bitmap bmp = BitmapFactory.decodeByteArray(
					stream.toByteArray(), 0, stream.size());
			stream.close();
			return bmp;
		}
	} catch (Exception ex) {
		Log.e("Sys", "Error:" + ex.getMessage());
	}
	return null;
}
 
Example 9
Project: OCR-Reader   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 10
Project: Camera2Vision   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;
    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //
    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }
    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 11
Project: BWS-Android   File: IntensityPlane.java
/**
 * Extracts the Y-plane from a YUV_420_888 image to create an IntensityPlane.
 * The actual plane data will be copied into the new IntensityPlane object.
 *
 * @throws IllegalArgumentException if the provided image is not in the YUV_420_888 format
 */
@NonNull
public static IntensityPlane extract(@NonNull Image img) {
    if (img.getFormat() != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("image format must be YUV_420_888");
    }

    Image.Plane[] planes = img.getPlanes();

    ByteBuffer buffer = planes[0].getBuffer();
    byte[] yPlane = new byte[buffer.remaining()];
    buffer.get(yPlane);

    int yRowStride = planes[0].getRowStride();

    return new IntensityPlane(img.getWidth(), img.getHeight(), yPlane, yRowStride);
}
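The row stride is kept because an image plane's rows may be padded beyond the visible width. A minimal sketch of stride-aware access, with IntensityPlane's field names assumed for illustration:

// Hypothetical accessor; indexes with rowStride, not width, to skip row padding.
static int intensityAt(IntensityPlane plane, int x, int y) {
    return plane.plane[y * plane.rowStride + x] & 0xFF;
}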
 
Example 12
Project: BWS-Android   File: FacialRecognitionFragment.java
/**
 * lazily initialize ImageReader and select preview size
 */
private void setupPreviewSizeAndImageReader() {
    if (previewSize == null) {
        previewSize = cameraHelper.selectPreviewSize(openCamera);
    }

    if (imageReader == null) {
        int maxImages = 2;  // should be at least 2 according to ImageReader.acquireLatestImage() documentation
        imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, maxImages);
        imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image img = reader.acquireLatestImage();
                if (img != null) {

                    // Make an in-memory copy of the image so that the reader's image can be
                    // closed as soon as possible. This helps the thread running the preview stay up to date.
                    IntensityPlane imgCopy = IntensityPlane.extract(img);
                    img.close();

                    int imageRotation = cameraHelper.getImageRotation(openCamera, getRelativeDisplayRotation());

                    presenter.onImageCaptured(imgCopy, imageRotation);
                }
            }
        }, null);
    }
}
 
Example 13
Project: rtmp-rtsp-stream-client-java   File: Camera1Base.java
/**
 * Needs to be called after @prepareVideo and/or @prepareAudio.
 * This method overrides the resolution of @startPreview with the resolution set in @prepareVideo.
 * If you never called @startPreview, this method starts the preview for you at that resolution.
 *
 * @param url of the stream like:
 * protocol://ip:port/application/streamName
 *
 * RTSP: rtsp://192.168.1.1:1935/live/pedroSG94
 * RTSPS: rtsps://192.168.1.1:1935/live/pedroSG94
 * RTMP: rtmp://192.168.1.1:1935/live/pedroSG94
 * RTMPS: rtmps://192.168.1.1:1935/live/pedroSG94
 */
public void startStream(String url) {
  if (openGlView != null && Build.VERSION.SDK_INT >= 18) {
    if (videoEncoder.getRotation() == 90 || videoEncoder.getRotation() == 270) {
      openGlView.setEncoderSize(videoEncoder.getHeight(), videoEncoder.getWidth());
    } else {
      openGlView.setEncoderSize(videoEncoder.getWidth(), videoEncoder.getHeight());
    }
    openGlView.startGLThread();
    openGlView.addMediaCodecSurface(videoEncoder.getInputSurface());
    cameraManager =
        new Camera1ApiManager(openGlView.getSurfaceTexture(), openGlView.getContext());
    cameraManager.prepareCamera(videoEncoder.getWidth(), videoEncoder.getHeight(),
        videoEncoder.getFps(), ImageFormat.NV21);
  }
  startStreamRtp(url);
  videoEncoder.start();
  audioEncoder.start();
  cameraManager.start();
  microphoneManager.start();
  streaming = true;
  onPreview = true;
}
 
Example 14
Project: DeepImagePreview-Project   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 15
Project: Barcode-Reader   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 16
Project: Toodoo   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 17
Project: AndroidOCRFforID   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 18
Project: Moneycim   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 19
Project: Cable-Android   File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example 20
Project: trust-wallet-android   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 21
Project: xbot_head   File: CommentaryFragment.java
private void startPreview() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        Size previewSize = Util.getPreferredPreviewSize(
                configMap.getOutputSizes(ImageFormat.JPEG), textureView.getWidth(), textureView.getHeight());

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);
        captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(surface);

        cameraDevice.createCaptureSession(Arrays.asList(surface), captureSessionCallback, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
 
Example 22
Project: Fuse   File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 23
Project: NotifyTools   File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 24
Project: react-native-camera-android-simple   File: CameraManager.java
public void setDefaultCameraParameters(Camera camera, Camera.CameraInfo cameraInfo) {
    Camera.Parameters parameters = camera.getParameters();

    parameters.setPictureFormat(ImageFormat.JPEG);

    List<Camera.Size> supportedSizes = parameters.getSupportedPictureSizes();
    Camera.Size pictureSize = getBestSize(supportedSizes, 0);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);

    float whRatio = (float) pictureSize.width / pictureSize.height;

    List<Camera.Size> previewSupportedSizes = parameters.getSupportedPreviewSizes();
    Camera.Size previewSize = getBestSize(previewSupportedSizes, whRatio);
    parameters.setPreviewSize(previewSize.width, previewSize.height);

    List<String> supportedFocusModes = camera.getParameters().getSupportedFocusModes();
    boolean hasAutoFocus = supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO);

    if(hasAutoFocus) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }

    if(cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
        parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
    }

    List<String> supportedSceneModes = camera.getParameters().getSupportedSceneModes();
    boolean hasAutoScene = supportedSceneModes != null && supportedSceneModes.contains(Camera.Parameters.SCENE_MODE_AUTO);
    if(hasAutoScene) {
        parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
    }

    parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);

    int orientation = cameraInfo.orientation;
    parameters.setRotation(orientation);

    camera.setParameters(parameters);
}
 
Example 25
Project: libRtmp   File: AndroidUntil.java
public static void setPreviewFormat(Camera camera, Camera.Parameters parameters) throws CameraNotSupportException {
    // Set the image format for the preview callback
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);
        camera.setParameters(parameters);
    } catch (Exception e) {
        throw new CameraNotSupportException();
    }
}
 
Example 26
Project: seeta4Android   File: CameraWrapper.java
private void initCamera() {
    if (this.mCamera != null) {
        this.mCameraParameters = this.mCamera.getParameters();
        this.mCameraParameters.setPreviewFormat(ImageFormat.NV21);
        this.mCameraParameters.setFlashMode("off");
        this.mCameraParameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
        this.mCameraParameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
        Point p = MyApplication.getBestCameraResolution(this.mCameraParameters, MyApplication.getScreenMetrics());
        IMAGE_WIDTH = p.x;
        IMAGE_HEIGHT = p.y;
        this.mCameraParameters.setPreviewSize(IMAGE_WIDTH, IMAGE_HEIGHT);
        mCameraPreviewCallback = new CameraPreviewCallback();
        // Three callback buffers, each sized for one NV21 frame (width * height * 3 / 2 bytes).
        byte[] a = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] b = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] c = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        mCamera.addCallbackBuffer(a);
        mCamera.addCallbackBuffer(b);
        mCamera.addCallbackBuffer(c);
        mCamera.setPreviewCallbackWithBuffer(mCameraPreviewCallback);
        List<String> focusModes = this.mCameraParameters.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            this.mCameraParameters
                    .setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        this.mCamera.setParameters(this.mCameraParameters);
        this.mCamera.startPreview();

        this.mIsPreviewing = true;
    }
}
 
Example 27
Project: heifreader   File: HeifReader.java
private static Bitmap renderHevcImageWithFormat(ByteBuffer bitstream, ImageInfo info, int imageFormat) throws FormatFallbackException {
    try (ImageReader reader = ImageReader.newInstance(info.size.getWidth(), info.size.getHeight(), imageFormat, 1)) {
        renderHevcImage(bitstream, info, reader.getSurface());
        Image image = null;
        try {
            try {
                image = reader.acquireNextImage();
            } catch (UnsupportedOperationException ex) {
                throw new FormatFallbackException(ex);
            }

            switch (image.getFormat()) {
                case ImageFormat.YUV_420_888:
                case ImageFormat.YV12:
                    return convertYuv420ToBitmap(image);
                case ImageFormat.RGB_565:
                    return convertRgb565ToBitmap(image);
                default:
                    throw new RuntimeException("unsupported image format(" + image.getFormat() + ")");
            }
        } finally {
            if (image != null) {
                image.close();
            }
        }
    }
}
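A plausible caller for this method, using FormatFallbackException to retry with a different ImageReader format; the format order shown here is an assumption:

Bitmap bitmap;
try {
    bitmap = renderHevcImageWithFormat(bitstream, info, ImageFormat.YUV_420_888);
} catch (FormatFallbackException ex) {
    // Some decoders cannot emit YUV into an ImageReader surface; retry as RGB_565.
    bitmap = renderHevcImageWithFormat(bitstream, info, ImageFormat.RGB_565);
}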
 
Example 28
Project: heifreader   File: HeifReader.java
private static Bitmap convertYuv420ToBitmap(Image image) {
    RenderScript rs = mRenderScript;
    final int width = image.getWidth();
    final int height = image.getHeight();

    // prepare input Allocation for RenderScript
    Type.Builder inType = new Type.Builder(rs, Element.U8(rs)).setX(width).setY(height).setYuvFormat(ImageFormat.YV12);
    Allocation inAlloc = Allocation.createTyped(rs, inType.create(), Allocation.USAGE_SCRIPT);
    byte[] rawBuffer = new byte[inAlloc.getBytesSize()];
    int lumaSize = width * height;
    int chromaSize = (width / 2) * (height / 2);
    Image.Plane[] planes = image.getPlanes();
    planes[0].getBuffer().get(rawBuffer, 0, lumaSize);
    planes[1].getBuffer().get(rawBuffer, lumaSize, chromaSize);
    planes[2].getBuffer().get(rawBuffer, lumaSize + chromaSize, chromaSize);
    inAlloc.copyFromUnchecked(rawBuffer);

    // prepare output Allocation for RenderScript
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Allocation outAlloc = Allocation.createFromBitmap(rs, bmp, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);

    // convert YUV to RGB colorspace
    ScriptC_yuv2rgb converter = new ScriptC_yuv2rgb(rs);
    converter.set_gYUV(inAlloc);
    converter.forEach_convert(outAlloc);
    outAlloc.copyTo(bmp);
    return bmp;
}
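The rawBuffer layout follows the YV12-style plane order set on the Type: the full-resolution luma plane first, then two quarter-resolution chroma planes. Worked numbers for a 640x360 image:

int lumaSize = 640 * 360;               // 230,400 bytes of Y
int chromaSize = (640 / 2) * (360 / 2); // 57,600 bytes per chroma plane
int total = lumaSize + 2 * chromaSize;  // 345,600 bytes = width * height * 3 / 2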
 
Example 29
Project: BuddyBook   File: CameraSource.java
/**
 * Opens the camera and applies the user settings.
 *
 * @throws RuntimeException if the method fails
 */
@SuppressLint("InlinedApi")
private Camera createCamera() {
    int requestedCameraId = getIdForRequestedCamera(mFacing);
    if (requestedCameraId == -1) {
        throw new RuntimeException("Could not find requested camera.");
    }
    Camera camera = Camera.open(requestedCameraId);

    SizePair sizePair = selectSizePair(camera, mRequestedPreviewWidth, mRequestedPreviewHeight);
    if (sizePair == null) {
        throw new RuntimeException("Could not find suitable preview size.");
    }
    Size pictureSize = sizePair.pictureSize();
    mPreviewSize = sizePair.previewSize();

    int[] previewFpsRange = selectPreviewFpsRange(camera, mRequestedFps);
    if (previewFpsRange == null) {
        throw new RuntimeException("Could not find suitable preview frames per second range.");
    }

    Camera.Parameters parameters = camera.getParameters();

    if (pictureSize != null) {
        parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
    }

    parameters.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    parameters.setPreviewFpsRange(
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    parameters.setPreviewFormat(ImageFormat.NV21);

    setRotation(camera, parameters, requestedCameraId);

    if (mFocusMode != null && parameters.getSupportedFocusModes().contains(mFocusMode)) {
        parameters.setFocusMode(mFocusMode);
    }

    // record the focus mode actually in effect
    mFocusMode = parameters.getFocusMode();

    if (mFlashMode != null && parameters.getSupportedFlashModes() != null
            && parameters.getSupportedFlashModes().contains(mFlashMode)) {
        parameters.setFlashMode(mFlashMode);
    }

    // record the flash mode actually in effect
    mFlashMode = parameters.getFlashMode();

    camera.setParameters(parameters);

    // Four frame buffers are needed for working with the camera:
    //
    //   one for the frame that is currently being executed upon in doing detection
    //   one for the next pending frame to process immediately upon completing detection
    //   two for the frames that the camera uses to populate future preview images
    camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));

    return camera;
}
 
Example 30
Project: Microsphere   File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 31
Project: FamilyBond   File: ImageUtil.java
public static byte[] imageToByteArray(Image image) {
    byte[] data = null;
    if (image.getFormat() == ImageFormat.JPEG) {
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (image.getFormat() == ImageFormat.YUV_420_888) {
        data = NV21toJPEG(
                YUV_420_888toNV21(image),
                image.getWidth(), image.getHeight());
    }
    return data;
}
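The NV21toJPEG helper referenced above is not shown here; a minimal sketch of what it could look like, assuming it compresses through YuvImage like the other examples on this page:

// Hypothetical implementation of the helper referenced above.
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    return out.toByteArray();
}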
 
Example 32
Project: Eye   File: HomeActivity.java
/**
 * Initialises the output surfaces for the camera's preview.
 * There will be two output surfaces -
 * 1) mSurfaceView : The surface to just show the preview frame.
 * 2) mImageReader : The surface to get the actual pixel image
 * data of the preview frame.
 */
private void setupOutputSurfaces() {

    outputSurfaces = new ArrayList<>(2);

    // For the live preview.
    mSurfaceView.getHolder().setFixedSize(screenMaxX, screenMaxY);
    outputSurfaces.add(mSurfaceView.getHolder().getSurface());

    // For extracting the image.
    mImageReader = ImageReader.newInstance(screenMaxX, screenMaxY,
            ImageFormat.YUV_420_888, maxAcquired);
    mImageReader.setOnImageAvailableListener(getImageAvailableListener(), null);
    outputSurfaces.add(mImageReader.getSurface());
}
 
Example 33
Project: PXLSRT   File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
 
Example 34
Project: android-imaging-utils   File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 35
Project: hella-renderscript   File: RsUtil.java
@RequiresApi(18)
public static Type createYuvType(RenderScript rs, int x, int y, int yuvFormat) {
    boolean supported = yuvFormat == ImageFormat.NV21 || yuvFormat == ImageFormat.YV12;
    if (Build.VERSION.SDK_INT >= 19) {
        supported |= yuvFormat == ImageFormat.YUV_420_888;
    }
    if (!supported) {
        throw new IllegalArgumentException("invalid yuv format: " + yuvFormat);
    }
    return new Type.Builder(rs, createYuvElement(rs)).setX(x).setY(y).setYuvFormat(yuvFormat)
            .create();
}
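Example use of the helper, assuming a live RenderScript context named rs:

Type yuvType = RsUtil.createYuvType(rs, 640, 480, ImageFormat.NV21);
Allocation yuvAlloc = Allocation.createTyped(rs, yuvType, Allocation.USAGE_SCRIPT);
// yuvAlloc can now back, e.g., a ScriptIntrinsicYuvToRGB conversion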
 
Example 36
Project: SmartMath   File: CameraPreview.java
public void setCamera(Camera camera) {
    mCamera = camera;
    if (mCamera != null) {
        Parameters parameters = mCamera.getParameters();
        mSupportedPreviewSizes = parameters.getSupportedPreviewSizes();

        // Prefer NV21 (yuv420sp), the default preview format.
        // getSupportedPreviewFormats() never returns null.
        List<Integer> formatsList = parameters.getSupportedPreviewFormats();
        if (formatsList.contains(ImageFormat.NV21)) {
            parameters.setPreviewFormat(ImageFormat.NV21);
        }

        // Set the focus mode depending on what is supported; FOCUS_MODE_AUTO is
        // preferred. Supported modes were already checked in the main activity.
        if (msnFocusMode == 2) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
        } else if (msnFocusMode == 1) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
        } else {
            // auto focus by default
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }

        /* if ((parameters.getMaxExposureCompensation() != 0 || parameters.getMinExposureCompensation() != 0)
                && ActivitySettings.msnPhotoTakenFrom == 1) {  // screen mode
            parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
        } */
        // Exposure is not adjusted; screen mode does not seem to bring much benefit.
        parameters.setExposureCompensation(0);

        List<String> scenesList = parameters.getSupportedSceneModes();
        if (scenesList != null && scenesList.contains(Camera.Parameters.SCENE_MODE_STEADYPHOTO)) {
            // this may crash on some devices
            parameters.setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO);
        }
        boolean bSuccessful = setCameraParams(mCamera, parameters);

        requestLayout();
    }
}
 
Example 37
Project: androidthings-imageclassifier   File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
 
Example 38
Project: androidthings-imageclassifier   File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
 
Example 39
Project: Fatigue-Detection   File: CameraEngine.java
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing = facingFront ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId = getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        camera.setPreviewCallbackWithBuffer(this);
        initRotateDegree(currentCameraId);
        if (camera != null) {
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList = mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList = mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList = mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);

            previewSize = choosePreferredSize(supportedPreviewSizesList, preferredRatio);
            Camera.Size photoSize = choosePreferredSize(supportedPictureSizesList, preferredRatio);

            frameHeight = previewSize.width;
            frameWidth = previewSize.height;
            Log.d(TAG, "openCamera: choose preview size " + previewSize.height + "x" + previewSize.width);
            mParams.setPreviewSize(frameHeight, frameWidth);

            mParams.setPictureSize(photoSize.width, photoSize.height);
            Log.d(TAG, "openCamera: choose photo size " + photoSize.height + "x" + photoSize.width);

            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            // Callback buffer sized for one preview frame: width * height * bitsPerPixel / 8.
            int size = frameWidth * frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer == null || mBuffer.length != size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened = true;
        }
    }
}
 
Example 40
Project: CSVideo   File: CsVideo.java
/**
 * Set the camera parameters
 */
private void setCameraParameters() {
    if (null != camera) {
        camera.setDisplayOrientation(90);
        Camera.Parameters params = camera.getParameters();
        Camera.Size preViewSize = getOptimalSize(params.getSupportedPreviewSizes(), 1920, 1080);
        if (null != preViewSize) {
            params.setPreviewSize(preViewSize.width, preViewSize.height);
        }

        Camera.Size pictureSize = getOptimalSize(params.getSupportedPictureSizes(), 1920, 1080);
        if (null != pictureSize) {
            params.setPictureSize(pictureSize.width, pictureSize.height);
        }
        // Set the picture format
        params.setPictureFormat(ImageFormat.JPEG);
        params.setJpegQuality(100);
        params.setJpegThumbnailQuality(100);

        List<String> modes = params.getSupportedFocusModes();
        if (modes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            // auto-focus mode is supported
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }
        camera.setParameters(params);
    }
}
 
Example 41
Project: sample-tensorflow-imageclassifier   File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
 
Example 42
Project: polling-station-app   File: CameraHandler.java
/**
 * Sets the preview size of the fragment
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 * @param swappedDimensions - boolean indicating if dimensions need to be swapped
 * @param map - configuration map of the camera
 * @return mPreviewSize - the preview size that is set in the fragment
 */
private Size setFragmentPreviewSize(int width, int height, boolean swappedDimensions, StreamConfigurationMap map) {
    // For still image captures, we use the largest available size.
    Size largest = Collections.max(
            Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
            new CameraFragmentUtil.CompareSizesByArea());

    Point displaySize = new Point();
    fragment.getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
    int rotatedPreviewWidth = width;
    int rotatedPreviewHeight = height;
    int maxPreviewWidth = displaySize.x;
    int maxPreviewHeight = displaySize.y;

    if (swappedDimensions) {
        rotatedPreviewWidth = height;
        rotatedPreviewHeight = width;
        maxPreviewWidth = displaySize.y;
        maxPreviewHeight = displaySize.x;
    }

    if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
        maxPreviewWidth = MAX_PREVIEW_WIDTH;
    }
    if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
        maxPreviewHeight = MAX_PREVIEW_HEIGHT;
    }
    // Attempting to use too large a preview size could exceed the camera bus' bandwidth
    // limitation, resulting in gorgeous previews but the storage of garbage capture data.
    Size mPreviewSize = CameraFragmentUtil.chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            maxPreviewHeight, largest);
    fragment.setPreviewSize(mPreviewSize);
    return mPreviewSize;
}
 
Example 43
Project: rtmp-rtsp-stream-client-java   File: Camera1ApiManager.java
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
  //convert yv12 to nv21
  if (imageFormat == ImageFormat.YV12) {
    data = YUVUtil.YV12toNV21(data, width, height);
  }
  //Only if front camera and portrait or reverse portrait
  if (isFrontCamera && (orientation == 90 || orientation == 270)) {
    data = YUVUtil.rotateNV21(data, width, height, 180);
  }
  getCameraData.inputYUVData(data);
  camera.addCallbackBuffer(yuvBuffer);
}
 
Example 44
Project: AppRTC-Android   File: CameraEnumerationAndroid.java
public static int frameSize(int width, int height, int imageFormat) {
  if (imageFormat != ImageFormat.NV21) {
    throw new UnsupportedOperationException("Don't know how to calculate "
        + "the frame size of non-NV21 image formats.");
  }
  return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
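Worked example: NV21 carries 12 bits per pixel, so a 640x480 frame occupies 640 * 480 * 12 / 8 = 460,800 bytes (i.e. width * height * 3 / 2):

int bytes = frameSize(640, 480, ImageFormat.NV21); // 460800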
 
Example 45
Project: androidthings-kuman-sm9   File: Camera.java
/**
 * Initialize the camera device
 */
@SuppressLint("MissingPermission")
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Timber.e(e, "Cam access exception getting IDs");
    }
    if (camIds.length < 1) {
        Timber.d("No cameras found");
        return;
    }
    String id = camIds[0];
    Timber.d("Using camera id %s", id);

    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);

    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Timber.e(cae, "Camera access exception");
    }
}
 
Example 46
Project: android-things-drawbot   File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
 
Example 47
Project: robo-car   File: PiCameraDriver.java
/**
 * Initialize the camera device
 */
void initializeCamera(@NonNull Context context,
                      @NonNull Handler backgroundHandler,
                      @NonNull ImageReader.OnImageAvailableListener imageAvailableListener) {
    mBackgroundHandler = backgroundHandler;

    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.e(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.e(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);

    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT, ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(imageAvailableListener, backgroundHandler);

    // Open the camera resource
    try {
        if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            Log.d(TAG, "initializeCamera: Camera permission not available.");
            return;
        }
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
 
Example 48
Project: MegviiFacepp-Android-SDK   File: ConUtil.java   Source Code and License 5 votes vote down vote up
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
    Camera.Size size = _camera.getParameters().getPreviewSize();
    try {
        // The YuvImage constructor throws rather than returning null,
        // so no null check is needed after it
        YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
        Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        stream.close();
        return bmp;
    } catch (Exception ex) {
        // Don't swallow the failure silently; log it for diagnosis
        Log.e("ConUtil", "Failed to decode NV21 preview frame to JPEG", ex);
    }
    return null;
}
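This helper would typically be invoked from the legacy preview callback, where frames arrive as NV21 by default. The wiring below is an illustrative sketch, not code from the project:

// Hypothetical usage: decode each NV21 preview frame into a Bitmap.
camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Bitmap frame = ConUtil.decodeToBitMap(data, camera);
        if (frame != null) {
            // hand the bitmap to the face detection pipeline
        }
    }
});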
 
Example 49
Project: AndroidRTC   File: CameraEnumerationAndroid.java   Source Code and License 5 votes vote down vote up
public static int frameSize(int width, int height, int imageFormat) {
  if (imageFormat != ImageFormat.NV21) {
    throw new UnsupportedOperationException("Don't know how to calculate "
        + "the frame size of non-NV21 image formats.");
  }
  return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
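ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12 (8 bits of luma per pixel plus 4:2:0 subsampled chroma), so the buffer size for a common preview resolution works out as:

// 640 * 480 pixels * 12 bits / 8 = 460,800 bytes per NV21 frame
int bytes = frameSize(640, 480, ImageFormat.NV21);  // == 460800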
 
Example 50
Project: LongImageCamera   File: Camera2Api23.java   Source Code and License 5 votes vote down vote up
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes (API 23+; null if BURST_CAPTURE is unsupported)
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
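StreamConfigurationMap.getHighResolutionOutputSizes() exists only on API 23 and later, which is why this override lives in a class named Camera2Api23. Code that may run on older API levels would need a version guard along these lines (the guard itself is an assumption, not shown in the project):

// Hypothetical guard for code paths that can run below API 23.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
    android.util.Size[] hiRes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    // hiRes can still be null if the device lacks the BURST_CAPTURE capability
}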
 
Example 51
Project: phonk   File: CameraNew2.java   Source Code and License 5 votes vote down vote up
/**
 * Sets up member variables related to camera.
 */
private void setUpCameraOutputs(int width, int height) throws CameraAccessException {
    MLog.d(TAG, "setUpCameraOutputs");

    String[] cameras = mCameraManager.getCameraIdList();

    for (int i = 0; i < cameras.length; i++) {
        MLog.d(TAG, "camera " + cameras[i]);
    }

    String cameraId = cameras[0];
    CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
    List<CameraCharacteristics.Key<?>> keys = characteristics.getKeys();
    for (int i = 0; i < keys.size(); i++) {
        Object val = characteristics.get(keys.get(i));
        MLog.d(TAG, "characteristic " + keys.get(i) + " " + val);
    }

    // Check which way this camera faces
    Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
    StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

    // For still image captures, we use the largest available size.
    Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
    mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/2);
    mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);

    // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
    // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
    // garbage capture data.
    int rotatedPreviewWidth = 500;
    int rotatedPreviewHeight = 500;
    int maxPreviewWidth = 500;
    int maxPreviewHeight = 500;
    mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            maxPreviewHeight, largest);

    //TODO mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());

    mCameraId = cameraId;
}
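Collections.max() above depends on a CompareSizesByArea comparator that the excerpt omits. The standard formulation from Google's Camera2 samples, which this project presumably mirrors, is:

// Comparator assumed by the example: orders Sizes by total pixel area.
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to avoid overflow when multiplying large dimensions
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}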
 
Example 52
Project: VideoCRE   File: CameraEnumerationAndroid.java   Source Code and License 5 votes vote down vote up
public static int frameSize(int width, int height, int imageFormat) {
  if (imageFormat != ImageFormat.NV21) {
    throw new UnsupportedOperationException("Don't know how to calculate "
        + "the frame size of non-NV21 image formats.");
  }
  return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
 
Example 53
Project: live_master   File: SrsEncoder.java   Source Code and License 5 votes vote down vote up
public SrsEncoder(SrsFlvMuxer flvMuxer, SrsMp4Muxer mp4Muxer) {
    this.flvMuxer = flvMuxer;
    this.mp4Muxer = mp4Muxer;

    mVideoColorFormat = chooseVideoEncoder();
    if (mVideoColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
        VFORMAT = ImageFormat.YV12;
    } else if (mVideoColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        VFORMAT = ImageFormat.NV21;
    } else {
        throw new IllegalStateException("Unsupported color format!");
    }
}
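chooseVideoEncoder() is not part of the excerpt; conceptually it scans MediaCodecList for an AVC encoder and reports which YUV420 color format it accepts, which the constructor then maps to ImageFormat.YV12 or ImageFormat.NV21. A loose sketch under that assumption:

// Hypothetical sketch: find a YUV420 color format supported by an AVC encoder.
private int chooseVideoEncoder() {
    for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (!info.isEncoder()) continue;
        for (String type : info.getSupportedTypes()) {
            if (!type.equalsIgnoreCase("video/avc")) continue;
            MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
            for (int format : caps.colorFormats) {
                if (format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                        || format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                    return format;
                }
            }
        }
    }
    return 0; // no suitable encoder found
}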
 
Example 54
Project: EvilsLive   File: EvilsLiveStreamerConfig.java   Source Code and License 5 votes vote down vote up
private EvilsLiveStreamerConfig() {
    this.streamUrl = "";
    this.videoresolution = E640P;
    this.encodeMethod = Defines.EencodeMethod.SOFTWARE_ENCODE;
    this.imageFormat = ImageFormat.NV21;
    this.previewFrameRate = Defines.DEFAULT_FRAME_RATE;
    this.encodeFrameRate = Defines.DEFAULT_FRAME_RATE;
    this.cameraFacing = Defines.EcameraFacing.CAMERA_FACING_FRONT;
}
 
Example 55
Project: xbot_head   File: InteractionFragment.java   Source Code and License 5 votes vote down vote up
@Override
public void startCamera() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraID);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        int width = textureView.getWidth();
        int height = textureView.getHeight();

        // Pick a suitable preview size to avoid stretching the image
//        previewSize = getPreferredPreviewSize(configMap.getOutputSizes(SurfaceTexture.class), width, height);
        previewSize = Util.getPreferredPreviewSize(configMap.getOutputSizes(ImageFormat.JPEG), width, height);
        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Log.i(TAG, "previewSize info:" + previewSize.getWidth() + "x" + previewSize.getHeight());

        surface = new Surface(surfaceTexture);

        builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        if (surface.isValid()) {
            builder.addTarget(surface);
        }
        Log.i(TAG, "mTextureView info:" + textureView.getWidth() + "x" + textureView.getHeight());

        cameraDevice.createCaptureSession(Arrays.asList(surface), sessionStateCallback, null);

    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
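The sessionStateCallback handed to createCaptureSession() is declared elsewhere in the fragment. A minimal sketch of what such a callback usually does (the field names are assumptions):

// Hypothetical sketch of the session callback the example wires up.
private final CameraCaptureSession.StateCallback sessionStateCallback =
        new CameraCaptureSession.StateCallback() {
    @Override
    public void onConfigured(CameraCaptureSession session) {
        try {
            // Start streaming preview frames to the TextureView's surface
            session.setRepeatingRequest(builder.build(), null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
        Log.e(TAG, "Capture session configuration failed");
    }
};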
 
Example 56
Project: DNNLibrary   File: JavaCameraView.java   Source Code and License 5 votes vote down vote up
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 57
Project: Team9261-2017-2018   File: JavaCameraView.java   Source Code and License 5 votes vote down vote up
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
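Both JavaCameraView copies above assume the preview was explicitly put into one of those two formats when the camera was configured. With the legacy API that is done through Camera.Parameters, roughly:

// Hypothetical configuration matching the formats rgba() can handle.
Camera.Parameters params = mCamera.getParameters();
params.setPreviewFormat(ImageFormat.NV21);  // or ImageFormat.YV12
mCamera.setParameters(params);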
 
Example 58
Project: SocialPaka   File: CameraSource.java   Source Code and License 4 votes vote down vote up
/**
 * Opens the camera and applies the user settings.
 *
 * @throws RuntimeException if the method fails
 */
@SuppressLint("InlinedApi")
private Camera createCamera() {
    int requestedCameraId = getIdForRequestedCamera(mFacing);
    if (requestedCameraId == -1) {
        throw new RuntimeException("Could not find requested camera.");
    }
    Camera camera = Camera.open(requestedCameraId);

    SizePair sizePair = selectSizePair(camera, mRequestedPreviewWidth, mRequestedPreviewHeight);
    if (sizePair == null) {
        throw new RuntimeException("Could not find suitable preview size.");
    }
    Size pictureSize = sizePair.pictureSize();
    mPreviewSize = sizePair.previewSize();

    int[] previewFpsRange = selectPreviewFpsRange(camera, mRequestedFps);
    if (previewFpsRange == null) {
        throw new RuntimeException("Could not find suitable preview frames per second range.");
    }

    Camera.Parameters parameters = camera.getParameters();

    if (pictureSize != null) {
        parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
    }

    parameters.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    parameters.setPreviewFpsRange(
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    parameters.setPreviewFormat(ImageFormat.NV21);

    setRotation(camera, parameters, requestedCameraId);

    if (mFocusMode != null) {
        if (parameters.getSupportedFocusModes().contains(
                mFocusMode)) {
            parameters.setFocusMode(mFocusMode);
        } else {
            Log.i(TAG, "Camera focus mode: " + mFocusMode +
                    " is not supported on this device.");
        }
    }

    // setting mFocusMode to the one set in the params
    mFocusMode = parameters.getFocusMode();

    if (mFlashMode != null) {
        if (parameters.getSupportedFlashModes().contains(
                mFlashMode)) {
            parameters.setFlashMode(mFlashMode);
        } else {
            Log.i(TAG, "Camera flash mode: " + mFlashMode +
                    " is not supported on this device.");
        }
    }

    // setting mFlashMode to the one set in the params
    mFlashMode = parameters.getFlashMode();

    camera.setParameters(parameters);

    // Four frame buffers are needed for working with the camera:
    //
    //   one for the frame that is currently being executed upon in doing detection
    //   one for the next pending frame to process immediately upon completing detection
    //   two for the frames that the camera uses to populate future preview images
    camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));

    return camera;
}
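createPreviewBuffer() sizes each of those four buffers from the NV21 bit depth. The stock vision CameraSource, which this class appears to derive from, computes it roughly as follows (reproduced here as a sketch, not verbatim project code):

// Sketch of the buffer allocation the four addCallbackBuffer() calls depend on.
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);  // 12
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up, plus one byte of padding so the camera accepts the buffer.
    return new byte[(int) Math.ceil(sizeInBits / 8.0d) + 1];
}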
 
Example 59
Project: SocialPaka   File: CameraSource.java   Source Code and License 4 votes vote down vote up
/**
 * As long as the processing thread is active, this executes detection on frames
 * continuously.  The next pending frame is either immediately available or hasn't been
 * received yet.  Once it is available, we transfer the frame info to local variables and
 * run detection on that frame.  It immediately loops back for the next frame without
 * pausing.
 * <p/>
 * If detection takes longer than the time in between new frames from the camera, this will
 * mean that this loop will run without ever waiting on a frame, avoiding any context
 * switching or frame acquisition time latency.
 * <p/>
 * If you find that this is using more CPU than you'd like, you should probably decrease the
 * FPS setting above to allow for some idle time in between frames.
 */
@Override
public void run() {
    Frame outputFrame;
    ByteBuffer data;

    while (true) {
        synchronized (mLock) {
            while (mActive && (mPendingFrameData == null)) {
                try {
                    // Wait for the next frame to be received from the camera, since we
                    // don't have it yet.
                    mLock.wait();
                } catch (InterruptedException e) {
                    Log.d(TAG, "Frame processing loop terminated.", e);
                    return;
                }
            }

            if (!mActive) {
                // Exit the loop once this camera source is stopped or released.  We check
                // this here, immediately after the wait() above, to handle the case where
                // setActive(false) had been called, triggering the termination of this
                // loop.
                return;
            }

            outputFrame = new Frame.Builder()
                    .setImageData(mPendingFrameData, mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(), ImageFormat.NV21)
                    .setId(mPendingFrameId)
                    .setTimestampMillis(mPendingTimeMillis)
                    .setRotation(mRotation)
                    .build();

            // Hold onto the frame data locally, so that we can use this for detection
            // below.  We need to clear mPendingFrameData to ensure that this buffer isn't
            // recycled back to the camera before we are done using that data.
            data = mPendingFrameData;
            mPendingFrameData = null;
        }

        // The code below needs to run outside of synchronization, because this will allow
        // the camera to add pending frame(s) while we are running detection on the current
        // frame.

        try {
            mDetector.receiveFrame(outputFrame);
        } catch (Throwable t) {
            Log.e(TAG, "Exception thrown from receiver.", t);
        } finally {
            mCamera.addCallbackBuffer(data.array());
        }
    }
}
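This loop is fed by the CameraPreviewCallback registered back in createCamera(). In the stock CameraSource design it simply forwards each buffer into the synchronized state that run() waits on; a condensed, assumed sketch:

// Hypothetical sketch of the callback that feeds the loop above: setNextFrame
// stores the buffer as mPendingFrameData, stamps the id and time, and
// notifies mLock to wake the processing thread.
private class CameraPreviewCallback implements Camera.PreviewCallback {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        mFrameProcessor.setNextFrame(data, camera);
    }
}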
 
Example 60
Project: CameraFragment   File: Camera2Manager.java   Source Code and License 4 votes vote down vote up
@Override
public Size getPhotoSizeForQuality(@Configuration.MediaQuality int mediaQuality) {
    final StreamConfigurationMap map = currentCameraId.equals(faceBackCameraId) ? backCameraStreamConfigurationMap : frontCameraStreamConfigurationMap;
    return CameraHelper.getPictureSize(Size.fromArray2(map.getOutputSizes(ImageFormat.JPEG)), mediaQuality);
}