android.graphics.ImageFormat Java Examples

The following examples show how to use android.graphics.ImageFormat. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: CameraSource.java    From samples-android with Apache License 2.0 6 votes vote down vote up
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Widen to long BEFORE multiplying so large preview sizes cannot overflow int arithmetic.
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up to whole bytes; the extra byte guards against any rounding shortfall.
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example #2
Source File: STUtils.java    From Fatigue-Detection with MIT License 6 votes vote down vote up
/**
 * Converts an NV21 frame to an RGBA Bitmap, using the RenderScript YUV->RGB
 * intrinsic when the device exposes it; otherwise falls back to compressing
 * through YuvImage/JPEG. The RenderScript context, script, and allocations are
 * cached in static fields and sized on first use.
 */
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
	
	TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
	
	Rect rect = new Rect(0, 0, width, height);
	
	try {
		// Probe for the YUV RenderScript APIs; a missing class/field throws and
		// routes us to the YuvImage fallback in the catch block below.
		Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
		Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
    	byte[] imageData = nv21;
    	if (mRS == null) {
    		mRS = RenderScript.create(context);
    		mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
    		Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
    		tb.setX(width);
    		tb.setY(height);
    		tb.setMipmaps(false);
    		tb.setYuvFormat(ImageFormat.NV21);
    		ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
    		timings.addSplit("Prepare for ain");
    		Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
    		tb2.setX(width);
    		tb2.setY(height);
    		tb2.setMipmaps(false);
    		// BUG FIX: usage flags must be combined with bitwise OR. The original
    		// used '&', which ANDs the flag bits together instead of combining them.
    		aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);
    		timings.addSplit("Prepare for aOut");
    		bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    		timings.addSplit("Create Bitmap");
		}
    	ain.copyFrom(imageData);
		timings.addSplit("ain copyFrom");
		mYuvToRgb.setInput(ain);
		timings.addSplit("setInput ain");
		mYuvToRgb.forEach(aOut);
		timings.addSplit("NV21 to ARGB forEach");
		aOut.copyTo(bitmap);
		timings.addSplit("Allocation to Bitmap");
	} catch (Exception e) {
		// Fallback path: encode the NV21 bytes to JPEG and decode to a Bitmap.
		YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
		timings.addSplit("NV21 bytes to YuvImage");
		
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
	}
	
   	timings.dumpToLog();
   	return bitmap;
}
 
Example #3
Source File: CameraSource.java    From trust-wallet-android-source with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Widen to long BEFORE multiplying so large preview sizes cannot overflow int arithmetic.
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up to whole bytes; the extra byte guards against any rounding shortfall.
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example #4
Source File: VideoCapture.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Maps the Android {@code ImageFormat} constant held in {@code mImageFormat}
 * onto the corresponding native {@code AndroidImageFormatList} colorspace value.
 * Unrecognised formats (including {@code ImageFormat.UNKNOWN}) map to UNKNOWN.
 */
@CalledByNative
public int getColorspace() {
    if (mImageFormat == ImageFormat.YV12) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
    }
    if (mImageFormat == ImageFormat.NV21) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
    }
    if (mImageFormat == ImageFormat.YUY2) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
    }
    if (mImageFormat == ImageFormat.NV16) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
    }
    if (mImageFormat == ImageFormat.JPEG) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
    }
    if (mImageFormat == ImageFormat.RGB_565) {
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
    }
    return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
}
 
Example #5
Source File: CameraSource.java    From Bluefruit_LE_Connect_Android with MIT License 6 votes vote down vote up
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Widen to long BEFORE multiplying so large preview sizes cannot overflow int arithmetic.
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up to whole bytes; the extra byte guards against any rounding shortfall.
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example #6
Source File: InstantCameraView.java    From Telegram-FOSS with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Creates the round-video camera session on the UI thread: sizes the preview
 * SurfaceTexture, builds a CameraSession for the selected camera with JPEG
 * picture format, and asks the CameraController to open it.
 */
private void createCamera(final SurfaceTexture surfaceTexture) {
    AndroidUtilities.runOnUIThread(() -> {
        // The camera thread may have been torn down before this runnable runs.
        if (cameraThread == null) {
            return;
        }
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("create camera session");
        }

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        cameraSession = new CameraSession(selectedCamera, previewSize, pictureSize, ImageFormat.JPEG);
        cameraThread.setCurrentSession(cameraSession);
        CameraController.getInstance().openRound(cameraSession, surfaceTexture, () -> {
            // Open-success callback: mark the session initialized.
            if (cameraSession != null) {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("camera initied");
                }
                cameraSession.setInitied();
            }
        }, () -> cameraThread.setCurrentSession(cameraSession));
    });
}
 
Example #7
Source File: CameraSource.java    From VehicleInfoOCR with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Allocates a byte array sized for one NV21 preview frame at the given preview
 * dimensions and registers it in the byte-array-to-ByteBuffer lookup map.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
@SuppressLint("InlinedApi")
private byte[] createPreviewBuffer(Size previewSize) {
    // NV21 frame size in bits, computed in long to avoid int overflow.
    long bitCount =
            (long) previewSize.getHeight() * previewSize.getWidth()
                    * ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Round up to whole bytes, plus one spare byte.
    int byteCount = (int) Math.ceil(bitCount / 8.0d) + 1;

    // Allocate the array first and wrap it, rather than using ByteBuffer.allocate(),
    // so that the buffer is guaranteed to be backed by exactly this array.
    byte[] frameBytes = new byte[byteCount];
    ByteBuffer wrapped = ByteBuffer.wrap(frameBytes);
    if (!wrapped.hasArray() || wrapped.array() != frameBytes) {
        // Should never happen; if it did, the preview content could not be handed
        // to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    bytesToByteBuffer.put(frameBytes, wrapped);
    return frameBytes;
}
 
Example #8
Source File: CameraPreview.java    From Android-IP-Camera with MIT License 6 votes vote down vote up
/**
 * Creates the preview surface, forces a 640x480 preview size on the camera,
 * and computes the expected byte length of one preview frame.
 */
public CameraPreview(Context context, Camera camera) {
    super(context);
    mCamera = camera;

    mHolder = getHolder();
    mHolder.addCallback(this);
    // Deprecated, but still required on Android versions prior to 3.0.
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

    Parameters params = mCamera.getParameters();
    for (Size supported : params.getSupportedPreviewSizes()) {
        Log.i(TAG, "preview size = " + supported.width + ", " + supported.height);
    }

    params.setPreviewSize(640, 480); // set preview size. smaller is better
    mCamera.setParameters(params);

    // Re-read the parameters: the camera may have adjusted the requested size.
    mPreviewSize = mCamera.getParameters().getPreviewSize();
    Log.i(TAG, "preview size = " + mPreviewSize.width + ", " + mPreviewSize.height);

    // One frame occupies width * height * bitsPerPixel / 8 bytes.
    int previewFormat = mCamera.getParameters().getPreviewFormat();
    mFrameLength = mPreviewSize.width * mPreviewSize.height * ImageFormat.getBitsPerPixel(previewFormat) / 8;
}
 
Example #9
Source File: CameraImplV2.java    From habpanelviewer with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Records the new device orientation and, when a preview is currently running,
 * re-derives the preview transform for the updated rotation.
 */
@Override
public void setDeviceRotation(int deviceOrientation) {
    mDeviceOrientation = deviceOrientation;

    // Only a running preview has a transform that needs reconfiguring.
    if (!isPreviewRunning()) {
        return;
    }

    try {
        CameraCharacteristics characteristics =
                mCamManager.getCameraCharacteristics(mCamera.getId());
        StreamConfigurationMap configMap =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configMap == null) {
            Log.w(TAG, "Could not find a valid preview size");
            return;
        }
        Point preview = chooseOptimalSize(toPointArray(configMap.getOutputSizes(ImageFormat.YUV_420_888)));
        setDeviceOrientation(preview);
    } catch (CameraAccessException e) {
        Log.e(TAG, "Failed to set device orientation", e);
    }
}
 
Example #10
Source File: YuvToRgb.java    From unity-android-native-camera with MIT License 6 votes vote down vote up
/**
 * Builds the RenderScript allocations used for YUV->RGB conversion: a
 * YUV_420_888 input allocation, an RGBA output allocation, and a packed-int
 * output allocation, all sized to mInputSize.
 */
private void createAllocations(RenderScript rs) {
    int width = mInputSize.getWidth();
    int height = mInputSize.getHeight();

    mOutBufferInt = new int[width * height];

    // Input: YUV_420_888 frames delivered through an IO surface.
    Type.Builder yuvType = new Type.Builder(rs, Element.YUV(rs));
    yuvType.setX(width);
    yuvType.setY(height);
    yuvType.setYuvFormat(ImageFormat.YUV_420_888);
    mInputAllocation = Allocation.createTyped(rs, yuvType.create(),
            Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT);

    // Outputs: an RGBA allocation (IO output) and a plain packed-int allocation.
    Type rgbaType = Type.createXY(rs, Element.RGBA_8888(rs), width, height);
    Type packedIntType = Type.createXY(rs, Element.U32(rs), width, height);

    mOutputAllocation = Allocation.createTyped(rs, rgbaType,
            Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT);
    mOutputAllocationInt = Allocation.createTyped(rs, packedIntType,
            Allocation.USAGE_SCRIPT);
}
 
Example #11
Source File: CameraSource.java    From mlkit-material-android with Apache License 2.0 6 votes vote down vote up
/**
 * Allocates one preview-callback buffer sized for the current camera settings
 * and registers it in the byte-array-to-ByteBuffer lookup map.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings.
 */
private byte[] createPreviewBuffer(Size previewSize) {
  // Frame size in bits, computed in long to avoid int overflow.
  long sizeInBits =
      (long) previewSize.getHeight()
          * previewSize.getWidth()
          * ImageFormat.getBitsPerPixel(IMAGE_FORMAT);
  // Whole bytes, rounded up, with one spare byte.
  int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

  // Allocate the array and wrap it (instead of using ByteBuffer.allocate()) so
  // the buffer is guaranteed to be backed by exactly this array.
  byte[] frameData = new byte[bufferSize];
  ByteBuffer wrapper = ByteBuffer.wrap(frameData);
  if (!wrapper.hasArray() || wrapper.array() != frameData) {
    // This should never happen. If it did, preview content could not be passed
    // to the underlying detector later.
    throw new IllegalStateException("Failed to create valid buffer for camera source.");
  }

  bytesToByteBuffer.put(frameData, wrapper);
  return frameData;
}
 
Example #12
Source File: OneCameraImpl.java    From Camera2 with Apache License 2.0 6 votes vote down vote up
/**
 * Picks a preview size whose aspect ratio matches the given picture size,
 * falling back to the default picture size when none is supplied.
 */
@Override
public Size pickPreviewSize(Size pictureSize, Context context) {
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }

    float aspect = pictureSize.getWidth() / (float) pictureSize.getHeight();

    // Devices expose only one RAW resolution, so be much more flexible about
    // the aspect-ratio match when capturing RAW; null keeps the default tolerance.
    Double tolerance = sCaptureImageFormat == ImageFormat.RAW_SENSOR ? 10d : null;

    Size chosen = CaptureModuleUtil.getOptimalPreviewSize(
            getSupportedPreviewSizes(), aspect, tolerance);
    Log.d(TAG, "Selected preview size: " + chosen);
    return chosen;
}
 
Example #13
Source File: CameraSource.java    From android-vision with Apache License 2.0 6 votes vote down vote up
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Widen to long BEFORE multiplying so large preview sizes cannot overflow int arithmetic.
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up to whole bytes; the extra byte guards against any rounding shortfall.
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example #14
Source File: JellyBeanCamera.java    From LiveMultimedia with Apache License 2.0 6 votes vote down vote up
/**********************************************************
 * Captures video frames one by one from the preview window.
 * Installs the frame catcher as the preview callback and
 * queues the callback buffers that hold incoming images.
 **********************************************************/
private synchronized void setupVideoFrameCallback() {
    Log.d(TAG, "setupVideoFrameCallback(() called on the Camera class");
    if (mCamera == null) {
        Log.e(TAG, "Camera object is null in setupVideoFrameCallback!");
        return;
    }
    mFrameCatcher = new FrameCatcher( mPreviewWidth,
                                      mPreviewHeight,
                                      getImageFormat(),
                                      mVideoPreview);
    // One preview frame occupies width * height * bitsPerPixel / 8 bytes.
    // (Removed the unused 'sizeWeShouldHave' local that duplicated this math.)
    long bufferSize = mPreviewWidth * mPreviewHeight * ImageFormat.getBitsPerPixel(mImageFormat) / 8;
    // Clear any previously installed callback before installing the catcher.
    mCamera.setPreviewCallbackWithBuffer(null);
    mCamera.setPreviewCallbackWithBuffer( mFrameCatcher );
    // Queue several buffers so the camera can keep streaming while one is processed.
    for (int i = 0; i < NUM_CAMERA_PREVIEW_BUFFERS; i++) {
        byte [] cameraBuffer = new byte[(int)bufferSize];
        mCamera.addCallbackBuffer(cameraBuffer);
    }
}
 
Example #15
Source File: LegacyCameraDevice.java    From android_9.0.0_r45 with Apache License 2.0 6 votes vote down vote up
/**
 * Returns whether the given surface's gralloc usage flags identify it as a
 * preview consumer: at least one of the HW texture/composer/render flags set,
 * and none of the video-encoder/RenderScript/CPU-read flags.
 *
 * @throws IllegalArgumentException if the surface has been abandoned
 */
public static boolean isPreviewConsumer(Surface output) {
    int usageFlags = detectSurfaceUsageFlags(output);
    int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT |
            GRALLOC_USAGE_SW_READ_OFTEN;
    int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
            GRALLOC_USAGE_HW_RENDER;
    boolean previewConsumer = ((usageFlags & disallowedFlags) == 0 &&
            (usageFlags & allowedFlags) != 0);
    // The detected format was assigned to an unused local; keep the call purely
    // to validate the surface is alive, converting abandonment into an exception.
    try {
        detectSurfaceType(output);
    } catch(BufferQueueAbandonedException e) {
        throw new IllegalArgumentException("Surface was abandoned", e);
    }

    return previewConsumer;
}
 
Example #16
Source File: LegacyCameraDevice.java    From android_9.0.0_r45 with Apache License 2.0 6 votes vote down vote up
/**
 * Returns whether the given surface's gralloc usage flags identify it as a
 * video-encoder consumer: the HW video-encoder flag set, and none of the
 * texture/composer/RenderScript/CPU-read flags.
 *
 * @throws IllegalArgumentException if the surface has been abandoned
 */
public static boolean isVideoEncoderConsumer(Surface output) {
    int usageFlags = detectSurfaceUsageFlags(output);
    int disallowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
            GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN;
    int allowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0 &&
            (usageFlags & allowedFlags) != 0);

    // The detected format was assigned to an unused local; keep the call purely
    // to validate the surface is alive, converting abandonment into an exception.
    try {
        detectSurfaceType(output);
    } catch(BufferQueueAbandonedException e) {
        throw new IllegalArgumentException("Surface was abandoned", e);
    }

    return videoEncoderConsumer;
}
 
Example #17
Source File: CameraSource.java    From esp-idf-provisioning-android with Apache License 2.0 6 votes vote down vote up
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    // Widen to long BEFORE multiplying so large preview sizes cannot overflow int arithmetic.
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    // Round up to whole bytes; the extra byte guards against any rounding shortfall.
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example #18
Source File: H263Stream.java    From spydroid-ipcamera with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Constructs the H.263 stream: configures the camera to deliver NV21 frames,
 * selects the H.263 video encoder, and installs an H263Packetizer.
 * @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT 
 * @throws IOException
 */	
public H263Stream(int cameraId) {
	super(cameraId);
	// NV21 is the camera preview format consumed by the encoder pipeline.
	mCameraImageFormat = ImageFormat.NV21;
	mVideoEncoder = MediaRecorder.VideoEncoder.H263;
	mPacketizer = new H263Packetizer();
}
 
Example #19
Source File: CameraEngine.java    From In77Camera with MIT License 5 votes vote down vote up
/**
 * Opens the camera facing the requested direction, configures preview and
 * picture sizes matching the preferred ratio, and installs the preview
 * callback buffer.
 *
 * @param facingFront true to open the front-facing camera, false for the back camera
 */
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing=facingFront? Camera.CameraInfo.CAMERA_FACING_FRONT:Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId=getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        // BUG FIX: Camera.open() can return null (camera unavailable or in use).
        // Previously the camera was dereferenced before this null check.
        if (camera != null) {
            camera.setPreviewCallbackWithBuffer(this);
            initRotateDegree(currentCameraId);
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList=mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList=mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList=mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);

            previewSize=choosePreferredSize(supportedPreviewSizesList,preferredRatio);
            Camera.Size photoSize=choosePreferredSize(supportedPictureSizesList,preferredRatio);

            // The preview is rotated: the sensor's width maps to the view's height.
            frameHeight=previewSize.width;
            frameWidth=previewSize.height;
            Log.d(TAG, "openCamera: choose preview size"+previewSize.height+"x"+previewSize.width);
            mParams.setPreviewSize(frameHeight,frameWidth);

            mParams.setPictureSize(photoSize.width,photoSize.height);
            Log.d(TAG, "openCamera: choose photo size"+photoSize.height+"x"+photoSize.width);

            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            // One preview frame in bytes for the current preview format.
            int size = frameWidth*frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer==null || mBuffer.length!=size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened=true;
        }
    }
}
 
Example #20
Source File: Camera2Api23.java    From TikTok with Apache License 2.0 5 votes vote down vote up
/**
 * Collects picture sizes, preferring the high-resolution output sizes exposed
 * on API 23+; falls back to the superclass behavior when none are reported.
 */
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes. Query once and iterate the stored result
    // instead of calling getHighResolutionOutputSizes() a second time.
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
 
Example #21
Source File: Camera2.java    From TikTok with Apache License 2.0 5 votes vote down vote up
/**
 * (Re)creates the JPEG ImageReader sized to the largest picture size for the
 * current aspect ratio, closing any previously created reader first.
 */
private void prepareImageReader() {
    if (mImageReader != null) {
        mImageReader.close();
    }
    Size largest = mPictureSizes.sizes(mAspectRatio).last();
    int maxImages = 2;
    mImageReader = ImageReader.newInstance(
            largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, maxImages);
    mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
}
 
Example #22
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0 5 votes vote down vote up
/**
 * Decodes raw camera bytes into a Bitmap. Only NV21 and YUV_420_888 input is
 * supported; any other format yields null.
 */
public static Bitmap ToBitmap(byte[] imageBytes, int format, int width, int height)
{
    if (format == ImageFormat.NV21 || format == ImageFormat.YUV_420_888) {
        return NV21ToBitmap(imageBytes, width, height);
    }
    return null;
}
 
Example #23
Source File: ConUtil.java    From MegviiFacepp-Android-SDK with Apache License 2.0 5 votes vote down vote up
/**
 * Converts an NV21 preview frame from the given camera into a Bitmap by
 * compressing it to JPEG (quality 80) and decoding the result.
 * Returns null if the conversion fails.
 */
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
	Camera.Size size = _camera.getParameters().getPreviewSize();
	try {
		YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
		// Note: removed the dead 'image != null' check — 'new' never yields null.
		ByteArrayOutputStream stream = new ByteArrayOutputStream();
		image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
		Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
		stream.close();
		return bmp;
	} catch (Exception ignored) {
		// Best-effort conversion: any failure (bad frame data, decode error)
		// deliberately falls through to the null return below.
	}
	return null;
}
 
Example #24
Source File: PreviewBufferManager.java    From FastBarcodeScanner with Apache License 2.0 5 votes vote down vote up
/**
 * Stores the preview format and dimensions, and derives the size in bytes of
 * one preview buffer from the format's bits-per-pixel value.
 */
public void setup(int previewFormat, int previewWidth, int previewHeight) {
    mPreviewFormat = previewFormat;
    mPreviewWidth = previewWidth;
    mPreviewHeight = previewHeight;
    // bytes per frame = width * height * bitsPerPixel / 8
    mPreviewBufferSize =
            (mPreviewWidth * mPreviewHeight * ImageFormat.getBitsPerPixel(mPreviewFormat)) / 8;
}
 
Example #25
Source File: CameraManager.java    From Camdroid with Apache License 2.0 5 votes vote down vote up
/**
 * Configures capture parameters: selects JPEG as the picture format when the
 * camera supports it (at maximum JPEG quality) and resets zoom to 0.
 */
private static void initPictureFormat(Camera.Parameters params) {
    // Declare and assign in one step; the previous 'Integer x = null;' followed
    // by an immediate reassignment was redundant.
    Integer pictureFormat = findSettableValue(
            params.getSupportedPictureFormats(), ImageFormat.JPEG);
    if (pictureFormat != null) {
        params.setPictureFormat(pictureFormat);
        params.setJpegQuality(100);
    }

    if (params.isZoomSupported()) {
        params.setZoom(0);
    }

}
 
Example #26
Source File: FaceDetectRGBActivity.java    From FaceDetectCamera with Apache License 2.0 5 votes vote down vote up
@Override
protected void onPostCreate(Bundle savedInstanceState) {
    super.onPostCreate(savedInstanceState);
    // Attach this activity as the surface callback and request NV21-format
    // preview frames on the surface.
    // NOTE(review): the original comment claimed a camera-permission check is
    // performed here, but no such check is visible in this method — confirm
    // where the permission request actually happens.
    SurfaceHolder holder = mView.getHolder();
    holder.addCallback(this);
    holder.setFormat(ImageFormat.NV21);
}
 
Example #27
Source File: CameraManager.java    From AudioVideoCodec with Apache License 2.0 5 votes vote down vote up
/**
 * Opens the camera with the given id, binds it to the surface texture, and
 * configures preview format (NV21), flash, focus mode, preview size, and
 * picture size before starting the preview.
 */
private void startCamera(int cameraId) {
    try {
        camera = Camera.open(cameraId);
        camera.setPreviewTexture(surfaceTexture);

        Camera.Parameters parameters = camera.getParameters();
        parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        parameters.setPreviewFormat(ImageFormat.NV21);

        // Set the focus mode: enabled when the rear camera opens, disabled when
        // switching to the front camera (Samsung/Huawei cannot set front-camera
        // focus; some Meizu/Xiaomi models can).
        if (cameraId == 0) {
            // Focus can be ineffective on some Xiaomi/Meizu devices and needs
            // per-device adaptation; for fully seamless autofocus, the best
            // approach is to listen to the accelerometer.
            camera.cancelAutoFocus();
            // This setting can briefly flicker the screen; the same applies to
            // Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO.
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        }
        Camera.Size size = getCameraSize(parameters.getSupportedPreviewSizes(), screenWidth,
                screenHeight, 0.1f);
        parameters.setPreviewSize(size.width, size.height);
        // The frame is not yet rotated horizontally, so its width corresponds to
        // the vertical dimension after the rotation step.
        Log.d(TAG, "startCamera: 预览宽:" + size.width + " -- " + "预览高:" + size.height);
        previewWidth = size.width;
        previewHeight = size.height;

        size = getCameraSize(parameters.getSupportedPictureSizes(), screenWidth, screenHeight, 0.1f);
        parameters.setPictureSize(size.width, size.height);
        // Not rotated horizontally, so the width is the vertical dimension.
        Log.d(TAG, "startCamera: 图片宽:" + size.width + " -- " + "图片高:" + size.height);

        camera.setParameters(parameters);
        camera.startPreview();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example #28
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0 5 votes vote down vote up
/**
 * Converts an NV21 frame to a Bitmap by compressing the YUV data to JPEG at
 * quality 50 and decoding the resulting bytes.
 */
private static Bitmap NV21ToBitmap(byte[] nv21Bytes, int width, int height)
{
    // width and height describe the size of the preview frame.
    YuvImage yuvImage = new YuvImage(nv21Bytes, ImageFormat.NV21, width, height, null);

    ByteArrayOutputStream jpegBuffer = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 50, jpegBuffer);
    byte[] encoded = jpegBuffer.toByteArray();

    return BitmapFactory.decodeByteArray(encoded, 0, encoded.length);
}
 
Example #29
Source File: CameraMetadataNative.java    From android_9.0.0_r45 with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the available stream formats, translating the native JPEG constant
 * to the managed ImageFormat.JPEG value. May return null when the underlying
 * metadata key is absent.
 */
private int[] getAvailableFormats() {
    int[] formats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
    if (formats == null) {
        return null;
    }
    // JPEG has a different value on the native side than on the managed side,
    // so rewrite it in place.
    for (int i = 0; i < formats.length; i++) {
        if (formats[i] == NATIVE_JPEG_FORMAT) {
            formats[i] = ImageFormat.JPEG;
        }
    }
    return formats;
}
 
Example #30
Source File: JCameraView.java    From CameraView with Apache License 2.0 5 votes vote down vote up
private void setStartPreview(Camera camera, SurfaceHolder holder) {
    try {
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPictureFormat(ImageFormat.JPEG);
        List<Camera.Size> sizeList = parameters.getSupportedPreviewSizes();//获取所有支持的camera尺寸
        Iterator<Camera.Size> itor = sizeList.iterator();
        while (itor.hasNext()) {
            Camera.Size cur = itor.next();
            Log.i("CJT", "所有的  width = " + cur.width + " height = " + cur.height);
            if (cur.width >= width&& cur.height >= height) {
                width = cur.width;
                height = cur.height;
            }
        }
        Log.i("size", "width : height" + width + " : " + height + " ==== " + getWidth() + " : " + getHeight());
        parameters.setPreviewSize(width, height);//把camera.size赋值到parameters
        parameters.setPictureSize(width, height);
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        camera.setParameters(parameters);

        camera.setPreviewDisplay(holder);
        camera.setDisplayOrientation(90);
        camera.startPreview();
    } catch (IOException e) {
        e.printStackTrace();
    }
}