Java Code Examples for android.graphics.ImageFormat#NV21

The following examples show how to use android.graphics.ImageFormat#NV21. They are drawn from a range of open-source Android projects; the source file, project, and license for each snippet are noted above it.
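NV21 is the default preview format of the legacy android.hardware.Camera API, so most of the snippets below operate on frames delivered by a preview callback. As a point of reference, a minimal sketch of requesting NV21 explicitly and receiving frames might look like the following (the callback body and variable names are placeholders, not taken from any example below):

Camera camera = Camera.open();
Camera.Parameters params = camera.getParameters();
params.setPreviewFormat(ImageFormat.NV21); // NV21 preview support is mandatory on all devices
camera.setParameters(params);
camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // 'data' holds one NV21 frame: a full-resolution Y plane followed by interleaved V/U samples.
        // Hand it to a converter such as the ones shown in the examples below.
    }
});
// A preview surface (SurfaceHolder or SurfaceTexture) must also be set before startPreview().
camera.startPreview();
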
Example 1
Source File: BitmapUtil.java    From Silence with GNU General Public License v3.0
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example 2
Source File: STUtils.java    From Fatigue-Detection with MIT License
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
	
	TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
	
	Rect rect = new Rect(0, 0, width, height);
	
	try {
		Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
		Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
    	byte[] imageData = nv21;
    	if (mRS == null) {
    		mRS = RenderScript.create(context);
    		mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
    		Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
    		tb.setX(width);
    		tb.setY(height);
    		tb.setMipmaps(false);
    		tb.setYuvFormat(ImageFormat.NV21);
    		ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
    		timings.addSplit("Prepare for ain");
    		Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
    		tb2.setX(width);
    		tb2.setY(height);
    		tb2.setMipmaps(false);
    		aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);
    		timings.addSplit("Prepare for aOut");
    		bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    		timings.addSplit("Create Bitmap");
		}
    	ain.copyFrom(imageData);
		timings.addSplit("ain copyFrom");
		mYuvToRgb.setInput(ain);
		timings.addSplit("setInput ain");
		mYuvToRgb.forEach(aOut);
		timings.addSplit("NV21 to ARGB forEach");
		aOut.copyTo(bitmap);
		timings.addSplit("Allocation to Bitmap");
	} catch (Exception e) {
		YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
		timings.addSplit("NV21 bytes to YuvImage");
		
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
	}
	
   	timings.dumpToLog();
   	return bitmap;
}
 
Example 3
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0
public static Bitmap ToBitmap(byte[] imageBytes, int format, int width, int height)
{
    switch (format) {
        case ImageFormat.NV21:
        case ImageFormat.YUV_420_888:
            return NV21ToBitmap(imageBytes, width, height);
    }

    return null;
}
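The NV21ToBitmap helper invoked above is not reproduced in this example. A minimal stand-in, assuming the YuvImage/JPEG round trip used by several of the later examples, could look like this; note that routing YUV_420_888 buffers through it only gives correct colors when their chroma planes happen to be laid out like NV21:

private static Bitmap NV21ToBitmap(byte[] nv21, int width, int height) {
    // Wrap the raw NV21 bytes, compress the whole frame to JPEG, then decode back to a Bitmap.
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    if (!yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out)) {
        return null;
    }
    byte[] jpegBytes = out.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}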
 
Example 4
Source File: H264Stream.java    From spydroid-ipcamera with GNU General Public License v3.0
/**
 * Constructs the H.264 stream.
 * @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
 * @throws IOException
 */
public H264Stream(int cameraId) {
	super(cameraId);
	mMimeType = "video/avc";
	mCameraImageFormat = ImageFormat.NV21;
	mVideoEncoder = MediaRecorder.VideoEncoder.H264;
	mPacketizer = new H264Packetizer();
}
 
Example 5
Source File: H263Stream.java    From libstreaming with Apache License 2.0
/**
 * Constructs the H.263 stream.
 * @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT 
 * @throws IOException
 */	
public H263Stream(int cameraId) {
	super(cameraId);
	mCameraImageFormat = ImageFormat.NV21;
	mVideoEncoder = MediaRecorder.VideoEncoder.H263;
	mPacketizer = new H263Packetizer();
}
 
Example 6
Source File: CameraUtils.java    From libcommon with Apache License 2.0
/**
 * Dumps the supported picture formats to logcat.
 * @param params the camera parameters to inspect
 */
public static void dumpSupportedPictureFormats(@NonNull final Camera.Parameters params) {
	final List<Integer> formats = params.getSupportedPictureFormats();
	for (final int format: formats) {
		switch (format) {
		case ImageFormat.DEPTH16:			Log.i(TAG, "supported: DEPTH16"); break;
		case ImageFormat.DEPTH_POINT_CLOUD:	Log.i(TAG, "supported: DEPTH_POINT_CLOUD"); break;
		case ImageFormat.FLEX_RGBA_8888:	Log.i(TAG, "supported: FLEX_RGBA_8888"); break;
		case ImageFormat.FLEX_RGB_888:		Log.i(TAG, "supported: FLEX_RGB_888"); break;
		case ImageFormat.JPEG:				Log.i(TAG, "supported: JPEG"); break;
		case ImageFormat.NV16:				Log.i(TAG, "supported: NV16"); break;
		case ImageFormat.NV21:				Log.i(TAG, "supported: NV21"); break;
		case ImageFormat.PRIVATE:			Log.i(TAG, "supported: PRIVATE"); break;
		case ImageFormat.RAW10:				Log.i(TAG, "supported: RAW10"); break;
		case ImageFormat.RAW12:				Log.i(TAG, "supported: RAW12"); break;
		case ImageFormat.RAW_PRIVATE:		Log.i(TAG, "supported: RAW_PRIVATE"); break;
		case ImageFormat.RAW_SENSOR:		Log.i(TAG, "supported: RAW_SENSOR"); break;
		case ImageFormat.RGB_565:			Log.i(TAG, "supported: RGB_565"); break;
		case ImageFormat.UNKNOWN:			Log.i(TAG, "supported: UNKNOWN"); break;
		case ImageFormat.YUV_420_888:		Log.i(TAG, "supported: YUV_420_888"); break;
		case ImageFormat.YUV_422_888:		Log.i(TAG, "supported: YUV_422_888"); break;
		case ImageFormat.YUV_444_888:		Log.i(TAG, "supported: YUV_444_888"); break;
		case ImageFormat.YUY2:				Log.i(TAG, "supported: YUY2"); break;
		case ImageFormat.YV12:				Log.i(TAG, "supported: YV12"); break;
		default:
			Log.i(TAG, String.format("supported: unknown, %08x", format));
			break;
		}
	}
}
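A hypothetical call site, assuming the legacy Camera API, simply hands the open camera's parameters to the dump method:

Camera camera = Camera.open();
CameraUtils.dumpSupportedPictureFormats(camera.getParameters());
camera.release();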
 
Example 7
Source File: Camera.java    From android_9.0.0_r45 with Apache License 2.0
private String cameraFormatForPixelFormat(int pixel_format) {
    switch(pixel_format) {
    case ImageFormat.NV16:      return PIXEL_FORMAT_YUV422SP;
    case ImageFormat.NV21:      return PIXEL_FORMAT_YUV420SP;
    case ImageFormat.YUY2:      return PIXEL_FORMAT_YUV422I;
    case ImageFormat.YV12:      return PIXEL_FORMAT_YUV420P;
    case ImageFormat.RGB_565:   return PIXEL_FORMAT_RGB565;
    case ImageFormat.JPEG:      return PIXEL_FORMAT_JPEG;
    default:                    return null;
    }
}
 
Example 8
Source File: JavaCameraView.java    From AndroidDocumentScanner with MIT License
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
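Outside of the JavaCameraView plumbing, the same OpenCV conversion can be applied to a raw NV21 byte array by wrapping it in a single-channel Mat with height * 3 / 2 rows. A minimal sketch, with nv21Bytes, width and height standing in for a real preview frame:

Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
yuv.put(0, 0, nv21Bytes);                                    // copy the NV21 frame into the Mat
Mat rgba = new Mat();
Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_NV21, 4); // 4 output channels (RGBA)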
 
Example 9
Source File: VideoCapture.java    From android-chromium with BSD 2-Clause "Simplified" License
static int getImageFormat() {
    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
        return ImageFormat.NV21;
    }

    for (String buggyDevice : sBUGGY_DEVICE_LIST) {
        if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
            return ImageFormat.NV21;
        }
    }
    return ImageFormat.YV12;
}
 
Example 10
Source File: YuvToRgbTest.java    From easyrs with MIT License
@NonNull
private Bitmap getExpectedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
 
Example 11
Source File: JavaCameraView.java    From LicensePlateDiscern with MIT License
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 12
Source File: ImageStream.java    From ssj with GNU General Public License v3.0
public ImageStream(int num, int dim, double sr)
{
    super(num, dim, sr);
    this.type = Cons.Type.IMAGE;
    this.width = 0;
    this.height = 0;
    this.format = ImageFormat.NV21;
}
 
Example 13
Source File: ColorFormatUtil.java    From MultiMediaSample with Apache License 2.0
public static Bitmap convertYUV420sp2RGB(byte[] yuv, int width, int height) {
    // Converts YUV420sp (NV21) data to RGB; typically takes about 5-60 ms
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    byte[] imageBytes = out.toByteArray();
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
 
Example 14
Source File: CameraManager.java    From Roid-Library with Apache License 2.0
/**
 * A factory method to build the appropriate LuminanceSource object based
 * on the format of the preview buffers, as described by
 * Camera.Parameters.
 * 
 * @param data A preview frame.
 * @param width The width of the image.
 * @param height The height of the image.
 * @return A PlanarYUVLuminanceSource instance.
 */
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    int previewFormat = configManager.getPreviewFormat();
    String previewFormatString = configManager.getPreviewFormatString();
    switch (previewFormat) {
        // This is the standard Android format which all devices are REQUIRED to support.
        // In theory, it's the only one we should ever care about.
        case ImageFormat.NV21:
            // This format has never been seen in the wild, but is compatible as we only
            // care about the Y channel, so allow it.
        case ImageFormat.NV16:
            return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top, rect.width(),
                    rect.height());
        default:
            // The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
            // Fortunately, it too has all the Y data up front, so we can read it.
            if ("yuv420p".equals(previewFormatString)) {
                return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top, rect.width(),
                        rect.height());
            }
    }
    throw new IllegalArgumentException("Unsupported picture format: " + previewFormat + '/' + previewFormatString);
}
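In the surrounding ZXing-style scanning loop (not part of this snippet), the returned luminance source is typically binarized and passed to a barcode reader. A hedged sketch of such a call site, where cameraManager, data, width and height come from the preview callback:

PlanarYUVLuminanceSource source = cameraManager.buildLuminanceSource(data, width, height);
BinaryBitmap barcodeBitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
    Result result = new MultiFormatReader().decode(barcodeBitmap);
    Log.d(TAG, "Decoded: " + result.getText());
} catch (NotFoundException e) {
    // No barcode in this frame; wait for the next preview frame.
}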
 
Example 15
Source File: ImageUtils.java    From IDCardCamera with Apache License 2.0
/**
 * Converts an NV21 byte[] into a Bitmap.
 *
 * @param bytes  the NV21 frame data
 * @param width  the frame width in pixels
 * @param height the frame height in pixels
 * @return the decoded Bitmap, or null if JPEG compression fails
 */
public static Bitmap getBitmapFromByte(byte[] bytes, int width, int height) {
    final YuvImage image = new YuvImage(bytes, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream(bytes.length);
    if (!image.compressToJpeg(new Rect(0, 0, width, height), 100, os)) {
        return null;
    }
    byte[] tmp = os.toByteArray();
    Bitmap bmp = BitmapFactory.decodeByteArray(tmp, 0, tmp.length);
    return bmp;
}
 
Example 16
Source File: JavaCameraView.java    From VIA-AI with MIT License
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 17
Source File: JavaCamera2View.java    From OpenCvFaceDetect with Apache License 2.0
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
    else if (mPreviewFormat == ImageFormat.YUV_420_888) {
        assert (mUVFrameData != null);
        Imgproc.cvtColorTwoPlane(mYuvFrameData, mUVFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
    } else
        throw new IllegalArgumentException("Preview format can be NV21, YV12 or YUV_420_888");

    return mRgba;
}
 
Example 18
Source File: StreamConfigurationMap.java    From android_9.0.0_r45 with Apache License 2.0
private String formatToString(int format) {
    switch (format) {
        case ImageFormat.YV12:
            return "YV12";
        case ImageFormat.YUV_420_888:
            return "YUV_420_888";
        case ImageFormat.NV21:
            return "NV21";
        case ImageFormat.NV16:
            return "NV16";
        case PixelFormat.RGB_565:
            return "RGB_565";
        case PixelFormat.RGBA_8888:
            return "RGBA_8888";
        case PixelFormat.RGBX_8888:
            return "RGBX_8888";
        case PixelFormat.RGB_888:
            return "RGB_888";
        case ImageFormat.JPEG:
            return "JPEG";
        case ImageFormat.YUY2:
            return "YUY2";
        case ImageFormat.Y8:
            return "Y8";
        case ImageFormat.Y16:
            return "Y16";
        case ImageFormat.RAW_SENSOR:
            return "RAW_SENSOR";
        case ImageFormat.RAW_PRIVATE:
            return "RAW_PRIVATE";
        case ImageFormat.RAW10:
            return "RAW10";
        case ImageFormat.DEPTH16:
            return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD:
            return "DEPTH_POINT_CLOUD";
        case ImageFormat.RAW_DEPTH:
            return "RAW_DEPTH";
        case ImageFormat.PRIVATE:
            return "PRIVATE";
        default:
            return "UNKNOWN";
    }
}
 
Example 19
Source File: STUtils.java    From TikTok with Apache License 2.0
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
	
	TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
	
	Rect rect = new Rect(0, 0, width, height);
	
	try {
		Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
		Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
    	byte[] imageData = nv21;
    	if (mRS == null) {
    		mRS = RenderScript.create(context);
    		mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
    		Type.Builder tb = new Type.Builder(mRS, Element
                       .createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
    		tb.setX(width);
    		tb.setY(height);
    		tb.setMipmaps(false);
    		tb.setYuvFormat(ImageFormat.NV21);
    		ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
    		timings.addSplit("Prepare for ain");
    		Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
    		tb2.setX(width);
    		tb2.setY(height);
    		tb2.setMipmaps(false);
    		aOut = Allocation
                       .createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);
    		timings.addSplit("Prepare for aOut");
    		bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    		timings.addSplit("Create Bitmap");
		}
    	ain.copyFrom(imageData);
		timings.addSplit("ain copyFrom");
		mYuvToRgb.setInput(ain);
		timings.addSplit("setInput ain");
		mYuvToRgb.forEach(aOut);
		timings.addSplit("NV21 to ARGB forEach");
		aOut.copyTo(bitmap);
		timings.addSplit("Allocation to Bitmap");
	} catch (Exception e) {
		YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
		timings.addSplit("NV21 bytes to YuvImage");
		
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
	}
	
   	timings.dumpToLog();
   	return bitmap;
}
 
Example 20
Source File: LegacyCameraDevice.java    From android_9.0.0_r45 with Apache License 2.0
/**
 * Check if a given surface uses {@link ImageFormat#YUV_420_888} or a format that can be readily
 * converted to it; YV12 and NV21 are the two currently supported formats.
 *
 * @param s the surface to check.
 * @return {@code true} if the surface uses {@link ImageFormat#YUV_420_888} or a compatible
 *          format.
 */
static boolean needsConversion(Surface s) throws BufferQueueAbandonedException {
    int nativeType = detectSurfaceType(s);
    return nativeType == ImageFormat.YUV_420_888 || nativeType == ImageFormat.YV12 ||
            nativeType == ImageFormat.NV21;
}