android.graphics.YuvImage Java Examples

The following examples show how to use android.graphics.YuvImage. Each example is taken from an open-source Android project; the source file, project, and license are noted above the code.
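
Most of the examples share the same three-step pattern: wrap a raw NV21 preview frame in a YuvImage, compress a Rect of it to JPEG with compressToJpeg(), and optionally decode the resulting JPEG bytes back into a Bitmap. A minimal, self-contained sketch of that pattern follows (the method name nv21ToBitmap and the quality value 90 are illustrative choices, not taken from any of the projects below):

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;

public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
    // Wrap the raw NV21 bytes; strides may be null when the data has no row padding.
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);

    // Compress the full frame (any sub-rect works too) to JPEG in memory.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 90, out);

    // Decode the JPEG bytes into a Bitmap.
    byte[] jpeg = out.toByteArray();
    return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
}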
Example #1
Source File: BitmapUtil.java    From mollyim-android with GNU General Public License v3.0
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
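
Examples #1, #8, #11 and #17 call a rotateNV21() helper that is not shown on this page. Below is a sketch of one way such a helper can be written, matching the five-argument form used in Examples #1 and #8; it is a plain per-pixel NV21 rotate/flip that assumes even width and height, and it is not necessarily the implementation used by those projects. The four-argument variants in Examples #11 and #17 correspond to calling it with flipHorizontal set to false.

public static byte[] rotateNV21(byte[] yuv, int width, int height,
                                int rotation, boolean flipHorizontal) {
  if (rotation == 0 && !flipHorizontal) return yuv;
  if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
    throw new IllegalArgumentException("rotation must be 0, 90, 180 or 270");
  }

  byte[]  output    = new byte[yuv.length];
  int     frameSize = width * height;
  boolean swap      = rotation % 180 != 0;
  boolean xflip     = flipHorizontal ? rotation % 270 == 0 : rotation % 270 != 0;
  boolean yflip     = rotation >= 180;

  for (int j = 0; j < height; j++) {
    for (int i = 0; i < width; i++) {
      int yIn = j * width + i;
      // NV21: full-resolution Y plane followed by interleaved V/U pairs at half resolution.
      int vIn = frameSize + (j >> 1) * width + (i & ~1);
      int uIn = vIn + 1;

      int wOut     = swap  ? height              : width;
      int hOut     = swap  ? width               : height;
      int iSwapped = swap  ? j                   : i;
      int jSwapped = swap  ? i                   : j;
      int iOut     = xflip ? wOut - iSwapped - 1 : iSwapped;
      int jOut     = yflip ? hOut - jSwapped - 1 : jSwapped;

      int yOut = jOut * wOut + iOut;
      int vOut = frameSize + (jOut >> 1) * wOut + (iOut & ~1);
      int uOut = vOut + 1;

      output[yOut] = yuv[yIn];
      output[vOut] = yuv[vIn];
      output[uOut] = yuv[uIn];
    }
  }
  return output;
}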
 
Example #2
Source File: BitmapUtils.java    From quickstart-android with Apache License 2.0
@Nullable
public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
    data.rewind();
    byte[] imageInBuffer = new byte[data.limit()];
    data.get(imageInBuffer, 0, imageInBuffer.length);
    try {
        YuvImage image =
                new YuvImage(
                        imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream);

            Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

            stream.close();
            return rotateBitmap(bmp, metadata.getRotation(), metadata.getCameraFacing());
        }
    } catch (Exception e) {
        Log.e("VisionProcessorBase", "Error: " + e.getMessage());
    }
    return null;
}
 
Example #3
Source File: SnapshotVideoRenderer.java    From video-quickstart-android with MIT License
private YuvImage fastI420ToYuvImage(ByteBuffer[] yuvPlanes,
                                    int[] yuvStrides,
                                    int width,
                                    int height) {
    byte[] bytes = new byte[width * height * 3 / 2];
    int i = 0;
    for (int row = 0 ; row < height ; row++) {
        for (int col = 0 ; col < width ; col++) {
            bytes[i++] = yuvPlanes[0].get(col + row * yuvStrides[0]);
        }
    }
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2; col++) {
            bytes[i++] = yuvPlanes[2].get(col + row * yuvStrides[2]);
            bytes[i++] = yuvPlanes[1].get(col + row * yuvStrides[1]);
        }
    }
    return new YuvImage(bytes, NV21, width, height, null);
}
 
Example #4
Source File: SnapshotVideoRenderer.java    From video-quickstart-android with MIT License
private Bitmap captureBitmapFromYuvFrame(I420Frame i420Frame) {
    YuvImage yuvImage = i420ToYuvImage(i420Frame.yuvPlanes,
            i420Frame.yuvStrides,
            i420Frame.width,
            i420Frame.height);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    Rect rect = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());

    // Compress YuvImage to jpeg
    yuvImage.compressToJpeg(rect, 100, stream);

    // Convert jpeg to Bitmap
    byte[] imageBytes = stream.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    Matrix matrix = new Matrix();

    // Apply any needed rotation
    matrix.postRotate(i420Frame.rotationDegree);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix,
            true);

    return bitmap;
}
 
Example #5
Source File: MyRenderer.java    From Viewer with Apache License 2.0
public void rawByteArray2RGBABitmap2(FileOutputStream b)
{
	int yuvi = yuv_w * yuv_h;
	int uvi = 0;
	byte[] yuv = new byte[yuv_w * yuv_h * 3 / 2];
	System.arraycopy(y, 0, yuv, 0, yuvi);
	for (int i = 0; i < yuv_h / 2; i++)
	{
		for (int j = 0; j < yuv_w / 2; j++)
		{
			yuv[yuvi++] = v[uvi];
			yuv[yuvi++] = u[uvi++];
		}
	}
	YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, yuv_w, yuv_h, null);
	Rect rect = new Rect(0, 0, yuv_w, yuv_h);
	yuvImage.compressToJpeg(rect, 100, b);
}
 
Example #6
Source File: LearnImages.java    From PHONK with GNU General Public License v3.0
private Bitmap cameraDataToBmp(byte[] data, Camera camera) {
    // transform camera data to bmp
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    // get support preview format
    // MLog.d("qq", );
    YuvImage yuv = new YuvImage(data, parameters.getPreviewFormat(), width, height, null);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // maybe pass the output to the callbacks and do each compression there?
    yuv.compressToJpeg(new Rect(0, 0, (int) Math.floor(width * 0.2), (int) Math.floor(height * 0.2)), 100, out);
    byte[] bytes = out.toByteArray();
    BitmapFactory.Options bitmap_options = new BitmapFactory.Options();
    bitmap_options.inPreferredConfig = Bitmap.Config.RGB_565;
    final Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, bitmap_options);
    return bmp;
}
 
Example #7
Source File: CameraModelImpl.java    From BluetoothCameraAndroid with MIT License
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    if (mInitAck && mDataAck) {
        mInitAck = false;
        mDataAck = false;
        previewMissedCount = 0;
        ThreadHandler.getInstance().doInBackground(new Runnable() {
            @Override
            public void run() {
                Camera.Size size = camera.getParameters().getPreviewSize();
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                yuvImage.compressToJpeg(new Rect(0, 0, size.width, size.height), 50, out);
                byte[] imageBytes = out.toByteArray();
                mBluetoothHandler.write(BluetoothHandler.DATA_START.getBytes());
                mPendingImageBytes = imageBytes;
            }
        });
    } else {
        previewMissedCount++;
        if (previewMissedCount > 50) {
            mInitAck = true;
            mDataAck = true;
        }
    }
}
 
Example #8
Source File: BitmapUtil.java    From deltachat-android with GNU General Public License v3.0
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example #9
Source File: ImageShot.java    From Telephoto with Apache License 2.0
private byte[] imgToByte(boolean quality) {
    Camera.Parameters parameters = getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    YuvImage yuv = new YuvImage(getImage(), parameters.getPreviewFormat(), width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);

    byte[] compressed = out.toByteArray();

    Bitmap newBmp = BitmapFactory.decodeByteArray(compressed, 0, compressed.length);
    Matrix mat = new Matrix();
    mat.postRotate(PrefsController.instance.getPrefs().getCameraPrefs(cameraId).angle);
    newBmp = Bitmap.createBitmap(newBmp, 0, 0, newBmp.getWidth(), newBmp.getHeight(), mat, true);
    ByteArrayOutputStream out2 = new ByteArrayOutputStream();
    if (quality) {
        newBmp.compress(Bitmap.CompressFormat.PNG, 100, out2);
    } else {
        newBmp.compress(Bitmap.CompressFormat.JPEG, 80, out2);
    }

    return out2.toByteArray();
}
 
Example #10
Source File: STUtils.java    From Fatigue-Detection with MIT License
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
	
	TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
	
	Rect rect = new Rect(0, 0, width, height);
	
	try {
		Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
		Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
    	byte[] imageData = nv21;
    	if (mRS == null) {
    		mRS = RenderScript.create(context);
    		mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
    		Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
    		tb.setX(width);
    		tb.setY(height);
    		tb.setMipmaps(false);
    		tb.setYuvFormat(ImageFormat.NV21);
    		ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
    		timings.addSplit("Prepare for ain");
    		Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
    		tb2.setX(width);
    		tb2.setY(height);
    		tb2.setMipmaps(false);
    		aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED); // combine usage flags with bitwise OR
    		timings.addSplit("Prepare for aOut");
    		bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    		timings.addSplit("Create Bitmap");
		}
    	ain.copyFrom(imageData);
		timings.addSplit("ain copyFrom");
		mYuvToRgb.setInput(ain);
		timings.addSplit("setInput ain");
		mYuvToRgb.forEach(aOut);
		timings.addSplit("NV21 to ARGB forEach");
		aOut.copyTo(bitmap);
		timings.addSplit("Allocation to Bitmap");
	} catch (Exception e) {
		YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
		timings.addSplit("NV21 bytes to YuvImage");
		
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
	}
	
   	timings.dumpToLog();
   	return bitmap;
}
 
Example #11
Source File: BitmapUtil.java    From Silence with GNU General Public License v3.0
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example #12
Source File: ResultProcessor.java    From libsoftwaresync with Apache License 2.0
private static boolean saveJpg(YuvImage src, int quality, File file) {
  long t0 = System.nanoTime();
  try (FileOutputStream outputStream = new FileOutputStream(file)) {
    Rect rect = new Rect(0, 0, src.getWidth(), src.getHeight());
    boolean ok = src.compressToJpeg(rect, quality, outputStream);
    if (!ok) {
      // TODO(jiawen,samansari): Toast.
      Log.w(TAG, "Error saving JPEG to: " + file.getAbsolutePath());
    }
    long t1 = System.nanoTime();
    Log.i(TAG, String.format("saveJpg took %f ms.", (t1 - t0) * 1e-6f));
    return ok;
  } catch (IOException e) {
    // TODO(jiawen,samansari): Toast.
    Log.w(TAG, "Error saving JPEG image to: " + file.getAbsolutePath());
    return false;
  }
}
 
Example #13
Source File: ResultProcessor.java    From libsoftwaresync with Apache License 2.0
private boolean saveJpg(YuvImage yuvImage, File jpgFile) {
  // Save JPEG and also add to the photos gallery by inserting into MediaStore.
  long t0 = System.nanoTime();
  if (saveJpg(yuvImage, jpgQuality, jpgFile)) {
    try {
      MediaStore.Images.Media.insertImage(
          context.getContentResolver(),
          jpgFile.getAbsolutePath(),
          jpgFile.getName(),
          "Full path: " + jpgFile.getAbsolutePath());
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Unable to find file to link in media store.");
    }
    long t1 = System.nanoTime();
    Log.i(TAG, String.format("Saving JPG to disk took %f ms.", (t1 - t0) * 1e-6f));
    context.notifyCaptured(jpgFile.getName());
    return true;
  }
  return false;
}
 
Example #14
Source File: CameraActivity.java    From Android-MobileFaceNet-MTCNN-FaceAntiSpoofing with MIT License
private Bitmap convertBitmap(byte[] data, Camera camera) {
    Camera.Size previewSize = camera.getParameters().getPreviewSize();
    YuvImage yuvimage = new YuvImage(
            data,
            camera.getParameters().getPreviewFormat(),
            previewSize.width,
            previewSize.height,
            null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 100, baos);
    byte[] rawImage = baos.toByteArray();
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length, options);
    Matrix m = new Matrix();
    // On my phone the image only displays correctly after this rotation; if it is wrong on your device, adjust the angle yourself.
    // Do not hard-code this in a real project: the rotation should be computed from the camera orientation (see the sketch after this example); that is omitted here for brevity.
    m.setRotate(-displayDegree);
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, true);
}
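
The comment above points out that displayDegree is hard-coded. Below is a sketch of how the preview rotation could instead be derived from the camera sensor orientation and the current display rotation, following the approach documented for Camera.setDisplayOrientation(); the activity and cameraId parameters are assumptions of this sketch, not part of the original example.

import android.app.Activity;
import android.hardware.Camera;
import android.view.Surface;

public static int computeDisplayDegree(Activity activity, int cameraId) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);

    // Current rotation of the screen relative to its natural orientation.
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:   degrees = 0;   break;
        case Surface.ROTATION_90:  degrees = 90;  break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }

    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        int result = (info.orientation + degrees) % 360;
        return (360 - result) % 360;   // compensate for the front camera mirror
    }
    return (info.orientation - degrees + 360) % 360;
}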
 
Example #15
Source File: AndroidCameraOutput.java    From sensorhub with Mozilla Public License 2.0
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
    long timeStamp = SystemClock.elapsedRealtimeNanos();
    
    // select current buffer
    YuvImage yuvImg = (data == imgBuf1) ? yuvImg1 : yuvImg2;
    
    // compress as JPEG
    jpegBuf.reset();
    yuvImg.compressToJpeg(imgArea, 90, jpegBuf);
    
    // release buffer for next frame
    camera.addCallbackBuffer(data);
    
    // generate new data record
    DataBlock newRecord;
    if (latestRecord == null)
        newRecord = dataStruct.createDataBlock();
    else
        newRecord = latestRecord.renew();
    
    // set time stamp
    double samplingTime = getJulianTimeStamp(timeStamp);
    newRecord.setDoubleValue(0, samplingTime);
    
    // set encoded data
    AbstractDataBlock frameData = ((DataBlockMixed)newRecord).getUnderlyingObject()[1];
    frameData.setUnderlyingObject(jpegBuf.toByteArray());
    
    // send event
    latestRecord = newRecord;
    latestRecordTime = System.currentTimeMillis();
    eventHandler.publishEvent(new SensorDataEvent(latestRecordTime, AndroidCameraOutput.this, latestRecord));          
}
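
The callback above relies on two pre-allocated preview buffers (imgBuf1, imgBuf2) and matching YuvImage wrappers created ahead of time. Below is a sketch of how that double-buffered setup might look; the field names mirror the ones used in the callback, but the project's actual initialization may differ.

void initPreviewBuffers(Camera camera)
{
    Camera.Size size = camera.getParameters().getPreviewSize();
    int bufSize = size.width * size.height
            * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;

    // Two reusable NV21 buffers, each wrapped once in a YuvImage.
    imgBuf1 = new byte[bufSize];
    imgBuf2 = new byte[bufSize];
    yuvImg1 = new YuvImage(imgBuf1, ImageFormat.NV21, size.width, size.height, null);
    yuvImg2 = new YuvImage(imgBuf2, ImageFormat.NV21, size.width, size.height, null);
    imgArea = new Rect(0, 0, size.width, size.height);
    jpegBuf = new ByteArrayOutputStream();

    // Hand both buffers to the camera and request buffered preview callbacks.
    camera.addCallbackBuffer(imgBuf1);
    camera.addCallbackBuffer(imgBuf2);
    camera.setPreviewCallbackWithBuffer(this);
}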
 
Example #16
Source File: ImageDecoder.java    From FastBarcodeScanner with Apache License 2.0
private static Bitmap NV21ToBitmap(byte[] nv21Bytes, int width, int height)
{
    YuvImage yuv = new YuvImage(nv21Bytes, ImageFormat.NV21, width, height, null);

    // pWidth and pHeight define the size of the preview Frame
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 50, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();

    Bitmap bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);

    return bitmap;
}
 
Example #17
Source File: CameraActivity.java    From cordova-plugin-camera-preview with MIT License
public void takeSnapshot(final int quality) {
  mCamera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] bytes, Camera camera) {
      try {
        Camera.Parameters parameters = camera.getParameters();
        Camera.Size size = parameters.getPreviewSize();
        int orientation = mPreview.getDisplayOrientation();
        if (mPreview.getCameraFacing() == Camera.CameraInfo.CAMERA_FACING_FRONT) {
          bytes = rotateNV21(bytes, size.width, size.height, (360 - orientation) % 360);
        } else {
          bytes = rotateNV21(bytes, size.width, size.height, orientation);
        }
        // switch width/height when rotating 90/270 deg
        Rect rect = orientation == 90 || orientation == 270 ?
          new Rect(0, 0, size.height, size.width) :
          new Rect(0, 0, size.width, size.height);
        YuvImage yuvImage = new YuvImage(bytes, parameters.getPreviewFormat(), rect.width(), rect.height(), null);
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, quality, byteArrayOutputStream);
        byte[] data = byteArrayOutputStream.toByteArray();
        byteArrayOutputStream.close();
        eventListener.onSnapshotTaken(Base64.encodeToString(data, Base64.NO_WRAP));
      } catch (IOException e) {
        Log.d(TAG, "CameraPreview IOException");
        eventListener.onSnapshotTakenError("IO Error");
      } finally {
        mCamera.setPreviewCallback(null);
      }
    }
  });
}
 
Example #18
Source File: VideoStreamingThread.java    From faceswap with Apache License 2.0
@Override
protected byte[] doInBackground(Object... objs) {
    byte[] frame = (byte[]) objs[0];
    Parameters parameters = (Parameters) objs[1];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();

    int datasize = 0;
    cameraImageSize = parameters.getPreviewSize();
    YuvImage image = new YuvImage(frame, parameters.getPreviewFormat(), cameraImageSize.width,
            cameraImageSize.height, null);
    ByteArrayOutputStream tmpBuffer = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, tmpBuffer);
    Log.d(LOG_TAG, "compression took: "
            + (System.currentTimeMillis()-frame_currentUpdateTime));
    synchronized (frameLock) {
        frameBuffer = tmpBuffer.toByteArray();
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        frameLock.notify();
    }
    datasize = tmpBuffer.size();
    frame_count++;
    frame_totalsize += datasize;
    if (frame_count % 50 == 0) {
        Log.d(LOG_TAG, "(IMG)\t" +
                "BW: " + 8.0 * frame_totalsize / (frame_currentUpdateTime - frame_firstUpdateTime) / 1000 +
                " Mbps\tCurrent FPS: " + 8.0 * datasize / (frame_currentUpdateTime - frame_prevUpdateTime) / 1000 + " Mbps\t" +
                "FPS: " + 1000.0 * frame_count / (frame_currentUpdateTime - frame_firstUpdateTime));
    }
    frame_prevUpdateTime = frame_currentUpdateTime;
    return tmpBuffer.toByteArray();
}
 
Example #19
Source File: CompressedImagePublisher.java    From rosjava_android_template with Apache License 2.0
@Override
public void onNewRawImage(byte[] data, Size size) {
  Preconditions.checkNotNull(data);
  Preconditions.checkNotNull(size);
  if (data != rawImageBuffer || !size.equals(rawImageSize)) {
    rawImageBuffer = data;
    rawImageSize = size;
    yuvImage = new YuvImage(rawImageBuffer, ImageFormat.NV21, size.width, size.height, null);
    rect = new Rect(0, 0, size.width, size.height);
  }

  Time currentTime = connectedNode.getCurrentTime();
  String frameId = "camera";

  sensor_msgs.CompressedImage image = imagePublisher.newMessage();
  image.setFormat("jpeg");
  image.getHeader().setStamp(currentTime);
  image.getHeader().setFrameId(frameId);

  Preconditions.checkState(yuvImage.compressToJpeg(rect, 20, stream));
  image.setData(stream.buffer().copy());
  stream.buffer().clear();

  imagePublisher.publish(image);

  sensor_msgs.CameraInfo cameraInfo = cameraInfoPublisher.newMessage();
  cameraInfo.getHeader().setStamp(currentTime);
  cameraInfo.getHeader().setFrameId(frameId);

  cameraInfo.setWidth(size.width);
  cameraInfo.setHeight(size.height);
  cameraInfoPublisher.publish(cameraInfo);
}
 
Example #20
Source File: FrameCatcher.java    From LiveMultimedia with Apache License 2.0
/**********************************************************************
 * getBitmapImageFromYUV returns a Bitmap from a camera preview frame
 * captured in NV21 (YUV) format. Note that image formats and video
 * formats are not the same thing.
 *******************************************************************/
public static Bitmap getBitmapImageFromYUV(byte[] data, int width, int height) {
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, width, height), 80, baos);
    byte[] jdata = baos.toByteArray();
    BitmapFactory.Options bitmapFactoryOptions = new BitmapFactory.Options();
    bitmapFactoryOptions.inPreferredConfig = Bitmap.Config.RGB_565;
    return BitmapFactory.decodeByteArray(jdata, 0, jdata.length, bitmapFactoryOptions);
}
 
Example #21
Source File: ColorFormatUtil.java    From MultiMediaSample with Apache License 2.0
public static Bitmap convertYUV420sp2RGB(byte[] yuv, int width, int height) {
    // Convert YUV420sp (NV21) data to RGB; typically takes 5-60 ms
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    byte[] imageBytes = out.toByteArray();
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
 
Example #22
Source File: CameraStreamer.java    From peepers with Apache License 2.0
private void sendPreviewFrame(final byte[] data, final Camera camera, final long timestamp)
{
     // Calculate the timestamp
     final long MILLI_PER_SECOND = 1000L;
     final long timestampSeconds = timestamp / MILLI_PER_SECOND;

     // Update and log the frame rate
     final long LOGS_PER_FRAME = 10L;
     mNumFrames++;
     if (mLastTimestamp != Long.MIN_VALUE)
     {
         mAverageSpf.update(timestampSeconds - mLastTimestamp);
         if (mNumFrames % LOGS_PER_FRAME == LOGS_PER_FRAME - 1)
         {
             Log.d(TAG, "FPS: " + 1.0 / mAverageSpf.getAverage());
         } // if
     } // if

     mLastTimestamp = timestampSeconds;

     // Create JPEG
     final YuvImage image = new YuvImage(data, mPreviewFormat, mPreviewWidth, mPreviewHeight,
             null /* strides */);
     image.compressToJpeg(mPreviewRect, mJpegQuality, mJpegOutputStream);

     mMJpegHttpStreamer.streamJpeg(mJpegOutputStream.getBuffer(), mJpegOutputStream.getLength(),
             timestamp);

     // Clean up
     mJpegOutputStream.seek(0);
     // XXX: I believe that this is thread-safe because we're not
     // calling methods in other threads. I might be wrong, the
     // documentation is not clear.
     camera.addCallbackBuffer(data);
}
 
Example #23
Source File: Nv21ImageTest.java    From easyrs with MIT License
@NonNull
private Bitmap getConvertedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
 
Example #24
Source File: YuvToRgbTest.java    From easyrs with MIT License
@NonNull
private Bitmap getExpectedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
 
Example #25
Source File: CameraAnalyzer.java    From LPR with Apache License 2.0
private Mat ImagetoMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] plane = imageProxy.getPlanes();
    ByteBuffer yBuffer = plane[0].getBuffer();  // Y
    ByteBuffer uBuffer = plane[1].getBuffer();  // U
    ByteBuffer vBuffer = plane[2].getBuffer();  // V

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];

    // NV21 stores chroma as interleaved V then U, so copy the V plane before the U plane
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();
        Mat mat = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC4);
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
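
Note that concatenating the planes as above only yields valid NV21 when the chroma planes are tightly packed (pixel stride 1 and row stride equal to the width), which YUV_420_888 does not guarantee. A stride-aware conversion sketch is shown below; the method name yuv420888ToNv21 is illustrative, and it assumes both chroma planes report the same strides, which is the usual case.

private static byte[] yuv420888ToNv21(ImageProxy image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];

    // Copy the Y plane row by row, honoring its row stride.
    ImageProxy.PlaneProxy yPlane = image.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }

    // Interleave V then U (NV21 order), honoring row and pixel strides.
    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
    int chromaRowStride = image.getPlanes()[1].getRowStride();
    int chromaPixelStride = image.getPlanes()[1].getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int offset = row * chromaRowStride + col * chromaPixelStride;
            nv21[pos++] = vBuffer.get(offset);
            nv21[pos++] = uBuffer.get(offset);
        }
    }
    return nv21;
}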
 
Example #26
Source File: ProcessStillTask.java    From camerakit-android with MIT License
@Override
public void run() {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;
    byte[] rotatedData = new Rotation(data, width, height, rotation).getYuv();

    int postWidth;
    int postHeight;

    switch (rotation) {
        case 90:
        case 270:
            postWidth = height;
            postHeight = width;
            break;

        case 0:
        case 180:
        default:
            postWidth = width;
            postHeight = height;
            break;
    }

    YuvImage yuv = new YuvImage(rotatedData, parameters.getPreviewFormat(), postWidth, postHeight, null);

    onStillProcessedListener.onStillProcessed(yuv);
}
 
Example #27
Source File: SnapshotVideoRenderer.java    From video-quickstart-android with MIT License
private YuvImage i420ToYuvImage(ByteBuffer[] yuvPlanes,
                                int[] yuvStrides,
                                int width,
                                int height) {
    if (yuvStrides[0] != width) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[1] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[2] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }

    byte[] bytes = new byte[yuvStrides[0] * height +
            yuvStrides[1] * height / 2 +
            yuvStrides[2] * height / 2];
    ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, width * height);
    copyPlane(yuvPlanes[0], tmp);

    byte[] tmpBytes = new byte[width / 2 * height / 2];
    tmp = ByteBuffer.wrap(tmpBytes, 0, width / 2 * height / 2);

    copyPlane(yuvPlanes[2], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2]
                    = tmpBytes[row * width / 2 + col];
        }
    }
    copyPlane(yuvPlanes[1], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2 + 1] =
                    tmpBytes[row * width / 2 + col];
        }
    }
    return new YuvImage(bytes, NV21, width, height, null);
}
 
Example #28
Source File: ICamera.java    From MegviiFacepp-Android-SDK with Apache License 2.0
public Bitmap getBitMap(byte[] data, Camera camera, boolean mIsFrontalCamera) {
	int width = camera.getParameters().getPreviewSize().width;
	int height = camera.getParameters().getPreviewSize().height;
	YuvImage yuvImage = new YuvImage(data, camera.getParameters()
			.getPreviewFormat(), width, height, null);
	ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
	yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80,
			byteArrayOutputStream);
	byte[] jpegData = byteArrayOutputStream.toByteArray();
	// Decode the captured JPEG data into a Bitmap
	Bitmap tmpBitmap = BitmapFactory.decodeByteArray(jpegData, 0,
			jpegData.length);
	Matrix matrix = new Matrix();
	matrix.reset();
	if (mIsFrontalCamera) {
		matrix.setRotate(-90);
	} else {
		matrix.setRotate(90);
	}
	tmpBitmap = Bitmap.createBitmap(tmpBitmap, 0, 0, tmpBitmap.getWidth(),
			tmpBitmap.getHeight(), matrix, true);
	tmpBitmap = tmpBitmap.copy(Bitmap.Config.ARGB_8888, true);

	// Scale the bitmap down so its longer edge is at most 800 px
	int longerEdge = Math.max(tmpBitmap.getWidth(), tmpBitmap.getHeight());

	float scale = longerEdge / 800.0f;

	if (scale > 1) {
		tmpBitmap = Bitmap.createScaledBitmap(tmpBitmap,
				(int) (tmpBitmap.getWidth() / scale),
				(int) (tmpBitmap.getHeight() / scale), false);
	}
	return tmpBitmap;
}
 
Example #29
Source File: ConUtil.java    From MegviiFacepp-Android-SDK with Apache License 2.0
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
	Camera.Size size = _camera.getParameters().getPreviewSize();
	try {
		YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
		if (image != null) {
			ByteArrayOutputStream stream = new ByteArrayOutputStream();
			image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
			Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
			stream.close();
			return bmp;
		}
	} catch (Exception ex) {
	}
	return null;
}
 
Example #30
Source File: MotionDetector.java    From haven with GNU General Public License v3.0
public static Bitmap convertImage (byte[] nv21bytearray, int width, int height)
{
	YuvImage yuvImage = new YuvImage(nv21bytearray, ImageFormat.NV21, width, height, null);
	ByteArrayOutputStream os = new ByteArrayOutputStream();
	yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, os);
	byte[] jpegByteArray = os.toByteArray();
	Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
	return bitmap;
}