Java Code Examples for android.hardware.Camera.Size

The following are top-voted examples showing how to use android.hardware.Camera.Size. The examples are extracted from open source projects.
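Before the examples, a quick orientation: a Camera.Size is normally obtained from Camera.Parameters, and its width and height are plain public int fields. The sketch below (not taken from any of the projects listed here) simply logs the supported preview sizes of the first camera; it assumes the CAMERA permission has already been granted.

import android.hardware.Camera;
import android.util.Log;
import java.util.List;

public class CameraSizeDemo {
    private static final String TAG = "CameraSizeDemo";

    @SuppressWarnings("deprecation")
    public static void logSupportedPreviewSizes() {
        Camera camera = Camera.open(0);   // may throw if the camera is unavailable
        try {
            List<Camera.Size> sizes = camera.getParameters().getSupportedPreviewSizes();
            for (Camera.Size size : sizes) {
                Log.i(TAG, "Supported preview size: " + size.width + "x" + size.height);
            }
        } finally {
            camera.release();             // always release the camera when done
        }
    }
}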
Example 1
Project: react-native-webrtc   File: WebRTCModule.java   (7 votes)
@SuppressWarnings("deprecation")
public WritableMap getCameraInfo(int index) {
    CameraInfo info = new CameraInfo();

    Size size = null;
    try {
        Camera.getCameraInfo(index, info);
        Camera camera = Camera.open(index);
        size = getMaxSupportedVideoSize(camera);
        camera.release();
    } catch (Exception e) {
        Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e);

        return null;
    }
    WritableMap params = Arguments.createMap();
    String facing = info.facing == CameraInfo.CAMERA_FACING_FRONT ? "front" : "back";
    params.putString("label", "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation);
    params.putString("id", "" + index);
    params.putString("facing", facing);
    params.putString("kind", "video");
    params.putString("maxWidth", String.valueOf(size.width));
    params.putString("maxHeight", String.valueOf(size.height));

    return params;
}
 
Example 2
Project: QRCodeScanner   File: BarcodeCameraConfig.java   (7 votes)
public BarcodeCameraConfig configPreviewSize2(int viewWidth, int viewHeight) {
    Parameters params = camera.getParameters();
    List<Size> sizes = params.getSupportedPreviewSizes();
    if (sizes == null || sizes.size() <= 0) {
        return this;
    }

    Size bestSize = null;
    int diff = Integer.MAX_VALUE;

    for (Size tmpSize : sizes) {
        int newDiff = Math.abs(tmpSize.width - viewWidth) + Math.abs(tmpSize.height - viewHeight);
        if (newDiff == 0) {
            bestSize = tmpSize;
            break;
        } else if (newDiff < diff) {
            bestSize = tmpSize;
            diff = newDiff;
        }
    }
    params.setPreviewSize(bestSize.width, bestSize.height);
    camera.setParameters(params);
    return this;
}
 
Example 3
Project: react-native-webrtc   File: WebRTCModule.java   (6 votes)
@SuppressWarnings("deprecation")
private Size getMaxSupportedVideoSize(Camera camera) {

    List<Camera.Size> sizes;

    if (camera.getParameters().getSupportedVideoSizes() != null) {
        sizes = camera.getParameters().getSupportedVideoSizes();
    } else {
        // Video sizes may be null, which indicates that all the supported
        // preview sizes are supported for video recording.
        sizes = camera.getParameters().getSupportedPreviewSizes();
    }

    int maxWidth = sizes.get(0).width;
    int maxHeight = sizes.get(0).height;

    for (Camera.Size size : sizes) {
        if (size.width > maxWidth && size.height > maxHeight) {
            maxWidth = size.width;
            maxHeight = size.height;
        }
    }

    // Camera.Size is a non-static inner class, so it has to be created through the Camera instance.
    return camera.new Size(maxWidth, maxHeight);
}
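Because Camera.Size is a non-static inner class of Camera, the return statement above has to go through the camera instance. An equivalent variant (a sketch, not taken from the project) avoids constructing a new Size altogether by returning the largest element of the list:

@SuppressWarnings("deprecation")
private Camera.Size getLargestVideoSize(Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    List<Camera.Size> sizes = parameters.getSupportedVideoSizes();
    if (sizes == null) {
        // A null list means every supported preview size can also be used for video.
        sizes = parameters.getSupportedPreviewSizes();
    }
    Camera.Size largest = sizes.get(0);
    for (Camera.Size size : sizes) {
        if (size.width * size.height > largest.width * largest.height) {
            largest = size;   // keep the size with the largest pixel area
        }
    }
    return largest;
}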
 
Example 4
Project: FaceRecognition   File: CamParaUtil.java   (6 votes)
public Size getPropPreviewSize(List<Size> list, float th, int minWidth){
	Collections.sort(list, sizeComparator);

	int i = 0;
	for(Size s:list){
		if((s.width >= minWidth) && equalRate(s, th)){
			Log.i(TAG, "PreviewSize:w = " + s.width + "h = " + s.height);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
 
Example 5
Project: FaceRecognition   File: CamParaUtil.java   (6 votes)
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
	Collections.sort(list, sizeComparator);

	int i = 0;
	for(Size s:list){
		if((s.width >= minWidth) && equalRate(s, th)){
			Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
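Both CamParaUtil methods above rely on a sizeComparator and an equalRate(Size, float) helper that are not part of the snippet. A hedged sketch of what such helpers commonly look like; the names and the tolerance value are assumptions, not the project's verified code:

// Assumed helpers; requires: import java.util.Comparator;
private static final float RATE_TOLERANCE = 0.2f;

// Sort sizes by ascending width so the first match is the smallest acceptable one.
private final Comparator<Camera.Size> sizeComparator = new Comparator<Camera.Size>() {
    @Override
    public int compare(Camera.Size lhs, Camera.Size rhs) {
        return Integer.compare(lhs.width, rhs.width);
    }
};

// True when the size's aspect ratio is close enough to the requested ratio.
private boolean equalRate(Camera.Size s, float rate) {
    float r = (float) s.width / (float) s.height;
    return Math.abs(r - rate) <= RATE_TOLERANCE;
}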
 
Example 6
Project: WeiXinRecordedDemo   File: MediaRecorderBase.java   (6 votes)
/** Set up the preview callback. */
protected void setPreviewCallback() {
	Size size = mParameters.getPreviewSize();
	if (size != null) {
		PixelFormat pf = new PixelFormat();
		PixelFormat.getPixelFormatInfo(mParameters.getPreviewFormat(), pf);
		int buffSize = size.width * size.height * pf.bitsPerPixel / 8;
		try {
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.setPreviewCallbackWithBuffer(this);
		} catch (OutOfMemoryError e) {
			Log.e("Yixia", "startPreview...setPreviewCallback...", e);
		}
		Log.e("Yixia", "startPreview...setPreviewCallbackWithBuffer...width:" + size.width + " height:" + size.height);
	} else {
		camera.setPreviewCallback(this);
	}
}
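Because setPreviewCallbackWithBuffer is used, every processed buffer has to be handed back with addCallbackBuffer, or the preview stalls once the three queued buffers are used up. A minimal sketch of the matching callback, assuming the enclosing class implements Camera.PreviewCallback (as the use of this above suggests):

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    try {
        // ... process the preview frame in 'data' (e.g. encode or analyze it) ...
    } finally {
        // Return the buffer to the camera so it can be reused for the next frame.
        camera.addCallbackBuffer(data);
    }
}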
 
Example 7
Project: 19porn   File: OpenGlUtils.java   (6 votes)
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
 
Example 8
Project: PeSanKita-android   File: CameraView.java   (6 votes)
private Rect getCroppedRect(Size cameraPreviewSize, Rect visibleRect, int rotation) {
  final int previewWidth  = cameraPreviewSize.width;
  final int previewHeight = cameraPreviewSize.height;

  if (rotation % 180 > 0) rotateRect(visibleRect);

  float scale = (float) previewWidth / visibleRect.width();
  if (visibleRect.height() * scale > previewHeight) {
    scale = (float) previewHeight / visibleRect.height();
  }
  final float newWidth  = visibleRect.width()  * scale;
  final float newHeight = visibleRect.height() * scale;
  final float centerX   = (VERSION.SDK_INT < 14 || isTroublemaker()) ? previewWidth - newWidth / 2 : previewWidth / 2;
  final float centerY   = previewHeight / 2;

  visibleRect.set((int) (centerX - newWidth  / 2),
                  (int) (centerY - newHeight / 2),
                  (int) (centerX + newWidth  / 2),
                  (int) (centerY + newHeight / 2));

  if (rotation % 180 > 0) rotateRect(visibleRect);
  return visibleRect;
}
 
Example 9
Project: PaoMovie   File: MagicCameraDisplay.java   (6 votes)
private void setUpCamera() {
    mGLSurfaceView.queueEvent(new Runnable() {
        @Override
        public void run() {
            if (mTextureId == OpenGLUtils.NO_TEXTURE) {
                mTextureId = OpenGLUtils.getExternalOESTextureID();
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setOnFrameAvailableListener(mOnFrameAvailableListener);
            }
            Size size = CameraEngine.getPreviewSize();
            int orientation = CameraEngine.getOrientation();
            if (orientation == 90 || orientation == 270) {
                mImageWidth = size.height;
                mImageHeight = size.width;
            } else {
                mImageWidth = size.width;
                mImageHeight = size.height;
            }
            mCameraInputFilter.onOutputSizeChanged(mImageWidth, mImageHeight);
            CameraEngine.startPreview(mSurfaceTexture);
        }
    });
}
 
Example 10
Project: mao-android   File: CameraRenderer.java   (6 votes)
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    log.d("onPreviewFrame");
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                YuvDecoder.YUVtoRBGA(data, previewSize.width, previewSize.height, mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
 
Example 11
Project: mao-android   File: OpenGlUtils.java   (6 votes)
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
 
Example 12
Project: Cable-Android   File: CameraView.java   (6 votes)
private Rect getCroppedRect(Size cameraPreviewSize, Rect visibleRect, int rotation) {
  final int previewWidth  = cameraPreviewSize.width;
  final int previewHeight = cameraPreviewSize.height;

  if (rotation % 180 > 0) rotateRect(visibleRect);

  float scale = (float) previewWidth / visibleRect.width();
  if (visibleRect.height() * scale > previewHeight) {
    scale = (float) previewHeight / visibleRect.height();
  }
  final float newWidth  = visibleRect.width()  * scale;
  final float newHeight = visibleRect.height() * scale;
  final float centerX   = (VERSION.SDK_INT < 14 || isTroublemaker()) ? previewWidth - newWidth / 2 : previewWidth / 2;
  final float centerY   = previewHeight / 2;

  visibleRect.set((int) (centerX - newWidth  / 2),
                  (int) (centerY - newHeight / 2),
                  (int) (centerX + newWidth  / 2),
                  (int) (centerY + newHeight / 2));

  if (rotation % 180 > 0) rotateRect(visibleRect);
  return visibleRect;
}
 
Example 13
Project: meipai-Android   File: MediaRecorderBase.java   (6 votes)
/** Set up the preview callback. */
protected void setPreviewCallback() {
	Size size = mParameters.getPreviewSize();
	if (size != null) {
		PixelFormat pf = new PixelFormat();
		PixelFormat.getPixelFormatInfo(mParameters.getPreviewFormat(), pf);
		int buffSize = size.width * size.height * pf.bitsPerPixel / 8;
		try {
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.addCallbackBuffer(new byte[buffSize]);
			camera.setPreviewCallbackWithBuffer(this);
		} catch (OutOfMemoryError e) {
			Log.e("Yixia", "startPreview...setPreviewCallback...", e);
		}
		Log.e("Yixia", "startPreview...setPreviewCallbackWithBuffer...width:" + size.width + " height:" + size.height);
	} else {
		camera.setPreviewCallback(this);
	}
}
 
Example 14
Project: live_master   File: CamParaUtil.java   (6 votes)
public  Size getPreviewSize(List<Camera.Size> list, int width,float th){
	Collections.sort(list, sizeComparator);

	float rate = 1.77f;
	if (Math.abs(th-1.33f)<Math.abs(th-1.77f)){
		rate = 1.33f;
	}else{
		rate = 1.77f;
	}
	int i = 0;
	for(Size s:list){
		if((s.height > width) && equalRate(s, rate)){
			Log.i(TAG, "Final preview size: w = " + s.width + ", h = " + s.height + ", rate = " + rate);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
 
Example 15
Project: live_master   File: CamParaUtil.java   (6 votes)
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
	Collections.sort(list, sizeComparator);

	int i = 0;
	for(Size s:list){
		if((s.width >= minWidth) && equalRate(s, th)){
			Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
 
Example 16
Project: TAG   File: GPUImageRenderer.java   (6 votes)
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
                        mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
 
Example 17
Project: TAG   File: OpenGlUtils.java   (6 votes)
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
 
Example 18
Project: PreRect   File: CameraConfigurationManager.java   (6 votes)
public void setDesiredCameraParameters(Camera camera, int cameraId) {
	Camera.Parameters parameters = camera.getParameters();
	parameters.setPreviewFormat(ImageFormat.NV21);
	parameters.setPreviewSize(cameraResolution.x, cameraResolution.y);
	setZoom(parameters);

	camera.setDisplayOrientation(getDisplayOrientation(cameraId));

	// Set the picture size
	if (this.pictureSize == null) {
		WindowManager manager = (WindowManager) mContext
				.getSystemService(Context.WINDOW_SERVICE);
		Display display = manager.getDefaultDisplay();
		List<Size> pictureSizes = parameters.getSupportedPictureSizes();
		this.setPicutreSize(pictureSizes, display.getWidth(),
				display.getHeight());
	}
	try {
		parameters.setPictureSize(this.pictureSize.width,
				this.pictureSize.height);
	} catch (Exception e) {
		e.printStackTrace();
	}
	camera.setParameters(parameters);
}
 
Example 19
Project: ROLF-EV3   File: VideoQuality.java   (6 votes)
/** 
 * Checks if the requested resolution is supported by the camera.
 * If not, it modifies it by supported parameters. 
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
	VideoQuality v = quality.clone();
	int minDist = Integer.MAX_VALUE;
	String supportedSizesStr = "Supported resolutions: ";
	List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
	for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
		Size size = it.next();
		supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
		int dist = Math.abs(quality.resX - size.width);
		if (dist<minDist) {
			minDist = dist;
			v.resX = size.width;
			v.resY = size.height;
		}
	}
	Log.v(TAG, supportedSizesStr);
	if (quality.resX != v.resX || quality.resY != v.resY) {
		Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
	}
	
	return v;
}
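VideoQuality is a class defined by this project; the sketch below assumes it exposes the public resX and resY fields used above. A typical way to apply the result to the camera might look like this (not taken from the project):

@SuppressWarnings("deprecation")
private void applyClosestResolution(Camera camera, VideoQuality requested) {
    Camera.Parameters parameters = camera.getParameters();
    VideoQuality closest = VideoQuality.determineClosestSupportedResolution(parameters, requested);
    // Use the supported resolution that is nearest to the requested one.
    parameters.setPreviewSize(closest.resX, closest.resY);
    camera.setParameters(parameters);
}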
 
Example 20
Project: VideoCamera   File: CameraWrapper.java   (6 votes)
@TargetApi(VERSION_CODES.HONEYCOMB)
protected List<Size> getSupportedVideoSizes(int currentSdkInt) {
    Parameters params = mNativeCamera.getNativeCameraParameters();

    List<Size> supportedVideoSizes;
    if (currentSdkInt < Build.VERSION_CODES.HONEYCOMB) {
        CLog.e(CLog.CAMERA, "Using supportedPreviewSizes iso supportedVideoSizes due to API restriction");
        supportedVideoSizes = params.getSupportedPreviewSizes();
    } else if (params.getSupportedVideoSizes() == null) {
        CLog.e(CLog.CAMERA, "Using supportedPreviewSizes because supportedVideoSizes is null");
        supportedVideoSizes = params.getSupportedPreviewSizes();
    } else {
        supportedVideoSizes = params.getSupportedVideoSizes();
    }

    return supportedVideoSizes;
}
 
Example 21
Project: AndroidCamera   File: GPUImageRenderer.java   (6 votes)
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
                        mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
 
Example 22
Project: AndroidCamera   File: OpenGlUtils.java   (6 votes)
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
 
Example 23
Project: VisiSynth   File: Preview.java   (6 votes)
public void onPreviewFrame(byte[] data, Camera camera){
    Log.d("TAG", "frame1 "+data.length);
    Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
    YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);

    // Convert the YUV data to a 2D intensity matrix for analysis
    final double [][] imgmat = imgpro.BufferedYUVImage2Mat(yuvimage.getYuvData(),
            yuvimage.getWidth(), yuvimage.getHeight(), 640, 480);

    List<Double> ld = imgpro.AnalyzeMat(imgmat, 0.6);

    String logline = "points:";
    for(Double p : ld)
        logline += " " + (1-p);
    Log.d("TAG", logline);
    double [] f = new double[ld.size()];
    for(int i = 0; i < f.length; i ++)
        f[i] = Math.pow(2.0, ld.get(i) * 2) * 440.0;
    play(f);
}
 
Example 24
Project: CommunityService   File: CameraConfigurationManager.java   (6 votes)
/**
 * Sets the camera up to take preview images which are used for both preview and decoding.
 * We detect the preview format here so that buildLuminanceSource() can build an appropriate
 * LuminanceSource subclass. In the future we may want to force YUV420SP as it's the smallest,
 * and the planar Y can be used for barcode scanning without a copy in some cases.
 */
void setDesiredCameraParameters(Camera camera) {
	Camera.Parameters parameters = camera.getParameters();
	List<Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
	int position =0;
	if(supportedPreviewSizes.size()>2){
		position=supportedPreviewSizes.size()/2+1;//supportedPreviewSizes.get();
	}else {
		position=supportedPreviewSizes.size()/2;
	}

	int width = supportedPreviewSizes.get(position).width;
	int height = supportedPreviewSizes.get(position).height;
	Log.d(TAG, "Setting preview size: " + width + "x" + height);
	camera.setDisplayOrientation(90);
	cameraResolution.x = width;
	cameraResolution.y = height;
	parameters.setPreviewSize(width, height);
	setFlash(parameters);
	setZoom(parameters);
	//setSharpness(parameters);
	camera.setParameters(parameters);
}
 
Example 25
Project: miku   File: GPUImageRenderer.java   (6 votes)
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
  final Size previewSize = camera.getParameters().getPreviewSize();
  if (mGLRgbBuffer == null) {
    mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
  }
  if (mRunOnDraw.isEmpty()) {
    runOnDraw(new Runnable() {
      @Override
      public void run() {
        GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
            mGLRgbBuffer.array());
        mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
        camera.addCallbackBuffer(data);

        if (mImageWidth != previewSize.width) {
          mImageWidth = previewSize.width;
          mImageHeight = previewSize.height;
          adjustImageScaling();
        }
      }
    });
  }
}
 
Example 26
Project: ArCamera   File: CamParaUtil.java   (6 votes)
public  Size getPropPreviewSize(List<Size> list, float th, int minWidth){
	Collections.sort(list, sizeComparator);

	int i = 0;
	for(Size s:list){
		if((s.width >= minWidth) && equalRate(s, th)){
			Log.i(TAG, "PreviewSize:w = " + s.width + "h = " + s.height);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
 
Example 27
Project: ArCamera   File: CamParaUtil.java   (6 votes)
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
	Collections.sort(list, sizeComparator);

	int i = 0;
	for(Size s:list){
		if((s.width >= minWidth) && equalRate(s, th)){
			Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
			break;
		}
		i++;
	}
	if(i == list.size()){
		i = 0; // no matching size was found, fall back to the smallest one
	}
	return list.get(i);
}
 
Example 28
Project: Document-Scanner   File: DocumentCameraView.java   (6 votes)
public void setMaxPictureResolution() {
    int maxWidth=0;
    Size curRes=null;
    for ( Size r: getPictureResolutionList() ) {
        Log.d(TAG,"supported picture resolution: "+r.width+"x"+r.height);
        if (r.width>maxWidth) {
            maxWidth=r.width;
            curRes=r;
        }
    }

    if (curRes!=null) {
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPictureSize(curRes.width, curRes.height);
        mCamera.setParameters(parameters);
        Log.d(TAG, "selected picture resolution: " + curRes.width + "x" + curRes.height);
    }

    return;
}
 
Example 29
Project: Document-Scanner   File: DocumentCameraView.java   (6 votes)
public void setMaxPreviewResolution() {
    int maxWidth=0;
    Size curRes=null;

    mCamera.lock();

    for ( Size r: getResolutionList() ) {
        if (r.width>maxWidth) {
            Log.d(TAG,"supported preview resolution: "+r.width+"x"+r.height);
            maxWidth=r.width;
            curRes=r;
        }
    }

    if (curRes!=null) {
        setResolution(curRes);
        Log.d(TAG, "selected preview resolution: " + curRes.width + "x" + curRes.height);
    }

    return;
}
 
Example 30
Project: prayer-times-android   File: CameraSurfaceView.java   (6 votes)
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    if (mCamera == null) {
        return;
    }
    // Now that the size is known, set up the camera parameters and begin
    // the preview.

    Camera.Parameters parameters = mCamera.getParameters();
    Size size = getBestPreviewSize(width, height, parameters);
    if (size != null) {
        parameters.setPreviewSize(size.width, size.height);
    }
    mCamera.setParameters(parameters);

    try {
        mCamera.startPreview();
    } catch (Exception e) {
        Crashlytics.logException(e);
    }
}
 
Example 31
Project: prayer-times-android   File: CameraSurfaceView.java   (6 votes)
@Nullable
private Camera.Size getBestPreviewSize(int width, int height, @NonNull Camera.Parameters parameters) {
    Camera.Size result = null;
    double aspect = width / (double) height;
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if ((size.width >= width) && (size.height >= height)) {
            if (result == null) {
                result = size;
            } else {
                if (Math.abs((result.width / (double) result.height - aspect))
                        > Math.abs((size.width / (double) size.height - aspect))) {
                    result = size;
                }
            }
        }
    }

    return result;
}
 
Example 32
Project: Android_CCTV   File: VideoQuality.java   (6 votes)
/** 
 * Checks if the requested resolution is supported by the camera.
 * If not, it modifies it by supported parameters. 
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
	VideoQuality v = quality.clone();
	int minDist = Integer.MAX_VALUE;
	String supportedSizesStr = "Supported resolutions: ";
	List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
	for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
		Size size = it.next();
		supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
		int dist = Math.abs(quality.resX - size.width);
		if (dist<minDist) {
			minDist = dist;
			v.resX = size.width;
			v.resY = size.height;
		}
	}
	Log.v(TAG, supportedSizesStr);
	if (quality.resX != v.resX || quality.resY != v.resY) {
		Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
	}
	
	return v;
}
 
Example 33
Project: Endoscope   File: VideoQuality.java   (6 votes)
/** 
 * Checks if the requested resolution is supported by the camera.
 * If not, it modifies it by supported parameters. 
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
	VideoQuality v = quality.clone();
	int minDist = Integer.MAX_VALUE;
	String supportedSizesStr = "Supported resolutions: ";
	List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
	for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
		Size size = it.next();
		supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
		int dist = Math.abs(quality.resX - size.width);
		if (dist<minDist) {
			minDist = dist;
			v.resX = size.width;
			v.resY = size.height;
		}
	}
	Log.v(TAG, supportedSizesStr);
	if (quality.resX != v.resX || quality.resY != v.resY) {
		Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
	}
	
	return v;
}
 
Example 34
Project: Rocket.Chat-android   File: CameraManager.java   (6 votes)
public void setupCameraAndStartPreview(SurfaceHolder sf, Size sz, int displayRotation) {
	stopCameraPreview();
	
	cameraRotationDegree = CameraHelper.setCameraDisplayOrientation(defaultCameraID, camera, displayRotation);

	chooseCamcorderProfile(sz);

	// tweak profile
	profile.fileFormat = MediaRecorder.OutputFormat.THREE_GPP;
	profile.audioSampleRate = 16000;
	profile.audioChannels = 1;
	profile.audioBitRate = 96000;

	Parameters param = camera.getParameters();

	param.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
	param.setPreviewSize(profile.videoFrameWidth, profile.videoFrameHeight);
	if (Build.VERSION.SDK_INT >= 14)
		param.setRecordingHint(true);
	camera.setParameters(param);
	
	if (setDisplay(sf)) {
		startCameraPreview();
	}	
}
 
Example 35
Project: Rocket.Chat-android   File: CameraManager.java   (6 votes)
private void chooseCamcorderProfile(Size sizeHint) {
	// For android 2.3 devices video quality = low
	if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB)
		profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
	else {
		// For >= Android 3.0 devices select 720p, 480p or low quality of video
		if (CamcorderProfile.hasProfile(getCameraID(), CamcorderProfile.QUALITY_720P)
				&& (sizeHint == null || sizeHint.height >= 720)) {
			profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_720P));
			return;
		}

		if (CamcorderProfile.hasProfile(getCameraID(), CamcorderProfile.QUALITY_480P)
				&& (sizeHint == null || sizeHint.height >= 480)) {
			profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_480P));
			return;
		}

		profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
	}
}
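The chosen CamcorderProfile is usually handed straight to a MediaRecorder. A hedged sketch of that hand-off, assuming the preview surface and output path are managed by the caller:

private MediaRecorder buildRecorder(Camera camera, CamcorderProfile profile, String outputPath) {
    camera.unlock();                                   // the MediaRecorder needs control of the camera
    MediaRecorder recorder = new MediaRecorder();
    recorder.setCamera(camera);
    recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    recorder.setProfile(profile);                      // applies frame size, frame rate and bit rates
    recorder.setOutputFile(outputPath);
    return recorder;
}

The caller would still set a preview display and call prepare() and start() before recording begins.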
 
Example 36
Project: RemoteEye   File: VideoQuality.java   (6 votes)
/** 
 * Checks if the requested resolution is supported by the camera.
 * If not, it modifies it by supported parameters. 
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
	VideoQuality v = quality.clone();
	int minDist = Integer.MAX_VALUE;
	String supportedSizesStr = "Supported resolutions: ";
	List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
	for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
		Size size = it.next();
		supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
		int dist = Math.abs(quality.resX - size.width);
		if (dist<minDist) {
			minDist = dist;
			v.resX = size.width;
			v.resY = size.height;
		}
	}
	Log.v(TAG, supportedSizesStr);
	if (quality.resX != v.resX || quality.resY != v.resY) {
		Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
	}
	
	return v;
}
 
Example 37
Project: AndroidQrCodeScanner   File: MainActivity.java   (6 votes)
public void onPreviewFrame(byte[] data, Camera camera) {
    Parameters parameters = camera.getParameters();
    Size size = parameters.getPreviewSize();

    Image barcode = new Image(size.width, size.height, "Y800");
    barcode.setData(data);

    int result = scanner.scanImage(barcode);
    
    if (result != 0) {
        previewing = false;
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        
        SymbolSet syms = scanner.getResults();
        for (Symbol sym : syms) {
          //  scanText.setText("barcode result " + sym.getData());
            Toast.makeText(MainActivity.this,sym.getData(),Toast.LENGTH_SHORT).show();
            barcodeScanned = false;
            mCamera.setPreviewCallback(previewCb);
            mCamera.startPreview();
          //  previewing = true;
            mCamera.autoFocus(autoFocusCB);
        }
    }
}
 
Example 38
Project: pause-resume-video-recording   File: CameraCaptureActivity.java   (6 votes)
private Size determineBestSize(List<Size> sizes, int widthThreshold) {
    Size bestSize = null;
    Size size;
    int numOfSizes = sizes.size();
    for (int i = 0; i < numOfSizes; i++) {
        size = sizes.get(i);
        boolean isDesireRatio = (size.width / 4) == (size.height / 3);
        boolean isBetterSize = (bestSize == null) || size.width > bestSize.width;

        if (isDesireRatio && isBetterSize) {
            bestSize = size;
        }
    }

    if (bestSize == null) {
        Log.d(TAG, "cannot find the best camera size");
        return sizes.get(sizes.size() - 1);
    }

    return bestSize;
}
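One caveat: the integer division in the 4:3 test above lets near-misses through (for example 641x480). If an exact ratio check is wanted, cross-multiplication avoids the rounding; a small hedged variation, not the original project's code:

// Exact 4:3 check without integer-division rounding.
private static boolean isFourByThree(Camera.Size size) {
    return size.width * 3 == size.height * 4;
}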
 
Example 39
Project: libstreaming_android_studio   File: VideoQuality.java   (6 votes)
/** 
 * Checks if the requested resolution is supported by the camera.
 * If not, it modifies it by supported parameters. 
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
	VideoQuality v = quality.clone();
	int minDist = Integer.MAX_VALUE;
	String supportedSizesStr = "Supported resolutions: ";
	List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
	for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
		Size size = it.next();
		supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
		int dist = Math.abs(quality.resX - size.width);
		if (dist<minDist) {
			minDist = dist;
			v.resX = size.width;
			v.resY = size.height;
		}
	}
	Log.v(TAG, supportedSizesStr);
	if (quality.resX != v.resX || quality.resY != v.resY) {
		Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
	}
	
	return v;
}