Java Code Examples for android.hardware.Camera

The following are top-voted examples showing how to use the android.hardware.Camera API. They are extracted from open source projects.
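To put the snippets below in context, here is a minimal sketch (not taken from any of the listed projects) of the basic android.hardware.Camera lifecycle that most of these fragments assume: open the camera, attach it to a surface, start the preview, and release it when finished. The surfaceHolder variable is a placeholder for a real SurfaceHolder.

// Minimal lifecycle sketch; assumes a valid SurfaceHolder named surfaceHolder.
Camera camera = Camera.open();               // may throw RuntimeException if the camera is in use
try {
    camera.setPreviewDisplay(surfaceHolder); // route preview frames to the surface
    camera.startPreview();
    // ... adjust Camera.Parameters, take pictures, etc. ...
} catch (IOException e) {
    e.printStackTrace();
}
// Later, typically in onPause():
camera.stopPreview();
camera.release();                            // release so other apps can open the camera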
Example 1
Project: CXJPadProject   File: OrcVinActivity.java
@Override
public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
    if (mycamera != null) {
        mycamera.autoFocus(new AutoFocusCallback() {
            @Override
            public void onAutoFocus(boolean success, Camera camera) {
                if (success) {
                    synchronized (camera) {
                        new Thread() {
                            public void run() {
                                initCamera(holder);
                                super.run();
                            }
                        }.start();
                    }
                }
            }
        });
    }
}
 
Example 2
Project: DeepImagePreview-Project   File: ScanFragment.java
private void createCameraSource() {

        // Create the TextRecognizer
        TextRecognizer textRecognizer = new TextRecognizer.Builder(getContext()).build();
        SearchResultHandler searchResultHandler = new SearchResultHandler(getContext(), mPreferenceManager, getResources().getStringArray(R.array.pin_colors));

        // Set the TextRecognizer's Processor.
        OcrDetectorProcessor ocrDetectorProcessor = new OcrDetectorProcessor(
                mOcrGraphicOverlay,
                mMaskView,
                mPreferenceManager,
                searchResultHandler,
                mScanPresenter);
        textRecognizer.setProcessor(ocrDetectorProcessor);
        mScanPresenter.setProcessor(ocrDetectorProcessor);

        // Check if the TextRecognizer is operational.
        if (!textRecognizer.isOperational()) {
            Log.w(TAG, "Detector dependencies are not yet available.");

            // Check for low storage.  If there is low storage, the native library will not be
            // downloaded, so detection will not become operational.
            IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
            boolean hasLowStorage = getActivity().registerReceiver(null, lowstorageFilter) != null;

            if (hasLowStorage) {
                Toast.makeText(getContext(), R.string.low_storage_error, Toast.LENGTH_LONG).show();
                Log.w(TAG, getString(R.string.low_storage_error));
            }
        }

        // Create the mCameraSource using the TextRecognizer.
        mCameraSource = new CameraSource.Builder(getContext(), textRecognizer)
                .setFacing(CameraSource.CAMERA_FACING_BACK)
                .setRequestedPreviewSize(1280, 1024)
                .setRequestedFps(15.0f)
//                .setFlashMode(Camera.Parameters.FLASH_MODE_TORCH)
                .setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)
                .build();
    }
 
Example 3
Project: meipai-Android   File: FocusSurfaceView.java
private void focusOnTouch(int x, int y, Camera camera) {

        Rect rect = new Rect(x - 100, y - 100, x + 100, y + 100);
        int left = rect.left * 2000 / getWidth() - 1000;
        int top = rect.top * 2000 / getHeight() - 1000;
        int right = rect.right * 2000 / getWidth() - 1000;
        int bottom = rect.bottom * 2000 / getHeight() - 1000;
        // Keep within the (-1000, -1000) to (1000, 1000) focus-area range; values outside it crash the camera
        left = left < -1000 ? -1000 : left;
        top = top < -1000 ? -1000 : top;
        right = right > 1000 ? 1000 : right;
        bottom = bottom > 1000 ? 1000 : bottom;
        try {
            focusOnRect(new Rect(left, top, right, bottom), camera);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
 
Example 4
Project: KrGallery   File: CameraSession.java
protected void configureRecorder(int quality, MediaRecorder recorder) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraInfo.cameraId, info);
    int displayOrientation = getDisplayOrientation(info, false);
    recorder.setOrientationHint(displayOrientation);

    int highProfile = getHigh();
    boolean canGoHigh = CamcorderProfile.hasProfile(cameraInfo.cameraId, highProfile);
    boolean canGoLow = CamcorderProfile.hasProfile(cameraInfo.cameraId, CamcorderProfile.QUALITY_LOW);
    if (canGoHigh && (quality == 1 || !canGoLow)) {
        recorder.setProfile(CamcorderProfile.get(cameraInfo.cameraId, highProfile));
    } else if (canGoLow) {
        recorder.setProfile(CamcorderProfile.get(cameraInfo.cameraId, CamcorderProfile.QUALITY_LOW));
    } else {
        throw new IllegalStateException("cannot find valid CamcorderProfile");
    }
    isVideo = true;
}
 
Example 5
Project: CustomAndroidOneSheeld   File: CameraUtils.java
/**
 * Check if this device has flash
 */
public static boolean hasFlash(Camera mCamera) {
    if (mCamera == null) {
        return false;
    }

    Camera.Parameters parameters = mCamera.getParameters();

    if (parameters.getFlashMode() == null) {
        return false;
    }

    List<String> supportedFlashModes = parameters.getSupportedFlashModes();
    if (supportedFlashModes == null || supportedFlashModes.isEmpty()
            || (supportedFlashModes.size() == 1
            && supportedFlashModes.get(0).equals(Camera.Parameters.FLASH_MODE_OFF))) {
        return false;
    }

    return true;
}
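The helper above only reports flash availability; a caller still has to switch the flash mode itself. A minimal usage sketch, assuming an already opened Camera instance named camera:

if (CameraUtils.hasFlash(camera)) {
    Camera.Parameters params = camera.getParameters();
    params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH); // turn the torch on
    camera.setParameters(params);
}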
 
Example 6
Project: MediaCodecRecorder   File: RecordingActivity.java
@Override
public void onCameraSurfaceCreate(SurfaceTexture surfaceTexture) {
    Log.d(TAG, "onCameraSurfaceCreate");
    mCamera = Camera.open();
    Camera.Parameters parameters = mCamera.getParameters();
    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    mVideoRecorder.createInputSurfaceWindow(EGL14.eglGetCurrentContext());
    try {
        parameters.setPreviewSize(PREVIEW_WIDTH, PREVIEW_HEIGHT);
        mCameraView.setPreviewSize(PREVIEW_HEIGHT, PREVIEW_WIDTH);
        mVideoRecorder.setPreviewSize(PREVIEW_HEIGHT, PREVIEW_WIDTH);
        mCamera.setParameters(parameters);
        mCamera.setPreviewTexture(surfaceTexture);
        mCamera.setDisplayOrientation(Profile.ORIENTATION_90);
        mCamera.startPreview();
    } catch (IOException e) {
        e.printStackTrace();
    }

    isSurfaceReady = true;
}
 
Example 7
Project: buildAPKsSamples   File: CameraFragment.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Create a container that will hold a SurfaceView for camera previews
    mPreview = new Preview(this.getActivity());

    // Find the total number of cameras available
    mNumberOfCameras = Camera.getNumberOfCameras();

    // Find the ID of the rear-facing ("default") camera
    CameraInfo cameraInfo = new CameraInfo();
    for (int i = 0; i < mNumberOfCameras; i++) {
        Camera.getCameraInfo(i, cameraInfo);
        if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
            mCurrentCamera = mDefaultCameraId = i;
        }
    }
    setHasOptionsMenu(mNumberOfCameras > 1);
}
 
Example 8
Project: kotlin-android   File: DBR.java
private void openCamera()
{
    new Thread(new Runnable() {
        @Override
        public void run() {
            mCamera = getCameraInstance();
            if (mCamera != null) {
                mCamera.setDisplayOrientation(90);
                Camera.Parameters cameraParameters = mCamera.getParameters();
                cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                mCamera.setParameters(cameraParameters);
            }

            Message message = handler.obtainMessage(OPEN_CAMERA, 1);
            message.sendToTarget();
        }
    }).start();
}
 
Example 9
Project: Mobike   File: CameraManager.java
/**
 * Opens the camera driver and initializes the hardware parameters.
 *
 * @param holder The surface object which the camera will draw preview frames into.
 * @throws IOException Indicates the camera driver failed to open.
 */
public void openDriver(SurfaceHolder holder) throws IOException {
    if (camera == null) {
        camera = Camera.open();
        if (camera == null) {
            throw new IOException();
        }
        camera.setPreviewDisplay(holder);

        if (!initialized) {
            initialized = true;
            configManager.initFromCameraParameters(camera);
        }
        configManager.setDesiredCameraParameters(camera);

        //FIXME

        FlashlightManager.enableFlashlight();
    }
}
 
Example 10
Project: meipai-Android   File: MediaRecorderBase.java
/**
 * Manual focus.
 *
 * @param focusAreas the focus areas to use
 * @return true if auto focus was triggered, false otherwise
 */
@SuppressLint("NewApi")
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public boolean manualFocus(AutoFocusCallback cb, List<Area> focusAreas) {
	if (camera != null && focusAreas != null && mParameters != null && DeviceUtils.hasICS()) {
		try {
			camera.cancelAutoFocus();
			// getMaxNumFocusAreas checks whether the device supports focus areas
			if (mParameters.getMaxNumFocusAreas() > 0) {
				// mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);//
				// Macro(close-up) focus mode
				mParameters.setFocusAreas(focusAreas);
			}

			if (mParameters.getMaxNumMeteringAreas() > 0)
				mParameters.setMeteringAreas(focusAreas);

			mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);
			camera.setParameters(mParameters);
			camera.autoFocus(cb);
			return true;
		} catch (Exception e) {
			if (mOnErrorListener != null) {
				mOnErrorListener.onVideoError(MEDIA_ERROR_CAMERA_AUTO_FOCUS, 0);
			}
			if (e != null)
				Log.e("Yixia", "autoFocus", e);
		}
	}
	return false;
}
 
Example 11
Project: LongImageCamera   File: Camera1.java
/**
 * @return {@code true} if {@link #mCameraParameters} was modified.
 */
private boolean setFlashInternal(int flash) {
    if (isCameraOpened()) {
        List<String> modes = mCameraParameters.getSupportedFlashModes();
        String mode = FLASH_MODES.get(flash);
        if (modes != null && modes.contains(mode)) {
            mCameraParameters.setFlashMode(mode);
            mFlash = flash;
            return true;
        }
        String currentMode = FLASH_MODES.get(mFlash);
        if (modes == null || !modes.contains(currentMode)) {
            mCameraParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
            mFlash = Constants.FLASH_OFF;
            return true;
        }
        return false;
    } else {
        mFlash = flash;
        return false;
    }
}
 
Example 12
Project: FaceRecognition   File: CameraInterface.java
public Camera startCamera(){
	try {
		if(Camera.getNumberOfCameras()>1){
			camera = Camera.open(1);
			Facing = getFacing(1);
			id++;
		}else{
			camera = Camera.open(0);
			Facing = getFacing(0);
			id++;
		}
	}catch (RuntimeException e){
		Toast.makeText(context, "Failed to open the camera; please close other apps that are using it!", Toast.LENGTH_LONG).show();
	}
	return camera;
}
 
Example 13
Project: EditPhoto   File: CameraController.java
public boolean startAutoFocus(Camera.AutoFocusCallback autoFocusCallback) {
    if ((mIsSupportAutoFocus || mIsSupportAutoFocusContinuousPicture) && mCamera != null) {
        try {

            String focusMode = getCameraParameters().getFocusMode();

            if (!TextUtils.isEmpty(focusMode) && focusMode.
                    equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {  // if continuous-picture autofocus is active, run one focus pass
                mCamera.autoFocus(autoFocusCallback);
            } else {
                return false;
            }
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    return false;
}
 
Example 14
Project: SmART-Form   File: CameraActivity.java
@Override
protected void onResume() {
    super.onResume();

    if (mCamera == null) {
        try {
            mCamera = Camera.open();
            mCamera.setPreviewDisplay(mCameraPreview.getHolder());
            if (mIsCapturing) {
                mCamera.setDisplayOrientation(90);
                mCamera.startPreview();
            }
        } catch (Exception e) {
            Toast.makeText(CameraActivity.this, "Unable to open camera. Please go to settings for camera permission", Toast.LENGTH_SHORT)
                    .show();
        }
    }
}
 
Example 15
Project: CameraPreview   File: PreviewView.java
/**
 * This method is only valid when preview is active
 * (between {@link Camera#startPreview()} and before {@link Camera#stopPreview()}).
 */
private void autoFocus() {
    if (mCamera == null) {
        return;
    }
    Camera.Parameters cameraParam = mCamera.getParameters();
    List<String> focusModes = cameraParam.getSupportedFocusModes();
    if (focusModes == null) {
        return;
    }
    if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
        // Autofocus mode is supported
        // get Camera parameters

        // set the focus mode
        cameraParam.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        // set Camera parameters
        mCamera.setParameters(cameraParam);
        mCamera.autoFocus(mFocusCallback);

    }
}
 
Example 16
Project: Amazing   File: CameraActivity.java
private void setParams() {
    //LogUtil.e("preview set size=" + width + " : " + height);
    Camera.Parameters parameters = camera.getParameters();
    //        parameters.setPreviewSize(width, height);
    //        parameters.setPictureSize(width, height);
    parameters.setPreviewFormat(ImageFormat.NV21);
    camera.setDisplayOrientation(90);
    parameters.setRotation(90);

    List<Integer> supportedPreviewFormats = parameters.getSupportedPreviewFormats();
    for (Integer integer : supportedPreviewFormats) {
        //LogUtil.e("preview format=" + integer);
    }

    List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    for (Camera.Size size : supportedPreviewSizes) {
        //LogUtil.e("preview size=" + size.width + " : " + size.height);
    }
    camera.setParameters(parameters);
}
 
Example 17
Project: code-scanner   File: CodeScanner.java
private void startPreviewInternal(boolean internal) {
    try {
        DecoderWrapper decoderWrapper = mDecoderWrapper;
        Camera camera = decoderWrapper.getCamera();
        camera.setPreviewCallback(mPreviewCallback);
        camera.setPreviewDisplay(mSurfaceHolder);
        if (!internal && decoderWrapper.isFlashSupported() && mFlashEnabled) {
            setFlashEnabledInternal(true);
        }
        camera.startPreview();
        mStoppingPreview = false;
        mPreviewActive = true;
        mSafeAutoFocusing = false;
        mSafeAutoFocusAttemptsCount = 0;
        if (mAutoFocusMode == AutoFocusMode.SAFE) {
            scheduleSafeAutoFocusTask();
        }
    } catch (Exception ignored) {
    }
}
 
Example 18
Project: MegviiFacepp-Android-SDK   File: ICamera.java
/**
 * Finds the supported preview size closest to the given width and height.
 */
private Camera.Size calBestPreviewSize(Camera.Parameters camPara,
									   final int width, final int height) {
	List<Camera.Size> allSupportedSize = camPara.getSupportedPreviewSizes();
	ArrayList<Camera.Size> widthLargerSize = new ArrayList<Camera.Size>();
	for (Camera.Size tmpSize : allSupportedSize) {
		Log.w("ceshi", "tmpSize.width===" + tmpSize.width
				+ ", tmpSize.height===" + tmpSize.height);
		if (tmpSize.width > tmpSize.height) {
			widthLargerSize.add(tmpSize);
		}
	}

	Collections.sort(widthLargerSize, new Comparator<Camera.Size>() {
		@Override
		public int compare(Camera.Size lhs, Camera.Size rhs) {
			int off_one = Math.abs(lhs.width * lhs.height - width * height);
			int off_two = Math.abs(rhs.width * rhs.height - width * height);
			return off_one - off_two;
		}
	});

	return widthLargerSize.get(0);
}
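Note that this helper only considers landscape-oriented sizes and ranks them by absolute pixel-count difference, so the result may not match the requested aspect ratio, and widthLargerSize.get(0) will throw if no landscape size exists. A sketch of the assumed calling pattern, with surfaceWidth and surfaceHeight as placeholders:

Camera.Parameters params = camera.getParameters();
Camera.Size best = calBestPreviewSize(params, surfaceWidth, surfaceHeight);
params.setPreviewSize(best.width, best.height);
camera.setParameters(params);   // the new preview size takes effect via setParameters()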
 
Example 19
Project: CameraFragment   File: Camera1Manager.java
@Override
public void initializeCameraManager(ConfigurationProvider configurationProvider, Context context) {
    super.initializeCameraManager(configurationProvider, context);

    numberOfCameras = Camera.getNumberOfCameras();

    for (int i = 0; i < numberOfCameras; ++i) {
        final Camera.CameraInfo cameraInfo = new Camera.CameraInfo();

        Camera.getCameraInfo(i, cameraInfo);
        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
            faceBackCameraId = i;
            faceBackCameraOrientation = cameraInfo.orientation;
        } else if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            faceFrontCameraId = i;
            faceFrontCameraOrientation = cameraInfo.orientation;
        }
    }
}
 
Example 20
Project: rtmp-rtsp-stream-client-java   File: Camera1ApiManager.java
private List<Camera.Size> getPreviewSize() {
  List<Camera.Size> previewSizes;
  Camera.Size maxSize;
  if (camera != null) {
    maxSize = getMaxEncoderSizeSupported();
    previewSizes = camera.getParameters().getSupportedPreviewSizes();
  } else {
    camera = Camera.open(cameraSelect);
    maxSize = getMaxEncoderSizeSupported();
    previewSizes = camera.getParameters().getSupportedPreviewSizes();
    camera.release();
    camera = null;
  }
  // discard preview sizes larger than the encoder can handle
  Iterator<Camera.Size> iterator = previewSizes.iterator();
  while (iterator.hasNext()) {
    Camera.Size size = iterator.next();
    if (size.width > maxSize.width || size.height > maxSize.height) {
      Log.i(TAG, size.width + "X" + size.height + ", not supported for encoder");
      iterator.remove();
    }
  }
  return previewSizes;
}
 
Example 21
Project: BarcodeReaderView   File: CameraConfigurationUtils.java
public static void setBestExposure(Camera.Parameters parameters, boolean lightOn) {
    int minExposure = parameters.getMinExposureCompensation();
    int maxExposure = parameters.getMaxExposureCompensation();
    float step = parameters.getExposureCompensationStep();
    if ((minExposure != 0 || maxExposure != 0) && step > 0.0f) {
        // Set low when light is on
        float targetCompensation = lightOn ? MIN_EXPOSURE_COMPENSATION : MAX_EXPOSURE_COMPENSATION;
        int compensationSteps = Math.round(targetCompensation / step);
        float actualCompensation = step * compensationSteps;
        // Clamp value:
        compensationSteps = Math.max(Math.min(compensationSteps, maxExposure), minExposure);
        if (parameters.getExposureCompensation() == compensationSteps) {
            LogEx.i(TAG, "Exposure compensation already set to " + compensationSteps + " / " + actualCompensation);
        } else {
            LogEx.i(TAG, "Setting exposure compensation to " + compensationSteps + " / " + actualCompensation);
            parameters.setExposureCompensation(compensationSteps);
        }
    } else {
        LogEx.i(TAG, "Camera does not support exposure compensation");
    }
}
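setBestExposure only mutates the Parameters object it is given; nothing changes until the caller pushes it back to the camera. A short sketch of the assumed calling pattern, with camera and lightOn as placeholders:

Camera.Parameters params = camera.getParameters();
CameraConfigurationUtils.setBestExposure(params, lightOn);
camera.setParameters(params);   // exposure compensation is applied here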
 
Example 22
Project: OCR-Reader   File: CameraSource.java
/**
 * Sets the frame data received from the camera.  This adds the previous unused frame buffer
 * (if present) back to the camera, and keeps a pending reference to the frame data for
 * future use.
 */
void setNextFrame(byte[] data, Camera camera) {
    synchronized (mLock) {
        if (mPendingFrameData != null) {
            camera.addCallbackBuffer(mPendingFrameData.array());
            mPendingFrameData = null;
        }

        if (!mBytesToByteBuffer.containsKey(data)) {
            Log.d(TAG,
                "Skipping frame.  Could not find ByteBuffer associated with the image " +
                "data from the camera.");
            return;
        }

        // Timestamp and frame ID are maintained here, which will give downstream code some
        // idea of the timing of frames received and when frames were dropped along the way.
        mPendingTimeMillis = SystemClock.elapsedRealtime() - mStartTimeMillis;
        mPendingFrameId++;
        mPendingFrameData = mBytesToByteBuffer.get(data);

        // Notify the processor thread if it is waiting on the next frame (see below).
        mLock.notifyAll();
    }
}
 
Example 23
Project: PlusGram   File: CameraController.java
public void startPreview(final CameraSession session) {
    if (session == null) {
        return;
    }
    threadPool.execute(new Runnable() {
        @SuppressLint("NewApi")
        @Override
        public void run() {
            Camera camera = session.cameraInfo.camera;
            try {
                if (camera == null) {
                    camera = session.cameraInfo.camera = Camera.open(session.cameraInfo.cameraId);
                }
                camera.startPreview();
            } catch (Exception e) {
                session.cameraInfo.camera = null;
                if (camera != null) {
                    camera.release();
                }
                FileLog.e("tmessages", e);
            }
        }
    });
}
 
Example 24
Project: PlusGram   File: CameraSession.java
protected void configureRecorder(int quality, MediaRecorder recorder) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraInfo.cameraId, info);
    int displayOrientation = getDisplayOrientation(info, false);
    recorder.setOrientationHint(displayOrientation);

    int highProfile = getHigh();
    boolean canGoHigh = CamcorderProfile.hasProfile(cameraInfo.cameraId, highProfile);
    boolean canGoLow = CamcorderProfile.hasProfile(cameraInfo.cameraId, CamcorderProfile.QUALITY_LOW);
    if (canGoHigh && (quality == 1 || !canGoLow)) {
        recorder.setProfile(CamcorderProfile.get(cameraInfo.cameraId, highProfile));
    } else if (canGoLow) {
        recorder.setProfile(CamcorderProfile.get(cameraInfo.cameraId, CamcorderProfile.QUALITY_LOW));
    } else {
        throw new IllegalStateException("cannot find valid CamcorderProfile");
    }
    isVideo = true;
}
 
Example 25
Project: PaoMovie   File: CameraPreview.java
public CameraPreview(Context context, Camera camera,
                     PreviewCallback previewCb,
                     AutoFocusCallback autoFocusCb) {
    super(context);
    this.context=context;
    mCamera = camera;
    previewCallback = previewCb;
    autoFocusCallback = autoFocusCb;

    /* 
     * Set camera to continuous focus if supported, otherwise use
     * software auto-focus. Only works for API level >=9.
     */
    /*
    Camera.Parameters parameters = camera.getParameters();
    for (String f : parameters.getSupportedFocusModes()) {
        if (f == Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) {
            mCamera.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            autoFocusCallback = null;
            break;
        }
    }
    */

    // Install a SurfaceHolder.Callback so we get notified when the
    // underlying surface is created and destroyed.
    mHolder = getHolder();
    mHolder.addCallback(this);

    // deprecated setting, but required on Android versions prior to 3.0
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
 
Example 26
Project: Expert-Android-Programming   File: CameraUtil.java
public static int getBackCameraId() {
    if (backCameraId == -1) {
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        for (int i = 0; i < getCameraNumber(); i++) {
            Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                backCameraId = i;
                break;
            }
        }
    }
    return backCameraId;
}
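A caller would typically check the -1 sentinel before opening the camera; for example (a sketch, not from the project above):

int backCameraId = CameraUtil.getBackCameraId();
if (backCameraId != -1) {
    Camera camera = Camera.open(backCameraId);  // open the rear-facing camera explicitly
}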
 
Example 27
Project: ZXingAndroidExt   File: CameraConfigurationManager.java
boolean getTorchState(Camera camera) {
    if (camera != null) {
        Camera.Parameters parameters = camera.getParameters();
        if (parameters != null) {
            String flashMode = parameters.getFlashMode();
            return flashMode != null &&
                    (Camera.Parameters.FLASH_MODE_ON.equals(flashMode) ||
                            Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode));
        }
    }
    return false;
}
 
Example 28
Project: QrCode   File: CameraConfigurationUtils.java
public static void setMetering(Camera.Parameters parameters) {
    if (parameters.getMaxNumMeteringAreas() > 0) {
        Log.i(TAG, "Old metering areas: " + parameters.getMeteringAreas());
        List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
        Log.i(TAG, "Setting metering area to : " + toString(middleArea));
        parameters.setMeteringAreas(middleArea);
    } else {
        Log.i(TAG, "Device does not support metering areas");
    }
}
 
Example 29
Project: humaniq-android   File: PhotoFragment.java
private void startCapture() {
    camera = Camera.open(getCameraID());
    configureCamera();
    final ViewTreeObserver viewTreeObserver = preview.getViewTreeObserver();
    viewTreeObserver.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
        @Override
        public void onGlobalLayout() {
            preview.getViewTreeObserver().removeGlobalOnLayoutListener(this);
            setPreviewSize(true);
            holderCallback = new HolderCallback();
            preview.getHolder().addCallback(holderCallback);
        }
    });
}
 
Example 30
Project: phonk   File: CameraNew.java
public void setCameraDisplayOrientation(int cameraId, android.hardware.Camera camera) {
    android.hardware.Camera.CameraInfo info =
            new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);

    WindowManager windowManager = (WindowManager) mAppRunner.getAppContext().getSystemService(Context.WINDOW_SERVICE);
    int rotation = windowManager.getDefaultDisplay().getRotation();

    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:
            degrees = 0;
            break;
        case Surface.ROTATION_90:
            degrees = 90;
            break;
        case Surface.ROTATION_180:
            degrees = 180;
            break;
        case Surface.ROTATION_270:
            degrees = 270;
            break;
    }

    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360;  // compensate the mirror
    } else {  // back-facing
        result = (info.orientation - degrees + 360) % 360;
    }

    cameraRotation = result;
    camera.setDisplayOrientation(result);
}
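This follows the orientation calculation from the Camera.setDisplayOrientation() documentation. It is usually invoked right after opening the camera and again whenever the display rotation changes; a minimal sketch, assuming cameraId is already known:

camera = Camera.open(cameraId);
setCameraDisplayOrientation(cameraId, camera);  // keep the preview upright for the current rotation
camera.startPreview();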
 
Example 31
Project: ZxingForAndroid   File: AutoFocusManager.java
public AutoFocusManager(Camera camera, CameraSettings settings) {
    this.handler = new Handler(focusHandlerCallback);
    this.camera = camera;
    String currentFocusMode = camera.getParameters().getFocusMode();
    useAutoFocus = settings.isAutoFocusEnabled() && FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
    Log.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
    start();
}
 
Example 32
Project: ZxingForAndroid   File: CameraConfigurationUtils.java
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public static void setFocusArea(Camera.Parameters parameters) {
    if (parameters.getMaxNumFocusAreas() > 0) {
        Log.i(TAG, "Old focus areas: " + toString(parameters.getFocusAreas()));
        List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
        Log.i(TAG, "Setting focus area to : " + toString(middleArea));
        parameters.setFocusAreas(middleArea);
    } else {
        Log.i(TAG, "Device does not support focus areas");
    }
}
 
Example 33
Project: CameraSample   File: CameraV1Util.java
public static boolean isContinuousFocusModeSupported(List<String> supportedFocusModes) {
    if (supportedFocusModes != null && !supportedFocusModes.isEmpty()) {
        for (String focusMode : supportedFocusModes) {
            if (focusMode != null && focusMode.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                return true;
            }
        }
    }
    return false;
}
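The check is usually paired with actually selecting the focus mode; a sketch of that pairing, assuming an open Camera instance named camera:

Camera.Parameters params = camera.getParameters();
if (CameraV1Util.isContinuousFocusModeSupported(params.getSupportedFocusModes())) {
    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    camera.setParameters(params);
}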
 
Example 34
Project: PaoMovie   File: CameraEngine.java
public static boolean openCamera(){
	if(mCamera == null){
		try{
			mCamera = Camera.open(mCameraID);
			setDefaultParameters();
			return true;
		}catch(RuntimeException e){
			return false;
		}
	}
	return false;
}
 
Example 35
Project: SbCamera   File: SbCamera.java
private void initDefaultParamsForCamera() {
    if (mCamera == null) {
        return;
    }
    Camera.Parameters parameters = mCamera.getParameters();

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
        parameters.setRecordingHint(true);
    }

    List<Integer> formatList = parameters.getSupportedPreviewFormats();
    for (Integer format : formatList) {
        if (format == mImageFormat) {
            parameters.setPreviewFormat(mImageFormat);
            break;
        }
    }
    //set focus mode
    List<String> focusModes = parameters.getSupportedFocusModes();
    if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }
    mCamera.setParameters(parameters);
    adjustRotationForCamera();

}
 
Example 36
Project: BarcodeReaderView   File: AutoFocusManager.java
AutoFocusManager(Camera camera, boolean useAutoFocus) {
    this.camera = camera;
    String currentFocusMode = camera.getParameters().getFocusMode();
    this.useAutoFocus = useAutoFocus && FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
    LogEx.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
    start();
}
 
Example 37
Project: Idea-Camera   File: CameraPreview.java
public CameraPreview(Context context, Camera camera) {
    super(context);
    mCamera = camera;

    // Install a SurfaceHolder.Callback so we get notified when the
    // underlying surface is created and destroyed.
    mHolder = getHolder();
    // register the SurfaceHolder.Callback
    mHolder.addCallback(this);
    // deprecated setting, but required on Android versions prior to 3.0
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
 
Example 38
Project: Nird2   File: CameraView.java
@UiThread
public void start() {
	try {
		LOG.info("Opening camera");
		camera = Camera.open();
	} catch (RuntimeException e) {
		LOG.log(WARNING, "Error opening camera", e);
		return;
	}
	setDisplayOrientation(0);
	// Use barcode scene mode if it's available
	Parameters params = camera.getParameters();
	params = setSceneMode(camera, params);
	if (SCENE_MODE_BARCODE.equals(params.getSceneMode())) {
		// If the scene mode enabled the flash, try to disable it
		if (!FLASH_MODE_OFF.equals(params.getFlashMode()))
			params = disableFlash(camera, params);
		// If the flash is still enabled, disable the scene mode
		if (!FLASH_MODE_OFF.equals(params.getFlashMode()))
			params = disableSceneMode(camera, params);
	}
	// Use the best available focus mode, preview size and other options
	params = setBestParameters(camera, params);
	// Enable auto focus if the selected focus mode uses it
	enableAutoFocus(params.getFocusMode());
	// Log the parameters that are being used (maybe not what we asked for)
	logCameraParameters();
	// Start the preview when the camera and the surface are both ready
	if (surface != null && !previewStarted) startPreview(getHolder());
}
 
Example 39
Project: AndroidOCRFforID   File: OcrIdActivity.java
public void toggleFlash(View view) {
    if (!isFlashOn) mCameraSource.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
    else mCameraSource.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
    try {
        mImageViewFlash.setImageResource(isFlashOn ? R.drawable.ic_flash_on : R.drawable.ic_flash_off);

        isFlashOn = !isFlashOn;
    } catch (Exception e) {
        e.printStackTrace();
    }

}
 
Example 40
Project: PaoMovie   File: MagicCameraDisplay.java
@Override
public void onPictureTaken(final byte[] data,Camera camera) {
	Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
	if(mFilters != null){
		getBitmapFromGL(bitmap, true);
	}else{
		mSaveTask.execute(bitmap);   
	}
}