Java Code Examples for android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats()

The following examples show how to use android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats(). Each example is taken from an open-source project, named above the code.
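Before the project samples, here is a minimal sketch of the usual call pattern (the cameraManager and cameraId variables are assumed to be in scope; the null check is only defensive, since CameraCharacteristics.get() is declared @Nullable):

CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map != null) {
    // Each entry is an ImageFormat/PixelFormat constant and can be passed to
    // map.getOutputSizes(format), as the examples below do.
    int[] outputFormats = map.getOutputFormats();
}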
Example 1
Source File: CustomVideoCapturerCamera2.java    From opentok-android-sdk-samples with MIT License
private Size selectPreferredSize(String camId, final int width, final int height, int format)
        throws CameraAccessException {
    CameraCharacteristics info = cameraManager.getCameraCharacteristics(camId);
    StreamConfigurationMap dimMap = info.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    List<Size> sizeLst = new ArrayList<Size>();
    // getOutputFormats() lists every output format this camera supports; this capturer
    // only collects the sizes advertised for YUV_420_888.
    int[] formats = dimMap.getOutputFormats();
    Collections.addAll(sizeLst, dimMap.getOutputSizes(ImageFormat.YUV_420_888));
    /* pick the size with the smallest combined error from the desired size */
    return Collections.min(sizeLst, new Comparator<Size>() {
        @Override
        public int compare(Size lhs, Size rhs) {
            int lXerror = Math.abs(lhs.getWidth() - width);
            int lYerror = Math.abs(lhs.getHeight() - height);
            int rXerror = Math.abs(rhs.getWidth() - width);
            int rYerror = Math.abs(rhs.getHeight() - height);
            return (lXerror + lYerror) - (rXerror + rYerror);
        }
    });
}
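A call site for the helper above could look like the following (illustrative only; the camera id and target resolution are placeholders, and the format argument is not actually consulted by the helper, which always enumerates YUV_420_888 sizes):

Size preferred = selectPreferredSize("0", 1280, 720, ImageFormat.YUV_420_888);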
 
Example 2
Source File: CameraHandler.java    From sample-tensorflow-imageclassifier with Apache License 2.0
/**
 * Helpful debugging method:  Dump all supported camera formats to log.  You don't need to run
 * this for normal operation, but it's very helpful when porting this code to different
 * hardware.
 */
public static void dumpFormatInfo(Context context) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String camId = getCameraId(context);
    if (camId == null) {
        return;
    }
    Log.d(TAG, "Using camera id " + camId);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(camId);
        StreamConfigurationMap configs = characteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        for (int format : configs.getOutputFormats()) {
            Log.d(TAG, "Getting sizes for format: " + format);
            for (Size s : configs.getOutputSizes(format)) {
                Log.d(TAG, "\t" + s.toString());
            }
        }
        int[] effects = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS);
        for (int effect : effects) {
            Log.d(TAG, "Effect available: " + effect);
        }
    } catch (CameraAccessException e) {
        Log.e(TAG, "Camera access exception getting characteristics.");
    }
}
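The format values logged above are raw integer constants from ImageFormat and PixelFormat. A small helper like the following (illustrative, not part of the original project) makes the dump easier to read:

private static String formatName(int format) {
    switch (format) {
        case ImageFormat.JPEG:        return "JPEG";
        case ImageFormat.YUV_420_888: return "YUV_420_888";
        case ImageFormat.RAW_SENSOR:  return "RAW_SENSOR";
        case PixelFormat.RGBA_8888:   return "RGBA_8888";
        default:                      return "0x" + Integer.toHexString(format);
    }
}

It can then replace the bare integer in the Log.d() calls, e.g. Log.d(TAG, "Getting sizes for format: " + formatName(format)).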
 
Example 3
Source File: AndroidCamera2Capabilities.java    From Camera2 with Apache License 2.0
AndroidCamera2Capabilities(CameraCharacteristics p) {
    super(new Stringifier());

    StreamConfigurationMap s = p.get(SCALER_STREAM_CONFIGURATION_MAP);

    for (Range<Integer> fpsRange : p.get(CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) {
        mSupportedPreviewFpsRange.add(new int[] { fpsRange.getLower(), fpsRange.getUpper() });
    }

    // TODO: We only support TextureView preview rendering
    mSupportedPreviewSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(SurfaceTexture.class))));
    for (int format : s.getOutputFormats()) {
        mSupportedPreviewFormats.add(format);
    }

    // TODO: We only support MediaRecorder video capture
    mSupportedVideoSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(MediaRecorder.class))));

    // TODO: We only support JPEG image capture
    mSupportedPhotoSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(ImageFormat.JPEG))));
    mSupportedPhotoFormats.addAll(mSupportedPreviewFormats);

    buildSceneModes(p);
    buildFlashModes(p);
    buildFocusModes(p);
    buildWhiteBalances(p);
    // TODO: Populate mSupportedFeatures

    // TODO: Populate mPreferredPreviewSizeForVideo

    Range<Integer> ecRange = p.get(CONTROL_AE_COMPENSATION_RANGE);
    mMinExposureCompensation = ecRange.getLower();
    mMaxExposureCompensation = ecRange.getUpper();

    Rational ecStep = p.get(CONTROL_AE_COMPENSATION_STEP);
    mExposureCompensationStep = (float) ecStep.getNumerator() / ecStep.getDenominator();

    mMaxNumOfFacesSupported = p.get(STATISTICS_INFO_MAX_FACE_COUNT);
    mMaxNumOfMeteringArea = p.get(CONTROL_MAX_REGIONS_AE);

    mMaxZoomRatio = p.get(SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    // TODO: Populate mHorizontalViewAngle
    // TODO: Populate mVerticalViewAngle
    // TODO: Populate mZoomRatioList
    // TODO: Populate mMaxZoomIndex

    if (supports(FocusMode.AUTO)) {
        mMaxNumOfFocusAreas = p.get(CONTROL_MAX_REGIONS_AF);
        if (mMaxNumOfFocusAreas > 0) {
            mSupportedFeatures.add(Feature.FOCUS_AREA);
        }
    }
    if (mMaxNumOfMeteringArea > 0) {
        mSupportedFeatures.add(Feature.METERING_AREA);
    }

    if (mMaxZoomRatio > CameraCapabilities.ZOOM_RATIO_UNZOOMED) {
        mSupportedFeatures.add(Feature.ZOOM);
    }

    // TODO: Detect other features
}
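When the full capability lists are not needed and only a single format has to be checked, StreamConfigurationMap also provides isOutputSupportedFor(int), which avoids scanning the getOutputFormats() array by hand. A minimal sketch, reusing the map obtained as s in the constructor above:

// True if the camera can produce JPEG output streams.
boolean jpegSupported = s.isOutputSupportedFor(ImageFormat.JPEG);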
 
Example 4
Source File: VideoCaptureCamera2.java    From 365browser with Apache License 2.0
static VideoCaptureFormat[] getDeviceSupportedFormats(int id) {
    final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
    if (cameraCharacteristics == null) return null;

    final int[] capabilities =
            cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    // Per-format frame rate via getOutputMinFrameDuration() is only available if the
    // property REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR is set.
    boolean minFrameDurationAvailable = false;
    for (int cap : capabilities) {
        if (cap == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
            minFrameDurationAvailable = true;
            break;
        }
    }

    ArrayList<VideoCaptureFormat> formatList = new ArrayList<VideoCaptureFormat>();
    final StreamConfigurationMap streamMap =
            cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final int[] formats = streamMap.getOutputFormats();
    for (int format : formats) {
        final Size[] sizes = streamMap.getOutputSizes(format);
        if (sizes == null) continue;
        for (Size size : sizes) {
            double minFrameRate = 0.0f;
            if (minFrameDurationAvailable) {
                final long minFrameDuration = streamMap.getOutputMinFrameDuration(format, size);
                // getOutputMinFrameDuration() is in nanoseconds, so fps = 1e9 / duration.
                // Parenthesized assuming kNanoSecondsToFps is 1.0E-9 (definition not shown here).
                minFrameRate = (minFrameDuration == 0)
                        ? 0.0
                        : (1.0 / (kNanoSecondsToFps * minFrameDuration));
            } else {
                // TODO(mcasas): find out where to get the info from in this case.
                // Hint: perhaps using SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS.
                minFrameRate = 0.0;
            }
            formatList.add(new VideoCaptureFormat(
                    size.getWidth(), size.getHeight(), (int) minFrameRate, 0));
        }
    }
    return formatList.toArray(new VideoCaptureFormat[formatList.size()]);
}
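getOutputMinFrameDuration() reports the minimum frame duration for a (format, size) pair in nanoseconds, so the maximum achievable frame rate is 1e9 divided by that duration. A self-contained version of the conversion, without the project's kNanoSecondsToFps constant (whose definition is not part of this excerpt):

long minFrameDurationNs = streamMap.getOutputMinFrameDuration(format, size);
double maxFps = (minFrameDurationNs == 0) ? 0.0 : 1_000_000_000.0 / minFrameDurationNs;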