Java Code Examples for android.hardware.camera2.TotalCaptureResult#get()

The following examples show how to use android.hardware.camera2.TotalCaptureResult#get(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to visit the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: MetadataResponseListener.java    From Camera2 with Apache License 2.0 5 votes vote down vote up
@Override
public void onCompleted(TotalCaptureResult totalCaptureResult)
{
    // Forward this frame's value for our key to the updatable; frames that do
    // not carry the key are silently skipped.
    final V value = totalCaptureResult.get(mKey);
    if (value == null)
    {
        return;
    }
    mUpdatable.update(value);
}
 
Example 2
Source File: ImageCaptureManager.java    From Camera2 with Apache License 2.0 5 votes vote down vote up
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                               final TotalCaptureResult result)
{
    // NOTE(review): get() returns a boxed Long; unboxing straight into a
    // primitive long throws NullPointerException if SENSOR_TIMESTAMP is
    // absent from this result — confirm it is always reported here.
    final long timestamp = result.get(TotalCaptureResult.SENSOR_TIMESTamp);

    updateMetadataChangeListeners(result);

    // Detect camera thread stall: if completions arrive closer together than
    // DEBUG_INTERFRAME_STALL_WARNING we assume delivery was bunched up after a
    // stall, and count consecutive offending frames. Note the counter is
    // pre-incremented inside the log expression.
    long now = SystemClock.uptimeMillis();
    if (now - mDebugLastOnCaptureCompletedMillis < DEBUG_INTERFRAME_STALL_WARNING)
    {
        Log.e(TAG, "Camera thread has stalled for " + ++mDebugStalledFrameCount +
                " frames at # " + result.getFrameNumber() + ".");
    } else
    {
        mDebugStalledFrameCount = 0;
    }
    mDebugLastOnCaptureCompletedMillis = now;

    // Find the CapturedImage in the ring-buffer and attach the
    // TotalCaptureResult to it.
    // See documentation for swapLeast() for details.
    boolean swapSuccess = doMetaDataSwap(result, timestamp);
    if (!swapSuccess)
    {
        // Do nothing on failure to swap in.
        Log.v(TAG, "Unable to add new image metadata to ring-buffer.");
    }

    // A pending capture request may have been waiting for this frame's
    // metadata; give it a chance to run now.
    tryExecutePendingCaptureRequest(timestamp);
}
 
Example 3
Source File: FramerateJankDetector.java    From Camera2 with Apache License 2.0 5 votes vote down vote up
@Override
public void onCompleted(TotalCaptureResult result)
{
    // SENSOR_TIMESTAMP is a boxed Long; the previous code unboxed it directly,
    // which throws NullPointerException when the key is missing from the
    // result. Skip such frames instead of crashing the jank detector.
    Long timestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
    if (timestampNs == null)
    {
        return;
    }
    long timestamp = timestampNs;

    // A delta only exists once we have seen at least one prior frame.
    if (mLastFrameTimestamp >= 0)
    {
        // Timestamps are nanoseconds; convert the inter-frame gap to millis.
        double deltaMillis = (timestamp - mLastFrameTimestamp) / 1000000.0;

        if (mLastDeltaMillis > 0)
        {
            // Relative growth of this frame interval over the previous one;
            // a large positive value means the frame rate dipped (jank).
            double fractionalChange = (deltaMillis - mLastDeltaMillis) / mLastDeltaMillis;
            if (fractionalChange >= FRACTIONAL_CHANGE_STATS_THRESHOLD)
            {
                mUsageStatistics.cameraFrameDrop(deltaMillis, mLastDeltaMillis);
            }

            if (fractionalChange >= FRACTIONAL_CHANGE_LOG_THRESHOLD)
            {
                mLog.v("JANK! Time between frames (" + deltaMillis + "ms) increased by " +
                        (fractionalChange * 100) + "% over the last frame delta (" +
                        mLastDeltaMillis + "ms)");
            }
        }
        mLastDeltaMillis = deltaMillis;
    }

    mLastFrameTimestamp = timestamp;
}
 
Example 4
Source File: Camera2ApiManager.java    From rtmp-rtsp-stream-client-java with Apache License 2.0 5 votes vote down vote up
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
  // Deliver this frame's face-detection statistics (which may be null) to the
  // registered callback, if any.
  if (faceDetectorCallback == null) {
    return;
  }
  faceDetectorCallback.onGetFaces(result.get(CaptureResult.STATISTICS_FACES));
}
 
Example 5
Source File: Camera2Proxy.java    From mobile-ar-sensor-logger with GNU General Public License v3.0 4 votes vote down vote up
@Override
public void onCaptureCompleted(CameraCaptureSession session,
                               CaptureRequest request,
                               TotalCaptureResult result) {
    // Per-frame metadata of interest. Any of these boxed values may be null
    // if the device does not report the corresponding key.
    Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
    Long number = result.getFrameNumber();
    Long exposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);

    Long frmDurationNs = result.get(CaptureResult.SENSOR_FRAME_DURATION);
    Long frmReadoutNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW);
    Integer iso = result.get(CaptureResult.SENSOR_SENSITIVITY);

    // Bound the exposure-stats history: once it exceeds kMaxExpoSamples,
    // drop the oldest half before recording this frame.
    if (expoStats.size() > kMaxExpoSamples) {
        expoStats.subList(0, kMaxExpoSamples / 2).clear();
    }
    expoStats.add(new NumExpoIso(number, exposureTimeNs, iso));

    Float fl = result.get(CaptureResult.LENS_FOCAL_LENGTH);
    Float fd = result.get(CaptureResult.LENS_FOCUS_DISTANCE);
    Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE);
    Rect rect = result.get(CaptureResult.SCALER_CROP_REGION);

    // Derive the focal length in pixels from the optics metadata above.
    // NOTE(review): getFocalLengthPixel() is assumed non-null here — a null
    // return would NPE below; verify against FocalLengthHelper.
    mFocalLengthHelper.setmFocalLength(fl);
    mFocalLengthHelper.setmFocusDistance(fd);
    mFocalLengthHelper.setmCropRegion(rect);
    SizeF sz_focal_length = mFocalLengthHelper.getFocalLengthPixel();

    // Build the CSV line with chained append() calls. The previous code used
    // sb.append(delimiter + value), which concatenates via an intermediate
    // String and defeats the purpose of the StringBuilder; output is
    // identical since both paths format values with String.valueOf.
    final String delimiter = ",";
    StringBuilder sb = new StringBuilder();
    sb.append(timestamp)
            .append(delimiter).append(sz_focal_length.getWidth())
            .append(delimiter).append(sz_focal_length.getHeight())
            .append(delimiter).append(number)
            .append(delimiter).append(exposureTimeNs)
            .append(delimiter).append(frmDurationNs)
            .append(delimiter).append(frmReadoutNs)
            .append(delimiter).append(iso)
            .append(delimiter).append(fl)
            .append(delimiter).append(fd)
            .append(delimiter).append(afMode);
    String frame_info = sb.toString();

    // Persist the metadata line while recording; a write failure is logged
    // and otherwise ignored (best-effort logging).
    if (mRecordingMetadata) {
        try {
            mFrameMetadataWriter.write(frame_info + "\n");
        } catch (IOException err) {
            System.err.println("Error writing captureResult: " + err.getMessage());
        }
    }
    ((CameraCaptureActivity) mActivity).updateCaptureResultPanel(
            sz_focal_length.getWidth(), exposureTimeNs, afMode);
}
 
Example 6
Source File: OneCameraZslImpl.java    From Camera2 with Apache License 2.0 4 votes vote down vote up
@Override
public void onImageCaptured(Image image, TotalCaptureResult captureResult)
{
    // NOTE(review): get() returns a boxed Long; unboxing into a primitive
    // long NPEs if SENSOR_TIMESTAMP is absent — confirm it is always present.
    long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);

    // We should only capture the image if it hasn't been captured
    // before. Synchronization is necessary since
    // mCapturedImageTimestamps is read & modified elsewhere.
    synchronized (mCapturedImageTimestamps)
    {
        if (!mCapturedImageTimestamps.contains(timestamp))
        {
            mCapturedImageTimestamps.add(timestamp);
        } else
        {
            // There was a more recent (or identical) image which has
            // begun being saved, so abort.
            return;
        }

        // Clear out old timestamps from the set.
        // We must keep old timestamps in the set a little longer (a
        // factor of 2 seems adequate) to ensure they are cleared out of
        // the ring buffer before their timestamp is removed from the
        // set.
        long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
        if (mCapturedImageTimestamps.size() > maxTimestamps)
        {
            // Evict oldest-first: sort a snapshot of the timestamps and
            // remove from the front until the set is back under the cap.
            ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
            Collections.sort(timestamps);
            for (int i = 0; i < timestamps.size()
                    && mCapturedImageTimestamps.size() > maxTimestamps; i++)
            {
                mCapturedImageTimestamps.remove(timestamps.get(i));
            }
        }
    }

    // Capture is committed; allow the next capture request to proceed.
    mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);

    // Persist the image, notify the caller, then log for debugging.
    savePicture(image, mParams, mSession, captureResult);
    mParams.callback.onPictureTaken(mSession);
    Log.v(TAG, "Image saved.  Frame number = " + captureResult.getFrameNumber());
}
 
Example 7
Source File: HdrViewfinderActivity.java    From android-HdrViewfinder with Apache License 2.0 4 votes vote down vote up
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {

    // Only update UI every so many frames
    // Use an odd number here to ensure both even and odd exposures get an occasional update
    long frameNumber = result.getFrameNumber();
    if (frameNumber % 3 != 0) return;

    final Long exposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
    if (exposureTime == null) {
        throw new RuntimeException("Cannot get exposure time.");
    }

    // Format exposure time nicely
    String exposureText;
    if (exposureTime > ONE_SECOND) {
        exposureText = String.format(Locale.US, "%.2f s", exposureTime / 1e9);
    } else if (exposureTime > MILLI_SECOND) {
        exposureText = String.format(Locale.US, "%.2f ms", exposureTime / 1e6);
    } else if (exposureTime > MICRO_SECOND) {
        exposureText = String.format(Locale.US, "%.2f us", exposureTime / 1e3);
    } else {
        exposureText = String.format(Locale.US, "%d ns", exposureTime);
    }

    Object tag = request.getTag();
    Log.i(TAG, "Exposure: " + exposureText);

    if (tag == mEvenExposureTag) {
        mEvenExposureText.setText(exposureText);

        mEvenExposureText.setEnabled(true);
        mOddExposureText.setEnabled(true);
        mAutoExposureText.setEnabled(false);
    } else if (tag == mOddExposureTag) {
        mOddExposureText.setText(exposureText);

        mEvenExposureText.setEnabled(true);
        mOddExposureText.setEnabled(true);
        mAutoExposureText.setEnabled(false);
    } else {
        mAutoExposureText.setText(exposureText);

        mEvenExposureText.setEnabled(false);
        mOddExposureText.setEnabled(false);
        mAutoExposureText.setEnabled(true);
    }
}