org.webrtc.CameraEnumerationAndroid Java Examples

The following examples show how to use org.webrtc.CameraEnumerationAndroid. They are taken from open-source projects; you can go to the original project or source file by following the links above each example.
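
Most of the snippets below use the older static helpers (getDeviceCount, getNameOfFrontFacingDevice, and so on). In recent WebRTC releases, device enumeration instead goes through a CameraEnumerator implementation, and CameraEnumerationAndroid mainly contributes the nested CaptureFormat type. A minimal sketch of listing devices and their supported formats, assuming a Camera2Enumerator and an Android Context named context (both assumptions, not part of the examples below):

import android.content.Context;
import java.util.List;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerator;

void listCameras(Context context) {
    CameraEnumerator enumerator = new Camera2Enumerator(context);
    for (String deviceName : enumerator.getDeviceNames()) {
        List<CameraEnumerationAndroid.CaptureFormat> formats =
                enumerator.getSupportedFormats(deviceName);
        for (CameraEnumerationAndroid.CaptureFormat format : formats) {
            // CaptureFormat exposes width, height and a min/max framerate range;
            // framerate values are in frames per second multiplied by 1000.
            System.out.println(deviceName + ": " + format.width + "x" + format.height
                    + " @ " + format.framerate.max / 1000 + " fps");
        }
    }
}
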
Example #1
Source File: WebRtcClient.java    From imsdk-android with MIT License
public void switchCameraInternal() {
    if (CameraEnumerationAndroid.getDeviceCount() > 1) {
        if (!pcParams.videoCallEnabled || isError || localMS == null) {
            LogUtil.e(TAG, "Failed to switch camera. Video: " + pcParams.videoCallEnabled + ". Error: " + isError);
            return; // No video is sent or an error happened.
        }
        LogUtil.d(TAG, "Switch camera");
        videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
            @Override
            public void onCameraSwitchDone(boolean isFrontCamera) {
                Logger.i(TAG + " camera switch callback succeeded; front-facing: " + isFrontCamera);
            }

            @Override
            public void onCameraSwitchError(String errorDescription) {
                Logger.i(TAG + " camera switch callback error: " + errorDescription);
            }
        });
    } else {
        LogUtil.d(TAG, "Will not switch camera, only one camera is available");
    }
}
 
Example #2
Source File: MediaResourceManager.java    From webrtcpeer-android with Apache License 2.0
boolean hasCameraPosition(NBMMediaConfiguration.NBMCameraPosition position) {
    String backName = CameraEnumerationAndroid.getNameOfBackFacingDevice();
    String frontName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();

    if (position == NBMMediaConfiguration.NBMCameraPosition.ANY) {
        return backName != null || frontName != null;
    } else if (position == NBMMediaConfiguration.NBMCameraPosition.BACK) {
        return backName != null;
    } else if (position == NBMMediaConfiguration.NBMCameraPosition.FRONT) {
        return frontName != null;
    }
    return false;
}
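
Newer WebRTC releases drop the static getNameOfBackFacingDevice/getNameOfFrontFacingDevice helpers; a roughly equivalent check can be written against the CameraEnumerator interface. A minimal sketch, not the library's own API for this class, assuming a Camera2Enumerator and a Context named context:

import android.content.Context;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;

boolean hasFacing(Context context, boolean wantFront) {
    CameraEnumerator enumerator = new Camera2Enumerator(context);
    for (String deviceName : enumerator.getDeviceNames()) {
        // isFrontFacing/isBackFacing report the lens direction per device name.
        if (wantFront ? enumerator.isFrontFacing(deviceName)
                      : enumerator.isBackFacing(deviceName)) {
            return true;
        }
    }
    return false;
}
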
 
Example #3
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0
@Nullable
private static CapturerChoice of(CameraEnumerator enumerator, final String deviceName, Set<String> availableCameras) {
    final CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
    if (capturer == null) {
        return null;
    }
    final ArrayList<CameraEnumerationAndroid.CaptureFormat> choices = new ArrayList<>(enumerator.getSupportedFormats(deviceName));
    // Sort widest-first, then pick the widest format whose width does not
    // exceed CAPTURING_RESOLUTION.
    Collections.sort(choices, (a, b) -> b.width - a.width);
    for (final CameraEnumerationAndroid.CaptureFormat captureFormat : choices) {
        if (captureFormat.width <= CAPTURING_RESOLUTION) {
            return new CapturerChoice(capturer, captureFormat, availableCameras);
        }
    }
    return null;
}
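
A plausible call site for this helper, not shown in the source: iterate the enumerator's device names and take the first camera that yields a usable format. The Camera2Enumerator, the context variable and the loop itself are assumptions here; of(...) and CapturerChoice are from the example above.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;

CameraEnumerator enumerator = new Camera2Enumerator(context);
Set<String> cameras = new HashSet<>(Arrays.asList(enumerator.getDeviceNames()));
CapturerChoice choice = null;
for (String deviceName : enumerator.getDeviceNames()) {
    // First device offering a format at or below CAPTURING_RESOLUTION wins.
    choice = of(enumerator, deviceName, cameras);
    if (choice != null) {
        break;
    }
}
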
 
Example #4
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0
@Nullable
private static CapturerChoice of(CameraEnumerator enumerator, final String deviceName, Set<String> availableCameras) {
    final CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
    if (capturer == null) {
        return null;
    }
    final ArrayList<CameraEnumerationAndroid.CaptureFormat> choices = new ArrayList<>(enumerator.getSupportedFormats(deviceName));
    // Sort widest-first, then pick the widest format whose width does not
    // exceed CAPTURING_RESOLUTION.
    Collections.sort(choices, (a, b) -> b.width - a.width);
    for (final CameraEnumerationAndroid.CaptureFormat captureFormat : choices) {
        if (captureFormat.width <= CAPTURING_RESOLUTION) {
            return new CapturerChoice(capturer, captureFormat, availableCameras);
        }
    }
    return null;
}
 
Example #5
Source File: PeerConnectionClient.java    From Yahala-Messenger with MIT License
private void createMediaConstraintsInternal() {
    // Create peer connection constraints.
    pcConstraints = new MediaConstraints();
    // Enable DTLS for normal calls and disable for loopback calls.
    if (peerConnectionParameters.loopback) {
        pcConstraints.optional.add(
                new KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
    } else {
        pcConstraints.optional.add(
                new KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
    }

    // Check if there is a camera on device and disable video call if not.
    numberOfCameras = CameraEnumerationAndroid.getDeviceCount();
    if (numberOfCameras == 0) {
        Log.w(TAG, "No camera on device. Switch to audio only call.");
        videoCallEnabled = false;
    }
    // Create video constraints if video call is enabled.
    if (videoCallEnabled) {
        videoConstraints = new MediaConstraints();
        int videoWidth = peerConnectionParameters.videoWidth;
        int videoHeight = peerConnectionParameters.videoHeight;

        // If the VP8 hardware encoder is supported and the video resolution is
        // not specified, force it to HD.
        if ((videoWidth == 0 || videoHeight == 0)
                && peerConnectionParameters.videoCodecHwAcceleration
                && MediaCodecVideoEncoder.isVp8HwSupported()) {
            videoWidth = HD_VIDEO_WIDTH;
            videoHeight = HD_VIDEO_HEIGHT;
        }

        // Add video resolution constraints.
        if (videoWidth > 0 && videoHeight > 0) {
            videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
            videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
            videoConstraints.mandatory.add(new KeyValuePair(
                    MIN_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
            videoConstraints.mandatory.add(new KeyValuePair(
                    MAX_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
            videoConstraints.mandatory.add(new KeyValuePair(
                    MIN_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
            videoConstraints.mandatory.add(new KeyValuePair(
                    MAX_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
        }

        // Add fps constraints.
        int videoFps = peerConnectionParameters.videoFps;
        if (videoFps > 0) {
            videoFps = Math.min(videoFps, MAX_VIDEO_FPS);
            videoConstraints.mandatory.add(new KeyValuePair(
                    MIN_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
            videoConstraints.mandatory.add(new KeyValuePair(
                    MAX_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
        }
    }

    // Create audio constraints.
    audioConstraints = new MediaConstraints();
    // Added for audio performance measurements.
    if (peerConnectionParameters.noAudioProcessing) {
        Log.d(TAG, "Disabling audio processing");
        audioConstraints.mandatory.add(new KeyValuePair(
                AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new KeyValuePair(
                AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new KeyValuePair(
                AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new KeyValuePair(
                AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    }
    // Create SDP constraints.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new KeyValuePair(
            "OfferToReceiveAudio", "true"));
    if (videoCallEnabled || peerConnectionParameters.loopback) {
        sdpMediaConstraints.mandatory.add(new KeyValuePair(
                "OfferToReceiveVideo", "true"));
    } else {
        sdpMediaConstraints.mandatory.add(new KeyValuePair(
                "OfferToReceiveVideo", "false"));
    }
}
 
Example #6
Source File: PeerConnectionClient.java    From Yahala-Messenger with MIT License
private void createPeerConnectionInternal(EGLContext renderEGLContext) {
    if (factory == null || isError) {
        Log.e(TAG, "Peerconnection factory is not created");
        return;
    }
    Log.d(TAG, "Create peer connection.");

    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
        Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    if (videoCallEnabled) {
        Log.d(TAG, "EGLContext: " + renderEGLContext);
        factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    PeerConnection.RTCConfiguration rtcConfig =
            new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    // Use ECDSA encryption.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

    peerConnection = factory.createPeerConnection(
            rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!

    Logging.enableTracing(
            "logcat:",
            EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
            Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
        String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
        String frontCameraDeviceName =
                CameraEnumerationAndroid.getNameOfFrontFacingDevice();
        if (numberOfCameras > 1 && frontCameraDeviceName != null) {
            cameraDeviceName = frontCameraDeviceName;
        }
        Log.d(TAG, "Opening camera: " + cameraDeviceName);
        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
                peerConnectionParameters.captureToTexture ? renderEGLContext : null);
        if (videoCapturer == null) {
            reportError("Failed to open camera");
            return;
        }
        mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(factory.createAudioTrack(
            AUDIO_TRACK_ID,
            factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);

    Log.d(TAG, "Peer connection created.");
}
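
Note that VideoCapturerAndroid and setVideoHwAccelerationOptions were removed in later WebRTC releases; the modern pattern creates a CameraVideoCapturer from an enumerator and starts it through a SurfaceTextureHelper. A minimal sketch under that assumption, where context, eglBase, videoSource and the 1280x720@30 settings are placeholders rather than values from this example:

import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.SurfaceTextureHelper;

// Assumed modern replacement for VideoCapturerAndroid.create(...).
CameraEnumerator enumerator = new Camera2Enumerator(context);
String deviceName = enumerator.getDeviceNames()[0];
CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
// videoSource would come from factory.createVideoSource(false).
capturer.initialize(helper, context, videoSource.getCapturerObserver());
capturer.startCapture(1280, 720, 30);
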
 
Example #7
Source File: MediaResourceManager.java    From webrtcpeer-android with Apache License 2.0
void createLocalMediaStream(Object renderEGLContext, final VideoRenderer.Callbacks localRender) {
    if (factory == null) {
        Log.e(TAG, "PeerConnection factory is not created");
        return;
    }
    this.localRender = localRender;
    if (videoCallEnabled) {
        factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // If video call is enabled and the device has camera(s)
    if (videoCallEnabled && numberOfCameras > 0) {
        String cameraDeviceName;
        String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
        String backCameraDeviceName = CameraEnumerationAndroid.getNameOfBackFacingDevice();

        // If the current camera is set to front and the device has one
        if (currentCameraPosition == NBMCameraPosition.FRONT && frontCameraDeviceName != null) {
            cameraDeviceName = frontCameraDeviceName;
        }
        // If the current camera is set to back and the device has one
        else if (currentCameraPosition == NBMCameraPosition.BACK && backCameraDeviceName != null) {
            cameraDeviceName = backCameraDeviceName;
        }
        // If the current camera is set to ANY, pick the device's first camera,
        // which should be a back-facing camera according to the libjingle API
        else {
            cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
            currentCameraPosition = NBMCameraPosition.BACK;
        }

        Log.d(TAG, "Opening camera: " + cameraDeviceName);
        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
        if (videoCapturer == null) {
            Log.d(TAG, "Error while opening camera");
            return;
        }
        localMediaStream.addTrack(createCapturerVideoTrack(videoCapturer));
    }

    // Create audio track
    localMediaStream.addTrack(factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));

    Log.d(TAG, "Local media stream created.");
}
 
Example #8
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0
CapturerChoice(CameraVideoCapturer cameraVideoCapturer, CameraEnumerationAndroid.CaptureFormat captureFormat, Set<String> cameras) {
    this.cameraVideoCapturer = cameraVideoCapturer;
    this.captureFormat = captureFormat;
    this.availableCameras = cameras;
}
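
For context, the fields this constructor fills are declared on the enclosing class. A minimal skeleton consistent with Examples #3 and #8; only the field names and types are taken from the examples, while the class modifiers are assumptions:

import java.util.Set;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraVideoCapturer;

static class CapturerChoice {
    // Field names and types inferred from the constructor above.
    final CameraVideoCapturer cameraVideoCapturer;
    final CameraEnumerationAndroid.CaptureFormat captureFormat;
    final Set<String> availableCameras;

    CapturerChoice(CameraVideoCapturer cameraVideoCapturer,
                   CameraEnumerationAndroid.CaptureFormat captureFormat,
                   Set<String> cameras) {
        this.cameraVideoCapturer = cameraVideoCapturer;
        this.captureFormat = captureFormat;
        this.availableCameras = cameras;
    }
}
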
 
Example #9
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0
CapturerChoice(CameraVideoCapturer cameraVideoCapturer, CameraEnumerationAndroid.CaptureFormat captureFormat, Set<String> cameras) {
    this.cameraVideoCapturer = cameraVideoCapturer;
    this.captureFormat = captureFormat;
    this.availableCameras = cameras;
}