org.webrtc.SurfaceTextureHelper Java Examples

The following examples show how to use org.webrtc.SurfaceTextureHelper. Each example names the open-source project and license it was taken from.
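Across all of the examples the pattern is the same: create a SurfaceTextureHelper on a shared EglBase context, hand it to a VideoCapturer together with the VideoSource's CapturerObserver, start capture, and dispose everything in reverse order when the session ends. The following is a minimal, self-contained sketch of that lifecycle; the class name, field layout and the 480x640@30 format are illustrative and not taken from any of the projects below.

import android.content.Context;

import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;

final class CaptureSession {
    private final SurfaceTextureHelper helper;
    private final VideoSource source;
    private final VideoCapturer capturer;

    CaptureSession(PeerConnectionFactory factory, VideoCapturer capturer,
                   EglBase.Context eglContext, Context appContext) {
        this.capturer = capturer;
        // The helper owns the capture thread and the OES texture the camera renders into.
        this.helper = SurfaceTextureHelper.create("CaptureThread", eglContext);
        this.source = factory.createVideoSource(capturer.isScreencast());
        capturer.initialize(helper, appContext, source.getCapturerObserver());
        capturer.startCapture(/* width= */ 480, /* height= */ 640, /* framerate= */ 30);
    }

    VideoSource source() {
        return source;
    }

    void release() {
        try {
            capturer.stopCapture();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        capturer.dispose();
        source.dispose();
        helper.dispose(); // stops the capture thread and releases the texture
    }
}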
Example #1
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // create PeerConnectionFactory
    PeerConnectionFactory.InitializationOptions initializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(this).createInitializationOptions();
    PeerConnectionFactory.initialize(initializationOptions);
    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();

    // create AudioSource
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer();
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    SurfaceViewRenderer localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // display in localView
    videoTrack.addSink(localView);
}
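Example #1 calls a createCameraCapturer() helper that is not shown in the snippet. A typical implementation, sketched here as an assumption rather than the tutorial's actual code, picks a front-facing camera through the Camera2 or Camera1 enumerator:

private VideoCapturer createCameraCapturer() {
    CameraEnumerator enumerator = Camera2Enumerator.isSupported(this)
            ? new Camera2Enumerator(this)
            : new Camera1Enumerator(true /* captureToTexture */);
    // Prefer a front-facing camera, then fall back to the first device of any kind.
    for (String deviceName : enumerator.getDeviceNames()) {
        if (enumerator.isFrontFacing(deviceName)) {
            return enumerator.createCapturer(deviceName, null /* eventsHandler */);
        }
    }
    for (String deviceName : enumerator.getDeviceNames()) {
        return enumerator.createCapturer(deviceName, null /* eventsHandler */);
    }
    return null;
}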
 
Example #2
Source File: WebRTCEngine.java    From webrtc_android with MIT License
/**
 * Create the local media stream.
 */
public void createLocalStream() {
    _localStream = _factory.createLocalMediaStream("ARDAMS");
    // audio
    audioSource = _factory.createAudioSource(createAudioConstraints());
    _localAudioTrack = _factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    _localStream.addTrack(_localAudioTrack);

    // video
    if (!mIsAudioOnly) {
        captureAndroid = createVideoCapture();
        surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", mRootEglBase.getEglBaseContext());
        videoSource = _factory.createVideoSource(captureAndroid.isScreencast());

        captureAndroid.initialize(surfaceTextureHelper, mContext, videoSource.getCapturerObserver());
        captureAndroid.startCapture(VIDEO_RESOLUTION_WIDTH, VIDEO_RESOLUTION_HEIGHT, FPS);

        VideoTrack _localVideoTrack = _factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
        _localStream.addTrack(_localVideoTrack);
    }
}
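Because the class keeps references to captureAndroid, surfaceTextureHelper and the sources, the matching release path is straightforward. The following is a hedged sketch of such a counterpart; the method name and null checks are assumptions, only the stopCapture()/dispose() calls are standard org.webrtc API.

public void destroyLocalStream() {
    if (captureAndroid != null) {
        try {
            captureAndroid.stopCapture();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        captureAndroid.dispose();
        captureAndroid = null;
    }
    if (surfaceTextureHelper != null) {
        surfaceTextureHelper.dispose();
        surfaceTextureHelper = null;
    }
    if (videoSource != null) {
        videoSource.dispose();
        videoSource = null;
    }
    if (audioSource != null) {
        audioSource.dispose();
        audioSource = null;
    }
}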
 
Example #3
Source File: Camera.java    From mollyim-android with GNU General Public License v3.0
@Override
public void initCapturer(@NonNull CapturerObserver observer) {
  if (capturer != null) {
    capturer.initialize(SurfaceTextureHelper.create("WebRTC-SurfaceTextureHelper", eglBase.getEglBaseContext()),
                        context,
                        observer);
  }
}
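The helper created here is never stored, so it cannot be disposed when the camera is torn down. A variant that keeps a reference for later cleanup might look like the sketch below; the surfaceTextureHelper field and the dispose() method are assumptions, not mollyim-android's actual code.

private SurfaceTextureHelper surfaceTextureHelper;

@Override
public void initCapturer(@NonNull CapturerObserver observer) {
  if (capturer != null) {
    surfaceTextureHelper =
        SurfaceTextureHelper.create("WebRTC-SurfaceTextureHelper", eglBase.getEglBaseContext());
    capturer.initialize(surfaceTextureHelper, context, observer);
  }
}

public void dispose() {
  if (surfaceTextureHelper != null) {
    surfaceTextureHelper.dispose();
    surfaceTextureHelper = null;
  }
}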
 
Example #4
Source File: MediaStreamFactory.java    From owt-client-android with Apache License 2.0
MediaStream createMediaStream(VideoCapturer videoCapturer,
        AudioTrackConstraints audioMediaConstraints) {
    RCHECK(videoCapturer != null || audioMediaConstraints != null);

    String label = UUID.randomUUID().toString();
    MediaStream mediaStream = PCFactoryProxy.instance().createLocalMediaStream(label);

    if (videoCapturer != null) {
        VideoSource videoSource = PCFactoryProxy.instance().createVideoSource(
                videoCapturer.isScreencast());
        SurfaceTextureHelper helper = SurfaceTextureHelper.create("CT", localContext);
        videoCapturer.initialize(helper, ContextInitialization.context,
                videoSource.getCapturerObserver());
        videoCapturer.startCapture(videoCapturer.getWidth(),
                videoCapturer.getHeight(),
                videoCapturer.getFps());
        VideoTrack videoTrack = PCFactoryProxy.instance().createVideoTrack(label + "v0",
                videoSource);
        videoTrack.setEnabled(true);
        mediaStream.addTrack(videoTrack);
        unsharedVideoSources.put(label, videoSource);
    }

    if (audioMediaConstraints != null) {
        if (sharedAudioSource == null) {
            sharedAudioSource = PCFactoryProxy.instance().createAudioSource(
                    audioMediaConstraints.convertToWebRTCConstraints());
        }
        audioSourceRef++;
        mediaStream.addTrack(
                PCFactoryProxy.instance().createAudioTrack(label + "a0", sharedAudioSource));
    }

    return mediaStream;
}
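The shared audio source is reference-counted through audioSourceRef, so the matching release path presumably decrements the counter and disposes the source once nothing uses it any more. A sketch under that assumption (the method name is hypothetical):

void releaseAudioSource() {
    if (sharedAudioSource == null) {
        return;
    }
    audioSourceRef--;
    if (audioSourceRef <= 0) {
        sharedAudioSource.dispose();
        sharedAudioSource = null;
        audioSourceRef = 0;
    }
}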
 
Example #5
Source File: PeerConnectionClient.java    From voip_android with BSD 3-Clause "New" or "Revised" License
@Nullable
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    surfaceTextureHelper =
            SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
    videoSource = factory.createVideoSource(capturer.isScreencast());
    capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addSink(localRender);
    return localVideoTrack;
}
 
Example #6
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

        // create PeerConnectionFactory
        PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
                .builder(this)
                .createInitializationOptions());
        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        DefaultVideoEncoderFactory defaultVideoEncoderFactory =
                new DefaultVideoEncoderFactory(eglBaseContext, true, true);
        DefaultVideoDecoderFactory defaultVideoDecoderFactory =
                new DefaultVideoDecoderFactory(eglBaseContext);
        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoEncoderFactory(defaultVideoEncoderFactory)
                .setVideoDecoderFactory(defaultVideoDecoderFactory)
                .createPeerConnectionFactory();

        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer videoCapturer = createCameraCapturer(true);
        VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
        videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(480, 640, 30);

        localView = findViewById(R.id.localView);
        localView.setMirror(true);
        localView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
//        // display in localView
//        videoTrack.addSink(localView);

        SurfaceTextureHelper remoteSurfaceTextureHelper = SurfaceTextureHelper.create("RemoteCaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer remoteVideoCapturer = createCameraCapturer(false);
        VideoSource remoteVideoSource = peerConnectionFactory.createVideoSource(remoteVideoCapturer.isScreencast());
        remoteVideoCapturer.initialize(remoteSurfaceTextureHelper, getApplicationContext(), remoteVideoSource.getCapturerObserver());
        remoteVideoCapturer.startCapture(480, 640, 30);

        remoteView = findViewById(R.id.remoteView);
        remoteView.setMirror(false);
        remoteView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack remoteVideoTrack = peerConnectionFactory.createVideoTrack("102", remoteVideoSource);
//        // display in remoteView
//        remoteVideoTrack.addSink(remoteView);

        mediaStreamLocal = peerConnectionFactory.createLocalMediaStream("mediaStreamLocal");
        mediaStreamLocal.addTrack(videoTrack);

        mediaStreamRemote = peerConnectionFactory.createLocalMediaStream("mediaStreamRemote");
        mediaStreamRemote.addTrack(remoteVideoTrack);

        call(mediaStreamLocal, mediaStreamRemote);
    }
 
Example #7
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

        // create PeerConnectionFactory
        PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
                .builder(this)
                .createInitializationOptions());
        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        DefaultVideoEncoderFactory defaultVideoEncoderFactory =
                new DefaultVideoEncoderFactory(eglBaseContext, true, true);
        DefaultVideoDecoderFactory defaultVideoDecoderFactory =
                new DefaultVideoDecoderFactory(eglBaseContext);
        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoEncoderFactory(defaultVideoEncoderFactory)
                .setVideoDecoderFactory(defaultVideoDecoderFactory)
                .createPeerConnectionFactory();

        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer videoCapturer = createCameraCapturer(true);
        VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
        videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(480, 640, 30);

        localView = findViewById(R.id.localView);
        localView.setMirror(true);
        localView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
        // display in localView
        videoTrack.addSink(localView);


        remoteView = findViewById(R.id.remoteView);
        remoteView.setMirror(false);
        remoteView.init(eglBaseContext, null);


        AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

        mediaStream = peerConnectionFactory.createLocalMediaStream("mediaStream");
        mediaStream.addTrack(videoTrack);
        mediaStream.addTrack(audioTrack);

        SignalingClient.get().setCallback(this);
        call();
    }
 
Example #8
Source File: PeerConnectionWrapper.java    From bcm-android with GNU General Public License v3.0
public PeerConnectionWrapper(@NonNull Context context,
                             @NonNull PeerConnectionFactory factory,
                             @NonNull PeerConnection.Observer observer,
                             @NonNull VideoSink localRenderer,
                             @NonNull List<PeerConnection.IceServer> turnServers,
                             @NonNull CameraEventListener cameraEventListener,
                             @NonNull EglBase eglBase,
                             boolean hideIp) {
    List<PeerConnection.IceServer> iceServers = new LinkedList<>();
    iceServers.add(STUN_SERVER);
    iceServers.addAll(turnServers);

    this.iceServers = iceServers;

    MediaConstraints constraints = new MediaConstraints();
    MediaConstraints audioConstraints = new MediaConstraints();
    PeerConnection.RTCConfiguration configuration = new PeerConnection.RTCConfiguration(iceServers);

    configuration.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    configuration.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

    if (hideIp) {
        configuration.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
    }

    constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

    this.peerConnection = factory.createPeerConnection(configuration, constraints, observer);
    this.peerConnection.setAudioPlayout(false);
    this.peerConnection.setAudioRecording(false);

    this.mediaStream = factory.createLocalMediaStream("ARDAMS");
    this.audioSource = factory.createAudioSource(audioConstraints);
    this.audioTrack = factory.createAudioTrack("ARDAMSa0", audioSource);
    this.audioTrack.setEnabled(false);
    mediaStream.addTrack(audioTrack);

    this.camera = new Camera(context, cameraEventListener);

    if (camera.capturer != null) {
        this.videoSource = factory.createVideoSource(false);
        this.videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);

        camera.capturer.initialize(SurfaceTextureHelper.create("WebRTC-SurfaceTextureHelper", eglBase.getEglBaseContext()), context, videoSource.getCapturerObserver());

        this.videoTrack.addSink(localRenderer);
        this.videoTrack.setEnabled(false);
        mediaStream.addTrack(videoTrack);
    } else {
        this.videoSource = null;
        this.videoTrack = null;
    }

    this.peerConnection.addStream(mediaStream);
}
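Note that both tracks start out disabled and the capturer is only initialized, never started, in this constructor. The sketch below is a hedged guess at how video might later be switched on and off; the method name and the 1280x720@30 format are assumptions, only the setEnabled/startCapture/stopCapture calls are standard org.webrtc API.

public void setVideoEnabled(boolean enabled) {
    if (videoTrack == null || camera.capturer == null) {
        return;
    }
    videoTrack.setEnabled(enabled);
    if (enabled) {
        camera.capturer.startCapture(1280, 720, 30);
    } else {
        try {
            camera.capturer.stopCapture();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}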
 
Example #9
Source File: CallActivity.java    From RTCStartupDemo with GNU General Public License v3.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_call);

    mLogcatView = findViewById(R.id.LogcatView);
    mStartCallBtn = findViewById(R.id.StartCallButton);
    mEndCallBtn = findViewById(R.id.EndCallButton);

    RTCSignalClient.getInstance().setSignalEventListener(mOnSignalEventListener);

    String serverAddr = getIntent().getStringExtra("ServerAddr");
    String roomName = getIntent().getStringExtra("RoomName");
    RTCSignalClient.getInstance().joinRoom(serverAddr, UUID.randomUUID().toString(), roomName);

    mRootEglBase = EglBase.create();

    mLocalSurfaceView = findViewById(R.id.LocalSurfaceView);
    mRemoteSurfaceView = findViewById(R.id.RemoteSurfaceView);

    mLocalSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
    mLocalSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalSurfaceView.setMirror(true);
    mLocalSurfaceView.setEnableHardwareScaler(false /* enabled */);

    mRemoteSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
    mRemoteSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mRemoteSurfaceView.setMirror(true);
    mRemoteSurfaceView.setEnableHardwareScaler(true /* enabled */);
    mRemoteSurfaceView.setZOrderMediaOverlay(true);

    ProxyVideoSink videoSink = new ProxyVideoSink();
    videoSink.setTarget(mLocalSurfaceView);

    mPeerConnectionFactory = createPeerConnectionFactory(this);

    // NOTE: this _must_ happen while PeerConnectionFactory is alive!
    Logging.enableLogToDebugOutput(Logging.Severity.LS_VERBOSE);

    mVideoCapturer = createVideoCapturer();

    mSurfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", mRootEglBase.getEglBaseContext());
    VideoSource videoSource = mPeerConnectionFactory.createVideoSource(false);
    mVideoCapturer.initialize(mSurfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());

    mVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    mVideoTrack.setEnabled(true);
    mVideoTrack.addSink(videoSink);

    AudioSource audioSource = mPeerConnectionFactory.createAudioSource(new MediaConstraints());
    mAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    mAudioTrack.setEnabled(true);
}
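Here onCreate() only initializes the capturer; capture itself is presumably started and stopped with the activity lifecycle, along the lines of the sketch below. The 640x480@30 format is an assumption; the demo's actual values may differ.

@Override
protected void onResume() {
    super.onResume();
    mVideoCapturer.startCapture(640, 480, 30);
}

@Override
protected void onPause() {
    super.onPause();
    try {
        mVideoCapturer.stopCapture();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}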
 
Example #10
Source File: MockVideoCapturer.java    From owt-client-android with Apache License 2.0
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context,
        CapturerObserver capturerObserver) {
    // No-op: this mock capturer does not use the SurfaceTextureHelper or Context.
}
 
Example #11
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0
synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers) throws InitializationException {
    Preconditions.checkState(this.eglBase != null);
    Preconditions.checkNotNull(media);
    Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection");
    final boolean setUseHardwareAcousticEchoCanceler = WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
    Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s", setUseHardwareAcousticEchoCanceler, Build.MODEL));
    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
            .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
            .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context)
                    .setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler)
                    .createAudioDeviceModule()
            )
            .createPeerConnectionFactory();


    final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");

    final Optional<CapturerChoice> optionalCapturerChoice = media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent();

    if (optionalCapturerChoice.isPresent()) {
        this.capturerChoice = optionalCapturerChoice.get();
        final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer;
        final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
        capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
        Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d", capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
        capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());

        this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);

        stream.addTrack(this.localVideoTrack);
    }


    if (media.contains(Media.AUDIO)) {
        //set up audio track
        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
        stream.addTrack(this.localAudioTrack);
    }


    final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; //XEP-0176 doesn't support tcp
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
    if (peerConnection == null) {
        throw new InitializationException("Unable to create PeerConnection");
    }
    peerConnection.addStream(stream);
    peerConnection.setAudioPlayout(true);
    peerConnection.setAudioRecording(true);
    this.peerConnection = peerConnection;
}
 
Example #12
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0
synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers) throws InitializationException {
    Preconditions.checkState(this.eglBase != null);
    Preconditions.checkNotNull(media);
    Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection");
    final boolean setUseHardwareAcousticEchoCanceler = WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
    Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s", setUseHardwareAcousticEchoCanceler, Build.MODEL));
    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
            .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
            .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context)
                    .setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler)
                    .createAudioDeviceModule()
            )
            .createPeerConnectionFactory();


    final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");

    final Optional<CapturerChoice> optionalCapturerChoice = media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent();

    if (optionalCapturerChoice.isPresent()) {
        this.capturerChoice = optionalCapturerChoice.get();
        final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer;
        final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
        capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
        Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d", capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
        capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());

        this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);

        stream.addTrack(this.localVideoTrack);
    }


    if (media.contains(Media.AUDIO)) {
        //set up audio track
        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
        stream.addTrack(this.localAudioTrack);
    }


    final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; //XEP-0176 doesn't support tcp
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
    if (peerConnection == null) {
        throw new InitializationException("Unable to create PeerConnection");
    }
    peerConnection.addStream(stream);
    peerConnection.setAudioPlayout(true);
    peerConnection.setAudioRecording(true);
    this.peerConnection = peerConnection;
}