Java Code Examples for org.webrtc.VideoTrack#setEnabled()

The following examples show how to use org.webrtc.VideoTrack#setEnabled(). You can vote up the examples you like and vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: PeerConnectionObserver.java    From iGap-Android with GNU Affero General Public License v3.0 6 votes vote down vote up
@Override
public void onAddStream(MediaStream stream) {
    // Remote audio tracks are always played back.
    for (AudioTrack track : stream.audioTracks) {
        track.setEnabled(true);
    }

    // Only attach a frame sink when the stream carries exactly one video track.
    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack remoteVideo = stream.videoTracks.get(0);
        remoteVideo.setEnabled(true);

        // Forward every decoded remote frame to the registered call-frame listener,
        // if one is currently set.
        remoteVideo.addSink(videoFrame -> {
            if (G.onVideoCallFrame != null) {
                G.onVideoCallFrame.onRemoteFrame(videoFrame);
            }
        });
    }
}
 
Example 2
Source File: WebRtcCallService.java    From bcm-android with GNU General Public License v3.0 5 votes vote down vote up
@Override
public void onAddStream(MediaStream stream) {
    ALog.logForSecret(TAG, "onAddStream:" + stream);

    // Remote audio tracks start out enabled.
    stream.audioTracks.forEach(track -> track.setEnabled(true));

    // Attach the remote renderer only when there is exactly one video track.
    if (stream.videoTracks == null || stream.videoTracks.size() != 1) {
        return;
    }
    VideoTrack remoteVideo = stream.videoTracks.get(0);
    remoteVideo.setEnabled(true);
    remoteVideo.addSink(remoteRenderer);
}
 
Example 3
Source File: CallActivity.java    From RTCStartupDemo with GNU General Public License v3.0 5 votes vote down vote up
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
    MediaStreamTrack incoming = rtpReceiver.track();
    // Audio (and any non-video) tracks are ignored here.
    if (!(incoming instanceof VideoTrack)) {
        return;
    }
    Log.i(TAG, "onAddVideoTrack");
    VideoTrack remoteVideo = (VideoTrack) incoming;
    remoteVideo.setEnabled(true);
    // Route remote frames to the remote surface via a proxy sink.
    ProxyVideoSink sink = new ProxyVideoSink();
    sink.setTarget(mRemoteSurfaceView);
    remoteVideo.addSink(sink);
}
 
Example 4
Source File: MediaStreamFactory.java    From owt-client-android with Apache License 2.0 5 votes vote down vote up
MediaStream createMediaStream(VideoCapturer videoCapturer,
        AudioTrackConstraints audioMediaConstraints) {
    // Caller must request at least one media kind.
    RCHECK(videoCapturer != null || audioMediaConstraints != null);

    final String label = UUID.randomUUID().toString();
    final MediaStream stream = PCFactoryProxy.instance().createLocalMediaStream(label);

    if (videoCapturer != null) {
        // Every video stream gets its own (unshared) source driven by this capturer.
        final VideoSource source = PCFactoryProxy.instance().createVideoSource(
                videoCapturer.isScreencast());
        final SurfaceTextureHelper textureHelper =
                SurfaceTextureHelper.create("CT", localContext);
        // Wire the capturer to the source, then start it at its configured format.
        videoCapturer.initialize(textureHelper, ContextInitialization.context,
                source.getCapturerObserver());
        videoCapturer.startCapture(
                videoCapturer.getWidth(), videoCapturer.getHeight(), videoCapturer.getFps());
        final VideoTrack track =
                PCFactoryProxy.instance().createVideoTrack(label + "v0", source);
        track.setEnabled(true);
        stream.addTrack(track);
        unsharedVideoSources.put(label, source);
    }

    if (audioMediaConstraints != null) {
        // Audio uses a single process-wide source, created lazily and
        // reference-counted so it can be released when no stream needs it.
        if (sharedAudioSource == null) {
            sharedAudioSource = PCFactoryProxy.instance().createAudioSource(
                    audioMediaConstraints.convertToWebRTCConstraints());
        }
        audioSourceRef++;
        stream.addTrack(
                PCFactoryProxy.instance().createAudioTrack(label + "a0", sharedAudioSource));
    }

    return stream;
}
 
Example 5
Source File: MediaResourceManager.java    From webrtcpeer-android with Apache License 2.0 5 votes vote down vote up
// Attaches (or re-attaches) the remote render target to the remote stream's
// video track, detaching any renderer previously bound to the same target.
// NOTE(review): only streams with exactly one video track are handled;
// streams with zero or multiple video tracks are silently ignored.
public void run() {
    Log.d(TAG, "Attaching VideoRenderer to remote stream (" + remoteStream + ")");

    // Check if the remote stream has a video track
    if (remoteStream.videoTracks.size() == 1) {
        // Get the video track
        VideoTrack remoteVideoTrack = remoteStream.videoTracks.get(0);
        // Set video track enabled if we have enabled video rendering
        remoteVideoTrack.setEnabled(renderVideo);

        // If this render target already has a renderer attached to some
        // earlier stream's track, detach that renderer first so frames stop
        // flowing to a stale target. The lookup chain is:
        // target -> renderer -> stream -> track, via the three bookkeeping maps.
        VideoRenderer videoRenderer = remoteVideoRenderers.get(remoteRender);
        if (videoRenderer != null) {
            MediaStream mediaStream = remoteVideoMediaStreams.get(videoRenderer);
            if (mediaStream != null) {
                VideoTrack videoTrack = remoteVideoTracks.get(mediaStream);
                if (videoTrack != null) {
                    videoTrack.removeRenderer(videoRenderer);
                }
            }
        }

        // Create a fresh renderer for the target and record the bookkeeping
        // entries so a later attach can undo this one.
        VideoRenderer newVideoRenderer = new VideoRenderer(remoteRender);
        remoteVideoTrack.addRenderer(newVideoRenderer);
        remoteVideoRenderers.put(remoteRender, newVideoRenderer);
        remoteVideoMediaStreams.put(newVideoRenderer, remoteStream);
        remoteVideoTracks.put(remoteStream, remoteVideoTrack);
        Log.d(TAG, "Attached.");
    }
}
 
Example 6
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Enables or disables the local video track.
 *
 * @param enabled whether the local video track should produce frames
 * @throws IllegalStateException if no local video track has been created yet
 */
void setVideoEnabled(final boolean enabled) {
    final VideoTrack track = this.localVideoTrack;
    if (track != null) {
        track.setEnabled(enabled);
    } else {
        throw new IllegalStateException("Local video track does not exist");
    }
}
 
Example 7
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Toggles the local video track on or off.
 *
 * @param enabled the desired enabled state for the local video track
 * @throws IllegalStateException if the local video track has not been created
 */
void setVideoEnabled(final boolean enabled) {
    final VideoTrack local = this.localVideoTrack;
    if (local == null) {
        throw new IllegalStateException("Local video track does not exist");
    }
    local.setEnabled(enabled);
}
 
Example 8
Source File: MainActivity.java    From krankygeek with MIT License 4 votes vote down vote up
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    configureCallAudio();
    initPeerConnectionFactory();

    // Build the local stream first; its video track feeds the preview renderer.
    VideoTrack localVideoTrack = createLocalMediaStream();
    initVideoRenderers(localVideoTrack);
}

/** Routes audio through the in-communication mode with speakerphone on. */
private void configureCallAudio() {
    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);
}

/** Initializes the WebRTC Android globals, then constructs the factory (order matters). */
private void initPeerConnectionFactory() {
    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();
}

/**
 * Creates the local media stream containing one enabled front-camera video
 * track and one enabled audio track.
 *
 * @return the local video track, so the caller can attach a preview renderer
 */
private VideoTrack createLocalMediaStream() {
    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);
    return localVideoTrack;
}

/**
 * Sets up the GL view with a full-screen remote renderer and a
 * quarter-screen local preview attached to the given local video track.
 */
private void initVideoRenderers(VideoTrack localVideoTrack) {
    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}