org.webrtc.MediaStreamTrack Java Examples

The following examples show how to use org.webrtc.MediaStreamTrack, drawn from several open-source projects. Each example links back to the original project and source file noted above it.
Example #1
Source File: CallActivity.java    From RTCStartupDemo with GNU General Public License v3.0
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
    MediaStreamTrack track = rtpReceiver.track();
    if (track instanceof VideoTrack) {
        Log.i(TAG, "onAddVideoTrack");
        VideoTrack remoteVideoTrack = (VideoTrack) track;
        remoteVideoTrack.setEnabled(true);
        ProxyVideoSink videoSink = new ProxyVideoSink();
        videoSink.setTarget(mRemoteSurfaceView);
        remoteVideoTrack.addSink(videoSink);
    }
}
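
ProxyVideoSink here is a helper class from the demo project, not part of org.webrtc. A minimal sketch of such a forwarding sink, reconstructed from how it is used above (the implementation details are an assumption), might look like this:

// Hypothetical reconstruction: a VideoSink that forwards frames to a
// swappable target renderer and drops frames while no target is set.
public static class ProxyVideoSink implements VideoSink {
    private VideoSink target;

    public synchronized void setTarget(VideoSink target) {
        this.target = target;
    }

    @Override
    public synchronized void onFrame(VideoFrame frame) {
        if (target != null) {
            target.onFrame(frame);
        }
    }
}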
 
Example #2
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
/**
 *  Mute or unmute the local video
 *
 *  @param mute If true, mute the video; if false, unmute it.
 */
public void muteVideo(boolean mute) {
    if (!audioOnly && (null != localStream) && (isActive())) {
        for (MediaStreamTrack eachTrack : localStream.videoTracks) {
            eachTrack.setEnabled(!mute);
        }
    }
}
 
Example #3
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
/**
 *  Indicates if the local video stream is muted
 *
 *  @return true if the local video stream is currently muted
 */
public boolean videoIsMuted() {
    boolean isMuted = true;

    if (!audioOnly && (null != localStream)) {
        for (MediaStreamTrack eachTrack : localStream.videoTracks) {
            if (eachTrack.enabled()) {
                isMuted = false;
            }
        }
    }

    return isMuted;
}
 
Example #4
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
/**
 *  Mute or unmute the local audio
 *
 *  @param mute If true, mute the audio; if false, unmute it.
 */
public void muteAudio(boolean mute) {
    if ((null != localStream) && isActive()) {
        for (MediaStreamTrack eachTrack : localStream.audioTracks) {
            eachTrack.setEnabled(!mute);
        }
    }
}
 
Example #5
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
/**
 *  Indicates if the local audio stream is muted
 *
 *  @return true if the local audio stream is currently muted
 */
public boolean audioIsMuted() {
    boolean isMuted = true;

    if (null != localStream) {
        for (MediaStreamTrack eachTrack : localStream.audioTracks) {
            if (eachTrack.enabled()) {
                isMuted = false;
            }
        }
    }

    return isMuted;
}
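
The four Respoke methods above all follow the same pattern: a stream is "muted" by disabling every track of the given kind, and considered unmuted as soon as any track reports enabled(). A typical caller, assuming a RespokeCall instance named call, would pair them like this:

// Toggle the microphone based on the current state (call is an assumed instance).
call.muteAudio(!call.audioIsMuted());
// Likewise for the camera:
call.muteVideo(!call.videoIsMuted());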
 
Example #6
Source File: RtpTransceiver.java    From webrtc_android with MIT License
/**
 * Media type of the transceiver. Any sender(s)/receiver(s) will have this
 * type as well.
 */
public MediaStreamTrack.MediaType getMediaType() {
  checkRtpTransceiverExists();
  return nativeGetMediaType(nativeRtpTransceiver);
}
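
A common use of getMediaType() is filtering a PeerConnection's transceivers by kind. A short sketch, assuming an existing Unified Plan peerConnection, might look like this:

// Find the first audio transceiver and make it both send and receive
// (peerConnection is an assumed org.webrtc.PeerConnection; getTransceivers()
// requires Unified Plan SDP semantics).
for (RtpTransceiver transceiver : peerConnection.getTransceivers()) {
    if (transceiver.getMediaType() == MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO) {
        transceiver.setDirection(RtpTransceiver.RtpTransceiverDirection.SEND_RECV);
        break;
    }
}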
 
Example #7
Source File: WebRtcClient.java    From imsdk-android with MIT License
public void setCamera() {
    localMS = factory.createLocalMediaStream("ARDAMS");
    if (pcParams.videoCallEnabled) {
        MediaConstraints videoConstraints = new MediaConstraints();
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(pcParams.videoHeight)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(pcParams.videoWidth)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", Integer.toString(pcParams.videoFps)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", Integer.toString(pcParams.videoFps)));

        videoCapturer = getVideoCapturer();
        videoSource = factory.createVideoSource(videoCapturer, videoConstraints);
        videoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
        localMS.addTrack(videoTrack);
    }

    MediaConstraints audioConstraints = new MediaConstraints();
    // Added for audio performance measurements.
    if (pcParams.noAudioProcessing) {
        LogUtil.d(TAG, "Disabling audio processing");
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    } else {
        LogUtil.d(TAG, "Enabling audio processing");
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "true"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "true"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "true"));
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "true"));
    }
    if (pcParams.enableLevelControl) {
        LogUtil.d(TAG, "Enabling level control.");
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
    } else {
        LogUtil.d(TAG, "Disabling level control.");
        audioConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "false"));
    }
    audioSource = factory.createAudioSource(audioConstraints);
    audioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    audioTrack.setEnabled(true);
    // Note: setState() exists only in older SDK revisions; current org.webrtc
    // tracks expose a read-only state() and manage track state internally.
    audioTrack.setState(MediaStreamTrack.State.LIVE);
    localMS.addTrack(audioTrack);
    if (mListener != null) {
        mListener.onLocalStream(localMS);
    }
}
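
Note that factory.createVideoSource(videoCapturer, videoConstraints) is an older SDK signature. In current org.webrtc releases the capturer is wired up separately and the capture format is passed to startCapture(); a minimal sketch of the equivalent video setup, assuming an available EglBase instance (eglBase) and an Android Context (context), might look like this:

// Sketch of the current capture pipeline; eglBase and context are assumed to exist.
SurfaceTextureHelper surfaceTextureHelper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
VideoSource videoSource = factory.createVideoSource(videoCapturer.isScreencast());
videoCapturer.initialize(surfaceTextureHelper, context, videoSource.getCapturerObserver());
videoCapturer.startCapture(pcParams.videoWidth, pcParams.videoHeight, pcParams.videoFps);
VideoTrack videoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);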
 
Example #8
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
    final MediaStreamTrack track = rtpReceiver.track();
    Log.d(EXTENDED_LOGGING_TAG, "onAddTrack(kind=" + (track == null ? "null" : track.kind()) + ",numMediaStreams=" + mediaStreams.length + ")");
}
 
Example #9
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
    final MediaStreamTrack track = rtpReceiver.track();
    Log.d(EXTENDED_LOGGING_TAG, "onAddTrack(kind=" + (track == null ? "null" : track.kind()) + ",numMediaStreams=" + mediaStreams.length + ")");
}
 
Example #10
Source File: RtpTransceiver.java    From webrtc_android with MIT License
// JNI binding that backs getMediaType() in Example #6 above.
private static native MediaStreamTrack.MediaType nativeGetMediaType(long rtpTransceiver);