Java Code Examples for org.webrtc.AudioTrack#setEnabled()

The following examples show how to use org.webrtc.AudioTrack#setEnabled(). Each example is taken from an open-source project; the source file, project, and license are noted above it.
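As a quick orientation before the project examples, here is a minimal sketch of the core pattern: setEnabled(false) mutes a track without stopping capture or renegotiating, and setEnabled(true) resumes it. The helper name and the assumption that `stream` is an already-negotiated MediaStream are ours, not taken from any project below.

// Minimal sketch: toggle all audio tracks on a MediaStream.
void setStreamAudioEnabled(MediaStream stream, boolean enabled) {
    for (AudioTrack audioTrack : stream.audioTracks) {
        // A disabled local track sends silence; a disabled remote track is not played out.
        audioTrack.setEnabled(enabled);
    }
}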
Example 1
Source File: PeerConnectionObserver.java    From iGap-Android with GNU Affero General Public License v3.0
@Override
public void onAddStream(MediaStream stream) {
    // Enable every remote audio track so incoming call audio is audible.
    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }

    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.get(0);
        videoTrack.setEnabled(true);

        // Forward each remote frame to the application-level callback.
        videoTrack.addSink(new VideoSink() {
            @Override
            public void onFrame(VideoFrame videoFrame) {
                if (G.onVideoCallFrame != null) {
                    G.onVideoCallFrame.onRemoteFrame(videoFrame);
                }
            }
        });
    }
}
 
Example 2
Source File: WebRtcCallService.java    From bcm-android with GNU General Public License v3.0
@Override
public void onAddStream(MediaStream stream) {
    ALog.logForSecret(TAG, "onAddStream:" + stream);

    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }

    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.get(0);
        videoTrack.setEnabled(true);
        videoTrack.addSink(remoteRenderer);
    }
}
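Both onAddStream examples above rely on the legacy Plan B semantics. Under Unified Plan the corresponding callback is PeerConnection.Observer#onAddTrack; a minimal sketch of the same enabling logic, assuming a recent libwebrtc Android build:

@Override
public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) {
    MediaStreamTrack track = receiver.track();
    if (track instanceof AudioTrack) {
        // Enable remote audio as soon as the track arrives.
        track.setEnabled(true);
    }
}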
 
Example 3
Source File: WebRTC.java    From iGap-Android with GNU Affero General Public License v3.0
public void muteSound() {
    if (mediaStream == null) {
        return;
    }

    for (AudioTrack audioTrack : mediaStream.audioTracks) {
        audioTrack.setEnabled(false);
    }
}
 
Example 4
Source File: WebRTC.java    From iGap-Android with GNU Affero General Public License v3.0
public void unMuteSound() {
    if (mediaStream == null) {
        return;
    }

    for (AudioTrack audioTrack : mediaStream.audioTracks) {
        audioTrack.setEnabled(true);
    }
}
 
Example 5
Source File: WebRTCWrapper.java    From Pix-Art-Messenger with GNU General Public License v3.0
void setMicrophoneEnabled(final boolean enabled) {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    audioTrack.setEnabled(enabled);
}
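A possible call site, with a hypothetical mute button and wrapper field (not part of the original source):

// Hypothetical wiring: toggle the microphone from a UI button.
muteButton.setOnClickListener(v -> {
    micMuted = !micMuted;
    webRTCWrapper.setMicrophoneEnabled(!micMuted);
});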
 
Example 6
Source File: LicodeConnector.java    From licodeAndroidClient with MIT License
@Override
public void setAudioEnabled(boolean enabled) {
	if (mState != State.kConnected || lMS == null) {
		return;
	}

	for (AudioTrack audioTrack : lMS.audioTracks) {
		audioTrack.setEnabled(enabled);
	}
}
 
Example 7
Source File: WebRTCWrapper.java    From Conversations with GNU General Public License v3.0
void setMicrophoneEnabled(final boolean enabled) {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    audioTrack.setEnabled(enabled);
}
 
Example 8
Source File: WebRTC.java    From iGap-Android with GNU Affero General Public License v3.0
private void addAudioTrack(MediaStream mediaStream) {
    // Create an audio source/track pair and attach it to the outgoing stream.
    AudioSource audioSource = peerConnectionFactoryInstance().createAudioSource(audioConstraintsGetInstance());
    AudioTrack audioTrack = peerConnectionFactoryInstance().createAudioTrack("ARDAMSa0", audioSource);
    audioTrack.setEnabled(true);
    mediaStream.addTrack(audioTrack);
}
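The AudioSource created here holds native resources, so a teardown counterpart is usually needed. A hypothetical sketch, assuming the track and source are retained in fields:

private void removeAudioTrack(MediaStream mediaStream) {
    mediaStream.removeTrack(audioTrack); // detach before disposing
    audioTrack.dispose();
    audioSource.dispose(); // releases the native audio capture resources
}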
 
Example 9
Source File: MainActivity.java    From krankygeek with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
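This example targets a very old libwebrtc release: PeerConnectionFactory.initializeAndroidGlobals, VideoCapturerAndroid, VideoRendererGui, and the no-argument PeerConnectionFactory constructor have all since been removed. A sketch of the equivalent initialization against a modern (roughly M70 and later) Android build:

// Modern factory setup; `context` is an Android Context (assumption: libwebrtc M70+).
PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(context)
                .createInitializationOptions());
PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .createPeerConnectionFactory();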
 
Example 10
Source File: LicodeConnector.java    From licodeAndroidClient with MIT License
/** begin streaming to server - MUST run on VcThread */
void doPublish(VideoStreamsView view) {
	if (mVideoCapturer != null) {
		return;
	}

	MediaConstraints videoConstraints = new MediaConstraints();
	videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
			"maxWidth", "320"));
	videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
			"maxHeight", "240"));
	videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
			"maxFrameRate", "10"));
	MediaConstraints audioConstraints = new MediaConstraints();
	audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
			"googEchoCancellation2", "true"));
	audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
			"googNoiseSuppression", "true"));
	lMS = sFactory.createLocalMediaStream("ARDAMS");

	if (videoConstraints != null) {
		mVideoCapturer = getVideoCapturer();
		mVideoSource = sFactory.createVideoSource(mVideoCapturer,
				videoConstraints);
		VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0",
				mVideoSource);
		lMS.addTrack(videoTrack);
	}
	if (audioConstraints != null) {
		AudioTrack audioTrack = sFactory.createAudioTrack("ARDAMSa0",
				sFactory.createAudioSource(audioConstraints));
		lMS.addTrack(audioTrack);
		audioTrack.setEnabled(false);
	}

	StreamDescription stream = new StreamDescription("", false, true, true,
			false, null, mNick);
	MediaConstraints pcConstraints = makePcConstraints();
	MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream,
			true), stream);

	PeerConnection pc = sFactory.createPeerConnection(mIceServers,
			pcConstraints, pcObs);
	pc.addStream(lMS, new MediaConstraints());

	stream.setMedia(lMS);
	if (view != null) {
		stream.attachRenderer(new VideoCallbacks(view,
				VideoStreamsView.LOCAL_STREAM_ID));
	}
	stream.initLocal(pc, pcObs.getSdpObserver());
}
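Note that this example publishes the audio track disabled, so the outgoing stream starts muted until setAudioEnabled(true) (Example 6, from the same LicodeConnector.java) is called. The goog* audio constraints are non-standard Google extensions and are largely ignored by recent libwebrtc builds.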