org.webrtc.VideoCapturer Java Examples

The following examples show how to use org.webrtc.VideoCapturer. They are drawn from open-source Android projects; the source file, originating project, and license are listed above each example.
Example #1
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
private VideoCapturer createCameraCapturer(boolean isFront) {
    Camera1Enumerator enumerator = new Camera1Enumerator(false);
    final String[] deviceNames = enumerator.getDeviceNames();

    // Try to find a camera that matches the requested facing direction
    for (String deviceName : deviceNames) {
        if (isFront ? enumerator.isFrontFacing(deviceName) : enumerator.isBackFacing(deviceName)) {
            VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

            if (videoCapturer != null) {
                return videoCapturer;
            }
        }
    }

    return null;
}
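
The capturer returned above is only constructed; it still has to be initialized and started before any frames are produced. Below is a minimal sketch of the usual wiring, assuming a PeerConnectionFactory and an EglBase context already exist (the same flow appears in the onCreate examples later on this page); all names here are illustrative:

// Sketch only: peerConnectionFactory, eglBaseContext and getApplicationContext()
// are assumed to be available, as in the onCreate examples further down.
SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
VideoCapturer capturer = createCameraCapturer(true); // request the front camera
VideoSource videoSource =
        peerConnectionFactory.createVideoSource(capturer.isScreencast());
capturer.initialize(helper, getApplicationContext(), videoSource.getCapturerObserver());
capturer.startCapture(480, 640, 30); // width, height, fps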
 
Example #2
Source File: AppRTCDemoActivity.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
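
Note that the string-based VideoCapturer.create("Camera N, Facing ..., Orientation ...") factory used here comes from older WebRTC Android builds; more recent SDKs create capturers through a CameraEnumerator instead, as shown in Example #1.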
 
Example #3
Source File: LicodeConnector.java    From licodeAndroidClient with MIT License
/** get access to the camera */
private VideoCapturer getVideoCapturer() {
	String[] cameraFacing = { "front", "back" };
	int[] cameraIndex = { 0, 1 };
	int[] cameraOrientation = { 0, 90, 180, 270 };
	for (String facing : cameraFacing) {
		for (int index : cameraIndex) {
			for (int orientation : cameraOrientation) {
				String name = "Camera " + index + ", Facing " + facing
						+ ", Orientation " + orientation;
				VideoCapturer capturer = VideoCapturer.create(name);
				if (capturer != null) {
					log("Using camera: " + name);
					return capturer;
				}
			}
		}
	}
	throw new RuntimeException("Failed to open capturer");
}
 
Example #4
Source File: AppRTCDemoActivity.java    From WebRTCDemo with BSD 3-Clause "New" or "Revised" License
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
 
Example #5
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = { "front", "back" };
    int[] cameraIndex = { 0, 1 };
    int[] cameraOrientation = { 0, 90, 180, 270 };
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturer.create(name);
                if (capturer != null) {
                    //logAndToast("Using camera: " + name);
                    Log.d(TAG, "Using camera: " + name);
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
 
Example #6
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
private void addLocalStreams(Context context) {
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");

    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }

    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints())));

    peerConnection.addStream(localStream);
}
 
Example #7
Source File: AndroidVideoSource.java    From actor-platform with GNU Affero General Public License v3.0
private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturer.create(name);
                if (capturer != null) {
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
 
Example #8
Source File: WebRTCEngine.java    From webrtc_android with MIT License
/**
 * Create the video capturer (screen capture if enabled, otherwise a camera capturer).
 *
 * @return VideoCapturer
 */
private VideoCapturer createVideoCapture() {
    VideoCapturer videoCapturer;

    if (screencaptureEnabled) {
        return createScreenCapturer();
    }

    if (Camera2Enumerator.isSupported(mContext)) {
        videoCapturer = createCameraCapture(new Camera2Enumerator(mContext));
    } else {
        videoCapturer = createCameraCapture(new Camera1Enumerator(true));
    }
    return videoCapturer;
}
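
The createCameraCapture(CameraEnumerator) helper called above is not part of this snippet. A plausible implementation, modeled on the enumerator lookup in Example #1 (the helper name, signature, and front-camera preference are assumptions, not the project's actual code):

// Hypothetical helper mirroring the enumerator-based lookup used elsewhere on this page.
private VideoCapturer createCameraCapture(CameraEnumerator enumerator) {
    // Prefer a front-facing camera if one can be opened.
    for (String deviceName : enumerator.getDeviceNames()) {
        if (enumerator.isFrontFacing(deviceName)) {
            VideoCapturer capturer = enumerator.createCapturer(deviceName, null);
            if (capturer != null) {
                return capturer;
            }
        }
    }
    // Otherwise fall back to any remaining camera.
    for (String deviceName : enumerator.getDeviceNames()) {
        if (!enumerator.isFrontFacing(deviceName)) {
            VideoCapturer capturer = enumerator.createCapturer(deviceName, null);
            if (capturer != null) {
                return capturer;
            }
        }
    }
    return null;
}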
 
Example #9
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // create PeerConnectionFactory
    PeerConnectionFactory.InitializationOptions initializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(this).createInitializationOptions();
    PeerConnectionFactory.initialize(initializationOptions);
    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();

    // create AudioSource
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer();
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    SurfaceViewRenderer localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // display in localView
    videoTrack.addSink(localView);
}
 
Example #10
Source File: CallActivity.java    From RTCStartupDemo with GNU General Public License v3.0
private VideoCapturer createVideoCapturer() {
    if (Camera2Enumerator.isSupported(this)) {
        return createCameraCapturer(new Camera2Enumerator(this));
    } else {
        return createCameraCapturer(new Camera1Enumerator(true));
    }
}
 
Example #11
Source File: PeerConnectionClient.java    From voip_android with BSD 3-Clause "New" or "Revised" License
@Nullable
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    surfaceTextureHelper =
            SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
    videoSource = factory.createVideoSource(capturer.isScreencast());
    capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addSink(localRender);
    return localVideoTrack;
}
 
Example #12
Source File: PeerConnectionClient.java    From voip_android with BSD 3-Clause "New" or "Revised" License
public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
                                 final VideoCapturer videoCapturer) {
    if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
        Log.w(TAG, "Video call enabled but no video capturer provided.");
    }
    createPeerConnection(
            localRender, Collections.singletonList(remoteSink), videoCapturer);
}
 
Example #13
Source File: WebRTCActivity.java    From voip_android with BSD 3-Clause "New" or "Revised" License
protected void startStream() {
    logAndToast("Creating peer connection");

    peerConnectionClient = new PeerConnectionClient(getApplicationContext(), rootEglBase, peerConnectionParameters, this);

    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();

    peerConnectionClient.createPeerConnectionFactory(options);

    PeerConnection.IceServer server = new PeerConnection.IceServer("stun:stun.counterpath.net:3478");

    String username = turnUserName;
    String password = turnPassword;
    PeerConnection.IceServer server2 = new PeerConnection.IceServer("turn:turn.gobelieve.io:3478?transport=udp", username, password);

    peerConnectionClient.clearIceServer();
    peerConnectionClient.addIceServer(server);
    peerConnectionClient.addIceServer(server2);

    VideoCapturer videoCapturer = null;
    if (peerConnectionParameters.videoCallEnabled) {
        videoCapturer = createVideoCapturer();
    }
    peerConnectionClient.createPeerConnection(localRender,
            remoteRender, videoCapturer);

    if (this.isCaller) {
        logAndToast("Creating OFFER...");
        // Create offer. Offer SDP will be sent to answering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createOffer();
    }
}
 
Example #14
Source File: VideoCapturers.java    From VideoCRE with MIT License
public static VideoCapturer createFileVideoCapturer(String path) {
    try {
        return new FileVideoCapturer(path);
    } catch (IOException e) {
        return null;
    }
}
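
A FileVideoCapturer is driven through the same initialize/startCapture flow as the camera capturers. A hedged usage sketch follows; videoFilePath, eglBaseContext, peerConnectionFactory, and appContext are placeholders for objects created elsewhere:

// Sketch only: placeholder names, same capturer flow as in the other examples here.
VideoCapturer fileCapturer = VideoCapturers.createFileVideoCapturer(videoFilePath);
if (fileCapturer != null) {
    SurfaceTextureHelper helper =
            SurfaceTextureHelper.create("FileCaptureThread", eglBaseContext);
    VideoSource source =
            peerConnectionFactory.createVideoSource(fileCapturer.isScreencast());
    fileCapturer.initialize(helper, appContext, source.getCapturerObserver());
    fileCapturer.startCapture(480, 640, 30);
}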
 
Example #15
Source File: PeerConnectionClient.java    From restcomm-android-sdk with GNU Affero General Public License v3.0
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderLocalVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
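
This variant relies on the older PeerConnectionFactory.createVideoSource(VideoCapturer) signature, where the factory takes the capturer directly; the newer API, used in the voip_android createVideoTrack example earlier on this page, creates the source from isScreencast() and then hands the source's CapturerObserver to the capturer via initialize().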
 
Example #16
Source File: VideoActivity.java    From VideoCRE with MIT License
private VideoCapturer createVideoCapturer() {
    switch (MainActivity.sVideoSource) {
        case VideoSource.SOURCE_CAMERA1:
            return VideoCapturers.createCamera1Capturer(true);
        case VideoSource.SOURCE_CAMERA2:
            return VideoCapturers.createCamera2Capturer(this);
        case VideoSource.SOURCE_SCREEN:
            return null;
        case VideoSource.SOURCE_FILE:
            return VideoCapturers.createFileVideoCapturer("");
        default:
            return null;
    }
}
 
Example #17
Source File: PeerConnectionClient.java    From janus-gateway-android with MIT License
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderVideo);
  localVideoTrack.addRenderer(new VideoRenderer(localRender));
  return localVideoTrack;
}
 
Example #18
Source File: MainActivity.java    From janus-gateway-android with MIT License
private VideoCapturer createVideoCapturer() {
    VideoCapturer videoCapturer = null;
    if (useCamera2()) {
        Log.d(TAG, "Creating capturer using camera2 API.");
        videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
    } else {
        Log.d(TAG, "Creating capturer using camera1 API.");
        videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
    }
    if (videoCapturer == null) {
        Log.e(TAG, "Failed to open camera");
        return null;
    }
    return videoCapturer;
}
 
Example #19
Source File: CallActivity.java    From sample-videoRTC with Apache License 2.0
private void onConnectedToRoomInternal(final AppRTCClient.SignalingParameters params) {
    final long delta = System.currentTimeMillis() - callStartedTimeMs;

    signalingParameters = params;
    logAndToast("Creating peer connection, delay=" + delta + "ms");
    VideoCapturer videoCapturer = null;
    if (peerConnectionParameters.videoCallEnabled) {
        videoCapturer = createVideoCapturer();
    }
    peerConnectionClient.createPeerConnection(
            localProxyVideoSink, remoteRenderers, videoCapturer, signalingParameters);

    if (signalingParameters.initiator) {
        logAndToast("Creating OFFER...");
        // Create offer. Offer SDP will be sent to answering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createOffer();
    } else {
        if (params.offerSdp != null) {
            peerConnectionClient.setRemoteDescription(params.offerSdp);
            logAndToast("Creating ANSWER...");
            // Create answer. Answer SDP will be sent to offering client in
            // PeerConnectionEvents.onLocalDescription event.
            peerConnectionClient.createAnswer();
        }
        if (params.iceCandidates != null) {
            // Add remote ICE candidates from room.
            for (IceCandidate iceCandidate : params.iceCandidates) {
                peerConnectionClient.addRemoteIceCandidate(iceCandidate);
            }
        }
    }
}
 
Example #20
Source File: CallActivity.java    From sample-videoRTC with Apache License 2.0
private VideoCapturer createVideoCapturer() {
    final VideoCapturer videoCapturer;
    Logging.d(TAG, "Creating capturer using camera2 API.");
    videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
    if (videoCapturer == null) {
        reportError("Failed to open camera");
        return null;
    }
    return videoCapturer;
}
 
Example #21
Source File: PeerConnectionClient.java    From sample-videoRTC with Apache License 2.0
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
 
Example #22
Source File: WebRTCEngine.java    From webrtc_android with MIT License
@TargetApi(21)
private VideoCapturer createScreenCapturer() {
    if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
        return null;
    }
    return new ScreenCapturerAndroid(
            mediaProjectionPermissionResultData, new MediaProjection.Callback() {
        @Override
        public void onStop() {
            Log.e(TAG, "User revoked permission to capture the screen.");
        }
    });
}
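
The mediaProjectionPermissionResultCode and mediaProjectionPermissionResultData fields used above normally come from a screen-capture permission round trip done in an Activity. A minimal sketch of that step; the request code and field handling are assumptions, not code from this project:

// Hedged sketch of obtaining the MediaProjection permission result (Activity context assumed).
private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1; // arbitrary request code

private void requestScreenCapturePermission() {
    MediaProjectionManager manager =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    startActivityForResult(manager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == CAPTURE_PERMISSION_REQUEST_CODE) {
        mediaProjectionPermissionResultCode = resultCode;
        mediaProjectionPermissionResultData = data;
    }
}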
 
Example #23
Source File: VideoChatHelper.java    From Socket.io-FLSocketIM-Android with MIT License
private void createLocalStream() {
    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // Audio track
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    localMediaStream.addTrack(factory.createAudioTrack("ARDAMSa0", audioSource));

    // Video track
    String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    VideoCapturer capture = VideoCapturerAndroid.create(frontCameraDeviceName);
    VideoSource videoSource = factory.createVideoSource(capture, localVideoConstraints());
    localMediaStream.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));

    callBack.onSetLocalStream(localMediaStream, myId);
}
 
Example #24
Source File: RCConnection.java    From restcomm-android-sdk with GNU Affero General Public License v3.0
private void onConnectedToRoomInternal(final SignalingParameters params)
{
   RCLogger.i(TAG, "onConnectedToRoomInternal");
   final long delta = System.currentTimeMillis() - callStartedTimeMs;

   signalingParameters = params;
   if (peerConnectionClient == null) {
      RCLogger.w(TAG, "Room is connected, but EGL context is not ready yet.");
      return;
   }

   VideoCapturer videoCapturer = null;
   if (peerConnectionParameters.videoCallEnabled) {
      videoCapturer = createVideoCapturer();
   }

   logAndToast("Creating peer connection, delay=" + delta + "ms");
   peerConnectionClient.createPeerConnection(localRender, remoteRender, videoCapturer, signalingParameters);

   if (signalingParameters.initiator) {
      logAndToast("Creating OFFER...");
      // Create offer. Offer SDP will be sent to answering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createOffer();
   }
   else {
      if (params.offerSdp != null) {
         peerConnectionClient.setRemoteDescription(params.offerSdp);
         logAndToast("Creating ANSWER...");
         // Create answer. Answer SDP will be sent to offering client in
         // PeerConnectionEvents.onLocalDescription event.
         peerConnectionClient.createAnswer();
      }
      if (params.iceCandidates != null) {
         // Add remote ICE candidates from room.
         for (IceCandidate iceCandidate : params.iceCandidates) {
            peerConnectionClient.addRemoteIceCandidate(iceCandidate);
         }
      }
   }
}
 
Example #25
Source File: PeerConnectionClient.java    From restcomm-android-sdk with GNU Affero General Public License v3.0
public void createPeerConnection(final VideoSink localRender,
    final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer,
    final SignalingParameters signalingParameters) {
  createPeerConnection(
      localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters);
}
 
Example #26
Source File: AppRTCDemoActivity.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            if (hudView.getVisibility() == View.INVISIBLE) {
              vsv.postDelayed(runnableThis, 1000);
              return;
            }
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(final StatsReport[] reports) {
                  runOnUiThread(new Runnable() {
                      public void run() {
                        updateHUD(reports);
                      }
                    });
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 1000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0",
          factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
 
Example #27
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
@Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

        // create PeerConnectionFactory
        PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
                .builder(this)
                .createInitializationOptions());
        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        DefaultVideoEncoderFactory defaultVideoEncoderFactory =
                new DefaultVideoEncoderFactory(eglBaseContext, true, true);
        DefaultVideoDecoderFactory defaultVideoDecoderFactory =
                new DefaultVideoDecoderFactory(eglBaseContext);
        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoEncoderFactory(defaultVideoEncoderFactory)
                .setVideoDecoderFactory(defaultVideoDecoderFactory)
                .createPeerConnectionFactory();

        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer videoCapturer = createCameraCapturer(true);
        VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
        videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(480, 640, 30);

        localView = findViewById(R.id.localView);
        localView.setMirror(true);
        localView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
//        // display in localView
//        videoTrack.addSink(localView);

        SurfaceTextureHelper remoteSurfaceTextureHelper = SurfaceTextureHelper.create("RemoteCaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer remoteVideoCapturer = createCameraCapturer(false);
        VideoSource remoteVideoSource = peerConnectionFactory.createVideoSource(remoteVideoCapturer.isScreencast());
        remoteVideoCapturer.initialize(remoteSurfaceTextureHelper, getApplicationContext(), remoteVideoSource.getCapturerObserver());
        remoteVideoCapturer.startCapture(480, 640, 30);

        remoteView = findViewById(R.id.remoteView);
        remoteView.setMirror(false);
        remoteView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack remoteVideoTrack = peerConnectionFactory.createVideoTrack("102", remoteVideoSource);
//        // display in remoteView
//        remoteVideoTrack.addSink(remoteView);

        mediaStreamLocal = peerConnectionFactory.createLocalMediaStream("mediaStreamLocal");
        mediaStreamLocal.addTrack(videoTrack);

        mediaStreamRemote = peerConnectionFactory.createLocalMediaStream("mediaStreamRemote");
        mediaStreamRemote.addTrack(remoteVideoTrack);

        call(mediaStreamLocal, mediaStreamRemote);
    }
 
Example #28
Source File: MainActivity.java    From webrtc-android-tutorial with Apache License 2.0
@Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

        // create PeerConnectionFactory
        PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
                .builder(this)
                .createInitializationOptions());
        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        DefaultVideoEncoderFactory defaultVideoEncoderFactory =
                new DefaultVideoEncoderFactory(eglBaseContext, true, true);
        DefaultVideoDecoderFactory defaultVideoDecoderFactory =
                new DefaultVideoDecoderFactory(eglBaseContext);
        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoEncoderFactory(defaultVideoEncoderFactory)
                .setVideoDecoderFactory(defaultVideoDecoderFactory)
                .createPeerConnectionFactory();

        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
        // create VideoCapturer
        VideoCapturer videoCapturer = createCameraCapturer(true);
        VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
        videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(480, 640, 30);

        localView = findViewById(R.id.localView);
        localView.setMirror(true);
        localView.init(eglBaseContext, null);

        // create VideoTrack
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
        // display in localView
        videoTrack.addSink(localView);

        remoteView = findViewById(R.id.remoteView);
        remoteView.setMirror(false);
        remoteView.init(eglBaseContext, null);


        AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

        mediaStream = peerConnectionFactory.createLocalMediaStream("mediaStream");
        mediaStream.addTrack(videoTrack);
        mediaStream.addTrack(audioTrack);

        SignalingClient.get().setCallback(this);
        call();
    }