org.webrtc.VideoRendererGui Java Examples

The following examples show how to use org.webrtc.VideoRendererGui. They are taken from open source projects; you can go to the original project or source file by following the link above each example.
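
Across these examples the usage pattern is the same: bind VideoRendererGui to a GLSurfaceView with setView(), carve out percent-based renderer regions with create(), attach each region to a video track through a VideoRenderer, and reposition regions later with update(). The sketch below condenses that pattern into a single hypothetical activity. It assumes one of the old WebRTC Android builds that still ships VideoRendererGui (some builds expose the scaling enum as RendererCommon.ScalingType instead, as Examples #9 and #20 show; later releases dropped the class entirely), and onRemoteStream() stands in for whatever signaling callback delivers the remote MediaStream.

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

import org.webrtc.MediaStream;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;

public class RendererGuiSketchActivity extends Activity {

    private GLSurfaceView surfaceView;
    private VideoRenderer.Callbacks localRender;
    private VideoRenderer.Callbacks remoteRender;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        surfaceView = new GLSurfaceView(this);
        setContentView(surfaceView);

        // Bind the static renderer GUI to the surface. The Runnable fires once the
        // EGL context is ready; pass null if no callback is needed.
        VideoRendererGui.setView(surfaceView, new Runnable() {
            @Override
            public void run() {
                // VideoRendererGui.getEGLContext() is safe to use from this point on.
            }
        });

        // Coordinates are percentages of the surface: a full-screen remote view and
        // a small, mirrored local preview in the top-right corner.
        remoteRender = VideoRendererGui.create(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
        localRender = VideoRendererGui.create(70, 5, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
    }

    // Hypothetical callback from the signaling layer: feed the first remote video
    // track into the remote renderer region.
    private void onRemoteStream(MediaStream stream) {
        stream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
        // Regions can be moved or resized at any time.
        VideoRendererGui.update(localRender, 72, 72, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
    }
}
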
Example #1
Source File: RtcActivity.java    From imsdk-android with MIT License
protected void initCamera()
{
    /*localRender.init(rootContext.getEglBaseContext(),null);
    localRender.setZOrderMediaOverlay(true);
    localRender.setMirror(true);
    localRender.setScalingType(scalingType);

    remoteRender.setMirror(false);
    remoteRender.setScalingType(scalingType);
    remoteRender.init(rootContext.getEglBaseContext(),null);*/
    // local and remote render
    if(videoEnable) {
        vsv.setVisibility(View.VISIBLE);
        remoteRender = VideoRendererGui.create(
                REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
        localRender = VideoRendererGui.create(
                LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
                LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);
    } else {
        vsv.setVisibility(View.GONE);
    }
}
 
Example #2
Source File: RespokeCall.java    From respoke-sdk-android with MIT License
/**
 *  Attach the call's video renderers to the specified GLSurfaceView
 *
 *  @param glView  The GLSurfaceView on which to render video
 */
public void attachVideoRenderer(GLSurfaceView glView) {
    if (null != glView) {
        VideoRendererGui.setView(glView, new Runnable() {
            @Override
            public void run() {
                Log.d(TAG, "VideoRendererGui GL Context ready");
            }
        });

        remoteRender = VideoRendererGui.create(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
        localRender = VideoRendererGui.create(70, 5, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    }
}
 
Example #3
Source File: JanusActivity.java    From janus-gateway-android with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
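    // Ask the JVM to prefer the IPv4 stack over IPv6 for name resolution and sockets.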
    java.lang.System.setProperty("java.net.preferIPv6Addresses", "false");
    java.lang.System.setProperty("java.net.preferIPv4Stack", "true");
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.activity_janus);

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

    vsv = (GLSurfaceView) findViewById(R.id.glview);
    vsv.setPreserveEGLContextOnPause(true);
    vsv.setKeepScreenOn(true);
    VideoRendererGui.setView(vsv, new MyInit());

    localRender = VideoRendererGui.create(72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    remoteRender = VideoRendererGui.create(0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
}
 
Example #4
Source File: VideoChatActivity.java    From AndroidRTC with MIT License
@Override
public void onAddRemoteStream(final MediaStream remoteStream, final PnPeer peer) {
    super.onAddRemoteStream(remoteStream, peer); // Will log values
    VideoChatActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toast.makeText(VideoChatActivity.this,"Connected to " + peer.getId(), Toast.LENGTH_SHORT).show();
            try {
                if(remoteStream.audioTracks.size()==0 || remoteStream.videoTracks.size()==0) return;
                mCallStatus.setVisibility(View.GONE);
                remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
                VideoRendererGui.update(remoteRender, 0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
                VideoRendererGui.update(localRender, 72, 65, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FIT, true);
            }
            catch (Exception e){ e.printStackTrace(); }
        }
    });
}
 
Example #5
Source File: RoomChatPresenter.java    From VideoMeeting with Apache License 2.0
public RoomChatPresenter(IRoomChatView view) {
    mView = view;
    mSocketAddress = "http://" + App.getInstance().getString(R.string.stream_host);
    mSocketAddress += (":" + App.getInstance().getString(R.string.stream_port) + "/");

    GLSurfaceView surfaceView = mView.getSurfaceView();
    surfaceView.setPreserveEGLContextOnPause(true);
    surfaceView.setKeepScreenOn(true);
    VideoRendererGui.setView(surfaceView, new Runnable() {
        @Override
        public void run() {
            // The SurfaceView is ready
            L.d("eglContextReadyCallback");
            init();
        }
    });

    localRender = VideoRendererGui.create(
            0, 0,
            50, 50, scalingType, true);

}
 
Example #6
Source File: RtcActivity.java    From imsdk-android with MIT License
@Override
public void onLocalStream(final MediaStream localStream) {
    /*getHandler().post(new Runnable() {
        @Override
        public void run() {
            if(videoEnable) {

            }
        }
    });*/
    if(client==null||localRender ==  null) return;
    if(!videoEnable&&localStream.audioTracks.size()==1)
    {
        localStream.audioTracks.get(0).setEnabled(true);
    }
    if(localStream.videoTracks.size()==1) {
        localStream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
        localStream.videoTracks.get(0).setEnabled(videoEnable);
        if (videoEnable) {
            VideoRendererGui.update(localRender,
                    LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
                    LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING,
                    scalingType, true);
        }
    }
}
 
Example #7
Source File: RtcActivity.java    From imsdk-android with MIT License
private void switchRender() {
    VideoRendererGui.update(localRender,
            REMOTE_X, REMOTE_Y,
            REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, true);
    VideoRendererGui.update(remoteRender,
            LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
            LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
            scalingType, false);
}
 
Example #8
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
public void  release(){
    if (localRendererCallback != null) {
        VideoRendererGui.remove(localRendererCallback);
    }
    if (mainRendererCallback != null) {
        VideoRendererGui.remove(mainRendererCallback);
    }
}
 
Example #9
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
public void updateRenderer(RendererSurface rendererSurface, RendererConfig config){
    boolean mainRenderer = RendererSurface.MAIN.equals(rendererSurface);
    VideoRenderer.Callbacks callbacks = mainRenderer ? mainRendererCallback
            :localRendererCallback;

    if (config.coordinates != null) {
        setViewCoordinates((mainRenderer ? remoteCoords : localCoords),
                config.coordinates);
    }
    setRendererMirror(config.mirror, rendererSurface);
    int[] viewCoordinates = mainRenderer ? remoteCoords : localCoords;
    VideoRendererGui.update(callbacks, viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL,
            (mainRenderer ? mainMirror : secondMirror));
}
 
Example #10
Source File: Respoke.java    From respoke-sdk-android with MIT License
/**
 *  Notify the shared SDK instance that the specified client has connected. This is for internal use only, and should never be called by your client application.
 *
 *  @param client  The client that just connected
 */
public void clientConnected(RespokeClient client) {
    if (null != pushToken) {
        registerPushServices();
    }

    if (!factoryStaticInitialized) {
        // Perform a one-time WebRTC global initialization
        PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true, VideoRendererGui.getEGLContext());
        factoryStaticInitialized = true;
    }
}
 
Example #11
Source File: JanusActivity.java    From janus-gateway-android with MIT License
private void init() {
    try {
        EGLContext con = VideoRendererGui.getEGLContext();
        echoTest = new EchoTest(localRender, remoteRender);
        echoTest.initializeMediaContext(JanusActivity.this, true, true, true, con);
        echoTest.Start();

    } catch (Exception ex) {
        Log.e("computician.janusclient", ex.getMessage());
    }
}
 
Example #12
Source File: EchoTest.java    From janus-gateway-android with MIT License
@Override
public void onRemoteStream(MediaStream stream) {
    stream.videoTracks.get(0).setEnabled(true);
    if(stream.videoTracks.get(0).enabled())
        Log.d("JANUSCLIENT", "video tracks enabled");
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
    VideoRendererGui.update(remoteRender, 0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
    VideoRendererGui.update(localRender, 72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
}
 
Example #13
Source File: NBMWebRTCPeer.java    From webrtcpeer-android with Apache License 2.0
private boolean startLocalMediaSync() {
    if (mediaResourceManager != null && mediaResourceManager.getLocalMediaStream() == null) {
        mediaResourceManager.createLocalMediaStream(VideoRendererGui.getEglBaseContext(), localRender);
        mediaResourceManager.startVideoSource();
        mediaResourceManager.selectCameraPosition(config.getCameraPosition());
        return true;
    } else {
        return false;
    }
}
 
Example #14
Source File: RtcActivity.java    From imsdk-android with MIT License
@Override
public void onAddRemoteStream(final MediaStream remoteStream) {
    if(client == null||remoteRender ==  null) return;
    stopPlayTone();
    if(remoteStream.audioTracks.size()>1||
            remoteStream.videoTracks.size()>1)
    {
        LogUtil.e("Wired looking stream: "+remoteStream);
        return;
    }
    if(!videoEnable&&remoteStream.audioTracks.size()==1)
    {
        remoteStream.audioTracks.get(0).setEnabled(true);
    }
    if(remoteStream.videoTracks.size() == 1)
    {
        remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
        remoteStream.videoTracks.get(0).setEnabled(videoEnable);
        if(videoEnable) {
            VideoRendererGui.update(remoteRender,
                    REMOTE_X, REMOTE_Y,
                    REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
            VideoRendererGui.update(localRender,
                    LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                    LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                    scalingType, true);
        }
    }
}
 
Example #15
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
private void init(TypedArray typedArray) {
    VideoRendererGui.setView(this, null);
    if (typedArray != null) {
        setValuefromResources(typedArray);
        typedArray.recycle();
    }

    obtainMainVideoRenderer();
}
 
Example #16
Source File: RtcActivity.java    From imsdk-android with MIT License
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(LayoutParams.FLAG_KEEP_SCREEN_ON
                    | LayoutParams.FLAG_DISMISS_KEYGUARD
                    | LayoutParams.FLAG_SHOW_WHEN_LOCKED
                    | LayoutParams.FLAG_TURN_SCREEN_ON);
    this.requestWindowFeature(Window.FEATURE_NO_TITLE);
    this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN);
    setContentView(R.layout.atom_rtc_activity_webrtc);
    videoEnable = getIntent().getBooleanExtra(INTENT_KEY_VIDEO_ENABLE,true);
    isCaller = getIntent().getBooleanExtra(INTENT_KEY_CREATEOFFER,false);
    from =  getIntent().getExtras().getString(INTENT_KEY_FROM);
    to =  getIntent().getExtras().getString(INTENT_KEY_TO);
    chatType =  getIntent().getExtras().getString(INTENT_KEY_CHATTYPE);
    realJid =  getIntent().getExtras().getString(INTENT_KEY_REALJID);
    LogUtil.d(TAG,"onCreate Load Tone");
    initView();
    EventBus.getDefault().register(this);
    if(videoEnable) {
        VideoRendererGui.setView(vsv, new Runnable() {
            @Override
            public void run() {
                init();
            }
        });
    } else {
        init();
    }
    initCamera();
    initInfo();
}
 
Example #17
Source File: RoomChatPresenter.java    From VideoMeeting with Apache License 2.0
private void addRender(MediaStream stream, int position) {
    VideoRenderer.Callbacks render;
    L.d("addRender position is " + position);
    if (position == 0) {
        render = localRender;
    } else {
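        // Remote participants are laid out in a two-column grid: each renderer gets
        // a 50% x 50% cell whose column and row are derived from the position index.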
        render = VideoRendererGui.create(position % 2 == 0 ? 0 : 50,
                position / 2 * 50,
                50, 50,
                scalingType, false);
    }
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(render));
}
 
Example #18
Source File: RoomChatPresenter.java    From VideoMeeting with Apache License 2.0
private void init() {
    if (mView == null) {
        L.w("view is null, it may be destroyed");
        return;
    }
    Point displaySize = mView.getDisplaySize();
    // TODO make sure the video width and height are supported, i.e. that the phone camera supports the displaySize resolution
    PeerConnectionParameters params = new PeerConnectionParameters(
            true, false, displaySize.x, displaySize.y, 30, 1, VIDEO_CODEC_VP9, true, 1, AUDIO_CODEC_OPUS, true);

    mWebRtcClient = new WebRtcClient(this, mSocketAddress, params, VideoRendererGui.getEGLContext());
}
 
Example #19
Source File: VideoChatActivity.java    From Socket.io-FLSocketIM-Android with MIT License
@Override
protected void initData() {
    super.initData();

    Bundle bundle = getIntent().getExtras();
    String fromUser = bundle.getString("fromUser");
    String toUser = bundle.getString("toUser");
    String room = bundle.getString("room");
    this.fromUser = fromUser;
    this.toUser = toUser;
    this.room = room;
    int type = bundle.getInt("type");
    videoType = type == 0?ChatVideoType.videoTypeCaller:ChatVideoType.videoTypeCallee;
    videoView = new GLSurfaceView(mContext);
    videoView.setPreserveEGLContextOnPause(true);
    videoView.setKeepScreenOn(true);
    VideoRendererGui.setView(videoView, new Runnable() {
        @Override
        public void run() {

            initHelper();
        }
    });
    FrameLayout layout = findViewById(R.id.video_view_back);
    LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
    layout.addView(videoView, layoutParams);

    /*
     * The creation order of the two renderers determines the final z-order of the rendering layers.
     */
    remoteRender = VideoRendererGui.create(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
    localRender = VideoRendererGui.create(66, 0, 33, 33, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

}
 
Example #20
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
private VideoRenderer.Callbacks initRenderer(boolean mirror, int[] viewCoordinates) {
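    // viewCoordinates holds {x, y, width, height} as percentages of the GLSurfaceView.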
    return VideoRendererGui.createGuiRenderer(viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL, mirror);

}
 
Example #21
Source File: AppRTCDemoActivity.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}
 
Example #22
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
public void removeMainRendererCallback(){
    if (mainRendererCallback != null) {
        VideoRendererGui.remove(mainRendererCallback);
        mainRendererCallback = null;
    }
}
 
Example #23
Source File: RTCGLVideoView.java    From q-municate-android with Apache License 2.0
public void removeLocalRendererCallback(){
    if (localRendererCallback != null) {
        VideoRendererGui.remove(localRendererCallback);
        localRendererCallback = null;
    }
}
 
Example #24
Source File: AppRTCDemoActivity.java    From WebRTCDemo with BSD 3-Clause "New" or "Revised" License
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}
 
Example #25
Source File: EchoTest.java    From janus-gateway-android with MIT License
@Override
public void onLocalStream(MediaStream stream) {
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
    VideoRendererGui.update(localRender, 0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
}
 
Example #26
Source File: VideoChatActivity.java    From AndroidRTC with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
        Intent intent = new Intent(this, MainActivity.class);
        startActivity(intent);
        Toast.makeText(this, "Need to pass username to VideoChatActivity in intent extras (Constants.USER_NAME).",
                Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    this.username      = extras.getString(Constants.USER_NAME, "");
    this.mChatList     = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus   = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initiate the PeerConnectionFactory with our application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.username);
    List<PeerConnection.IceServer> servers = getXirSysIceServers();
    if (!servers.isEmpty()){
        this.pnRTCClient.setSignalParams(new PnSignalingParams());
    }

    // Returns the number of cameras and the names of the front/back-facing devices
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Creates a VideoCapturerAndroid instance for the device name
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First create a Video Source, then we can make a Video Track
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource then we can create our AudioTrack
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity
    // First we need to set the GLSurfaceView that it should render to
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can get our VideoRenderer.
    // IN THIS ORDER. This affects which renderer is drawn on top and which on the bottom.
    remoteRender = VideoRendererGui.create(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender = VideoRendererGui.create(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our PeerConnectionFactory
    //  Note that LOCAL_MEDIA_STREAM_ID can be any string
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    //  This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number." Also set the maximum number of chat users.
    this.pnRTCClient.listenOn("Kevin");
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a number to dial, call it now that you are connected.
    //  Else, remain listening for a call.
    if (extras.containsKey(Constants.CALL_USER)) {
        String callUser = extras.getString(Constants.CALL_USER, "");
        connectToUser(callUser);
    }
}
 
Example #27
Source File: MainActivity.java    From krankygeek with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}