Java Code Examples for org.kurento.client.WebRtcEndpoint#connect()

The following examples show how to use org.kurento.client.WebRtcEndpoint#connect(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: WebRtcOneToOneTest.java    From kurento-java with Apache License 2.0 6 votes vote down vote up
@Test
public void testWebRtcOneToOneChrome() throws Exception {
  // Media pipeline: one sending endpoint feeding a single viewer endpoint
  final MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  final WebRtcEndpoint senderEp = new WebRtcEndpoint.Builder(pipeline).build();
  final WebRtcEndpoint receiverEp = new WebRtcEndpoint.Builder(pipeline).build();
  senderEp.connect(receiverEp);

  // WebRTC negotiation: presenter sends, viewer receives
  getPresenter().initWebRtc(senderEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getViewer().initWebRtc(receiverEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  getViewer().subscribeEvents("playing");
  getViewer().waitForEvent("playing");

  // Let the media play for the guard time
  waitSeconds(PLAYTIME);

  // The play time observed by the viewer must match the expected one
  final double observedTime = getViewer().getCurrentTime();
  Assert.assertTrue("Error in play time (expected: " + PLAYTIME + " sec, real: "
      + observedTime + " sec)", getViewer().compare(PLAYTIME, observedTime));

  // Tear down the media pipeline
  pipeline.release();
}
 
Example 2
Source File: WebRtcStabilitySwitchTest.java    From kurento-java with Apache License 2.0 5 votes vote down vote up
@Test
public void testWebRtcStabilitySwitch() throws Exception {
  // Number of topology switches, overridable via system property
  final int switchCount =
      parseInt(getProperty("test.webrtcstability.switch", valueOf(DEFAULT_NUM_SWITCH)));

  // Media pipeline with two endpoints, each initially looped back to itself
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint epOne = new WebRtcEndpoint.Builder(pipeline).build();
  WebRtcEndpoint epTwo = new WebRtcEndpoint.Builder(pipeline).build();
  epOne.connect(epOne);
  epTwo.connect(epTwo);

  // WebRTC negotiation for both browsers (send and receive, video only)
  getPresenter().subscribeEvents("playing");
  getPresenter().initWebRtc(epOne, VIDEO_ONLY, SEND_RCV);
  getViewer().subscribeEvents("playing");
  getViewer().initWebRtc(epTwo, VIDEO_ONLY, SEND_RCV);

  // Alternate between loopback and back-to-back topologies
  for (int round = 0; round < switchCount; round++) {
    boolean loopbackRound = (round % 2 == 0);
    if (loopbackRound) {
      log.debug("Switch #" + round + ": loopback");
      epOne.connect(epOne);
      epTwo.connect(epTwo);
    } else {
      log.debug("Switch #" + round + ": B2B");
      epOne.connect(epTwo);
      epTwo.connect(epOne);
    }
    sleep(SECONDS.toMillis(PLAYTIME_PER_SWITCH));
  }

  // Release media resources
  pipeline.release();
}
 
Example 3
Source File: WebRtcQualityLoopbackTest.java    From kurento-java with Apache License 2.0 5 votes vote down vote up
public void doTest(BrowserType browserType, String videoPath, String audioUrl, Color color)
    throws InterruptedException {
  // Pipeline with a single WebRTC endpoint in loopback (send and receive)
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint loopbackEp = new WebRtcEndpoint.Builder(pipeline).build();
  loopbackEp.connect(loopbackEp);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(loopbackEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  // Block until media is flowing in the remote stream
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Let the video play for the guard time
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));

  // Play-time assessment
  double playedTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time of player (expected: " + PLAYTIME + " sec, real: "
      + playedTime + " sec)", getPage().compare(PLAYTIME, playedTime));

  // Optional color assessment
  if (color != null) {
    Assert.assertTrue("The color of the video should be " + color, getPage().similarColor(color));
  }

  // Optional perceived audio quality assessment (PESQ MOS)
  if (audioUrl != null) {
    float pesqMos = Ffmpeg.getPesqMos(audioUrl, AUDIO_SAMPLE_RATE);
    Assert.assertTrue("Bad perceived audio quality: PESQ MOS too low (expected=" + MIN_PESQ_MOS
        + ", real=" + pesqMos + ")", pesqMos >= MIN_PESQ_MOS);
  }

  // Release media resources
  pipeline.release();
}
 
Example 4
Source File: CompositeWebRtcRecorderTest.java    From kurento-java with Apache License 2.0 5 votes vote down vote up
@Test
public void testCompositeRecorder() throws Exception {

  // Media pipeline holding the composite mixer, four senders and the recorder
  MediaPipeline mp = kurentoClient.createMediaPipeline();

  Composite composite = new Composite.Builder(mp).build();

  // Red sender contributes all media (audio and video) to the composite
  HubPort hubPort1 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpRed.connect(hubPort1);

  // Green, blue and white senders contribute AUDIO only.
  // NOTE(review): the recording is later asserted to show red video, which is
  // consistent with only the red endpoint sending video -- confirm intent.
  HubPort hubPort2 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpGreen.connect(hubPort2, MediaType.AUDIO);

  HubPort hubPort3 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpBlue.connect(hubPort3, MediaType.AUDIO);

  HubPort hubPort4 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpWhite = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpWhite.connect(hubPort4, MediaType.AUDIO);

  // Recorder fed with the mixed output through a fifth hub port
  String recordingFile = getDefaultOutputFile(EXTENSION_WEBM);
  RecorderEndpoint recorderEp =
      new RecorderEndpoint.Builder(mp, Protocol.FILE + recordingFile).build();
  HubPort hubPort5 = new HubPort.Builder(composite).build();
  hubPort5.connect(recorderEp);

  // WebRTC browsers (all send-only)
  getPage(BROWSER2).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(webRtcEpWhite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  recorderEp.record();

  // Record for PLAYTIME seconds
  Thread.sleep(PLAYTIME * 1000);

  // Stop the recorder and wait for the asynchronous stop to complete
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      // Count down on error too so the await below does not time out
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));

  mp.release();

  // Media Pipeline #2: play back the recorded file through a fresh endpoint
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 =
      new PlayerEndpoint.Builder(mp2, Protocol.FILE + recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recorded file: expect WEBM codecs and red video
  launchBrowser(mp2, webRtcEp2, playerEp2, null, EXPECTED_VIDEO_CODEC_WEBM,
      EXPECTED_AUDIO_CODEC_WEBM, recordingFile, Color.RED, 0, 0, PLAYTIME);

  // Release Media Pipeline #2
  mp2.release();

  success = true;
}
 
Example 5
Source File: LongStabilityRecorderS3Test.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTest(final MediaProfileSpecType mediaProfileSpecType, String expectedAudioCodec,
    final String extension) throws Exception {

  // Total recording time, overridable via system property
  long recordMillis =
      PropertiesManager.getProperty(TEST_DURATION_PROPERTY, DEFAULT_TEST_DURATION);

  MediaPipeline pipeline = kurentoClient.createMediaPipeline();

  // Released if the pipeline reports any asynchronous error
  final CountDownLatch pipelineErrorLatch = new CountDownLatch(1);

  pipeline.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      log.error(msgError);
      pipelineErrorLatch.countDown();
    }
  });

  final WebRtcEndpoint webRtcSender = new WebRtcEndpoint.Builder(pipeline).build();

  // Negotiate the sending browser (audio only, send only)
  getPage().subscribeLocalEvents("playing");
  getPage().initWebRtc(webRtcSender, WebRtcChannel.AUDIO_ONLY, WebRtcMode.SEND_ONLY);
  Assert.assertTrue("Not received media in sender webrtc", getPage().waitForEvent("playing"));

  // Recorder wired to the sender endpoint
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  webRtcSender.connect(recorder);

  // Start recording
  recorder.record();

  // Record for the configured duration
  Thread.sleep(recordMillis);

  // Stop the recorder and wait for the asynchronous stop to finish
  final CountDownLatch stopLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  // Release media resources
  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  if (pipeline != null) {
    pipeline.release();
  }

  // A latch count of 1 means the error listener never fired
  Assert.assertTrue(msgError, pipelineErrorLatch.getCount() == 1);

  waitForFileExists(recordingFile);

  // Recorded duration must match the requested one within THRESHOLD_MS
  AssertMedia.assertDuration(recordingFile, recordMillis, THRESHOLD_MS);

}
 
Example 6
Source File: RoomParticipant.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
private String createSdpResponseForUser(RoomParticipant sender, String sdpOffer) {

    // Endpoint on which the sender publishes its media; without it there is
    // nothing to subscribe to yet.
    WebRtcEndpoint receivingEndpoint = sender.getReceivingEndpoint();
    if (receivingEndpoint == null) {
      log.warn("PARTICIPANT {}: Trying to connect to a user without receiving endpoint "
          + "(it seems is not yet fully connected)", this.name);
      return null;
    }

    // Loopback: the participant is requesting its own stream
    if (sender.getName().equals(name)) {
      // FIXME: Use another message type for receiving sdp offer
      log.debug("PARTICIPANT {}: configuring loopback", this.name);
      return receivingEndpoint.processOffer(sdpOffer);
    }

    // Refuse to create a duplicate sending endpoint for the same sender
    if (sendingEndpoints.get(sender.getName()) != null) {
      log.warn("PARTICIPANT {}: There is a sending endpoint to user {} "
          + "when trying to create another one", this.name, sender.getName());
      return null;
    }

    log.debug("PARTICIPANT {}: Creating a sending endpoint to user {}", this.name,
        sender.getName());

    WebRtcEndpoint sendingEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
    // putIfAbsent catches a concurrent creation racing with the get() above
    WebRtcEndpoint oldSendingEndpoint =
        sendingEndpoints.putIfAbsent(sender.getName(), sendingEndpoint);

    if (oldSendingEndpoint != null) {
      // Lost the race: another thread registered an endpoint first.
      // NOTE(review): the endpoint built above is not released on this path --
      // verify it is cleaned up elsewhere or this leaks a server object.
      log.warn(
          "PARTICIPANT {}: 2 threads have simultaneously created a sending endpoint for user {}",
          this.name, sender.getName());
      return null;
    }

    log.debug("PARTICIPANT {}: Created sending endpoint for user {}", this.name, sender.getName());
    try {
      // Re-read the receiving endpoint: the sender may have gone away since
      // the check at the top of this method.
      receivingEndpoint = sender.getReceivingEndpoint();
      if (receivingEndpoint != null) {
        receivingEndpoint.connect(sendingEndpoint);
        return sendingEndpoint.processOffer(sdpOffer);
      }

    } catch (KurentoServerException e) {

      // TODO Check object status when KurentoClient set this info in the
      // object
      if (e.getCode() == 40101) {
        // 40101: the receiving endpoint was released before we could connect
        log.warn("Receiving endpoint is released when trying to connect a sending endpoint to it",
            e);
      } else {
        log.error("Exception connecting receiving endpoint to sending endpoint", e);
        // Best-effort async release of the endpoint we just created
        sendingEndpoint.release(new Continuation<Void>() {
          @Override
          public void onSuccess(Void result) throws Exception {

          }

          @Override
          public void onError(Throwable cause) throws Exception {
            log.error("Exception releasing WebRtcEndpoint", cause);
          }
        });
      }

      // Undo the registration performed above, then release via the helper
      sendingEndpoints.remove(sender.getName());

      releaseEndpoint(sender.getName(), sendingEndpoint);
    }

    // Reached on connect failure or when the receiving endpoint disappeared
    return null;
  }
 
Example 7
Source File: WebRtcOneLoopbackTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testWebRtcLoopback() throws Exception {

  // Single-endpoint pipeline in loopback: what the browser sends comes back
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint loopbackEp = new WebRtcEndpoint.Builder(pipeline).build();
  loopbackEp.connect(loopbackEp);

  // Released once media is reported FLOWING into the endpoint
  final CountDownLatch mediaFlowingLatch = new CountDownLatch(1);
  loopbackEp
      .addMediaFlowInStateChangeListener(new EventListener<MediaFlowInStateChangeEvent>() {

        @Override
        public void onEvent(MediaFlowInStateChangeEvent event) {
          if (event.getState().equals(MediaFlowState.FLOWING)) {
            mediaFlowingLatch.countDown();
          }
        }
      });

  // Begin WebRTC negotiation and wait for remote playback
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(loopbackEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  Assert.assertTrue("Not received FLOWING IN event in webRtcEp: " + WebRtcChannel.AUDIO_AND_VIDEO,
      mediaFlowingLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  Assert.assertTrue(
      "Not received media (timeout waiting playing event): " + WebRtcChannel.AUDIO_AND_VIDEO,
      getPage().waitForEvent("playing"));

  // Let the video play for the guard time
  waitSeconds(PLAYTIME);

  // Check the reported play time and the expected video color
  double playedTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + playedTime + " sec)",
      getPage().compare(PLAYTIME, playedTime));
  Assert.assertTrue("The color of the video should be green",
      getPage().similarColor(CHROME_VIDEOTEST_COLOR));

  // Release media resources
  pipeline.release();
}
 
Example 8
Source File: WebRtcFakeMediaTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testWebRtcLoopback() throws Exception {

  // Pipeline with one endpoint whose output is fed back into its own input
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(pipeline).build();
  webRtcEp.connect(webRtcEp);

  // Counts down when incoming media reaches FLOWING state
  final CountDownLatch flowInLatch = new CountDownLatch(1);
  webRtcEp.addMediaFlowInStateChangeListener(new EventListener<MediaFlowInStateChangeEvent>() {

    @Override
    public void onEvent(MediaFlowInStateChangeEvent event) {
      boolean flowing = event.getState().equals(MediaFlowState.FLOWING);
      if (flowing) {
        flowInLatch.countDown();
      }
    }
  });

  // WebRTC negotiation, then wait for both the FLOWING event and playback
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  Assert.assertTrue("Not received FLOWING IN event in webRtcEp: " + WebRtcChannel.AUDIO_AND_VIDEO,
      flowInLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  Assert.assertTrue(
      "Not received media (timeout waiting playing event): " + WebRtcChannel.AUDIO_AND_VIDEO,
      getPage().waitForEvent("playing"));

  // Let the media play
  waitSeconds(PLAYTIME);

  // Compare the reported play time against the expected guard time
  double measured = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + measured + " sec)",
      getPage().compare(PLAYTIME, measured));

  // Clean up
  pipeline.release();
}
 
Example 9
Source File: WebRtcFourOneToManyTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testWebRtcOneToManyChrome() throws InterruptedException, IOException {
  // Media Pipeline: one sender (master) endpoint fanned out to numViewers receivers
  final MediaPipeline mp = kurentoClient.createMediaPipeline();
  final WebRtcEndpoint masterWebRtcEp = new WebRtcEndpoint.Builder(mp).build();

  // Per-viewer assets: latency controllers, endpoints, and a completion latch
  final LatencyController[] cs = new LatencyController[numViewers];
  final WebRtcEndpoint[] viewerWebRtcEPs = new WebRtcEndpoint[numViewers];
  final CountDownLatch latch = new CountDownLatch(numViewers);

  // Presenter negotiation (video only, send only)
  getPresenter().subscribeLocalEvents("playing");
  getPresenter().initWebRtc(masterWebRtcEp, WebRtcChannel.VIDEO_ONLY, WebRtcMode.SEND_ONLY);

  if (monitor != null) {
    monitor.addWebRtcClientAndActivateOutboundStats(getPresenter().getBrowser().getId(),
        masterWebRtcEp, getPresenter(), "webRtcPeer.peerConnection");
  }

  // Viewers: one concurrent task per viewer
  ExecutorService exec = Executors.newFixedThreadPool(numViewers);
  try {
    for (int j = 0; j < numViewers; j++) {
      final int i = j;
      exec.execute(new Runnable() {
        @Override
        public void run() {
          try {
            viewerWebRtcEPs[i] = new WebRtcEndpoint.Builder(mp).build();
            masterWebRtcEp.connect(viewerWebRtcEPs[i]);
            if (monitor != null) {
              monitor.incrementNumClients();
            }

            // Latency control
            String name = getViewer(i).getBrowser().getId();
            cs[i] = new LatencyController(name, monitor);

            // WebRTC negotiation for this viewer (receive only)
            getViewer(i).subscribeEvents("playing");
            getViewer(i).initWebRtc(viewerWebRtcEPs[i], WebRtcChannel.VIDEO_ONLY,
                WebRtcMode.RCV_ONLY);
            if (monitor != null) {
              monitor.addWebRtcClientAndActivateInboundStats(getViewer(i).getBrowser().getId(),
                  viewerWebRtcEPs[i], getViewer(i), "webRtcPeer.peerConnection");
            }

            // Latency assessment: check, chart, CSV, and error log
            cs[i].checkLatency(PLAYTIME, TimeUnit.SECONDS, getPresenter(), getViewer(i));
            cs[i].drawChart(getDefaultOutputFile("-" + name + "-latency.png"), 500, 270);
            cs[i].writeCsv(getDefaultOutputFile("-" + name + "-latency.csv"));
            cs[i].logLatencyErrorrs();
          } catch (Exception e) {
            e.printStackTrace();
          } finally {
            latch.countDown();
            if (monitor != null) {
              monitor.decrementNumClients();
            }
          }
        }
      });
    }

    // Wait to finish viewers tasks
    latch.await();
  } finally {
    // Fix: the executor was never shut down, leaking its worker threads
    exec.shutdown();
  }

  // Release Media Pipeline
  mp.release();
}
 
Example 10
Source File: DispatcherOneToManyWebRtcTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testDispatcherOneToManyWebRtc() throws Exception {

  // Media pipeline: three send/receive WebRTC endpoints attached to a
  // DispatcherOneToMany hub, which broadcasts one selected source to all ports
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEp3 = new WebRtcEndpoint.Builder(mp).build();

  DispatcherOneToMany dispatcherOneToMany = new DispatcherOneToMany.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort2 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort3 = new HubPort.Builder(dispatcherOneToMany).build();

  // Wire every endpoint to its hub port in both directions
  webRtcEp1.connect(hubPort1);
  webRtcEp2.connect(hubPort2);
  webRtcEp3.connect(hubPort3);
  hubPort1.connect(webRtcEp1);
  hubPort2.connect(webRtcEp2);
  hubPort3.connect(webRtcEp3);

  // Initially broadcast browser 1 (asserted green below)
  dispatcherOneToMany.setSource(hubPort1);

  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEp1, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  getPage(BROWSER2).subscribeEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  getPage(BROWSER3).subscribeEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEp3, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  Thread.sleep(PLAYTIME * 1000);

  // Assertions: every browser receives media and shows the current source
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));

  Assert.assertTrue("The color of the video should be green (GREEN)",
      getPage(BROWSER1).similarColor(Color.GREEN));
  Assert.assertTrue("The color of the video should be green (GREEN)",
      getPage(BROWSER2).similarColor(Color.GREEN));
  Assert.assertTrue("The color of the video should be green (GREEN)",
      getPage(BROWSER3).similarColor(Color.GREEN));

  // Switch the broadcast source to browser 2 (asserted blue)
  Thread.sleep(3000);
  dispatcherOneToMany.setSource(hubPort2);

  Assert.assertTrue("The color of the video should be blue (BLUE)",
      getPage(BROWSER1).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue (BLUE)",
      getPage(BROWSER2).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue (BLUE)",
      getPage(BROWSER3).similarColor(Color.BLUE));

  // Switch the broadcast source to browser 3 (asserted red)
  Thread.sleep(3000);
  dispatcherOneToMany.setSource(hubPort3);

  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER1).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER2).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER3).similarColor(Color.RED));

  // Removing the source must keep broadcasting the last selected one (red)
  Thread.sleep(3000);
  dispatcherOneToMany.removeSource();
  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER1).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER2).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red (RED)",
      getPage(BROWSER3).similarColor(Color.RED));

  Thread.sleep(2000);

  // Fix: release the media pipeline -- the original test leaked it; every
  // other test in this suite releases its pipeline before finishing
  mp.release();
}
 
Example 11
Source File: CompositeWebRtcTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testCompositeWebRtc() throws Exception {
  // Pipeline: four senders (red/green/blue/white) mixed by a Composite hub
  // into a single stream received by a fifth browser
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint redEp = new WebRtcEndpoint.Builder(pipeline).build();
  WebRtcEndpoint greenEp = new WebRtcEndpoint.Builder(pipeline).build();
  WebRtcEndpoint blueEp = new WebRtcEndpoint.Builder(pipeline).build();

  Composite mixer = new Composite.Builder(pipeline).build();
  HubPort redPort = new HubPort.Builder(mixer).build();
  HubPort greenPort = new HubPort.Builder(mixer).build();
  HubPort bluePort = new HubPort.Builder(mixer).build();

  redEp.connect(redPort);
  greenEp.connect(greenPort);
  blueEp.connect(bluePort);

  WebRtcEndpoint whiteEp = new WebRtcEndpoint.Builder(pipeline).build();
  HubPort whitePort = new HubPort.Builder(mixer).build();
  whiteEp.connect(whitePort);

  // Fifth port carries the mixed output towards the receiving endpoint
  WebRtcEndpoint mixedOutEp = new WebRtcEndpoint.Builder(pipeline).build();
  HubPort outPort = new HubPort.Builder(mixer).build();
  outPort.connect(mixedOutEp);

  // Sending browsers
  getPage(BROWSER2).initWebRtc(redEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(greenEp, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(blueEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(whiteEp, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // Receiving browser
  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(mixedOutEp, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Each quadrant of the mixed video must show the corresponding color
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Upper left part of the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 0, 0));
  Assert.assertTrue("Upper right part of the video must be green",
      getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 0));
  Assert.assertTrue("Lower left part of the video must be blue",
      getPage(BROWSER1).similarColorAt(Color.BLUE, 0, 450));
  Assert.assertTrue("Lower right part of the video must be white",
      getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 450));

  // Finally, insert a black & white filter in front of the red sender
  GStreamerFilter grayscaleFilter =
      new GStreamerFilter.Builder(pipeline, "videobalance saturation=0.0").build();
  redEp.connect(grayscaleFilter);
  grayscaleFilter.connect(redPort);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));
  Assert.assertTrue("When connecting the filter, the upper left part of the video must be gray",
      getPage(BROWSER1).similarColorAt(new Color(75, 75, 75), 0, 0));

  // Release media resources
  pipeline.release();
}
 
Example 12
Source File: CompositeWebRtcUsersTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testCompositeWebRtcUsers() throws Exception {
  // Media Pipeline: senders are mixed by a Composite hub; BROWSER1 receives
  // the mixed stream
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  Composite composite = new Composite.Builder(mp).build();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(composite).build();
  webRtcEpRed.connect(hubPort1);

  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort2 = new HubPort.Builder(composite).build();
  webRtcEpGreen.connect(hubPort2);

  // Output endpoint receiving the mixed stream from a dedicated port
  WebRtcEndpoint webRtcEpComposite = new WebRtcEndpoint.Builder(mp).build();

  HubPort hubPort5 = new HubPort.Builder(composite).build();
  hubPort5.connect(webRtcEpComposite);

  // Blue and white endpoints exist but are not connected to the hub yet
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpWhite = new WebRtcEndpoint.Builder(mp).build();

  // Test execution: the four senders negotiate, then the receiving browser
  getPage(BROWSER2).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(webRtcEpWhite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEpComposite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Assertions: with red and green connected the mix shows both colors
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Left part of the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 0, 200));
  Assert.assertTrue("Upper right part of the video must be green",
      getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 300));

  // Releasing the green port leaves only red in the mix
  hubPort2.release();
  Thread.sleep(3000);

  Assert.assertTrue("All the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 300, 200));

  // Adding the white sender splits the layout between red and white
  HubPort hubPort4 = new HubPort.Builder(composite).build();
  webRtcEpWhite.connect(hubPort4);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));

  Assert.assertTrue("Left part of the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 0, 300));
  Assert.assertTrue("Left part of the video must be white",
      getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 300));

  // Reconnect green plus blue and white on fresh ports; with four senders the
  // quadrant placement is not fixed, hence the OR-ed position checks below
  hubPort4.release();
  hubPort2 = new HubPort.Builder(composite).build();
  hubPort4 = new HubPort.Builder(composite).build();

  webRtcEpGreen.connect(hubPort2);

  HubPort hubPort3 = new HubPort.Builder(composite).build();
  webRtcEpBlue.connect(hubPort3);
  webRtcEpWhite.connect(hubPort4);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));

  // Each color must appear in one of the four quadrants
  Assert.assertTrue("The red color must be in some position",
      (getPage(BROWSER1).similarColorAt(Color.RED, 0, 0)
          || getPage(BROWSER1).similarColorAt(Color.RED, 450, 0)
          || getPage(BROWSER1).similarColorAt(Color.RED, 0, 450)
          || getPage(BROWSER1).similarColorAt(Color.RED, 450, 450)));
  Assert.assertTrue("The blue color must be in some position",
      (getPage(BROWSER1).similarColorAt(Color.BLUE, 450, 450)
          || getPage(BROWSER1).similarColorAt(Color.BLUE, 0, 450)
          || getPage(BROWSER1).similarColorAt(Color.BLUE, 450, 0)
          || getPage(BROWSER1).similarColorAt(Color.BLUE, 0, 0)));
  Assert.assertTrue("The green color must be in some position",
      (getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 0)
          || getPage(BROWSER1).similarColorAt(Color.GREEN, 0, 450)
          || getPage(BROWSER1).similarColorAt(Color.GREEN, 0, 0)
          || getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 450)));
  Assert.assertTrue("The white color must be in some position",
      (getPage(BROWSER1).similarColorAt(Color.WHITE, 0, 450)
          || getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 0)
          || getPage(BROWSER1).similarColorAt(Color.WHITE, 0, 0)
          || getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 450)));

  // Release Media Pipeline
  mp.release();
}
 
Example 13
Source File: RecorderMultiSlashesDirectoryTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {

  // A tripled separator, used to build a recording path with redundant
  // slashes in its directory part
  String tripleSeparator = File.separator + File.separator + File.separator;
  final CountDownLatch stopLatch = new CountDownLatch(1);

  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(pipeline).build();

  // Recording path placed inside a timestamped directory
  String recordingFile = getRecordUrl(extension).replace(getSimpleTestName(),
      new Date().getTime() + File.separator + getSimpleTestName());

  String recordingFileWithMultiSlashes = recordingFile.replace(File.separator, tripleSeparator);

  log.debug("The path with multi slash is {} ", recordingFileWithMultiSlashes);

  // Recorder targets the multi-slash variant of the path
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, recordingFileWithMultiSlashes)
      .withMediaProfile(mediaProfileSpecType).build();
  // Loopback for the browser plus a tee into the recorder
  webRtcEp.connect(webRtcEp);
  webRtcEp.connect(recorder);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
  recorder.record();

  // Block until the remote stream starts playing
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  Thread.sleep(SECONDS.toMillis(PLAYTIME));

  // Stop recording and wait for the asynchronous stop to complete
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  // Wait until the recording shows up on disk (at the normalized path)
  waitForFileExists(recordingFile);

  AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec);
  pipeline.release();
}
 
Example 14
Source File: RecorderSwitchWebRtcWebRtcAndPlayerTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Records two browser WebRTC sources switched into one RecorderEndpoint
  // (red -> green -> red) and then verifies the recorded file's colors and codecs.
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  // Any pipeline error stores its message and counts the latch down; the final
  // assertion checks the latch count is still 1 (listener never fired).
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  // Record from the red source first.
  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // green
  // Switch the recorder input to the green source.
  webRtcEpGreen.connect(recorderEp);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch back to the red source for the final segment.
  webRtcEpRed.connect(recorderEp);

  startWebrtc = System.currentTimeMillis();
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  // Stop the recorder asynchronously; both outcomes release the latch so the test
  // cannot hang here.
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Count of 1 means the pipeline error listener never fired.
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Expected total play time, accounting for connection delays (red is counted
  // twice: initial connect plus the switch back).
  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 15
Source File: RecorderSwitchWebRtcWebRtcAndPlayerTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Records a WebRTC source, switches the recorder input to a PlayerEndpoint, then
  // switches back to the WebRTC source, and finally verifies the recorded file.
  // Media Pipeline #1
  getPage(BROWSER2).close();
  final MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  final CountDownLatch pipelineErrorLatch = new CountDownLatch(1);

  // Record any pipeline error; the test asserts later that this never happened.
  pipeline.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      pipelineErrorLatch.countDown();
    }
  });

  final WebRtcEndpoint redEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
  final PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline, mediaUrlPlayer).build();

  final String recordingFile = getRecordUrl(extension);
  final RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long connectStart = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(redEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  redEndpoint.connect(recorder);
  recorder.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  final long redConnectionTime = System.currentTimeMillis() - connectStart;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  connectStart = System.currentTimeMillis();

  // Switch the recorder input to the player source.
  player.play();
  player.connect(recorder);
  final long playerConnectionTime = System.currentTimeMillis() - connectStart;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch back to the WebRTC source for the final segment.
  redEndpoint.connect(recorder);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(pipeline);

  final CountDownLatch stopLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  pipeline.release();

  Assert.assertTrue(msgError, pipelineErrorLatch.getCount() == 1);

  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * redConnectionTime) + playerConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 16
Source File: RecorderSwitchWebRtcWebRtcPlayerWithPassThroughTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Records two WebRTC sources switched at a PassThrough element; the PassThrough
  // stays connected to the RecorderEndpoint while the sources are swapped in front
  // of it (red -> green -> red). The recording is verified at the end.
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  // Any pipeline error stores its message and counts the latch down; the final
  // assertion checks the latch count is still 1 (listener never fired).
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // The recorder is fed through this PassThrough; only the PassThrough's input
  // changes when sources are switched.
  PassThrough passThrough = new PassThrough.Builder(mp).build();
  passThrough.connect(recorderEp);

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  // Record from the red source first.
  webRtcEpRed.connect(passThrough);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // green
  // Switch the PassThrough input to the green source.
  webRtcEpGreen.connect(passThrough);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch back to the red source for the final segment.
  webRtcEpRed.connect(passThrough);
  startWebrtc = System.currentTimeMillis();
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  // Stop the recorder asynchronously; both outcomes release the latch so the test
  // cannot hang here.
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Count of 1 means the pipeline error listener never fired.
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Expected total play time, accounting for connection delays (red counted twice:
  // initial connect plus the switch back).
  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 17
Source File: RecorderSwitchWebRtcWebRtcPlayerWithPassThroughTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Records a WebRTC source, switches to a PlayerEndpoint source, then switches back,
  // with all switching done at a PassThrough element that stays connected to the
  // RecorderEndpoint. The recording is verified at the end.
  // Media Pipeline #1
  getPage(BROWSER2).close();
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  // Any pipeline error stores its message and counts the latch down; the final
  // assertion checks the latch count is still 1 (listener never fired).
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // The recorder is fed through this PassThrough; only the PassThrough's input
  // changes when sources are switched.
  PassThrough passThrough = new PassThrough.Builder(mp).build();
  passThrough.connect(recorderEp);

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  // Record from the WebRTC source first.
  webRtcEpRed.connect(passThrough);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // Switch the PassThrough input to the player source.
  playerEp.play();
  playerEp.connect(passThrough);
  long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch back to the WebRTC source for the final segment.
  webRtcEpRed.connect(passThrough);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  // Stop the recorder asynchronously; both outcomes release the latch so the test
  // cannot hang here.
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Count of 1 means the pipeline error listener never fired.
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Expected total play time, accounting for connection delays (WebRTC counted
  // twice: initial connect plus the switch back).
  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 18
Source File: RecorderSwitchWebrtcTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Switches the recorder input across three WebRTC sources (red, green, blue),
  // then verifies the recorded file's colors and codecs.
  // Media Pipeline #1
  final MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  final WebRtcEndpoint redEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
  final WebRtcEndpoint greenEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
  final WebRtcEndpoint blueEndpoint = new WebRtcEndpoint.Builder(pipeline).build();

  final String recordingFile = getRecordUrl(extension);
  final RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long connectStart = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(redEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(greenEndpoint, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(blueEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  redEndpoint.connect(recorder);
  recorder.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  final long redConnectionTime = System.currentTimeMillis() - connectStart;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  connectStart = System.currentTimeMillis();

  // green
  greenEndpoint.connect(recorder);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  final long greenConnectionTime = System.currentTimeMillis() - connectStart;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  connectStart = System.currentTimeMillis();

  // blue
  blueEndpoint.connect(recorder);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));
  final long blueConnectionTime = System.currentTimeMillis() - connectStart;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(pipeline);
  final CountDownLatch stopLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage(BROWSER3).getTimeout(), TimeUnit.SECONDS));
  pipeline.release();

  // Reloading browser
  getPage(BROWSER3).close();

  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds(redConnectionTime + greenConnectionTime + blueConnectionTime);

  checkRecordingFile(recordingFile, BROWSER4, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 19
Source File: AlphaBlendingWebRtcTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
@Test
public void testAlphaBlendingWebRtc() throws Exception {
  // Composes three WebRTC sources (red, green, blue) through an AlphaBlending hub.
  // The red stream is set as master (background); green and blue are overlaid at
  // configured positions. A fourth browser receives the blended output and pixel
  // colors are asserted before and after moving the blue overlay.
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  AlphaBlending alphaBlending = new AlphaBlending.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort2 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort3 = new HubPort.Builder(alphaBlending).build();

  webRtcEpRed.connect(hubPort1);
  webRtcEpGreen.connect(hubPort2);
  webRtcEpBlue.connect(hubPort3);

  // Output port: the blended stream is sent to a receive-only endpoint.
  WebRtcEndpoint webRtcEpAlphabaBlending = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort4 = new HubPort.Builder(alphaBlending).build();
  hubPort4.connect(webRtcEpAlphabaBlending);

  // Red is the master/background layer.
  alphaBlending.setMaster(hubPort1, 1);

  // Overlays: green at the upper-left corner, blue near the center (x, y, z-order,
  // width, height are relative values).
  alphaBlending.setPortProperties(0F, 0F, 8, 0.2F, 0.2F, hubPort2);
  alphaBlending.setPortProperties(0.4F, 0.4F, 7, 0.2F, 0.2F, hubPort3);

  getPage(BROWSER1).subscribeLocalEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER4).subscribeEvents("playing");
  getPage(BROWSER4).initWebRtc(webRtcEpAlphabaBlending, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Assertions (messages now match the colors actually checked)
  Assert.assertTrue("Upper left part of the video must be green",
      getPage(BROWSER4).similarColorAt(Color.GREEN, 0, 0));
  Assert.assertTrue("Lower right part of the video must be red",
      getPage(BROWSER4).similarColorAt(Color.RED, 315, 235));
  Assert.assertTrue("Center of the video must be blue",
      getPage(BROWSER4).similarColorAt(Color.BLUE, 160, 120));

  // Move the blue overlay to the lower-right corner; the center reverts to the
  // red master layer.
  alphaBlending.setPortProperties(0.8F, 0.8F, 7, 0.2F, 0.2F, hubPort3);

  Assert.assertTrue("Lower right part of the video must be blue",
      getPage(BROWSER4).similarColorAt(Color.BLUE, 315, 235));
  Assert.assertTrue("Center of the video must be red",
      getPage(BROWSER4).similarColorAt(Color.RED, 160, 120));
  Thread.sleep(PLAYTIME * 1000);

  // Release Media Pipeline (was leaked in the original version of this test).
  mp.release();
}
 
Example 20
Source File: KmsPerformanceTest.java    From kurento-java with Apache License 2.0 4 votes vote down vote up
/**
 * Wires {@code inputEndpoint} to {@code outputEndpoint}, optionally inserting a media
 * filter between them according to the configured {@code mediaProcessingType}.
 *
 * <p>Rewritten so the filter is chosen in the switch and connected once afterwards:
 * the original declared {@code Filter filter} inside one {@code case} arm and assigned
 * it across the others (fragile case-scope declaration), and duplicated the
 * connect/connect/log triple in every arm.
 *
 * @param inputEndpoint source WebRTC endpoint
 * @param outputEndpoint sink WebRTC endpoint
 */
private void connectWithMediaProcessing(WebRtcEndpoint inputEndpoint,
    WebRtcEndpoint outputEndpoint) {

  final Filter filter;
  final String filterName;

  switch (mediaProcessingType) {
    case ENCODER:
      // Forces transcoding by constraining the caps to raw video.
      filter = new GStreamerFilter.Builder(mp, "capsfilter caps=video/x-raw")
          .withFilterType(FilterType.VIDEO).build();
      filterName = "GStreamerFilter";
      break;

    case FILTER:
    case FACEOVERLAY:
      // FILTER deliberately falls through to FACEOVERLAY (default filter type).
      filter = new FaceOverlayFilter.Builder(mp).build();
      filterName = "FaceOverlayFilter";
      break;

    case ZBAR:
      filter = new ZBarFilter.Builder(mp).build();
      filterName = "ZBarFilter";
      break;

    case IMAGEOVERLAY:
      filter = new ImageOverlayFilter.Builder(mp).build();
      filterName = "ImageOverlayFilter";
      break;

    case PLATEDETECTOR:
      filter = new PlateDetectorFilter.Builder(mp).build();
      filterName = "PlateDetectorFilter";
      break;

    case CROWDDETECTOR:
      List<RegionOfInterest> rois = getDummyRois();
      filter = new CrowdDetectorFilter.Builder(mp, rois).build();
      filterName = "CrowdDetectorFilter";
      break;

    case CHROMA:
      filter = new ChromaFilter.Builder(mp, new WindowParam(0, 0, 640, 480)).build();
      filterName = "ChromaFilter";
      break;

    case NONE:
    default:
      // No media processing: connect the endpoints directly.
      inputEndpoint.connect(outputEndpoint);
      log.debug("Pipeline: WebRtcEndpoint -> WebRtcEndpoint");
      return;
  }

  // Common wiring for every filtered case.
  inputEndpoint.connect(filter);
  filter.connect(outputEndpoint);
  log.debug("Pipeline: WebRtcEndpoint -> {} -> WebRtcEndpoint", filterName);
}