Java Code Examples for org.kurento.client.MediaPipeline#addErrorListener()

The following examples show how to use org.kurento.client.MediaPipeline#addErrorListener(). All of them are taken from the kurento-java project; the source file for each example is noted above it.
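The examples below all follow the same basic pattern: create a MediaPipeline, register an error listener that captures the error description and counts down a CountDownLatch, run the media scenario, and finally assert that the latch was never triggered. The following is a minimal, self-contained sketch of that pattern, not taken from the examples themselves; the WebSocket URI and class name are illustrative assumptions.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.kurento.client.ErrorEvent;
import org.kurento.client.EventListener;
import org.kurento.client.KurentoClient;
import org.kurento.client.MediaPipeline;

public class PipelineErrorListenerSketch {

  public static void main(String[] args) throws InterruptedException {
    // Assumption: a Kurento Media Server is reachable at this WebSocket URI
    KurentoClient kurentoClient = KurentoClient.create("ws://localhost:8888/kurento");
    MediaPipeline pipeline = kurentoClient.createMediaPipeline();

    final CountDownLatch errorLatch = new CountDownLatch(1);
    final StringBuilder errorMessage = new StringBuilder();

    // The listener fires for asynchronous errors raised anywhere in the pipeline
    pipeline.addErrorListener(new EventListener<ErrorEvent>() {
      @Override
      public void onEvent(ErrorEvent event) {
        errorMessage.append("Description:").append(event.getDescription())
            .append("; Error code:").append(event.getType());
        errorLatch.countDown();
      }
    });

    // ... create and connect media elements, run the scenario under test ...

    // await() returns true only if countDown() was called, i.e. an error event arrived
    boolean errorReceived = errorLatch.await(5, TimeUnit.SECONDS);
    if (errorReceived) {
      System.err.println("Pipeline error: " + errorMessage);
    }

    pipeline.release();
    kurentoClient.destroy();
  }
}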
Example 1
Source File: RecorderSwitchPlayerTest.java    From kurento-java with Apache License 2.0
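// Switches the recorder source among several PlayerEndpoints: each player is connected in
// turn to a receive-only WebRtcEndpoint and to the RecorderEndpoint, and the resulting
// recording is checked for the expected colors and codecs.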
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String[] mediaUrls, Color[] expectedColors)
        throws Exception {

  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  int numPlayers = mediaUrls.length;
  PlayerEndpoint[] players = new PlayerEndpoint[numPlayers];

  for (int i = 0; i < numPlayers; i++) {
    players[i] = new PlayerEndpoint.Builder(mp, mediaUrls[i]).build();
  }

  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);

  boolean startRecord = false;
  for (int i = 0; i < numPlayers; i++) {
    players[i].connect(webRtcEp);
    players[i].connect(recorderEp);
    players[i].play();

    if (!startRecord) {

      Assert.assertTrue("Not received media (timeout waiting playing event)",
          getPage().waitForEvent("playing"));
      recorderEp.record();
      startRecord = true;
    }

    waitSeconds(PLAYTIME / numPlayers);
  }

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Reloading browser
  getPage().reload();

  checkRecordingFile(recordingFile, "browser", expectedColors, PLAYTIME, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 2
Source File: RecorderSwitchWebRtcWebRtcPlayerWithPassThroughTest.java    From kurento-java with Apache License 2.0
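// Records through a PassThrough element whose source is switched between a send-only
// WebRtcEndpoint and a PlayerEndpoint; the expected recording duration accounts for the
// measured connection times.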
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Media Pipeline #1
  getPage(BROWSER2).close();
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  PassThrough passThrough = new PassThrough.Builder(mp).build();
  passThrough.connect(recorderEp);

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(passThrough);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  playerEp.play();
  playerEp.connect(passThrough);
  long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  webRtcEpRed.connect(passThrough);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 3
Source File: RecorderSwitchWebRtcWebRtcPlayerWithPassThroughTest.java    From kurento-java with Apache License 2.0
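// Records through a PassThrough element whose source is switched between two send-only
// WebRtcEndpoints (red and green browsers), then back to the first one.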
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  PassThrough passThrough = new PassThrough.Builder(mp).build();
  passThrough.connect(recorderEp);

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(passThrough);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // green
  webRtcEpGreen.connect(passThrough);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  webRtcEpRed.connect(passThrough);
  startWebrtc = System.currentTimeMillis();
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 4
Source File: RecorderSwitchWebRtcWebRtcAndPlayerTest.java    From kurento-java with Apache License 2.0
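// Same switching scenario as Example 2, but the send-only WebRtcEndpoint and the
// PlayerEndpoint are connected directly to the RecorderEndpoint, without a PassThrough.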
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Media Pipeline #1
  getPage(BROWSER2).close();
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  playerEp.play();
  playerEp.connect(recorderEp);
  long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  webRtcEpRed.connect(recorderEp);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 5
Source File: RecorderSwitchWebRtcWebRtcAndPlayerTest.java    From kurento-java with Apache License 2.0
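// Switches the RecorderEndpoint source directly between two send-only WebRtcEndpoints
// (red and green browsers), then back to the first one.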
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // green
  webRtcEpGreen.connect(recorderEp);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  webRtcEpRed.connect(recorderEp);

  startWebrtc = System.currentTimeMillis();
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 6
Source File: RecorderSwitchPlayerWithPassThroughTest.java    From kurento-java with Apache License 2.0
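// Variant of Example 1 that records through a PassThrough element: each PlayerEndpoint is
// connected in turn to the receive-only WebRtcEndpoint and to the PassThrough feeding the
// RecorderEndpoint.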
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String[] mediaUrls, Color[] expectedColors)
        throws Exception {

  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  int numPlayers = mediaUrls.length;
  PlayerEndpoint[] players = new PlayerEndpoint[numPlayers];

  for (int i = 0; i < numPlayers; i++) {
    players[i] = new PlayerEndpoint.Builder(mp, mediaUrls[i]).build();
  }

  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  PassThrough passThrough = new PassThrough.Builder(mp).build();

  passThrough.connect(recorderEp);

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);

  final CountDownLatch recorderLatch = new CountDownLatch(1);

  boolean startRecord = false;
  for (int i = 0; i < numPlayers; i++) {
    players[i].connect(webRtcEp);
    players[i].connect(passThrough);
    players[i].play();

    if (!startRecord) {

      Assert.assertTrue("Not received media (timeout waiting playing event)",
          getPage().waitForEvent("playing"));
      recorderEp.record();
      startRecord = true;
    }

    waitSeconds(PLAYTIME / numPlayers);
  }

  saveGstreamerDot(mp);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Reloading browser
  getPage().reload();

  checkRecordingFile(recordingFile, "browser", expectedColors, PLAYTIME, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 7
Source File: LongStabilityRecorderS3Test.java    From kurento-java with Apache License 2.0
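// Long-running stability test: records audio-only media from a send-only WebRtcEndpoint for
// a configurable duration and asserts the duration of the resulting file.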
public void doTest(final MediaProfileSpecType mediaProfileSpecType, String expectedAudioCodec,
    final String extension) throws Exception {

  long testDurationMillis =
      PropertiesManager.getProperty(TEST_DURATION_PROPERTY, DEFAULT_TEST_DURATION);

  MediaPipeline mp = kurentoClient.createMediaPipeline();

  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      log.error(msgError);
      errorPipelinelatch.countDown();
    }
  });
  final WebRtcEndpoint webRtcSender = new WebRtcEndpoint.Builder(mp).build();

  // WebRTC sender negotiation
  getPage().subscribeLocalEvents("playing");
  getPage().initWebRtc(webRtcSender, WebRtcChannel.AUDIO_ONLY, WebRtcMode.SEND_ONLY);
  Assert.assertTrue("Not received media in sender webrtc", getPage().waitForEvent("playing"));

  // Recorder
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  webRtcSender.connect(recorder);

  // Start recorder
  recorder.record();

  // Wait recording time
  Thread.sleep(testDurationMillis);

  // Stop recorder
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  // Release Media Pipeline
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  if (mp != null) {
    mp.release();
  }

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  waitForFileExists(recordingFile);

  // Assessments
  AssertMedia.assertDuration(recordingFile, testDurationMillis, THRESHOLD_MS);

}