org.kurento.client.EndOfStreamEvent Java Examples
The following examples show how to use
org.kurento.client.EndOfStreamEvent.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: EventTagTest.java From kurento-java with Apache License 2.0 | 5 votes |
@Test public void testEventWithoutTag() throws Exception { MediaPipeline mp = kurentoClient.createMediaPipeline(); final CountDownLatch eventReceived = new CountDownLatch(1); PlayerEndpoint player = new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm") .build(); player.addTag("test_1", "value_1"); player.addTag("test_2", "value_2"); player.addTag("test_3", "value_3"); player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { List<Tag> tags = event.getTags(); if (tags.size() == 0) { eventReceived.countDown(); } } }); player.play(); // Guard time to reproduce the whole video if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) { Assert.fail("Event not received"); } }
Example #2
Source File: RepositoryRecorderTest.java From kurento-java with Apache License 2.0 | 5 votes |
private void launchBrowser(WebRtcEndpoint webRtcEp, PlayerEndpoint playerEp, RecorderEndpoint recorderEp) throws InterruptedException { getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); playerEp.play(); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); if (recorderEp != null) { recorderEp.record(); } // Assertions Assert.assertTrue("Not received media (timeout waiting playing event)", getPage().waitForEvent("playing")); Assert.assertTrue("The color of the video should be black", getPage().similarColor(Color.BLACK)); Assert.assertTrue("Not received EOS event in player", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); double currentTime = getPage().getCurrentTime(); Assert.assertTrue( "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)", getPage().compare(PLAYTIME, currentTime)); }
Example #3
Source File: BaseRecorder.java From kurento-java with Apache License 2.0 | 4 votes |
protected void launchBrowser(MediaPipeline mp, WebRtcEndpoint webRtcEp, PlayerEndpoint playerEp, RecorderEndpoint recorderEp, String expectedVideoCodec, String expectedAudioCodec, String recordingFile, Color expectedColor, int xColor, int yColor, int playTime) throws InterruptedException { Timer gettingStats = new Timer(); final CountDownLatch errorContinuityAudiolatch = new CountDownLatch(1); getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); playerEp.play(); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); if (recorderEp != null) { recorderEp.record(); } // Assertions String inRecording = recorderEp == null ? " in the recording" : ""; Assert.assertTrue("Not received media (timeout waiting playing event)" + inRecording, getPage().waitForEvent("playing")); if (recorderEp == null) { // Checking continuity of the audio getPage().activatePeerConnectionInboundStats("webRtcPeer.peerConnection"); gettingStats.schedule(new CheckAudioTimerTask(errorContinuityAudiolatch, getPage()), 100, 200); } Assert.assertTrue( "Color at coordinates " + xColor + "," + yColor + " must be " + expectedColor + inRecording, getPage().similarColorAt(expectedColor, xColor, yColor)); Assert.assertTrue("Not received EOS event in player" + inRecording, eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); final CountDownLatch recorderLatch = new CountDownLatch(1); if (recorderEp != null) { saveGstreamerDot(mp); recorderEp.stopAndWait(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); Assert.assertTrue("Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); // Wait until 
file exists waitForFileExists(recordingFile); AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec); AssertMedia.assertDuration(recordingFile, TimeUnit.SECONDS.toMillis(playTime), TimeUnit.SECONDS.toMillis(getPage().getThresholdTime())); } else { gettingStats.cancel(); getPage().stopPeerConnectionInboundStats("webRtcPeer.peerConnection"); double currentTime = getPage().getCurrentTime(); Assert.assertTrue("Error in play time in the recorded video (expected: " + playTime + " sec, real: " + currentTime + " sec) " + inRecording, getPage().compare(playTime, currentTime)); if (recorderEp == null) { Assert.assertTrue("Check audio. There were more than 2 seconds without receiving packets", errorContinuityAudiolatch.getCount() == 1); } } }
Example #4
Source File: GStreamerFilterTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test
public void testInstantiation() throws InterruptedException {
  // Create a GStreamer filter that flips the video horizontally.
  filter = new GStreamerFilter.Builder(pipeline, "videoflip method=horizontal-flip").build();
  Assert.assertNotNull(filter);

  player.connect(filter);

  // Play through the filter and wait until the player reports end of stream.
  AsyncEventManager<EndOfStreamEvent> eosManager =
      new AsyncEventManager<EndOfStreamEvent>("EndOfStream event");
  player.addEndOfStreamListener(eosManager.getMediaEventListener());
  player.play();
  eosManager.waitForResult();

  filter.release();
}
Example #5
Source File: PlayerEndpointAsyncTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test
public void testEventEndOfStream() throws InterruptedException {
  // Register the EOS listener with an async continuation and wait for the
  // subscription to be confirmed before starting playback.
  AsyncResultManager<ListenerSubscription> subscriptionManager =
      new AsyncResultManager<>("EndOfStream Listener registration");
  AsyncEventManager<EndOfStreamEvent> eosManager = new AsyncEventManager<>("EndOfStream event");

  player.addEndOfStreamListener(eosManager.getMediaEventListener(),
      subscriptionManager.getContinuation());
  subscriptionManager.waitForResult();

  player.play();
  eosManager.waitForResult();
}
Example #6
Source File: FaceOverlayFilterTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Test if a {@link FaceOverlayFilter} can be created in the KMS. The filter is pipelined with a
 * {@link PlayerEndpoint}, which feeds video to the filter. This test depends on the correct
 * behaviour of the player and its events.
 *
 * @throws InterruptedException
 */
@Test
public void testFaceOverlayFilter() throws InterruptedException {
  PlayerEndpoint playerEndpoint =
      new PlayerEndpoint.Builder(pipeline, URL_POINTER_DETECTOR).build();
  playerEndpoint.connect(overlayFilter);

  // Play the whole clip through the filter; EOS proves the pipeline worked.
  AsyncEventManager<EndOfStreamEvent> eosManager = new AsyncEventManager<>("EndOfStream event");
  playerEndpoint.addEndOfStreamListener(eosManager.getMediaEventListener());
  playerEndpoint.play();
  eosManager.waitForResult();

  playerEndpoint.stop();
  playerEndpoint.release();
}
Example #7
Source File: DispatcherOneToManyPlayerTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Feeds two players (red and blue clips) into a DispatcherOneToMany hub and
 * switches the source while three browsers watch, asserting the expected
 * color after each switch and finally the EOS of the second player.
 *
 * Fix: the last three color assertions check {@code Color.BLUE} but their
 * failure messages said "should be red" (copy-paste error) — messages now
 * say "blue" to match the asserted color.
 */
@Test
public void testDispatcherOneToManyPlayer() throws Exception {
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/30sec/red.webm").build();
  PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/30sec/blue.webm").build();
  WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEp3 = new WebRtcEndpoint.Builder(mp).build();

  DispatcherOneToMany dispatcherOneToMany = new DispatcherOneToMany.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort2 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort3 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort4 = new HubPort.Builder(dispatcherOneToMany).build();
  HubPort hubPort5 = new HubPort.Builder(dispatcherOneToMany).build();

  playerEp.connect(hubPort1);
  playerEp2.connect(hubPort2);
  hubPort3.connect(webRtcEp1);
  hubPort4.connect(webRtcEp2);
  hubPort5.connect(webRtcEp3);
  dispatcherOneToMany.setSource(hubPort1);

  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp2.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });

  // Test execution
  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  getPage(BROWSER2).subscribeEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEp1, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  getPage(BROWSER3).subscribeEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEp3, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();

  // Assertions
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));

  // Source is hubPort1 (red clip): all viewers see red.
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER1).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER2).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER3).similarColor(Color.RED));
  Thread.sleep(3000);

  // Switch source to hubPort2 (blue clip): all viewers see blue.
  playerEp2.play();
  dispatcherOneToMany.setSource(hubPort2);
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER1).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER2).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER3).similarColor(Color.BLUE));
  Thread.sleep(3000);

  // Switch back to hubPort1: red again.
  dispatcherOneToMany.setSource(hubPort1);
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER1).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER2).similarColor(Color.RED));
  Assert.assertTrue("The color of the video should be red",
      getPage(BROWSER3).similarColor(Color.RED));
  Thread.sleep(3000);

  // Switch to hubPort2 once more: blue (messages fixed to match the check).
  dispatcherOneToMany.setSource(hubPort2);
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER1).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER2).similarColor(Color.BLUE));
  Assert.assertTrue("The color of the video should be blue",
      getPage(BROWSER3).similarColor(Color.BLUE));
  Thread.sleep(3000);

  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(TIMEOUT_EOS, TimeUnit.SECONDS));
}
Example #8
Source File: RecorderStopTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception { final CountDownLatch recorderLatch = new CountDownLatch(1); // Media Pipeline #1 MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build(); WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build(); String recordingFile = getRecordUrl(extension); final RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile) .withMediaProfile(mediaProfileSpecType).build(); playerEp.connect(webRtcEp1); playerEp.connect(recorderEp); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); // Test execution #1. Play the video while it is recorded launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec, recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME); ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); executor.schedule(new Runnable() { @Override public void run() { recorderEp.stopAndWait(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); } }, PLAYTIME / 2, TimeUnit.SECONDS); // Wait for EOS Assert.assertTrue("No EOS event", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); Assert.assertTrue("Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); // Release Media Pipeline #1 mp.release(); // Wait until file exists waitForFileExists(recordingFile); // Reloading browser getPage().reload(); // Media Pipeline #2 MediaPipeline mp2 = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp2 = 
new PlayerEndpoint.Builder(mp2, recordingFile).build(); WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build(); playerEp2.connect(webRtcEp2); // Playing the recording launchBrowser(null, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec, recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME / 2); // Release Media Pipeline #2 mp2.release(); executor.shutdown(); success = true; }
Example #9
Source File: BaseRecorder.java From kurento-java with Apache License 2.0 | 4 votes |
protected void checkRecordingFile(String recordingFile, String browserName, Color[] expectedColors, long playTime, String expectedVideoCodec, String expectedAudioCodec) throws InterruptedException { // Checking continuity of the audio Timer gettingStats = new Timer(); final CountDownLatch errorContinuityAudiolatch = new CountDownLatch(1); waitForFileExists(recordingFile); MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, recordingFile).build(); WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build(); playerEp.connect(webRtcEp); // Playing the recording WebRtcTestPage checkPage = getPage(browserName); checkPage.setThresholdTime(checkPage.getThresholdTime() * 2); checkPage.subscribeEvents("playing"); checkPage.initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); playerEp.play(); // Assertions in recording final String messageAppend = "[played file with media pipeline]"; Assert.assertTrue( "Not received media in the recording (timeout waiting playing event) " + messageAppend, checkPage.waitForEvent("playing")); checkPage.activatePeerConnectionInboundStats("webRtcPeer.peerConnection"); gettingStats.schedule(new CheckAudioTimerTask(errorContinuityAudiolatch, checkPage), 100, 200); for (Color color : expectedColors) { Assert.assertTrue("The color of the recorded video should be " + color + " " + messageAppend, checkPage.similarColorAt(color, 50, 50)); } Assert.assertTrue("Not received EOS event in player", eosLatch.await(checkPage.getTimeout(), TimeUnit.SECONDS)); gettingStats.cancel(); double currentTime = checkPage.getCurrentTime(); Assert.assertTrue("Error in play time in the recorded video (expected: " + playTime + " sec, real: " + currentTime + " sec) " + 
messageAppend, checkPage.compare(playTime, currentTime)); Assert.assertTrue("Check audio. There were more than 2 seconds without receiving packets", errorContinuityAudiolatch.getCount() == 1); AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec); AssertMedia.assertDuration(recordingFile, TimeUnit.SECONDS.toMillis(playTime), TimeUnit.SECONDS.toMillis(checkPage.getThresholdTime())); mp.release(); }
Example #10
Source File: DispatcherPlayerTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test public void testDispatcherPlayer() throws Exception { // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm") .build(); PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/blue.webm").build(); WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build(); Dispatcher dispatcher = new Dispatcher.Builder(mp).build(); HubPort hubPort1 = new HubPort.Builder(dispatcher).build(); HubPort hubPort2 = new HubPort.Builder(dispatcher).build(); HubPort hubPort3 = new HubPort.Builder(dispatcher).build(); playerEp.connect(hubPort1); playerEp2.connect(hubPort3); hubPort2.connect(webRtcEp); dispatcher.connect(hubPort1, hubPort2); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp2.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); // Test execution getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); playerEp.play(); // Assertions Assert.assertTrue("Not received media (timeout waiting playing event)", getPage().waitForEvent("playing")); Assert.assertTrue("The color of the video should be red", getPage().similarColor(Color.RED)); Thread.sleep(5000); playerEp2.play(); dispatcher.connect(hubPort3, hubPort2); Assert.assertTrue("The color of the video should be blue", getPage().similarColor(Color.BLUE)); Assert.assertTrue("Not received EOS event in player", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); double currentTime = getPage().getCurrentTime(); Assert.assertTrue( "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)", getPage().compare(PLAYTIME, currentTime)); // Release Media Pipeline mp.release(); }
Example #11
Source File: RecorderPlayerDisconnectTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception { final CountDownLatch recorderLatch = new CountDownLatch(1); // Media Pipeline #1 MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerGreen = new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build(); String recordingFile = getRecordUrl(extension); RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile) .withMediaProfile(mediaProfileSpecType).build(); playerGreen.play(); recorderEp.record(); for (int i = 0; i < NUM_SWAPS; i++) { if (i % 2 == 0) { playerGreen.connect(recorderEp); } else { playerGreen.disconnect(recorderEp); } Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / NUM_SWAPS); } // Release Media Pipeline #1 saveGstreamerDot(mp); recorderEp.stopAndWait(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); Assert.assertTrue("Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); mp.release(); // Wait until file exists waitForFileExists(recordingFile); // Reloading browser getPage().reload(); // Media Pipeline #2 MediaPipeline mp2 = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build(); WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build(); playerEp2.connect(webRtcEp2); // Playing the recording getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp2.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); playerEp2.play(); // Assertions in recording final 
String messageAppend = "[played file with media pipeline]"; final int playtime = PLAYTIME; Assert.assertTrue( "Not received media in the recording (timeout waiting playing event) " + messageAppend, getPage().waitForEvent("playing")); for (Color color : EXPECTED_COLORS) { Assert.assertTrue("The color of the recorded video should be " + color + " " + messageAppend, getPage().similarColor(color)); } Assert.assertTrue("Not received EOS event in player", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); double currentTime = getPage().getCurrentTime(); Assert.assertTrue("Error in play time in the recorded video (expected: " + playtime + " sec, real: " + currentTime + " sec) " + messageAppend, getPage().compare(playtime, currentTime)); AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec); AssertMedia.assertDuration(recordingFile, TimeUnit.SECONDS.toMillis(playtime), TimeUnit.SECONDS.toMillis(getPage().getThresholdTime())); // Release Media Pipeline #2 mp2.release(); success = true; }
Example #12
Source File: RecorderPlayerTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception { // Media Pipeline #1 MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build(); WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build(); String recordingFile = getRecordUrl(extension); RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile) .withMediaProfile(mediaProfileSpecType).build(); playerEp.connect(webRtcEp1); playerEp.connect(recorderEp); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); // Test execution #1. Play the video while it is recorded launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec, recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME); // Wait for EOS Assert.assertTrue("No EOS event", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); // Release Media Pipeline #1 mp.release(); // Reloading browser getPage().reload(); // Media Pipeline #2 MediaPipeline mp2 = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build(); WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build(); playerEp2.connect(webRtcEp2); // Playing the recording launchBrowser(null, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec, recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME); // Release Media Pipeline #2 mp2.release(); success = true; }
Example #13
Source File: PlayerFaceOverlayTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test
public void testPlayerFaceOverlay() throws Exception {
  // Test data
  final int playTimeSeconds = 30;
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/filter/fiwarecut.mp4";
  final Color expectedColor = Color.RED;
  final int xExpectedColor = 420;
  final int yExpectedColor = 45;
  final String imgOverlayUrl = "http://" + getTestFilesHttpPath() + "/img/red-square.png";
  final float offsetXPercent = -0.2F;
  final float offsetYPercent = -1.2F;
  final float widthPercent = 1.6F;
  final float heightPercent = 1.6F;

  // Media Pipeline: player -> face overlay filter -> WebRTC viewer.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(mp).build();
  filter.setOverlayedImage(imgOverlayUrl, offsetXPercent, offsetYPercent, widthPercent,
      heightPercent);
  playerEp.connect(filter);
  filter.connect(webRtcEp);

  final CountDownLatch endOfStreamLatch = new CountDownLatch(1);
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      endOfStreamLatch.countDown();
    }
  });

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();

  // Assertions
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  // The overlaid red square must be visible at the expected coordinates.
  Assert.assertTrue("Color at coordinates " + xExpectedColor + "," + yExpectedColor + " must be "
      + expectedColor,
      getPage().similarColorAt(expectedColor, xExpectedColor, yExpectedColor));
  Assert.assertTrue("Not received EOS event in player",
      endOfStreamLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time (expected: " + playTimeSeconds + " sec, real: "
      + currentTime + " sec)", getPage().compare(playTimeSeconds, currentTime));

  // Release Media Pipeline
  mp.release();
}
Example #14
Source File: PlayerEndTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(PlayerOperation playerOperation) throws Exception { // Test data final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/format/small.webm"; final int guardTimeSeconds = 10; // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build(); WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build(); playerEp.connect(webRtcEp); // Subscription to EOS event final boolean[] eos = new boolean[1]; eos[0] = false; playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { log.error("EOS event received: {} {}", event.getType(), event.getTimestamp()); eos[0] = true; } }); // WebRTC in receive-only mode getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); playerEp.play(); Assert.assertTrue("Not received media (timeout waiting playing event)", getPage().waitForEvent("playing")); // Stop/release stream and wait x seconds switch (playerOperation) { case STOP: playerEp.stop(); break; case RELEASE: playerEp.release(); break; } Thread.sleep(TimeUnit.SECONDS.toMillis(guardTimeSeconds)); // Verify that EOS event has not being received Assert.assertFalse("EOS event has been received. " + "This should not be happenning because the stream has been stopped", eos[0]); // Release Media Pipeline mp.release(); }
Example #15
Source File: RepositoryRecorderTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test public void testRepositoryRecorder() throws Exception { final CountDownLatch recorderLatch = new CountDownLatch(1); // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/ball.webm").build(); WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build(); RepositoryItem repositoryItem = repository.createRepositoryItem(); RepositoryHttpRecorder recorder = repositoryItem.createRepositoryHttpRecorder(); RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recorder.getURL()).build(); playerEp.connect(webRtcEp1); playerEp.connect(recorderEp); final CountDownLatch eosLatch = new CountDownLatch(1); playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { eosLatch.countDown(); } }); // Test execution #1. Play the video while it is recorded launchBrowser(webRtcEp1, playerEp, recorderEp); // Wait for EOS Assert.assertTrue("Not received EOS event in player", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); // Release Media Pipeline #1 recorderEp.stopAndWait(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); Assert.assertTrue("Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); mp.release(); Thread.sleep(500); }
Example #16
Source File: EventTagTest.java From kurento-java with Apache License 2.0 | 4 votes |
@Test public void testEventTag() throws Exception { MediaPipeline mp = kurentoClient.createMediaPipeline(); final CountDownLatch eventReceived = new CountDownLatch(TAG_SIZE); PlayerEndpoint player = new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm") .build(); player.addTag("test_1", "value_1"); player.addTag("test_2", "value_2"); player.addTag("test_3", "value_3"); player.setSendTagsInEvents(true); player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { List<Tag> tags = event.getTags(); for (Tag tag : tags) { if (tag.getKey().compareTo("test_1") == 0) { if (tag.getValue().compareTo("value_1") == 0) { eventReceived.countDown(); } } else if (tag.getKey().compareTo("test_2") == 0) { if (tag.getValue().compareTo("value_2") == 0) { eventReceived.countDown(); } } else if (tag.getKey().compareTo("test_3") == 0) { if (tag.getValue().compareTo("value_3") == 0) { eventReceived.countDown(); } } } } }); player.play(); // Guard time to reproduce the whole video if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) { Assert.fail("Event not received"); } }
Example #17
Source File: PlayerEndpointTest.java From kurento-java with Apache License 2.0 | 3 votes |
@Test
public void testEventEndOfStream() throws InterruptedException {
  // Subscribe to EOS, start playback, and block until the event arrives.
  AsyncEventManager<EndOfStreamEvent> eosManager = new AsyncEventManager<>("EndOfStream event");
  player.addEndOfStreamListener(eosManager.getMediaEventListener());
  player.play();
  eosManager.waitForResult();
}
Example #18
Source File: FaceOverlayFilterAsyncTest.java From kurento-java with Apache License 2.0 | 3 votes |
/**
 * Test if a {@link FaceOverlayFilter} can be created in the KMS. The filter is pipelined with a
 * {@link PlayerEndpoint}, which feeds video to the filter. This test depends on the correct
 * behaviour of the player and its events.
 *
 * @throws InterruptedException
 */
@Test
public void testFaceOverlayFilter() throws InterruptedException {
  player.connect(overlayFilter);

  // Play through the filter; receiving EOS proves the pipeline ran end to end.
  AsyncEventManager<EndOfStreamEvent> eosManager = new AsyncEventManager<>("EndOfStream event");
  player.addEndOfStreamListener(eosManager.getMediaEventListener());
  player.play();
  eosManager.waitForResult();
}