Java Code Examples for org.kurento.client.MediaProfileSpecType

The following examples show how to use org.kurento.client.MediaProfileSpecType. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: openvidu   Source File: RecordingManager.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Hooks a newly published stream into a recording that is already active (or
 * starting) for its session.
 *
 * INDIVIDUAL output mode gets a dedicated RecorderEndpoint for the stream;
 * audio-only COMPOSED mode joins the publisher to the existing Composite.
 * If the session is not being recorded at all, an error is logged and the
 * call is a no-op.
 */
public void startOneIndividualStreamRecording(Session session, String recordingId, MediaProfileSpecType profile,
		Participant participant) {
	final String sessionId = session.getSessionId();
	Recording activeRecording = this.sessionsRecordings.get(sessionId);
	if (activeRecording == null) {
		// The recording may still be in its starting phase.
		activeRecording = this.sessionsRecordingsStarting.get(sessionId);
	}
	if (activeRecording == null) {
		log.error("Cannot start recording of new stream {}. Session {} is not being recorded",
				participant.getPublisherStreamId(), sessionId);
		return;
	}
	final io.openvidu.java.client.Recording.OutputMode outputMode = activeRecording.getOutputMode();
	if (io.openvidu.java.client.Recording.OutputMode.INDIVIDUAL.equals(outputMode)) {
		// One recorder per stream: spin up a new RecorderEndpoint for this publisher.
		log.info("Starting new RecorderEndpoint in session {} for new stream of participant {}",
				sessionId, participant.getParticipantPublicId());
		final CountDownLatch startedCountDown = new CountDownLatch(1);
		this.singleStreamRecordingService.startRecorderEndpointForPublisherEndpoint(session, recordingId, profile,
				participant, startedCountDown);
	} else if (io.openvidu.java.client.Recording.OutputMode.COMPOSED.equals(outputMode)
			&& !activeRecording.hasVideo()) {
		// Audio-only composed recording: wire the new publisher into the existing mixer.
		log.info("Joining PublisherEndpoint to existing Composite in session {} for new stream of participant {}",
				sessionId, participant.getParticipantPublicId());
		this.composedRecordingService.joinPublisherEndpointToComposite(session, recordingId, participant);
	}
}
 
Example 2
Source Project: openvidu   Source File: SingleStreamRecordingService.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Wires the publisher's media tracks into the recorder as dictated by the
 * recording profile: WEBM connects both tracks, the *_ONLY variants connect
 * a single track.
 *
 * @throws UnsupportedOperationException for any profile this service cannot
 *         record as a single stream
 */
private void connectAccordingToProfile(PublisherEndpoint publisherEndpoint, RecorderEndpoint recorder,
		MediaProfileSpecType profile) {
	switch (profile) {
		case WEBM_AUDIO_ONLY:
			publisherEndpoint.connect(recorder, MediaType.AUDIO);
			break;
		case WEBM_VIDEO_ONLY:
			publisherEndpoint.connect(recorder, MediaType.VIDEO);
			break;
		case WEBM:
			// Full recording: both tracks feed the recorder.
			publisherEndpoint.connect(recorder, MediaType.AUDIO);
			publisherEndpoint.connect(recorder, MediaType.VIDEO);
			break;
		default:
			throw new UnsupportedOperationException("Unsupported profile when single stream recording: " + profile);
	}
}
 
Example 3
/**
 * Maps the "mode" field of an incoming JSON message to a recording profile.
 * "audio-only" and "video-only" select the single-track WEBM variants; any
 * other value (including unknown modes) falls back to full WEBM.
 */
private MediaProfileSpecType getMediaProfileFromMessage(JsonObject jsonMessage) {
    final String mode = jsonMessage.get("mode").getAsString();
    switch (mode) {
      case "audio-only":
        return MediaProfileSpecType.WEBM_AUDIO_ONLY;
      case "video-only":
        return MediaProfileSpecType.WEBM_VIDEO_ONLY;
      default:
        return MediaProfileSpecType.WEBM;
    }
  }
 
Example 4
/**
 * Connects the WebRTC endpoint's tracks to the recorder according to the
 * chosen profile: WEBM connects both audio and video, the *_ONLY variants
 * connect a single track.
 *
 * @throws UnsupportedOperationException for any profile this tutorial does
 *         not handle
 */
private void connectAccordingToProfile(WebRtcEndpoint webRtcEndpoint, RecorderEndpoint recorder,
    MediaProfileSpecType profile) {
  switch (profile) {
    case WEBM_AUDIO_ONLY:
      webRtcEndpoint.connect(recorder, MediaType.AUDIO);
      break;
    case WEBM_VIDEO_ONLY:
      webRtcEndpoint.connect(recorder, MediaType.VIDEO);
      break;
    case WEBM:
      // Full recording: both tracks feed the recorder.
      webRtcEndpoint.connect(recorder, MediaType.AUDIO);
      webRtcEndpoint.connect(recorder, MediaType.VIDEO);
      break;
    default:
      throw new UnsupportedOperationException("Unsupported profile for this tutorial: " + profile);
  }
}
 
Example 5
Source Project: openvidu   Source File: CompositeWrapper.java    License: Apache License 2.0 5 votes vote down vote up
public CompositeWrapper(KurentoSession session, String path) {
	this.session = session;
	this.composite = new Composite.Builder(session.getPipeline()).build();
	this.recorderEndpoint = new RecorderEndpoint.Builder(composite.getMediaPipeline(), path)
			.withMediaProfile(MediaProfileSpecType.WEBM_AUDIO_ONLY).build();
	this.compositeToRecorderHubPort = new HubPort.Builder(composite).build();
	this.compositeToRecorderHubPort.connect(recorderEndpoint);
}
 
Example 6
Source Project: openmeetings   Source File: KTestStream.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Derives the recording profile from the "audio"/"video" boolean flags of
 * the client message: both set -> WEBM; video only -> WEBM_VIDEO_ONLY;
 * everything else (audio only, or neither) -> WEBM_AUDIO_ONLY.
 */
private static MediaProfileSpecType getProfile(JSONObject msg) {
	final boolean hasAudio = msg.getBoolean("audio");
	final boolean hasVideo = msg.getBoolean("video");
	if (hasAudio && hasVideo) {
		return MediaProfileSpecType.WEBM;
	}
	return hasVideo ? MediaProfileSpecType.WEBM_VIDEO_ONLY : MediaProfileSpecType.WEBM_AUDIO_ONLY;
}
 
Example 7
Source Project: openmeetings   Source File: TestSetupFlowMocked.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Handler test for the "record" / "iceCandidate" / "play" message flow.
 *
 * No Kurento server is contacted: the media pipeline and all endpoint
 * builders are replaced with mocks (PowerMock's whenNew intercepts the
 * Builder constructors), so only the handler's message dispatching runs.
 *
 * NOTE(review): there are no explicit assertions — the test passes as long
 * as no exception escapes the handler.
 */
@Test
public void testMsgTestRecord1() throws Exception {
	// Stub pipeline creation and WebRtcEndpoint construction.
	doReturn(mock(MediaPipeline.class)).when(client).createMediaPipeline(any(Transaction.class));
	WebRtcEndpoint.Builder builder = mock(WebRtcEndpoint.Builder.class);
	whenNew(WebRtcEndpoint.Builder.class).withArguments(any(MediaPipeline.class)).thenReturn(builder);
	doReturn(mock(WebRtcEndpoint.class)).when(builder).build();

	// Stub RecorderEndpoint construction; fluent builder methods must return the builder itself.
	RecorderEndpoint.Builder recBuilder = mock(RecorderEndpoint.Builder.class);
	whenNew(RecorderEndpoint.Builder.class).withArguments(any(MediaPipeline.class), anyString()).thenReturn(recBuilder);
	doReturn(recBuilder).when(recBuilder).stopOnEndOfStream();
	doReturn(recBuilder).when(recBuilder).withMediaProfile(any(MediaProfileSpecType.class));
	doReturn(mock(RecorderEndpoint.class)).when(recBuilder).build();

	WsClient c = new WsClient("sessionId", 0);
	// Send a "record" request for every audio/video flag combination.
	for (boolean audio : new boolean[] {true, false}) {
		for (boolean video : new boolean[] {true, false}) {
			JSONObject msg = new JSONObject(MSG_BASE.toString())
					.put("id", "record")
					.put("sdpOffer", "")
					.put("audio", audio)
					.put("video", video);
			handler.onMessage(c, msg);
		}
	}
	// Feed one ICE candidate message through the handler.
	JSONObject iceMsg = new JSONObject(MSG_BASE.toString())
			.put("id", "iceCandidate")
			.put(PARAM_CANDIDATE, new JSONObject()
					.put(PARAM_CANDIDATE, "candidate")
					.put("sdpMid", "sdpMid")
					.put("sdpMLineIndex", 1));
	handler.onMessage(c, iceMsg);
	// Stub PlayerEndpoint construction, then request playback of the recording.
	PlayerEndpoint.Builder playBuilder = mock(PlayerEndpoint.Builder.class);
	whenNew(PlayerEndpoint.Builder.class).withArguments(any(MediaPipeline.class), anyString()).thenReturn(playBuilder);
	doReturn(mock(PlayerEndpoint.class)).when(playBuilder).build();
	handler.onMessage(c, new JSONObject(MSG_BASE.toString())
			.put("id", "play")
			.put("sdpOffer", "sdpOffer"));
	testProcessor.destroy();
}
 
Example 8
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Records three clips in the same container (all WebM): red, green, red —
 * then delegates to doTest to verify codecs and the expected color sequence.
 */
public void doTestSameFormats(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/red.webm");
  final String secondUrl = getPlayerUrl("/video/10sec/green.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/red.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.RED, Color.GREEN, Color.RED };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 9
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Records clips from different containers (MKV and WebM) in sequence and
 * delegates to doTest to verify codecs and the expected color sequence.
 */
public void doTestDifferentFormats(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/ball.mkv");
  final String secondUrl = getPlayerUrl("/video/10sec/white.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/ball.mkv");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, Color.WHITE, Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 10
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Records clips whose frame rates differ (25fps ball vs. blue clip) and
 * delegates to doTest to verify codecs and the expected color sequence.
 */
public void doTestFrameRateDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/ball25fps.webm");
  final String secondUrl = getPlayerUrl("/video/10sec/blue.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/ball25fps.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, Color.BLUE, Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 11
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Records clips whose frame rate and frame size both differ (640x360 vs.
 * full-size RGB clips) and delegates to doTest for verification.
 */
public void doTestFrameRateAndFrameSizeDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/15sec/rgb640x360.webm");
  final String secondUrl = getPlayerUrl("/video/15sec/rgb.webm");
  final String thirdUrl = getPlayerUrl("/video/15sec/rgb640x360.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.RED, Color.GREEN, Color.RED };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 12
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Records clips with different frame sizes (sintel WebM vs. 640x360 MP4)
 * and delegates to doTest for codec/color verification. The middle segment's
 * expected color is an approximate RGB sample rather than a named constant.
 */
public void doTestFrameSizeDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/format/sintel.webm");
  final String secondUrl = getPlayerUrl("/video/format/chrome640x360.mp4");
  final String thirdUrl = getPlayerUrl("/video/format/sintel.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, new Color(150, 50, 50), Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 13
/**
 * Records three same-container WebM clips (red, green, red) and delegates
 * to doTest to verify codecs and the expected color sequence.
 */
public void doTestSameFormats(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/red.webm");
  final String secondUrl = getPlayerUrl("/video/10sec/green.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/red.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.RED, Color.GREEN, Color.RED };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 14
/**
 * Records clips from mixed containers (MKV and WebM) and delegates to
 * doTest to verify codecs and the expected color sequence.
 */
public void doTestDifferentFormats(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/ball.mkv");
  final String secondUrl = getPlayerUrl("/video/10sec/white.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/ball.mkv");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, Color.WHITE, Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 15
/**
 * Records clips whose frame rates differ and delegates to doTest to verify
 * codecs and the expected color sequence.
 */
public void doTestFrameRateDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/10sec/ball25fps.webm");
  final String secondUrl = getPlayerUrl("/video/10sec/blue.webm");
  final String thirdUrl = getPlayerUrl("/video/10sec/ball25fps.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, Color.BLUE, Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 16
/**
 * Records MOV clips whose frame rate and frame size both differ and
 * delegates to doTest for codec/color verification.
 */
public void doTestFrameRateAndFrameSizeDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/15sec/rgb640x360.mov");
  final String secondUrl = getPlayerUrl("/video/15sec/rgb.mov");
  final String thirdUrl = getPlayerUrl("/video/15sec/rgb640x360.mov");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.RED, Color.GREEN, Color.RED };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 17
/**
 * Records clips with different frame sizes (sintel WebM vs. 640x360 MP4)
 * and delegates to doTest for codec/color verification. The middle segment
 * is checked against an approximate RGB sample.
 */
public void doTestFrameSizeDifferent(MediaProfileSpecType mediaProfileSpecType,
    String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception {
  final String firstUrl = getPlayerUrl("/video/format/sintel.webm");
  final String secondUrl = getPlayerUrl("/video/format/chrome640x360.mp4");
  final String thirdUrl = getPlayerUrl("/video/format/sintel.webm");
  String[] mediaUrls = { firstUrl, secondUrl, thirdUrl };
  Color[] expectedColors = { Color.BLACK, new Color(150, 50, 50), Color.BLACK };

  doTest(mediaProfileSpecType, expectedVideoCodec, expectedAudioCodec, extension, mediaUrls,
      expectedColors);
}
 
Example 18
Source Project: openvidu   Source File: SingleStreamRecordingService.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Chooses the WEBM recording profile for a participant's stream by
 * intersecting the tracks actually published (audio/video, read from the
 * publisher's MediaOptions) with the tracks requested in RecordingProperties.
 *
 * @return the matching profile: WEBM, WEBM_AUDIO_ONLY or WEBM_VIDEO_ONLY
 * @throws OpenViduException when the requested tracks are incompatible with
 *         the tracks the stream actually carries, or the stream has no
 *         tracks at all
 */
private MediaProfileSpecType generateMediaProfile(RecordingProperties properties, Participant participant)
		throws OpenViduException {

	KurentoParticipant kParticipant = (KurentoParticipant) participant;
	MediaProfileSpecType profile = null;

	// What the stream actually carries vs. what the recording was asked for.
	boolean streamHasAudio = kParticipant.getPublisher().getMediaOptions().hasAudio();
	boolean streamHasVideo = kParticipant.getPublisher().getMediaOptions().hasVideo();
	boolean propertiesHasAudio = properties.hasAudio();
	boolean propertiesHasVideo = properties.hasVideo();

	if (streamHasAudio) {
		if (streamHasVideo) {
			// Stream has both audio and video tracks

			if (propertiesHasAudio) {
				if (propertiesHasVideo) {
					profile = MediaProfileSpecType.WEBM;
				} else {
					profile = MediaProfileSpecType.WEBM_AUDIO_ONLY;
				}
			} else {
				// NOTE(review): if properties requested neither audio nor video this
				// still yields VIDEO_ONLY — presumably that combination is rejected
				// upstream; confirm.
				profile = MediaProfileSpecType.WEBM_VIDEO_ONLY;
			}
		} else {
			// Stream has audio track only

			if (propertiesHasAudio) {
				profile = MediaProfileSpecType.WEBM_AUDIO_ONLY;
			} else {
				// ERROR: RecordingProperties set to video only but there's no video track
				throw new OpenViduException(
						Code.MEDIA_TYPE_STREAM_INCOMPATIBLE_WITH_RECORDING_PROPERTIES_ERROR_CODE,
						"RecordingProperties set to \"hasAudio(false)\" but stream is audio-only");
			}
		}
	} else if (streamHasVideo) {
		// Stream has video track only

		if (propertiesHasVideo) {
			profile = MediaProfileSpecType.WEBM_VIDEO_ONLY;
		} else {
			// ERROR: RecordingProperties set to audio only but there's no audio track
			throw new OpenViduException(Code.MEDIA_TYPE_STREAM_INCOMPATIBLE_WITH_RECORDING_PROPERTIES_ERROR_CODE,
					"RecordingProperties set to \"hasVideo(false)\" but stream is video-only");
		}
	} else {
		// ERROR: Stream has no track at all. This branch should never be reachable
		throw new OpenViduException(Code.MEDIA_TYPE_STREAM_INCOMPATIBLE_WITH_RECORDING_PROPERTIES_ERROR_CODE,
				"Stream has no track at all. Cannot be recorded");
	}
	return profile;
}
 
Example 19
Source Project: openmeetings   Source File: KTestStream.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Sets up a self-test media loop for one websocket client: the client's
 * WebRTC stream is looped back to itself and simultaneously recorded for
 * 5 seconds, with per-second "recording" progress messages pushed to the
 * client and a "recStopped" message once the recorder stops.
 */
public KTestStream(IWsClient c, JSONObject msg, MediaPipeline pipeline) {
	super(null, c.getUid());
	this.pipeline = pipeline;
	webRtcEndpoint = createWebRtcEndpoint(pipeline);
	// Loopback: the client sees its own outgoing stream.
	webRtcEndpoint.connect(webRtcEndpoint);

	// Profile is derived from the "audio"/"video" flags in the client message.
	MediaProfileSpecType profile = getProfile(msg);
	initRecPath();
	recorder = createRecorderEndpoint(pipeline, recPath, profile);

	// Once recording actually starts: push a progress tick every second and
	// schedule an automatic stop after 5 seconds.
	recorder.addRecordingListener(evt -> {
			recTime = 0;
			recHandle = scheduler.scheduleAtFixedRate(
					() -> WebSocketHelper.sendClient(c, newTestKurentoMsg().put("id", "recording").put("time", recTime++))
					, 0, 1, TimeUnit.SECONDS);
			scheduler.schedule(() -> {
					recorder.stop();
					recHandle.cancel(true);
				}, 5, TimeUnit.SECONDS);
		});
	// When the recorder stops: notify the client and release recorder resources.
	recorder.addStoppedListener(evt -> {
			WebSocketHelper.sendClient(c, newTestKurentoMsg().put("id", "recStopped"));
			releaseRecorder();
		});
	// Wire only the tracks the profile asks for into the recorder.
	switch (profile) {
		case WEBM:
			webRtcEndpoint.connect(recorder, MediaType.AUDIO);
			webRtcEndpoint.connect(recorder, MediaType.VIDEO);
			break;
		case WEBM_AUDIO_ONLY:
			webRtcEndpoint.connect(recorder, MediaType.AUDIO);
			break;
		case WEBM_VIDEO_ONLY:
			webRtcEndpoint.connect(recorder, MediaType.VIDEO);
			break;
		default:
			//no-op
			break;
	}

	// Standard WebRTC negotiation: answer the client's SDP offer and gather ICE.
	String sdpOffer = msg.getString("sdpOffer");
	String sdpAnswer = webRtcEndpoint.processOffer(sdpOffer);

	addIceListener(c);

	WebSocketHelper.sendClient(c, newTestKurentoMsg()
			.put("id", "startResponse")
			.put("sdpAnswer", sdpAnswer));
	webRtcEndpoint.gatherCandidates();
	// Start recording asynchronously; on failure the client is informed.
	recorder.record(new Continuation<Void>() {
		@Override
		public void onSuccess(Void result) throws Exception {
			log.info("Recording started successfully");
		}

		@Override
		public void onError(Throwable cause) throws Exception {
			sendError(c, "Failed to start recording");
			log.error("Failed to start recording", cause);
		}
	});
}
 
Example 20
Source Project: openmeetings   Source File: AbstractStream.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Creates a RecorderEndpoint on the given pipeline that writes to
 * {@code path} with the supplied media profile and stops automatically when
 * the incoming stream ends.
 */
public RecorderEndpoint createRecorderEndpoint(MediaPipeline pipeline, String path, MediaProfileSpecType profile) {
	RecorderEndpoint.Builder recorderBuilder = new RecorderEndpoint.Builder(pipeline, path)
			.withMediaProfile(profile)
			.stopOnEndOfStream();
	return recorderBuilder.build();
}
 
Example 21
Source Project: openmeetings   Source File: KStream.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Starts (or restarts) broadcasting this stream: validates the SDP offer
 * against the client's granted activities, derives the stream type and WEBM
 * recording profile, creates the outgoing endpoint with flow-state watchdogs,
 * kicks off recording if the room is recording, and notifies the room.
 *
 * @return this stream, for chaining; returned unchanged (without starting)
 *         when the offer requests media the client has no rights for
 */
public KStream startBroadcast(final StreamProcessor processor, final StreamDesc sd, final String sdpOffer) {
	// Restarting: drop any previous outgoing media first.
	if (outgoingMedia != null) {
		release(processor, false);
	}
	final boolean hasAudio = sd.hasActivity(Activity.AUDIO);
	final boolean hasVideo = sd.hasActivity(Activity.VIDEO);
	final boolean hasScreen = sd.hasActivity(Activity.SCREEN);
	// Reject offers that include media lines the client is not allowed to send.
	if ((sdpOffer.indexOf("m=audio") > -1 && !hasAudio)
			|| (sdpOffer.indexOf("m=video") > -1 && !hasVideo && StreamType.SCREEN != streamType))
	{
		log.warn("Broadcast started without enough rights");
		return this;
	}
	// Classify the stream from its activities (screen share overrides).
	if (StreamType.SCREEN == streamType) {
		type = Type.SCREEN;
	} else {
		if (hasAudio && hasVideo) {
			type = Type.AUDIO_VIDEO;
		} else if (hasVideo) {
			type = Type.VIDEO_ONLY;
		} else {
			type = Type.AUDIO_ONLY;
		}
	}
	// Pick the recording profile that matches the stream type; screen shares
	// are recorded video-only.
	switch (type) {
		case AUDIO_VIDEO:
			profile = MediaProfileSpecType.WEBM;
			break;
		case AUDIO_ONLY:
			profile = MediaProfileSpecType.WEBM_AUDIO_ONLY;
			break;
		case SCREEN:
		case VIDEO_ONLY:
		default:
			profile = MediaProfileSpecType.WEBM_VIDEO_ONLY;
			break;
	}
	outgoingMedia = createEndpoint(processor, sd.getSid(), sd.getUid());
	outgoingMedia.addMediaSessionTerminatedListener(evt -> log.warn("Media stream terminated {}", sd));
	// Watchdog: when media stops flowing out, schedule dropping this stream
	// after a timeout; cancel that if flow resumes in time.
	outgoingMedia.addMediaFlowOutStateChangeListener(evt -> {
		log.info("Media Flow STATE :: {}, type {}, evt {}", evt.getState(), evt.getType(), evt.getMediaType());
		switch (evt.getState()) {
			case NOT_FLOWING:
				log.warn("FlowOut Future is created");
				flowoutFuture = Optional.of(new CompletableFuture<>().completeAsync(() -> {
					log.warn("KStream will be dropped {}", sd);
					if (StreamType.SCREEN == streamType) {
						processor.doStopSharing(sid, uid);
					}
					stopBroadcast();
					return null;
				}, delayedExecutor(getFlowoutTimeout(), TimeUnit.SECONDS)));
				break;
			case FLOWING:
				flowoutFuture.ifPresent(f -> {
					log.warn("FlowOut Future is canceled");
					f.cancel(true);
					flowoutFuture = Optional.empty();
				});
				break;
		}
	});
	outgoingMedia.addMediaFlowInStateChangeListener(evt -> log.warn("Media FlowIn :: {}", evt));
	addListener(processor, sd.getSid(), sd.getUid(), sdpOffer);
	// Join an in-progress room recording immediately.
	if (room.isRecording()) {
		startRecord(processor);
	}
	Client c = sd.getClient();
	WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
	// Announce the new stream to everyone else in the room.
	if (hasAudio || hasVideo || hasScreen) {
		WebSocketHelper.sendRoomOthers(room.getRoomId(), c.getUid(), newKurentoMsg()
				.put("id", "newStream")
				.put(PARAM_ICE, processor.getHandler().getTurnServers(c))
				.put("stream", sd.toJson()));
	}
	return this;
}
 
Example 22
Source Project: openmeetings   Source File: KStream.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * @return the media profile selected for recording this stream
 */
public MediaProfileSpecType getProfile() {
	return this.profile;
}
 
Example 23
Source Project: kurento-java   Source File: RecorderPlayerTest.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Records a played WebM clip and verifies the recording by playing it back.
 *
 * Phase 1 (pipeline #1): a PlayerEndpoint feeds both a browser WebRTC view
 * and a RecorderEndpoint until end-of-stream. Phase 2 (pipeline #2): a fresh
 * pipeline plays the recorded file back to the browser; launchBrowser
 * asserts codecs and expected color in both phases.
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {

  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build();
  WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);

  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  // The player feeds both the browser view and the recorder in parallel.
  playerEp.connect(webRtcEp1);

  playerEp.connect(recorderEp);

  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });

  // Test execution #1. Play the video while it is recorded
  launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME);

  // Wait for EOS
  Assert.assertTrue("No EOS event", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  // Release Media Pipeline #1
  mp.release();

  // Reloading browser
  getPage().reload();

  // Media Pipeline #2
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recording
  launchBrowser(null, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME);

  // Release Media Pipeline #2
  mp2.release();

  success = true;
}
 
Example 24
Source Project: kurento-java   Source File: RecorderSwitchWebrtcTest.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Records a single file while switching the recorder's source between three
 * WebRTC senders (red -> green -> blue browsers), then verifies the recorded
 * file shows all three colors for the accumulated playtime.
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution: three browsers each send their stream into the pipeline.
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  // Start recording from the red sender; connection setup time is measured
  // so the expected playtime can be adjusted below.
  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // green
  webRtcEpGreen.connect(recorderEp);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // blue
  webRtcEpBlue.connect(recorderEp);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));
  long webrtcBlueConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  // Stop the recorder and wait for it to flush; count down on error too so
  // the test fails on the timeout assertion rather than hanging.
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER3).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Reloading browser
  getPage(BROWSER3).close();

  // Expected duration includes the measured connection setup times.
  long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds(webrtcRedConnectionTime + webrtcGreenConnectionTime + webrtcBlueConnectionTime);

  checkRecordingFile(recordingFile, BROWSER4, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 25
Source Project: kurento-java   Source File: RecorderFaceOverlayTest.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Records a played clip through a FaceOverlayFilter (red square overlaid on
 * detected faces), then plays the recording back and checks that the overlay
 * color appears at the expected coordinates.
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {

  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/filter/fiwarecut.mp4")).build();
  WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();

  // Overlay image is placed relative to each detected face (offsets/scales).
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(mp).build();
  filter.setOverlayedImage("http://" + getTestFilesHttpPath() + "/img/red-square.png", -0.2F,
      -1.2F, 1.6F, 1.6F);

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  // player -> filter -> (browser view + recorder)
  playerEp.connect(filter);
  filter.connect(webRtcEp1);
  filter.connect(recorderEp);

  // Test execution #1. Play and record
  getPage().setThresholdTime(THRESHOLD);
  launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, EXPECTED_COLOR_X, EXPECTED_COLOR_Y, PLAYTIME);

  // Release Media Pipeline #1
  mp.release();

  // Reloading browser
  getPage().reload();

  // Media Pipeline #2
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recording
  // NOTE(review): this passes mp, which was released above — the sibling
  // RecorderPlayerTest passes null for the playback phase; confirm whether
  // mp2 (the live pipeline) or null is intended here.
  launchBrowser(mp, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, EXPECTED_COLOR_X, EXPECTED_COLOR_Y, PLAYTIME);

  // Release Media Pipeline #2
  mp2.release();

  success = true;
}
 
Example 26
/**
 * Records a browser's loopback WebRTC stream into a file path whose parent
 * directory does not exist yet (a timestamped subdirectory is injected into
 * the record URL), then verifies the recorded file's codecs.
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {

  final CountDownLatch recorderLatch = new CountDownLatch(1);

  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);

  // Inject a timestamped, not-yet-existing directory into the target path.
  recordingFile = recordingFile.replace(getSimpleTestName(),
      new Date().getTime() + File.separator + getSimpleTestName());

  log.debug("The path non existing is {} ", recordingFile);

  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  // Loopback so the browser can confirm media is flowing, plus the recorder tap.
  webRtcEp.connect(webRtcEp);
  webRtcEp.connect(recorderEp);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
  recorderEp.record();

  // Wait until event playing in the remote stream
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  Thread.sleep(SECONDS.toMillis(PLAYTIME));

  // Stop and flush; count down on error too so the await below can fail fast.
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  // Wait until file exists
  waitForFileExists(recordingFile);

  AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec);
  mp.release();
}
 
Example 27
Source Project: kurento-java   Source File: RecorderSwitchPlayerTest.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Records one file while sequentially switching the recorder's source
 * between several PlayerEndpoints, then verifies the recorded file shows the
 * expected color sequence and codecs. Pipeline errors are captured via a
 * latch and asserted at the end.
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String[] mediaUrls, Color[] expectedColors)
        throws Exception {

  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  // Any pipeline error trips the latch; asserted at the bottom of the test.
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  int numPlayers = mediaUrls.length;
  PlayerEndpoint[] players = new PlayerEndpoint[numPlayers];

  for (int i = 0; i < numPlayers; i++) {
    players[i] = new PlayerEndpoint.Builder(mp, mediaUrls[i]).build();
  }

  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);

  // Each player in turn becomes the source of both the browser view and the
  // recorder; recording starts once the first player is confirmed playing.
  boolean startRecord = false;
  for (int i = 0; i < numPlayers; i++) {
    players[i].connect(webRtcEp);
    players[i].connect(recorderEp);
    players[i].play();

    if (!startRecord) {

      Assert.assertTrue("Not received media (timeout waiting playing event)",
          getPage().waitForEvent("playing"));
      recorderEp.record();
      startRecord = true;
    }

    waitSeconds(PLAYTIME / numPlayers);
  }

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  // Stop and flush; count down on error too so the await can fail fast.
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  mp.release();

  // Latch count still 1 means no pipeline error was ever reported.
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Reloading browser
  getPage().reload();

  checkRecordingFile(recordingFile, "browser", expectedColors, PLAYTIME, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 28
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Media Pipeline #1: a WebRTC publisher and a file PlayerEndpoint alternately
  // feed a PassThrough element whose single output is recorded. The recording is
  // then verified for duration, colors and codecs.
  getPage(BROWSER2).close();
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  final CountDownLatch pipelineErrorLatch = new CountDownLatch(1);

  // Capture any asynchronous pipeline error so the final assertion can report it.
  pipeline.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      pipelineErrorLatch.countDown();
    }
  });

  WebRtcEndpoint redWebRtc = new WebRtcEndpoint.Builder(pipeline).build();
  PlayerEndpoint filePlayer = new PlayerEndpoint.Builder(pipeline, mediaUrlPlayer).build();

  String recordingUrl = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, recordingUrl)
      .withMediaProfile(mediaProfileSpecType).build();

  // The recorder always consumes from the relay; only the relay's source changes.
  PassThrough relay = new PassThrough.Builder(pipeline).build();
  relay.connect(recorder);

  // Test execution: each source feeds the recorder for one slice of PLAYTIME.
  final long sliceMillis = TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER;
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long sliceStart = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(redWebRtc, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  redWebRtc.connect(relay);
  recorder.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long redConnectionMillis = System.currentTimeMillis() - sliceStart;
  Thread.sleep(sliceMillis);

  // Second slice: switch the relay's source to the file player.
  sliceStart = System.currentTimeMillis();

  filePlayer.play();
  filePlayer.connect(relay);
  long playerConnectionMillis = System.currentTimeMillis() - sliceStart;
  Thread.sleep(sliceMillis);

  // Third slice: switch back to the WebRTC publisher.
  redWebRtc.connect(relay);
  Thread.sleep(sliceMillis);

  // Release Media Pipeline #1
  saveGstreamerDot(pipeline);

  final CountDownLatch stopLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  pipeline.release();

  // Latch still at 1 means no ErrorEvent fired during the whole run.
  Assert.assertTrue(msgError, pipelineErrorLatch.getCount() == 1);

  // Expected duration: nominal play time plus the measured connection delays
  // (the red publisher occupies two slices, hence the factor of 2).
  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * redConnectionMillis) + playerConnectionMillis);

  checkRecordingFile(recordingUrl, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 29
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1: two WebRTC publishers (red in BROWSER1, green in BROWSER2)
  // alternately feed a PassThrough element whose output is recorded. The recording
  // is then checked for expected colors, duration and codecs.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

  // Capture any asynchronous pipeline error so the final assertion can report it.
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // The recorder always consumes from the PassThrough; only its source changes.
  PassThrough passThrough = new PassThrough.Builder(mp).build();
  passThrough.connect(recorderEp);

  // Test execution: each publisher feeds the recorder for one slice of PLAYTIME.
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(passThrough);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  // Second slice: switch the PassThrough source to the green publisher.
  webRtcEpGreen.connect(passThrough);

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Third slice: switch back to the red publisher. (A previous dead assignment to
  // startWebrtc here was removed: its value was never read again.)
  webRtcEpRed.connect(passThrough);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Latch still at 1 means no ErrorEvent fired during the whole run.
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Expected duration: nominal play time plus the measured connection delays
  // (the red publisher occupies two slices, hence the factor of 2).
  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
 
Example 30
Source Project: kurento-java   Source File: RecorderPlayerDisconnectTest.java    License: Apache License 2.0 4 votes vote down vote up
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Records a green video while repeatedly connecting/disconnecting the player
  // from the recorder, then plays the resulting file back through a second
  // pipeline and verifies colors, duration and codecs.

  final CountDownLatch stopLatch = new CountDownLatch(1);

  // Media Pipeline #1
  MediaPipeline recordingPipeline = kurentoClient.createMediaPipeline();
  PlayerEndpoint greenPlayer =
      new PlayerEndpoint.Builder(recordingPipeline, getPlayerUrl("/video/10sec/green.webm")).build();

  String recordingUrl = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(recordingPipeline, recordingUrl)
      .withMediaProfile(mediaProfileSpecType).build();

  greenPlayer.play();
  recorder.record();
  // Alternate connect/disconnect every interval while the recorder keeps running.
  final long swapIntervalMillis = TimeUnit.SECONDS.toMillis(PLAYTIME) / NUM_SWAPS;
  for (int swap = 0; swap < NUM_SWAPS; swap++) {
    boolean shouldConnect = (swap % 2 == 0);
    if (shouldConnect) {
      greenPlayer.connect(recorder);
    } else {
      greenPlayer.disconnect(recorder);
    }

    Thread.sleep(swapIntervalMillis);
  }

  // Release Media Pipeline #1
  saveGstreamerDot(recordingPipeline);

  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      stopLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      stopLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      stopLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  recordingPipeline.release();

  // Wait until file exists
  waitForFileExists(recordingUrl);

  // Reloading browser
  getPage().reload();

  // Media Pipeline #2: play the recording back to the browser for verification.
  MediaPipeline playbackPipeline = kurentoClient.createMediaPipeline();
  PlayerEndpoint playbackPlayer = new PlayerEndpoint.Builder(playbackPipeline, recordingUrl).build();
  WebRtcEndpoint playbackWebRtc = new WebRtcEndpoint.Builder(playbackPipeline).build();
  playbackPlayer.connect(playbackWebRtc);

  // Playing the recording
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(playbackWebRtc, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  final CountDownLatch eosLatch = new CountDownLatch(1);
  playbackPlayer.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });
  playbackPlayer.play();

  // Assertions in recording
  final String messageAppend = "[played file with media pipeline]";
  final int playtime = PLAYTIME;

  Assert.assertTrue(
      "Not received media in the recording (timeout waiting playing event) " + messageAppend,
      getPage().waitForEvent("playing"));
  for (Color color : EXPECTED_COLORS) {
    Assert.assertTrue("The color of the recorded video should be " + color + " " + messageAppend,
        getPage().similarColor(color));
  }
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time in the recorded video (expected: " + playtime
      + " sec, real: " + currentTime + " sec) " + messageAppend,
      getPage().compare(playtime, currentTime));

  AssertMedia.assertCodecs(recordingUrl, expectedVideoCodec, expectedAudioCodec);
  AssertMedia.assertDuration(recordingUrl, TimeUnit.SECONDS.toMillis(playtime),
      TimeUnit.SECONDS.toMillis(getPage().getThresholdTime()));

  // Release Media Pipeline #2
  playbackPipeline.release();

  success = true;
}