Java Code Examples for com.google.android.exoplayer.util.Util#SDK_INT

The following examples show how to use com.google.android.exoplayer.util.Util#SDK_INT, ExoPlayer's cached copy of android.os.Build.VERSION.SDK_INT (the device's Android API level). Each example comes from an open-source project; its source file, project, and license are listed above the code.
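
Every example below follows the same shape: gate a call into a newer framework API behind a Util.SDK_INT check, fall back to a safe default otherwise, and keep the newer calls in @TargetApi-annotated helpers. Several examples call getUserCaptionStyleV19() and getUserCaptionFontScaleV19() without showing them; the following is a minimal, self-contained sketch of that pattern. The class name is illustrative, and the V19 helper bodies are an assumption based on the standard ExoPlayer demo, not code copied from any of the listed projects.

import android.annotation.TargetApi;
import android.content.Context;
import android.view.accessibility.CaptioningManager;

import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.util.Util;

public final class CaptionPreferences {

  /** Returns the user's preferred caption style, or the default style below API 19. */
  public static CaptionStyleCompat getUserCaptionStyle(Context context) {
    // CaptioningManager only exists from API 19, so fall back to the default style elsewhere.
    return Util.SDK_INT >= 19 ? getUserCaptionStyleV19(context) : CaptionStyleCompat.DEFAULT;
  }

  /** Returns the user's preferred caption font scale, or 1.0 below API 19. */
  public static float getUserCaptionFontScale(Context context) {
    return Util.SDK_INT >= 19 ? getUserCaptionFontScaleV19(context) : 1.0f;
  }

  @TargetApi(19)
  private static CaptionStyleCompat getUserCaptionStyleV19(Context context) {
    CaptioningManager captioningManager =
        (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
    return CaptionStyleCompat.createFromCaptionStyle(captioningManager.getUserStyle());
  }

  @TargetApi(19)
  private static float getUserCaptionFontScaleV19(Context context) {
    CaptioningManager captioningManager =
        (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
    return captioningManager.getFontScale();
  }

  private CaptionPreferences() {}
}

Isolating the API 19 calls in @TargetApi helpers suppresses Lint's NewApi warnings at the call sites, while the Util.SDK_INT guard ensures those methods are never reached on older devices.
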
Example 1
Source File: SubtitleView.java    From Exoplayer_VLC with Apache License 2.0
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
  final int widthSpec = MeasureSpec.getSize(widthMeasureSpec);

  if (computeMeasurements(widthSpec)) {
    final StaticLayout layout = this.layout;
    final int paddingX = getPaddingLeft() + getPaddingRight() + innerPaddingX * 2;
    final int height = layout.getHeight() + getPaddingTop() + getPaddingBottom();
    int width = 0;
    int lineCount = layout.getLineCount();
    for (int i = 0; i < lineCount; i++) {
      width = Math.max((int) Math.ceil(layout.getLineWidth(i)), width);
    }
    width += paddingX;
    setMeasuredDimension(width, height);
  } else if (Util.SDK_INT >= 11) {
    setTooSmallMeasureDimensionV11();
  } else {
    setMeasuredDimension(0, 0);
  }
}
 
Example 2
Source File: MediaCodecTrackRenderer.java    From Exoplayer_VLC with Apache License 2.0
private void flushCodec() throws ExoPlaybackException {
  codecHotswapTimeMs = -1;
  inputIndex = -1;
  outputIndex = -1;
  waitingForFirstSyncFrame = true;
  decodeOnlyPresentationTimestamps.clear();
  // Workaround for framework bugs.
  // See [Internal: b/8347958], [Internal: b/8578467], [Internal: b/8543366].
  if (Util.SDK_INT >= 18) {
    codec.flush();
  } else {
    releaseCodec();
    maybeInitCodec();
  }
  if (codecReconfigured && format != null) {
    // Any reconfiguration data that we send shortly before the flush may be discarded. We
    // avoid this issue by sending reconfiguration data following every flush.
    codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING;
  }
}
 
Example 3
Source File: VideoPlayerActivity.java    From droidkaigi2016 with Apache License 2.0
@Override
public void onStop() {
    super.onStop();
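    // From API 24 an activity can remain visible in multi-window mode after onPause, so this work is deferred to onStop.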
    if (Util.SDK_INT > 23) {
        onHidden();
    }
}
 
Example 4
Source File: CryptoInfo.java    From Exoplayer_VLC with Apache License 2.0
/**
 * @see android.media.MediaCodec.CryptoInfo#set(int, int[], int[], byte[], byte[], int)
 */
public void set(int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEncryptedData,
    byte[] key, byte[] iv, int mode) {
  this.numSubSamples = numSubSamples;
  this.numBytesOfClearData = numBytesOfClearData;
  this.numBytesOfEncryptedData = numBytesOfEncryptedData;
  this.key = key;
  this.iv = iv;
  this.mode = mode;
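  // android.media.MediaCodec.CryptoInfo only exists from API 16, the first release with MediaCodec.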
  if (Util.SDK_INT >= 16) {
    updateFrameworkCryptoInfoV16();
  }
}
 
Example 5
Source File: PlayerActivity.java    From Exoplayer_VLC with Apache License 2.0
private void configureSubtitleView() {
  CaptionStyleCompat captionStyle;
  float captionTextSize = getCaptionFontSize();
  if (Util.SDK_INT >= 19) {
    captionStyle = getUserCaptionStyleV19();
    captionTextSize *= getUserCaptionFontScaleV19();
  } else {
    captionStyle = CaptionStyleCompat.DEFAULT;
  }
  subtitleView.setStyle(captionStyle);
  subtitleView.setTextSize(captionTextSize);
}
 
Example 6
Source File: AudioTrack.java    From Exoplayer_VLC with Apache License 2.0
/**
 * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
 *
 * @param sessionId Audio track session identifier to re-use, or {@link #SESSION_ID_NOT_SET} to
 *     create a new one.
 * @return The new (or re-used) session identifier.
 */
public int initialize(int sessionId) throws InitializationException {
  // If we're asynchronously releasing a previous audio track then we block until it has been
  // released. This guarantees that we cannot end up in a state where we have multiple audio
  // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
  // the shared memory that's available for audio track buffers. This would in turn cause the
  // initialization of the audio track to fail.
  releasingConditionVariable.block();

  if (sessionId == SESSION_ID_NOT_SET) {
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM);
  } else {
    // Re-attach to the same audio session.
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
  }

  checkAudioTrackInitialized();
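  // API 19 adds AudioTimestamp, which the V19 variant presumably uses for more accurate position tracking.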
  if (Util.SDK_INT >= 19) {
    audioTrackUtil = new AudioTrackUtilV19(audioTrack);
  } else {
    audioTrackUtil = new AudioTrackUtil(audioTrack);
  }
  setVolume(volume);
  return audioTrack.getAudioSessionId();
}
 
Example 7
Source File: VideoPlayerView.java    From iview-android-tv with MIT License
public void configureSubtitleView() {
    CaptionStyleCompat captionStyle;
    float captionFontScale;
    if (Util.SDK_INT >= 19) {
        captionStyle = getUserCaptionStyleV19();
        captionFontScale = getUserCaptionFontScaleV19();
    } else {
        captionStyle = CaptionStyleCompat.DEFAULT;
        captionFontScale = 1.0f;
    }
    subtitleLayout.setStyle(captionStyle);
    subtitleLayout.setFontScale(captionFontScale);
}
 
Example 8
Source File: PlayerActivity.java    From Android-Example-HLS-ExoPlayer with Apache License 2.0
private void configureSubtitleView() {
    CaptionStyleCompat style;
    float fontScale;
    if (Util.SDK_INT >= 19) {
        style = getUserCaptionStyleV19();
        fontScale = getUserCaptionFontScaleV19();
    } else {
        style = CaptionStyleCompat.DEFAULT;
        fontScale = 1.0f;
    }
    subtitleLayout.setStyle(style);
    subtitleLayout.setFractionalTextSize(SubtitleLayout.DEFAULT_TEXT_SIZE_FRACTION * fontScale);
}
 
Example 9
Source File: PlayerActivity.java    From Android-Example-HLS-ExoPlayer with Apache License 2.0
@TargetApi(23)
private boolean requiresPermission(Uri uri) {
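    // Runtime permissions, and Context#checkSelfPermission, only exist from API 23.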
    return Util.SDK_INT >= 23
            && Util.isLocalFileUri(uri)
            && checkSelfPermission(permission.READ_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED;
}
 
Example 10
Source File: VideoPlayerActivity.java    From droidkaigi2016 with Apache License 2.0
@Override
public void onStart() {
    super.onStart();
    if (Util.SDK_INT > 23) {
        onShown();
    }
}
 
Example 11
Source File: VideoPlayerActivity.java    From droidkaigi2016 with Apache License 2.0
@Override
public void onResume() {
    super.onResume();
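    // Below API 24 (or when no player exists yet) the show/setup work happens here rather than in onStart.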
    if (Util.SDK_INT <= 23 || player == null) {
        onShown();
    }
}
 
Example 12
Source File: VideoPlayerActivity.java    From droidkaigi2016 with Apache License 2.0
@Override
public void onPause() {
    super.onPause();
    if (Util.SDK_INT <= 23) {
        onHidden();
    }
}
 
Example 13
Source File: MediaCodecUtil.java    From Exoplayer_VLC with Apache License 2.0
private static boolean isAdaptive(CodecCapabilities capabilities) {
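  // CodecCapabilities#isFeatureSupported and FEATURE_AdaptivePlayback were added in API 19.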
  if (Util.SDK_INT >= 19) {
    return isAdaptiveV19(capabilities);
  } else {
    return false;
  }
}
 
Example 14
Source File: MediaCodecTrackRenderer.java    From Exoplayer_VLC with Apache License 2.0
public DecoderInitializationException(MediaFormat mediaFormat, Throwable cause,
    String decoderName) {
  super("Decoder init failed: " + decoderName + ", " + mediaFormat, cause);
  this.decoderName = decoderName;
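  // MediaCodec.CodecException#getDiagnosticInfo is only available from API 21.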
  this.diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null;
}
 
Example 15
Source File: AudioCapabilities.java    From Exoplayer_VLC with Apache License 2.0
/** Returns whether the device supports playback of enhanced AC-3. */
public boolean supportsEAc3() {
  return Util.SDK_INT >= 21; // && supportedEncodings.contains(AudioFormat.ENCODING_E_AC3);
}
 
Example 16
Source File: CryptoInfo.java    From Exoplayer_VLC with Apache License 2.0
public CryptoInfo() {
  frameworkCryptoInfo = Util.SDK_INT >= 16 ? newFrameworkCryptoInfoV16() : null;
}
 
Example 17
Source File: DashRendererBuilder.java    From talk-android with MIT License
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
      mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 18
Source File: SmoothStreamingRendererBuilder.java    From ShareBox with Apache License 2.0
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
  if (canceled) {
    return;
  }

  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  // Check drm support if necessary.
  DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (manifest.protectionElement != null) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
          manifest.protectionElement.uuid, player.getPlaybackLooper(), drmCallback, null,
          player.getMainHandler(), player);
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newAudioInstance(),
      audioDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newTextInstance(),
      textDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 19
Source File: DashRendererBuilder.java    From ShareBox with Apache License 2.0
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}