Java Code Examples for com.google.android.exoplayer2.C#TRACK_TYPE_AUDIO

The following examples show how to use com.google.android.exoplayer2.C#TRACK_TYPE_AUDIO. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: MimeTypes.java    From TelePlus-Android with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Maps a MIME type to the {@link C}{@code .TRACK_TYPE_*} constant it corresponds to.
 * Returns {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be
 * established.
 *
 * @param mimeType The MIME type, or null.
 * @return The corresponding {@link C}{@code .TRACK_TYPE_*} constant.
 */
public static int getTrackType(@Nullable String mimeType) {
  if (TextUtils.isEmpty(mimeType)) {
    return C.TRACK_TYPE_UNKNOWN;
  }
  if (isAudio(mimeType)) {
    return C.TRACK_TYPE_AUDIO;
  }
  if (isVideo(mimeType)) {
    return C.TRACK_TYPE_VIDEO;
  }
  // Subtitle/caption container formats that aren't covered by isText.
  boolean isTextApplicationType =
      APPLICATION_CEA608.equals(mimeType)
          || APPLICATION_CEA708.equals(mimeType)
          || APPLICATION_MP4CEA608.equals(mimeType)
          || APPLICATION_SUBRIP.equals(mimeType)
          || APPLICATION_TTML.equals(mimeType)
          || APPLICATION_TX3G.equals(mimeType)
          || APPLICATION_MP4VTT.equals(mimeType)
          || APPLICATION_RAWCC.equals(mimeType)
          || APPLICATION_VOBSUB.equals(mimeType)
          || APPLICATION_PGS.equals(mimeType)
          || APPLICATION_DVBSUBS.equals(mimeType);
  if (isText(mimeType) || isTextApplicationType) {
    return C.TRACK_TYPE_TEXT;
  }
  // Side-data streams carried as metadata tracks.
  boolean isMetadataType =
      APPLICATION_ID3.equals(mimeType)
          || APPLICATION_EMSG.equals(mimeType)
          || APPLICATION_SCTE35.equals(mimeType)
          || APPLICATION_CAMERA_MOTION.equals(mimeType);
  if (isMetadataType) {
    return C.TRACK_TYPE_METADATA;
  }
  // Fall back to any application-registered custom MIME type mappings.
  return getTrackTypeForCustomMimeType(mimeType);
}
 
Example 2
Source File: MediaCodecAudioRenderer.java    From MediaSDK with Apache License 2.0 6 votes vote down vote up
/**
 * @param context A context.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
 *     content is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
 *     initialization fails. This may result in using a decoder that is slower/less efficient than
 *     the primary decoder.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param audioSink The sink to which audio will be output.
 * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
 *     AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
 *     {@link MediaSource} factories.
 */
@Deprecated
public MediaCodecAudioRenderer(
    Context context,
    MediaCodecSelector mediaCodecSelector,
    @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys,
    boolean enableDecoderFallback,
    @Nullable Handler eventHandler,
    @Nullable AudioRendererEventListener eventListener,
    AudioSink audioSink) {
  super(
      C.TRACK_TYPE_AUDIO,
      mediaCodecSelector,
      drmSessionManager,
      playClearSamplesWithoutKeys,
      enableDecoderFallback,
      /* assumedMinimumCodecOperatingRate= */ 44100);
  // Store the application context rather than the passed context, presumably so no
  // shorter-lived context (e.g. an Activity) is retained — TODO confirm.
  this.context = context.getApplicationContext();
  this.audioSink = audioSink;
  // No input has been received yet.
  lastInputTimeUs = C.TIME_UNSET;
  pendingStreamChangeTimesUs = new long[MAX_PENDING_STREAM_CHANGE_COUNT];
  eventDispatcher = new EventDispatcher(eventHandler, eventListener);
  // Register for sink callbacks; done last, after all fields are initialized.
  audioSink.setListener(new AudioSinkListener());
}
 
Example 3
Source File: DownloadHelper.java    From MediaSDK with Apache License 2.0 6 votes vote down vote up
/**
 * Convenience method to add selections of tracks for all specified audio languages. If an audio
 * track in one of the specified languages is not available, the default fallback audio track is
 * used instead. Must not be called until after preparation completes.
 *
 * @param languages A list of audio languages for which tracks should be added to the download
 *     selection, as IETF BCP 47 conformant tags.
 */
public void addAudioLanguagesToSelection(String... languages) {
  assertPreparedWithMedia();
  for (int period = 0; period < mappedTrackInfos.length; period++) {
    DefaultTrackSelector.ParametersBuilder builder =
        DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT.buildUpon();
    MappedTrackInfo trackInfo = mappedTrackInfos[period];
    int rendererCount = trackInfo.getRendererCount();
    // Disable every renderer that isn't an audio renderer.
    for (int renderer = 0; renderer < rendererCount; renderer++) {
      boolean isAudioRenderer = trackInfo.getRendererType(renderer) == C.TRACK_TYPE_AUDIO;
      if (!isAudioRenderer) {
        builder.setRendererDisabled(renderer, /* disabled= */ true);
      }
    }
    // Add one selection per requested language, reusing the builder each time.
    for (String language : languages) {
      builder.setPreferredAudioLanguage(language);
      addTrackSelection(period, builder.build());
    }
  }
}
 
Example 4
Source File: MimeTypes.java    From MediaSDK with Apache License 2.0 6 votes vote down vote up
/**
 * Returns the {@link C}{@code .TRACK_TYPE_*} constant corresponding to a given MIME type, or
 * {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be
 * established.
 *
 * @param mimeType The MIME type, or null.
 * @return The corresponding {@link C}{@code .TRACK_TYPE_*} constant.
 */
public static int getTrackType(@Nullable String mimeType) {
  // Null or empty input cannot be classified.
  if (TextUtils.isEmpty(mimeType)) {
    return C.TRACK_TYPE_UNKNOWN;
  }
  if (isAudio(mimeType)) {
    return C.TRACK_TYPE_AUDIO;
  }
  if (isVideo(mimeType)) {
    return C.TRACK_TYPE_VIDEO;
  }
  // Text: base text types plus the various subtitle/caption container formats.
  if (isText(mimeType)
      || APPLICATION_CEA608.equals(mimeType)
      || APPLICATION_CEA708.equals(mimeType)
      || APPLICATION_MP4CEA608.equals(mimeType)
      || APPLICATION_SUBRIP.equals(mimeType)
      || APPLICATION_TTML.equals(mimeType)
      || APPLICATION_TX3G.equals(mimeType)
      || APPLICATION_MP4VTT.equals(mimeType)
      || APPLICATION_RAWCC.equals(mimeType)
      || APPLICATION_VOBSUB.equals(mimeType)
      || APPLICATION_PGS.equals(mimeType)
      || APPLICATION_DVBSUBS.equals(mimeType)) {
    return C.TRACK_TYPE_TEXT;
  }
  // Metadata side-data streams.
  if (APPLICATION_ID3.equals(mimeType)
      || APPLICATION_EMSG.equals(mimeType)
      || APPLICATION_SCTE35.equals(mimeType)) {
    return C.TRACK_TYPE_METADATA;
  }
  // Camera motion gets its own dedicated track type in this version.
  if (APPLICATION_CAMERA_MOTION.equals(mimeType)) {
    return C.TRACK_TYPE_CAMERA_MOTION;
  }
  // Fall back to application-registered custom MIME type mappings.
  return getTrackTypeForCustomMimeType(mimeType);
}
 
Example 5
Source File: MimeTypes.java    From K-Sonic with MIT License 6 votes vote down vote up
/**
 * Returns the {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified MIME type,
 * or {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be
 * established.
 *
 * @param mimeType The MIME type.
 * @return The corresponding {@link C}{@code .TRACK_TYPE_*} constant.
 */
public static int getTrackType(String mimeType) {
  if (TextUtils.isEmpty(mimeType)) {
    return C.TRACK_TYPE_UNKNOWN;
  }
  if (isAudio(mimeType)) {
    return C.TRACK_TYPE_AUDIO;
  }
  if (isVideo(mimeType)) {
    return C.TRACK_TYPE_VIDEO;
  }
  // Text: base text types plus subtitle/caption container formats.
  boolean isTextType =
      isText(mimeType)
          || APPLICATION_CEA608.equals(mimeType)
          || APPLICATION_CEA708.equals(mimeType)
          || APPLICATION_MP4CEA608.equals(mimeType)
          || APPLICATION_SUBRIP.equals(mimeType)
          || APPLICATION_TTML.equals(mimeType)
          || APPLICATION_TX3G.equals(mimeType)
          || APPLICATION_MP4VTT.equals(mimeType)
          || APPLICATION_RAWCC.equals(mimeType)
          || APPLICATION_VOBSUB.equals(mimeType)
          || APPLICATION_PGS.equals(mimeType);
  if (isTextType) {
    return C.TRACK_TYPE_TEXT;
  }
  // Metadata side-data streams.
  boolean isMetadataType =
      APPLICATION_ID3.equals(mimeType)
          || APPLICATION_EMSG.equals(mimeType)
          || APPLICATION_SCTE35.equals(mimeType)
          || APPLICATION_CAMERA_MOTION.equals(mimeType);
  if (isMetadataType) {
    return C.TRACK_TYPE_METADATA;
  }
  // No custom MIME type fallback in this version.
  return C.TRACK_TYPE_UNKNOWN;
}
 
Example 6
Source File: MimeTypes.java    From TelePlus-Android with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Classifies a MIME type as one of the {@link C}{@code .TRACK_TYPE_*} constants. Returns
 * {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be
 * established.
 *
 * @param mimeType The MIME type, or null.
 * @return The corresponding {@link C}{@code .TRACK_TYPE_*} constant.
 */
public static int getTrackType(@Nullable String mimeType) {
  if (TextUtils.isEmpty(mimeType)) {
    return C.TRACK_TYPE_UNKNOWN;
  }
  if (isAudio(mimeType)) {
    return C.TRACK_TYPE_AUDIO;
  }
  if (isVideo(mimeType)) {
    return C.TRACK_TYPE_VIDEO;
  }
  // Subtitle/caption container formats not covered by isText.
  String[] textApplicationTypes = {
    APPLICATION_CEA608, APPLICATION_CEA708, APPLICATION_MP4CEA608, APPLICATION_SUBRIP,
    APPLICATION_TTML, APPLICATION_TX3G, APPLICATION_MP4VTT, APPLICATION_RAWCC,
    APPLICATION_VOBSUB, APPLICATION_PGS, APPLICATION_DVBSUBS,
  };
  if (isText(mimeType)) {
    return C.TRACK_TYPE_TEXT;
  }
  for (String textType : textApplicationTypes) {
    if (textType.equals(mimeType)) {
      return C.TRACK_TYPE_TEXT;
    }
  }
  // Metadata side-data streams.
  String[] metadataTypes = {
    APPLICATION_ID3, APPLICATION_EMSG, APPLICATION_SCTE35, APPLICATION_CAMERA_MOTION,
  };
  for (String metadataType : metadataTypes) {
    if (metadataType.equals(mimeType)) {
      return C.TRACK_TYPE_METADATA;
    }
  }
  // Fall back to application-registered custom MIME type mappings.
  return getTrackTypeForCustomMimeType(mimeType);
}
 
Example 7
Source File: DashUtil.java    From Telegram with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Loads {@link DrmInitData} for a given period in a DASH manifest.
 *
 * @param dataSource The {@link HttpDataSource} from which data should be loaded.
 * @param period The {@link Period}.
 * @return The loaded {@link DrmInitData}, or null if none is defined.
 * @throws IOException Thrown when there is an error while loading.
 * @throws InterruptedException Thrown if the thread was interrupted.
 */
public static @Nullable DrmInitData loadDrmInitData(DataSource dataSource, Period period)
    throws IOException, InterruptedException {
  // Prefer the video stream; fall back to audio if the period has no video representation.
  int trackType = C.TRACK_TYPE_VIDEO;
  Representation representation = getFirstRepresentation(period, trackType);
  if (representation == null) {
    trackType = C.TRACK_TYPE_AUDIO;
    representation = getFirstRepresentation(period, trackType);
  }
  if (representation == null) {
    // Neither video nor audio is present.
    return null;
  }
  Format manifestFormat = representation.format;
  Format sampleFormat = DashUtil.loadSampleFormat(dataSource, trackType, representation);
  if (sampleFormat == null) {
    return manifestFormat.drmInitData;
  }
  // Merge manifest-level information into the sample format before reading its DRM data.
  return sampleFormat.copyWithManifestFormatInfo(manifestFormat).drmInitData;
}
 
Example 8
Source File: PlaybackStatsListener.java    From MediaSDK with Apache License 2.0 6 votes vote down vote up
/**
 * Notifies the tracker that the track selection for the current playback changed.
 *
 * @param eventTime The {@link EventTime}.
 * @param trackSelections The new {@link TrackSelectionArray}.
 */
public void onTracksChanged(EventTime eventTime, TrackSelectionArray trackSelections) {
  boolean hasVideoSelection = false;
  boolean hasAudioSelection = false;
  for (TrackSelection selection : trackSelections.getAll()) {
    if (selection == null || selection.length() == 0) {
      continue;
    }
    // Classify the selection by the MIME type of its first format.
    int trackType = MimeTypes.getTrackType(selection.getFormat(0).sampleMimeType);
    hasVideoSelection |= trackType == C.TRACK_TYPE_VIDEO;
    hasAudioSelection |= trackType == C.TRACK_TYPE_AUDIO;
  }
  // Clear the tracked format for any track type that no longer has a selection.
  if (!hasVideoSelection) {
    maybeUpdateVideoFormat(eventTime, /* newFormat= */ null);
  }
  if (!hasAudioSelection) {
    maybeUpdateAudioFormat(eventTime, /* newFormat= */ null);
  }
}
 
Example 9
Source File: MediaCodecAudioRenderer.java    From TelePlus-Android with GNU General Public License v2.0 6 votes vote down vote up
/**
 * @param context A context.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
 *     content is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param audioSink The sink to which audio will be output.
 */
public MediaCodecAudioRenderer(
    Context context,
    MediaCodecSelector mediaCodecSelector,
    @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys,
    @Nullable Handler eventHandler,
    @Nullable AudioRendererEventListener eventListener,
    AudioSink audioSink) {
  super(
      C.TRACK_TYPE_AUDIO,
      mediaCodecSelector,
      drmSessionManager,
      playClearSamplesWithoutKeys,
      /* assumedMinimumCodecOperatingRate= */ 44100);
  // Hold the application context rather than the passed context, presumably so a
  // shorter-lived context (e.g. an Activity) is not retained — TODO confirm.
  this.context = context.getApplicationContext();
  this.audioSink = audioSink;
  eventDispatcher = new EventDispatcher(eventHandler, eventListener);
  // Register for sink callbacks; done last, after all fields are initialized.
  audioSink.setListener(new AudioSinkListener());
}
 
Example 10
Source File: DashManifestParser.java    From MediaSDK with Apache License 2.0 5 votes vote down vote up
/**
 * Parses the {@code contentType} attribute of an AdaptationSet element into a
 * {@code C.TRACK_TYPE_*} constant.
 *
 * @param xpp The parser positioned on the element carrying the attribute.
 * @return The corresponding track type, or {@link C#TRACK_TYPE_UNKNOWN} if absent/unrecognized.
 */
protected int parseContentType(XmlPullParser xpp) {
  String contentType = xpp.getAttributeValue(null, "contentType");
  if (TextUtils.isEmpty(contentType)) {
    return C.TRACK_TYPE_UNKNOWN;
  }
  if (MimeTypes.BASE_TYPE_AUDIO.equals(contentType)) {
    return C.TRACK_TYPE_AUDIO;
  }
  if (MimeTypes.BASE_TYPE_VIDEO.equals(contentType)) {
    return C.TRACK_TYPE_VIDEO;
  }
  if (MimeTypes.BASE_TYPE_TEXT.equals(contentType)) {
    return C.TRACK_TYPE_TEXT;
  }
  return C.TRACK_TYPE_UNKNOWN;
}
 
Example 11
Source File: RendererTrackIndexExtractorTest.java    From no-player with Apache License 2.0 5 votes vote down vote up
@Override
public int getRendererTypeFor(int index) {
    // In this test double only the first renderer is an audio renderer;
    // every other index maps to -1.
    return index == 0 ? C.TRACK_TYPE_AUDIO : -1;
}
 
Example 12
Source File: DashManifestParser.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Reads the {@code contentType} attribute from the current element and maps it to a
 * {@code C.TRACK_TYPE_*} constant, defaulting to {@link C#TRACK_TYPE_UNKNOWN}.
 *
 * @param xpp The parser positioned on the element carrying the attribute.
 * @return The corresponding track type constant.
 */
protected int parseContentType(XmlPullParser xpp) {
  String contentType = xpp.getAttributeValue(null, "contentType");
  int trackType;
  if (TextUtils.isEmpty(contentType)) {
    trackType = C.TRACK_TYPE_UNKNOWN;
  } else if (MimeTypes.BASE_TYPE_AUDIO.equals(contentType)) {
    trackType = C.TRACK_TYPE_AUDIO;
  } else if (MimeTypes.BASE_TYPE_VIDEO.equals(contentType)) {
    trackType = C.TRACK_TYPE_VIDEO;
  } else if (MimeTypes.BASE_TYPE_TEXT.equals(contentType)) {
    trackType = C.TRACK_TYPE_TEXT;
  } else {
    trackType = C.TRACK_TYPE_UNKNOWN;
  }
  return trackType;
}
 
Example 13
Source File: ExoMediaPlayer.java    From ExoMedia with Apache License 2.0 5 votes vote down vote up
/**
 * Translates an ExoPlayer {@code C.TRACK_TYPE_*} constant into the library's own
 * {@link RendererType}.
 *
 * @param exoPlayerTrackType One of the {@code C.TRACK_TYPE_*} constants.
 * @return The matching {@link RendererType}, or null for unsupported track types.
 */
protected RendererType getExoMediaRendererType(int exoPlayerTrackType) {
    if (exoPlayerTrackType == C.TRACK_TYPE_AUDIO) {
        return RendererType.AUDIO;
    }
    if (exoPlayerTrackType == C.TRACK_TYPE_VIDEO) {
        return RendererType.VIDEO;
    }
    if (exoPlayerTrackType == C.TRACK_TYPE_TEXT) {
        return RendererType.CLOSED_CAPTION;
    }
    if (exoPlayerTrackType == C.TRACK_TYPE_METADATA) {
        return RendererType.METADATA;
    }
    return null;
}
 
Example 14
Source File: DefaultTrackSelector.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Determines whether tunneling should be enabled, replacing {@link RendererConfiguration}s in
 * {@code rendererConfigurations} with configurations that enable tunneling on the appropriate
 * renderers if so.
 *
 * @param mappedTrackInfo Mapped track information.
 * @param rendererFormatSupports The result of {@link RendererCapabilities#supportsFormat} for
 *     each mapped track, indexed by renderer, track group and track (in that order).
 * @param rendererConfigurations The renderer configurations. Configurations may be replaced with
 *     ones that enable tunneling as a result of this call.
 * @param trackSelections The renderer track selections.
 * @param tunnelingAudioSessionId The audio session id to use when tunneling, or {@link
 *     C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
 */
private static void maybeConfigureRenderersForTunneling(
    MappedTrackInfo mappedTrackInfo,
    int[][][] rendererFormatSupports,
    @NullableType RendererConfiguration[] rendererConfigurations,
    @NullableType TrackSelection[] trackSelections,
    int tunnelingAudioSessionId) {
  if (tunnelingAudioSessionId == C.AUDIO_SESSION_ID_UNSET) {
    return;
  }
  // Check whether we can enable tunneling. To enable tunneling we require exactly one audio and
  // one video renderer to support tunneling and have a selection.
  int tunnelingAudioRendererIndex = -1;
  int tunnelingVideoRendererIndex = -1;
  boolean enableTunneling = true;
  for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) {
    int rendererType = mappedTrackInfo.getRendererType(i);
    TrackSelection trackSelection = trackSelections[i];
    if ((rendererType == C.TRACK_TYPE_AUDIO || rendererType == C.TRACK_TYPE_VIDEO)
        && trackSelection != null) {
      if (rendererSupportsTunneling(
          rendererFormatSupports[i], mappedTrackInfo.getTrackGroups(i), trackSelection)) {
        if (rendererType == C.TRACK_TYPE_AUDIO) {
          if (tunnelingAudioRendererIndex != -1) {
            // A second audio renderer supports tunneling: ambiguous, so give up.
            enableTunneling = false;
            break;
          } else {
            tunnelingAudioRendererIndex = i;
          }
        } else {
          if (tunnelingVideoRendererIndex != -1) {
            // A second video renderer supports tunneling: ambiguous, so give up.
            enableTunneling = false;
            break;
          } else {
            tunnelingVideoRendererIndex = i;
          }
        }
      }
    }
  }
  enableTunneling &= tunnelingAudioRendererIndex != -1 && tunnelingVideoRendererIndex != -1;
  if (enableTunneling) {
    // Both renderers share a configuration carrying the tunneling audio session id.
    RendererConfiguration tunnelingRendererConfiguration =
        new RendererConfiguration(tunnelingAudioSessionId);
    rendererConfigurations[tunnelingAudioRendererIndex] = tunnelingRendererConfiguration;
    rendererConfigurations[tunnelingVideoRendererIndex] = tunnelingRendererConfiguration;
  }
}
 
Example 15
Source File: HlsSampleStreamWrapper.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Returns the {@link TrackOutput} for the track with the given id and type, creating and
 * registering a new {@link SampleQueue} when no suitable queue exists yet.
 *
 * @param id The id of the track.
 * @param type The type of the track; one of the {@code C.TRACK_TYPE_*} constants.
 * @return The output for the track. May be a dummy output when tracks have already ended and no
 *     matching queue exists.
 */
@Override
public TrackOutput track(int id, int type) {
  int trackCount = sampleQueues.length;

  // Audio and video tracks are handled manually to ignore ids.
  if (type == C.TRACK_TYPE_AUDIO) {
    if (audioSampleQueueIndex != C.INDEX_UNSET) {
      // An audio queue already exists.
      if (audioSampleQueueMappingDone) {
        // Once mapped, only the matching id may reuse the queue; others get a dummy output.
        return sampleQueueTrackIds[audioSampleQueueIndex] == id
            ? sampleQueues[audioSampleQueueIndex]
            : createDummyTrackOutput(id, type);
      }
      // First caller claims the queue and fixes its id.
      audioSampleQueueMappingDone = true;
      sampleQueueTrackIds[audioSampleQueueIndex] = id;
      return sampleQueues[audioSampleQueueIndex];
    } else if (tracksEnded) {
      // Too late to create new queues.
      return createDummyTrackOutput(id, type);
    }
  } else if (type == C.TRACK_TYPE_VIDEO) {
    // Mirrors the audio branch above for the video queue.
    if (videoSampleQueueIndex != C.INDEX_UNSET) {
      if (videoSampleQueueMappingDone) {
        return sampleQueueTrackIds[videoSampleQueueIndex] == id
            ? sampleQueues[videoSampleQueueIndex]
            : createDummyTrackOutput(id, type);
      }
      videoSampleQueueMappingDone = true;
      sampleQueueTrackIds[videoSampleQueueIndex] = id;
      return sampleQueues[videoSampleQueueIndex];
    } else if (tracksEnded) {
      return createDummyTrackOutput(id, type);
    }
  } else /* sparse track */ {
    // Sparse tracks are matched by id only.
    for (int i = 0; i < trackCount; i++) {
      if (sampleQueueTrackIds[i] == id) {
        return sampleQueues[i];
      }
    }
    if (tracksEnded) {
      return createDummyTrackOutput(id, type);
    }
  }
  // No existing queue matched: create a new SampleQueue and grow all parallel arrays by one.
  SampleQueue trackOutput = new SampleQueue(allocator);
  trackOutput.setSampleOffsetUs(sampleOffsetUs);
  trackOutput.sourceId(chunkUid);
  trackOutput.setUpstreamFormatChangeListener(this);
  sampleQueueTrackIds = Arrays.copyOf(sampleQueueTrackIds, trackCount + 1);
  sampleQueueTrackIds[trackCount] = id;
  sampleQueues = Arrays.copyOf(sampleQueues, trackCount + 1);
  sampleQueues[trackCount] = trackOutput;
  sampleQueueIsAudioVideoFlags = Arrays.copyOf(sampleQueueIsAudioVideoFlags, trackCount + 1);
  sampleQueueIsAudioVideoFlags[trackCount] = type == C.TRACK_TYPE_AUDIO
      || type == C.TRACK_TYPE_VIDEO;
  haveAudioVideoSampleQueues |= sampleQueueIsAudioVideoFlags[trackCount];
  if (type == C.TRACK_TYPE_AUDIO) {
    audioSampleQueueMappingDone = true;
    audioSampleQueueIndex = trackCount;
  } else if (type == C.TRACK_TYPE_VIDEO) {
    videoSampleQueueMappingDone = true;
    videoSampleQueueIndex = trackCount;
  }
  // Track with the highest type score becomes the primary sample queue.
  if (getTrackTypeScore(type) > getTrackTypeScore(primarySampleQueueType)) {
    primarySampleQueueIndex = trackCount;
    primarySampleQueueType = type;
  }
  sampleQueuesEnabledStates = Arrays.copyOf(sampleQueuesEnabledStates, trackCount + 1);
  return trackOutput;
}
 
Example 16
Source File: DefaultTrackSelector.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Called from {@link #selectTracks(MappedTrackInfo, int[][][], int[])} to make a track selection
 * for each renderer, prior to overrides and disabled flags being applied.
 *
 * <p>The implementation should not account for overrides and disabled flags. Track selections
 * generated by this method will be overridden to account for these properties.
 *
 * @param mappedTrackInfo Mapped track information.
 * @param rendererFormatSupports The result of {@link RendererCapabilities#supportsFormat} for
 *     each mapped track, indexed by renderer, track group and track (in that order).
 * @param rendererMixedMimeTypeAdaptationSupports The result of {@link
 *     RendererCapabilities#supportsMixedMimeTypeAdaptation()} for each renderer.
 * @param params The selector parameters to apply.
 * @return Track selections for each renderer. A null selection indicates the renderer should be
 *     disabled, unless {@link RendererCapabilities#getTrackType()} is {@link C#TRACK_TYPE_NONE}.
 * @throws ExoPlaybackException If an error occurs while selecting the tracks.
 */
protected @NullableType TrackSelection[] selectAllTracks(
    MappedTrackInfo mappedTrackInfo,
    int[][][] rendererFormatSupports,
    int[] rendererMixedMimeTypeAdaptationSupports,
    Parameters params)
    throws ExoPlaybackException {
  int rendererCount = mappedTrackInfo.getRendererCount();
  @NullableType TrackSelection[] rendererTrackSelections = new TrackSelection[rendererCount];

  // First pass: select video tracks, and note whether any video renderer has mapped tracks.
  // Video is selected first because the audio selection below depends on the result.
  boolean seenVideoRendererWithMappedTracks = false;
  boolean selectedVideoTracks = false;
  for (int i = 0; i < rendererCount; i++) {
    if (C.TRACK_TYPE_VIDEO == mappedTrackInfo.getRendererType(i)) {
      if (!selectedVideoTracks) {
        rendererTrackSelections[i] =
            selectVideoTrack(
                mappedTrackInfo.getTrackGroups(i),
                rendererFormatSupports[i],
                rendererMixedMimeTypeAdaptationSupports[i],
                params,
                adaptiveTrackSelectionFactory);
        // Only one video renderer gets a selection.
        selectedVideoTracks = rendererTrackSelections[i] != null;
      }
      seenVideoRendererWithMappedTracks |= mappedTrackInfo.getTrackGroups(i).length > 0;
    }
  }

  // Second pass: select audio, text, and other track types (at most one selection per type for
  // audio and text).
  boolean selectedAudioTracks = false;
  boolean selectedTextTracks = false;
  for (int i = 0; i < rendererCount; i++) {
    int trackType = mappedTrackInfo.getRendererType(i);
    switch (trackType) {
      case C.TRACK_TYPE_VIDEO:
        // Already done. Do nothing.
        break;
      case C.TRACK_TYPE_AUDIO:
        if (!selectedAudioTracks) {
          rendererTrackSelections[i] =
              selectAudioTrack(
                  mappedTrackInfo.getTrackGroups(i),
                  rendererFormatSupports[i],
                  rendererMixedMimeTypeAdaptationSupports[i],
                  params,
                  // Adaptive audio is only allowed when no video renderer has mapped tracks.
                  seenVideoRendererWithMappedTracks ? null : adaptiveTrackSelectionFactory);
          selectedAudioTracks = rendererTrackSelections[i] != null;
        }
        break;
      case C.TRACK_TYPE_TEXT:
        if (!selectedTextTracks) {
          rendererTrackSelections[i] =
              selectTextTrack(
                  mappedTrackInfo.getTrackGroups(i), rendererFormatSupports[i], params);
          selectedTextTracks = rendererTrackSelections[i] != null;
        }
        break;
      default:
        rendererTrackSelections[i] =
            selectOtherTrack(
                trackType, mappedTrackInfo.getTrackGroups(i), rendererFormatSupports[i], params);
        break;
    }
  }

  return rendererTrackSelections;
}
 
Example 17
Source File: DefaultTrackSelector.java    From K-Sonic with MIT License 4 votes vote down vote up
/**
 * Makes a track selection for each renderer based on the current {@code Parameters}.
 *
 * @param rendererCapabilities The {@link RendererCapabilities} of each renderer.
 * @param rendererTrackGroupArrays The track groups mapped to each renderer, indexed the same as
 *     {@code rendererCapabilities}.
 * @param rendererFormatSupports The per-track format support, indexed by renderer, track group
 *     and track (in that order).
 * @return A selection for each renderer; entries may be null when no track is selected.
 * @throws ExoPlaybackException If an error occurs while selecting the tracks.
 */
@Override
protected TrackSelection[] selectTracks(RendererCapabilities[] rendererCapabilities,
    TrackGroupArray[] rendererTrackGroupArrays, int[][][] rendererFormatSupports)
    throws ExoPlaybackException {
  // Make a track selection for each renderer.
  int rendererCount = rendererCapabilities.length;
  TrackSelection[] rendererTrackSelections = new TrackSelection[rendererCount];
  Parameters params = paramsReference.get();

  // Video is selected in a first pass; the other types are handled afterwards.
  for (int i = 0; i < rendererCount; i++) {
    if (C.TRACK_TYPE_VIDEO == rendererCapabilities[i].getTrackType()) {
      rendererTrackSelections[i] = selectVideoTrack(rendererCapabilities[i],
          rendererTrackGroupArrays[i], rendererFormatSupports[i], params.maxVideoWidth,
          params.maxVideoHeight, params.maxVideoBitrate, params.allowNonSeamlessAdaptiveness,
          params.allowMixedMimeAdaptiveness, params.viewportWidth, params.viewportHeight,
          params.orientationMayChange, adaptiveVideoTrackSelectionFactory,
          params.exceedVideoConstraintsIfNecessary, params.exceedRendererCapabilitiesIfNecessary);
    }
  }

  // Second pass: audio, text, and any other renderer types.
  for (int i = 0; i < rendererCount; i++) {
    switch (rendererCapabilities[i].getTrackType()) {
      case C.TRACK_TYPE_VIDEO:
        // Already done. Do nothing.
        break;
      case C.TRACK_TYPE_AUDIO:
        rendererTrackSelections[i] = selectAudioTrack(rendererTrackGroupArrays[i],
            rendererFormatSupports[i], params.preferredAudioLanguage,
            params.exceedRendererCapabilitiesIfNecessary);
        break;
      case C.TRACK_TYPE_TEXT:
        rendererTrackSelections[i] = selectTextTrack(rendererTrackGroupArrays[i],
            rendererFormatSupports[i], params.preferredTextLanguage,
            params.preferredAudioLanguage, params.exceedRendererCapabilitiesIfNecessary);
        break;
      default:
        rendererTrackSelections[i] = selectOtherTrack(rendererCapabilities[i].getTrackType(),
            rendererTrackGroupArrays[i], rendererFormatSupports[i],
            params.exceedRendererCapabilitiesIfNecessary);
        break;
    }
  }
  return rendererTrackSelections;
}
 
Example 18
Source File: HlsSampleStreamWrapper.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Returns the {@link TrackOutput} for the given track id and type. Audio and video tracks map
 * onto dedicated queues regardless of id; sparse tracks are looked up by id. A new
 * {@link SampleQueue} is created when no suitable queue exists and tracks have not ended.
 *
 * @param id The id of the track.
 * @param type The type of the track; one of the {@code C.TRACK_TYPE_*} constants.
 * @return The output for the track, possibly a dummy output.
 */
@Override
public TrackOutput track(int id, int type) {
  int trackCount = sampleQueues.length;

  // Audio and video tracks are handled manually to ignore ids.
  if (type == C.TRACK_TYPE_AUDIO) {
    if (audioSampleQueueIndex != C.INDEX_UNSET) {
      if (audioSampleQueueMappingDone) {
        // Queue already bound to an id: reuse only for that id, otherwise return a dummy.
        return sampleQueueTrackIds[audioSampleQueueIndex] == id
            ? sampleQueues[audioSampleQueueIndex]
            : createDummyTrackOutput(id, type);
      }
      // Bind the existing audio queue to this id.
      audioSampleQueueMappingDone = true;
      sampleQueueTrackIds[audioSampleQueueIndex] = id;
      return sampleQueues[audioSampleQueueIndex];
    } else if (tracksEnded) {
      return createDummyTrackOutput(id, type);
    }
  } else if (type == C.TRACK_TYPE_VIDEO) {
    // Same claiming scheme as audio, applied to the video queue.
    if (videoSampleQueueIndex != C.INDEX_UNSET) {
      if (videoSampleQueueMappingDone) {
        return sampleQueueTrackIds[videoSampleQueueIndex] == id
            ? sampleQueues[videoSampleQueueIndex]
            : createDummyTrackOutput(id, type);
      }
      videoSampleQueueMappingDone = true;
      sampleQueueTrackIds[videoSampleQueueIndex] = id;
      return sampleQueues[videoSampleQueueIndex];
    } else if (tracksEnded) {
      return createDummyTrackOutput(id, type);
    }
  } else /* sparse track */ {
    // Sparse tracks are identified purely by id.
    for (int i = 0; i < trackCount; i++) {
      if (sampleQueueTrackIds[i] == id) {
        return sampleQueues[i];
      }
    }
    if (tracksEnded) {
      return createDummyTrackOutput(id, type);
    }
  }
  // Create a new queue and extend every parallel bookkeeping array by one slot.
  SampleQueue trackOutput = new SampleQueue(allocator);
  trackOutput.setSampleOffsetUs(sampleOffsetUs);
  trackOutput.sourceId(chunkUid);
  trackOutput.setUpstreamFormatChangeListener(this);
  sampleQueueTrackIds = Arrays.copyOf(sampleQueueTrackIds, trackCount + 1);
  sampleQueueTrackIds[trackCount] = id;
  sampleQueues = Arrays.copyOf(sampleQueues, trackCount + 1);
  sampleQueues[trackCount] = trackOutput;
  sampleQueueIsAudioVideoFlags = Arrays.copyOf(sampleQueueIsAudioVideoFlags, trackCount + 1);
  sampleQueueIsAudioVideoFlags[trackCount] = type == C.TRACK_TYPE_AUDIO
      || type == C.TRACK_TYPE_VIDEO;
  haveAudioVideoSampleQueues |= sampleQueueIsAudioVideoFlags[trackCount];
  if (type == C.TRACK_TYPE_AUDIO) {
    audioSampleQueueMappingDone = true;
    audioSampleQueueIndex = trackCount;
  } else if (type == C.TRACK_TYPE_VIDEO) {
    videoSampleQueueMappingDone = true;
    videoSampleQueueIndex = trackCount;
  }
  // The queue whose type scores highest becomes the primary queue.
  if (getTrackTypeScore(type) > getTrackTypeScore(primarySampleQueueType)) {
    primarySampleQueueIndex = trackCount;
    primarySampleQueueType = type;
  }
  sampleQueuesEnabledStates = Arrays.copyOf(sampleQueuesEnabledStates, trackCount + 1);
  return trackOutput;
}
 
Example 19
Source File: MappingTrackSelector.java    From K-Sonic with MIT License 4 votes vote down vote up
/**
 * Determines whether tunneling should be enabled, replacing {@link RendererConfiguration}s in
 * {@code rendererConfigurations} with configurations that enable tunneling on the appropriate
 * renderers if so.
 *
 * @param rendererCapabilities The {@link RendererCapabilities} of the renderers for which
 *     {@link TrackSelection}s are to be generated.
 * @param rendererTrackGroupArrays An array of {@link TrackGroupArray}s where each entry
 *     corresponds to the renderer of equal index in {@code renderers}.
 * @param rendererFormatSupports Maps every available track to a specific level of support as
 *     defined by the renderer {@code FORMAT_*} constants.
 * @param rendererConfigurations The renderer configurations. Configurations may be replaced with
 *     ones that enable tunneling as a result of this call.
 * @param trackSelections The renderer track selections.
 * @param tunnelingAudioSessionId The audio session id to use when tunneling, or
 *     {@link C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
 */
private static void maybeConfigureRenderersForTunneling(
    RendererCapabilities[] rendererCapabilities, TrackGroupArray[] rendererTrackGroupArrays,
    int[][][] rendererFormatSupports, RendererConfiguration[] rendererConfigurations,
    TrackSelection[] trackSelections, int tunnelingAudioSessionId) {
  if (tunnelingAudioSessionId == C.AUDIO_SESSION_ID_UNSET) {
    return;
  }
  // Check whether we can enable tunneling. To enable tunneling we require exactly one audio and
  // one video renderer to support tunneling and have a selection.
  int tunnelingAudioRendererIndex = -1;
  int tunnelingVideoRendererIndex = -1;
  boolean enableTunneling = true;
  for (int i = 0; i < rendererCapabilities.length; i++) {
    int rendererType = rendererCapabilities[i].getTrackType();
    TrackSelection trackSelection = trackSelections[i];
    if ((rendererType == C.TRACK_TYPE_AUDIO || rendererType == C.TRACK_TYPE_VIDEO)
        && trackSelection != null) {
      if (rendererSupportsTunneling(rendererFormatSupports[i], rendererTrackGroupArrays[i],
          trackSelection)) {
        if (rendererType == C.TRACK_TYPE_AUDIO) {
          if (tunnelingAudioRendererIndex != -1) {
            enableTunneling = false;
            break;
          } else {
            tunnelingAudioRendererIndex = i;
          }
        } else {
          if (tunnelingVideoRendererIndex != -1) {
            enableTunneling = false;
            break;
          } else {
            tunnelingVideoRendererIndex = i;
          }
        }
      }
    }
  }
  enableTunneling &= tunnelingAudioRendererIndex != -1 && tunnelingVideoRendererIndex != -1;
  if (enableTunneling) {
    RendererConfiguration tunnelingRendererConfiguration =
        new RendererConfiguration(tunnelingAudioSessionId);
    rendererConfigurations[tunnelingAudioRendererIndex] = tunnelingRendererConfiguration;
    rendererConfigurations[tunnelingVideoRendererIndex] = tunnelingRendererConfiguration;
  }
}
 
Example 20
Source File: DashMediaSource.java    From Telegram-FOSS with GNU General Public License v2.0
/**
 * Builds a {@link PeriodSeekInfo} for a DASH period by intersecting the available time ranges
 * reported by the segment indices of its adaptation sets.
 *
 * <p>If any considered adaptation set has no segment index, the whole period duration is treated
 * as available with an explicit index. When the period contains audio or video content, text
 * adaptation sets are excluded from the calculation so that subtitle timing does not distort the
 * seekable window (see ExoPlayer issue #4029).
 *
 * @param period The period to examine.
 * @param durationUs The period duration in microseconds.
 * @return Seek information covering the intersection of the adaptation sets' available ranges.
 */
public static PeriodSeekInfo createPeriodSeekInfo(
    com.google.android.exoplayer2.source.dash.manifest.Period period, long durationUs) {
  long availableStartTimeUs = 0;
  long availableEndTimeUs = Long.MAX_VALUE;
  boolean isIndexExplicit = false;
  boolean emptyIndexSeen = false;

  // Determine whether any adaptation set carries audio or video content.
  boolean hasAudioOrVideo = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    int adaptationSetType = period.adaptationSets.get(i).type;
    if (adaptationSetType == C.TRACK_TYPE_AUDIO || adaptationSetType == C.TRACK_TYPE_VIDEO) {
      hasAudioOrVideo = true;
      break;
    }
  }

  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    // Skip text-only sets when audio/video is present, per the exclusion described above.
    if (hasAudioOrVideo && adaptationSet.type == C.TRACK_TYPE_TEXT) {
      continue;
    }

    DashSegmentIndex index = adaptationSet.representations.get(0).getIndex();
    if (index == null) {
      // No index available: assume the full period duration is seekable.
      return new PeriodSeekInfo(true, 0, durationUs);
    }
    isIndexExplicit |= index.isExplicit();

    int segmentCount = index.getSegmentCount(durationUs);
    if (segmentCount == 0) {
      // An empty index collapses the window to zero; later sets cannot widen it again.
      emptyIndexSeen = true;
      availableStartTimeUs = 0;
      availableEndTimeUs = 0;
    } else if (!emptyIndexSeen) {
      long firstSegmentNum = index.getFirstSegmentNum();
      // The window start is the latest of the per-set start times (intersection).
      availableStartTimeUs = Math.max(availableStartTimeUs, index.getTimeUs(firstSegmentNum));
      if (segmentCount != DashSegmentIndex.INDEX_UNBOUNDED) {
        long lastSegmentNum = firstSegmentNum + segmentCount - 1;
        long adaptationSetEndTimeUs =
            index.getTimeUs(lastSegmentNum) + index.getDurationUs(lastSegmentNum, durationUs);
        // The window end is the earliest of the per-set end times (intersection).
        availableEndTimeUs = Math.min(availableEndTimeUs, adaptationSetEndTimeUs);
      }
    }
  }
  return new PeriodSeekInfo(isIndexExplicit, availableStartTimeUs, availableEndTimeUs);
}