com.google.android.exoplayer2.util.Util Java Examples

The following examples show how to use com.google.android.exoplayer2.util.Util. Each example is taken from the open-source project and source file named in the line above its code.
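Before the individual examples, here is a minimal sketch of a few commonly used Util helpers. The Context and literal values are illustrative placeholders, not taken from any of the projects below.

import android.content.Context;

import com.google.android.exoplayer2.util.Util;

public final class UtilBasics {
  // Illustrative calls only; the Context and literals are placeholders.
  static void demo(Context context) {
    String userAgent = Util.getUserAgent(context, "MyDemoApp"); // app- and device-specific user agent string
    boolean same = Util.areEqual(null, null);                   // null-safe equals; returns true here
    int clamped = Util.constrainValue(300, /* min= */ 0, /* max= */ 255); // -> 255
    boolean lollipopOrLater = Util.SDK_INT >= 21;               // cached Build.VERSION.SDK_INT
  }
}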
Example #1
Source File: AudioTrackPositionTracker.java    From Telegram-FOSS with GNU General Public License v2.0
/**
 * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
 * track's position, until the next call to {@link #reset()}.
 *
 * @param audioTrack The audio track to wrap.
 * @param outputEncoding The encoding of the audio track.
 * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
 *     otherwise.
 * @param bufferSize The audio track buffer size in bytes.
 */
public void setAudioTrack(
    AudioTrack audioTrack,
    @C.Encoding int outputEncoding,
    int outputPcmFrameSize,
    int bufferSize) {
  this.audioTrack = audioTrack;
  this.outputPcmFrameSize = outputPcmFrameSize;
  this.bufferSize = bufferSize;
  audioTimestampPoller = new AudioTimestampPoller(audioTrack);
  outputSampleRate = audioTrack.getSampleRate();
  needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
  isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
  bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
  lastRawPlaybackHeadPosition = 0;
  rawPlaybackHeadWrapCount = 0;
  passthroughWorkaroundPauseOffset = 0;
  hasData = false;
  stopTimestampUs = C.TIME_UNSET;
  forceResetWorkaroundTimeMs = C.TIME_UNSET;
  latencyUs = 0;
}
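As a worked example of the bufferSizeUs calculation above, assuming framesToDurationUs converts a frame count to microseconds as frames * 1_000_000 / outputSampleRate (an assumption for illustration; that helper is not shown here): for 16-bit stereo PCM the frame size is 4 bytes, so an 8192-byte track buffer holds 2048 frames, which at 48 kHz is roughly 42.7 ms.

// Illustrative arithmetic only; all values are made up.
int outputPcmFrameSize = 4;        // 16-bit stereo PCM: 2 bytes per sample * 2 channels
int bufferSize = 8192;             // bytes
int outputSampleRate = 48_000;     // Hz
long frames = bufferSize / outputPcmFrameSize;               // 2048 frames
long bufferSizeUs = frames * 1_000_000L / outputSampleRate;  // ~42_666 us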
 
Example #2
Source File: ConcatenatingMediaSource.java    From MediaSDK with Apache License 2.0
@GuardedBy("this")
private void removePublicMediaSources(
    int fromIndex,
    int toIndex,
    @Nullable Handler handler,
    @Nullable Runnable onCompletionAction) {
  Assertions.checkArgument((handler == null) == (onCompletionAction == null));
  Handler playbackThreadHandler = this.playbackThreadHandler;
  Util.removeRange(mediaSourcesPublic, fromIndex, toIndex);
  if (playbackThreadHandler != null) {
    HandlerAndRunnable callbackAction = createOnCompletionAction(handler, onCompletionAction);
    playbackThreadHandler
        .obtainMessage(MSG_REMOVE, new MessageData<>(fromIndex, toIndex, callbackAction))
        .sendToTarget();
  } else if (onCompletionAction != null && handler != null) {
    handler.post(onCompletionAction);
  }
}
 
Example #3
Source File: PgsDecoder.java    From TelePlus-Android with GNU General Public License v2.0
private void parsePaletteSection(ParsableByteArray buffer, int sectionLength) {
  if ((sectionLength % 5) != 2) {
    // Section must be two bytes followed by a whole number of (index, y, cb, cr, a) entries.
    return;
  }
  buffer.skipBytes(2);

  Arrays.fill(colors, 0);
  int entryCount = sectionLength / 5;
  for (int i = 0; i < entryCount; i++) {
    int index = buffer.readUnsignedByte();
    int y = buffer.readUnsignedByte();
    int cr = buffer.readUnsignedByte();
    int cb = buffer.readUnsignedByte();
    int a = buffer.readUnsignedByte();
    int r = (int) (y + (1.40200 * (cr - 128)));
    int g = (int) (y - (0.34414 * (cb - 128)) - (0.71414 * (cr - 128)));
    int b = (int) (y + (1.77200 * (cb - 128)));
    colors[index] =
        (a << 24)
            | (Util.constrainValue(r, 0, 255) << 16)
            | (Util.constrainValue(g, 0, 255) << 8)
            | Util.constrainValue(b, 0, 255);
  }
  colorsSet = true;
}
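To see the YCbCr-to-ARGB conversion above in action, consider a neutral palette entry (made-up values, not from a real PGS stream): y = cb = cr = 128 with a = 255 yields r = g = b = 128, packed as opaque mid-gray.

// Made-up palette entry for illustration.
int y = 128, cr = 128, cb = 128, a = 255;
int r = (int) (y + (1.40200 * (cr - 128)));                          // 128
int g = (int) (y - (0.34414 * (cb - 128)) - (0.71414 * (cr - 128))); // 128
int b = (int) (y + (1.77200 * (cb - 128)));                          // 128
int argb = (a << 24) | (r << 16) | (g << 8) | b;                     // 0xFF808080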
 
Example #4
Source File: ExoMediaPlayer.java    From DKVideoPlayer with Apache License 2.0
@Override
public void initPlayer() {
    mInternalPlayer = new SimpleExoPlayer.Builder(
            mAppContext,
            mRenderersFactory == null ? mRenderersFactory = new DefaultRenderersFactory(mAppContext) : mRenderersFactory,
            mTrackSelector == null ? mTrackSelector = new DefaultTrackSelector(mAppContext) : mTrackSelector,
            mLoadControl == null ? mLoadControl = new DefaultLoadControl() : mLoadControl,
            DefaultBandwidthMeter.getSingletonInstance(mAppContext),
            Util.getLooper(),
            new AnalyticsCollector(Clock.DEFAULT),
            /* useLazyPreparation= */ true,
            Clock.DEFAULT)
            .build();
    setOptions();

    // Player logging
    if (VideoViewManager.getConfig().mIsEnableLog && mTrackSelector instanceof MappingTrackSelector) {
        mInternalPlayer.addAnalyticsListener(new EventLogger((MappingTrackSelector) mTrackSelector, "ExoPlayer"));
    }

    mInternalPlayer.addListener(this);
    mInternalPlayer.addVideoListener(this);
}
 
Example #5
Source File: MappingTrackSelector.java    From TelePlus-Android with GNU General Public License v2.0
/**
 * Returns the extent to which a renderer supports adaptation between specified tracks within a
 * {@link TrackGroup}.
 *
 * @param rendererIndex The renderer index.
 * @param groupIndex The index of the track group.
 * @param trackIndices The indices of the tracks in the track group.
 * @return One of {@link RendererCapabilities#ADAPTIVE_SEAMLESS}, {@link
 *     RendererCapabilities#ADAPTIVE_NOT_SEAMLESS} and {@link
 *     RendererCapabilities#ADAPTIVE_NOT_SUPPORTED}.
 */
public int getAdaptiveSupport(int rendererIndex, int groupIndex, int[] trackIndices) {
  int handledTrackCount = 0;
  int adaptiveSupport = RendererCapabilities.ADAPTIVE_SEAMLESS;
  boolean multipleMimeTypes = false;
  String firstSampleMimeType = null;
  for (int i = 0; i < trackIndices.length; i++) {
    int trackIndex = trackIndices[i];
    String sampleMimeType =
        rendererTrackGroups[rendererIndex].get(groupIndex).getFormat(trackIndex).sampleMimeType;
    if (handledTrackCount++ == 0) {
      firstSampleMimeType = sampleMimeType;
    } else {
      multipleMimeTypes |= !Util.areEqual(firstSampleMimeType, sampleMimeType);
    }
    adaptiveSupport =
        Math.min(
            adaptiveSupport,
            rendererFormatSupports[rendererIndex][groupIndex][i]
                & RendererCapabilities.ADAPTIVE_SUPPORT_MASK);
  }
  return multipleMimeTypes
      ? Math.min(adaptiveSupport, rendererMixedMimeTypeAdaptiveSupports[rendererIndex])
      : adaptiveSupport;
}
 
Example #6
Source File: DashManifestParser.java    From TelePlus-Android with GNU General Public License v2.0
/**
 * Parses the number of channels from the value attribute of an AudioChannelConfiguration with
 * schemeIdUri "tag:dolby.com,2014:dash:audio_channel_configuration:2011", as defined by table E.5
 * in ETSI TS 102 366.
 *
 * @param xpp The parser from which to read.
 * @return The parsed number of channels, or {@link Format#NO_VALUE} if the channel count could
 *     not be parsed.
 */
protected static int parseDolbyChannelConfiguration(XmlPullParser xpp) {
  String value = Util.toLowerInvariant(xpp.getAttributeValue(null, "value"));
  if (value == null) {
    return Format.NO_VALUE;
  }
  switch (value) {
    case "4000":
      return 1;
    case "a000":
      return 2;
    case "f801":
      return 6;
    case "fa01":
      return 8;
    default:
      return Format.NO_VALUE;
  }
}
 
Example #7
Source File: ExoPlayerImplInternal.java    From MediaSDK with Apache License 2.0
@Override
public int compareTo(PendingMessageInfo other) {
  if ((resolvedPeriodUid == null) != (other.resolvedPeriodUid == null)) {
    // PendingMessageInfos with a resolved period position are always smaller.
    return resolvedPeriodUid != null ? -1 : 1;
  }
  if (resolvedPeriodUid == null) {
    // Don't sort messages with unresolved positions.
    return 0;
  }
  // Sort resolved media times by period index and then by period position.
  int comparePeriodIndex = resolvedPeriodIndex - other.resolvedPeriodIndex;
  if (comparePeriodIndex != 0) {
    return comparePeriodIndex;
  }
  return Util.compareLong(resolvedPeriodTimeUs, other.resolvedPeriodTimeUs);
}
 
Example #8
Source File: SampleChooserActivity.java    From ExoPlayer-Offline with Apache License 2.0
@Override
protected List<SampleGroup> doInBackground(String... uris) {
  List<SampleGroup> result = new ArrayList<>();
  Context context = getApplicationContext();
  String userAgent = Util.getUserAgent(context, "ExoPlayerDemo");
  DataSource dataSource = new DefaultDataSource(context, null, userAgent, false);
  for (String uri : uris) {
    DataSpec dataSpec = new DataSpec(Uri.parse(uri));
    InputStream inputStream = new DataSourceInputStream(dataSource, dataSpec);
    try {
      readSampleGroups(new JsonReader(new InputStreamReader(inputStream, "UTF-8")), result);
    } catch (Exception e) {
      Log.e(TAG, "Error loading sample list: " + uri, e);
      sawError = true;
    } finally {
      Util.closeQuietly(dataSource);
    }
  }
  return result;
}
 
Example #9
Source File: ExoPlayerImplInternal.java    From TelePlus-Android with GNU General Public License v2.0
@Override
public int compareTo(@NonNull PendingMessageInfo other) {
  if ((resolvedPeriodUid == null) != (other.resolvedPeriodUid == null)) {
    // PendingMessageInfos with a resolved period position are always smaller.
    return resolvedPeriodUid != null ? -1 : 1;
  }
  if (resolvedPeriodUid == null) {
    // Don't sort messages with unresolved positions.
    return 0;
  }
  // Sort resolved media times by period index and then by period position.
  int comparePeriodIndex = resolvedPeriodIndex - other.resolvedPeriodIndex;
  if (comparePeriodIndex != 0) {
    return comparePeriodIndex;
  }
  return Util.compareLong(resolvedPeriodTimeUs, other.resolvedPeriodTimeUs);
}
 
Example #10
Source File: MediaCodecVideoRenderer.java    From Telegram-FOSS with GNU General Public License v2.0
private void processOutputFormat(MediaCodec codec, int width, int height) {
  currentWidth = width;
  currentHeight = height;
  currentPixelWidthHeightRatio = pendingPixelWidthHeightRatio;
  if (Util.SDK_INT >= 21) {
    // On API level 21 and above the decoder applies the rotation when rendering to the surface.
    // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
    // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
    if (pendingRotationDegrees == 90 || pendingRotationDegrees == 270) {
      int rotatedHeight = currentWidth;
      currentWidth = currentHeight;
      currentHeight = rotatedHeight;
      currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio;
    }
  } else {
    // On API level 20 and below the decoder does not apply the rotation.
    currentUnappliedRotationDegrees = pendingRotationDegrees;
  }
  // Must be applied each time the output format changes.
  codec.setVideoScalingMode(scalingMode);
}
 
Example #11
Source File: MediaCodecAudioRenderer.java    From Telegram with GNU General Public License v2.0
/**
 * Returns the framework {@link MediaFormat} that can be used to configure a {@link MediaCodec}
 * for decoding the given {@link Format} for playback.
 *
 * @param format The format of the media.
 * @param codecMimeType The MIME type handled by the codec.
 * @param codecMaxInputSize The maximum input size supported by the codec.
 * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
 *     no codec operating rate should be set.
 * @return The framework media format.
 */
@SuppressLint("InlinedApi")
protected MediaFormat getMediaFormat(
    Format format, String codecMimeType, int codecMaxInputSize, float codecOperatingRate) {
  MediaFormat mediaFormat = new MediaFormat();
  // Set format parameters that should always be set.
  mediaFormat.setString(MediaFormat.KEY_MIME, codecMimeType);
  mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, format.channelCount);
  mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, format.sampleRate);
  MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
  // Set codec max values.
  MediaFormatUtil.maybeSetInteger(mediaFormat, MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxInputSize);
  // Set codec configuration values.
  if (Util.SDK_INT >= 23) {
    mediaFormat.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime priority */);
    if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET && !deviceDoesntSupportOperatingRate()) {
      mediaFormat.setFloat(MediaFormat.KEY_OPERATING_RATE, codecOperatingRate);
    }
  }
  if (Util.SDK_INT <= 28 && MimeTypes.AUDIO_AC4.equals(format.sampleMimeType)) {
    // On some older builds, the AC-4 decoder expects to receive samples formatted as raw frames
    // not sync frames. Set a format key to override this.
    mediaFormat.setInteger("ac4-is-sync", 1);
  }
  return mediaFormat;
}
 
Example #12
Source File: MainActivity.java    From ExoplayerExample with The Unlicense
/**
 * Prepares ExoPlayer for audio playback of a remote audio file. Should work with most
 * popular audio file types (.mp3, .m4a, ...).
 * @param uri A Uri of the form Uri.parse("http://blabla.bleble.com/blublu.mp3")
 */
private void prepareExoPlayerFromURL(Uri uri){

    TrackSelector trackSelector = new DefaultTrackSelector();

    LoadControl loadControl = new DefaultLoadControl();

    exoPlayer = ExoPlayerFactory.newSimpleInstance(this, trackSelector, loadControl);

    DefaultDataSourceFactory dataSourceFactory = new DefaultDataSourceFactory(this, Util.getUserAgent(this, "exoplayer2example"), null);
    ExtractorsFactory extractorsFactory = new DefaultExtractorsFactory();
    MediaSource audioSource = new ExtractorMediaSource(uri, dataSourceFactory, extractorsFactory, null, null);
    exoPlayer.addListener(eventListener);

    exoPlayer.prepare(audioSource);
    initMediaControls();
}
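A call site for this helper might look like the following sketch; the URL is only a placeholder.

// Usage sketch; the URL is a placeholder.
prepareExoPlayerFromURL(Uri.parse("https://example.com/track.mp3"));
exoPlayer.setPlayWhenReady(true); // start playback once the source has been prepared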
 
Example #13
Source File: ChapterFrame.java    From Telegram with GNU General Public License v2.0 6 votes vote down vote up
@Override
public boolean equals(@Nullable Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  ChapterFrame other = (ChapterFrame) obj;
  return startTimeMs == other.startTimeMs
      && endTimeMs == other.endTimeMs
      && startOffset == other.startOffset
      && endOffset == other.endOffset
      && Util.areEqual(chapterId, other.chapterId)
      && Arrays.equals(subFrames, other.subFrames);
}
 
Example #14
Source File: SimpleExoPlayer.java    From Telegram-FOSS with GNU General Public License v2.0
@Override
public void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) {
  verifyApplicationThread();
  if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
    this.audioAttributes = audioAttributes;
    for (Renderer renderer : renderers) {
      if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
        player
            .createMessage(renderer)
            .setType(C.MSG_SET_AUDIO_ATTRIBUTES)
            .setPayload(audioAttributes)
            .send();
      }
    }
    for (AudioListener audioListener : audioListeners) {
      audioListener.onAudioAttributesChanged(audioAttributes);
    }
  }

  @AudioFocusManager.PlayerCommand
  int playerCommand =
      audioFocusManager.setAudioAttributes(
          handleAudioFocus ? audioAttributes : null, getPlayWhenReady(), getPlaybackState());
  updatePlayWhenReady(getPlayWhenReady(), playerCommand);
}
 
Example #15
Source File: DefaultBandwidthMeter.java    From Telegram with GNU General Public License v2.0
private DefaultBandwidthMeter(
    @Nullable Context context,
    SparseArray<Long> initialBitrateEstimates,
    int maxWeight,
    Clock clock,
    boolean resetOnNetworkTypeChange) {
  this.context = context == null ? null : context.getApplicationContext();
  this.initialBitrateEstimates = initialBitrateEstimates;
  this.eventDispatcher = new EventDispatcher<>();
  this.slidingPercentile = new SlidingPercentile(maxWeight);
  this.clock = clock;
  // Set the initial network type and bitrate estimate
  networkType = context == null ? C.NETWORK_TYPE_UNKNOWN : Util.getNetworkType(context);
  bitrateEstimate = getInitialBitrateEstimateForNetworkType(networkType);
  // Register to receive connectivity actions if possible.
  if (context != null && resetOnNetworkTypeChange) {
    ConnectivityActionReceiver connectivityActionReceiver =
        ConnectivityActionReceiver.getInstance(context);
    connectivityActionReceiver.register(/* bandwidthMeter= */ this);
  }
}
 
Example #16
Source File: IcyHeaders.java    From MediaSDK with Apache License 2.0
@Override
public boolean equals(@Nullable Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  IcyHeaders other = (IcyHeaders) obj;
  return bitrate == other.bitrate
      && Util.areEqual(genre, other.genre)
      && Util.areEqual(name, other.name)
      && Util.areEqual(url, other.url)
      && isPublic == other.isPublic
      && metadataInterval == other.metadataInterval;
}
 
Example #17
Source File: WebvttCueParser.java    From Telegram with GNU General Public License v2.0
/**
 * Returns the tag name for the given tag contents.
 *
 * @param tagExpression Characters between &lt; and &gt; of a start or end tag.
 * @return The name of the tag.
 */
private static String getTagName(String tagExpression) {
  tagExpression = tagExpression.trim();
  if (tagExpression.isEmpty()) {
    return null;
  }
  return Util.splitAtFirst(tagExpression, "[ \\.]")[0];
}
 
Example #18
Source File: ChunkSampleStream.java    From MediaSDK with Apache License 2.0
/**
 * Discard upstream media chunks from {@code chunkIndex} and corresponding samples from sample
 * queues.
 *
 * @param chunkIndex The index of the first chunk to discard.
 * @return The chunk at the given index.
 */
private BaseMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) {
  BaseMediaChunk firstRemovedChunk = mediaChunks.get(chunkIndex);
  Util.removeRange(mediaChunks, /* fromIndex= */ chunkIndex, /* toIndex= */ mediaChunks.size());
  nextNotifyPrimaryFormatMediaChunkIndex =
      Math.max(nextNotifyPrimaryFormatMediaChunkIndex, mediaChunks.size());
  primarySampleQueue.discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(0));
  for (int i = 0; i < embeddedSampleQueues.length; i++) {
    embeddedSampleQueues[i].discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(i + 1));
  }
  return firstRemovedChunk;
}
 
Example #19
Source File: DashTest.java    From ExoPlayer-Offline with Apache License 2.0
private static boolean shouldSkipAdaptiveTest(String mimeType) throws DecoderQueryException {
  MediaCodecInfo decoderInfo = MediaCodecUtil.getDecoderInfo(mimeType, false);
  assertNotNull(decoderInfo);
  if (decoderInfo.adaptive) {
    return false;
  }
  assertTrue(Util.SDK_INT < 21);
  return true;
}
 
Example #20
Source File: DefaultTimeBar.java    From tysq-android with GNU General Public License v3.0
private void drawTimeBar(Canvas canvas) {
    int progressBarHeight = progressBar.height();
    int barTop = progressBar.centerY() - progressBarHeight / 2;
    int barBottom = barTop + progressBarHeight;
    if (duration <= 0) {
        canvas.drawRect(progressBar.left, barTop, progressBar.right, barBottom, unplayedPaint);
        return;
    }
    int bufferedLeft = bufferedBar.left;
    int bufferedRight = bufferedBar.right;
    int progressLeft = Math.max(Math.max(progressBar.left, bufferedRight), scrubberBar.right);
    if (progressLeft < progressBar.right) {
        canvas.drawRect(progressLeft, barTop, progressBar.right, barBottom, unplayedPaint);
    }
    bufferedLeft = Math.max(bufferedLeft, scrubberBar.right);
    if (bufferedRight > bufferedLeft) {
        canvas.drawRect(bufferedLeft, barTop, bufferedRight, barBottom, bufferedPaint);
    }
    if (scrubberBar.width() > 0) {
        canvas.drawRect(scrubberBar.left, barTop, scrubberBar.right, barBottom, playedPaint);
    }
    if (adGroupCount == 0) {
        return;
    }
    long[] adGroupTimesMs = Assertions.checkNotNull(this.adGroupTimesMs);
    boolean[] playedAdGroups = Assertions.checkNotNull(this.playedAdGroups);
    int adMarkerOffset = adMarkerWidth / 2;
    for (int i = 0; i < adGroupCount; i++) {
        long adGroupTimeMs = Util.constrainValue(adGroupTimesMs[i], 0, duration);
        int markerPositionOffset =
                (int) (progressBar.width() * adGroupTimeMs / duration) - adMarkerOffset;
        int markerLeft = progressBar.left + Math.min(progressBar.width() - adMarkerWidth,
                Math.max(0, markerPositionOffset));
        Paint paint = playedAdGroups[i] ? playedAdMarkerPaint : adMarkerPaint;
        canvas.drawRect(markerLeft, barTop, markerLeft + adMarkerWidth, barBottom, paint);
    }
}
 
Example #21
Source File: BinarySearchSeeker.java    From MediaSDK with Apache License 2.0
/**
 * Returns the next byte position in the stream at which to search for the target frame, given the
 * byte range [floorBytePosition, ceilingBytePosition) and the corresponding time range
 * [floorTimePosition, ceilingTimePosition).
 */
protected static long calculateNextSearchBytePosition(
    long targetTimePosition,
    long floorTimePosition,
    long ceilingTimePosition,
    long floorBytePosition,
    long ceilingBytePosition,
    long approxBytesPerFrame) {
  if (floorBytePosition + 1 >= ceilingBytePosition
      || floorTimePosition + 1 >= ceilingTimePosition) {
    return floorBytePosition;
  }
  long seekTimeDuration = targetTimePosition - floorTimePosition;
  float estimatedBytesPerTimeUnit =
      (float) (ceilingBytePosition - floorBytePosition)
          / (ceilingTimePosition - floorTimePosition);
  // It's better to under-estimate rather than over-estimate, because the extractor
  // input can skip forward easily, but cannot rewind easily (it may require a new connection
  // to be made).
  // Therefore, we should reduce the estimated position by some amount, so it will converge to
  // the correct frame earlier.
  long bytesToSkip = (long) (seekTimeDuration * estimatedBytesPerTimeUnit);
  long confidenceInterval = bytesToSkip / 20;
  long estimatedFramePosition = floorBytePosition + bytesToSkip - approxBytesPerFrame;
  long estimatedPosition = estimatedFramePosition - confidenceInterval;
  return Util.constrainValue(estimatedPosition, floorBytePosition, ceilingBytePosition - 1);
}
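A worked pass through the estimate above, with made-up stream numbers: a 1 MB byte range covering 10 seconds, a seek to the 5-second mark, and approxBytesPerFrame of 1000.

// Made-up inputs for illustration.
long targetTimePosition = 5_000_000;                           // us
long floorTimePosition = 0, ceilingTimePosition = 10_000_000;  // us
long floorBytePosition = 0, ceilingBytePosition = 1_000_000;   // bytes
long approxBytesPerFrame = 1_000;
// estimatedBytesPerTimeUnit = 1_000_000 / 10_000_000 = 0.1 bytes/us
// bytesToSkip               = 5_000_000 * 0.1         = 500_000
// confidenceInterval        = 500_000 / 20            = 25_000
// estimatedFramePosition    = 0 + 500_000 - 1_000     = 499_000
// estimatedPosition         = 499_000 - 25_000        = 474_000, already within [0, 999_999]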
 
Example #22
Source File: TextInformationFrame.java    From K-Sonic with MIT License
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  TextInformationFrame other = (TextInformationFrame) obj;
  return id.equals(other.id) && Util.areEqual(description, other.description)
      && Util.areEqual(value, other.value);
}
 
Example #23
Source File: PlayerActivity.java    From exoplayer-intro with Apache License 2.0
@Override
public void onStop() {
  super.onStop();
  if (Util.SDK_INT > 23) {
    releasePlayer();
  }
}
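The Util.SDK_INT > 23 check follows Android's multi-window lifecycle: on API 24+ the player is created in onStart and released in onStop, while on older APIs onResume/onPause are used instead. A sketch of the companion callbacks, assuming the activity also defines an initializePlayer() helper (not shown in this example):

@Override
public void onStart() {
  super.onStart();
  if (Util.SDK_INT > 23) {
    initializePlayer(); // assumed helper that builds and prepares the player
  }
}

@Override
public void onPause() {
  super.onPause();
  if (Util.SDK_INT <= 23) {
    releasePlayer();
  }
}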
 
Example #24
Source File: ExoMediaPlayer.java    From PlayerBase with Apache License 2.0
private MediaSource getMediaSource(Uri uri, com.google.android.exoplayer2.upstream.DataSource.Factory dataSourceFactory){
    int contentType = Util.inferContentType(uri);
    switch (contentType) {
        case C.TYPE_DASH:
            return new DashMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
        case C.TYPE_SS:
            return new SsMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
        case C.TYPE_HLS:
            return new HlsMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
        case C.TYPE_OTHER:
        default:
            // This is the MediaSource representing the media to be played.
            return new ExtractorMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
    }
}
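Util.inferContentType picks the branch purely from the URI's file extension, so the mapping can be sketched with placeholder URIs (only the extensions matter):

// Placeholder URIs; only the file extension is inspected.
int dash  = Util.inferContentType(Uri.parse("https://example.com/stream.mpd"));         // C.TYPE_DASH
int hls   = Util.inferContentType(Uri.parse("https://example.com/master.m3u8"));        // C.TYPE_HLS
int ss    = Util.inferContentType(Uri.parse("https://example.com/video.ism/Manifest")); // C.TYPE_SS
int other = Util.inferContentType(Uri.parse("https://example.com/clip.mp4"));           // C.TYPE_OTHER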
 
Example #25
Source File: DefaultLoadControl.java    From TelePlus-Android with GNU General Public License v2.0
@Override
public boolean shouldStartPlayback(
    long bufferedDurationUs, float playbackSpeed, boolean rebuffering) {
  bufferedDurationUs = Util.getPlayoutDurationForMediaDuration(bufferedDurationUs, playbackSpeed);
  long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
  return minBufferDurationUs <= 0
      || bufferedDurationUs >= minBufferDurationUs
      || (!prioritizeTimeOverSizeThresholds
          && allocator.getTotalBytesAllocated() >= targetBufferSize);
}
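The first line converts buffered media time into wall-clock playout time before comparing it with the thresholds. Under the assumed semantics of Util.getPlayoutDurationForMediaDuration (media duration divided by speed), 4 seconds of buffered media at 2x playback speed counts as about 2 seconds of playout:

// Assumed semantics: playoutUs ~= mediaUs / speed.
long playoutUs = Util.getPlayoutDurationForMediaDuration(4_000_000, /* speed= */ 2f); // ~2_000_000 us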
 
Example #26
Source File: MediaCodecAudioRenderer.java    From K-Sonic with MIT License
/**
 * Returns whether the decoder is known to output six audio channels when provided with input with
 * fewer than six channels.
 * <p>
 * See [Internal: b/35655036].
 */
private static boolean codecNeedsDiscardChannelsWorkaround(String codecName) {
  // The workaround applies to Samsung Galaxy S6 and Samsung Galaxy S7.
  return Util.SDK_INT < 24 && "OMX.SEC.aac.dec".equals(codecName)
      && "samsung".equals(Util.MANUFACTURER)
      && (Util.DEVICE.startsWith("zeroflte") || Util.DEVICE.startsWith("herolte")
      || Util.DEVICE.startsWith("heroqlte"));
}
 
Example #27
Source File: SonicAudioProcessor.java    From Telegram with GNU General Public License v2.0
/**
 * Returns the specified duration scaled to take into account the speedup factor of this instance,
 * in the same units as {@code duration}.
 *
 * @param duration The duration to scale taking into account speedup.
 * @return The specified duration scaled to take into account speedup, in the same units as
 *     {@code duration}.
 */
public long scaleDurationForSpeedup(long duration) {
  if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
    return outputSampleRateHz == sampleRateHz
        ? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
        : Util.scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz,
            outputBytes * sampleRateHz);
  } else {
    return (long) ((double) speed * duration);
  }
}
 
Example #28
Source File: VbriSeeker.java    From Telegram with GNU General Public License v2.0
@Override
public SeekPoints getSeekPoints(long timeUs) {
  int tableIndex = Util.binarySearchFloor(timesUs, timeUs, true, true);
  SeekPoint seekPoint = new SeekPoint(timesUs[tableIndex], positions[tableIndex]);
  if (seekPoint.timeUs >= timeUs || tableIndex == timesUs.length - 1) {
    return new SeekPoints(seekPoint);
  } else {
    SeekPoint nextSeekPoint = new SeekPoint(timesUs[tableIndex + 1], positions[tableIndex + 1]);
    return new SeekPoints(seekPoint, nextSeekPoint);
  }
}
 
Example #29
Source File: CustomizeControlView.java    From bcm-android with GNU General Public License v3.0
@Override
public void onScrubMove(TimeBar timeBar, long position) {
    if (positionView != null) {
        positionView.setText(Util.getStringForTime(formatBuilder, formatter, position));
    }
    if (playPositionListener != null) {
        playPositionListener.onPositionChanged(position);
    }
}
 
Example #30
Source File: WebvttParserUtil.java    From TelePlus-Android with GNU General Public License v2.0
/**
 * Parses a WebVTT timestamp.
 *
 * @param timestamp The timestamp string.
 * @return The parsed timestamp in microseconds.
 * @throws NumberFormatException If the timestamp could not be parsed.
 */
public static long parseTimestampUs(String timestamp) throws NumberFormatException {
  long value = 0;
  String[] parts = Util.splitAtFirst(timestamp, "\\.");
  String[] subparts = Util.split(parts[0], ":");
  for (String subpart : subparts) {
    value = (value * 60) + Long.parseLong(subpart);
  }
  value *= 1000;
  if (parts.length == 2) {
    value += Long.parseLong(parts[1]);
  }
  return value * 1000;
}
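Tracing the loop above with an illustrative cue time of "01:02:03.250": the colon-separated parts accumulate to (1 * 60 + 2) * 60 + 3 = 3723 seconds, multiplying by 1000 gives 3,723,000, adding the fractional part gives 3,723,250 ms, and the final multiply returns 3,723,250,000 microseconds.

// Illustrative input; WebVTT timestamps are of the form (HH:)MM:SS.mmm.
long us = parseTimestampUs("01:02:03.250");
// ((0 * 60 + 1) * 60 + 2) * 60 + 3 = 3_723 s -> * 1000 = 3_723_000 -> + 250 = 3_723_250 ms -> * 1000 = 3_723_250_000 us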