Java Code Examples for android.media.MediaCodec#BufferInfo

The following examples show how to use android.media.MediaCodec#BufferInfo. Each example is taken from an open-source project; the source file, project, and license are listed above each snippet.
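As a quick orientation before the project examples, here is a minimal sketch (not taken from any of the projects below) of the most common BufferInfo pattern: dequeue an encoder output buffer, read the offset, size, presentationTimeUs and flags the codec filled in, and hand the sample to a MediaMuxer. It assumes API 21+, the usual android.media and java.nio imports, and a muxer that has already been started with the given track added.

static void drainToMuxer(MediaCodec codec, MediaMuxer muxer, int trackIndex) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        int index = codec.dequeueOutputBuffer(info, 10000 /* us */);
        if (index < 0) {
            break; // INFO_TRY_AGAIN_LATER / format-changed handling omitted for brevity
        }
        ByteBuffer data = codec.getOutputBuffer(index);
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && info.size > 0) {
            // BufferInfo describes the valid region and timing of this sample.
            muxer.writeSampleData(trackIndex, data, info);
        }
        codec.releaseOutputBuffer(index, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            break;
        }
    }
}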
Example 1
Source File: MediaCodecRenderer.java    From TelePlus-Android with GNU General Public License v2.0
/**
 * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*}
 *     constants defined in {@link C}.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
 *     media is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param assumedMinimumCodecOperatingRate A codec operating rate that all codecs instantiated by
 *     this renderer are assumed to meet implicitly (i.e. without the operating rate being set
 *     explicitly using {@link MediaFormat#KEY_OPERATING_RATE}).
 */
public MediaCodecRenderer(
    int trackType,
    MediaCodecSelector mediaCodecSelector,
    @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys,
    float assumedMinimumCodecOperatingRate) {
  super(trackType);
  Assertions.checkState(Util.SDK_INT >= 16);
  this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector);
  this.drmSessionManager = drmSessionManager;
  this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
  this.assumedMinimumCodecOperatingRate = assumedMinimumCodecOperatingRate;
  buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
  flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
  formatHolder = new FormatHolder();
  decodeOnlyPresentationTimestamps = new ArrayList<>();
  outputBufferInfo = new MediaCodec.BufferInfo();
  codecReconfigurationState = RECONFIGURATION_STATE_NONE;
  codecReinitializationState = REINITIALIZATION_STATE_NONE;
  codecOperatingRate = CODEC_OPERATING_RATE_UNSET;
  rendererOperatingRate = 1f;
}
 
Example 2
Source File: EncodedAudioRecorder.java    From AlexaAndroid with GNU General Public License v2.0
/**
 * Save the encoded (output) buffer into the complete encoded recording.
 * TODO: copy directly (without the intermediate byte array)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer buffer = outputBuffers[index];
        Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
        if (info.size <= buffer.remaining()) {
            final byte[] bufferCopied = new byte[info.size];
            buffer.get(bufferCopied); // TODO: catch BufferUnderflow
            // TODO: do we need to clear?
            // on N5: always size == remaining(), clearing is not needed
            // on SGS2: remaining decreases until it becomes less than size, which results in BufferUnderflow
            // (but SGS2 records only zeros anyway)
            //buffer.clear();
            codec.releaseOutputBuffer(index, false);
            addEncoded(bufferCopied);
            if (Log.DEBUG) {
                AudioUtils.showSomeBytes("out", bufferCopied);
            }
        } else {
            Log.e("size > remaining");
            codec.releaseOutputBuffer(index, false);
        }
    }
}
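The TODO in the comment above asks for a direct copy that skips the intermediate byte array. One hedged way to do that (the names mRecording and addEncodedDirect are illustrative, not taken from AlexaAndroid) is to append a limited view of the codec output buffer straight into a growable ByteBuffer:

private ByteBuffer mRecording = ByteBuffer.allocate(1 << 20);

private void addEncodedDirect(ByteBuffer codecOutput, MediaCodec.BufferInfo info) {
    ByteBuffer slice = codecOutput.duplicate();
    slice.limit(info.offset + info.size);
    slice.position(info.offset);
    if (mRecording.remaining() < info.size) {
        // Grow the backing buffer before appending.
        ByteBuffer bigger = ByteBuffer.allocate(
                Math.max(mRecording.capacity() * 2, mRecording.position() + info.size));
        mRecording.flip();
        bigger.put(mRecording);
        mRecording = bigger;
    }
    mRecording.put(slice); // bulk transfer, no intermediate byte[]
}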
 
Example 3
Source File: Track.java    From talk-android with MIT License
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    // Convert the delta from microseconds to timeScale units, rounding to the nearest unit.
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Example 4
Source File: Mpeg2TsMuxer.java    From DeviceConnect-Android with MIT License
@Override
public void onWriteAudioData(ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    if (isConfigFrame(bufferInfo)) {
        return;
    }

    int outPacketSize = bufferInfo.size;
    if (isADTSPacket(encodedData)) {
        // The incoming data already contains an ADTS header
        checkADTSBuffer(outPacketSize);
        encodedData.get(mADTS, 0, outPacketSize);
    } else {
        outPacketSize += ADTS_LENGTH;
        checkADTSBuffer(outPacketSize);

        addADTStoPacket(mADTS, outPacketSize);
        encodedData.get(mADTS, ADTS_LENGTH, bufferInfo.size);
    }

    mADTSBuffer.position(0);
    mADTSBuffer.limit(outPacketSize);
    mTsWriter.writeADTS(mADTSBuffer, getPts(bufferInfo));
}
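The addADTStoPacket helper is not shown in the snippet above; a typical implementation writes the 7-byte ADTS header for each AAC frame. The profile, sampling-rate and channel values below are assumptions (AAC-LC, 44.1 kHz, stereo) and may differ from what DeviceConnect-Android actually uses:

private static final int ADTS_LENGTH = 7;

private void addADTStoPacket(byte[] packet, int packetLen) {
    int profile = 2;  // AAC-LC
    int freqIdx = 4;  // 44.1 kHz
    int chanCfg = 2;  // stereo
    packet[0] = (byte) 0xFF; // sync word (part 1)
    packet[1] = (byte) 0xF9; // sync word (part 2), MPEG-2, no CRC
    packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
    packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
    packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
    packet[6] = (byte) 0xFC;
}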
 
Example 5
Source File: DefaultDataSink.java    From GIFCompressor with Apache License 2.0
/**
 * Writes all enqueued samples into the muxer, now that it is
 * open and running.
 */
private void drainQueue() {
    if (mQueue.isEmpty()) return;
    mQueueBuffer.flip();
    LOG.i("Output format determined, writing pending data into the muxer. "
            + "samples:" + mQueue.size() + " "
            + "bytes:" + mQueueBuffer.limit());
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int offset = 0;
    for (QueuedSample sample : mQueue) {
        bufferInfo.set(offset, sample.mSize, sample.mTimeUs, sample.mFlags);
        write(mQueueBuffer, bufferInfo);
        offset += sample.mSize;
    }
    mQueue.clear();
    mQueueBuffer = null;
}
 
Example 6
Source File: ScreenRecorder.java    From ScreenCapture with MIT License
private void signalEndOfStream() {
    MediaCodec.BufferInfo eos = new MediaCodec.BufferInfo();
    ByteBuffer buffer = ByteBuffer.allocate(0);
    eos.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    if (VERBOSE) Log.i(TAG, "Signal EOS to muxer ");
    if (mVideoTrackIndex != INVALID_INDEX) {
        writeSampleData(mVideoTrackIndex, eos, buffer);
    }
    if (mAudioTrackIndex != INVALID_INDEX) {
        writeSampleData(mAudioTrackIndex, eos, buffer);
    }
    mVideoTrackIndex = INVALID_INDEX;
    mAudioTrackIndex = INVALID_INDEX;
}
 
Example 7
Source File: ScreenRecorder.java    From SoloPi with Apache License 2.0
private void resetVideoPts(MediaCodec.BufferInfo buffer) {
    if (mVideoPtsOffset == 0) {
        mVideoPtsOffset = buffer.presentationTimeUs;
        buffer.presentationTimeUs = 0;
    } else {
        buffer.presentationTimeUs -= mVideoPtsOffset;
    }
}
 
Example 8
Source File: Camera1Base.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
@Override
public void getAacData(ByteBuffer aacBuffer, MediaCodec.BufferInfo info) {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    recordController.recordAudio(aacBuffer, info);
  }
  if (streaming) getAacDataRtp(aacBuffer, info);
}
 
Example 9
Source File: MediaCodecBridge.java    From android-chromium with BSD 2-Clause "Simplified" License
@CalledByNative
private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int status = MEDIA_CODEC_ERROR;
    int index = -1;
    try {
        int index_or_status = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
        if (info.presentationTimeUs < mLastPresentationTimeUs) {
            // TODO(qinmin): return a special code through DequeueOutputResult
            // to notify the native code that the frame has a wrong presentation
            // timestamp and should be skipped.
            info.presentationTimeUs = mLastPresentationTimeUs;
        }
        mLastPresentationTimeUs = info.presentationTimeUs;

        if (index_or_status >= 0) { // index!
            status = MEDIA_CODEC_OK;
            index = index_or_status;
        } else if (index_or_status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED;
        } else if (index_or_status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED;
        } else if (index_or_status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
        } else {
            assert(false);
        }
    } catch (IllegalStateException e) {
        Log.e(TAG, "Failed to dequeue output buffer: " + e.toString());
    }

    return new DequeueOutputResult(
            status, index, info.flags, info.offset, info.presentationTimeUs, info.size);
}
 
Example 10
Source File: Mpeg2TsMuxer.java    From DeviceConnect-Android with MIT License
@Override
public void onWriteVideoData(ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

    if (isConfigFrame(bufferInfo)) {
        storeConfig(encodedData, bufferInfo);
    } else {
        if (isKeyFrame(bufferInfo) && mConfigBuffer.limit() > 0) {
            mConfigBuffer.position(0);
            mTsWriter.writeNALU(mConfigBuffer, getPts(bufferInfo));
        }
        mTsWriter.writeNALU(encodedData, getPts(bufferInfo));
    }
}
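The isConfigFrame and isKeyFrame helpers are not included in this example. Plausible implementations (an assumption, not the project's verbatim code) simply test the BufferInfo flags:

private boolean isConfigFrame(MediaCodec.BufferInfo bufferInfo) {
    return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
}

private boolean isKeyFrame(MediaCodec.BufferInfo bufferInfo) {
    return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
}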
 
Example 11
Source File: VideoCodecTask.java    From MultiMediaSample with Apache License 2.0
public Frame(MediaCodec.BufferInfo videoInfo, Bitmap bitmap) {
    this.videoInfo = videoInfo;
    this.bitmap = bitmap;
}
 
Example 12
Source File: IMuxer.java    From libcommon with Apache License 2.0
public void writeSampleData(final int trackIndex,
@NonNull final ByteBuffer byteBuf,
@NonNull final MediaCodec.BufferInfo bufferInfo);
 
Example 13
Source File: MediaCodecVideoTrackRenderer.java    From Exoplayer_VLC with Apache License 2.0
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
    ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) {

  // ###AXT:: just a temporary hack to see what we send without any issue on presentation timing.
  //Log.d("MediaCodecVideoTrackRenderer", "processOutputBuffer(): --> <-- positionUs="+positionUs+", elapsedRealtimeUs="+elapsedRealtimeUs+", state="+(getState()==TrackRenderer.STATE_STARTED));
  boolean hack = false;
  if (hack) {
    renderOutputBufferImmediate(codec, bufferIndex);
    return true;
  }
  // ###AXT:: end of hack

  if (shouldSkip) {
    skipOutputBuffer(codec, bufferIndex);
    return true;
  }

  // Compute how many microseconds it is until the buffer's presentation time.
  long elapsedSinceStartOfLoopUs = (SystemClock.elapsedRealtime() * 1000) - elapsedRealtimeUs;
  long earlyUs = bufferInfo.presentationTimeUs - positionUs - elapsedSinceStartOfLoopUs;
    //Log.d("MediaCodecVideoTrackRenderer", "processOutputBuffer(): --> <-- earlyUs="+earlyUs+", pts="+bufferInfo.presentationTimeUs+", positionUs="+positionUs+", elapsed="+elapsedSinceStartOfLoopUs);

  // Compute the buffer's desired release time in nanoseconds.
  long systemTimeNs = System.nanoTime();
  long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);

  // Apply a timestamp adjustment, if there is one.
  long adjustedReleaseTimeNs;
  if (frameReleaseTimeHelper != null) {
    adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
        bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
    earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
  } else {
    adjustedReleaseTimeNs = unadjustedFrameReleaseTimeNs;
  }

  //###AXT:: set threshold to 50ms instead of 30ms
  if (earlyUs < -50000) {
    // We're more than 50ms late rendering the frame.
    Log.d("MediaCodecVideoTrackRenderer", "processOutputBuffer(): drop frame more than 50ms late");
    dropOutputBuffer(codec, bufferIndex);
    return true;
  }

  if (!renderedFirstFrame) {
    Log.d("MediaCodecVideoTrackRenderer", "processOutputBuffer(): ##### render first frame");
    renderOutputBufferImmediate(codec, bufferIndex);
    renderedFirstFrame = true;
    return true;
  }

  if (getState() != TrackRenderer.STATE_STARTED) {
    Log.d("MediaCodecVideoTrackRenderer", "processOutputBuffer(): TrackRenderer not started");
    return false;
  }

  if (Util.SDK_INT >= 21) {
    // Let the underlying framework time the release.
    if (earlyUs < 50000) {
      renderOutputBufferTimedV21(codec, bufferIndex, adjustedReleaseTimeNs);
      return true;
    }
  } else {
    // We need to time the release ourselves.
    if (earlyUs < 30000) {
      if (earlyUs > 11000) {
        // We're a little too early to render the frame. Sleep until the frame can be rendered.
        // Note: The 11ms threshold was chosen fairly arbitrarily.
        try {
          // Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
          Thread.sleep((earlyUs - 10000) / 1000);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }

      renderOutputBufferImmediate(codec, bufferIndex);
      return true;
    }
  }

  // We're either not playing, or it's not time to render the frame yet.
  return false;
}
 
Example 14
Source File: RtmpFromFile.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
@Override
protected void getH264DataRtp(ByteBuffer h264Buffer, MediaCodec.BufferInfo info) {
  srsFlvMuxer.sendVideo(h264Buffer, info);
}
 
Example 15
Source File: VideoController.java    From VideoCompressor with Apache License 2.0
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
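The selectTrack helper is not part of the snippet; a plausible implementation (hypothetical, the project's version may differ) picks the first track whose MIME type matches the requested kind:

private int selectTrack(MediaExtractor extractor, boolean audio) {
    int numTracks = extractor.getTrackCount();
    for (int i = 0; i < numTracks; i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (audio ? mime.startsWith("audio/") : mime.startsWith("video/")) {
            return i;
        }
    }
    return -1; // no matching track
}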
 
Example 16
Source File: AACHelper.java    From CameraV with GNU General Public License v3.0
public void startPlaying(InputStream is)
{
    int len = 1024;
    byte[] buffer2 = new byte[len];

    byte[] data = new byte[len];

    ByteBuffer[] inputBuffers;
    ByteBuffer[] outputBuffers;

    ByteBuffer inputBuffer;
    ByteBuffer outputBuffer;

    MediaCodec.BufferInfo bufferInfo;
    int inputBufferIndex;
    int outputBufferIndex;
    byte[] outData;
    try
    {
        player.play();
        decoder.start();
        isPlaying = true;
        while (isPlaying)
        {
            
                int read = is.read(data); // return value (bytes read, or -1 at EOF) is not checked

                //===========
                inputBuffers = decoder.getInputBuffers();
                outputBuffers = decoder.getOutputBuffers();
                inputBufferIndex = decoder.dequeueInputBuffer(-1);
                if (inputBufferIndex >= 0)
                {
                    inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();

                    inputBuffer.put(data);

                    decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
                }

                bufferInfo = new MediaCodec.BufferInfo();
                outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);

                while (outputBufferIndex >= 0)
                {
                    outputBuffer = outputBuffers[outputBufferIndex];

                    outputBuffer.position(bufferInfo.offset);
                    outputBuffer.limit(bufferInfo.offset + bufferInfo.size);

                    outData = new byte[bufferInfo.size];
                    outputBuffer.get(outData);

                  //  Log.d("AudioDecoder", outData.length + " bytes decoded");

                    player.write(outData, 0, outData.length);

                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                    outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);

                }

                //===========

            
        }

        decoder.stop();
        player.stop();

    }
    catch (Exception e)
    {
        // errors during decode/playback are silently ignored
    }
}
 
Example 17
Source File: EncodedAudioRecorder.java    From speechutils with Apache License 2.0
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private int recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    int status = -1;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numDequeueOutputBufferTimeout = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_INPUT_BUFFER_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                numDequeueOutputBufferTimeout++;
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numDequeueOutputBufferTimeout);
                if (numDequeueOutputBufferTimeout > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                numDequeueOutputBufferTimeout = 0;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    status = 0;
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
        Log.i("stopped and released codec");
    }
    return status;
}
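The Javadoc above notes that this loop uses the deprecated buffer-array API. On API 21+ the same drain step can be written with the per-index accessors instead of getOutputBuffers(); the sketch below is an assumption-level variant (it reuses addEncoded from Example 2 as the sink and skips the format-changed bookkeeping):

@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void drainOutputModern(MediaCodec codec, long timeoutUs) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, timeoutUs);
    while (index >= 0) {
        ByteBuffer output = codec.getOutputBuffer(index); // valid only until released
        byte[] encoded = new byte[info.size];
        output.position(info.offset);
        output.get(encoded, 0, info.size);
        codec.releaseOutputBuffer(index, false);
        addEncoded(encoded);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            break;
        }
        index = codec.dequeueOutputBuffer(info, timeoutUs);
    }
}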
 
Example 18
Source File: MediaController.java    From SiliCompressor with Apache License 2.0
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 
Example 19
Source File: MediaCodecDecoderPlugin.java    From media-for-mobile with Apache License 2.0
private void init() {
    outputBufferInfo = new MediaCodec.BufferInfo();
    inputBufferInfo = new MediaCodec.BufferInfo();
}
 
Example 20
Source File: DisplayBase.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
protected abstract void getH264DataRtp(ByteBuffer h264Buffer, MediaCodec.BufferInfo info);