Java Code Examples for android.media.MediaCodec#BUFFER_FLAG_SYNC_FRAME

The following examples show how to use android.media.MediaCodec#BUFFER_FLAG_SYNC_FRAME, drawn from the open-source projects credited above each example.
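BUFFER_FLAG_SYNC_FRAME marks an encoded output buffer that contains a sync (key) frame; since API level 21 it is deprecated in favor of the identically valued MediaCodec.BUFFER_FLAG_KEY_FRAME. Nearly every example below reduces to the same bitwise test against BufferInfo.flags, shown here as a minimal sketch (not taken from any of the projects below):

// Minimal illustration: true when the encoded buffer holds a sync (key) frame.
boolean isSyncFrame(MediaCodec.BufferInfo bufferInfo) {
    return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
}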
Example 1
Source File: CircularEncoderBuffer.java    From pause-resume-video-recording with Apache License 2.0
/**
 * Returns the index of the oldest sync frame.  Valid until the next add().
 * <p>
 * When sending output to a MediaMuxer, start here.
 */
public int getFirstIndex() {
    final int metaLen = mPacketStart.length;

    int index = mMetaTail;
    while (index != mMetaHead) {
        if ((mPacketFlags[index] & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            break;
        }
        index = (index + 1) % metaLen;
    }

    if (index == mMetaHead) {
        Log.w(TAG, "HEY: could not find sync frame in buffer");
        index = -1;
    }
    return index;
}
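A sketch of how this index is typically consumed when saving the buffered stream. It assumes the companion methods getChunk() and getNextIndex() that accompany getFirstIndex() in grafika's CircularEncoderBuffer, plus an already-started MediaMuxer; treat it as illustrative rather than part of the file above:

// Drain the circular buffer starting from the oldest sync frame.
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = circularBuffer.getFirstIndex();              // -1 if no sync frame buffered
while (index >= 0) {
    ByteBuffer chunk = circularBuffer.getChunk(index, info);  // fills info as a side effect
    muxer.writeSampleData(videoTrackIndex, chunk, info);
    index = circularBuffer.getNextIndex(index);          // -1 once the head is reached
}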
 
Example 2
Source File: PassthroughTranscoderShould.java    From LiTr with BSD 2-Clause "Simplified" License
@Test
public void writeIFrameWhenInputDataIsAvailable() {
    passthroughTranscoder.sourceTrack = 0;
    passthroughTranscoder.targetTrack = 0;
    passthroughTranscoder.duration = DURATION;
    passthroughTranscoder.targetTrackAdded = true;
    int outputFlags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;

    doReturn(0).when(mediaSource).getSampleTrackIndex();
    doReturn(BUFFER_SIZE).when(mediaSource).readSampleData(outputBuffer, 0);
    doReturn(SAMPLE_TIME).when(mediaSource).getSampleTime();
    doReturn(outputFlags).when(mediaSource).getSampleFlags();

    int result = passthroughTranscoder.processNextFrame();

    verify(outputBufferInfo).set(0, BUFFER_SIZE, SAMPLE_TIME, outputFlags);
    verify(mediaSource).advance();
    verify(mediaTarget).writeSampleData(0, outputBuffer, outputBufferInfo);

    assertThat(passthroughTranscoder.progress, is((float) SAMPLE_TIME / DURATION));
    assertThat(result, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
    assertThat(passthroughTranscoder.lastResult, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
}
 
Example 3
Source File: PassthroughTranscoderShould.java    From LiTr with BSD 2-Clause "Simplified" License
@Test
public void keepProgressAtZeroWhenDurationIsNotAvailable() {
    passthroughTranscoder.sourceTrack = 0;
    passthroughTranscoder.targetTrack = 0;
    passthroughTranscoder.duration = 0;
    passthroughTranscoder.targetTrackAdded = true;
    int outputFlags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;

    doReturn(0).when(mediaSource).getSampleTrackIndex();
    doReturn(BUFFER_SIZE).when(mediaSource).readSampleData(outputBuffer, 0);
    doReturn(SAMPLE_TIME).when(mediaSource).getSampleTime();
    doReturn(outputFlags).when(mediaSource).getSampleFlags();

    int result = passthroughTranscoder.processNextFrame();

    verify(outputBufferInfo).set(0, BUFFER_SIZE, SAMPLE_TIME, outputFlags);
    verify(mediaSource).advance();
    verify(mediaTarget).writeSampleData(0, outputBuffer, outputBufferInfo);

    assertThat(passthroughTranscoder.progress, is(0f));
    assertThat(result, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
    assertThat(passthroughTranscoder.lastResult, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
}
 
Example 4
Source File: Track.java    From VideoCompressor with Apache License 2.0
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
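The delta arithmetic above converts a microsecond difference into the track's timeScale units, with the +500000L term providing round-to-nearest before the integer division truncates. The same conversion as a standalone helper (illustrative only):

// Rescales a duration in microseconds to timeScale ticks, rounding to nearest.
// Example: 33333 us at timeScale 90000 -> (33333 * 90000 + 500000) / 1000000 = 3000 ticks.
static long usToTicks(long durationUs, long timeScale) {
    return (durationUs * timeScale + 500000L) / 1000000L;
}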
 
Example 5
Source File: Track.java    From SiliCompressor with Apache License 2.0
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Example 6
Source File: Track.java    From talk-android with MIT License
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Example 7
Source File: Track.java    From react-native-video-helper with MIT License
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Example 8
Source File: CircularEncoderBuffer.java    From grafika with Apache License 2.0
/**
 * Returns the index of the oldest sync frame.  Valid until the next add().
 * <p>
 * When sending output to a MediaMuxer, start here.
 */
public int getFirstIndex() {
    final int metaLen = mPacketStart.length;

    int index = mMetaTail;
    while (index != mMetaHead) {
        if ((mPacketFlags[index] & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            break;
        }
        index = (index + 1) % metaLen;
    }

    if (index == mMetaHead) {
        Log.w(TAG, "HEY: could not find sync frame in buffer");
        index = -1;
    }
    return index;
}
 
Example 9
Source File: Track.java    From TelePlus-Android with GNU General Public License v2.0
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }
    samplePresentationTimes.add(new SamplePresentationTime(samplePresentationTimes.size(), (bufferInfo.presentationTimeUs * timeScale + 500000L) / 1000000L));
}
 
Example 10
Source File: Mpeg2TsMuxer.java    From DeviceConnect-Android with MIT License
/**
 * Checks whether the buffer contains a key frame.
 *
 * @param bufferInfo information about the video data
 * @return true if it is a key frame, false otherwise
 */
@SuppressWarnings("deprecation")
private boolean isKeyFrame(MediaCodec.BufferInfo bufferInfo) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
    } else {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    }
}
 
Example 11
Source File: Track.java    From Telegram with GNU General Public License v2.0
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }
    samplePresentationTimes.add(new SamplePresentationTime(samplePresentationTimes.size(), (bufferInfo.presentationTimeUs * timeScale + 500000L) / 1000000L));
}
 
Example 12
Source File: MediaMuxerWarpper.java    From VideoRecorder with Apache License 2.0
/**
 * Returns false if the muxer has not started yet (the frame is re-posted to the
 * message queue in its original order to wait for the muxer to start, which
 * usually does not take long). If this re-posting logic is removed, the generated
 * video is broken on some phones, e.g. the Huawei Honor 9: the first key frame is
 * lost, and when the first video frame is not a key frame playback stutters until
 * the next key frame; a key-frame interval shorter than the recorded duration can
 * then leave the muxer unable to stop, causing a crash.
 */
private boolean writeSampleData(int trackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    LogUtil.logd(TAG,"IFrame = "+((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0)+"\t\t size = "+bufferInfo.size);
    if (isMuxerStarted()) {
        mMediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
        return true;
    } else {
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            SystemClock.sleep(10);
        }
        return false;
    }
}
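A sketch of the re-posting pattern the comment describes; the handler and message constant (mHandler, MSG_WRITE_SAMPLE) are hypothetical stand-ins, not names from the project:

// Illustrative only: when the muxer has not started, re-post the same message
// to the end of the queue so samples keep their original order.
if (!writeSampleData(trackIndex, encodedData, bufferInfo)) {
    mHandler.sendMessage(mHandler.obtainMessage(
            MSG_WRITE_SAMPLE, trackIndex, 0, new Object[] {encodedData, bufferInfo}));
}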
 
Example 13
Source File: MediaCodecVideoEncoder.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License
private OutputBufferInfo dequeueOutputBuffer() {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
    if (result >= 0) {
      // MediaCodec doesn't care about Buffer position/remaining/etc so we can
      // mess with them to get a slice and avoid having to pass extra
      // (BufferInfo-related) parameters back to C++.
      ByteBuffer outputBuffer = outputBuffers[result].duplicate();
      outputBuffer.position(info.offset);
      outputBuffer.limit(info.offset + info.size);
      boolean isKeyFrame =
          (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Log.d(TAG, "Sync frame generated");
      }
      return new OutputBufferInfo(
          result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
    } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mediaCodec.getOutputBuffers();
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
      return null;
    }
    throw new RuntimeException("dequeueOutputBuffer: " + result);
  } catch (IllegalStateException e) {
    Log.e(TAG, "dequeueOutputBuffer failed", e);
    return new OutputBufferInfo(-1, null, false, -1);
  }
}
 
Example 14
Source File: MediaCodecVideoEncoder.java    From VideoCRE with MIT License
private void deliverEncodedImage() {
  try {
    int index = mediaCodec.dequeueOutputBuffer(outputBufferInfo,
            OUTPUT_THREAD_DEQUEUE_TIMEOUT_US);
    if (index < 0) {
      if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        callback.onOutputFormatChanged(mediaCodec, mediaCodec.getOutputFormat());
      }
      return;
    }

    ByteBuffer codecOutputBuffer = mediaCodec.getOutputBuffers()[index];
    codecOutputBuffer.position(outputBufferInfo.offset);
    codecOutputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);

    if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      Logging.d(TAG, "Config frame generated. Offset: " + outputBufferInfo.offset
                     + ". Size: " + outputBufferInfo.size);
      configData = ByteBuffer.allocateDirect(outputBufferInfo.size);
      configData.put(codecOutputBuffer);
      // Log a few SPS header bytes to check profile and level.
      String spsData = "";
      for (int i = 0; i < (outputBufferInfo.size < 8 ? outputBufferInfo.size : 8); i++) {
        spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
      }
      Logging.d(TAG, spsData);
    } else {
      reportEncodedFrame(outputBufferInfo.size);

      // Check key frame flag.
      boolean isKeyFrame = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
        // For H.264 key frames, append SPS and PPS NALs at the start.
        if (keyFrameData.capacity() < configData.capacity() + outputBufferInfo.size) {
          // allocate double size
          int newSize = Math.max(keyFrameData.capacity() * 2,
                  configData.capacity() + outputBufferInfo.size);
          keyFrameData = ByteBuffer.allocateDirect(newSize);
        }
        keyFrameData.position(0);
        configData.rewind();
        keyFrameData.put(configData);
        keyFrameData.put(codecOutputBuffer);
        keyFrameData.position(0);
        outputFrame.fill(index, keyFrameData, configData.capacity() + outputBufferInfo.size,
                isKeyFrame, outputBufferInfo.presentationTimeUs);
        callback.onEncodedFrame(outputFrame, outputBufferInfo);
        releaseOutputBuffer(index);
      } else {
        outputFrame.fill(index, codecOutputBuffer, outputBufferInfo.size, isKeyFrame,
                outputBufferInfo.presentationTimeUs);
        callback.onEncodedFrame(outputFrame, outputBufferInfo);
        releaseOutputBuffer(index);
      }
    }
  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverOutput failed", e);
  }
}
 
Example 15
Source File: MainActivity.java    From Android with Apache License 2.0
protected boolean process() throws IOException {
    mMediaExtractor = new MediaExtractor();
    mMediaExtractor.setDataSource(SDCARD_PATH + "/input.mp4");

    int mVideoTrackIndex = -1;
    int framerate = 0;
    for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
        MediaFormat format = mMediaExtractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (!mime.startsWith("video/")) {
            continue;
        }
        framerate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
        mMediaExtractor.selectTrack(i);
        mMediaMuxer = new MediaMuxer(SDCARD_PATH + "/output.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
        mVideoTrackIndex = mMediaMuxer.addTrack(format);
        mMediaMuxer.start();
    }

    if (mMediaMuxer == null) {
        return false;
    }

    BufferInfo info = new BufferInfo();
    info.presentationTimeUs = 0;
    ByteBuffer buffer = ByteBuffer.allocate(500 * 1024);
    while (true) {
        int sampleSize = mMediaExtractor.readSampleData(buffer, 0);
        if (sampleSize < 0) {
            break;
        }
        mMediaExtractor.advance();
        info.offset = 0;
        info.size = sampleSize;
        info.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
        info.presentationTimeUs += 1000 * 1000 / framerate;
        mMediaMuxer.writeSampleData(mVideoTrackIndex, buffer, info);
    }

    mMediaExtractor.release();

    mMediaMuxer.stop();
    mMediaMuxer.release();

    return true;
}
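Note that this example marks every sample as a sync frame and synthesizes timestamps from the nominal frame rate. Rather than forcing the flag, the extractor's own metadata can be carried across; a hedged alternative for the body of the copy loop:

// Alternative sketch: take timing and flags from the extractor itself.
// MediaExtractor.SAMPLE_FLAG_SYNC has the same value as BUFFER_FLAG_SYNC_FRAME.
info.presentationTimeUs = mMediaExtractor.getSampleTime();
info.flags = mMediaExtractor.getSampleFlags();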
 
Example 16
Source File: MediaCodecVideoEncoder.java    From webrtc_android with MIT License
@CalledByNativeUnchecked
OutputBufferInfo dequeueOutputBuffer() {
    checkOnMediaCodecThread();
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
        // Check if this is a config frame and save its configuration data.
        if (result >= 0) {
            boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
            if (isConfigFrame) {
                Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
                configData = ByteBuffer.allocateDirect(info.size);
                outputBuffers[result].position(info.offset);
                outputBuffers[result].limit(info.offset + info.size);
                configData.put(outputBuffers[result]);
                // Log a few SPS header bytes to check profile and level.
                String spsData = "";
                for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) {
                    spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
                }
                Logging.d(TAG, spsData);
                // Release buffer back.
                mediaCodec.releaseOutputBuffer(result, false);
                // Query next output.
                result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            }
        }
        if (result >= 0) {
            // MediaCodec doesn't care about Buffer position/remaining/etc so we can
            // mess with them to get a slice and avoid having to pass extra
            // (BufferInfo-related) parameters back to C++.
            ByteBuffer outputBuffer = outputBuffers[result].duplicate();
            outputBuffer.position(info.offset);
            outputBuffer.limit(info.offset + info.size);
            reportEncodedFrame(info.size);

            // Check key frame flag.
            boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            if (isKeyFrame) {
                Logging.d(TAG, "Sync frame generated");
            }
            if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
                Logging.d(TAG, "Appending config frame of size " + configData.capacity()
                        + " to output buffer with offset " + info.offset + ", size " + info.size);
                // For H.264 key frames, append SPS and PPS NALs at the start.
                ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
                configData.rewind();
                keyFrameBuffer.put(configData);
                keyFrameBuffer.put(outputBuffer);
                keyFrameBuffer.position(0);
                return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
            } else {
                return new OutputBufferInfo(
                        result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
            }
        } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mediaCodec.getOutputBuffers();
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
            return null;
        }
        throw new RuntimeException("dequeueOutputBuffer: " + result);
    } catch (IllegalStateException e) {
        Logging.e(TAG, "dequeueOutputBuffer failed", e);
        return new OutputBufferInfo(-1, null, false, -1);
    }
}
 
Example 17
Source File: FFmpegMuxer.java    From kickflip-android-sdk with Apache License 2.0
private void handleWriteSampleData(MediaCodec encoder, int trackIndex, int bufferIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    super.writeSampleData(encoder, trackIndex, bufferIndex, encodedData, bufferInfo);
    mPacketCount++;

    // Don't write the samples directly if they're CODEC_CONFIG data,
    // or if the muxer has already shut down
    if (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)) {
        if (VERBOSE) Log.i(TAG, "handling BUFFER_FLAG_CODEC_CONFIG for track " + trackIndex);
        if (trackIndex == mVideoTrackIndex) {
            // Capture H.264 SPS + PPS Data
            if (VERBOSE) Log.i(TAG, "Capture SPS + PPS");
            captureH264MetaData(encodedData, bufferInfo);
            releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
            return;
        } else {
            if (VERBOSE) Log.i(TAG, "Ignoring audio CODEC_CONFIG");
            releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
            return;
        }
    }

    if (trackIndex == mAudioTrackIndex && formatRequiresADTS()) {
        addAdtsToByteBuffer(encodedData, bufferInfo);
    }

    // adjust the ByteBuffer values to match BufferInfo (not needed?)
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

    bufferInfo.presentationTimeUs = getNextRelativePts(bufferInfo.presentationTimeUs, trackIndex);

    if (VERBOSE)
        Log.i(TAG, mPacketCount + " PTS " + bufferInfo.presentationTimeUs + " size: " + bufferInfo.size + " " + (trackIndex == mVideoTrackIndex ? "video " : "audio ") + (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) ? "keyframe" : "") + (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) ? " EOS" : ""));
    if (DEBUG_PKTS) writePacketToFile(encodedData, bufferInfo);

    if (!allTracksFinished()) {
        if (trackIndex == mVideoTrackIndex && ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0)) {
            packageH264Keyframe(encodedData, bufferInfo);
            mFFmpeg.writeAVPacketFromEncodedData(mH264Keyframe, 1, bufferInfo.offset, bufferInfo.size + mH264MetaSize, bufferInfo.flags, bufferInfo.presentationTimeUs);
        } else
            mFFmpeg.writeAVPacketFromEncodedData(encodedData, (trackIndex == mVideoTrackIndex ? 1 : 0), bufferInfo.offset, bufferInfo.size, bufferInfo.flags, bufferInfo.presentationTimeUs);
    }
    releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);

    if (allTracksFinished()) {
        /*if (VERBOSE) */ Log.i(TAG, "Shutting down on last frame");
        handleForceStop();
    }
}
 
Example 18
Source File: HardwareVideoEncoder.java    From webrtc_android with MIT License
protected void deliverEncodedImage() {
  outputThreadChecker.checkIsOnValidThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
    if (index < 0) {
      return;
    }

    ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
    codecOutputBuffer.position(info.offset);
    codecOutputBuffer.limit(info.offset + info.size);

    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
      configBuffer = ByteBuffer.allocateDirect(info.size);
      configBuffer.put(codecOutputBuffer);
    } else {
      bitrateAdjuster.reportEncodedFrame(info.size);
      if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
        updateBitrate();
      }

      final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Logging.d(TAG, "Sync frame generated");
      }

      final ByteBuffer frameBuffer;
      if (isKeyFrame && codecType == VideoCodecType.H264) {
        Logging.d(TAG,
            "Prepending config frame of size " + configBuffer.capacity()
                + " to output buffer with offset " + info.offset + ", size " + info.size);
        // For H.264 key frames, prepend SPS and PPS NALs at the start.
        frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
        configBuffer.rewind();
        frameBuffer.put(configBuffer);
        frameBuffer.put(codecOutputBuffer);
        frameBuffer.rewind();
      } else {
        frameBuffer = codecOutputBuffer.slice();
      }

      final EncodedImage.FrameType frameType = isKeyFrame
          ? EncodedImage.FrameType.VideoFrameKey
          : EncodedImage.FrameType.VideoFrameDelta;

      EncodedImage.Builder builder = outputBuilders.poll();
      builder.setBuffer(frameBuffer).setFrameType(frameType);
      // TODO(mellem):  Set codec-specific info.
      callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
    }
    codec.releaseOutputBuffer(index, false);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverOutput failed", e);
  }
}
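Across the WebRTC-derived encoders above, the SPS/PPS configuration buffer captured from a BUFFER_FLAG_CODEC_CONFIG output is prepended to each H.264 key frame so that a receiver joining or seeking mid-stream has the parameter sets needed to decode from that point; the sync-frame flag identifies exactly the frames where such a splice is valid.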