Java Code Examples for android.media.MediaCodec#INFO_OUTPUT_FORMAT_CHANGED

The following examples show how to use android.media.MediaCodec#INFO_OUTPUT_FORMAT_CHANGED. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
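
Before the project-specific examples below, here is a minimal, self-contained sketch of the synchronous dequeue loop in which INFO_OUTPUT_FORMAT_CHANGED typically appears. It is not taken from any of the listed projects; the class name, the assumption that the codec is already configured and started, and the placeholder consumption step are illustrative only.

import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaFormat;

public final class OutputFormatChangedSketch {

    /** Drains at most one result from the codec; assumes the codec was configured and started. */
    static void drainOnce(MediaCodec codec, MediaCodec.BufferInfo info, long timeoutUs) {
        int index = codec.dequeueOutputBuffer(info, timeoutUs);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The actual output format (width/height, sample rate, csd-0/csd-1, ...) is now known.
            // This is the usual place to add a track to a MediaMuxer with the new format.
            MediaFormat newFormat = codec.getOutputFormat();
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output available within timeoutUs; try again later.
        } else if (index >= 0) {
            ByteBuffer data = codec.getOutputBuffer(index); // API 21+; older code uses getOutputBuffers()
            // ... consume bytes between info.offset and info.offset + info.size ...
            codec.releaseOutputBuffer(index, /* render= */ false);
        }
    }
}

On API 21 and later, the asynchronous MediaCodec.Callback#onOutputFormatChanged(MediaCodec, MediaFormat) delivers the same information without polling.
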
Example 1
Source File: ViEMediaCodecDecoder.java    From webrtc-app-mono with BSD 3-Clause "New" or "Revised" License    6 votes
public void doSomeWork() {
    int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);

    if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        mAvailableInputBufferIndices.add(new Integer(index));
    }

    while (feedInputBuffer()) {}

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);

    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mOutputFormat = mCodec.getOutputFormat();
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mCodecOutputBuffers = mCodec.getOutputBuffers();
    } else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        mAvailableOutputBufferIndices.add(new Integer(index));
        mAvailableOutputBufferInfos.add(info);
    }

    while (drainOutputBuffer()) {}
}
 
Example 2
Source File: AudioTrackTranscoder.java    From phoenix with Apache License 2.0    6 votes
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
            // fall through: a format change should also trigger an immediate retry
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Example 3
Source File: MediaCodecVideoEncoder.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License    5 votes
private OutputBufferInfo dequeueOutputBuffer() {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
    if (result >= 0) {
      // MediaCodec doesn't care about Buffer position/remaining/etc so we can
      // mess with them to get a slice and avoid having to pass extra
      // (BufferInfo-related) parameters back to C++.
      ByteBuffer outputBuffer = outputBuffers[result].duplicate();
      outputBuffer.position(info.offset);
      outputBuffer.limit(info.offset + info.size);
      boolean isKeyFrame =
          (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Log.d(TAG, "Sync frame generated");
      }
      return new OutputBufferInfo(
          result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
    } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mediaCodec.getOutputBuffers();
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
      return null;
    }
    throw new RuntimeException("dequeueOutputBuffer: " + result);
  } catch (IllegalStateException e) {
    Log.e(TAG, "dequeueOutputBuffer failed", e);
    return new OutputBufferInfo(-1, null, false, -1);
  }
}
 
Example 4
Source File: CameraRecorder.java    From AAVT with Apache License 2.0    5 votes
private boolean audioEncodeStep(boolean isEnd){
    if(isRecordAudioStarted){
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer=getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time=(System.currentTimeMillis()-BASE_TIME)*1000;
            int length=mAudioRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo,TIME_OUT);
            if(outputIndex>=0){
                // TODO: handle the issue of the first audio frame having a presentation timestamp of 0
                if(isMuxStarted&&mAudioEncodeBufferInfo.size>0&&mAudioEncodeBufferInfo.presentationTimeUs>0){
                    mMuxer.writeSampleData(mAudioTrack,getOutputBuffer(mAudioEncoder,outputIndex),mAudioEncodeBufferInfo);
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                if(mAudioEncodeBufferInfo.flags==MediaCodec.BUFFER_FLAG_END_OF_STREAM){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    isTryStopAudio=false;
                    isRecordAudioStarted=false;
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                synchronized (VIDEO_LOCK){
                    mAudioTrack=mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                    isRecordVideoStarted=true;
                }
            }
        }
    }
    return false;
}
 
Example 5
Source File: VideoTrackTranscoder.java    From phoenix with Apache License 2.0    5 votes
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
Example 6
Source File: MediaCodecBridge.java    From android-chromium with BSD 2-Clause "Simplified" License    5 votes
@CalledByNative
private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int status = MEDIA_CODEC_ERROR;
    int index = -1;
    try {
        int index_or_status = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
        if (info.presentationTimeUs < mLastPresentationTimeUs) {
            // TODO(qinmin): return a special code through DequeueOutputResult
            // to notify the native code that the frame has a wrong presentation
            // timestamp and should be skipped.
            info.presentationTimeUs = mLastPresentationTimeUs;
        }
        mLastPresentationTimeUs = info.presentationTimeUs;

        if (index_or_status >= 0) { // index!
            status = MEDIA_CODEC_OK;
            index = index_or_status;
        } else if (index_or_status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED;
        } else if (index_or_status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED;
        } else if (index_or_status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
        } else {
            assert(false);
        }
    } catch (IllegalStateException e) {
        Log.e(TAG, "Failed to dequeue output buffer: " + e.toString());
    }

    return new DequeueOutputResult(
            status, index, info.flags, info.offset, info.presentationTimeUs, info.size);
}
 
Example 7
Source File: MediaMoviePlayer.java    From AudioVideoPlayerSample with Apache License 2.0    5 votes
/**
	 * @param frameCallback
	 */
	private final void handleOutputVideo(final IFrameCallback frameCallback) {
//    	if (DEBUG) Log.v(TAG, "handleDrainVideo:");
		while (mIsRunning && !mVideoOutputDone) {
			final int decoderStatus = mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC);
			if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
				return;
			} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
				mVideoOutputBuffers = mVideoMediaCodec.getOutputBuffers();
				if (DEBUG) Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED:");
			} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
				final MediaFormat newFormat = mVideoMediaCodec.getOutputFormat();
				if (DEBUG) Log.d(TAG, "video decoder output format changed: " + newFormat);
			} else if (decoderStatus < 0) {
				throw new RuntimeException(
					"unexpected result from video decoder.dequeueOutputBuffer: " + decoderStatus);
			} else { // decoderStatus >= 0
				boolean doRender = false;
				if (mVideoBufferInfo.size > 0) {
					doRender = (mVideoBufferInfo.size != 0)
						&& !internalWriteVideo(mVideoOutputBuffers[decoderStatus],
							0, mVideoBufferInfo.size, mVideoBufferInfo.presentationTimeUs);
					if (doRender) {
						if (!frameCallback.onFrameAvailable(mVideoBufferInfo.presentationTimeUs))
							mVideoStartTime = adjustPresentationTime(mVideoSync, mVideoStartTime, mVideoBufferInfo.presentationTimeUs);
					}
				}
				mVideoMediaCodec.releaseOutputBuffer(decoderStatus, doRender);
				if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
					if (DEBUG) Log.d(TAG, "video:output EOS");
					synchronized (mVideoTask) {
						mVideoOutputDone = true;
						mVideoTask.notifyAll();
					}
				}
			}
		}
	}
 
Example 8
Source File: CameraToMpegTest.java    From Android-MediaCodec-Examples with Apache License 2.0    4 votes
/**
 * Extracts all pending data from the encoder and forwards it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream).  We're
 * not recording audio.
 */
private void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 9
Source File: VideoEncoderCore.java    From grafika with Apache License 2.0    4 votes
/**
 * Extracts all pending data from the encoder and forwards it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream).  We're
 * not recording audio.
 */
public void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 10
Source File: MediaEncoder.java    From UVCCameraZxing with Apache License 2.0    4 votes
/**
     * drain encoded data and write them to muxer
     */
    protected void drain() {
    	if (mMediaCodec == null) return;
        ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        int encoderStatus, count = 0;
        final MediaMuxerWrapper muxer = mWeakMuxer.get();
        if (muxer == null) {
//        	throw new NullPointerException("muxer is unexpectedly null");
        	Log.w(TAG, "muxer is unexpectedly null");
        	return;
        }
LOOP:	while (mIsCapturing) {
			// get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
            encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait up to 5 counts (= TIMEOUT_USEC x 5 = 50 msec) for data/EOS to arrive
                if (!mIsEOS) {
                	if (++count > 5)
                		break LOOP;		// out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            	if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                // this should not occur when encoding
                encoderOutputBuffers = mMediaCodec.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            	if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
            	// this status indicates that the output format of the codec has changed;
                // it should come only once, before the actual encoded data,
            	// but it never comes on Android 4.3 or earlier;
            	// in that case, handle the format when MediaCodec.BUFFER_FLAG_CODEC_CONFIG arrives.
                if (mMuxerStarted) {	// second time request is error
                    throw new RuntimeException("format changed twice");
                }
				// get the output format from the codec and pass it to the muxer;
				// getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it may crash.
                final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
               	mTrackIndex = muxer.addTrack(format);
               	mMuxerStarted = true;
               	if (!muxer.start()) {
               		// we should wait until muxer is ready
               		synchronized (muxer) {
	               		while (!muxer.isStarted())
						try {
							muxer.wait(100);
						} catch (final InterruptedException e) {
							break LOOP;
						}
               		}
               	}
            } else if (encoderStatus < 0) {
            	// unexpected status
            	if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
            } else {
                final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                	// this never should come...may be a MediaCodec internal error
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                	// You should set the output format on the muxer here when you target Android 4.3 or earlier,
                	// but MediaCodec#getOutputFormat cannot be called here (because INFO_OUTPUT_FORMAT_CHANGED hasn't come yet),
                	// so you would have to extract and prepare the output format from the buffer data instead.
                	// This sample targets API >= 18 (Android 4.3+), so we just ignore this flag here.
					if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
					mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                	// encoded data is ready, clear waiting counter
            		count = 0;
                    if (!mMuxerStarted) {
                    	// muxer is not ready... this would be a programming failure.
                        throw new RuntimeException("drain:muxer hasn't started");
                    }
                    // write encoded data to the muxer (presentationTimeUs needs to be adjusted)
                   	mBufferInfo.presentationTimeUs = getPTSUs();
                   	muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
					prevOutputPTSUs = mBufferInfo.presentationTimeUs;
                }
                // return buffer to encoder
                mMediaCodec.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                	// when EOS comes.
               		mMuxerStarted = mIsCapturing = false;
                    break;      // out of while
                }
            }
        }
    }
 
Example 11
Source File: MediaCodecTrackRenderer.java    From Exoplayer_VLC with Apache License 2.0    4 votes
/**
 * @return True if it may be possible to drain more output data. False otherwise.
 * @throws ExoPlaybackException If an error occurs draining the output buffer.
 */
@SuppressWarnings("deprecation")
private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
    throws ExoPlaybackException {
  if (outputStreamEnded) {
    return false;
  }

  if (outputIndex < 0) {
    outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, 0);
  }
  //Log.d("MediaCodecTrackRenderer", "#####drainOutputBuffer(): --> <-- outputIndex="+outputIndex+", buffer=(offs="+outputBufferInfo.offset+", size="+outputBufferInfo.size+", time="+outputBufferInfo.presentationTimeUs+", flags="+outputBufferInfo.flags+")");
  if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    onOutputFormatChanged(codec.getOutputFormat());
    codecCounters.outputFormatChangedCount++;
    return true;
  } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    outputBuffers = codec.getOutputBuffers();
    codecCounters.outputBuffersChangedCount++;
    return true;
  } else if (outputIndex < 0) {
    return false;
  }

  if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    Log.w("MediaCodecTrackRenderer", "drainOutputBuffer(): receive MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
    outputStreamEnded = true;
    return false;
  }

  int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
  if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
      outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
    if (decodeOnlyIndex != -1) {
      decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
    } else {
      currentPositionUs = outputBufferInfo.presentationTimeUs;
    }
    outputIndex = -1;
    return true;
  }

  return false;
}
 
Example 12
Source File: EncoderDebugger.java    From spydroid-ipcamera with GNU General Public License v3.0    4 votes
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. As always, Android is very inconsistent: I have observed that some
		// encoders will give those parameters through the MediaFormat object (that is the normal behaviour),
		// but some others will not; in that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parses the SPS and PPS; they could be in two different packets and in a different order
					// depending on the phone, so we don't make any assumptions about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null & mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example 13
Source File: GeneratedMovie.java    From pause-resume-video-recording with Apache License 2.0    4 votes
/**
 * Extracts all pending data from the encoder.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 */
protected void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 14
Source File: SoftInputSurfaceActivity.java    From grafika with Apache License 2.0    4 votes
/**
 * Extracts all pending data from the encoder.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 */
private void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mBufferInfo.presentationTimeUs = mFakePts;
                mFakePts += 1000000L / FRAMES_PER_SECOND;

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 15
Source File: AudioRecordThread.java    From PhotoMovie with Apache License 2.0    4 votes
/**
 * When the audio rate needs to change, the audio must first be decoded, then rate-adjusted, then re-encoded.
 */
private void decodeToPCM(MediaCodec decoder, MediaExtractor extractor, MediaFormat oriAudioFormat, String outPath, Long endTimeUs) throws IOException {
    int maxBufferSize = getAudioMaxBufferSize(oriAudioFormat);
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Adjusting the audio rate requires re-decoding the audio frames
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    ByteBuffer[] inputBuffers = null;
    ByteBuffer[] outputBuffers = null;

    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (Build.VERSION.SDK_INT < 21 && decodeInputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = decoder.getOutputBuffers();
                    inputBuffers = decoder.getInputBuffers();
                } else if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= 21) {
                            inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        } else {
                            inputBuffer = inputBuffers[decodeInputIndex];
                        }
                        inputBuffer.put(buffer);
                        MLog.i(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }

                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    MLog.i(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    MLog.e(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                            decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        } else {
                            decodeOutputBuffer = outputBuffers[outputBufferIndex];
                        }
                        MLog.i(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}
 
Example 16
Source File: RemixAudioComposer.java    From GPUVideo-android with MIT License    4 votes
private int drainEncoder(long timeoutUs) {
    if (isEncoderEOS) return DRAIN_STATE_NONE;

    int result = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (actualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            actualOutputFormat = encoder.getOutputFormat();
            muxer.setOutputFormat(SAMPLE_TYPE, actualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            encoderBuffers = new MediaCodecBufferCompatWrapper(encoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (actualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        isEncoderEOS = true;
        bufferInfo.set(0, 0, 0, bufferInfo.flags);
    }
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        encoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (muxCount == 1) {
        muxer.writeSampleData(SAMPLE_TYPE, encoderBuffers.getOutputBuffer(result), bufferInfo);
    }
    if (muxCount < timeScale) {
        muxCount++;
    } else {
        muxCount = 1;
    }

    writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
    encoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 17
Source File: EncoderDebugger.java    From VideoMeeting with Apache License 2.0    4 votes
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {
    long elapsed = 0, now = timestamp();
    ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    byte[] csd = new byte[128];
    int len = 0, p = 4, q = 4;

    while (elapsed < 3000000 && (mSPS == null || mPPS == null)) {

        // Some encoders won't give us the SPS and PPS unless they receive
        // something to encode first...
        int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE);
        if (bufferIndex >= 0) {
            check(inputBuffers[bufferIndex].capacity() >= mData.length,
                    "The input buffer is not big enough.");
            inputBuffers[bufferIndex].clear();
            inputBuffers[bufferIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(bufferIndex, 0, mData.length,
                    timestamp(), 0);
        } else {
            if (VERBOSE)
                L.e("No buffer available !");
        }

        // We are looking for the SPS and the PPS here. As always, Android
        // is very inconsistent: I have observed that some
        // encoders will give those parameters through the MediaFormat
        // object (that is the normal behaviour),
        // but some others will not; in that case we try to find a NAL unit
        // of type 7 or 8 in the byte stream output by the encoder...

        int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);

        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

            // The SPS and PPS should be there
            MediaFormat format = mEncoder.getOutputFormat();
            ByteBuffer spsb = format.getByteBuffer("csd-0");
            ByteBuffer ppsb = format.getByteBuffer("csd-1");
            mSPS = new byte[spsb.capacity() - 4];
            spsb.position(4);
            spsb.get(mSPS, 0, mSPS.length);
            mPPS = new byte[ppsb.capacity() - 4];
            ppsb.position(4);
            ppsb.get(mPPS, 0, mPPS.length);
            break;

        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mEncoder.getOutputBuffers();
        } else if (index >= 0) {

            len = info.size;
            if (len < 128) {
                outputBuffers[index].get(csd, 0, len);
                if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0
                        && csd[3] == 1 && csd[4] == 103) {
                    // Parses the SPS and PPS, they could be in two
                    // different packets and in a different order
                    // depending on the phone so we don't make any
                    // assumption about that
                    while (p < len) {
                        while (!(csd[p + 0] == 0 && csd[p + 1] == 0
                                && csd[p + 2] == 0 && csd[p + 3] == 1)
                                && p + 3 < len)
                            p++;
                        if (p + 3 >= len)
                            p = len;
                        if ((csd[q] & 0x1F) == 7) {
                            mSPS = new byte[p - q];
                            System.arraycopy(csd, q, mSPS, 0, p - q);
                        } else {
                            mPPS = new byte[p - q];
                            System.arraycopy(csd, q, mPPS, 0, p - q);
                        }
                        p += 4;
                        q = p;
                    }
                }
            }
            mEncoder.releaseOutputBuffer(index, false);
        }

        elapsed = timestamp() - now;
    }

    check(mPPS != null & mSPS != null, "Could not determine the SPS & PPS.");
    mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
    mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

    return elapsed;
}
 
Example 18
Source File: BaseMediaEncoderRunable.java    From GLES2_AUDIO_VIDEO_RECODE with Apache License 2.0    4 votes
/**
 * Drains encoded data from mEncoder's output buffers and hands it to mMuxer.
 */
protected void drainEncoder() {
    if (mMediaCodec == null) {
        return;
    }

    //
    int count = 0;

    if (mSohuMediaMuxerManager == null) {
        return;
    }

    // Get the output buffers used to retrieve the encoded data
    ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();

    LOOP:
    while (mIsCapturing) {
        // Dequeue an output buffer index (or an INFO_* status code)
        int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!mIsEndOfStream) {
                if (++count > 5) {
                    // out of while
                    break LOOP;
                }
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // this should not occur when encoding
            // refresh the output buffers used to retrieve the encoded data
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            // get output format from codec and pass them to muxer
            final MediaFormat format = mMediaCodec.getOutputFormat();
            //
            mTrackIndex = mSohuMediaMuxerManager.addTrack(format);
            //
            mMuxerStarted = true;
            //
            if (!mSohuMediaMuxerManager.start()) {
                // we should wait until muxer is ready
                synchronized (mSohuMediaMuxerManager) {
                    while (!mSohuMediaMuxerManager.isStarted())
                        try {
                            mSohuMediaMuxerManager.wait(100);
                        } catch (final InterruptedException e) {
                            break LOOP;
                        }
                }
            }
        } else if (encoderStatus < 0) {
            // unexpected status

        } else {
            // Get the encoded output data
            final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                // this never should come...may be a MediaCodec internal error
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            //
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                mBufferInfo.size = 0;
            }
            //
            if (mBufferInfo.size != 0) {
                // encoded data is ready, clear waiting counter
                count = 0;
                if (!mMuxerStarted) {
                    // muxer is not ready... this would be a programming failure.
                    throw new RuntimeException("drain:muxer hasn't started");
                }
                // write encoded data to the muxer (presentationTimeUs needs to be adjusted)
                mBufferInfo.presentationTimeUs = getPTSUs();
                // write the sample to the muxer
                mSohuMediaMuxerManager.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                prevOutputPTSUs = mBufferInfo.presentationTimeUs;
            }
            // return buffer to encoder
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
            //
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // when EOS comes.
                mIsCapturing = false;
                break;      // out of while
            }
        }
    }
}
 
Example 19
Source File: VideoEncoderCore.java    From OpenGLESRecorder with Apache License 2.0    4 votes
/**
 * Extracts all pending data from the encoder and forwards it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream).  We're
 * not recording audio.
 */
public void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 20
Source File: MediaCodecInputStream.java    From AndroidInstantVideo with Apache License 2.0    4 votes
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
    int readLength;
    int encoderStatus = -1;

    if (mBuffer == null) {
        while (!Thread.interrupted() && !mClosed) {
            synchronized (mMediaCodec) {
                if (mClosed) return 0;
                encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 110000);
                Loggers.d(TAG, "Index: " + encoderStatus + " Time: " + mBufferInfo.presentationTimeUs + " size: " + mBufferInfo.size);
                if (encoderStatus >= 0) {
                    if (Build.VERSION.SDK_INT >= 21) {
                        mBuffer = mMediaCodec.getOutputBuffer(encoderStatus);
                    } else {
                        mBuffer = mMediaCodec.getOutputBuffers()[encoderStatus];
                    }
                    mBuffer.position(0);
                    break;
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    mMediaFormat = mMediaCodec.getOutputFormat();
                    if (mediaFormatCallback != null) {
                        mediaFormatCallback.onChangeMediaFormat(mMediaFormat);
                    }
                    Log.i(TAG, mMediaFormat.toString());
                } else if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    Log.v(TAG, "No buffer available...");
                    return 0;
                } else {
                    Log.e(TAG, "Message: " + encoderStatus);
                    return 0;
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    return 0;
                }
            }
        }
    }


    if (mClosed) throw new IOException("This InputStream was closed");

    readLength = length < mBufferInfo.size - mBuffer.position() ? length :
            mBufferInfo.size - mBuffer.position();
    mBuffer.get(buffer, offset, readLength);
    if (mBuffer.position() >= mBufferInfo.size) {
        mMediaCodec.releaseOutputBuffer(encoderStatus, false);
        mBuffer = null;
    }

    return readLength;
}