Java Code Examples for android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM

The following examples show how to use android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM. Each example comes from an open-source project; the project and source file are noted above each example.
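The flag works in both directions of a codec: pass it to queueInputBuffer() to tell the codec that no further input will arrive, and test it against BufferInfo.flags on the output side to detect when the codec has finished draining. A minimal sketch of that round trip follows; the extractor, codec, and bufferInfo variables stand for an already configured MediaExtractor, a started MediaCodec, and a reusable MediaCodec.BufferInfo, and the names are illustrative only.

// Input side: once the extractor runs out of samples, queue an empty buffer
// carrying BUFFER_FLAG_END_OF_STREAM so the codec knows no more input follows.
int inIndex = codec.dequeueInputBuffer(10_000);
if (inIndex >= 0) {
    ByteBuffer input = codec.getInputBuffer(inIndex);
    int size = extractor.readSampleData(input, 0);
    if (size < 0) {
        codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    } else {
        codec.queueInputBuffer(inIndex, 0, size, extractor.getSampleTime(), 0);
        extractor.advance();
    }
}

// Output side: the flag reappears in BufferInfo.flags once the codec has drained
// everything that was queued before the end-of-stream buffer.
int outIndex = codec.dequeueOutputBuffer(bufferInfo, 10_000);
if (outIndex >= 0) {
    codec.releaseOutputBuffer(outIndex, false);
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // decoding or encoding is complete
    }
}
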
Example 1
Source File: AudioTrackTranscoder.java    From EZFilter with MIT License
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
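
The DRAIN_STATE_* constants returned here are not part of the MediaCodec API; they are the transcoder's own step results (no progress, retry immediately, or one buffer consumed). As a rough illustration of how such a method is driven, a hedged sketch of a stepping loop is shown below; drainExtractor() and drainEncoder() stand for the sibling methods of the same transcoder class, and the loop shape is an approximation rather than the project's exact code.

// Illustrative driver: keep stepping each stage until none of them makes progress.
public boolean stepPipeline() {
    boolean busy = false;
    while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
    int status;
    do {
        status = drainDecoder(0);
        if (status != DRAIN_STATE_NONE) busy = true;
        // Stop after DRAIN_STATE_CONSUMED so the encoder side gets a chance to
        // drain before more decoded audio is queued into it.
    } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
    while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;
    return busy;
}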
 
Example 2
Source File: AudioTrackTranscoder.java    From android-transcoder with Apache License 2.0
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Example 3
Source File: CameraRecorder.java    From AAVT with Apache License 2.0
private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int outputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo,TIME_OUT);
        if(outputIndex>=0){
            if(isMuxStarted&&mVideoEncodeBufferInfo.size>0&&mVideoEncodeBufferInfo.presentationTimeUs>0){
                mMuxer.writeSampleData(mVideoTrack,getOutputBuffer(mVideoEncoder,outputIndex),mVideoEncodeBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(outputIndex,false);
            if((mVideoEncodeBufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                AvLog.d("CameraRecorder get video encode end of stream");
                return true;
            }
        }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            AvLog.d("get video output format changed ->"+mVideoEncoder.getOutputFormat().toString());
            mVideoTrack=mMuxer.addTrack(mVideoEncoder.getOutputFormat());
            mMuxer.start();
            isMuxStarted=true;
        }
    }
    return false;
}
 
Example 4
Source File: AudioTrackTranscoderShould.java    From LiTr with BSD 2-Clause "Simplified" License
@Test
public void finishWhenEosReceived() throws Exception {
    audioTrackTranscoder.lastExtractFrameResult = TrackTranscoder.RESULT_EOS_REACHED;
    audioTrackTranscoder.lastDecodeFrameResult = TrackTranscoder.RESULT_EOS_REACHED;
    audioTrackTranscoder.lastEncodeFrameResult = TrackTranscoder.RESULT_FRAME_PROCESSED;

    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    bufferInfo.flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
    bufferInfo.size = 0;
    bufferInfo.presentationTimeUs = -1L;
    Frame frame = new Frame(BUFFER_INDEX, ByteBuffer.allocate(BUFFER_SIZE), bufferInfo);

    doReturn(BUFFER_INDEX).when(encoder).dequeueOutputFrame(anyLong());
    doReturn(frame).when(encoder).getOutputFrame(anyInt());

    int result = audioTrackTranscoder.processNextFrame();

    assertThat(audioTrackTranscoder.progress, is(1.0f));

    verify(encoder).releaseOutputFrame(eq(BUFFER_INDEX));
    assertThat(result, is(TrackTranscoder.RESULT_EOS_REACHED));
}
 
Example 5
Source File: AudioTrackTranscoder.java    From phoenix with Apache License 2.0
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Example 6
Source File: VideoDecoder.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
private void decodeVideo() throws IllegalStateException {
  ByteBuffer[] inputBuffers = videoDecoder.getInputBuffers();
  startMs = System.currentTimeMillis();
  while (decoding) {
    int inIndex = videoDecoder.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
      ByteBuffer buffer = inputBuffers[inIndex];
      int sampleSize = videoExtractor.readSampleData(buffer, 0);
      if (sampleSize < 0) {
        videoDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
      } else {
        videoDecoder.queueInputBuffer(inIndex, 0, sampleSize, videoExtractor.getSampleTime(), 0);
        videoExtractor.advance();
      }
    }
    int outIndex = videoDecoder.dequeueOutputBuffer(videoInfo, 10000);
    if (outIndex >= 0) {
      while (videoExtractor.getSampleTime() / 1000
          > System.currentTimeMillis() - startMs + seekTime) {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          if (thread != null) thread.interrupt();
          return;
        }
      }
      videoDecoder.releaseOutputBuffer(outIndex, videoInfo.size != 0);
    }
    if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      seekTime = 0;
      Log.i(TAG, "end of file out");
      if (loopMode) {
        loopFileInterface.onReset(true);
      } else {
        videoDecoderInterface.onVideoDecoderFinished();
      }
    }
  }
}
 
Example 7
Source File: MediaVideoPlayer.java    From AudioVideoPlayerSample with Apache License 2.0
/**
	 * @param frameCallback
	 */
	private final void handleOutputVideo(final IFrameCallback frameCallback) {
//    	if (DEBUG) Log.v(TAG, "handleDrainVideo:");
		while (mIsRunning && !mVideoOutputDone) {
			final int decoderStatus = mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC);
			if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
				return;
			} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
				mVideoOutputBuffers = mVideoMediaCodec.getOutputBuffers();
				if (DEBUG) Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED:");
			} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
				final MediaFormat newFormat = mVideoMediaCodec.getOutputFormat();
				if (DEBUG) Log.d(TAG, "video decoder output format changed: " + newFormat);
			} else if (decoderStatus < 0) {
				throw new RuntimeException(
					"unexpected result from video decoder.dequeueOutputBuffer: " + decoderStatus);
			} else { // decoderStatus >= 0
				boolean doRender = false;
				if (mVideoBufferInfo.size > 0) {
					doRender = (mVideoBufferInfo.size != 0)
						&& !internalWriteVideo(mVideoOutputBuffers[decoderStatus],
							0, mVideoBufferInfo.size, mVideoBufferInfo.presentationTimeUs);
					if (doRender) {
						if (!frameCallback.onFrameAvailable(mVideoBufferInfo.presentationTimeUs))
							mVideoStartTime = adjustPresentationTime(mVideoStartTime, mVideoBufferInfo.presentationTimeUs);
					}
				}
				mVideoMediaCodec.releaseOutputBuffer(decoderStatus, doRender);
				if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
					if (DEBUG) Log.d(TAG, "video:output EOS");
					synchronized (mSync) {
						mVideoOutputDone = true;
						mSync.notifyAll();
					}
				}
			}
		}
	}
 
Example 8
Source File: AudioTrackTranscoder.java    From phoenix with Apache License 2.0
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 9
Source File: VideoTrackTranscoder.java    From Pix-Art-Messenger with GNU General Public License v3.0
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 10
Source File: VideoTrackTranscoder.java    From android-transcoder with Apache License 2.0
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 11
Source File: MediaCodecWrapper.java    From ScreenCapture with MIT License
/**
 * Write a media sample to the decoder.
 *
 * A "sample" here refers to a single atomic access unit in the media stream. The definition
 * of "access unit" is dependent on the type of encoding used, but it typically refers to
 * a single frame of video or a few seconds of audio. {@link android.media.MediaExtractor}
 * extracts data from a stream one sample at a time.
 *
 * @param extractor  Instance of {@link android.media.MediaExtractor} wrapping the media.
 *
 * @param presentationTimeUs The time, relative to the beginning of the media stream,
 * at which this buffer should be rendered.
 *
 * @param flags  Flags to pass to the decoder. See {@link MediaCodec#queueInputBuffer(int,
 * int, int, long, int)}
 *
 * @throws MediaCodec.CryptoException
 */
public boolean writeSample(final MediaExtractor extractor,
                           final boolean isSecure,
                           final long presentationTimeUs,
                           int flags) {
    boolean result = false;

    if (!mAvailableInputBuffers.isEmpty()) {
        int index = mAvailableInputBuffers.remove();
        ByteBuffer buffer = mInputBuffers[index];

        // reads the sample from the file using extractor into the buffer
        int size = extractor.readSampleData(buffer, 0);
        if (size <= 0) {
            flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
        }

        // Submit the buffer to the codec for decoding. The presentationTimeUs
        // indicates the position (play time) for the current sample.
        if (!isSecure) {
            mDecoder.queueInputBuffer(index, 0, size, presentationTimeUs, flags);
        } else {
            extractor.getSampleCryptoInfo(sCryptoInfo);
            mDecoder.queueSecureInputBuffer(index, 0, sCryptoInfo, presentationTimeUs, flags);
        }

        result = true;
    }
    return result;
}
 
Example 12
Source File: VideoTrackTranscoder.java    From Pix-Art-Messenger with GNU General Public License v3.0
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
Example 13
Source File: VideoEncoderCore.java    From grafika with Apache License 2.0
/**
 * Extracts all pending data from the encoder and forwards it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream).  We're
 * not recording audio.
 */
public void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
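
A drainEncoder(boolean) method like this is typically called in two ways: with endOfStream == false after every frame submitted to the encoder's input surface, and once with endOfStream == true right before stopping the muxer. The sketch below shows that calling pattern; the surrounding method and field names are illustrative and not grafika's exact code.

// Hypothetical recording loop around a VideoEncoderCore-style encoder.
void onFrameAvailable(long timestampNs) {
    encoderCore.drainEncoder(false);    // pull any pending output first
    // ... render the new frame into the encoder's input surface here ...
    inputSurface.setPresentationTime(timestampNs);
    inputSurface.swapBuffers();         // hands the frame to the encoder
}

void stopRecording() {
    encoderCore.drainEncoder(true);     // signals EOS and drains until
                                        // BUFFER_FLAG_END_OF_STREAM appears
    encoderCore.release();              // then stop/release codec and muxer
}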
 
Example 14
Source File: MediaCodecAudioRenderer.java    From MediaSDK with Apache License 2.0
@Override
protected boolean processOutputBuffer(
    long positionUs,
    long elapsedRealtimeUs,
    MediaCodec codec,
    ByteBuffer buffer,
    int bufferIndex,
    int bufferFlags,
    long bufferPresentationTimeUs,
    boolean isDecodeOnlyBuffer,
    boolean isLastBuffer,
    Format format)
    throws ExoPlaybackException {
  if (codecNeedsEosBufferTimestampWorkaround
      && bufferPresentationTimeUs == 0
      && (bufferFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0
      && lastInputTimeUs != C.TIME_UNSET) {
    bufferPresentationTimeUs = lastInputTimeUs;
  }

  if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
    codec.releaseOutputBuffer(bufferIndex, false);
    return true;
  }

  if (isDecodeOnlyBuffer) {
    codec.releaseOutputBuffer(bufferIndex, false);
    decoderCounters.skippedOutputBufferCount++;
    audioSink.handleDiscontinuity();
    return true;
  }

  try {
    if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
      codec.releaseOutputBuffer(bufferIndex, false);
      decoderCounters.renderedOutputBufferCount++;
      return true;
    }
  } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
    // TODO(internal: b/145658993) Use outputFormat instead.
    throw createRendererException(e, inputFormat);
  }
  return false;
}
 
Example 15
Source File: DecodeEditEncodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Generates video frames, feeds them into the encoder, and writes the output to the
 * VideoChunks instance.
 */
private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
        VideoChunks output) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int generateIndex = 0;
    int outputCount = 0;
    // Loop until the output side is done.
    boolean inputDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "gen loop");
        // If we're not done submitting frames, generate a new one and submit it.  The
        // eglSwapBuffers call will block if the input is full.
        if (!inputDone) {
            if (generateIndex == NUM_FRAMES) {
                // Send an empty frame with the end-of-stream flag set.
                if (VERBOSE) Log.d(TAG, "signaling input EOS");
                if (WORK_AROUND_BUGS) {
                    // Might drop a frame, but at least we won't crash mediaserver.
                    try { Thread.sleep(500); } catch (InterruptedException ie) {}
                    outputDone = true;
                } else {
                    encoder.signalEndOfInputStream();
                }
                inputDone = true;
            } else {
                generateSurfaceFrame(generateIndex);
                inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                inputSurface.swapBuffers();
            }
            generateIndex++;
        }
        // Check for output from the encoder.  If there's no output yet, we either need to
        // provide more input, or we need to wait for the encoder to work its magic.  We
        // can't actually tell which is the case, so if we can't get an output buffer right
        // away we loop around and see if it wants more input.
        //
        // If we do find output, drain it all before supplying more input.
        while (true) {
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from encoder available");
                break;      // out of while
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // not expected for an encoder
                MediaFormat newFormat = encoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
            } else if (encoderStatus < 0) {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Codec config flag must be set iff this is the first chunk of output.  This
                // may not hold for all codecs, but it appears to be the case for video/avc.
                assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                        outputCount != 0);
                if (info.size != 0) {
                    // Adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);
                    output.addChunk(encodedData, info.flags, info.presentationTimeUs);
                    outputCount++;
                }
                encoder.releaseOutputBuffer(encoderStatus, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                    break;      // out of while
                }
            }
        }
    }
    // One chunk per frame, plus one for the config data.
    assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
}
 
Example 16
Source File: MediaCodecAudioRenderer.java    From Telegram-FOSS with GNU General Public License v2.0
@Override
protected boolean processOutputBuffer(
    long positionUs,
    long elapsedRealtimeUs,
    MediaCodec codec,
    ByteBuffer buffer,
    int bufferIndex,
    int bufferFlags,
    long bufferPresentationTimeUs,
    boolean isDecodeOnlyBuffer,
    boolean isLastBuffer,
    Format format)
    throws ExoPlaybackException {
  if (codecNeedsEosBufferTimestampWorkaround
      && bufferPresentationTimeUs == 0
      && (bufferFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0
      && lastInputTimeUs != C.TIME_UNSET) {
    bufferPresentationTimeUs = lastInputTimeUs;
  }

  if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
    codec.releaseOutputBuffer(bufferIndex, false);
    return true;
  }

  if (isDecodeOnlyBuffer) {
    codec.releaseOutputBuffer(bufferIndex, false);
    decoderCounters.skippedOutputBufferCount++;
    audioSink.handleDiscontinuity();
    return true;
  }

  try {
    if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
      codec.releaseOutputBuffer(bufferIndex, false);
      decoderCounters.renderedOutputBufferCount++;
      return true;
    }
  } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
    throw ExoPlaybackException.createForRenderer(e, getIndex());
  }
  return false;
}
 
Example 17
Source File: MediaEncoder.java    From Lassi-Android with MIT License
/**
 * Extracts all pending data that was written and encoded into {@link #mMediaCodec},
 * and forwards it to the muxer.
 * <p>
 * If drainAll is not set, this returns after TIMEOUT_USEC if there is no more data to drain.
 * If drainAll is set, we wait until we see EOS on the output.
 * Calling this with drainAll set should be done once, right before stopping the muxer.
 */
@SuppressLint("LogNotTimber")
@SuppressWarnings("WeakerAccess")
protected void drainOutput(boolean drainAll) {
    LOG.w("DRAINING:", getName(), "EOS:", drainAll);
    if (mMediaCodec == null) {
        LOG.e("drain() was called before prepare() or after releasing.");
        return;
    }
    if (mBuffers == null) {
        mBuffers = new MediaCodecBuffers(mMediaCodec);
    }
    while (true) {
        int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, OUTPUT_TIMEOUT_US);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!drainAll) break; // out of while

        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            mBuffers.onOutputBuffersChanged();

        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mController.isStarted())
                throw new RuntimeException("MediaFormat changed twice.");
            MediaFormat newFormat = mMediaCodec.getOutputFormat();
            mTrackIndex = mController.requestStart(newFormat);
            mOutputBufferPool = new OutputBufferPool(mTrackIndex);
        } else if (encoderStatus < 0) {
            LOG.e("Unexpected result from dequeueOutputBuffer: " + encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = mBuffers.getOutputBuffer(encoderStatus);

            // Codec config means that config data was pulled out and fed to the muxer when we got
            // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
            boolean isCodecConfig = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
            if (!isCodecConfig && mController.isStarted() && mBufferInfo.size != 0) {
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                // Store startPresentationTime and lastPresentationTime, useful for example to
                // detect the mMaxLengthReached and stop recording.
                if (mStartPresentationTimeUs == Long.MIN_VALUE) {
                    mStartPresentationTimeUs = mBufferInfo.presentationTimeUs;
                }
                mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;
                // Pass presentation times as offsets with respect to the mStartPresentationTimeUs.
                // This ensures consistency between audio pts (coming from System.nanoTime()) and
                // video pts (coming from SurfaceTexture) both of which have no meaningful time-base
                // and should be used for offsets only.
                // TODO find a better way, this causes sync issues. (+ note: this sends pts=0 at first)
                // mBufferInfo.presentationTimeUs = mLastPresentationTimeUs - mStartPresentationTimeUs;
                LOG.i("DRAINING:", getName(), "Dispatching write(). Presentation:", mBufferInfo.presentationTimeUs);

                // TODO fix the mBufferInfo being the same, then implement delayed writing in Controller
                // and remove the isStarted() check here.
                OutputBuffer buffer = mOutputBufferPool.get();
                buffer.info = mBufferInfo;
                buffer.trackIndex = mTrackIndex;
                buffer.data = encodedData;
                mController.write(mOutputBufferPool, buffer);
            }
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);

            // Check for the maxLength constraint (with appropriate conditions)
            // Not needed if drainAll because we already were asked to stop
            if (!drainAll
                    && !mMaxLengthReached
                    && mStartPresentationTimeUs != Long.MIN_VALUE
                    && mLastPresentationTimeUs - mStartPresentationTimeUs > mMaxLengthMillis * 1000) {
                LOG.w("DRAINING: Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
                        "mStartPresentationTimeUs:", mStartPresentationTimeUs,
                        "mMaxLengthUs:", mMaxLengthMillis * 1000);
                mMaxLengthReached = true;
                mController.requestStop(mTrackIndex);
                break;
            }

            // Check for the EOS flag so we can release the encoder.
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                LOG.w("DRAINING:", getName(), "Dispatching release().");
                release();
                break;
            }
        }
    }
}
 
Example 18
Source File: AudioTrackTranscoder.java    From LiTr with BSD 2-Clause "Simplified" License
private int writeEncodedOutputFrame() throws TrackTranscoderException {
    int encodeFrameResult = RESULT_FRAME_PROCESSED;

    int tag = encoder.dequeueOutputFrame(0);
    if (tag >= 0) {
        Frame frame = encoder.getOutputFrame(tag);
        if (frame == null) {
            throw new TrackTranscoderException(TrackTranscoderException.Error.NO_FRAME_AVAILABLE);
        }

        if (frame.bufferInfo.size > 0
            && (frame.bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
            mediaMuxer.writeSampleData(targetTrack, frame.buffer, frame.bufferInfo);
            if (duration > 0) {
                progress = ((float) frame.bufferInfo.presentationTimeUs) / duration;
            }
        }

        if ((frame.bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "Encoder produced EoS, we are done");
            progress = 1.0f;
            encodeFrameResult = RESULT_EOS_REACHED;
        }

        encoder.releaseOutputFrame(tag);
    } else {
        switch (tag) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                //                        Log.d(TAG, "Will try getting encoder output buffer later");
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                // TODO for now, we assume that we only get one media format as a first buffer
                MediaFormat outputMediaFormat = encoder.getOutputFormat();
                if (!targetTrackAdded) {
                    targetTrack = mediaMuxer.addTrack(outputMediaFormat, targetTrack);
                    targetTrackAdded = true;
                }
                encodeFrameResult = RESULT_OUTPUT_MEDIA_FORMAT_CHANGED;
                Log.d(TAG, "Encoder output format received " + outputMediaFormat);
                break;
            default:
                Log.e(TAG, "Unhandled value " + tag + " when receiving encoded output frame");
                break;
        }
    }

    return encodeFrameResult;
}
 
Example 19
Source File: VideoTrackTranscoder.java    From EZFilter with MIT License
private int drainEncoder() {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result;
    try {
        result = mEncoder.dequeueOutputBuffer(mBufferInfo, 0);
    } catch (Exception e) {
        e.printStackTrace();

        result = MediaCodec.INFO_TRY_AGAIN_LATER;
    }
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Video output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, CodecUtil.getOutputBuffer(mEncoder, result), mBufferInfo);
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 20
Source File: AudioUtil.java    From VideoProcessor with Apache License 2.0
/**
 * When the audio playback rate needs to be changed, the audio must first be decoded -> rate-adjusted -> re-encoded.
 */
public static void decodeToPCM(VideoProcessor.MediaSource audioSource, String outPath, Integer startTimeUs, Integer endTimeUs) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    audioSource.setDataSource(extractor);
    int audioTrack = VideoUtil.selectTrack(extractor, true);
    extractor.selectTrack(audioTrack);
    if (startTimeUs == null) {
        startTimeUs = 0;
    }
    extractor.seekTo(startTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    MediaFormat oriAudioFormat = extractor.getTrackFormat(audioTrack);
    int maxBufferSize;
    if (oriAudioFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
        maxBufferSize = oriAudioFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    } else {
        maxBufferSize = 100 * 1000;
    }
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Changing the audio rate requires decoding the audio frames again.
    MediaCodec decoder = MediaCodec.createDecoderByType(oriAudioFormat.getString(MediaFormat.KEY_MIME));
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (sampleTimeUs < startTimeUs) {
                        extractor.advance();
                        continue;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        inputBuffer.put(buffer);
                        CL.it(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }

                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    CL.it(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    CL.et(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        CL.it(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}