Java Code Examples for android.media.MediaCodec#stop()

The following examples show how to use android.media.MediaCodec#stop(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
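As a quick orientation before the project examples: stop() returns a started MediaCodec to the Uninitialized state, after which the codec can be configured and started again, or freed with release(). The sketch below is not taken from any of the projects listed here; the helper name, AAC format values, and log tag are illustrative assumptions only.

// Hypothetical helper showing where stop() sits in the configure -> start -> stop -> release lifecycle.
static void startAndStopAacEncoder() {
    MediaCodec codec = null;
    try {
        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 1);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
        codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
        // ... feed input buffers and drain output buffers here ...
        codec.stop();    // back to the Uninitialized state; throws IllegalStateException if not started
    } catch (IOException | IllegalStateException e) {
        Log.e("CodecLifecycle", "codec lifecycle failed", e);
    } finally {
        if (codec != null) {
            codec.release();    // stop() does not free the codec; release() must still be called
        }
    }
}
 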
Example 1
Source File: MediaCodecUtils.java    From In77Camera with MIT License
@TargetApi(MIN_API_LEVEL_AUDIO)
public static int checkMediaCodecAudioEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_AUDIO){
        Log.d(TAG, "checkMediaCodecAudioEncoderSupport: Min API is 16");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, TEST_SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, TEST_AUDIO_BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
Example 2
Source File: MediaCodecUtils.java    From Fatigue-Detection with MIT License
@TargetApi(MIN_API_LEVEL_AUDIO)
public static int checkMediaCodecAudioEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_AUDIO){
        Log.d(TAG, "checkMediaCodecAudioEncoderSupport: Min API is 16");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, TEST_SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, TEST_AUDIO_BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
Example 3
Source File: MediaCodecUtils.java    From In77Camera with MIT License
@TargetApi(MIN_API_LEVEL_VIDEO)
public static int checkMediaCodecVideoEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_VIDEO){
        Log.d(TAG, "checkMediaCodecVideoEncoderSupport: Min API is 18");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE_VIDEO, TEST_WIDTH, TEST_HEIGHT);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, TEST_VIDEO_BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, TEST_FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, TEST_IFRAME_INTERVAL);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_VIDEO);
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.createInputSurface();
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
Example 4
Source File: MediaCodecUtils.java    From Fatigue-Detection with MIT License
@TargetApi(MIN_API_LEVEL_VIDEO)
public static int checkMediaCodecVideoEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_VIDEO){
        Log.d(TAG, "checkMediaCodecVideoEncoderSupport: Min API is 18");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE_VIDEO, TEST_WIDTH, TEST_HEIGHT);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, TEST_VIDEO_BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, TEST_FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, TEST_IFRAME_INTERVAL);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_VIDEO);
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.createInputSurface();
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
Example 5
Source File: ScreenRecoder.java    From ScreenRecoder with MIT License
@Override
public void run() {
	MediaFormat format = MediaFormat.createVideoFormat("video/avc",
			mWidth, mHeight);
	format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
			MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
	format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
	format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
	format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
			I_FRAME_INTERVAL);

	MediaCodec codec = MediaCodec.createEncoderByType("video/avc");
	codec.configure(format, null, null,
			MediaCodec.CONFIGURE_FLAG_ENCODE);
	Surface surface = codec.createInputSurface();
	codec.start();

	VirtualDisplay virtualDisplay = mDisplayManager
			.createVirtualDisplay(DISPLAY_NAME, mWidth, mHeight,
					mDensityDpi, surface,
					DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC);
	
	if (virtualDisplay != null) {
		stream(codec);
		virtualDisplay.release();
	}

	codec.signalEndOfInputStream();
	codec.stop();
}
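
The run() method above stops the codec but never calls release(), and both signalEndOfInputStream() and stop() throw IllegalStateException when the codec is not in the Executing state. A more defensive ending for the same method (a sketch, not code from the ScreenRecoder project; it reuses the example's codec variable and assumes a TAG constant) could look like:

try {
    codec.signalEndOfInputStream();
    codec.stop();
} catch (IllegalStateException e) {
    Log.w(TAG, "codec was not executing", e);
} finally {
    codec.release();    // stop() alone does not free the codec's resources
}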
 
Example 6
Source File: DecodeEditEncodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Checks the video file to see if the contents match our expectations.  We decode the
 * video to a Surface and check the pixels with GL.
 */
private void checkVideoFile(VideoChunks inputData) {
    OutputSurface surface = null;
    MediaCodec decoder = null;
    mLargestColorDelta = -1;
    if (VERBOSE) Log.d(TAG, "checkVideoFile");
    try {
        surface = new OutputSurface(mWidth, mHeight);
        MediaFormat format = inputData.getMediaFormat();
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        decoder.configure(format, surface.getSurface(), null, 0);
        decoder.start();
        int badFrames = checkVideoData(inputData, decoder, surface);
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    } finally {
        if (surface != null) {
            surface.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 7
Source File: HWEncoder.java    From LiveMultimedia with Apache License 2.0
/**
 * Tests encoding and subsequently decoding video from frames generated into a buffer.
 * <p>
 * We encode several frames of a video test pattern using MediaCodec, then decode the
 * output with MediaCodec and do some simple checks.
 */
private void encodeDecodeVideoFromSurfaceToSurface() throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;

    mLargestColorDelta = -1;

    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

        int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create the output surface.
        outputSurface = new OutputSurface(mWidth, mHeight);

        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        decoder.configure(format, outputSurface.getSurface(), null, 0);
        decoder.start();

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.  Request a Surface to use for input.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        encoder.start();

        doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, colorFormat, decoder, outputSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }

        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 8
Source File: VideoThumbnailsExtractor.java    From mollyim-android with GNU General Public License v3.0
static void extractThumbnails(final @NonNull MediaInput input,
                              final int thumbnailCount,
                              final int thumbnailResolution,
                              final @NonNull Callback callback)
{
  MediaExtractor extractor     = null;
  MediaCodec     decoder       = null;
  OutputSurface  outputSurface = null;
  try {
    extractor = input.createExtractor();
    MediaFormat mediaFormat = null;
    for (int index = 0; index < extractor.getTrackCount(); ++index) {
      if (extractor.getTrackFormat(index).getString(MediaFormat.KEY_MIME).startsWith("video/")) {
        extractor.selectTrack(index);
        mediaFormat = extractor.getTrackFormat(index);
        break;
      }
    }
    if (mediaFormat != null) {
      final String mime     = mediaFormat.getString(MediaFormat.KEY_MIME);
      final int    rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
      final int    width    = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
      final int    height   = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
      final int    outputWidth;
      final int    outputHeight;

      if (width < height) {
        outputWidth  = thumbnailResolution;
        outputHeight = height * outputWidth / width;
      } else {
        outputHeight = thumbnailResolution;
        outputWidth  = width * outputHeight / height;
      }

      final int outputWidthRotated;
      final int outputHeightRotated;

      if ((rotation % 180 == 90)) {
        //noinspection SuspiciousNameCombination
        outputWidthRotated = outputHeight;
        //noinspection SuspiciousNameCombination
        outputHeightRotated = outputWidth;
      } else {
        outputWidthRotated  = outputWidth;
        outputHeightRotated = outputHeight;
      }

      Log.i(TAG, "video: " + width + "x" + height + " " + rotation);
      Log.i(TAG, "output: " + outputWidthRotated + "x" + outputHeightRotated);

      outputSurface = new OutputSurface(outputWidthRotated, outputHeightRotated, true);

      decoder = MediaCodec.createDecoderByType(mime);
      decoder.configure(mediaFormat, outputSurface.getSurface(), null, 0);
      decoder.start();

      long duration = 0;

      if (mediaFormat.containsKey(MediaFormat.KEY_DURATION)) {
        duration = mediaFormat.getLong(MediaFormat.KEY_DURATION);
      } else {
        Log.w(TAG, "Video is missing duration!");
      }

      callback.durationKnown(duration);

      doExtract(extractor, decoder, outputSurface, outputWidthRotated, outputHeightRotated, duration, thumbnailCount, callback);
    }
  } catch (IOException | TranscodingException e) {
    Log.w(TAG, e);
    callback.failed();
  } finally {
    if (outputSurface != null) {
      outputSurface.release();
    }
    if (decoder != null) {
      decoder.stop();
      decoder.release();
    }
    if (extractor != null) {
      extractor.release();
    }
  }
}
 
Example 9
Source File: HWEncoder.java    From LiveMultimedia with Apache License 2.0
private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

        int colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
        if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();

        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);

        doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }

        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 10
Source File: VideoResampler.java    From AndroidVideoSamples with Apache License 2.0
private void feedClipToEncoder( SamplerClip clip ) {

      mLastSampleTime = 0;

      MediaCodec decoder = null;

      MediaExtractor extractor = setupExtractorForClip(clip);
      
      if(extractor == null ) {
         return;
      }
      
      int trackIndex = getVideoTrackIndex(extractor);
      extractor.selectTrack( trackIndex );

      MediaFormat clipFormat = extractor.getTrackFormat( trackIndex );

      if ( clip.getStartTime() != -1 ) {
         extractor.seekTo( clip.getStartTime() * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC );
         clip.setStartTime( extractor.getSampleTime() / 1000 );
      }
      
      try {
         decoder = MediaCodec.createDecoderByType( MediaHelper.MIME_TYPE_AVC );
         mOutputSurface = new OutputSurface();

         decoder.configure( clipFormat, mOutputSurface.getSurface(), null, 0 );
         decoder.start();

         resampleVideo( extractor, decoder, clip );

      } finally {

         if ( mOutputSurface != null ) {
            mOutputSurface.release();
         }
         if ( decoder != null ) {
            decoder.stop();
            decoder.release();
         }

         if ( extractor != null ) {
            extractor.release();
            extractor = null;
         }
      }
   }
 
Example 11
Source File: EncodedAudioRecorder.java    From speechutils with Apache License 2.0
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private int recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    int status = -1;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numDequeueOutputBufferTimeout = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_INPUT_BUFFER_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                numDequeueOutputBufferTimeout++;
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numDequeueOutputBufferTimeout);
                if (numDequeueOutputBufferTimeout > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                numDequeueOutputBufferTimeout = 0;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    status = 0;
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
        Log.i("stopped and released codec");
    }
    return status;
}
 
Example 12
Source File: EncodedAudioRecorder.java    From AlexaAndroid with GNU General Public License v2.0
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
Example 13
Source File: EncodeDecodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Tests encoding and subsequently decoding video from frames generated into a buffer.
 * <p>
 * We encode several frames of a video test pattern using MediaCodec, then decode the
 * output with MediaCodec and do some simple checks.
 */
private void encodeDecodeVideoFromSurfaceToSurface() throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    mLargestColorDelta = -1;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());
        int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        // Create the output surface.
        outputSurface = new OutputSurface(mWidth, mHeight);
        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        decoder.configure(format, outputSurface.getSurface(), null, 0);
        decoder.start();
        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.  Request a Surface to use for input.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        encoder.start();
        doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, colorFormat, decoder, outputSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 14
Source File: EncodeDecodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Tests encoding and subsequently decoding video from frames generated into a buffer.
 * <p>
 * We encode several frames of a video test pattern using MediaCodec, then decode the
 * output with MediaCodec and do some simple checks.
 * <p>
 * See http://b.android.com/37769 for a discussion of input format pitfalls.
 */
private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    mLargestColorDelta = -1;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());
        int colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
        if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 15
Source File: DecodeEditEncodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Edits a video file, saving the contents to a new file.  This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats.  By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData) {
    if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try {
        MediaFormat inputFormat = inputData.getMediaFormat();
        // Create an encoder format that matches the input format.  (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
        outputData.setMediaFormat(outputFormat);
        encoder = MediaCodec.createEncoderByType(MIME_TYPE);
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();
        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.changeFragmentShader(FRAGMENT_SHADER);
        decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        decoder.start();
        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    } finally {
        if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
    }
    return outputData;
}
 
Example 16
Source File: DecodeEditEncodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Generates a test video file, saving it as VideoChunks.  We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private boolean generateVideoFile(VideoChunks output) {
    if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        output.setMediaFormat(format);
        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();
        generateVideoData(encoder, inputSurface, output);
    } finally {
        if (encoder != null) {
            if (VERBOSE) Log.d(TAG, "releasing encoder");
            encoder.stop();
            encoder.release();
            if (VERBOSE) Log.d(TAG, "released encoder");
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
    }
    return true;
}
 
Example 17
Source File: HeifReader.java    From heifreader with MIT License
private static void renderHevcImage(ByteBuffer bitstream, ImageInfo info, Surface surface) {
    long beginTime = SystemClock.elapsedRealtimeNanos();

    // configure HEVC decoder
    MediaCodec decoder = configureDecoder(info, bitstream.limit(), surface);
    MediaFormat outputFormat = decoder.getOutputFormat();
    Log.d(TAG, "HEVC output-format=" + outputFormat);

    decoder.start();
    try {
        // set bitstream to decoder
        int inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        ByteBuffer inBuffer = decoder.getInputBuffer(inputBufferId);
        inBuffer.put(bitstream);
        decoder.queueInputBuffer(inputBufferId, 0, bitstream.limit(), 0, 0);

        // notify end of stream
        inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        decoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

        // get decoded image
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int outputBufferId = decoder.dequeueOutputBuffer(bufferInfo, -1);
            if (outputBufferId >= 0) {
                decoder.releaseOutputBuffer(outputBufferId, true);
                break;
            } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                outputFormat = decoder.getOutputFormat();
                Log.d(TAG, "HEVC output-format=" + outputFormat);
            } else {
                Log.d(TAG, "HEVC dequeueOutputBuffer return " + outputBufferId);
            }
        }
        decoder.flush();
    } finally {
        decoder.stop();
        decoder.release();
    }
    long endTime = SystemClock.elapsedRealtimeNanos();
    Log.i(TAG, "HEVC decoding elapsed=" + (endTime - beginTime) / 1000000.f + "[msec]");
}
 
Example 18
Source File: AudioUtil.java    From VideoProcessor with Apache License 2.0
/**
 * When the audio speed needs to be changed, the audio must first be decoded -> speed-changed -> re-encoded.
 */
public static void decodeToPCM(VideoProcessor.MediaSource audioSource, String outPath, Integer startTimeUs, Integer endTimeUs) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    audioSource.setDataSource(extractor);
    int audioTrack = VideoUtil.selectTrack(extractor, true);
    extractor.selectTrack(audioTrack);
    if (startTimeUs == null) {
        startTimeUs = 0;
    }
    extractor.seekTo(startTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    MediaFormat oriAudioFormat = extractor.getTrackFormat(audioTrack);
    int maxBufferSize;
    if (oriAudioFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
        maxBufferSize = oriAudioFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    } else {
        maxBufferSize = 100 * 1000;
    }
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Changing the audio speed requires re-decoding the audio frames
    MediaCodec decoder = MediaCodec.createDecoderByType(oriAudioFormat.getString(MediaFormat.KEY_MIME));
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (sampleTimeUs < startTimeUs) {
                        extractor.advance();
                        continue;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        inputBuffer.put(buffer);
                        CL.it(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }

                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    CL.it(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    CL.et(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        CL.it(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}
 
Example 19
Source File: AudioRecordThread.java    From PhotoMovie with Apache License 2.0
/**
 * When the audio speed needs to be changed, the audio must first be decoded -> speed-changed -> re-encoded.
 */
private void decodeToPCM(MediaCodec decoder, MediaExtractor extractor, MediaFormat oriAudioFormat, String outPath, Long endTimeUs) throws IOException {
    int maxBufferSize = getAudioMaxBufferSize(oriAudioFormat);
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Changing the audio speed requires re-decoding the audio frames
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    ByteBuffer[] inputBuffers = null;
    ByteBuffer[] outputBuffers = null;

    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (Build.VERSION.SDK_INT < 21 && decodeInputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = decoder.getOutputBuffers();
                    inputBuffers = decoder.getInputBuffers();
                } else if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= 21) {
                            inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        } else {
                            inputBuffer = inputBuffers[decodeInputIndex];
                        }
                        inputBuffer.put(buffer);
                        MLog.i(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }

                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    MLog.i(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    MLog.e(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                            decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        } else {
                            decodeOutputBuffer = outputBuffers[outputBufferIndex];
                        }
                        MLog.i(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}