android.media.MediaCodec.BufferInfo Java Examples

The following examples show how to use android.media.MediaCodec.BufferInfo. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: MediaCodecDecodeController.java    From AndroidVideoSamples with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the MediaExtractor for mUri, selects the first H.264 ("video/avc")
 * track, and configures/starts an AVC decoder rendering to mSurface.
 * Also initializes mInfo and the cached input/output buffer arrays.
 *
 * @throws IllegalStateException if the data source cannot be opened or the
 *         source contains no video/avc track.
 */
private void setupExtractor() {
   mExtractor = new MediaExtractor();
   try {
      mExtractor.setDataSource( mUri.toString() );
   } catch ( IOException e ) {
      // Fail fast with the cause preserved instead of swallowing the error and
      // letting the extractor fail later with an obscure IllegalStateException.
      throw new IllegalStateException( "Unable to set data source: " + mUri, e );
   }

   int videoIndex = -1;

   // Find and select the first H.264 track; the decoder below is AVC-specific.
   for ( int trackIndex = 0; trackIndex < mExtractor.getTrackCount(); trackIndex++ ) {
      MediaFormat format = mExtractor.getTrackFormat( trackIndex );

      String mime = format.getString( MediaFormat.KEY_MIME );
      if ( "video/avc".equals( mime ) ) {
         mExtractor.selectTrack( trackIndex );
         videoIndex = trackIndex;
         break;
      }
   }

   if ( videoIndex < 0 ) {
      // Previously this silently fell back to track 0, which may not even be a
      // video track; configuring the AVC decoder with it cannot work.
      throw new IllegalStateException( "No video/avc track found in " + mUri );
   }

   mDecoder = MediaCodec.createDecoderByType( "video/avc" );
   mDecoder.configure( mExtractor.getTrackFormat( videoIndex ), mSurface, null, 0 );
   mDecoder.start();

   mInfo = new BufferInfo();

   // Deprecated buffer-array API, kept to stay consistent with the rest of this class.
   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();
}
 
Example #2
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0 5 votes vote down vote up
/** Drains and discards every pending output buffer until the codec reports none left. */
private void flushMediaCodec(MediaCodec mc) {
	BufferInfo bufferInfo = new BufferInfo();
	for (;;) {
		int outputIndex = mc.dequeueOutputBuffer(bufferInfo, 1000000/FRAMERATE);
		if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
			break;
		}
		if (outputIndex >= 0) {
			mc.releaseOutputBuffer(outputIndex, false);
		}
	}
}
 
Example #3
Source File: EncoderDebugger.java    From spydroid-ipcamera with GNU General Public License v3.0 5 votes vote down vote up
/** Pulls and releases queued output buffers until the codec runs dry. */
private void flushMediaCodec(MediaCodec mc) {
	BufferInfo discardInfo = new BufferInfo();
	int bufIdx;
	do {
		bufIdx = mc.dequeueOutputBuffer(discardInfo, 1000000/FRAMERATE);
		if (bufIdx >= 0) {
			mc.releaseOutputBuffer(bufIdx, false);
		}
	} while (bufIdx != MediaCodec.INFO_TRY_AGAIN_LATER);
}
 
Example #4
Source File: GPUEncoder.java    From LiveMultimedia with Apache License 2.0 5 votes vote down vote up
/**
 * Releases the dequeued output buffer (without rendering) when the audio
 * feature is active; otherwise does nothing.
 */
private synchronized void dequeueOutputBuffer(
        MediaCodec codec, ByteBuffer[] outputBuffers,
        int index, MediaCodec.BufferInfo info) {
    if (!mAudioFeatureActive) {
        return;
    }
    codec.releaseOutputBuffer(index, false);
}
 
Example #5
Source File: GPUEncoder.java    From LiveMultimedia with Apache License 2.0 5 votes vote down vote up
/*******************************************************************
* createVideoCodec() configures and starts the H.264 video codec
* (mCodec with mFormat) and allocates mBufferInfo. Configuration
* failures are logged (with the exception) and swallowed.
******************************************************************/
public synchronized void createVideoCodec() {
    try {
        Log.w(TAG, "----->createVideoCodec()<-----");
        mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mCodec.start();
        mBufferInfo = new BufferInfo();
    } catch (IllegalStateException e) {
        // Log the exception itself, not just a message — the original dropped
        // the throwable, losing the actual failure cause.
        Log.e(TAG, "Error in creating video codec failed configuration.", e);
    }
    Log.w(TAG, "----->end createVideoCodec()<-----");
}
 
Example #6
Source File: MediaCodecDecodeController.java    From AndroidVideoSamples with Apache License 2.0 5 votes vote down vote up
/**
 * Seeks the extractor to the given position and resynchronizes the decoder.
 *
 * @param ms       target position in milliseconds (extractor expects microseconds)
 * @param seekMode one of the MediaExtractor.SEEK_TO_* modes
 */
private void seekTo( long ms, int seekMode ) {

   mExtractor.seekTo( ms * 1000, seekMode );
   // BUGFIX: divide before narrowing. The original cast the microsecond sample
   // time to int first, which overflows for positions past ~35 minutes (2^31 us).
   mCurrentPosition = (int) ( mExtractor.getSampleTime() / 1000 );
   mTimer.setTime( mCurrentPosition );
   // Drop any in-flight decoder state and refresh the cached buffer arrays.
   mDecoder.flush();
   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();

   mInfo = new BufferInfo();
}
 
Example #7
Source File: AvcDecoder.java    From cameraMediaCodec with BSD 2-Clause "Simplified" License 5 votes vote down vote up
/**
 * Creates the AVC decoder and moves this instance into the LOADED state.
 * Always returns 0.
 */
public int Init()
{
	Log.i("AvcDecoder", "Init");
	mMC = MediaCodec.createDecoderByType(MIME_TYPE);
	mBI = new BufferInfo();
	mStatus = STATUS_LOADED;
	Log.i("AvcDecoder", "Init, createDecoderByType");
	return 0;
}
 
Example #8
Source File: MediaMuxerWrapper.java    From libcommon with Apache License 2.0 5 votes vote down vote up
/**
 * Forwards the sample to the wrapped muxer; samples arriving after release
 * are silently dropped.
 */
@Override
public void writeSampleData(final int trackIndex,
	@NonNull final ByteBuffer byteBuf, @NonNull final BufferInfo bufferInfo) {

	if (mReleased) {
		return;
	}
	mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
 
Example #9
Source File: MP4Encoder.java    From Bitmp4 with Apache License 2.0 5 votes vote down vote up
/**
 * Initializes the encoding session: an H.264 video encoder, a mono 44.1 kHz
 * AAC audio encoder, and the MP4 muxer writing to outputFilePath.
 *
 * @throws RuntimeException if any codec or the muxer cannot be created.
 */
@Override
protected void onStart() {
  isStarted = true;
  addedFrameCount = 0;
  encodedFrameCount = 0;
  int width = getWidth();
  int height = getHeight();
  try {
    bufferInfo = new BufferInfo();
    // H.264 video encoder.
    videoCodec = MediaCodec.createEncoderByType(MIMETYPE_VIDEO_AVC);
    MediaFormat videoFormat = MediaFormat.createVideoFormat(MIMETYPE_VIDEO_AVC, width, height);
    videoFormat.setInteger(KEY_BIT_RATE, BIT_RATE);
    videoFormat.setInteger(KEY_FRAME_RATE, FRAME_RATE);
    videoFormat.setInteger(KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
    videoFormat.setInteger(KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
    videoCodec.configure(videoFormat, null, null, CONFIGURE_FLAG_ENCODE);
    videoCodec.start();
    // AAC audio encoder.
    audioCodec = MediaCodec.createEncoderByType(MIMETYPE_AUDIO_AAC);
    MediaFormat audioFormat = MediaFormat.createAudioFormat(MIMETYPE_AUDIO_AAC, 44100, 1);
    // HE-AAC profile. The original branched on SDK_INT and used MPEG2ProfileHigh
    // (an MPEG-2 constant) on M+ — it merely shares the value 5 with AACObjectHE,
    // so both branches produced the same integer. Use the value directly with the
    // correct name; behavior is unchanged.
    audioFormat.setInteger(KEY_AAC_PROFILE, 5 /* MediaCodecInfo.CodecProfileLevel.AACObjectHE */);
    audioFormat.setInteger(KEY_BIT_RATE, 65536);
    audioCodec.configure(audioFormat, null, null, CONFIGURE_FLAG_ENCODE);
    audioCodec.start();
    mediaMuxer = new MediaMuxer(outputFilePath, MUXER_OUTPUT_MPEG_4);
  } catch (IOException ioe) {
    throw new RuntimeException("MediaMuxer creation failed", ioe);
  }
}
 
Example #10
Source File: MediaCodecInputStream.java    From AndroidInstantVideo with Apache License 2.0 5 votes vote down vote up
/**
 * Returns a deep copy of the given BufferInfo so the caller can hold onto it
 * after the codec reuses the original.
 */
@NonNull
private static BufferInfo copyBufferInfo(BufferInfo lastBufferInfo) {
    BufferInfo bufferInfo = new BufferInfo();
    // BufferInfo.set(...) is the framework API for exactly this field-by-field
    // copy (offset, size, presentationTimeUs, flags) — use it instead of
    // assigning each field by hand.
    bufferInfo.set(lastBufferInfo.offset, lastBufferInfo.size,
            lastBufferInfo.presentationTimeUs, lastBufferInfo.flags);
    return bufferInfo;
}
 
Example #11
Source File: MediaCodecInputStream.java    From AndroidInstantVideo with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the internally held {@link BufferInfo} instance (mBufferInfo).
 * Note: this is the live object, not a copy — the codec may overwrite its
 * fields on the next dequeue.
 */
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
 
Example #12
Source File: MediaCodecWrapperFactoryImpl.java    From webrtc_android with MIT License 4 votes vote down vote up
/** Delegates directly to {@link MediaCodec#dequeueOutputBuffer(BufferInfo, long)}. */
@Override
public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
  return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
}
 
Example #13
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0 4 votes vote down vote up
/**
 * Repeatedly feeds mData to the encoder and collects encoded units into mVideo
 * until NB_ENCODED of them have been gathered, then flushes the codec and
 * returns the elapsed time (as measured by timestamp()). Gives up after
 * 5 seconds of wall time and throws.
 *
 * @throws RuntimeException if NB_ENCODED units are not produced within 5 s.
 */
private long encode() {
	int n = 0;
	long elapsed = 0, now = timestamp();
	int encOutputIndex = 0, encInputIndex = 0;
	BufferInfo info = new BufferInfo();
	// Deprecated buffer-array API; references must be refreshed on
	// INFO_OUTPUT_BUFFERS_CHANGED (handled below).
	ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

	while (elapsed<5000000) {
		// Feeds the encoder with an image
		encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (encInputIndex>=0) {
			check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			encInputBuffers[encInputIndex].clear();
			encInputBuffers[encInputIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.d(TAG,"No buffer available !");
		}

		// Tries to get a NAL unit
		encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			// Output buffer array was re-allocated; refresh our cached copy.
			encOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encOutputIndex>=0) {
			// Copy the encoded unit out before releasing the buffer to the codec.
			mVideo[n] = new byte[info.size];
			encOutputBuffers[encOutputIndex].clear();
			encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
			mEncoder.releaseOutputBuffer(encOutputIndex, false);
			if (n>=NB_ENCODED) {
				flushMediaCodec(mEncoder);
				return elapsed;
			}
		}

		elapsed = timestamp() - now;
	}

	throw new RuntimeException("The encoder is too slow.");

}
 
Example #14
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0 4 votes vote down vote up
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 * Feeds dummy frames (mData) until the encoder reveals them, either through
 * INFO_OUTPUT_FORMAT_CHANGED ("csd-0"/"csd-1") or inline in the output byte
 * stream (NAL units of type 7/8). On success, mSPS/mPPS and their Base64
 * forms (mB64SPS/mB64PPS) are populated. Returns the elapsed time as
 * measured by timestamp(); gives up (via check) after 3 seconds.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some
		// encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
		// But some others will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			// Strip the 4-byte Annex-B start code (00 00 00 01) from both.
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parses the SPS and PPS, they could be in two different packets and in a different order 
					//depending on the phone so we don't make any assumption about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						// NAL type 7 = SPS; anything else found here is taken to be the PPS.
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example #15
Source File: MediaCodecInputStream.java    From libstreaming with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the internally held {@link BufferInfo} instance (mBufferInfo).
 * Note: this is the live object, not a copy.
 */
public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example #16
Source File: EncoderDebugger.java    From spydroid-ipcamera with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Repeatedly feeds mData to the encoder and collects encoded units into mVideo
 * until NB_ENCODED of them have been gathered, then flushes the codec and
 * returns the elapsed time (as measured by timestamp()). Gives up after
 * 5 seconds of wall time and throws.
 *
 * @throws RuntimeException if NB_ENCODED units are not produced within 5 s.
 */
private long encode() {
	int n = 0;
	long elapsed = 0, now = timestamp();
	int encOutputIndex = 0, encInputIndex = 0;
	BufferInfo info = new BufferInfo();
	// Deprecated buffer-array API; references must be refreshed on
	// INFO_OUTPUT_BUFFERS_CHANGED (handled below).
	ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

	while (elapsed<5000000) {
		// Feeds the encoder with an image
		encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (encInputIndex>=0) {
			check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			encInputBuffers[encInputIndex].clear();
			encInputBuffers[encInputIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.d(TAG,"No buffer available !");
		}

		// Tries to get a NAL unit
		encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			// Output buffer array was re-allocated; refresh our cached copy.
			encOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encOutputIndex>=0) {
			// Copy the encoded unit out before releasing the buffer to the codec.
			mVideo[n] = new byte[info.size];
			encOutputBuffers[encOutputIndex].clear();
			encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
			mEncoder.releaseOutputBuffer(encOutputIndex, false);
			if (n>=NB_ENCODED) {
				flushMediaCodec(mEncoder);
				return elapsed;
			}
		}

		elapsed = timestamp() - now;
	}

	throw new RuntimeException("The encoder is too slow.");

}
 
Example #17
Source File: EncoderDebugger.java    From spydroid-ipcamera with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 * Feeds dummy frames (mData) until the encoder reveals them, either through
 * INFO_OUTPUT_FORMAT_CHANGED ("csd-0"/"csd-1") or inline in the output byte
 * stream (NAL units of type 7/8). On success, mSPS/mPPS and their Base64
 * forms (mB64SPS/mB64PPS) are populated. Returns the elapsed time as
 * measured by timestamp(); gives up (via check) after 3 seconds.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some
		// encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
		// But some others will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			// Strip the 4-byte Annex-B start code (00 00 00 01) from both.
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parses the SPS and PPS, they could be in two different packets and in a different order
					// depending on the phone so we don't make any assumption about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						// NAL type 7 = SPS; anything else found here is taken to be the PPS.
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	// BUGFIX: the original used the non-short-circuit '&' here; use '&&' for
	// consistency with the standard boolean idiom (result is identical for these
	// side-effect-free operands, but '&' reads as a typo).
	check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example #18
Source File: MediaCodecInputStream.java    From spydroid-ipcamera with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Returns the internally held {@link BufferInfo} instance (mBufferInfo).
 * Note: this is the live object, not a copy.
 */
public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example #19
Source File: MediaCodecInputStream.java    From VideoMeeting with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the internally held {@link BufferInfo} instance (mBufferInfo).
 * Note: this is the live object, not a copy.
 */
public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example #20
Source File: GPUEncoder.java    From LiveMultimedia with Apache License 2.0 4 votes vote down vote up
/**
 * Pulls one chunk of captured PCM from the app, submits it to the AAC encoder,
 * and copies any encoded output into mCurrentEncodedAudioData. Logs a warning
 * when the measured output/input byte ratio strays far from the configured
 * bitrate ratio.
 */
@SuppressWarnings("all")
public synchronized void encodeAudio() {
    // Audio encoding is optional; bail out when the feature is disabled.
    if (!mAudioFeatureActive ) {
        return;
    }

    mAudioFrame++;
    // Raw PCM captured by the app — not yet encoded.
    ByteBuffer savedAudioBytes = mApp.pullAudioData();
    byte[] audioBytes =  new byte[savedAudioBytes.capacity()];
    System.arraycopy(savedAudioBytes.array(), 0, audioBytes, 0, audioBytes.length);

    Log.w(TAG, "Encoding audio frame " + mAudioFrame + " into AAC!");
    ByteBuffer[] codecInputBuffers  = mAudioEncoder.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = mAudioEncoder.getOutputBuffers();
    int numBytesSubmitted = 0;
    boolean doneSubmittingInput = false;
    int numBytesDequeued = 0;
    int index;
    // NOTE(review): doneSubmittingInput is always false at this point, so the
    // branch always runs; kept as-is because the structure mirrors the drain
    // loop this method was derived from.
    if (!doneSubmittingInput) {
        index = mAudioEncoder.dequeueInputBuffer(kTimeoutUs /* timeoutUs */);
        if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (numBytesSubmitted >= kNumInputBytes) {
                mAudioEncoder.queueInputBuffer(
                        index,
                        0 /* offset */,
                        0 /* size */,
                        0 /* timeUs */,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                Log.d(TAG, "queued input EOS.");
                doneSubmittingInput = true;
            } else {
                int size = queueInputBuffer(mAudioEncoder, codecInputBuffers, index, audioBytes);
                numBytesSubmitted += size;
                Log.d(TAG, "queued " + size + " bytes of input data.");
            }
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        index = mAudioEncoder.dequeueOutputBuffer(info, kTimeoutUs /* timeoutUs */);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "AUDIO Info try again later!!");
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.d(TAG, "encoder output format changed:  Added track index: " + mAudioTrackIndex);
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = mAudioEncoder.getOutputBuffers();
        } else {
            ByteBuffer encodedData = codecOutputBuffers[index];
            if (encodedData == null) {
                Log.e(TAG, "encoderOutputBuffer " + index + " was null in encoding audio!!");
                return;
            }

            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                info.size = 0;
            }
            // BUGFIX: the original called System.arraycopy(encodedData, ...) — but a
            // ByteBuffer is not an array, so that call throws ArrayStoreException at
            // runtime. Read the encoded bytes through the ByteBuffer API instead.
            mCurrentEncodedAudioData = new byte[info.size];
            encodedData.position(info.offset);
            encodedData.limit(info.offset + info.size);
            encodedData.get(mCurrentEncodedAudioData);
            numBytesDequeued += info.size;
            // BUGFIX: return the buffer to the codec; the original never released it,
            // which eventually starves the encoder of output buffers.
            mAudioEncoder.releaseOutputBuffer(index, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.w(TAG, "dequeued output EOS.");
            }
            Log.w(TAG, "dequeued " + info.size + " bytes of output data.");
        }
    }
    Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
                + "dequeued " + numBytesDequeued + " bytes.");
    int sampleRate   = mAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    int channelCount = mAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int inBitrate    = sampleRate * channelCount * 16;  // bit/sec
    int outBitrate   = mAudioFormat.getInteger(MediaFormat.KEY_BIT_RATE);
    float desiredRatio = (float)outBitrate / (float)inBitrate;
    float actualRatio  = (float)numBytesDequeued / (float)numBytesSubmitted;
    if (actualRatio < 0.9 * desiredRatio || actualRatio > 1.1 * desiredRatio) {
        Log.w(TAG, "desiredRatio = " + desiredRatio
                + ", actualRatio = " + actualRatio);
    }
}
 
Example #21
Source File: AvcDecoder.java    From VIA-AI with MIT License 4 votes vote down vote up
@Override
    // Decode loop: feeds compressed samples from mExtractor into mDecoder and
    // hands every decoded frame to frameListener until output EOS (or eosReceived
    // is set externally), then stops and releases the codec and the extractor.
    public void run() {
        BufferInfo info = new BufferInfo();
        ByteBuffer[] inputBuffers = mDecoder.getInputBuffers(); // cached but unused below; buffers are fetched inline

        boolean isInput = true;
        boolean first = false;
        long startWhen = 0;

        while (!eosReceived) {
            if (isInput) {
                int inputIndex = mDecoder.dequeueInputBuffer(10000);
                if (inputIndex >= 0) {
                    // fill inputBuffers[inputBufferIndex] with valid data
                    ByteBuffer inputBuffer = mDecoder.getInputBuffers()[inputIndex];
//                    ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputIndex);
                    int sampleSize = mExtractor.readSampleData(inputBuffer, 0);

                    if (mExtractor.advance() && sampleSize > 0) {
                        // NOTE(review): advance() runs before getSampleTime(), so the
                        // timestamp queued here belongs to the *next* sample — confirm
                        // this is intended.
                        mDecoder.queueInputBuffer(inputIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);

                    } else {
                        Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        mDecoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isInput = false;
                    }
                }
            }

            int outIndex = mDecoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    mDecoder.getOutputBuffers();
                    break;

                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    // Cache the output geometry; passed to the listener with each frame.
                    Log.d(TAG, "INFO_OUTPUT_FORMAT_CHANGED format : " + mDecoder.getOutputFormat());
                    MediaFormat format = mDecoder.getOutputFormat();
                    mOutputHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
                    mOutputWidth = format.getInteger(MediaFormat.KEY_WIDTH);
                    mOutputStride = format.getInteger(MediaFormat.KEY_STRIDE);
                    mOutputColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    break;

                case MediaCodec.INFO_TRY_AGAIN_LATER:
//				Log.d(TAG, "INFO_TRY_AGAIN_LATER");
                    break;

                default:
//                    if (!first) {
//                        startWhen = System.currentTimeMillis();
//                        first = true;
//                    }
//                    try {
//                        long sleepTime = (info.presentationTimeUs / 1000) - (System.currentTimeMillis() - startWhen);
//                        Log.d(TAG, "info.presentationTimeUs : " + (info.presentationTimeUs / 1000) + " playTime: " + (System.currentTimeMillis() - startWhen) + " sleepTime : " + sleepTime);
//
//                        if (sleepTime > 0)
//                            Thread.sleep(sleepTime);
//                    } catch (InterruptedException e) {
//                        // TODO Auto-generated catch block
//                        e.printStackTrace();
//                    }
                    ByteBuffer decodedBuffer = mDecoder.getOutputBuffers()[outIndex];//mDecoder.getOutputBuffer(outIndex);
                    if(frameListener != null) {
                        frameListener.onFrameDecoded(decodedBuffer, info.offset, info.size, mOutputWidth, mOutputHeight, mOutputStride, mOutputColorFormat);
                    }

                    mDecoder.releaseOutputBuffer(outIndex, true /* Surface init */);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }
        if(null!=frameListener) frameListener.onEOS();
        mDecoder.stop();
        mDecoder.release();
        mExtractor.release();
    }
 
Example #22
Source File: ScreenRecoder.java    From ScreenRecoder with MIT License 4 votes vote down vote up
/**
 * Drains encoded output from the codec into the muxer until mQuitting is set,
 * then stops and releases the muxer. The muxer is started lazily on the first
 * INFO_OUTPUT_FORMAT_CHANGED, which carries the codec-specific data the track
 * needs.
 */
private void stream(MediaCodec codec) {
	BufferInfo info = new BufferInfo();
	ByteBuffer[] buffers = null;

	while (!mQuitting) {
		int index = codec.dequeueOutputBuffer(info, TIMEOUT_USEC);
		if (index >= 0) {
			if (buffers == null) {
				buffers = codec.getOutputBuffers();
			}

			ByteBuffer buffer = buffers[index];
			// Restrict the buffer view to exactly the encoded sample.
			buffer.limit(info.offset + info.size);
			buffer.position(info.offset);

			muxer.writeSampleData(videoTrackIndex, buffer, info);

			codec.releaseOutputBuffer(index, false);
		} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
			if (mMuxerStarted) {
				throw new RuntimeException("format changed twice");
			}
			
			MediaFormat newFormat = codec.getOutputFormat();

			// now that we have the Magic Goodies, start the muxer
			videoTrackIndex = muxer.addTrack(newFormat);
			muxer.start();

			mMuxerStarted = true;

			// Force a re-fetch of the (possibly re-allocated) output buffer array.
			buffers = null;
		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			buffers = null;
		} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
			Log.e("sam", "Codec dequeue buffer timed out.");
		}
	}

	muxer.stop();
	muxer.release();
}
 
Example #23
Source File: AvcEncoder.java    From cameraMediaCodec with BSD 2-Clause "Simplified" License 4 votes vote down vote up
/**
 * Initializes the encoder: records the input color format and output sink,
 * creates the MediaCodec encoder and its reusable BufferInfo, sets up the
 * frame-rate helper (with frame dropping enabled), and enters the LOADED state.
 */
public void Init(int colorformat, AvcEncoderSink sink/*null as default*/)
{
	Log.i("AvcEncoder", "Init");

	// Caller-chosen input color format and (possibly null) output sink.
	mPrimeColorFormat = colorformat;
	mSink = sink;

	// Encoder instance plus the metadata holder reused for every output buffer.
	mMC = MediaCodec.createEncoderByType(MIME_TYPE);
	mBI = new BufferInfo();

	mFpsHelper = new FpsHelper();
	mFpsHelper.SetEnableDrop(true);

	mStatus = STATUS_LOADED;
}
 
Example #24
Source File: MainActivity.java    From Android with Apache License 2.0 4 votes vote down vote up
/**
 * Remuxes the first video track of /input.mp4 into a new MP4 without
 * re-encoding, synthesizing presentation timestamps from the container's
 * declared frame rate.
 *
 * @return true on success, false when no video track was found.
 * @throws IOException if the source cannot be read or the muxer created.
 */
protected boolean process() throws IOException {

    mMediaExtractor = new MediaExtractor();
    mMediaExtractor.setDataSource(SDCARD_PATH+"/input.mp4");

    int mVideoTrackIndex = -1;
    int framerate = 0;
    for(int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
        MediaFormat format = mMediaExtractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        // Guard against tracks without a mime type (getString may return null).
        if(mime == null || !mime.startsWith("video/")) {
            continue;
        }
        framerate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
        mMediaExtractor.selectTrack(i);
        // (Output file name "ouput.mp4" kept verbatim — renaming it would change behavior.)
        mMediaMuxer = new MediaMuxer(SDCARD_PATH+"/ouput.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
        mVideoTrackIndex = mMediaMuxer.addTrack(format);
        mMediaMuxer.start();
        // BUGFIX: only mux the first video track. Without this break, a source with
        // a second video track would create (and leak) another MediaMuxer.
        break;
    }

    if(mMediaMuxer == null) {
        return false;
    }

    BufferInfo info = new BufferInfo();
    info.presentationTimeUs = 0;
    ByteBuffer buffer = ByteBuffer.allocate(500*1024);
    // Pass every sample of the selected track straight through to the muxer.
    while(true) {
        int sampleSize = mMediaExtractor.readSampleData(buffer, 0);
        if(sampleSize < 0) {
            break;
        }
        mMediaExtractor.advance();
        info.offset = 0;
        info.size = sampleSize;
        info.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
        info.presentationTimeUs += 1000*1000/framerate;
        mMediaMuxer.writeSampleData(mVideoTrackIndex,buffer,info);
    }

    mMediaExtractor.release();

    mMediaMuxer.stop();
    mMediaMuxer.release();

    return true;
}
 
Example #25
Source File: MediaCodecInputStream.java    From AndroidInstantVideo with Apache License 2.0 votes vote down vote up
/**
 * Callback receiving the destination buffer, the number of bytes read, and the
 * codec {@link BufferInfo} for that chunk. (Presumably invoked once per read of
 * the enclosing stream — confirm against MediaCodecInputStream's read loop.
 * Note the last parameter is a BufferInfo despite the name "mediaBufferSize".)
 */
void onReadOnce(byte[] buffer, int readSize, BufferInfo mediaBufferSize);