Java Code Examples for android.media.MediaCodec#INFO_OUTPUT_BUFFERS_CHANGED

The following examples show how to use android.media.MediaCodec#INFO_OUTPUT_BUFFERS_CHANGED. The project and source file each example was taken from are listed above it.
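Before the project-specific examples, here is a minimal sketch of the pattern they all share: poll dequeueOutputBuffer(), and when it returns INFO_OUTPUT_BUFFERS_CHANGED, refresh the cached array from getOutputBuffers() before indexing into it again. Note that both INFO_OUTPUT_BUFFERS_CHANGED and getOutputBuffers() are deprecated as of API 21, where getOutputBuffer(int) fetches the buffer for an index directly and this status can be ignored. The class and names below (OutputDrainer, TIMEOUT_US) are illustrative and not taken from any of the listed projects.

import android.media.MediaCodec;

import java.nio.ByteBuffer;

@SuppressWarnings("deprecation")
final class OutputDrainer {
    private static final long TIMEOUT_US = 10_000;          // illustrative timeout (10 ms)

    private final MediaCodec codec;                         // assumed to be configured and started by the caller
    private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    private ByteBuffer[] outputBuffers;                     // cached array, valid only until the codec reports a change

    OutputDrainer(MediaCodec codec) {
        this.codec = codec;
        this.outputBuffers = codec.getOutputBuffers();      // deprecated since API 21, still needed on older devices
    }

    /** Drains at most one output buffer; returns false when no output was available. */
    boolean drainOnce() {
        int index = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
        switch (index) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return false;                               // nothing to drain yet
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                outputBuffers = codec.getOutputBuffers();   // the cached array is stale, re-fetch it
                return true;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                // codec.getOutputFormat() now reflects the new format; handle it as needed
                return true;
            default:
                ByteBuffer buffer = outputBuffers[index];
                // ... consume bufferInfo.size bytes of "buffer" starting at bufferInfo.offset ...
                codec.releaseOutputBuffer(index, false);    // hand the buffer back to the codec
                return true;
        }
    }
}
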
Example 1
Source File: AudioTrackTranscoder.java    From phoenix with Apache License 2.0
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 2
Source File: VideoTrackTranscoder.java    From android-transcoder with Apache License 2.0
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
Example 3
Source File: AudioTransCoder.java    From SimpleVideoEditor with Apache License 2.0
@TargetApi(20)
private void decodeOutput20() throws InterruptedException {
    while (true) {
        int outIndex = decoder.dequeueOutputBuffer(info, TIME_OUT);
        switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d(TAG, "decodeOutput20: INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d(TAG, "decodeOutput20: INFO_OUTPUT_FORMAT_CHANGED");
                MediaFormat mf = decoder.getOutputFormat();
                // start encode worker
                EncodeInputWorker encodeTask = new EncodeInputWorker();
                int sampleRate = mf.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelCount = mf.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                encodeTask.setAudioParams(sampleRate, channelCount);
                WorkRunner.addTaskToBackground(encodeTask);
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d(TAG, "dequeueOutputBuffer timed out!");
                break;
            default:
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    mRawQueue.put(new RawBuffer(null, true, info.presentationTimeUs));
                } else {
                    ByteBuffer buffer = outputBuffers[outIndex];
                    byte[] outData = new byte[info.size];
                    buffer.get(outData, 0, info.size);
                    mRawQueue.put(new RawBuffer(outData, false, info.presentationTimeUs));
                }
                decoder.releaseOutputBuffer(outIndex, false);
                break;
        }
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "Decode output reach eos.");
            break;
        }
    }
}
 
Example 4
Source File: VideoTrackTranscoder.java    From phoenix with Apache License 2.0
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
Example 5
Source File: VideoTrackTranscoder.java    From Pix-Art-Messenger with GNU General Public License v3.0
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Example 6
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0
private long encode() {
	int n = 0;
	long elapsed = 0, now = timestamp();
	int encOutputIndex = 0, encInputIndex = 0;
	BufferInfo info = new BufferInfo();
	ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

	while (elapsed<5000000) {
		// Feeds the encoder with an image
		encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (encInputIndex>=0) {
			check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			encInputBuffers[encInputIndex].clear();
			encInputBuffers[encInputIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.d(TAG,"No buffer available !");
		}

		// Tries to get a NAL unit
		encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			encOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encOutputIndex>=0) {
			mVideo[n] = new byte[info.size];
			encOutputBuffers[encOutputIndex].clear();
			encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
			mEncoder.releaseOutputBuffer(encOutputIndex, false);
			if (n>=NB_ENCODED) {
				flushMediaCodec(mEncoder);
				return elapsed;
			}
		}

		elapsed = timestamp() - now;
	}

	throw new RuntimeException("The encoder is too slow.");

}
 
Example 7
Source File: SdlEncoder.java    From sdl_java_suite with BSD 3-Clause "New" or "Revised" License
/**
 * Extracts all pending data from the encoder
 * <p>
 * If endOfStream is not set, this returns when there is no more data to
 * drain. If it is set, we send EOS to the encoder, and then iterate until
 * we see EOS on the output. Calling this with endOfStream set should be
 * done once, right before stopping the muxer.
 */
public void drainEncoder(boolean endOfStream) {
	final int TIMEOUT_USEC = 10000;

	if(mEncoder == null || (mOutputStream == null && mOutputListener == null)) {
	   return;
	}
	if (endOfStream) {
		  mEncoder.signalEndOfInputStream();
	}

	ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
	while (true) {
		int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo,
				TIMEOUT_USEC);
		if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
			// no output available yet
			if (!endOfStream) {
				trySendVideoKeepalive();
				break; // out of while
			}
		} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			// not expected for an encoder
			encoderOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
			if (mH264CodecSpecificData == null) {
				MediaFormat format = mEncoder.getOutputFormat();
				mH264CodecSpecificData = EncoderUtils.getCodecSpecificData(format);
			} else {
				Log.w(TAG, "Output format change notified more than once, ignoring.");
			}
		} else if (encoderStatus < 0) {
		} else {
			if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
				// If we already retrieve codec specific data via OUTPUT_FORMAT_CHANGED event,
				// we do not need this data.
				if (mH264CodecSpecificData != null) {
					mBufferInfo.size = 0;
				} else {
					Log.i(TAG, "H264 codec specific data not retrieved yet.");
				}
			}

			if (mBufferInfo.size != 0) {
				ByteBuffer encoderOutputBuffer = encoderOutputBuffers[encoderStatus];
				byte[] dataToWrite = null;
				int dataOffset = 0;

				// append SPS and PPS in front of every IDR NAL unit
				if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
						&& mH264CodecSpecificData != null) {
					dataToWrite = new byte[mH264CodecSpecificData.length + mBufferInfo.size];
					System.arraycopy(mH264CodecSpecificData, 0,
							dataToWrite, 0, mH264CodecSpecificData.length);
					dataOffset = mH264CodecSpecificData.length;
				} else {
					dataToWrite = new byte[mBufferInfo.size];
				}

				try {
					encoderOutputBuffer.position(mBufferInfo.offset);
					encoderOutputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);

					encoderOutputBuffer.get(dataToWrite, dataOffset, mBufferInfo.size);

					emitFrame(dataToWrite);
				} catch (Exception e) {}
			}

			mEncoder.releaseOutputBuffer(encoderStatus, false);

			if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
				break; // out of while
			}
		}
	}
}
 
Example 8
Source File: MediaCodecVideoDecoder.java    From webrtc_android with MIT License
@CalledByNativeUnchecked
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
  checkOnMediaCodecThread();
  if (decodeStartTimeMs.isEmpty()) {
    return null;
  }
  // Drain the decoder until receiving a decoded buffer or hitting
  // MediaCodec.INFO_TRY_AGAIN_LATER.
  final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  while (true) {
    final int result =
        mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
    switch (result) {
      case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
        outputBuffers = mediaCodec.getOutputBuffers();
        Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
        if (hasDecodedFirstFrame) {
          throw new RuntimeException("Unexpected output buffer change event.");
        }
        break;
      case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        MediaFormat format = mediaCodec.getOutputFormat();
        Logging.d(TAG, "Decoder format changed: " + format.toString());
        final int newWidth;
        final int newHeight;
        if (format.containsKey(FORMAT_KEY_CROP_LEFT) && format.containsKey(FORMAT_KEY_CROP_RIGHT)
            && format.containsKey(FORMAT_KEY_CROP_BOTTOM)
            && format.containsKey(FORMAT_KEY_CROP_TOP)) {
          newWidth = 1 + format.getInteger(FORMAT_KEY_CROP_RIGHT)
              - format.getInteger(FORMAT_KEY_CROP_LEFT);
          newHeight = 1 + format.getInteger(FORMAT_KEY_CROP_BOTTOM)
              - format.getInteger(FORMAT_KEY_CROP_TOP);
        } else {
          newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
          newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
        }
        if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
          throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
              + ". New " + newWidth + "*" + newHeight);
        }
        width = newWidth;
        height = newHeight;
        if (textureListener != null) {
          textureListener.setSize(width, height);
        }

        if (!useSurface() && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
          colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
          Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
          if (!supportedColorList.contains(colorFormat)) {
            throw new IllegalStateException("Non supported color format: " + colorFormat);
          }
        }
        if (format.containsKey(FORMAT_KEY_STRIDE)) {
          stride = format.getInteger(FORMAT_KEY_STRIDE);
        }
        if (format.containsKey(FORMAT_KEY_SLICE_HEIGHT)) {
          sliceHeight = format.getInteger(FORMAT_KEY_SLICE_HEIGHT);
        }
        Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
        stride = Math.max(width, stride);
        sliceHeight = Math.max(height, sliceHeight);
        break;
      case MediaCodec.INFO_TRY_AGAIN_LATER:
        return null;
      default:
        hasDecodedFirstFrame = true;
        TimeStamps timeStamps = decodeStartTimeMs.remove();
        long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
        if (decodeTimeMs > MAX_DECODE_TIME_MS) {
          Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
                  + ". Q size: " + decodeStartTimeMs.size()
                  + ". Might be caused by resuming H264 decoding after a pause.");
          decodeTimeMs = MAX_DECODE_TIME_MS;
        }
        return new DecodedOutputBuffer(result, info.offset, info.size,
            TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
            timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
    }
  }
}
 
Example 9
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. As always, Android is very inconsistent: I have observed that some
		// encoders will give those parameters through the MediaFormat object (that is the normal behaviour),
		// but some others will not. In that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parses the SPS and PPS; they could be in two different packets and in a different order
					// depending on the phone, so we don't make any assumptions about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example 10
Source File: SurfaceEncoder.java    From AndroidVideoSamples with Apache License 2.0
private void drainEncoder( boolean endOfStream ) {

         if ( endOfStream ) {
            mEncoder.signalEndOfInputStream();
         }

         ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
         while ( true ) {
            int encoderStatus = mEncoder.dequeueOutputBuffer( mBufferInfo, TIMEOUT_USEC );
            if ( encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER ) {
               // no output available yet
               if ( !endOfStream ) {
                  break; // out of while
               } else {
                  Log.d( TAG, "no output available, spinning to await EOS" );
               }
            } else if ( encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ) {
               // not expected for an encoder
               encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if ( encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED ) {
               // should happen before receiving buffers, and should only happen once
               if ( mMuxerStarted ) {
                  throw new RuntimeException( "format changed twice" );
               }
               MediaFormat newFormat = mEncoder.getOutputFormat();
               Log.d( TAG, "encoder output format changed: " + newFormat );

               // now that we have the Magic Goodies, start the muxer
               mTrackIndex = mMuxer.addTrack( newFormat );
               mMuxer.start();
               mMuxerStarted = true;
            } else if ( encoderStatus < 0 ) {
               Log.w( TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus );
               // let's ignore it
            } else {
               ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
               if ( encodedData == null ) {
                  throw new RuntimeException( "encoderOutputBuffer " + encoderStatus + " was null" );
               }

               if ( ( mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG ) != 0 ) {
                  // The codec config data was pulled out and fed to the muxer when we got
                  // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                  Log.d( TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG" );
                  mBufferInfo.size = 0;
               }

               if ( mBufferInfo.size != 0 ) {
                  if ( !mMuxerStarted ) {
                     throw new RuntimeException( "muxer hasn't started" );
                  }

                  // adjust the ByteBuffer values to match BufferInfo (not needed?)
                  encodedData.position( mBufferInfo.offset );
                  encodedData.limit( mBufferInfo.offset + mBufferInfo.size );

                  mMuxer.writeSampleData( mTrackIndex, encodedData, mBufferInfo );
                  Log.d( TAG, "sent " + mBufferInfo.size + " bytes to muxer" );
               }

               mEncoder.releaseOutputBuffer( encoderStatus, false );

               if ( ( mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM ) != 0 ) {
                  if ( !endOfStream ) {
                     Log.w( TAG, "reached end of stream unexpectedly" );
                  } else {
                     Log.d( TAG, "end of stream reached" );
                  }
                  break; // out of while
               }
            }
         }
      }
 
Example 11
Source File: EncodeAndMuxTest.java    From AndroidPlayground with MIT License
/**
 * Extracts all pending data from the encoder.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 */
private void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 12
Source File: AndroidEncoder.java    From cineio-broadcast-android with MIT License
public void drainEncoder(boolean endOfStream) {
        if (endOfStream && VERBOSE) {
            if (isSurfaceInputEncoder()) {
                Log.i(TAG, "final video drain");
            } else {
                Log.i(TAG, "final audio drain");
            }
        }
        synchronized (mMuxer) {
            final int TIMEOUT_USEC = 1000;
            if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ") track: " + mTrackIndex);

            if (endOfStream) {
                if (VERBOSE) Log.d(TAG, "sending EOS to encoder for track " + mTrackIndex);
//                When all target devices honor MediaCodec#signalEndOfInputStream, return to this method
//                if(isSurfaceInputEncoder()){
//                    if (VERBOSE) Log.i(TAG, "signalEndOfInputStream for track " + mTrackIndex);
//                    mEncoder.signalEndOfInputStream();
//                    // Note: This method isn't honored on certain devices including Google Glass
//                }
            }

            ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
            while (true) {
                int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (!endOfStream) {
                        break;      // out of while
                    } else {
                        if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                    }
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = mEncoder.getOutputBuffers();
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // should happen before receiving buffers, and should only happen once
                    MediaFormat newFormat = mEncoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);

                    // now that we have the Magic Goodies, start the muxer
                    mTrackIndex = mMuxer.addTrack(newFormat);
                    Log.d(TAG, "ADDED TRACK INDEX: " + mTrackIndex + " " + this.getClass().getName());
                    // Muxer is responsible for starting/stopping itself
                    // based on knowledge of expected # tracks
                } else if (encoderStatus < 0) {
                    Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                            encoderStatus);
                    // let's ignore it
                } else {
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                                " was null");
                    }

                    if (mBufferInfo.size >= 0) {    // Allow zero length buffer for purpose of sending 0 size video EOS Flag
                        // adjust the ByteBuffer values to match BufferInfo (not needed?)
                        encodedData.position(mBufferInfo.offset);
                        encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                        if (mForceEos) {
                            mBufferInfo.flags = mBufferInfo.flags | MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                            Log.i(TAG, "Forcing EOS");
                        }
                        // It is the muxer's responsibility to release encodedData
                        mMuxer.writeSampleData(mEncoder, mTrackIndex, encoderStatus, encodedData, mBufferInfo);
                        if (VERBOSE) {
                            Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, \t ts=" +
                                    mBufferInfo.presentationTimeUs + "track " + mTrackIndex);
                        }
                    }

                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (!endOfStream) {
                            Log.w(TAG, "reached end of stream unexpectedly");
                        } else {
                            if (VERBOSE)
                                Log.d(TAG, "end of stream reached for track " + mTrackIndex);
                        }
                        break;      // out of while
                    }
                }
            }
            if (endOfStream && VERBOSE) {
                if (isSurfaceInputEncoder()) {
                    Log.i(TAG, "final video drain complete");
                } else {
                    Log.i(TAG, "final audio drain complete");
                }
            }
        }
    }
 
Example 13
Source File: MediaCodecTrackRenderer.java    From Exoplayer_VLC with Apache License 2.0
/**
 * @return True if it may be possible to drain more output data. False otherwise.
 * @throws ExoPlaybackException If an error occurs draining the output buffer.
 */
@SuppressWarnings("deprecation")
private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
    throws ExoPlaybackException {
  if (outputStreamEnded) {
    return false;
  }

  if (outputIndex < 0) {
    outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, 0);
  }
  //Log.d("MediaCodecTrackRenderer", "#####drainOutputBuffer(): --> <-- outputIndex="+outputIndex+", buffer=(offs="+outputBufferInfo.offset+", size="+outputBufferInfo.size+", time="+outputBufferInfo.presentationTimeUs+", flags="+outputBufferInfo.flags+")");
  if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    onOutputFormatChanged(codec.getOutputFormat());
    codecCounters.outputFormatChangedCount++;
    return true;
  } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    outputBuffers = codec.getOutputBuffers();
    codecCounters.outputBuffersChangedCount++;
    return true;
  } else if (outputIndex < 0) {
    return false;
  }

  if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    Log.w("MediaCodecTrackRenderer", "drainOutputBuffer(): receive MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
    outputStreamEnded = true;
    return false;
  }

  int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
  if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
      outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
    if (decodeOnlyIndex != -1) {
      decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
    } else {
      currentPositionUs = outputBufferInfo.presentationTimeUs;
    }
    outputIndex = -1;
    return true;
  }

  return false;
}
 
Example 14
Source File: AvcDecoder.java    From VIA-AI with MIT License
@Override
    public void run() {
        BufferInfo info = new BufferInfo();
        ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();

        boolean isInput = true;
        boolean first = false;
        long startWhen = 0;

        while (!eosReceived) {
            if (isInput) {
                int inputIndex = mDecoder.dequeueInputBuffer(10000);
                if (inputIndex >= 0) {
                    // fill inputBuffers[inputBufferIndex] with valid data
                    ByteBuffer inputBuffer = mDecoder.getInputBuffers()[inputIndex];
//                    ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputIndex);
                    int sampleSize = mExtractor.readSampleData(inputBuffer, 0);

                    if (mExtractor.advance() && sampleSize > 0) {
                        mDecoder.queueInputBuffer(inputIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);

                    } else {
                        Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        mDecoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isInput = false;
                    }
                }
            }

            int outIndex = mDecoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    mDecoder.getOutputBuffers();
                    break;

                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_FORMAT_CHANGED format : " + mDecoder.getOutputFormat());
                    MediaFormat format = mDecoder.getOutputFormat();
                    mOutputHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
                    mOutputWidth = format.getInteger(MediaFormat.KEY_WIDTH);
                    mOutputStride = format.getInteger(MediaFormat.KEY_STRIDE);
                    mOutputColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    break;

                case MediaCodec.INFO_TRY_AGAIN_LATER:
//				Log.d(TAG, "INFO_TRY_AGAIN_LATER");
                    break;

                default:
//                    if (!first) {
//                        startWhen = System.currentTimeMillis();
//                        first = true;
//                    }
//                    try {
//                        long sleepTime = (info.presentationTimeUs / 1000) - (System.currentTimeMillis() - startWhen);
//                        Log.d(TAG, "info.presentationTimeUs : " + (info.presentationTimeUs / 1000) + " playTime: " + (System.currentTimeMillis() - startWhen) + " sleepTime : " + sleepTime);
//
//                        if (sleepTime > 0)
//                            Thread.sleep(sleepTime);
//                    } catch (InterruptedException e) {
//                        // TODO Auto-generated catch block
//                        e.printStackTrace();
//                    }
                    ByteBuffer decodedBuffer = mDecoder.getOutputBuffers()[outIndex];//mDecoder.getOutputBuffer(outIndex);
                    if(frameListener != null) {
                        frameListener.onFrameDecoded(decodedBuffer, info.offset, info.size, mOutputWidth, mOutputHeight, mOutputStride, mOutputColorFormat);
                    }

                    mDecoder.releaseOutputBuffer(outIndex, true /* Surface init */);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }
        if(null!=frameListener) frameListener.onEOS();
        mDecoder.stop();
        mDecoder.release();
        mExtractor.release();
    }
 
Example 15
Source File: VideoEncoderCore.java    From pause-resume-video-recording with Apache License 2.0
/**
 * Extracts all pending data from the encoder and forwards it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream).  We're
 * not recording audio.
 */
public void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;      // out of while
            }
        }
    }
}
 
Example 16
Source File: EncoderDebugger.java    From spydroid-ipcamera with GNU General Public License v3.0
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. As always, Android is very inconsistent: I have observed that some
		// encoders will give those parameters through the MediaFormat object (that is the normal behaviour),
		// but some others will not. In that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parses the SPS and PPS; they could be in two different packets and in a different order
					// depending on the phone, so we don't make any assumptions about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example 17
Source File: AudioDecoder.java    From ssj with GNU General Public License v3.0
/**
 * Decodes audio file into a raw file. This method accepts audio file formats with valid
 * headers (like .mp3, .mp4, and .wav).
 * @param filepath Path of the file to decode.
 * @return Decoded raw audio file.
 * @throws IOException when file cannot be read.
 */
private File decode(String filepath) throws IOException
{
	// Set selected audio file as a source.
	MediaExtractor extractor = new MediaExtractor();
	extractor.setDataSource(filepath);

	// Get audio format.
	MediaFormat format = extractor.getTrackFormat(0);
	String mime = format.getString(MediaFormat.KEY_MIME);

	// Cache necessary audio attributes.
	sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
	channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

	// Create and configure decoder based on audio format.
	MediaCodec decoder = MediaCodec.createDecoderByType(mime);
	decoder.configure(format, null, null, 0);
	decoder.start();

	// Create input/output buffers.
	ByteBuffer[] inputBuffers = decoder.getInputBuffers();
	ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
	MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
	extractor.selectTrack(0);

	File dst = new File(FileCons.SSJ_EXTERNAL_STORAGE + File.separator + "output.raw");
	FileOutputStream f = new FileOutputStream(dst);

	boolean endOfStreamReached = false;

	while (true)
	{
		if (!endOfStreamReached)
		{
			int inputBufferIndex = decoder.dequeueInputBuffer(10 * 1000);
			if (inputBufferIndex >= 0)
			{
				ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
				int sampleSize = extractor.readSampleData(inputBuffer, 0);
				if (sampleSize < 0)
				{
					// Pass empty buffer and the end of stream flag to the codec.
					decoder.queueInputBuffer(inputBufferIndex, 0, 0,
											 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
					endOfStreamReached = true;
				}
				else
				{
					// Pass data-filled buffer to the decoder.
					decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
											 extractor.getSampleTime(), 0);
					extractor.advance();
				}
			}
		}

		int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10 * 1000);
		if (outputBufferIndex >= 0)
		{
			ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
			byte[] data = new byte[bufferInfo.size];
			outputBuffer.get(data);
			outputBuffer.clear();

			if (data.length > 0)
			{
				f.write(data, 0, data.length);
			}
			decoder.releaseOutputBuffer(outputBufferIndex, false);

			if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
			{
				endOfStreamReached = true;
			}
		}
		else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
		{
			outputBuffers = decoder.getOutputBuffers();
		}

		if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
		{
			return dst;
		}
	}
}
 
Example 18
Source File: DecodeEditEncodeTest.java    From Android-MediaCodec-Examples with Apache License 2.0
/**
 * Generates video frames, feeds them into the encoder, and writes the output to the
 * VideoChunks instance.
 */
private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
        VideoChunks output) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int generateIndex = 0;
    int outputCount = 0;
    // Loop until the output side is done.
    boolean inputDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "gen loop");
        // If we're not done submitting frames, generate a new one and submit it.  The
        // eglSwapBuffers call will block if the input is full.
        if (!inputDone) {
            if (generateIndex == NUM_FRAMES) {
                // Send an empty frame with the end-of-stream flag set.
                if (VERBOSE) Log.d(TAG, "signaling input EOS");
                if (WORK_AROUND_BUGS) {
                    // Might drop a frame, but at least we won't crash mediaserver.
                    try { Thread.sleep(500); } catch (InterruptedException ie) {}
                    outputDone = true;
                } else {
                    encoder.signalEndOfInputStream();
                }
                inputDone = true;
            } else {
                generateSurfaceFrame(generateIndex);
                inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                inputSurface.swapBuffers();
            }
            generateIndex++;
        }
        // Check for output from the encoder.  If there's no output yet, we either need to
        // provide more input, or we need to wait for the encoder to work its magic.  We
        // can't actually tell which is the case, so if we can't get an output buffer right
        // away we loop around and see if it wants more input.
        //
        // If we do find output, drain it all before supplying more input.
        while (true) {
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from encoder available");
                break;      // out of while
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // not expected for an encoder
                MediaFormat newFormat = encoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
            } else if (encoderStatus < 0) {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Codec config flag must be set iff this is the first chunk of output.  This
                // may not hold for all codecs, but it appears to be the case for video/avc.
                assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                        outputCount != 0);
                if (info.size != 0) {
                    // Adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);
                    output.addChunk(encodedData, info.flags, info.presentationTimeUs);
                    outputCount++;
                }
                encoder.releaseOutputBuffer(encoderStatus, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                    break;      // out of while
                }
            }
        }
    }
    // One chunk per frame, plus one for the config data.
    assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
}
 
Example 19
Source File: MediaEncoder.java    From Tok-Android with GNU General Public License v3.0
/**
 * drain encoded data and write them to muxer
 */
protected void drain() {
    if (mMediaCodec == null) return;
    ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
    int encoderStatus, count = 0;
    final MediaMuxerWrapper muxer = mWeakMuxer.get();
    if (muxer == null) {
        //        	throw new NullPointerException("muxer is unexpectedly null");
        Log.w(TAG, "muxer is unexpectedly null");
        return;
    }
    LOOP:
    while (mIsCapturing) {
        // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
        encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
            if (!mIsEOS) {
                if (++count > 5) break LOOP;        // out of while
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
            // this should not come when encoding
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
            // this status indicates that the output format of the codec has changed
            // this should come only once, before the actual encoded data
            // but this status never comes on Android 4.3 or lower,
            // and in that case you should handle it when MediaCodec.BUFFER_FLAG_CODEC_CONFIG comes.
            if (mMuxerStarted) {    // a second request is an error
                throw new RuntimeException("format changed twice");
            }
            // get output format from codec and pass them to muxer
            // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it may crash.
            final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
            mTrackIndex = muxer.addTrack(format);
            mMuxerStarted = true;
            if (!muxer.start()) {
                // we should wait until muxer is ready
                synchronized (muxer) {
                    while (!muxer.isStarted()) {
                        try {
                            muxer.wait(100);
                        } catch (final InterruptedException e) {
                            break LOOP;
                        }
                    }
                }
            }
        } else if (encoderStatus < 0) {
            // unexpected status
            if (DEBUG) {
                Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: "
                    + encoderStatus);
            }
        } else {
            final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                // this should never happen... may be a MediaCodec internal error
                throw new RuntimeException(
                    "encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // You should set the output format on the muxer here when you target Android 4.3 or lower,
                // but MediaCodec#getOutputFormat cannot be called here (because INFO_OUTPUT_FORMAT_CHANGED hasn't come yet),
                // therefore we would have to extract and prepare the output format from the buffer data.
                // This sample is for API >= 18 (>= Android 4.3), so we just ignore this flag here.
                if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                // encoded data is ready, clear waiting counter
                count = 0;
                if (!mMuxerStarted) {
                    // muxer is not ready... this would be a programming failure.
                    throw new RuntimeException("drain:muxer hasn't started");
                }
                // write encoded data to the muxer (need to adjust presentationTimeUs).
                mBufferInfo.presentationTimeUs = getPTSUs();
                muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                prevOutputPTSUs = mBufferInfo.presentationTimeUs;
            }
            // return buffer to encoder
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // when EOS comes.
                mIsCapturing = false;
                break;      // out of while
            }
        }
    }
}