Java Code Examples for android.media.MediaCodec#BUFFER_FLAG_KEY_FRAME

The following examples show how to use android.media.MediaCodec#BUFFER_FLAG_KEY_FRAME. Each example notes the original project and source file it was taken from.
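A note before the examples: BUFFER_FLAG_KEY_FRAME is a single bit inside MediaCodec.BufferInfo.flags, so it should be tested with a bitwise AND rather than an equality comparison, since several flag bits can be set on the same buffer. Below is a minimal sketch of the canonical check (the class and method names are illustrative, not from any project in this list):

import android.media.MediaCodec;
import android.os.Build;

final class KeyFrameUtil {

    /**
     * Returns true when the encoded buffer carries a key frame.
     * BufferInfo.flags is a bit field, so test the bit with & rather than ==.
     */
    @SuppressWarnings("deprecation")
    static boolean isKeyFrame(MediaCodec.BufferInfo info) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            return (info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        }
        // BUFFER_FLAG_SYNC_FRAME is the pre-API-21 name for the same bit.
        return (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    }
}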
Example 1
Source File: RecordController.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
public void recordVideo(ByteBuffer videoBuffer, MediaCodec.BufferInfo videoInfo) {
  if (status == Status.STARTED
      && (videoInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0 // flags is a bit field; test the bit, not equality
      && videoFormat != null
      && audioFormat != null) {
    videoTrack = mediaMuxer.addTrack(videoFormat);
    audioTrack = mediaMuxer.addTrack(audioFormat);
    mediaMuxer.start();
    status = Status.RECORDING;
    if (listener != null) listener.onStatusChange(status);
  } else if (status == Status.RESUMED && (videoInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
    status = Status.RECORDING;
    if (listener != null) listener.onStatusChange(status);
  }
  if (status == Status.RECORDING) {
    updateFormat(this.videoInfo, videoInfo);
    mediaMuxer.writeSampleData(videoTrack, videoBuffer, this.videoInfo);
  }
}
 
Example 2
Source File: AudioComposer.java    From Mp4Composer-android with MIT License
@SuppressLint("Assert")
public boolean stepPipeline() {
    if (isEOS) return false;
    int trackIndex = mediaExtractor.getSampleTrackIndex();
    if (trackIndex < 0) {
        buffer.clear();
        bufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        muxRender.writeSampleData(sampleType, buffer, bufferInfo);
        isEOS = true;
        return true;
    }
    if (trackIndex != this.trackIndex) return false;

    buffer.clear();
    int sampleSize = mediaExtractor.readSampleData(buffer, 0);
    if (sampleSize > bufferSize) {
        logger.warning(TAG, "Sample size smaller than buffer size, resizing buffer: " + sampleSize);
        bufferSize = 2 * sampleSize;
        buffer = ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.nativeOrder());
    }
    boolean isKeyFrame = (mediaExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
    int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;

    if (mediaExtractor.getSampleTime() >= trimStartUs && (mediaExtractor.getSampleTime() <= trimEndUs || trimEndUs == -1)) {
        bufferInfo.set(0, sampleSize, mediaExtractor.getSampleTime(), flags);
        muxRender.writeSampleData(sampleType, buffer, bufferInfo);
    }

    writtenPresentationTimeUs = mediaExtractor.getSampleTime();
    mediaExtractor.advance();
    return true;
}
 
Example 3
Source File: RtspSession.java    From DeviceConnect-Android with MIT License
@Override
public void onWriteVideoData(ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    VideoStream videoStream = getVideoStream();
    if (videoStream != null) {
        long pts = getPresentationTime(bufferInfo);

        boolean isConfigFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
        if (isConfigFrame) {
            if (mConfigData == null || mConfigData.length < bufferInfo.size) {
                mConfigData = new byte[bufferInfo.size];
            }
            encodedData.position(bufferInfo.offset);
            encodedData.limit(bufferInfo.offset + bufferInfo.size);
            encodedData.get(mConfigData, 0, bufferInfo.size);
            mConfigLength = bufferInfo.size;
        }

        boolean isKeyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        if (isKeyFrame && mConfigData != null) {
            // Send the H.264 SPS/PPS before every key frame.
            videoStream.writePacket(mConfigData, mConfigLength, pts);
        }

        if (mVideoBuffer.length < bufferInfo.size) {
            mVideoBuffer = new byte[bufferInfo.size];
        }
        encodedData.position(bufferInfo.offset);
        encodedData.limit(bufferInfo.offset + bufferInfo.size);
        encodedData.get(mVideoBuffer, 0, bufferInfo.size);

        videoStream.writePacket(mVideoBuffer, bufferInfo.size, pts);
    }
}
 
Example 4
Source File: Mpeg2TsMuxer.java    From DeviceConnect-Android with MIT License
/**
 * Checks whether the given buffer contains a key frame.
 *
 * @param bufferInfo info describing the encoded video data
 * @return true if the buffer is a key frame, false otherwise
 */
@SuppressWarnings("deprecation")
private boolean isKeyFrame(MediaCodec.BufferInfo bufferInfo) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
    } else {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    }
}
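Note: BUFFER_FLAG_SYNC_FRAME is simply the pre-API-21 name for the same flag (both constants have the value 1), so the two branches above are functionally identical; the version check only avoids referencing the deprecated constant on newer SDKs.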
 
Example 5
Source File: VIARecorder.java    From VIA-AI with MIT License
public VIARecorder(String path, String prefix, int width, int height, int bitrate, int fps, long periodicTimeInSec, Mode mode) {
        mMode = mode;
        mWidth = width;
        mHeight = height;
        mPrefix = prefix;
        if(mMode.equals(Mode.YUV420SemiPlanar)) {
            COLOR_FORMAT = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        } else if (mMode.equals(Mode.Surface)) {
            COLOR_FORMAT = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
        }

        AvcEncoder.EncodeParameters parameters = new AvcEncoder.EncodeParameters(width,height,bitrate, COLOR_FORMAT);
        mFPS = fps;
        mFrameDiffTimes = 1000/mFPS;
        mPeriodicTimeInSec = periodicTimeInSec;
        mPath = path;

        File f = new File(path);
        if(!f.exists()) {
            f.mkdirs();
        }
        f = null;

        try {
            mMediaFormat = new MediaFormat();
            mMediaFormat.setInteger(MediaFormat.KEY_WIDTH, width);
            mMediaFormat.setInteger(MediaFormat.KEY_HEIGHT, height);
            mMediaFormat.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_VIDEO_AVC);
            mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
            mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

            mAvcEncoder = new AvcEncoder(parameters, new AvcEncoder.EncodedFrameListener() {
                @Override
                public void onFirstSpsPpsEncoded(byte[] sps, byte[] pps) {
                    mMediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
                    mMediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
                    bFormatReady = true;
                }

                @Override
                public boolean onFrameEncoded(ByteBuffer nalu, MediaCodec.BufferInfo info) {
                    if(!bStarted) return false;

                    mFrameCount++;
                    info.presentationTimeUs = System.currentTimeMillis()*1000;
                    boolean bFlush = false;
//                    mTime += mFrameDiffTimes;
                    if (((info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) && (System.currentTimeMillis() - mFileStartTime) >= mPeriodicTimeInSec * 1000) {
                        createRecordFile();
                        bFlush = false;
                    }

                    synchronized (mLock) {
                        if (mMediaMuxer != null && bFormatReady) {
                            if (mVideoTrack == -1) {
                                mVideoTrack = mMediaMuxer.addTrack(mMediaFormat);
                                mMediaMuxer.start();
                            }
                            if(null!=mMuxerCallback) {
                                mMuxerCallback.OnMuxerWriterFrame(info.presentationTimeUs/1000);
                            }
                            mMediaMuxer.writeSampleData(mVideoTrack, nalu, info);
                        }
                    }
                    return bFlush;
                }
            });
        } catch (IOException e) {
            Log.d("VIARecorder", "VIARecorder: "+e.toString());
        }
    }
 
Example 6
Source File: PassthroughTranscoder.java    From LiTr with BSD 2-Clause "Simplified" License
@Override
public int processNextFrame() {
    if (lastResult == RESULT_EOS_REACHED) {
        // we are done
        return lastResult;
    }

    // TranscoderJob expects the first result to be RESULT_OUTPUT_MEDIA_FORMAT_CHANGED, so that it can start the mediaMuxer
    if (!targetTrackAdded) {
        targetFormat = mediaSource.getTrackFormat(sourceTrack);
        if (duration > 0) {
            targetFormat.setLong(MediaFormat.KEY_DURATION, duration);
        }

        targetTrack = mediaMuxer.addTrack(targetFormat, targetTrack);
        targetTrackAdded = true;

        int bufferSize = targetFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        outputBuffer = ByteBuffer.allocate(bufferSize);

        lastResult = RESULT_OUTPUT_MEDIA_FORMAT_CHANGED;
        return lastResult;
    }

    int selectedTrack = mediaSource.getSampleTrackIndex();
    if (selectedTrack != NO_SELECTED_TRACK && selectedTrack != sourceTrack) {
        lastResult = RESULT_FRAME_PROCESSED;
        return lastResult;
    }

    lastResult = RESULT_FRAME_PROCESSED;

    int bytesRead = mediaSource.readSampleData(outputBuffer, 0);
    if (bytesRead > 0) {
        int outputFlags = 0;
        long sampleTime = mediaSource.getSampleTime();
        int inputFlags = mediaSource.getSampleFlags();

        if ((inputFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                outputFlags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
            } else {
                outputFlags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
            }
        }
        if (duration > 0) {
            progress = ((float) sampleTime) / duration;
        }
        outputBufferInfo.set(0, bytesRead, sampleTime, outputFlags);
        mediaMuxer.writeSampleData(targetTrack, outputBuffer, outputBufferInfo);
        mediaSource.advance();
    } else {
        outputBuffer.clear();
        progress = 1.0f;
        lastResult = RESULT_EOS_REACHED;
        Log.d(TAG, "Reach EoS on input stream");
    }

    return lastResult;
}
 
Example 7
Source File: VideoEncoder.java    From TikTok with Apache License 2.0
public void offerData(byte [] buffer) {

        if(!isStart) return;

        ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);

        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(buffer);
            long timepts = 1000000*frameCount / frameRate;
            mediaCodec.queueInputBuffer(inputBufferIndex, 0, buffer.length, timepts, 0);
            frameCount++;
        }

        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

        while (outputBufferIndex >= 0) {

            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            Log.i(TAG, "outBufferIndex:" + outputBufferIndex+",size="+bufferInfo.size+",flag="+bufferInfo.flags+",buffer limit="+outputBuffer.limit());

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                Log.e(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                info = new byte[bufferInfo.size];
                outputBuffer.get(info);
            } else if((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0){

                encodeBuffer.clear();
                encodeBuffer.put(info);
                encodeBuffer.put(outputBuffer);
                encodeBuffer.flip();

                Log.e(TAG,"encode buffer="+encodeBuffer.limit());
                if(callback != null) {
                    callback.onEncodeData(encodeBuffer, MediaCodec.BUFFER_FLAG_KEY_FRAME);
                }
            } else{
                if(callback != null) {
                    callback.onEncodeData(outputBuffer,0);
                }
            }

            mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.e(TAG,"++++++++stream is end+++++++++++");
                break;      // out of while
            }
        }
    }
 
Example 8
Source File: RecordUtil.java    From WeiXinRecordedDemo with MIT License
private void encodeVideo(byte[] nv21) throws IOException {

        if(checkMaxFrame()){
            currFrame--;
            return ;
        }

        byte[] nv12 = new byte[nv21.length];
        byte[] yuvI420 = new byte[nv21.length];
        byte[] tempYuvI420 = new byte[nv21.length];

        LibyuvUtil.convertNV21ToI420(nv21, yuvI420, videoWidth, videoHeight);
        LibyuvUtil.compressI420(yuvI420, videoWidth, videoHeight, tempYuvI420, videoWidth, videoHeight, rotation, isFrontCamera);
        LibyuvUtil.convertI420ToNV12(tempYuvI420, nv12, videoWidth, videoHeight);

        // Write source data to the encoder's input buffers and read encoded data from its output buffers.
        // Get the index of the next available input buffer.
        int inputIndex = videoMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer = videoMediaCodec.getInputBuffer(inputIndex);
            inputBuffer.clear();
            // Copy in the data to be encoded.
            inputBuffer.put(nv12);
            // Queue the buffer and wait for MediaCodec to encode it.
            videoMediaCodec.queueInputBuffer(inputIndex, 0, nv12.length,  System.nanoTime()/1000, 0);
        }

        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        // Read the data encoded by MediaCodec.
        int outputIndex = videoMediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);

        boolean keyFrame = false;
        while (outputIndex >= 0) {
            ByteBuffer outputBuffer = videoMediaCodec.getOutputBuffer(outputIndex);
            byte[] h264 = new byte[bufferInfo.size];
            // This is the encoded H.264 data.
            outputBuffer.get(h264);
            // flags is a bit field, so test individual bits instead of switching on the whole value.
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Codec config (SPS/PPS).
                configByte = h264;
            } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                // Key frame: prepend the SPS/PPS config data.
                frameBuffer.put(configByte);
                frameBuffer.put(h264);
                keyFrame = true;
            } else {
                // Regular frame.
                frameBuffer.put(h264);
            }
            // Tell MediaCodec to release the output buffer.
            videoMediaCodec.releaseOutputBuffer(outputIndex, false);
            // Read the next chunk of encoded data.
            outputIndex = videoMediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        }

        if(frameBuffer.position() > 0){
            byte[] frameByte = new byte[frameBuffer.position()];
            frameBuffer.flip();
            frameBuffer.get(frameByte);
            frameBuffer.clear();

            currFrame++;
            videoOut.write(frameByte, 0, frameByte.length);
            videoOut.flush();

            while (keyFrame && checkMinFrame()){
                currFrame++;
                videoOut.write(frameByte, 0, frameByte.length);
                videoOut.flush();
            }
        }
    }
 
Example 9
Source File: SrsFlvMuxer.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
public void writeVideoSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) {
  if (bi.size < 4) return;

  bb.rewind();  //Sometimes the position is not 0.
  int pts = (int) (bi.presentationTimeUs / 1000);
  int type = SrsCodecVideoAVCFrame.InterFrame;
  SrsFlvFrameBytes frame = avc.demuxAnnexb(bb, bi.size, true);
  int nal_unit_type = frame.data.get(0) & 0x1f;
  if (nal_unit_type == SrsAvcNaluType.IDR || (bi.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
    type = SrsCodecVideoAVCFrame.KeyFrame;
  } else if (nal_unit_type == SrsAvcNaluType.SPS || nal_unit_type == SrsAvcNaluType.PPS) {
    SrsFlvFrameBytes frame_pps = avc.demuxAnnexb(bb, bi.size, false);
    frame.size = frame.size - frame_pps.size - 4;  // 4 ---> 00 00 00 01 pps
    if (!frame.data.equals(Sps)) {
      byte[] sps = new byte[frame.size];
      frame.data.get(sps);
      isPpsSpsSend = false;
      Sps = ByteBuffer.wrap(sps);
    }

    SrsFlvFrameBytes frame_sei = avc.demuxAnnexb(bb, bi.size, false);
    if (frame_sei.size > 0) {
      if (SrsAvcNaluType.SEI == (frame_sei.data.get(0) & 0x1f)) {
        frame_pps.size = frame_pps.size - frame_sei.size - 3;// 3 ---> 00 00 01 SEI
      }
    }

    if (frame_pps.size > 0 && !frame_pps.data.equals(Pps)) {
      byte[] pps = new byte[frame_pps.size];
      frame_pps.data.get(pps);
      isPpsSpsSend = false;
      Pps = ByteBuffer.wrap(pps);
      writeH264SpsPps(pts);
    }
    return;
  } else if (nal_unit_type != SrsAvcNaluType.NonIDR) {
    return;
  }

  ipbs.add(avc.muxNaluHeader(frame));
  ipbs.add(frame);

  writeH264IpbFrame(ipbs, type, pts);
  ipbs.clear();
}
 
Example 10
Source File: RtpMuxer.java    From DeviceConnect-Android with MIT License
@Override
public void onWriteVideoData(ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    if (mVideoPacketize != null) {
        boolean isConfigFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
        if (isConfigFrame) {
            byte[] configData = new byte[bufferInfo.size];
            encodedData.position(bufferInfo.offset);
            encodedData.limit(bufferInfo.offset + bufferInfo.size);
            encodedData.get(configData, 0, bufferInfo.size);
            int configLength = bufferInfo.size;

            search(configData, configLength, (data, startPos, length) -> {
                switch (data[startPos + 4] & 0x1F) {
                    case 0x07:
                        mSPS = new byte[length];
                        System.arraycopy(data, startPos, mSPS, 0, length);
                        break;
                    case 0x08:
                        mPPS = new byte[length];
                        System.arraycopy(data, startPos, mPPS, 0, length);
                        break;
                    default:
                        break;
                }
            });
        }

        boolean isKeyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        if (isKeyFrame && mSPS != null && mPPS != null) {
            // Send the H.264 SPS/PPS before every key frame.
            mVideoPacketize.write(mSPS, mSPS.length, bufferInfo.presentationTimeUs);
            mVideoPacketize.write(mPPS, mPPS.length, bufferInfo.presentationTimeUs);
        }

        if (mVideoBuffer.length < bufferInfo.size) {
            mVideoBuffer = new byte[bufferInfo.size];
        }
        encodedData.position(bufferInfo.offset);
        encodedData.limit(bufferInfo.offset + bufferInfo.size);
        encodedData.get(mVideoBuffer, 0, bufferInfo.size);

        mVideoPacketize.write(mVideoBuffer, bufferInfo.size, bufferInfo.presentationTimeUs);
    }
}
 
Example 11
Source File: SdlEncoder.java    From sdl_java_suite with BSD 3-Clause "New" or "Revised" License
/**
 * Extracts all pending data from the encoder
 * <p>
 * If endOfStream is not set, this returns when there is no more data to
 * drain. If it is set, we send EOS to the encoder, and then iterate until
 * we see EOS on the output. Calling this with endOfStream set should be
 * done once, right before stopping the muxer.
 */
public void drainEncoder(boolean endOfStream) {
	final int TIMEOUT_USEC = 10000;

	if(mEncoder == null || (mOutputStream == null && mOutputListener == null)) {
	   return;
	}
	if (endOfStream) {
		  mEncoder.signalEndOfInputStream();
	}

	ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
	while (true) {
		int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo,
				TIMEOUT_USEC);
		if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
			// no output available yet
			if (!endOfStream) {
				trySendVideoKeepalive();
				break; // out of while
			}
		} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			// not expected for an encoder
			encoderOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
			if (mH264CodecSpecificData == null) {
				MediaFormat format = mEncoder.getOutputFormat();
				mH264CodecSpecificData = EncoderUtils.getCodecSpecificData(format);
			} else {
				Log.w(TAG, "Output format change notified more than once, ignoring.");
			}
		} else if (encoderStatus < 0) {
			// unexpected status from dequeueOutputBuffer; skip this buffer
		} else {
			if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
				// If we already retrieve codec specific data via OUTPUT_FORMAT_CHANGED event,
				// we do not need this data.
				if (mH264CodecSpecificData != null) {
					mBufferInfo.size = 0;
				} else {
					Log.i(TAG, "H264 codec specific data not retrieved yet.");
				}
			}

			if (mBufferInfo.size != 0) {
				ByteBuffer encoderOutputBuffer = encoderOutputBuffers[encoderStatus];
				byte[] dataToWrite = null;
				int dataOffset = 0;

				// append SPS and PPS in front of every IDR NAL unit
				if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
						&& mH264CodecSpecificData != null) {
					dataToWrite = new byte[mH264CodecSpecificData.length + mBufferInfo.size];
					System.arraycopy(mH264CodecSpecificData, 0,
							dataToWrite, 0, mH264CodecSpecificData.length);
					dataOffset = mH264CodecSpecificData.length;
				} else {
					dataToWrite = new byte[mBufferInfo.size];
				}

				try {
					encoderOutputBuffer.position(mBufferInfo.offset);
					encoderOutputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);

					encoderOutputBuffer.get(dataToWrite, dataOffset, mBufferInfo.size);

					emitFrame(dataToWrite);
				} catch (Exception e) {
					// ignore buffer access errors and keep draining the encoder
				}
			}

			mEncoder.releaseOutputBuffer(encoderStatus, false);

			if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
				break; // out of while
			}
		}
	}
}
 
Example 12
Source File: VirtualDisplayEncoder.java    From sdl_java_suite with BSD 3-Clause "New" or "Revised" License
@SuppressWarnings("deprecation")
void drainEncoder(boolean endOfStream) {
    if (mVideoEncoder == null || mOutputListener == null) {
        return;
    }

    if (endOfStream) {
        mVideoEncoder.signalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = mVideoEncoder.getOutputBuffers();
    Thread currentThread = Thread.currentThread();
    while (!currentThread.isInterrupted()) {
        int encoderStatus = mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, -1);
        if(encoderStatus < 0){
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break; // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mVideoEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (mH264CodecSpecificData == null) {
                    MediaFormat format = mVideoEncoder.getOutputFormat();
                    mH264CodecSpecificData = EncoderUtils.getCodecSpecificData(format);
                } else {
                    Log.w(TAG, "Output format change notified more than once, ignoring.");
                }
            }
        } else {
            if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // If we already retrieve codec specific data via OUTPUT_FORMAT_CHANGED event,
                // we do not need this data.
                if (mH264CodecSpecificData != null) {
                    mVideoBufferInfo.size = 0;
                } else {
                    Log.i(TAG, "H264 codec specific data not retrieved yet.");
                }
            }

            if (mVideoBufferInfo.size != 0) {
                ByteBuffer encoderOutputBuffer = encoderOutputBuffers[encoderStatus];
                byte[] dataToWrite;
                int dataOffset = 0;

                // append SPS and PPS in front of every IDR NAL unit
                if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0 && mH264CodecSpecificData != null) {
                    dataToWrite = new byte[mH264CodecSpecificData.length + mVideoBufferInfo.size];
                    System.arraycopy(mH264CodecSpecificData, 0, dataToWrite, 0, mH264CodecSpecificData.length);
                    dataOffset = mH264CodecSpecificData.length;
                } else {
                    dataToWrite = new byte[mVideoBufferInfo.size];
                }

                try {
                    encoderOutputBuffer.position(mVideoBufferInfo.offset);
                    encoderOutputBuffer.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);

                    encoderOutputBuffer.get(dataToWrite, dataOffset, mVideoBufferInfo.size);

                    if (mOutputListener != null) {
                        mOutputListener.sendFrame(dataToWrite, 0, dataToWrite.length, mVideoBufferInfo.presentationTimeUs);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            mVideoEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break; // out of while
            }
        }
    }
}