com.qiniu.pili.droid.streaming.AVCodecType Java Examples

The following examples show how to use com.qiniu.pili.droid.streaming.AVCodecType. You can vote up the examples you find useful or vote down those you don't, and you can open the original project or source file by following the link above each example. Related API usage is listed in the sidebar.
Example #1
Source File: ImportStreamingActivity.java    From PLDroidMediaStreaming with Apache License 2.0 5 votes vote down vote up
@Override
protected void initStreamingManager() {
    // Create the manager with HW_VIDEO_YUV_AS_INPUT_WITH_HW_AUDIO_CODEC —
    // presumably video is fed in as raw YUV frames and audio uses the
    // hardware encoder; TODO confirm against the PLDroid SDK docs.
    mStreamingManager = new StreamingManager(this, AVCodecType.HW_VIDEO_YUV_AS_INPUT_WITH_HW_AUDIO_CODEC);
    mStreamingManager.prepare(mProfile);
    // This activity implements the session, status and state callbacks itself.
    mStreamingManager.setStreamingSessionListener(this);
    mStreamingManager.setStreamStatusCallback(this);
    mStreamingManager.setStreamingStateListener(this);
}
 
Example #2
Source File: PiliStreamingViewManager.java    From pili-react-native with MIT License 4 votes vote down vote up
/**
 * Invoked when audio recording fails. Falls back to the software video
 * encoder and restarts streaming; returns {@code true} to signal the
 * error has been handled.
 */
@Override
public boolean onRecordAudioFailedHandled(int err) {
    final AVCodecType fallbackCodec = AVCodecType.SW_VIDEO_CODEC;
    mMediaStreamingManager.updateEncodingType(fallbackCodec);
    mMediaStreamingManager.startStreaming();
    return true;
}
 
Example #3
Source File: PiliAudioStreamingViewManager.java    From pili-react-native with MIT License 4 votes vote down vote up
/**
 * Audio-recording failure hook: switch the encoder to software video
 * encoding, kick streaming off again, and report the failure as handled.
 */
@Override
public boolean onRecordAudioFailedHandled(int err) {
    final AVCodecType softwareVideo = AVCodecType.SW_VIDEO_CODEC;
    mMediaStreamingManager.updateEncodingType(softwareVideo);
    mMediaStreamingManager.startStreaming();
    return true;
}
 
Example #4
Source File: PKViceAnchorActivity.java    From PLDroidRTCStreaming with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the vice-anchor PK screen: wires up views, configures the camera,
 * creates the RTC streaming manager with listeners, applies conference
 * options, registers the remote render window, and calls prepare().
 * Intent extras read here: "swcodec" (default true), "roomName",
 * "beauty" (default false), "debugMode" (default false).
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Keep the screen on for the duration of the streaming session.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_pk_vice_anchor);

    /**
     * Step 1: find & init views
     */
    GLSurfaceView cameraPreviewFrameView = (GLSurfaceView) findViewById(R.id.cameraPreview_surfaceView);

    // Software codec is the default unless the caller explicitly disables it.
    boolean isSwCodec = getIntent().getBooleanExtra("swcodec", true);
    mRoomName = getIntent().getStringExtra("roomName");
    // PK mode is landscape-only.
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

    boolean isBeautyEnabled = getIntent().getBooleanExtra("beauty", false);
    boolean isDebugModeEnabled = getIntent().getBooleanExtra("debugMode", false);

    mControlButton = (Button) findViewById(R.id.ControlButton);
    mMuteCheckBox = (CheckBox) findViewById(R.id.MuteCheckBox);
    mMuteCheckBox.setOnClickListener(mMuteButtonClickListener);
    mControlButton.setText("开始连麦"); // label means "start co-streaming"

    CameraStreamingSetting.CAMERA_FACING_ID facingId = chooseCameraFacingId();
    mCurrentCamFacingIndex = facingId.ordinal();

    /**
     * Step 2: config camera & microphone settings
     */
    CameraStreamingSetting cameraStreamingSetting = new CameraStreamingSetting();
    cameraStreamingSetting.setCameraFacingId(facingId)
            .setContinuousFocusModeEnabled(true)
            .setRecordingHint(false)
            .setResetTouchFocusDelayInMs(3000)
            .setFocusMode(CameraStreamingSetting.FOCUS_MODE_CONTINUOUS_PICTURE)
            .setCameraPrvSizeLevel(CameraStreamingSetting.PREVIEW_SIZE_LEVEL.MEDIUM)
            .setCameraPrvSizeRatio(CameraStreamingSetting.PREVIEW_SIZE_RATIO.RATIO_4_3);

    if (isBeautyEnabled) {
        cameraStreamingSetting.setBuiltInFaceBeautyEnabled(true); // Using sdk built in face beauty algorithm
        cameraStreamingSetting.setFaceBeautySetting(new CameraStreamingSetting.FaceBeautySetting(0.8f, 0.8f, 0.6f)); // sdk built in face beauty settings
        cameraStreamingSetting.setVideoFilter(CameraStreamingSetting.VIDEO_FILTER_TYPE.VIDEO_FILTER_BEAUTY); // set the beauty on/off
    }

    /**
     * Step 3: create streaming manager and set listeners
     */
    AVCodecType codecType = isSwCodec ? AVCodecType.SW_VIDEO_WITH_SW_AUDIO_CODEC : AVCodecType.HW_VIDEO_YUV_AS_INPUT_WITH_HW_AUDIO_CODEC;
    mRTCStreamingManager = new RTCMediaStreamingManager(getApplicationContext(), cameraPreviewFrameView, codecType);
    mRTCStreamingManager.setConferenceStateListener(mRTCStreamingStateChangedListener);
    mRTCStreamingManager.setRemoteWindowEventListener(mRTCRemoteWindowEventListener);
    mRTCStreamingManager.setStreamingSessionListener(mStreamingSessionListener);
    mRTCStreamingManager.setDebugLoggingEnabled(isDebugModeEnabled);

    /**
     * Step 4: set conference options
     */
    RTCConferenceOptions options = new RTCConferenceOptions();
    // RATIO_4_3 & VIDEO_ENCODING_SIZE_HEIGHT_480 means the output size is 640 x 480
    options.setVideoEncodingSizeRatio(RTCConferenceOptions.VIDEO_ENCODING_SIZE_RATIO.RATIO_4_3);
    options.setVideoEncodingSizeLevel(RTCConferenceOptions.VIDEO_ENCODING_SIZE_HEIGHT_480);
    options.setVideoEncodingOrientation(RTCConferenceOptions.VIDEO_ENCODING_ORIENTATION.LAND);
    options.setVideoBitrateRange(300 * 1024, 800 * 1024);
    // 15 fps is enough
    options.setVideoEncodingFps(15);
    mRTCStreamingManager.setConferenceOptions(options);

    /**
     * Step 5: create the remote windows
     */
    RTCVideoWindow windowA = new RTCVideoWindow((RTCSurfaceView) findViewById(R.id.RemoteGLSurfaceViewA));

    /**
     * Step 6: add the remote windows
     */
    mRTCStreamingManager.addRemoteWindow(windowA);

    /**
     * Step 7: do prepare
     */
    // Only camera settings are supplied; the three null arguments are the
    // remaining optional configs — presumably microphone/watermark/profile,
    // TODO confirm against the RTCMediaStreamingManager API.
    mRTCStreamingManager.prepare(cameraStreamingSetting, null, null, null);

    mProgressDialog = new ProgressDialog(this);
}
 
Example #5
Source File: RTCAudioStreamingActivity.java    From PLDroidRTCStreaming with Apache License 2.0 4 votes vote down vote up
/**
 * Sets up the audio-only RTC streaming screen. Reads the role, room name,
 * codec choice, orientation and audio-level-callback flags from the launch
 * intent, builds the RTC streaming manager, and branches on role: the
 * anchor configures a StreamingProfile and publish listeners, while a
 * vice anchor only registers a remote window for the anchor's stream.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Keep the screen on while streaming.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_audio_streaming);

    // Defaults: vice-anchor role, software codec, portrait orientation.
    mRole = getIntent().getIntExtra("role", QiniuAppServer.RTC_ROLE_VICE_ANCHOR);
    mRoomName = getIntent().getStringExtra("roomName");
    boolean isSwCodec = getIntent().getBooleanExtra("swcodec", true);
    boolean isLandscape = getIntent().getBooleanExtra("orientation", false);
    mIsAudioLevelCallbackEnabled = getIntent().getBooleanExtra("audioLevelCallback", false);
    setRequestedOrientation(isLandscape ? ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE : ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

    mControlButton = (Button) findViewById(R.id.ControlButton);
    mStatusTextView = (TextView) findViewById(R.id.StatusTextView);
    mStatTextView = (TextView) findViewById(R.id.StatTextView);
    mMuteCheckBox = (CheckBox) findViewById(R.id.MuteCheckBox);
    mMuteCheckBox.setOnClickListener(mMuteButtonClickListener);
    mConferenceCheckBox = (CheckBox) findViewById(R.id.ConferenceCheckBox);
    mConferenceCheckBox.setOnClickListener(mConferenceButtonClickListener);

    // Only the anchor can toggle the conference checkbox.
    if (mRole == QiniuAppServer.RTC_ROLE_ANCHOR) {
        mConferenceCheckBox.setVisibility(View.VISIBLE);
    }

    // Audio-only session: pick a pure audio codec (software or hardware).
    AVCodecType codecType = isSwCodec ? AVCodecType.SW_AUDIO_CODEC : AVCodecType.HW_AUDIO_CODEC;
    mRTCStreamingManager = new RTCMediaStreamingManager(getApplicationContext(), codecType);
    mRTCStreamingManager.setConferenceStateListener(mRTCStreamingStateChangedListener);
    mRTCStreamingManager.setUserEventListener(mRTCUserEventListener);
    mRTCStreamingManager.setDebugLoggingEnabled(false);

    if (mIsAudioLevelCallbackEnabled) {
        mRTCStreamingManager.setAudioLevelCallback(mRTCAudioLevelCallback);
    }

    RTCConferenceOptions options = new RTCConferenceOptions();
    // Keep the conference codec choice consistent with the publish codec.
    options.setHWCodecEnabled(!isSwCodec);
    mRTCStreamingManager.setConferenceOptions(options);

    // the anchor must configure the `StreamingProfile`
    if (mRole == QiniuAppServer.RTC_ROLE_ANCHOR) {
        mRTCStreamingManager.setStreamStatusCallback(mStreamStatusCallback);
        mRTCStreamingManager.setStreamingStateListener(mStreamingStateChangedListener);
        mRTCStreamingManager.setStreamingSessionListener(mStreamingSessionListener);

        mStreamingProfile = new StreamingProfile();
        mStreamingProfile.setAudioQuality(StreamingProfile.AUDIO_QUALITY_MEDIUM2)
                .setEncoderRCMode(StreamingProfile.EncoderRCModes.QUALITY_PRIORITY);
        mRTCStreamingManager.prepare(null,  mStreamingProfile);
    } else {
        /**
         * The RTCVideoWindow is used to show the anchor's video
         * This code is not required when the anchor is publishing audio streaming only.
         */
        RTCVideoWindow remoteAnchorView = new RTCVideoWindow((RTCSurfaceView) findViewById(R.id.RemoteAnchorView));
        mRTCStreamingManager.addRemoteWindow(remoteAnchorView);

        mControlButton.setText("开始连麦"); // label means "start co-streaming"
        mRTCStreamingManager.prepare(null);
    }

    mProgressDialog = new ProgressDialog(this);
}
 
Example #6
Source File: PiliStreamingViewManager.java    From react-native-pili with MIT License 4 votes vote down vote up
/**
 * Handles an audio-record failure by downgrading to software video
 * encoding and restarting the stream. Returning {@code true} tells the
 * SDK the failure was dealt with here.
 */
@Override
public boolean onRecordAudioFailedHandled(int err) {
    final AVCodecType swCodec = AVCodecType.SW_VIDEO_CODEC;
    mMediaStreamingManager.updateEncodingType(swCodec);
    mMediaStreamingManager.startStreaming();
    return true;
}
 
Example #7
Source File: PiliAudioStreamingViewManager.java    From react-native-pili with MIT License 4 votes vote down vote up
/**
 * Recovery path for a failed audio-record attempt: re-encode with the
 * software video codec and restart streaming, then claim the error as
 * handled by returning {@code true}.
 */
@Override
public boolean onRecordAudioFailedHandled(int err) {
    final AVCodecType recoveryCodec = AVCodecType.SW_VIDEO_CODEC;
    mMediaStreamingManager.updateEncodingType(recoveryCodec);
    mMediaStreamingManager.startStreaming();
    return true;
}