Java Code Examples for org.webrtc.Logging#d()

The following examples show how to use org.webrtc.Logging#d(). They are taken from open-source projects; the source file and project are listed above each example.
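Logging.d(tag, message) mirrors android.util.Log.d(): it takes a tag and a message and writes a debug-level entry through WebRTC's logging backend. The minimal sketch below shows how that output is typically made visible in logcat; the enableLogToDebugOutput call, the Severity constant, and the tag name are assumptions about the public org.webrtc.Logging API, not taken from the examples below.

import org.webrtc.Logging;

public class WebRtcLoggingSetup {
  // Hypothetical tag used only for this sketch.
  private static final String TAG = "WebRtcLoggingSetup";

  public static void enableWebRtcLogging() {
    // Route WebRTC log messages (including Logging.d output) to Android logcat.
    // Assumed API: Logging.enableLogToDebugOutput and the Logging.Severity enum.
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
    Logging.d(TAG, "WebRTC debug logging enabled");
  }
}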
Example 1
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the meantime.
  final int nativeOutputSampleRate =
      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
  if (sampleRateInHz != nativeOutputSampleRate) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  // Create an audio track where the audio usage is for VoIP and the content type is speech.
  return new AudioTrack(new AudioAttributes.Builder()
                            .setUsage(DEFAULT_USAGE)
                            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                            .build(),
      new AudioFormat.Builder()
          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
          .setSampleRate(sampleRateInHz)
          .setChannelMask(channelConfig)
          .build(),
      bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example 2
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
  final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
      AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
      AudioManager.STREAM_SYSTEM};
  Logging.d(tag, "Audio State: ");
  // Some devices may not have volume controls and might use a fixed volume.
  boolean fixedVolume = isVolumeFixed(audioManager);
  Logging.d(tag, "  fixed volume=" + fixedVolume);
  if (!fixedVolume) {
    for (int stream : streams) {
      StringBuilder info = new StringBuilder();
      info.append("  " + streamTypeToString(stream) + ": ");
      info.append("volume=").append(audioManager.getStreamVolume(stream));
      info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
      logIsStreamMute(tag, audioManager, stream, info);
      Logging.d(tag, info.toString());
    }
  }
}
 
Example 3
Source File: Mp4Recorder.java    From VideoCRE with MIT License
@Override
public void onOutputFormatChanged(final MediaCodec codec, final MediaFormat format) {
    if (mMuxerStarted) {
        throw new RuntimeException("format changed twice");
    }

    String name = format.getString(MediaFormat.KEY_MIME);
    int width = format.getInteger(MediaFormat.KEY_WIDTH);
    int height = format.getInteger(MediaFormat.KEY_HEIGHT);

    Logging.d(TAG, "onOutputFormatChanged " + name + " " + width + "x" + height);

    mTrackIndex = mMediaMuxer.addTrack(format);
    mMediaMuxer.start();
    mMuxerStarted = true;
}
 
Example 4
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
@CalledByNative
private boolean stopPlayout() {
  threadChecker.checkIsOnValidThread();
  volumeLogger.stop();
  Logging.d(TAG, "stopPlayout");
  assertTrue(audioThread != null);
  logUnderrunCount();
  audioThread.stopThread();

  Logging.d(TAG, "Stopping the AudioTrackThread...");
  audioThread.interrupt();
  if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
    Logging.e(TAG, "Join of AudioTrackThread timed out.");
    WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  }
  Logging.d(TAG, "AudioTrackThread has now been stopped.");
  audioThread = null;
  releaseAudioResources();
  return true;
}
 
Example 5
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License
@CalledByNative
private boolean stopRecording() {
  Logging.d(TAG, "stopRecording");
  assertTrue(audioThread != null);
  audioThread.stopThread();
  if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
    Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
    WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  }
  audioThread = null;
  effects.release();
  releaseAudioResources();
  return true;
}
 
Example 6
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "getStreamVolume");
  return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
}
 
Example 7
Source File: JavaAudioDeviceModule.java    From webrtc_android with MIT License
/**
 * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
 * and is responsible for calling release().
 */
public AudioDeviceModule createAudioDeviceModule() {
    Logging.d(TAG, "createAudioDeviceModule");
    if (useHardwareNoiseSuppressor) {
        Logging.d(TAG, "HW NS will be used.");
    } else {
        if (isBuiltInNoiseSuppressorSupported()) {
            Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
        }
        Logging.d(TAG, "HW NS will not be used.");
    }
    if (useHardwareAcousticEchoCanceler) {
        Logging.d(TAG, "HW AEC will be used.");
    } else {
        if (isBuiltInAcousticEchoCancelerSupported()) {
            Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
        }
        Logging.d(TAG, "HW AEC will not be used.");
    }
    final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource,
            audioFormat, audioRecordErrorCallback, samplesReadyCallback,
            useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
    final WebRtcAudioTrack audioOutput =
            new WebRtcAudioTrack(context, audioManager, audioTrackErrorCallback);
    return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
            inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
}
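As the Javadoc above notes, the caller owns the returned module and is responsible for calling release(). The sketch below shows one plausible way to build, register, and release it; the builder(), setAudioDeviceModule(), and release() calls reflect the public org.webrtc API as an assumption and are not part of the project above.

import android.content.Context;
import org.webrtc.AudioDeviceModule;
import org.webrtc.JavaAudioDeviceModule;
import org.webrtc.PeerConnectionFactory;

static PeerConnectionFactory createFactoryWithAdm(Context appContext) {
    // Assumes PeerConnectionFactory.initialize(...) has already been called.
    AudioDeviceModule adm = JavaAudioDeviceModule.builder(appContext)
            .setUseHardwareAcousticEchoCanceler(true)
            .setUseHardwareNoiseSuppressor(true)
            .createAudioDeviceModule();
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
            .setAudioDeviceModule(adm)
            .createPeerConnectionFactory();
    // The factory holds the native module; release the local handle as the Javadoc requires.
    adm.release();
    return factory;
}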
 
Example 8
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
private void releaseAudioResources() {
  Logging.d(TAG, "releaseAudioResources");
  if (audioTrack != null) {
    audioTrack.release();
    audioTrack = null;
  }
}
 
Example 9
Source File: VideoSource.java    From VideoCRE with MIT License
public void stop() {
    if (mVideoCapturer != null && !mVideoCapturerStopped) {
        Logging.d(TAG, "Stop video source.");
        try {
            mVideoCapturer.stopCapture();
        } catch (InterruptedException e) {
            Logging.e(TAG, "stop", e);
        }
        mVideoCapturerStopped = true;
    }
}
 
Example 10
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License
public void release() {
  Logging.d(TAG, "release");
  if (aec != null) {
    aec.release();
    aec = null;
  }
  if (ns != null) {
    ns.release();
    ns = null;
  }
}
 
Example 11
Source File: JavaAudioDeviceModule.java    From webrtc_android with MIT License
/**
 * Call this method to specifically override input sample rate.
 */
// Override the input audio sample rate.
public Builder setInputSampleRate(int inputSampleRate) {
    Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate);
    this.inputSampleRate = inputSampleRate;
    return this;
}
 
Example 12
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License
private boolean enableBuiltInAEC(boolean enable) {
  Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
  if (effects == null) {
    Logging.e(TAG, "Built-in AEC is not supported on this platform");
    return false;
  }
  return effects.setAEC(enable);
}
 
Example 13
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License
private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) {
  Logging.d(tag,
      "Audio State: "
          + "audio mode: " + modeToString(audioManager.getMode()) + ", "
          + "has mic: " + hasMicrophone(context) + ", "
          + "mic muted: " + audioManager.isMicrophoneMute() + ", "
          + "music active: " + audioManager.isMusicActive() + ", "
          + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
          + "BT SCO: " + audioManager.isBluetoothScoOn());
}
 
Example 14
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
private boolean startPlayout() {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "startPlayout");
  assertTrue(audioTrack != null);
  assertTrue(audioThread == null);

  // Starts playing an audio track.
  try {
    audioTrack.play();
  } catch (IllegalStateException e) {
    reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
        "AudioTrack.play failed: " + e.getMessage());
    releaseAudioResources();
    return false;
  }
  if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
    reportWebRtcAudioTrackStartError(
        AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
        "AudioTrack.play failed - incorrect state :"
        + audioTrack.getPlayState());
    releaseAudioResources();
    return false;
  }

  // Create and start new high-priority thread which calls AudioTrack.write()
  // and where we also call the native nativeGetPlayoutData() callback to
  // request decoded audio from WebRTC.
  audioThread = new AudioTrackThread("AudioTrackJavaThread");
  audioThread.start();
  return true;
}
 
Example 15
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
WebRtcAudioTrack(long nativeAudioTrack) {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
  this.nativeAudioTrack = nativeAudioTrack;
  audioManager =
      (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
  if (DEBUG) {
    WebRtcAudioUtils.logDeviceInfo(TAG);
  }
}
 
Example 16
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License
private static void logAudioStateBasic(String tag, AudioManager audioManager) {
  Logging.d(tag, "Audio State: "
          + "audio mode: " + modeToString(audioManager.getMode()) + ", "
          + "has mic: " + hasMicrophone() + ", "
          + "mic muted: " + audioManager.isMicrophoneMute() + ", "
          + "music active: " + audioManager.isMusicActive() + ", "
          + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
          + "BT SCO: " + audioManager.isBluetoothScoOn());
}
 
Example 17
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License
private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
  if (Build.VERSION.SDK_INT < 23) {
    return;
  }
  final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
  if (devices.length == 0) {
    return;
  }
  Logging.d(tag, "Audio Devices: ");
  for (AudioDeviceInfo device : devices) {
    StringBuilder info = new StringBuilder();
    info.append("  ").append(deviceTypeToString(device.getType()));
    info.append(device.isSource() ? "(in): " : "(out): ");
    // An empty array indicates that the device supports arbitrary channel counts.
    if (device.getChannelCounts().length > 0) {
      info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
      info.append(", ");
    }
    if (device.getEncodings().length > 0) {
      // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
      info.append("encodings=").append(Arrays.toString(device.getEncodings()));
      info.append(", ");
    }
    if (device.getSampleRates().length > 0) {
      info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
      info.append(", ");
    }
    info.append("id=").append(device.getId());
    Logging.d(tag, info.toString());
  }
}
 
Example 18
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License
@CalledByNative
private boolean enableBuiltInNS(boolean enable) {
  Logging.d(TAG, "enableBuiltInNS(" + enable + ")");
  return effects.setNS(enable);
}
 
Example 19
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
public void stopThread() {
  Logging.d(TAG, "stopThread");
  keepAlive = false;
}
 
Example 20
Source File: TextureViewRenderer.java    From VideoCRE with MIT License
private void logD(String string) {
  Logging.d(TAG, resourceName + string);
}