Java Code Examples for org.webrtc.Logging#w()

The following examples show how to use org.webrtc.Logging#w() . You can vote up the examples you find useful or vote down the ones you don't, and follow the links above each example to visit the original project or source file. You can also check out the related API usage in the sidebar.
Example 1
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 6 votes vote down vote up
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  // Builds a streaming AudioTrack via the API 21+ attribute-based constructor,
  // tagging the output for VoIP usage with speech content.
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the meantime.
  final int nativeRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeRate);
  // Fast (low-latency) mode only kicks in when the requested rate matches the native one.
  if (nativeRate != sampleRateInHz) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  final AudioAttributes attributes = new AudioAttributes.Builder()
      .setUsage(DEFAULT_USAGE)
      .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
      .build();
  final AudioFormat format = new AudioFormat.Builder()
      .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
      .setSampleRate(sampleRateInHz)
      .setChannelMask(channelConfig)
      .build();
  return new AudioTrack(attributes, format, bufferSizeInBytes, AudioTrack.MODE_STREAM,
      AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example 2
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 5 votes vote down vote up
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  // Builds a streaming AudioTrack via the API 21+ attribute-based constructor.
  // The usage attribute is configurable; the content type is always speech (VoIP).
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the meantime.
  final int nativeRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeRate);
  // Fast (low-latency) mode only kicks in when the requested rate matches the native one.
  if (nativeRate != sampleRateInHz) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  // Surface any override of the default usage attribute in the log.
  if (usageAttribute != DEFAULT_USAGE) {
    Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute);
  }
  final AudioAttributes attributes = new AudioAttributes.Builder()
      .setUsage(usageAttribute)
      .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
      .build();
  final AudioFormat format = new AudioFormat.Builder()
      .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
      .setSampleRate(sampleRateInHz)
      .setChannelMask(channelConfig)
      .build();
  return new AudioTrack(attributes, format, bufferSizeInBytes, AudioTrack.MODE_STREAM,
      AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example 3
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public static boolean isAcousticEchoCancelerBlacklisted() {
  // Returns true when the current device model appears on the AEC blacklist,
  // i.e. hardware echo cancellation should not be used on it.
  final boolean blacklisted =
      WebRtcAudioUtils.getBlackListedModelsForAecUsage().contains(Build.MODEL);
  if (blacklisted) {
    Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
  }
  return blacklisted;
}
 
Example 4
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public static boolean isNoiseSuppressorBlacklisted() {
  // Returns true when the current device model appears on the NS blacklist,
  // i.e. hardware noise suppression should not be used on it.
  final boolean blacklisted =
      WebRtcAudioUtils.getBlackListedModelsForNsUsage().contains(Build.MODEL);
  if (blacklisted) {
    Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
  }
  return blacklisted;
}
 
Example 5
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public boolean setAEC(boolean enable) {
  // Records the desired state of the platform acoustic echo canceler.
  // Fails (returns false) when the platform AEC is unusable, or when the effect
  // already exists and the request would flip its state while recording.
  Logging.d(TAG, "setAEC(" + enable + ")");
  if (!canUseAcousticEchoCanceler()) {
    Logging.w(TAG, "Platform AEC is not supported");
    shouldEnableAec = false;
    return false;
  }
  final boolean stateChangeWhileActive = (aec != null) && (enable != shouldEnableAec);
  if (stateChangeWhileActive) {
    Logging.e(TAG, "Platform AEC state can't be modified while recording");
    return false;
  }
  shouldEnableAec = enable;
  return true;
}
 
Example 6
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public boolean setNS(boolean enable) {
  // Records the desired state of the platform noise suppressor.
  // Fails (returns false) when the platform NS is unusable, or when the effect
  // already exists and the request would flip its state while recording.
  Logging.d(TAG, "setNS(" + enable + ")");
  if (!canUseNoiseSuppressor()) {
    Logging.w(TAG, "Platform NS is not supported");
    shouldEnableNs = false;
    return false;
  }
  final boolean stateChangeWhileActive = (ns != null) && (enable != shouldEnableNs);
  if (stateChangeWhileActive) {
    Logging.e(TAG, "Platform NS state can't be modified while recording");
    return false;
  }
  shouldEnableNs = enable;
  return true;
}
 
Example 7
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public boolean setNS(boolean enable) {
  // Records the desired state of the platform noise suppressor.
  // Fails (returns false) when the platform NS is unsupported, or when the effect
  // already exists and the request would flip its state while recording.
  Logging.d(TAG, "setNS(" + enable + ")");
  if (!isNoiseSuppressorSupported()) {
    Logging.w(TAG, "Platform NS is not supported");
    shouldEnableNs = false;
    return false;
  }
  final boolean stateChangeWhileActive = (ns != null) && (enable != shouldEnableNs);
  if (stateChangeWhileActive) {
    Logging.e(TAG, "Platform NS state can't be modified while recording");
    return false;
  }
  shouldEnableNs = enable;
  return true;
}
 
Example 8
Source File: WebRtcAudioEffects.java    From webrtc_android with MIT License 5 votes vote down vote up
public boolean setAEC(boolean enable) {
  // Records the desired state of the platform acoustic echo canceler.
  // Fails (returns false) when the platform AEC is unsupported, or when the effect
  // already exists and the request would flip its state while recording.
  Logging.d(TAG, "setAEC(" + enable + ")");
  if (!isAcousticEchoCancelerSupported()) {
    Logging.w(TAG, "Platform AEC is not supported");
    shouldEnableAec = false;
    return false;
  }
  final boolean stateChangeWhileActive = (aec != null) && (enable != shouldEnableAec);
  if (stateChangeWhileActive) {
    Logging.e(TAG, "Platform AEC state can't be modified while recording");
    return false;
  }
  shouldEnableAec = enable;
  return true;
}
 
Example 9
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License 5 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() {
  // Reports whether the software (WebRTC) AEC override is active; warns when it is,
  // since that deviates from the default hardware-based behavior.
  final boolean overridden = useWebRtcBasedAcousticEchoCanceler;
  if (overridden) {
    Logging.w(TAG, "Overriding default behavior; now using WebRTC AEC!");
  }
  return overridden;
}
 
Example 10
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License 5 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized boolean useWebRtcBasedNoiseSuppressor() {
  // Reports whether the software (WebRTC) NS override is active; warns when it is,
  // since that deviates from the default hardware-based behavior.
  final boolean overridden = useWebRtcBasedNoiseSuppressor;
  if (overridden) {
    Logging.w(TAG, "Overriding default behavior; now using WebRTC NS!");
  }
  return overridden;
}
 
Example 11
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License 4 votes vote down vote up
public void setMicrophoneMute(boolean mute) {
  // Stores the requested software microphone mute state; logged at warning
  // level to make the override visible in traces.
  Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
  microphoneMute = mute;
}
 
Example 12
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 4 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setAudioTrackUsageAttribute(int usage) {
  // Overrides the usage attribute applied to subsequently created audio tracks.
  Logging.w(TAG, "Default usage attribute is changed from: "
      + DEFAULT_USAGE + " to " + usage);
  usageAttribute = usage;
}
 
Example 13
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 4 votes vote down vote up
public void setSpeakerMute(boolean mute) {
  // Stores the requested software speaker mute state; logged at warning
  // level to make the override visible in traces.
  Logging.w(TAG, "setSpeakerMute(" + mute + ")");
  speakerMute = mute;
}
 
Example 14
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License 4 votes vote down vote up
public static void setMicrophoneMute(boolean mute) {
  // Stores the requested software microphone mute state; logged at warning
  // level to make the override visible in traces.
  Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
  microphoneMute = mute;
}
 
Example 15
Source File: WebRtcAudioRecord.java    From webrtc_android with MIT License 4 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setAudioSource(int source) {
  // Overrides the audio source used for recording; the previous and new
  // values are logged so the change is traceable.
  Logging.w(TAG, "Audio source is changed from: " + audioSource
          + " to " + source);
  audioSource = source;
}
 
Example 16
Source File: WebRtcAudioUtils.java    From webrtc_android with MIT License 4 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
  // Intentionally a no-op: only emits a deprecation warning.
  // TODO(henrika): deprecated; remove when no longer used by any client.
  Logging.w(TAG, "setWebRtcBasedAutomaticGainControl() is deprecated");
}
 
Example 17
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License 4 votes vote down vote up
private boolean isAAudioSupported() {
  // AAudio requires Android API level 27+ and is additionally gated by a
  // global blacklist flag that can disable it everywhere.
  if (blacklistDeviceForAAudioUsage) {
    Logging.w(TAG, "AAudio support is currently disabled on all devices!");
  }
  return Build.VERSION.SDK_INT >= 27 && !blacklistDeviceForAAudioUsage;
}
 
Example 18
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License 4 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setStereoInput(boolean enable) {
  // Overrides the default (mono) input channel configuration.
  Logging.w(TAG, "Overriding default input behavior: setStereoInput(" + enable + ')');
  useStereoInput = enable;
}
 
Example 19
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License 4 votes vote down vote up
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setStereoOutput(boolean enable) {
  Logging.w(TAG, "Overriding default output behavior: setStereoOutput(" + enable + ')');
  useStereoOutput = enable;
}
 
Example 20
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 4 votes vote down vote up
public static void setSpeakerMute(boolean mute) {
  // Stores the requested software speaker mute state; logged at warning
  // level to make the override visible in traces.
  Logging.w(TAG, "setSpeakerMute(" + mute + ")");
  speakerMute = mute;
}