android.media.AudioTrack Java Examples

The following examples show how to use android.media.AudioTrack. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: FromFileBase.java    From rtmp-rtsp-stream-client-java with Apache License 2.0 6 votes vote down vote up
/**
 * Stops a stream previously started with {@code startStream}.
 *
 * Stops RTP streaming first; then, if no recording is in progress, tears down the
 * GL surface, the decoders, the audio playback track and the encoders.
 */
public void stopStream() {
  if (streaming) {
    streaming = false;
    stopStreamRtp();
  }
  // Keep the pipeline alive while a recording still depends on it.
  if (!recordController.isRecording()) {
    if (glInterface != null) {
      glInterface.removeMediaCodecSurface();
      glInterface.stop();
    }
    if (videoDecoder != null) videoDecoder.stop();
    if (audioDecoder != null) audioDecoder.stop();
    // Only call stop() on a track that is actually playing.
    if (audioTrackPlayer != null
        && audioTrackPlayer.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
      audioTrackPlayer.stop();
    }
    audioTrackPlayer = null;
    videoEncoder.stop();
    audioEncoder.stop();
    recordController.resetFormats();
  }
}
 
Example #2
Source File: StreamAudioPlayer.java    From RxAndroidAudio with MIT License 6 votes vote down vote up
@WorkerThread
public synchronized boolean play(byte[] data, int size) {
    // Without a track there is nothing to write to.
    if (mAudioTrack == null) {
        Log.w(TAG, "play fail: null mAudioTrack");
        return false;
    }
    try {
        final int result = mAudioTrack.write(data, 0, size);
        // Map each documented AudioTrack.write error code to a logged failure.
        if (result == AudioTrack.ERROR_INVALID_OPERATION) {
            Log.w(TAG, "play fail: ERROR_INVALID_OPERATION");
            return false;
        }
        if (result == AudioTrack.ERROR_BAD_VALUE) {
            Log.w(TAG, "play fail: ERROR_BAD_VALUE");
            return false;
        }
        if (result == AudioManager.ERROR_DEAD_OBJECT) {
            Log.w(TAG, "play fail: ERROR_DEAD_OBJECT");
            return false;
        }
        return true;
    } catch (IllegalStateException e) {
        Log.w(TAG, "play fail: " + e.getMessage());
        return false;
    }
}
 
Example #3
Source File: AndroidAudioForJSyn.java    From science-journal with Apache License 2.0 6 votes vote down vote up
@Override
public void start() {
  // Smallest buffer (in bytes) the platform accepts for this rate/format in streaming mode.
  minBufferSize =
      AudioTrack.getMinBufferSize(
          frameRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
  System.out.println("Audio minBufferSize = " + minBufferSize);
  // Use 1.5x the minimum, rounded down to a multiple of 4 (frame-aligned for 16-bit stereo).
  bufferSize = (3 * (minBufferSize / 2)) & ~3;
  System.out.println("Audio bufferSize = " + bufferSize);
  audioTrack =
      new AudioTrack(
          AudioManager.STREAM_MUSIC,
          frameRate,
          AudioFormat.CHANNEL_OUT_STEREO,
          AudioFormat.ENCODING_PCM_16BIT,
          bufferSize,
          AudioTrack.MODE_STREAM);
  audioTrack.play();
}
 
Example #4
Source File: CustomAudioDevice.java    From opentok-android-sdk-samples with MIT License 6 votes vote down vote up
/**
 * Stops audio rendering: halts and flushes the AudioTrack, clears the rendering
 * flag and unregisters the headset/Bluetooth receivers.
 *
 * @return true on success.
 * @throws RuntimeException if stopping or flushing the track fails.
 */
@Override
public boolean stopRenderer() {
    Log.d("AUDIO_FOCUS", "Stop Renderer");

    rendererLock.lock();
    try {
        // only stop if we are playing
        if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            // stop playout
            audioTrack.stop();

        }
        // flush the buffers
        audioTrack.flush();
    } catch (Exception e) {
        // Fix: chain the original exception instead of flattening it to a
        // (possibly null) message string, so the stack trace and type survive.
        throw new RuntimeException("Failed to stop audio renderer", e);
    } finally {
        // Ensure we always unlock, both for success, exception or error
        // return.
        isRendering = false;
        rendererLock.unlock();
    }
    unregisterHeadsetReceiver();
    unregisterBtReceiver();
    return true;
}
 
Example #5
Source File: AudioTrackPositionTracker.java    From TelePlus-Android with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
 * track's position, until the next call to {@link #reset()}.
 *
 * @param audioTrack The audio track to wrap.
 * @param outputEncoding The encoding of the audio track.
 * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
 *     otherwise.
 * @param bufferSize The audio track buffer size in bytes.
 */
public void setAudioTrack(
    AudioTrack audioTrack,
    @C.Encoding int outputEncoding,
    int outputPcmFrameSize,
    int bufferSize) {
  this.audioTrack = audioTrack;
  this.outputPcmFrameSize = outputPcmFrameSize;
  this.bufferSize = bufferSize;
  // A fresh poller per track: timestamps from a previous track are meaningless here.
  audioTimestampPoller = new AudioTimestampPoller(audioTrack);
  outputSampleRate = audioTrack.getSampleRate();
  needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
  isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
  // Buffer duration is only computable for PCM, where the frame size is known.
  bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
  // Reset playback-head tracking and workaround state for the new track.
  lastRawPlaybackHeadPosition = 0;
  rawPlaybackHeadWrapCount = 0;
  passthroughWorkaroundPauseOffset = 0;
  hasData = false;
  stopTimestampUs = C.TIME_UNSET;
  forceResetWorkaroundTimeMs = C.TIME_UNSET;
  latencyUs = 0;
}
 
Example #6
Source File: MjpegPlayerActivity.java    From CameraV with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Opens the audio source at the given path and prepares either an AAC decoder or a
 * raw-PCM AudioTrack for playback, depending on {@code useAAC}.
 *
 * @param vfsPath path of the audio data to play back
 * @throws Exception if the file cannot be opened or the decoder cannot be configured
 */
public void initAudio(String vfsPath) throws Exception {

    	isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    	if (useAAC)
    	{
    		aac = new AACHelper();
    		aac.setDecoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels, MediaConstants.sAudioBitRate);
    	}
    	else
    	{
	
	        // Raw PCM path: buffer sized at 8x the platform minimum — presumably to
	        // absorb demuxer jitter; rationale is undocumented, TODO confirm.
	        int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
	        		MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT)*8;
	
	        at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
	        		MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
	            minBufferSize, AudioTrack.MODE_STREAM);
	        
    	}
         
    }
 
Example #7
Source File: TTSUtility.java    From speech-android-sdk with Apache License 2.0 6 votes vote down vote up
/**
 * (Re)initializes the streaming AudioTrack for the current sample rate and starts it.
 * Any previous playback is stopped first via {@code stopTtsPlayer()}.
 */
private void initPlayer(){
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);
        // Fix: `new` never returns null, so the old null check was dead code.
        // The real failure mode is a track that failed to initialize, on which
        // play() would throw IllegalStateException — check the state instead.
        if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
            audioTrack.play();
        }
    }
}
 
Example #8
Source File: OpusTrack.java    From DeviceConnect-Android with MIT License 6 votes vote down vote up
/**
 * Creates an AudioTrack for the configured sampling rate and channel count
 * and starts playback.
 */
private void createAudioTrack() {
    // Buffer is 2x the platform minimum; the rationale is undocumented
    // (presumably headroom against underruns — TODO confirm).
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    if (DEBUG) {
        Log.d(TAG, "OpusTrack::createAudioTrack");
        Log.d(TAG, "  SamplingRate: " + mSamplingRate);
        Log.d(TAG, "  Channels: " + mChannel);
        Log.d(TAG, "  AudioFormat: " + AudioFormat.ENCODING_PCM_16BIT);
        Log.d(TAG, "  BufSize: " + bufSize);
    }

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufSize,
            AudioTrack.MODE_STREAM);

    mAudioTrack.play();
}
 
Example #9
Source File: MediaCodecBridge.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Configures the wrapped MediaCodec for audio decoding and, if {@code playAudio} is set,
 * creates a matching 16-bit PCM AudioTrack for playback.
 *
 * @return true on success, false if the codec was in an illegal state.
 */
@CalledByNative
private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
        boolean playAudio) {
    try {
        mMediaCodec.configure(format, null, crypto, flags);
        if (playAudio) {
            int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
                    AudioFormat.CHANNEL_OUT_STEREO;
            // Using 16bit PCM for output. Keep this value in sync with
            // kBytesPerAudioOutputSample in media_codec_bridge.cc.
            int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT);
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
        }
        return true;
    } catch (IllegalStateException e) {
        Log.e(TAG, "Cannot configure the audio codec " + e.toString());
    }
    return false;
}
 
Example #10
Source File: AudioTrackPositionTracker.java    From MediaSDK with Apache License 2.0 6 votes vote down vote up
/**
 * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
 * track's position, until the next call to {@link #reset()}.
 *
 * @param audioTrack The audio track to wrap.
 * @param outputEncoding The encoding of the audio track.
 * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
 *     otherwise.
 * @param bufferSize The audio track buffer size in bytes.
 */
public void setAudioTrack(
    AudioTrack audioTrack,
    @C.Encoding int outputEncoding,
    int outputPcmFrameSize,
    int bufferSize) {
  this.audioTrack = audioTrack;
  this.outputPcmFrameSize = outputPcmFrameSize;
  this.bufferSize = bufferSize;
  // A fresh poller per track: timestamps from a previous track are meaningless here.
  audioTimestampPoller = new AudioTimestampPoller(audioTrack);
  outputSampleRate = audioTrack.getSampleRate();
  needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
  isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
  // Buffer duration is only computable for PCM, where the frame size is known.
  bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
  // Reset playback-head tracking and workaround state for the new track.
  lastRawPlaybackHeadPosition = 0;
  rawPlaybackHeadWrapCount = 0;
  passthroughWorkaroundPauseOffset = 0;
  hasData = false;
  stopTimestampUs = C.TIME_UNSET;
  forceResetWorkaroundTimeMs = C.TIME_UNSET;
  latencyUs = 0;
}
 
Example #11
Source File: DefaultAudioSink.java    From Telegram-FOSS with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Computes the default AudioTrack buffer size in bytes for the current output configuration.
 * For PCM the platform minimum is scaled and then clamped to duration-derived bounds; for
 * passthrough the size is derived from the encoding's maximum encoded byte rate.
 */
private int getDefaultBufferSize() {
  if (isInputPcm) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize =
        (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        (int)
            Math.max(
                minBufferSize, durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    // Clamp the scaled platform minimum into [minAppBufferSize, maxAppBufferSize].
    return Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
  } else {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    // Size the passthrough buffer to hold PASSTHROUGH_BUFFER_DURATION_US of data.
    return (int) (PASSTHROUGH_BUFFER_DURATION_US * rate / C.MICROS_PER_SECOND);
  }
}
 
Example #12
Source File: BlockingAudioTrack.java    From android_9.0.0_r45 with Apache License 2.0 6 votes vote down vote up
/**
 * Applies a clipped volume and stereo pan to the given track. Panning toward one
 * side attenuates the opposite channel.
 */
private static void setupVolume(AudioTrack audioTrack, float volume, float pan) {
    final float vol = clip(volume, 0.0f, 1.0f);
    final float panning = clip(pan, -1.0f, 1.0f);

    // Attenuate the channel opposite the pan direction; the other stays at `vol`.
    final float volLeft = panning > 0.0f ? vol * (1.0f - panning) : vol;
    final float volRight = panning < 0.0f ? vol * (1.0f + panning) : vol;

    if (DBG) Log.d(TAG, "volLeft=" + volLeft + ",volRight=" + volRight);
    if (audioTrack.setStereoVolume(volLeft, volRight) != AudioTrack.SUCCESS) {
        Log.e(TAG, "Failed to set volume");
    }
}
 
Example #13
Source File: AudioPlayer.java    From Android with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the playback AudioTrack and marks the player as started.
 *
 * @param streamType     Android stream type (e.g. AudioManager.STREAM_MUSIC)
 * @param sampleRateInHz sample rate in Hz
 * @param channelConfig  AudioFormat.CHANNEL_OUT_* channel configuration
 * @param audioFormat    AudioFormat.ENCODING_* sample encoding
 * @return true on success; false if already started, the parameters are invalid,
 *         or the track failed to initialize
 */
public boolean startPlayer(int streamType, int sampleRateInHz, int channelConfig, int audioFormat) {
    
    if (mIsPlayStarted) {
        Log.e(TAG, "Player already started !");
        return false;
    }
    
    // ERROR_BAD_VALUE here means the rate/channel/format combination is unsupported.
    mMinBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,channelConfig,audioFormat);
    if (mMinBufferSize == AudioTrack.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }
    Log.d(TAG , "getMinBufferSize = "+mMinBufferSize+" bytes !");
    
    mAudioTrack = new AudioTrack(streamType,sampleRateInHz,channelConfig,audioFormat,mMinBufferSize,DEFAULT_PLAY_MODE);
    if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioTrack initialize fail !");
        return false;
    }            
    
    mIsPlayStarted = true;
    
    Log.d(TAG, "Start audio player success !");
    
    return true;
}
 
Example #14
Source File: BlockingAudioTrack.java    From android_9.0.0_r45 with Apache License 2.0 6 votes vote down vote up
/**
 * Writes the whole byte array to the track, restarting playback if needed.
 * Loops until all bytes are written or the track reports an error.
 *
 * @return the number of bytes actually written.
 */
private static int writeToAudioTrack(AudioTrack audioTrack, byte[] bytes) {
    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        if (DBG) Log.d(TAG, "AudioTrack not playing, restarting : " + audioTrack.hashCode());
        audioTrack.play();
    }

    int count = 0;
    while (count < bytes.length) {
        // Note that we don't take bufferCopy.mOffset into account because
        // it is guaranteed to be 0.
        //
        // Fix: pass the number of bytes REMAINING, not the full array length.
        // With a non-zero offset, `bytes.length` as the size asks write() to
        // read past the end of the array, yielding ERROR_BAD_VALUE after any
        // partial write and silently truncating playback.
        int written = audioTrack.write(bytes, count, bytes.length - count);
        if (written <= 0) {
            break;
        }
        count += written;
    }
    return count;
}
 
Example #15
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 6 votes vote down vote up
/**
 * Creates an AudioTrack for VoIP playback on API 21+ using AudioAttributes, so the
 * platform can apply voice-call usage routing and speech content-type processing.
 *
 * @param sampleRateInHz requested sample rate in Hz
 * @param channelConfig AudioFormat channel mask
 * @param bufferSizeInBytes track buffer size in bytes
 */
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the mean time.
  final int nativeOutputSampleRate =
      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
  if (sampleRateInHz != nativeOutputSampleRate) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  // Create an audio track where the audio usage is for VoIP and the content type is speech.
  return new AudioTrack(new AudioAttributes.Builder()
                            .setUsage(DEFAULT_USAGE)
                            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                            .build(),
      new AudioFormat.Builder()
          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
          .setSampleRate(sampleRateInHz)
          .setChannelMask(channelConfig)
          .build(),
      bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example #16
Source File: AudioThread.java    From Viewer with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the playback thread's AudioTrack, sized to at least the platform minimum buffer.
 *
 * @param sampleRateInHz sample rate of the decoded audio in Hz
 * @param channel channel count (1 = mono, otherwise stereo)
 * @param streamId id of the media stream being played
 * @param decoderId id of the decoder feeding this thread
 * @param media owning media object
 */
public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media)
{
	if (channel == 1)
	{
		// NOTE(review): CHANNEL_CONFIGURATION_* constants are deprecated in
		// favor of CHANNEL_OUT_MONO/STEREO — consider migrating.
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
	} else
	{
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
	}
	this.mediaStreamId = streamId;
	this.decoderId = decoderId;
	this.media = media;
	// Grow the shared buffer length if the platform minimum exceeds it.
	int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
	if (minBufferSize > audioLength)
	{
		audioLength = minBufferSize;
	}
	mAudioBuffer = new byte[audioLength];
	mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT, audioLength, AudioTrack.MODE_STREAM);
}
 
Example #17
Source File: PlayToneThread.java    From zentone with Apache License 2.0 5 votes vote down vote up
/**
 * Stops the currently playing tone and releases the underlying AudioTrack.
 */
void stopTone() {
  // Fix: the original compared getState() (initialization state: UNINITIALIZED/
  // INITIALIZED/NO_STATIC_DATA) against PLAYSTATE_PLAYING, a playback-state
  // constant — the condition could never be true, so the track was never stopped
  // or released. Playback state must be read via getPlayState().
  if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
    audioTrack.stop();
    audioTrack.release();
    isPlaying = false;
  }
}
 
Example #18
Source File: SaiyTextToSpeech.java    From Saiy-PS with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Helper method to double check the returned {@link SaiyAudioTrack} object hasn't been released
 * elsewhere, recreating it if necessary.
 *
 * @return the {@link SaiyAudioTrack} object, or null if the creation process failed.
 */
private SaiyAudioTrack getAudioTrack() {
    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        audioTrack = SaiyAudioTrack.getSaiyAudioTrack();
        // Fix: getSaiyAudioTrack() may fail and return null (see @return above);
        // guard before attaching the listener to avoid a NullPointerException.
        if (audioTrack != null) {
            audioTrack.setListener(listener);
        }
    }
    return audioTrack;
}
 
Example #19
Source File: AndroidAudioForJSyn.java    From jsyn with Apache License 2.0 5 votes vote down vote up
/**
 * Starts audio output: raises the thread priority, sizes the buffer to 1.5x the
 * platform minimum (rounded down to a multiple of 4), and begins playback of a
 * float-PCM stereo stream.
 */
public void start() {
    // Negative values raise priority on the android.os.Process scale.
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    // 1.5x the minimum, masked to a multiple of 4.
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
 
Example #20
Source File: AudioPlayer.java    From Android with Apache License 2.0 5 votes vote down vote up
/**
 * Stops playback and releases the AudioTrack. Does nothing if the player
 * was never started.
 */
public void stopPlayer() {

    if (!mIsPlayStarted) {
        return;
    }

    // stop() is only valid while the track is playing; release() afterwards.
    final boolean isPlaying = mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING;
    if (isPlaying) {
        mAudioTrack.stop();
    }

    mAudioTrack.release();
    mIsPlayStarted = false;

    Log.d(TAG, "Stop audio player success !");
}
 
Example #21
Source File: AudioTimestampPoller.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Creates a new audio timestamp poller.
 *
 * @param audioTrack The audio track that will provide timestamps, if the platform supports it.
 */
public AudioTimestampPoller(AudioTrack audioTrack) {
  if (Util.SDK_INT >= 19) {
    // AudioTrack.getTimestamp is only available from API 19 (KitKat).
    audioTimestamp = new AudioTimestampV19(audioTrack);
    reset();
  } else {
    // No timestamp support on older platforms; mark the poller unusable.
    audioTimestamp = null;
    updateState(STATE_NO_TIMESTAMP);
  }
}
 
Example #22
Source File: MediaPlayer.java    From HPlayer with Apache License 2.0 5 votes vote down vote up
/**
 * Writes decoded PCM to the track in chunks no larger than the track buffer.
 * Does nothing unless the track exists and is currently playing.
 */
private void audioTrackWrite(byte[] audioData, int offsetInBytes, int sizeInBytes) {
  if (mAudioTrack == null || mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
    return;
  }
  while (sizeInBytes > 0) {
    // Cap each write at the track's buffer size.
    final int chunk = Math.min(sizeInBytes, mAudioTrackBufferSize);
    mAudioTrack.write(audioData, offsetInBytes, chunk);
    offsetInBytes += chunk;
    sizeInBytes -= chunk;
  }
}
 
Example #23
Source File: MediaPlayer.java    From HPlayer with Apache License 2.0 5 votes vote down vote up
/**
 * (Re)creates the playback AudioTrack for the current sample rate and channel count.
 *
 * @return the track's buffer size in bytes, or 0 if creation failed
 */
public int audioTrackInit() {
//	  Log.e("  ffff mediaplayer audiotrackinit start .  sampleRateInHz:=" + sampleRateInHz + " channels:=" + channels );
	    audioTrackRelease();
	    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
	    try {
	      mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
	      mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
	    } catch (Exception e) {
	      mAudioTrackBufferSize = 0;
	      // NOTE(review): android.util.Log has no e(String, Throwable) overload;
	      // this presumably resolves to a project-local Log wrapper — verify.
	      Log.e("audioTrackInit", e);
	    }
	    return mAudioTrackBufferSize;
	  }
 
Example #24
Source File: BlockingAudioTrack.java    From android_9.0.0_r45 with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the given bytes to the current AudioTrack.
 *
 * @return the number of bytes written, or -1 if there is no track or playback
 *     has been stopped.
 */
public int write(byte[] data) {
    // Snapshot the track reference under the lock.
    final AudioTrack track;
    synchronized (mAudioTrackLock) {
        track = mAudioTrack;
    }

    if (track == null || mStopped) {
        return -1;
    }

    final int bytesWritten = writeToAudioTrack(track, data);
    mBytesWritten += bytesWritten;
    return bytesWritten;
}
 
Example #25
Source File: AudioPlayer.java    From EvilsLive with MIT License 5 votes vote down vote up
/**
 * Stops playback and releases the AudioTrack. No-op if the player was never started.
 */
public void stopPlayer() {
    
    if (!mIsPlayStarted) {
        return;
    }
    
    // stop() is only valid while playing; release() is safe afterwards.
    if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
        mAudioTrack.stop();                        
    }
    
    mAudioTrack.release();
    mIsPlayStarted = false;
       
    Log.d(TAG, "Stop audio player success !");
}
 
Example #26
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License 5 votes vote down vote up
/**
 * Returns the platform's minimum output buffer size expressed in PCM frames
 * (rather than bytes) for the given rate and channel count.
 */
private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
  // One 16-bit sample per channel per frame.
  final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
  final int channelConfig =
      numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
  final int minBufferBytes =
      AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
  return minBufferBytes / bytesPerFrame;
}
 
Example #27
Source File: TTSUtility.java    From speech-android-sdk with Apache License 2.0 5 votes vote down vote up
/**
 * Stops TTS playback immediately, discarding any audio still queued in the track.
 */
private void stopTtsPlayer() {
       if (audioTrack != null && audioTrack.getState() != AudioTrack.STATE_UNINITIALIZED ) {
           // IMPORTANT: NOT use stop()
           // For an immediate stop, use pause(), followed by flush() to discard audio data that hasn't been played back yet.
           audioTrack.pause();
           audioTrack.flush();
       }
}
 
Example #28
Source File: AudioTrackManagerSingle.java    From apollo-DuerOS with Apache License 2.0 5 votes vote down vote up
/**
 * Pauses the track if it is currently playing, then releases audio-track focus.
 * Does nothing when there is no track or it is not playing.
 */
public void pauseAudioTrack() {
    if (mAudioTrack == null || mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        return;
    }
    try {
        mAudioTrack.pause();
    } catch (IllegalStateException e) {
        e.printStackTrace();
    }
    // release audio track focus
    releaseAudioTrackFocus();
}
 
Example #29
Source File: WebRtcAudioTrack.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
/**
 * Stops playback, flushes and releases the AudioTrack under the playback lock.
 *
 * @return 0 on success, -1 if stopping the track failed.
 */
@SuppressWarnings("unused")
private int StopPlayback() {
    _playLock.lock();
    try {
        // only stop if we are playing
        if (_audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            // stop playout
            try {
                _audioTrack.stop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
                return -1;
            }

            // flush the buffers
            _audioTrack.flush();
        }

        // release the object
        _audioTrack.release();
        _audioTrack = null;

    } finally {
        // Ensure we always unlock, both for success, exception or error
        // return.
        _doPlayInit = true;
        _playLock.unlock();
    }

    // NOTE(review): written outside the lock, and not cleared on the early -1
    // return above — presumably only the owning thread reads it; confirm.
    _isPlaying = false;
    return 0;
}
 
Example #30
Source File: BlockingAudioTrack.java    From android_9.0.0_r45 with Apache License 2.0 5 votes vote down vote up
/**
 * Forwards the playback-position listener to the current AudioTrack, if any.
 *
 * @see
 *     AudioTrack#setPlaybackPositionUpdateListener(AudioTrack.OnPlaybackPositionUpdateListener).
 */
public void setPlaybackPositionUpdateListener(
        AudioTrack.OnPlaybackPositionUpdateListener listener) {
    // Hold the lock so we never call through a track another thread is
    // concurrently replacing or releasing.
    synchronized (mAudioTrackLock) {
        if (mAudioTrack != null) {
            mAudioTrack.setPlaybackPositionUpdateListener(listener);
        }
    }
}