android.media.AudioTrack Java Examples

The following examples show how to use android.media.AudioTrack. They are drawn from open source projects; the project, author, source file, and license are listed above each example.
Example #1
Source Project: science-journal   Author: google   File: AndroidAudioForJSyn.java    License: Apache License 2.0
@Override
public void start() {
  minBufferSize =
      AudioTrack.getMinBufferSize(
          frameRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
  System.out.println("Audio minBufferSize = " + minBufferSize);
  // Use 1.5x the minimum size, rounded down to a multiple of 4 bytes
  // (one stereo 16-bit frame).
  bufferSize = (3 * (minBufferSize / 2)) & ~3;
  System.out.println("Audio bufferSize = " + bufferSize);
  audioTrack =
      new AudioTrack(
          AudioManager.STREAM_MUSIC,
          frameRate,
          AudioFormat.CHANNEL_OUT_STEREO,
          AudioFormat.ENCODING_PCM_16BIT,
          bufferSize,
          AudioTrack.MODE_STREAM);
  audioTrack.play();
}
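The stream-type constructor used above is deprecated as of API 26; on API 23+ an equivalent track can be created with AudioTrack.Builder. A minimal sketch, assuming the frameRate and bufferSize values computed in start():

audioTrack = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                .setSampleRate(frameRate)
                .build())
        .setBufferSizeInBytes(bufferSize)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .build();
audioTrack.play();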
 
Example #2
Source Project: AssistantBySDK   Author: LingjuAI   File: PcmPlayer.java    License: Apache License 2.0
@Override
public void onMarkerReached(AudioTrack track) {
    Log.i(TAG, "onMarkerReached>>>" + track.getNotificationMarkerPosition());
    if (playLock.tryLock()) {
        try {
            playCondition.signalAll();
        } finally {
            playLock.unlock();
        }
    }
    Log.i(TAG, "PCM SIZE=" + pcms.size());
    if (!pending.get() && pcms.size() == 0) {
        play.set(false);
        playListener.onCompleted();
    }
}
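onMarkerReached fires only if a notification marker was registered on the track beforehand. A minimal sketch of that setup (the audioTrack and markerFrames names are assumed):

// Attach this listener and set the marker position, measured in frames.
audioTrack.setPlaybackPositionUpdateListener(this);
audioTrack.setNotificationMarkerPosition(markerFrames);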
 
Example #3
Source Project: speech-android-sdk   Author: watson-developer-cloud   File: TTSUtility.java    License: Apache License 2.0
private void initPlayer(){
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);
        // The AudioTrack constructor never returns null; check that the track
        // actually initialized before starting playback.
        if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED)
            audioTrack.play();
    }
}
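getMinBufferSize returns AudioTrack.ERROR_BAD_VALUE or AudioTrack.ERROR when the parameters are unsupported, which the snippet above does not check. A guarded sketch:

// Validate the minimum buffer size before constructing the track.
int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize == AudioTrack.ERROR_BAD_VALUE || bufferSize == AudioTrack.ERROR) {
    throw new IllegalStateException("Unsupported audio parameters");
}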
 
Example #4
Source Project: Telegram-FOSS   Author: Telegram-FOSS-Team   File: DefaultAudioSink.java    License: GNU General Public License v2.0
private int getDefaultBufferSize() {
  if (isInputPcm) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize =
        (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        (int)
            Math.max(
                minBufferSize, durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    return Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
  } else {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    return (int) (PASSTHROUGH_BUFFER_DURATION_US * rate / C.MICROS_PER_SECOND);
  }
}
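durationUsToFrames is an ExoPlayer helper; presumably it is the plain microseconds-to-frames conversion at the output sample rate. A sketch under that assumption:

// Assumed conversion: microseconds of audio to PCM frames at outputSampleRate.
private long durationUsToFrames(long durationUs) {
    return (durationUs * outputSampleRate) / 1_000_000L;
}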
 
Example #5
Source Project: android-chromium   Author: eduplus   File: MediaCodecBridge.java    License: BSD 2-Clause "Simplified" License
@CalledByNative
private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
        boolean playAudio) {
    try {
        mMediaCodec.configure(format, null, crypto, flags);
        if (playAudio) {
            int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
                    AudioFormat.CHANNEL_OUT_STEREO;
            // Using 16bit PCM for output. Keep this value in sync with
            // kBytesPerAudioOutputSample in media_codec_bridge.cc.
            int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT);
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
        }
        return true;
    } catch (IllegalStateException e) {
        Log.e(TAG, "Cannot configure the audio codec " + e.toString());
    }
    return false;
}
 
Example #6
Source Project: MediaSDK   Author: JeffMony   File: AudioTrackPositionTracker.java    License: Apache License 2.0
/**
 * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
 * track's position, until the next call to {@link #reset()}.
 *
 * @param audioTrack The audio track to wrap.
 * @param outputEncoding The encoding of the audio track.
 * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
 *     otherwise.
 * @param bufferSize The audio track buffer size in bytes.
 */
public void setAudioTrack(
    AudioTrack audioTrack,
    @C.Encoding int outputEncoding,
    int outputPcmFrameSize,
    int bufferSize) {
  this.audioTrack = audioTrack;
  this.outputPcmFrameSize = outputPcmFrameSize;
  this.bufferSize = bufferSize;
  audioTimestampPoller = new AudioTimestampPoller(audioTrack);
  outputSampleRate = audioTrack.getSampleRate();
  needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
  isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
  bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
  lastRawPlaybackHeadPosition = 0;
  rawPlaybackHeadWrapCount = 0;
  passthroughWorkaroundPauseOffset = 0;
  hasData = false;
  stopTimestampUs = C.TIME_UNSET;
  forceResetWorkaroundTimeMs = C.TIME_UNSET;
  latencyUs = 0;
}
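framesToDurationUs, used above to derive bufferSizeUs, would be the inverse conversion. A sketch under the same assumption:

// Assumed inverse: PCM frames at outputSampleRate to microseconds of audio.
private long framesToDurationUs(long frameCount) {
    return (frameCount * 1_000_000L) / outputSampleRate;
}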
 
Example #7
Source Project: Android   Author: Jhuster   File: AudioPlayer.java    License: Apache License 2.0
public boolean startPlayer(int streamType, int sampleRateInHz, int channelConfig, int audioFormat) {
    
    if (mIsPlayStarted) {
        Log.e(TAG, "Player already started !");
        return false;
    }
    
    mMinBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,channelConfig,audioFormat);
    if (mMinBufferSize == AudioTrack.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }
    Log.d(TAG , "getMinBufferSize = "+mMinBufferSize+" bytes !");
    
    mAudioTrack = new AudioTrack(streamType,sampleRateInHz,channelConfig,audioFormat,mMinBufferSize,DEFAULT_PLAY_MODE);
    if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioTrack initialize fail !");
        return false;
    }            
    
    mIsPlayStarted = true;
    
    Log.d(TAG, "Start audio player success !");
    
    return true;
}
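A usage sketch for this helper, assuming a no-argument constructor; 44.1 kHz stereo 16-bit PCM is a combination that getMinBufferSize accepts on virtually every device:

AudioPlayer player = new AudioPlayer();
if (player.startPlayer(AudioManager.STREAM_MUSIC, 44100,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT)) {
    // feed PCM buffers to the underlying track, then stop the player when done
}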
 
Example #8
Source Project: rtmp-rtsp-stream-client-java   Author: pedroSG94   File: FromFileBase.java    License: Apache License 2.0
/**
 * Stop stream started with @startStream.
 */
public void stopStream() {
  if (streaming) {
    streaming = false;
    stopStreamRtp();
  }
  if (!recordController.isRecording()) {
    if (glInterface != null) {
      glInterface.removeMediaCodecSurface();
      glInterface.stop();
    }
    if (videoDecoder != null) videoDecoder.stop();
    if (audioDecoder != null) audioDecoder.stop();
    if (audioTrackPlayer != null
        && audioTrackPlayer.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
      audioTrackPlayer.stop();
    }
    audioTrackPlayer = null;
    videoEncoder.stop();
    audioEncoder.stop();
    recordController.resetFormats();
  }
}
 
Example #9
Source Project: webrtc_android   Author: ddssingsong   File: WebRtcAudioTrack.java    License: MIT License
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the mean time.
  final int nativeOutputSampleRate =
      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
  if (sampleRateInHz != nativeOutputSampleRate) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  // Create an audio track where the audio usage is for VoIP and the content type is speech.
  return new AudioTrack(new AudioAttributes.Builder()
                            .setUsage(DEFAULT_USAGE)
                            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                            .build(),
      new AudioFormat.Builder()
          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
          .setSampleRate(sampleRateInHz)
          .setChannelMask(channelConfig)
          .build(),
      bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example #10
Source Project: android_9.0.0_r45   Author: lulululbj   File: BlockingAudioTrack.java    License: Apache License 2.0
private static int writeToAudioTrack(AudioTrack audioTrack, byte[] bytes) {
    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        if (DBG) Log.d(TAG, "AudioTrack not playing, restarting : " + audioTrack.hashCode());
        audioTrack.play();
    }

    int count = 0;
    while (count < bytes.length) {
        // Note that we don't take bufferCopy.mOffset into account because
        // it is guaranteed to be 0.
        // Write only the remaining bytes: offset + size must not exceed the
        // array length, or write() returns ERROR_BAD_VALUE.
        int written = audioTrack.write(bytes, count, bytes.length - count);
        if (written <= 0) {
            break;
        }
        count += written;
    }
    return count;
}
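For a MODE_STREAM track, write() blocks until buffer space frees up, so this loop normally completes in a single pass. Since API 23 the blocking behavior can be made explicit with the four-argument overload; a sketch:

// Assuming API 23+: block until the remaining bytes are queued.
int written = audioTrack.write(bytes, count, bytes.length - count,
        AudioTrack.WRITE_BLOCKING);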
 
Example #11
Source Project: android_9.0.0_r45   Author: lulululbj   File: BlockingAudioTrack.java    License: Apache License 2.0
private static void setupVolume(AudioTrack audioTrack, float volume, float pan) {
    final float vol = clip(volume, 0.0f, 1.0f);
    final float panning = clip(pan, -1.0f, 1.0f);

    float volLeft = vol;
    float volRight = vol;
    if (panning > 0.0f) {
        volLeft *= (1.0f - panning);
    } else if (panning < 0.0f) {
        volRight *= (1.0f + panning);
    }
    if (DBG) Log.d(TAG, "volLeft=" + volLeft + ",volRight=" + volRight);
    if (audioTrack.setStereoVolume(volLeft, volRight) != AudioTrack.SUCCESS) {
        Log.e(TAG, "Failed to set volume");
    }
}
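setStereoVolume is deprecated as of API 21 in favor of a single per-track gain, so panning would have to be applied to the samples instead. A sketch of the replacement call:

// Assuming API 21+: one gain applies to all channels.
if (audioTrack.setVolume(vol) != AudioTrack.SUCCESS) {
    Log.e(TAG, "Failed to set volume");
}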
 
Example #12
Source Project: DeviceConnect-Android   Author: DeviceConnect   File: OpusTrack.java    License: MIT License
/**
 * Creates an AudioTrack with the specified sampling rate and channel count.
 */
private void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    if (DEBUG) {
        Log.d(TAG, "OpusTrack::createAudioTrack");
        Log.d(TAG, "  SamplingRate: " + mSamplingRate);
        Log.d(TAG, "  Channels: " + mChannel);
        Log.d(TAG, "  AudioFormat: " + AudioFormat.ENCODING_PCM_16BIT);
        Log.d(TAG, "  BufSize: " + bufSize);
    }

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufSize,
            AudioTrack.MODE_STREAM);

    mAudioTrack.play();
}
 
Example #13
Source Project: opentok-android-sdk-samples   Author: opentok   File: CustomAudioDevice.java    License: MIT License
@Override
public boolean stopRenderer() {
    Log.d("AUDIO_FOCUS", "Stop Renderer");

    rendererLock.lock();
    try {
        // only stop if we are playing
        if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            // stop playout
            audioTrack.stop();

        }
        // flush the buffers
        audioTrack.flush();
    } catch (Exception e) {
        // Preserve the original exception as the cause.
        throw new RuntimeException(e);
    } finally {
        // Ensure we always unlock, both for success, exception or error
        // return.
        isRendering = false;
        rendererLock.unlock();
    }
    unregisterHeadsetReceiver();
    unregisterBtReceiver();
    return true;
}
 
Example #14
Source Project: RxAndroidAudio   Author: Piasy   File: StreamAudioPlayer.java    License: MIT License
@WorkerThread
public synchronized boolean play(byte[] data, int size) {
    if (mAudioTrack != null) {
        try {
            int ret = mAudioTrack.write(data, 0, size);
            switch (ret) {
                case AudioTrack.ERROR_INVALID_OPERATION:
                    Log.w(TAG, "play fail: ERROR_INVALID_OPERATION");
                    return false;
                case AudioTrack.ERROR_BAD_VALUE:
                    Log.w(TAG, "play fail: ERROR_BAD_VALUE");
                    return false;
                case AudioManager.ERROR_DEAD_OBJECT:
                    Log.w(TAG, "play fail: ERROR_DEAD_OBJECT");
                    return false;
                default:
                    return true;
            }
        } catch (IllegalStateException e) {
            Log.w(TAG, "play fail: " + e.getMessage());
            return false;
        }
    }
    Log.w(TAG, "play fail: null mAudioTrack");
    return false;
}
 
Example #15
Source Project: CameraV   Author: guardianproject   File: MjpegPlayerActivity.java    License: GNU General Public License v3.0
public void initAudio(String vfsPath) throws Exception {

    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    if (useAAC) {
        aac = new AACHelper();
        aac.setDecoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels, MediaConstants.sAudioBitRate);
    } else {
        int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

        at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
    }
}
 
Example #16
Source Project: Viewer   Author: OpenIchano   File: AudioThread.java    License: Apache License 2.0
public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media)
{
	if (channel == 1)
	{
		channel_configuration = AudioFormat.CHANNEL_OUT_MONO;
	} else
	{
		channel_configuration = AudioFormat.CHANNEL_OUT_STEREO;
	}
	this.mediaStreamId = streamId;
	this.decoderId = decoderId;
	this.media = media;
	int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
	if (minBufferSize > audioLength)
	{
		audioLength = minBufferSize;
	}
	mAudioBuffer = new byte[audioLength];
	mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT, audioLength, AudioTrack.MODE_STREAM);
}
 
Example #17
Source Project: apollo-DuerOS   Author: ApolloAuto   File: AudioTrackManagerSingle.java    License: Apache License 2.0
public void pauseAudioTrack() {
    if ((mAudioTrack != null) && (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING)) {
        try {
            mAudioTrack.pause();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }

        // release audio track focus
        releaseAudioTrackFocus();
    }
}
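Note the choice of pause() over stop(): pause() halts output immediately and keeps unplayed data queued for a later play(), whereas stop() on a MODE_STREAM track lets data that has already been written play out before stopping.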
 
Example #18
Source Project: voice-quickstart-android   Author: twilio   File: FileAndMicAudioDevice.java    License: MIT License
@Override
public boolean onInitRenderer() {
    int bytesPerFrame = getRendererFormat().getChannelCount() * (BITS_PER_SAMPLE / 8);
    readByteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (getRendererFormat().getSampleRate() / BUFFERS_PER_SECOND));
    int channelConfig = channelCountToConfiguration(getRendererFormat().getChannelCount());
    // Size the playback buffer from AudioTrack's minimum for this format.
    int minBufferSize = AudioTrack.getMinBufferSize(getRendererFormat().getSampleRate(), channelConfig, android.media.AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL, getRendererFormat().getSampleRate(), channelConfig,
            android.media.AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
    keepAliveRendererRunnable = true;
    return true;
}
 
Example #19
Source Project: apollo-DuerOS   Author: ApolloAuto   File: AudioTrackManagerDualNormal.java    License: Apache License 2.0
private void reInitAudioTrack() {
    try {
        mMusicAudioTrack =
                new AudioTrack(AudioManager.STREAM_MUSIC, mPreMediaSampleRate, mPreMediaChannelConfig,
                        mPreMediaFormate, mPreMinBuffSize, AudioTrack.MODE_STREAM);

    } catch (IllegalArgumentException e) {
        informMusicPause();
        mMusicAudioTrack = null;
        e.printStackTrace();
    }
}
 
Example #20
Source Project: android-chromium   Author: eduplus   File: MediaCodecBridge.java    License: BSD 2-Clause "Simplified" License
@CalledByNative
private void playOutputBuffer(byte[] buf) {
    if (mAudioTrack != null) {
        if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
            mAudioTrack.play();
        }
        int size = mAudioTrack.write(buf, 0, buf.length);
        if (buf.length != size) {
            Log.i(TAG, "Failed to send all data to audio output, expected size: " +
                    buf.length + ", actual size: " + size);
        }
    }
}
 
Example #21
Source Project: MediaSDK   Author: JeffMony   File: DefaultAudioSink.java    License: Apache License 2.0
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
  int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
  return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize,
      MODE_STATIC, audioSessionId);
}
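The two-byte MODE_STATIC buffer is never meant to be heard; a track like this exists only so that the given audio session id remains active while the real output track is torn down and re-created.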
 
Example #22
Source Project: NetEasyNews   Author: liaozhoubei   File: MediaPlayer.java    License: GNU General Public License v3.0
private void audioTrackRelease() {
  if (mAudioTrack != null) {
    if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
      mAudioTrack.stop();
    mAudioTrack.release();
  }
  mAudioTrack = null;
}
 
Example #23
Source Project: Telegram   Author: DrKLO   File: AudioTimestampPoller.java    License: GNU General Public License v2.0
/**
 * Creates a new audio timestamp poller.
 *
 * @param audioTrack The audio track that will provide timestamps, if the platform supports it.
 */
public AudioTimestampPoller(AudioTrack audioTrack) {
  if (Util.SDK_INT >= 19) {
    audioTimestamp = new AudioTimestampV19(audioTrack);
    reset();
  } else {
    audioTimestamp = null;
    updateState(STATE_NO_TIMESTAMP);
  }
}
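On API 19+, AudioTimestampV19 presumably wraps AudioTrack.getTimestamp(AudioTimestamp), which pairs a frame position with the system time at which it was sampled. A sketch of a single poll:

// Poll the track for a frame-position / system-time pair (API 19+).
AudioTimestamp timestamp = new AudioTimestamp();
if (audioTrack.getTimestamp(timestamp)) {
    long frames = timestamp.framePosition; // frames presented as of...
    long nanos = timestamp.nanoTime;       // ...this System.nanoTime() value
}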
 
Example #24
Source Project: jsyn   Author: philburk   File: AndroidAudioForJSyn.java    License: Apache License 2.0
public void write(double[] buffer, int start, int count) {
    // Allocate buffer if needed.
    if ((floatBuffer == null) || (floatBuffer.length < count)) {
        floatBuffer = new float[count];
    }
    // Convert double samples to floats.
    for (int i = 0; i < count; i++) {
        floatBuffer[i] = (float) buffer[i + start];
    }
    // Blocking float write; requires a track created with ENCODING_PCM_FLOAT (API 21+).
    audioTrack.write(floatBuffer, 0, count, AudioTrack.WRITE_BLOCKING);
}
 
Example #25
Source Project: webrtc_android   Author: ddssingsong   File: WebRtcAudioTrack.java    License: MIT License
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the mean time.
  final int nativeOutputSampleRate =
      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
  if (sampleRateInHz != nativeOutputSampleRate) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  if (usageAttribute != DEFAULT_USAGE) {
    Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute);
  }
  // Create an audio track where the audio usage is for VoIP and the content type is speech.
  return new AudioTrack(
      new AudioAttributes.Builder()
          .setUsage(usageAttribute)
          .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
      .build(),
      new AudioFormat.Builder()
        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
        .setSampleRate(sampleRateInHz)
        .setChannelMask(channelConfig)
        .build(),
      bufferSizeInBytes,
      AudioTrack.MODE_STREAM,
      AudioManager.AUDIO_SESSION_ID_GENERATE);
}
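The TODO above refers to AudioTrack.Builder.setPerformanceMode, available since API 26. A sketch of the same format plus the fast-path request:

// Assuming API 26+: request the low-latency output path.
AudioTrack track = new AudioTrack.Builder()
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(sampleRateInHz)
                .setChannelMask(channelConfig)
                .build())
        .setBufferSizeInBytes(bufferSizeInBytes)
        .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
        .build();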
 
Example #26
Source Project: droidkit-webrtc   Author: actorapp   File: WebRtcAudioTrack.java    License: BSD 3-Clause "New" or "Revised" License
@SuppressWarnings("unused")
private int StopPlayback() {
    _playLock.lock();
    try {
        // only stop if we are playing
        if (_audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            // stop playout
            try {
                _audioTrack.stop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
                return -1;
            }

            // flush the buffers
            _audioTrack.flush();
        }

        // release the object
        _audioTrack.release();
        _audioTrack = null;

    } finally {
        // Ensure we always unlock, both for success, exception or error
        // return.
        _doPlayInit = true;
        _playLock.unlock();
    }

    _isPlaying = false;
    return 0;
}