Java Code Examples for android.media.AudioTrack#getMinBufferSize()

The following examples show how to use android.media.AudioTrack#getMinBufferSize(). They are taken from open-source projects; the source file, project, and license are listed above each example.
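
Before the individual examples, here is a minimal sketch of the pattern most of them share: query the minimum buffer size for a given sample rate, channel mask, and encoding, check the result for error values, and then size the AudioTrack's buffer from that minimum. The class name, the stereo 16-bit PCM configuration, and the 2x multiplier below are illustrative assumptions, not taken from any of the listed projects; it uses the older stream-type AudioTrack constructor that most of the examples below also use.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class MinBufferSizeSketch {
    // Query the minimum buffer size, validate it, then construct an AudioTrack
    // whose buffer is a multiple of that minimum.
    public static AudioTrack createPcm16StereoTrack(int sampleRateInHz) {
        int minBufferSize = AudioTrack.getMinBufferSize(
                sampleRateInHz,
                AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (minBufferSize == AudioTrack.ERROR || minBufferSize == AudioTrack.ERROR_BAD_VALUE) {
            throw new IllegalArgumentException("Unsupported AudioTrack configuration");
        }
        // A multiple of the minimum is commonly used to reduce underruns;
        // the factor of 2 here is an arbitrary illustration.
        int bufferSizeInBytes = 2 * minBufferSize;
        return new AudioTrack(
                AudioManager.STREAM_MUSIC,
                sampleRateInHz,
                AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSizeInBytes,
                AudioTrack.MODE_STREAM);
    }
}
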
Example 1
Source File: DefaultAudioSink.java    From Telegram with GNU General Public License v2.0
private int getDefaultBufferSize() {
  if (isInputPcm) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize =
        (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        (int)
            Math.max(
                minBufferSize, durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    return Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
  } else {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    return (int) (PASSTHROUGH_BUFFER_DURATION_US * rate / C.MICROS_PER_SECOND);
  }
}
 
Example 2
Source File: SimpleAudioOutput.java    From android-MidiSynth with Apache License 2.0
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Example 3
Source File: DefaultAudioSink.java    From Telegram-FOSS with GNU General Public License v2.0
private int getDefaultBufferSize() {
  if (isInputPcm) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize =
        (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        (int)
            Math.max(
                minBufferSize, durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    return Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
  } else {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    return (int) (PASSTHROUGH_BUFFER_DURATION_US * rate / C.MICROS_PER_SECOND);
  }
}
 
Example 4
Source File: Track.java    From K-Sonic with MIT License
private void initDevice(int sampleRate, int numChannels) {
    if (isJMono)
        numChannels = 2;
    mLock.lock();
    try {
        final int format = findFormatFromChannels(numChannels);
        final int minSize = AudioTrack.getMinBufferSize(sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT);
        mTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT, minSize * 4,
                AudioTrack.MODE_STREAM);
        mSonic = new Sonic(sampleRate, numChannels);
    } catch (Exception e) {//IllegalArgumentException
        throw e;
    } finally {
        mLock.unlock();
    }
}
 
Example 5
Source File: MediaPlayer.java    From video-player with MIT License
@SuppressLint("NewApi")
  
private int audioTrackInit(int sampleRateInHz, int channels) {
 //  this.sampleRateInHz=sampleRateInHz;
 //  this.channels=channels;
 //   return 0;
	
 audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
      mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
      mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
      mAudioTrackBufferSize = 0;
      Log.e("audioTrackInit", e);
    }
    return mAudioTrackBufferSize;
  }
 
Example 6
Source File: JSynAndroidAudioDeviceManager.java    From processing-sound with GNU Lesser General Public License v2.1
public void start() {
	this.minBufferSize = AudioTrack.getMinBufferSize(this.frameRate, AudioFormat.CHANNEL_OUT_STEREO,
			AudioFormat.ENCODING_PCM_16BIT);
	this.bufferSize = (3 * (this.minBufferSize / 2)) & ~3;
	this.audioTrack = new AudioTrack.Builder()
			.setAudioAttributes(new AudioAttributes.Builder()
					.setUsage(AudioAttributes.USAGE_MEDIA)
					.setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
					.build())
			.setAudioFormat(new AudioFormat.Builder()
					.setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
					.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
					.setSampleRate(this.frameRate)
					.build())
			.setBufferSizeInBytes(this.bufferSize)
			.setTransferMode(AudioTrack.MODE_STREAM)
			.build();
	this.audioTrack.play();
}
 
Example 7
Source File: SimpleAudioOutput.java    From media-samples with Apache License 2.0
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Example 8
Source File: SimpleAudioOutput.java    From android-MidiScope with Apache License 2.0
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Example 9
Source File: AACHelper.java    From CameraV with GNU General Public License v3.0
public boolean setPlayer(int rate)
{
    int bufferSizePlayer = AudioTrack.getMinBufferSize(rate, AudioFormat.CHANNEL_OUT_MONO, audioFormat);
    Log.d("====buffer Size player ", String.valueOf(bufferSizePlayer));

    player = new AudioTrack(AudioManager.STREAM_MUSIC, rate, AudioFormat.CHANNEL_OUT_MONO, audioFormat, bufferSizePlayer, AudioTrack.MODE_STREAM);

    return player.getState() == AudioTrack.STATE_INITIALIZED;
}
 
Example 10
Source File: MediaPlayer.java    From Vitamio with Apache License 2.0
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 11
Source File: MediaPlayer.java    From BambooPlayer with Apache License 2.0
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 12
Source File: AndroidAudioForJSyn.java    From jsyn with Apache License 2.0
public void start() {
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
 
Example 13
Source File: MediaPlayer.java    From HPlayer with Apache License 2.0
public int audioTrackInit() {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 14
Source File: SpeexDecoder.java    From sctalk with Apache License 2.0
private void initializeAndroidAudio(int sampleRate) throws Exception {
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    if (minBufferSize < 0) {
        throw new Exception("Failed to get minimum buffer size: "
                + Integer.toString(minBufferSize));
    }

    track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);

}
 
Example 15
Source File: AudioManagerAndroid.java    From 365browser with Apache License 2.0
/**
 * Returns the minimum frame size required for audio output.
 *
 * @param sampleRate sampling rate
 * @param channels number of channels
 */
@CalledByNative
private static int getMinOutputFrameSize(int sampleRate, int channels) {
    int channelConfig;
    if (channels == 1) {
        channelConfig = AudioFormat.CHANNEL_OUT_MONO;
    } else if (channels == 2) {
        channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    } else {
        return -1;
    }
    // getMinBufferSize() returns bytes; divide by 2 bytes per 16-bit sample
    // and by the channel count to convert bytes to frames.
    return AudioTrack.getMinBufferSize(
            sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
}
 
Example 16
Source File: OpusPlayerActor.java    From actor-platform with GNU Affero General Public License v3.0
protected void onPlayMessage(String fileName, float seek) {
    if (state != STATE_NONE) {
        destroyPlayer();
    }
    state = STATE_NONE;
    currentFileName = fileName;

    int res = opusLib.openOpusFile(currentFileName);
    if (res == 0) {
        callback.onError(currentFileName);
        return;
    }

    duration = opusLib.getTotalPcmDuration();
    offset = 0;

    try {
        bufferSize = AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
        audioTrack.play();
    } catch (Exception e) {
        e.printStackTrace();
        destroyPlayer();
        callback.onError(currentFileName);
        return;
    }

    state = STATE_STARTED;
    if (seek != 0) {
        opusLib.seekOpusFile(seek);
    }
    callback.onStart(fileName);
    self().send(new Iterate());
}
 
Example 17
Source File: AndroidAudioPlayer.java    From cythara with GNU General Public License v3.0
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples  The requested buffer size in samples.
 * @param streamType  The type of audio stream that the internal AudioTrack should use. For
 *                    example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();

    //The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits()/8;

    // From the Android API about getMinBufferSize():
    // The total size (in bytes) of the internal buffer where audio data is read from for playback.
    // If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less than or equal to this size,
    // and it is typical to use chunks of 1/2 of the total size to permit double-buffering. If the track's creation mode is MODE_STATIC,
    // this is the maximum length sample, or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int) to determine
    // the minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode. Using values smaller
    // than getMinBufferSize() will result in an initialization failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,  AudioFormat.ENCODING_PCM_16BIT);
    if(minBufferSizeInBytes > bufferSizeInBytes){
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes/(audioFormat.getSampleSizeInBits()/8)) + " (samples) according to  AudioTrack.getMinBufferSize().");
    }

    //http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes,AudioTrack.MODE_STREAM);

    audioTrack.play();
}
 
Example 18
Source File: FullScreenMJPEGPlayerFragment.java    From CameraV with GNU General Public License v3.0
public void initAudio(String vfsPath) throws Exception {

    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

    at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
}
 
Example 19
Source File: FullScreenMJPEGViewFragment.java    From CameraV with GNU General Public License v3.0
public void initAudio(String vfsPath) throws Exception {

    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

    at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
}