Java Code Examples for android.media.AudioTrack#MODE_STREAM

The following examples show how to use android.media.AudioTrack#MODE_STREAM. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: PcmPlayer.java    From AssistantBySDK with Apache License 2.0 8 votes vote down vote up
public PcmPlayer(Context context, Handler handler) {
    this.mContext = context;
    this.handler = handler;
    // Streaming 16-bit mono PCM track on the music stream; buffer size and
    // sample rate come from the enclosing class's fields.
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            wBufferSize, AudioTrack.MODE_STREAM);
    audioTrack.setPlaybackPositionUpdateListener(this, handler);
    // Cache synthesized audio under the app-private Music directory.
    cacheDir = context.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
}
 
Example 2
Source File: SimpleAudioOutput.java    From media-samples with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a streaming AudioTrack for float-PCM stereo output at the given rate.
 *
 * @param frameRate sample rate in Hz, used both for the minimum-buffer query
 *                  and for the track itself
 * @return a new AudioTrack in MODE_STREAM; the caller is responsible for release()
 */
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    // BUG FIX: the original computed "8 * minBufferSizeBytes / 8" (a no-op)
    // and then ignored the frameRate parameter below, constructing the track
    // with the mFrameRate field instead. Use the parameter consistently.
    int bufferSize = minBufferSizeBytes;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            frameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Example 3
Source File: StreamPlayer.java    From android-sdk with Apache License 2.0 6 votes vote down vote up
/**
 * Initialize AudioTrack by getting buffersize and start streaming playback.
 *
 * @param sampleRate the sample rate for the audio to be played
 * @throws RuntimeException if the platform cannot report a usable buffer size
 */
private void initPlayer(int sampleRate) {
  synchronized (this) {
    int bufferSize = AudioTrack.getMinBufferSize(
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    // getMinBufferSize() can also return AudioTrack.ERROR (-1), not only
    // ERROR_BAD_VALUE; treat any non-positive result as a failure.
    if (bufferSize == AudioTrack.ERROR_BAD_VALUE || bufferSize <= 0) {
      throw new RuntimeException("Could not determine buffer size for audio");
    }

    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM
    );
    // Start playback immediately; audio data is written to the track elsewhere.
    audioTrack.play();
  }
}
 
Example 4
Source File: Track.java    From K-Sonic with MIT License 6 votes vote down vote up
/**
 * (Re)creates the playback AudioTrack and the Sonic time-stretch processor.
 *
 * @param sampleRate  output sample rate in Hz
 * @param numChannels decoded channel count (forced to 2 when isJMono is set)
 */
private void initDevice(int sampleRate, int numChannels) {
    if (isJMono)
        numChannels = 2;
    mLock.lock();
    try {
        final int format = findFormatFromChannels(numChannels);
        final int minSize = AudioTrack.getMinBufferSize(sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT);
        // 4x the minimum buffer gives headroom while streaming.
        mTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT, minSize * 4,
                AudioTrack.MODE_STREAM);
        mSonic = new Sonic(sampleRate, numChannels);
    } finally {
        // The original had "catch (Exception e) { throw e; }" — a no-op
        // catch-and-rethrow — which was removed. Exceptions (e.g.
        // IllegalArgumentException from the AudioTrack constructor) still
        // propagate, and the lock is always released here.
        mLock.unlock();
    }
}
 
Example 5
Source File: MediaCodecBridge.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
// Configures the wrapped MediaCodec for audio decoding and, when playback is
// requested, creates a matching streaming AudioTrack. Returns true on success.
@CalledByNative
private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
        boolean playAudio) {
    try {
        mMediaCodec.configure(format, null, crypto, flags);
        if (playAudio) {
            // Derive the AudioTrack parameters from the codec's media format.
            int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            // Only mono is mapped explicitly; any other count uses stereo.
            int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
                    AudioFormat.CHANNEL_OUT_STEREO;
            // Using 16bit PCM for output. Keep this value in sync with
            // kBytesPerAudioOutputSample in media_codec_bridge.cc.
            int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT);
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
        }
        return true;
    } catch (IllegalStateException e) {
        Log.e(TAG, "Cannot configure the audio codec " + e.toString());
    }
    // Configuration failed; signal the native caller.
    return false;
}
 
Example 6
Source File: MediaPlayer.java    From MyHearts with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the playback AudioTrack from the current sampleRateInHz/channels
 * fields, releasing any previous track first.
 *
 * @return the buffer size in bytes, or 0 if track creation failed
 */
public int audioTrackInit() {
    // Drop any previously created track before building a new one.
    audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO
            : AudioFormat.CHANNEL_OUT_MONO;
    try {
        mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT,
                mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
        // Creation failed: report a zero buffer size to the caller.
        mAudioTrackBufferSize = 0;
        Log.e("audioTrackInit", e);
    }
    return mAudioTrackBufferSize;
}
 
Example 7
Source File: SourceDataLineImpl.java    From MidiDriver with Apache License 2.0 6 votes vote down vote up
@Override
public void open(AudioFormat format, int bufferSize)
		throws LineUnavailableException {
	// Remember the requested format and buffer so the line can use them later;
	// the buffer is kept small to minimize latency.
	this.format = format;
	this.bufferSize = bufferSize;
	int sampleRateInHz = (int) format.getSampleRate();
	final int channelConfig;
	switch (format.getChannels()) {
	case 1:
		channelConfig = android.media.AudioFormat.CHANNEL_OUT_MONO;
		break;
	case 2:
		channelConfig = android.media.AudioFormat.CHANNEL_OUT_STEREO;
		break;
	default:
		throw new IllegalArgumentException(
				"format.getChannels() must in (1,2)");
	}
	audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
			channelConfig, android.media.AudioFormat.ENCODING_PCM_16BIT,
			bufferSize, AudioTrack.MODE_STREAM);
}
 
Example 8
Source File: PcmPlayer.java    From SinVoiceDemo with Apache License 2.0 5 votes vote down vote up
public PcmPlayer(PcmCallback callback, int sampleRate, int channel,
		int format, int bufferSize) {
	playerCallback = callback;
	// Initialize the AudioTrack (stream type, sample rate, channel config,
	// encoding format, buffer size, mode).
	audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
			channel, format, bufferSize, AudioTrack.MODE_STREAM);
	mState = STATE_STOP;
}
 
Example 9
Source File: SpeexDecoder.java    From sctalk with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the Android playback track for decoded mono 16-bit PCM.
 *
 * @param sampleRate playback sample rate in Hz
 * @throws Exception if AudioTrack.getMinBufferSize() reports an error
 */
private void initializeAndroidAudio(int sampleRate) throws Exception {
    final int bufSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    // Negative values are AudioTrack error codes.
    if (bufSize < 0) {
        throw new Exception("Failed to get minimum buffer size: "
                + Integer.toString(bufSize));
    }

    track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufSize, AudioTrack.MODE_STREAM);
}
 
Example 10
Source File: AudioTrackManagerDualStreamType.java    From apollo-DuerOS with Apache License 2.0 5 votes vote down vote up
/**
 * Rebuilds the media AudioTrack from the previously captured parameters
 * (mPreMedia* fields). On invalid parameters the track is left null rather
 * than letting the exception propagate.
 */
private void reInitAudioTrack() {
    try {
        mMusicAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                mPreMediaSampleRate, mPreMediaChannelConfig, mPreMediaFormate,
                mPreMinBuffSize, AudioTrack.MODE_STREAM);
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
        mMusicAudioTrack = null;
    }
}
 
Example 11
Source File: MediaPlayer.java    From BambooPlayer with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the playback AudioTrack used by the decoder.
 *
 * @param sampleRateInHz output sample rate in Hz
 * @param channels       channel count; two or more maps to stereo
 * @return the buffer size in bytes, or 0 if track creation failed
 */
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  final int channelConfig;
  if (channels >= 2) {
    channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
  } else {
    channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  }
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
        channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
        channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize,
        AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    Log.e("audioTrackInit", e);
    mAudioTrackBufferSize = 0;
  }
  return mAudioTrackBufferSize;
}
 
Example 12
Source File: MediaPlayer.java    From react-native-android-vitamio with MIT License 5 votes vote down vote up
/**
 * (Re)initializes the AudioTrack for streamed PCM output.
 *
 * @param sampleRateInHz sample rate in Hz
 * @param channels       source channel count
 * @return minimum buffer size in bytes on success, 0 on failure
 */
private int audioTrackInit(int sampleRateInHz, int channels) {
  // Always release the old track first.
  audioTrackRelease();
  // Mono unless the source has two or more channels.
  int channelConfig = channels < 2 ? AudioFormat.CHANNEL_OUT_MONO
      : AudioFormat.CHANNEL_OUT_STEREO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
        channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
        channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize,
        AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    Log.e("audioTrackInit", e);
    mAudioTrackBufferSize = 0;
  }
  return mAudioTrackBufferSize;
}
 
Example 13
Source File: AndroidAudioPlayer.java    From cythara with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Creates an AndroidAudioPlayer that streams mono 16-bit PCM audio to an
 * {@link AudioTrack} on the given Android stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer
 *                    will process; only one channel (mono), PCM 16 bit, is supported.
 * @param bufferSizeInSamples  The requested buffer size, in samples.
 * @param streamType  The type of audio stream for the internal AudioTrack,
 *                    e.g. {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not mono or the requested
 *                                  buffer is smaller than the platform minimum.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    final int rate = (int) audioFormat.getSampleRate();

    // Bytes per sample follows from the sample size in bits (2 for 16-bit PCM),
    // so the byte buffer is the sample count scaled by that factor.
    final int requestedBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits() / 8;

    // AudioTrack.getMinBufferSize() reports the smallest internal buffer (in
    // bytes) with which a MODE_STREAM track can be created; requesting less
    // than this would make AudioTrack initialization fail, so reject it here.
    final int minBytes = AudioTrack.getMinBufferSize(rate, AudioFormat.CHANNEL_OUT_MONO,  AudioFormat.ENCODING_PCM_16BIT);
    if (minBytes > requestedBytes) {
        throw new IllegalArgumentException("The buffer size should be at least " + (minBytes/(audioFormat.getSampleSizeInBits()/8)) + " (samples) according to  AudioTrack.getMinBufferSize().");
    }

    //http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, rate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, requestedBytes, AudioTrack.MODE_STREAM);

    // Start streaming right away; callers write audio into the track later.
    audioTrack.play();
}
 
Example 14
Source File: MediaPlayer.java    From Vitamio with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a fresh streaming AudioTrack for the native player.
 *
 * @param sampleRateInHz sample rate in Hz
 * @param channels       number of channels in the decoded audio
 * @return the chosen buffer size in bytes; 0 indicates failure
 */
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  int channelConfig = (channels >= 2)
      ? AudioFormat.CHANNEL_OUT_STEREO
      : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(
        sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(
        AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig,
        AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize,
        AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 15
Source File: AudioTrackManager.java    From TikTok with Apache License 2.0 4 votes vote down vote up
private AudioTrackManager() {
    bufferSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2, AudioTrack.MODE_STREAM);
}
 
Example 16
Source File: AudioParams.java    From chromadoze with GNU General Public License v3.0 4 votes vote down vote up
// Builds a new streaming AudioTrack from the class-level audio constants
// (stream type, sample rate, channel config, encoding, buffer bytes).
AudioTrack makeAudioTrack() {
    return new AudioTrack(
            STREAM_TYPE, SAMPLE_RATE, CHANNEL_CONFIG,
            AUDIO_FORMAT, BUF_BYTES, AudioTrack.MODE_STREAM);
}
 
Example 17
Source File: AudioOutputQueue.java    From Android-Airplay-Server with MIT License 4 votes vote down vote up
/**
 * Builds the audio output queue for a stream: captures the stream's format
 * parameters, sizes a power-of-two byte buffer, creates the streaming
 * AudioTrack, and prepares (but does not start) the enqueuer thread.
 *
 * NOTE(review): the thread start and the play-state wait are commented out
 * below, so playback must be started elsewhere — confirm against the caller.
 */
public AudioOutputQueue(final AudioStreamInformationProvider streamInfoProvider) {
	convertUnsignedToSigned = true;
	
	//setup the Audio Format options
	streamType = AudioManager.STREAM_MUSIC;
	
	sampleRateInHz = streamInfoProvider.getSampleRate();
	channelConfig = streamInfoProvider.getChannels();
	audioFormat = streamInfoProvider. getAudioFormat();
	
	sampleRate = streamInfoProvider.getSampleRate();
	
	/* Audio format-dependent stuff */
	packetSizeFrames = streamInfoProvider.getFramesPerPacket();
	bytesPerFrame = streamInfoProvider.getChannels() * streamInfoProvider.getSampleSizeInBits() / 8;
	
	// Buffer size: BUFFER_SIZE_SECONDS worth of audio, rounded up to the next
	// power of two.
	bufferSizeInBytes = (int)Math.pow(2, Math.ceil(Math.log(BUFFER_SIZE_SECONDS * sampleRate * bytesPerFrame) / Math.log(2.0)));
	
	mode = AudioTrack.MODE_STREAM;
	
	//create the AudioTrack
	//audioTrack = new AudioTrack(streamType, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes, mode);
	// NOTE(review): the FIXME below hard-codes a stereo channel configuration
	// instead of using channelConfig from the provider — confirm intent.
	audioTrack = new AudioTrack(streamType, sampleRateInHz, AudioFormat.CHANNEL_CONFIGURATION_STEREO, audioFormat, bufferSizeInBytes, mode);//FIXME

	LOG.info("AudioTrack created succesfully with a buffer of : " + bufferSizeInBytes + " bytes and : " + bufferSizeInBytes / bytesPerFrame + " frames.");
		
	//create initial array of "filler" bytes ....
	lineLastFrame = new byte[bytesPerFrame];
	for(int b=0; b < lineLastFrame.length; ++b){
		lineLastFrame[b] = (b % 2 == 0) ? (byte)-128 : (byte)0;
	}

	/* Create enqueuer thread and wait for the line to start.
	 * The wait guarantees that the AudioClock functions return
	 * sensible values right after construction
	 */
	queueThread.setDaemon(true);
	queueThread.setName("Audio Enqueuer");
	queueThread.setPriority(Thread.MAX_PRIORITY);
	
	/*
	queueThread.start();
	
	//while ( queueThread.isAlive() && ! m_line.isActive() ){
	while ( queueThread.isAlive() && audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING){
		Thread.yield();
	}
	*/

	/* Initialize the seconds time offset now that the line is running.
	 * 2208988800 is the offset in seconds between 1900-01-01 (NTP epoch)
	 * and the Unix epoch. */
	secondsTimeOffset = 2208988800.0 +  System.currentTimeMillis() * 1e-3;
}
 
Example 18
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License 4 votes vote down vote up
// Pre-Lollipop fallback: builds a voice-call AudioTrack via the stream-type
// constructor, which is the API available below Lollipop.
@SuppressWarnings("deprecation") // Deprecated in API level 25.
private static AudioTrack createAudioTrackOnLowerThanLollipop(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  final AudioTrack track = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
      sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT,
      bufferSizeInBytes, AudioTrack.MODE_STREAM);
  return track;
}
 
Example 19
Source File: MidiDriver.java    From tuxguitar with GNU Lesser General Public License v2.1 4 votes vote down vote up
/**
 * Main synth loop: initializes the native MIDI engine, creates a streaming
 * AudioTrack, then repeatedly writes queued MIDI events to the engine,
 * renders PCM into a short buffer, and pushes it to the track until the
 * thread field is cleared or rendering/writing fails.
 */
private void processMidi() {
	int status = 0;
	int size = 0;

	// Init midi; a zero size from native init means failure, nothing to do.
	if ((size = this.init()) == 0) {
		return;
	}

	short[] buffer = new short[size];

	// Create audio track
	this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, BUFFER_SIZE, AudioTrack.MODE_STREAM);
	
	// Check audiotrack
	// NOTE(review): `new` never yields null, so this branch is unreachable;
	// the STATE_INITIALIZED check below is the real failure path.
	if( audioTrack == null ) {
		this.shutdown();
		return;
	}

	// Check state; an uninitialized track means construction failed.
	int state = this.audioTrack.getState();

	if (state != AudioTrack.STATE_INITIALIZED) {
		this.audioTrack.release();
		this.shutdown();
		return;
	}

	// Play track
	this.audioTrack.play();

	// Keep running until stopped
	while( this.thread != null ) {
		
		// Write the midi events queued by other threads (guarded by mutex).
		synchronized (this.mutex) {
			for(byte[] queuedEvent : this.queuedEvents) {
				this.write(queuedEvent);
			}
			this.queuedEvents.clear();
		}
		
		// Render the audio; zero rendered data ends the loop.
		if (this.render(buffer) == 0) {
			break;
		}
		// Write audio to audiotrack; a negative status is a write error.
		status = this.audioTrack.write(buffer, 0, buffer.length);

		if (status < 0) {
			break;
		}
	}

	// Render and write the last bit of audio (only if the last write succeeded)
	if( status > 0 ) {
		if (this.render(buffer) > 0) {
			this.audioTrack.write(buffer, 0, buffer.length);
		}
	}
	// Shut down audio
	this.shutdown();
	this.audioTrack.release();
}
 
Example 20
Source File: FullScreenMJPEGPlayerFragment.java    From CameraV with GNU General Public License v3.0 3 votes vote down vote up
/**
 * Opens the audio side-stream at the given virtual-filesystem path and
 * prepares a streaming AudioTrack for it.
 *
 * @param vfsPath path of the audio data file to play
 * @throws Exception if the file cannot be opened
 */
public void initAudio(String vfsPath) throws Exception {
    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    // Request 8x the minimum buffer to smooth out jitter while streaming.
    int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

    at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
            MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
}