Java Code Examples for android.media.AudioFormat#CHANNEL_OUT_MONO

The following examples show how to use android.media.AudioFormat#CHANNEL_OUT_MONO. Each example comes from an open-source project; the source file, project, and license are noted above each snippet.
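Before the project examples, here is a minimal, self-contained sketch of the typical CHANNEL_OUT_MONO workflow: query the minimum buffer size for a mono 16-bit PCM stream, construct an AudioTrack, and write samples to it. The class name and tone parameters below are illustrative, not taken from any of the projects.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public final class MonoToneDemo {

    // Plays one second of a 440 Hz sine tone through a mono AudioTrack.
    public static void playTone() {
        final int sampleRate = 44100;
        final int minBufferSize = AudioTrack.getMinBufferSize(
                sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

        AudioTrack track = new AudioTrack(
                AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize,
                AudioTrack.MODE_STREAM);

        // One second of samples; mono means one sample per frame.
        short[] samples = new short[sampleRate];
        for (int i = 0; i < samples.length; i++) {
            samples[i] = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE);
        }

        track.play();
        track.write(samples, 0, samples.length); // blocking write in MODE_STREAM
        track.stop();
        track.release();
    }
}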
Example 1
Source File: PcmPlayer.java    From AssistantBySDK with Apache License 2.0
public PcmPlayer(Context context, Handler handler) {
    this.mContext = context;
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, wBufferSize, AudioTrack.MODE_STREAM);
    this.handler = handler;
    audioTrack.setPlaybackPositionUpdateListener(this, handler);
    cacheDir = context.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
}
 
Example 2
Source File: OpusTrack.java    From DeviceConnect-Android with MIT License
/**
 * Creates an AudioTrack with the specified sampling rate and channel count.
 */
private void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    if (DEBUG) {
        Log.d(TAG, "OpusTrack::createAudioTrack");
        Log.d(TAG, "  SamplingRate: " + mSamplingRate);
        Log.d(TAG, "  Channels: " + mChannel);
        Log.d(TAG, "  AudioFormat: " + AudioFormat.ENCODING_PCM_16BIT);
        Log.d(TAG, "  BufSize: " + bufSize);
    }

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufSize,
            AudioTrack.MODE_STREAM);

    mAudioTrack.play();
}
 
Example 3
Source File: MediaPlayer.java    From MyHearts with Apache License 2.0
public int audioTrackInit() {
    audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
        mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
        mAudioTrackBufferSize = 0;
        Log.e("audioTrackInit", "failed to create AudioTrack", e);
    }
    return mAudioTrackBufferSize;
}
 
Example 4
Source File: AACHelper.java    From CameraV with GNU General Public License v3.0
public boolean setPlayer(int rate)
{
    int bufferSizePlayer = AudioTrack.getMinBufferSize(rate, AudioFormat.CHANNEL_OUT_MONO, audioFormat);
    Log.d("====buffer Size player ", String.valueOf(bufferSizePlayer));

    player = new AudioTrack(AudioManager.STREAM_MUSIC, rate, AudioFormat.CHANNEL_OUT_MONO, audioFormat, bufferSizePlayer, AudioTrack.MODE_STREAM);

    return player.getState() == AudioTrack.STATE_INITIALIZED;
}
 
Example 5
Source File: TTSUtility.java    From speech-android-sdk with Apache License 2.0
private void initPlayer(){
    stopTtsPlayer();
    // IMPORTANT: getMinBufferSize() returns the minimum buffer size required to successfully create an AudioTrack in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
 
Example 6
Source File: SinVoicePlayer.java    From SinVoiceDemo with Apache License 2.0
/**
 * Constructor.
 *
 * @param codeBook
 * @param sampleRate
 *            the sampling rate
 * @param bufferSize
 *            the size of each buffer
 * @param buffCount
 *            the number of buffers
 */
public SinVoicePlayer(String codeBook, int sampleRate, int bufferSize,
		int buffCount) {

	mState = STATE_STOP;
	mBuffer = new Buffer(buffCount, bufferSize);

	mEncoder = new Encoder(this, sampleRate, SinGenerator.BITS_16,
			bufferSize);
	pcmPlayer = new PcmPlayer(this, sampleRate,
			AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
			bufferSize);
	pcmPlayer.setListener(this);

	setCodeBook(codeBook);
}
 
Example 7
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License
private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
  final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
  final int channelConfig =
      (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
  return AudioTrack.getMinBufferSize(
             sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
      / bytesPerFrame;
}
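With 16-bit PCM, bytesPerFrame is 2 for mono and 4 for stereo. So if getMinBufferSize() returned, say, 3840 bytes for 48 kHz mono output, this method would report 1920 frames, i.e. 40 ms of audio.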
 
Example 8
Source File: MediaPlayer.java    From HPlayer with Apache License 2.0
public int audioTrackInit() {
    audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
        mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
        mAudioTrackBufferSize = 0;
        Log.e("audioTrackInit", "failed to create AudioTrack", e);
    }
    return mAudioTrackBufferSize;
}
 
Example 9
Source File: MediaPlayer.java    From BambooPlayer with Apache License 2.0
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 10
Source File: AudioPlayer.java    From react-native-google-nearby-connection with MIT License
/** Starts playing the stream. */
public void start() {
	mAlive = true;
	mThread =
		new Thread() {
			@Override
			public void run() {
				setThreadPriority(THREAD_PRIORITY_AUDIO);

				Buffer buffer = new Buffer();
				AudioTrack audioTrack =
					new AudioTrack(
						AudioManager.STREAM_MUSIC,
						buffer.sampleRate,
						AudioFormat.CHANNEL_OUT_MONO,
						AudioFormat.ENCODING_PCM_16BIT,
						buffer.size,
						AudioTrack.MODE_STREAM);
				audioTrack.play();
				
				int len;
				try {
					while (isPlaying() && (len = mInputStream.read(buffer.data)) > 0) {
						audioTrack.write(buffer.data, 0, len);
					}
				} catch (IOException e) {
					Log.e(TAG, "Exception with playing stream", e);
				} finally {
					//closeStream();
					audioTrack.release();
					onFinish();
				}
			}
		};
	mThread.start();
}
 
Example 11
Source File: DefaultAudioSink.java    From TelePlus-Android with GNU General Public License v2.0
private AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
  int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
  return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize,
      MODE_STATIC, audioSessionId);
}
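Although this track is never used for playback, creating it with an explicit audioSessionId keeps that audio session alive while the real AudioTrack is torn down and rebuilt, which is what the method name refers to.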
 
Example 12
Source File: AudioManagerAndroid.java    From android-chromium with BSD 2-Clause "Simplified" License
 /**
  * Returns the minimum frame size required for audio output.
  *
  * @param sampleRate sampling rate
  * @param channels number of channels
  */
 @CalledByNative
 private static int getMinOutputFrameSize(int sampleRate, int channels) {
     int channelConfig;
     if (channels == 1) {
         channelConfig = AudioFormat.CHANNEL_OUT_MONO;
     } else if (channels == 2) {
         channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
     } else {
         return -1;
     }
     return AudioTrack.getMinBufferSize(
             sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
 }
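The division by 2 (the size in bytes of one 16-bit sample) and then by the channel count converts the byte count returned by getMinBufferSize() into a frame count, the same quantity computed in Example 7.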
 
Example 13
Source File: DefaultAudioSink.java    From MediaSDK with Apache License 2.0
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
  int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
  return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize,
      MODE_STATIC, audioSessionId);
}
 
Example 14
Source File: Util.java    From Telegram with GNU General Public License v2.0
/**
 * Returns the audio track channel configuration for the given channel count, or {@link
 * AudioFormat#CHANNEL_INVALID} if output is not possible.
 *
 * @param channelCount The number of channels in the input audio.
 * @return The channel configuration or {@link AudioFormat#CHANNEL_INVALID} if output is not
 *     possible.
 */
public static int getAudioTrackChannelConfig(int channelCount) {
  switch (channelCount) {
    case 1:
      return AudioFormat.CHANNEL_OUT_MONO;
    case 2:
      return AudioFormat.CHANNEL_OUT_STEREO;
    case 3:
      return AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 4:
      return AudioFormat.CHANNEL_OUT_QUAD;
    case 5:
      return AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 6:
      return AudioFormat.CHANNEL_OUT_5POINT1;
    case 7:
      return AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
    case 8:
      if (Util.SDK_INT >= 23) {
        return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
      } else if (Util.SDK_INT >= 21) {
        // Equal to AudioFormat.CHANNEL_OUT_7POINT1_SURROUND, which is hidden before Android M.
        return AudioFormat.CHANNEL_OUT_5POINT1
            | AudioFormat.CHANNEL_OUT_SIDE_LEFT
            | AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
      } else {
        // 8 ch output is not supported before Android L.
        return AudioFormat.CHANNEL_INVALID;
      }
    default:
      return AudioFormat.CHANNEL_INVALID;
  }
}
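As a usage sketch, the returned configuration plugs directly into AudioTrack.getMinBufferSize(), with CHANNEL_INVALID signalling an unsupported channel count. Only Util.getAudioTrackChannelConfig() comes from the example above; the wrapper class below is hypothetical.

import android.media.AudioFormat;
import android.media.AudioTrack;

final class ChannelConfigDemo {
    // Maps a raw channel count to a minimum buffer size, rejecting
    // counts that cannot be represented as an output channel mask.
    static int minBufferSizeFor(int sampleRate, int channelCount) {
        int channelConfig = Util.getAudioTrackChannelConfig(channelCount);
        if (channelConfig == AudioFormat.CHANNEL_INVALID) {
            throw new IllegalArgumentException("unsupported channel count: " + channelCount);
        }
        return AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    }
}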
 
Example 15
Source File: AndroidAudioPlayer.java    From cythara with GNU General Public License v3.0
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples  The requested buffer size in samples.
 * @param streamType  The type of audio stream that the internal AudioTrack should use. For
 *                    example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();

    //The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits()/8;

    // From the Android API about getMinBufferSize():
    // The total size (in bytes) of the internal buffer where audio data is read from for playback.
    // If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less than or equal to this size,
    // and it is typical to use chunks of 1/2 of the total size to permit double-buffering. If the track's creation mode is MODE_STATIC,
    // this is the maximum length sample, or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int) to determine
    // the minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode. Using values smaller
    // than getMinBufferSize() will result in an initialization failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSizeInBytes > bufferSizeInBytes) {
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes / (audioFormat.getSampleSizeInBits() / 8)) + " (samples) according to AudioTrack.getMinBufferSize().");
    }

    // http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);

    audioTrack.play();
}
 
Example 16
Source File: FrequencyPlayer.java    From doppler-android with MIT License
FrequencyPlayer(double frequency) {
    numSamples = sampleRate * duration / MILLIS_PER_SECOND;
    generatedSound = new byte[2 * numSamples];
    sample = new double[numSamples];

    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, generatedSound.length, AudioTrack.MODE_STATIC);

    setFrequency(frequency);
}
 
Example 17
Source File: DefaultAudioSink.java    From Telegram with GNU General Public License v2.0
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
  int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
  return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize,
      MODE_STATIC, audioSessionId);
}
 
Example 18
Source File: AudioPlayback.java    From MediaPlayer-Extended with Apache License 2.0
/**
 * Initializes or reinitializes the audio track with the supplied format for playback
 * while keeping the playstate. Keeps the current configuration and skips reinitialization
 * if the new format is the same as the current format.
 */
public void init(MediaFormat format) {
    Log.d(TAG, "init");

    boolean playing = false;

    if(isInitialized()) {
        if(!checkIfReinitializationRequired(format)) {
            // Set new format that equals the old one (in case we compare references somewhere)
            mAudioFormat = format;
            return;
        }

        playing = isPlaying();
        pause();
        stopAndRelease(false);
    } else {
        // deferred creation of the audio thread until its first use
        mAudioThread = new AudioThread();
        mAudioThread.setPaused(true);
        mAudioThread.start();
    }

    mAudioFormat = format;

    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int bytesPerSample = 2;
    mFrameSize = bytesPerSample * channelCount;
    mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

    int channelConfig = AudioFormat.CHANNEL_OUT_DEFAULT;
    switch(channelCount) {
        case 1:
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case 2:
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        case 4:
            channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
            break;
        case 6:
            channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
            break;
        case 8:
            channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
            break;
    }

    mPlaybackBufferSize = mFrameChunkSize * channelCount;

    mAudioTrack = new AudioTrack(
            mAudioStreamType,
            mSampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            mPlaybackBufferSize, // at least twice the size to enable double buffering (according to docs)
            AudioTrack.MODE_STREAM, mAudioSessionId);

    if(mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        stopAndRelease();
        throw new IllegalStateException("audio track init failed");
    }

    mAudioSessionId = mAudioTrack.getAudioSessionId();
    mAudioStreamType = mAudioTrack.getStreamType();
    setStereoVolume(mVolumeLeft, mVolumeRight);
    mPresentationTimeOffsetUs = PTS_NOT_SET;

    if(playing) {
        play();
    }
}
 
Example 19
Source File: AudioTrackManager.java    From TikTok with Apache License 2.0
private AudioTrackManager() {
    bufferSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2, AudioTrack.MODE_STREAM);
}
 
Example 20
Source File: MediaMoviePlayer.java    From libcommon with Apache License 2.0
/**
 * @param source
 * @return first audio track index, -1 if not found
 */
@SuppressLint("NewApi")
protected int internal_prepare_audio(final Object source) throws IOException {
	int trackindex = -1;
	mAudioMediaExtractor = new MediaExtractor();
	if (source instanceof String) {
		mAudioMediaExtractor.setDataSource((String)source);
	} else if (source instanceof AssetFileDescriptor) {
		if (BuildCheck.isAndroid7()) {
			mAudioMediaExtractor.setDataSource((AssetFileDescriptor)source);
		} else {
			mAudioMediaExtractor.setDataSource(((AssetFileDescriptor)source).getFileDescriptor());
		}
	} else {
		// should never get here
		throw new IllegalArgumentException("unknown source type:source=" + source);
	}
	trackindex = selectTrack(mAudioMediaExtractor, "audio/");
	if (trackindex >= 0) {
		mAudioMediaExtractor.selectTrack(trackindex);
		final MediaFormat format = mAudioMediaExtractor.getTrackFormat(trackindex);
		mAudioChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
		mAudioSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
		final int min_buf_size = AudioTrack.getMinBufferSize(mAudioSampleRate,
			(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
			AudioFormat.ENCODING_PCM_16BIT);
		final int max_input_size = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
		mAudioInputBufSize =  min_buf_size > 0 ? min_buf_size * 4 : max_input_size;
		if (mAudioInputBufSize > max_input_size) mAudioInputBufSize = max_input_size;
		final int frameSizeInBytes = mAudioChannels * 2;
		mAudioInputBufSize = (mAudioInputBufSize / frameSizeInBytes) * frameSizeInBytes;
		if (DEBUG) Log.v(TAG, String.format("getMinBufferSize=%d,max_input_size=%d,mAudioInputBufSize=%d",min_buf_size, max_input_size, mAudioInputBufSize));
		//
		mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
			mAudioSampleRate,
			(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
			AudioFormat.ENCODING_PCM_16BIT,
			mAudioInputBufSize,
			AudioTrack.MODE_STREAM);
		try {
			mAudioTrack.play();
		} catch (final Exception e) {
			Log.e(TAG, "failed to start audio track playing", e);
			mAudioTrack.release();
			mAudioTrack = null;
		}
	}
	return trackindex;
}