Java Code Examples for android.media.AudioFormat#CHANNEL_OUT_STEREO

The following examples show how to use android.media.AudioFormat#CHANNEL_OUT_STEREO. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
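As a point of reference before the project examples, here is a minimal, self-contained sketch of requesting stereo output with the AudioTrack.Builder API (available since API level 23) instead of the deprecated AudioTrack constructor used in most of the examples below. The 44.1 kHz sample rate, usage, and content type are illustrative assumptions, and the StereoTrackFactory class and createStereoTrack method names are mine for illustration; none of this is taken from the projects listed here.

import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioTrack;

public class StereoTrackFactory {

    /**
     * Minimal sketch: builds a streaming, 16-bit stereo PCM AudioTrack with the
     * Builder API (API 23+). Sample rate and attributes are illustrative choices.
     */
    public static AudioTrack createStereoTrack() {
        int sampleRate = 44100;
        // Same minimum-buffer query used throughout the examples below.
        int minBufferSize = AudioTrack.getMinBufferSize(
                sampleRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);

        return new AudioTrack.Builder()
                .setAudioAttributes(new AudioAttributes.Builder()
                        .setUsage(AudioAttributes.USAGE_MEDIA)
                        .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                        .build())
                .setAudioFormat(new AudioFormat.Builder()
                        .setSampleRate(sampleRate)
                        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                        .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                        .build())
                .setBufferSizeInBytes(minBufferSize)
                .setTransferMode(AudioTrack.MODE_STREAM)
                .build();
    }
}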
Example 1
Source File: FromFileBase.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * @param filePath path to the video MP4 file.
 * @param bitRate AAC bitrate in kb.
 * @return true on success, false on error (normally because the selected encoder
 * doesn't support the requested configuration or the device has no H264 encoder).
 * @throws IOException normally when the file is not found.
 */
public boolean prepareAudio(String filePath, int bitRate) throws IOException {
  audioPath = filePath;
  audioDecoder = new AudioDecoder(this, audioDecoderInterface, this);
  if (!audioDecoder.initExtractor(filePath)) return false;
  boolean result = audioEncoder.prepareAudioEncoder(bitRate, audioDecoder.getSampleRate(),
      audioDecoder.isStereo(), 0);
  prepareAudioRtp(audioDecoder.isStereo(), audioDecoder.getSampleRate());
  audioDecoder.prepareAudio();
  if (glInterface != null && !(glInterface instanceof OffScreenGlThread)) {
    int channel =
        audioDecoder.isStereo() ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int buffSize = AudioTrack.getMinBufferSize(audioDecoder.getSampleRate(), channel,
        AudioFormat.ENCODING_PCM_16BIT);
    audioTrackPlayer =
        new AudioTrack(AudioManager.STREAM_MUSIC, audioDecoder.getSampleRate(), channel,
            AudioFormat.ENCODING_PCM_16BIT, buffSize, AudioTrack.MODE_STREAM);
  }
  return result;
}
 
Example 2
Source File: AndroidAudioForJSyn.java    From science-journal with Apache License 2.0
@Override
public void start() {
  minBufferSize =
      AudioTrack.getMinBufferSize(
          frameRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
  System.out.println("Audio minBufferSize = " + minBufferSize);
  bufferSize = (3 * (minBufferSize / 2)) & ~3;
  System.out.println("Audio bufferSize = " + bufferSize);
  audioTrack =
      new AudioTrack(
          AudioManager.STREAM_MUSIC,
          frameRate,
          AudioFormat.CHANNEL_OUT_STEREO,
          AudioFormat.ENCODING_PCM_16BIT,
          bufferSize,
          AudioTrack.MODE_STREAM);
  audioTrack.play();
}
 
Example 3
Source File: OpusTrack.java    From DeviceConnect-Android with MIT License
/**
 * Creates an AudioTrack with the specified sampling rate and channel count.
 */
private void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    if (DEBUG) {
        Log.d(TAG, "OpusTrack::createAudioTrack");
        Log.d(TAG, "  SamplingRate: " + mSamplingRate);
        Log.d(TAG, "  Channels: " + mChannel);
        Log.d(TAG, "  AudioFormat: " + AudioFormat.ENCODING_PCM_16BIT);
        Log.d(TAG, "  BufSize: " + bufSize);
    }

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufSize,
            AudioTrack.MODE_STREAM);

    mAudioTrack.play();
}
 
Example 4
Source File: MediaCodecBridge.java    From 365browser with Apache License 2.0
@SuppressWarnings("deprecation")
private int getAudioFormat(int channelCount) {
    switch (channelCount) {
        case 1:
            return AudioFormat.CHANNEL_OUT_MONO;
        case 2:
            return AudioFormat.CHANNEL_OUT_STEREO;
        case 4:
            return AudioFormat.CHANNEL_OUT_QUAD;
        case 6:
            return AudioFormat.CHANNEL_OUT_5POINT1;
        case 8:
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
            } else {
                return AudioFormat.CHANNEL_OUT_7POINT1;
            }
        default:
            return AudioFormat.CHANNEL_OUT_DEFAULT;
    }
}
 
Example 5
Source File: MediaPlayer.java    From video-player with MIT License
@SuppressLint("NewApi")
  
private int audioTrackInit(int sampleRateInHz, int channels) {
 //  this.sampleRateInHz=sampleRateInHz;
 //  this.channels=channels;
 //   return 0;
	
 audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
      mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
      mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
      mAudioTrackBufferSize = 0;
      Log.e("audioTrackInit", e);
    }
    return mAudioTrackBufferSize;
  }
 
Example 6
Source File: Sound.java    From Android-Audio-Recorder with Apache License 2.0
public AudioTrack generateTrack(int sampleRate, short[] buf, int len) {
    int end = len;

    int c = 0;

    if (RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_MONO)
        c = AudioFormat.CHANNEL_OUT_MONO;

    if (RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_STEREO)
        c = AudioFormat.CHANNEL_OUT_STEREO;

    // old phones bug.
    // http://stackoverflow.com/questions/27602492
    //
    // with MODE_STATIC setNotificationMarkerPosition not called
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            c, RawSamples.AUDIO_FORMAT,
            len * (Short.SIZE / 8), AudioTrack.MODE_STREAM);
    track.write(buf, 0, len);
    if (track.setNotificationMarkerPosition(end) != AudioTrack.SUCCESS)
        throw new RuntimeException("unable to set marker");
    return track;
}
 
Example 7
Source File: SimpleAudioOutput.java    From media-samples with Apache License 2.0
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Example 8
Source File: AndroidAudioForJSyn.java    From jsyn with Apache License 2.0
public void start() {
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
 
Example 9
Source File: MediaPlayer.java    From react-native-android-vitamio with MIT License
private int audioTrackInit(int sampleRateInHz, int channels) {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 10
Source File: Track.java    From K-Sonic with MIT License
private int findFormatFromChannels(int numChannels) {
    switch (numChannels) {
        case 1:
            return AudioFormat.CHANNEL_OUT_MONO;
        case 2:
            return AudioFormat.CHANNEL_OUT_STEREO;
        default:
            return -1; // Error
    }
}
 
Example 11
Source File: MediaPlayer.java    From HPlayer with Apache License 2.0
public int audioTrackInit() {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 12
Source File: AudioManagerAndroid.java    From android-chromium with BSD 2-Clause "Simplified" License
 /**
  * Returns the minimum frame size required for audio output.
  *
  * @param sampleRate sampling rate
  * @param channels number of channels
  */
 @CalledByNative
 private static int getMinOutputFrameSize(int sampleRate, int channels) {
     int channelConfig;
     if (channels == 1) {
         channelConfig = AudioFormat.CHANNEL_OUT_MONO;
     } else if (channels == 2) {
         channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
     } else {
         return -1;
     }
     return AudioTrack.getMinBufferSize(
             sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
 }
 
Example 13
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License
private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
  final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
  final int channelConfig =
      (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
  return AudioTrack.getMinBufferSize(
             sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
      / bytesPerFrame;
}
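For a sense of the arithmetic with hypothetical numbers (assuming BITS_PER_SAMPLE is 16): for 16-bit stereo PCM, bytesPerFrame is 2 × (16 / 8) = 4, so a getMinBufferSize() result of, say, 14336 bytes corresponds to 14336 / 4 = 3584 frames.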
 
Example 14
Source File: AudioDecoder.java    From DeviceConnect-Android with MIT License
/**
 * Creates an AudioTrack with the specified sampling rate and channel count.
 */
void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufSize,
            AudioTrack.MODE_STREAM);

    mAudioTrack.play();
}
 
Example 15
Source File: MediaPlayer.java    From video-player with MIT License
public int audioTrackInit() {
  audioTrackRelease();
  int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
  try {
    mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
  } catch (Exception e) {
    mAudioTrackBufferSize = 0;
    Log.e("audioTrackInit", e);
  }
  return mAudioTrackBufferSize;
}
 
Example 16
Source File: AudioTrack.java    From Exoplayer_VLC with Apache License 2.0
/**
 * Reconfigures the audio track to play back media in {@code format}. Buffers passed to
 * {@link #handleBuffer} must use the specified {@code encoding}, which should be a constant
 * from {@link AudioFormat}.
 *
 * @param format Specifies the channel count and sample rate to play back.
 * @param encoding The format in which audio is represented.
 * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to use a
 *     size inferred from the format.
 */
@SuppressLint("InlinedApi")
public void reconfigure(MediaFormat format, int encoding, int specifiedBufferSize) {
  int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
  int channelConfig;
  switch (channelCount) {
    case 1:
      channelConfig = AudioFormat.CHANNEL_OUT_MONO;
      break;
    case 2:
      channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
      break;
    case 6:
      channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
      break;
    case 8:
      channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
      break;
    default:
      throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
  }

  int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

  // TODO: Does channelConfig determine channelCount?
  boolean isAc3 = false;//encoding == AudioFormat.ENCODING_AC3 || encoding == AudioFormat.ENCODING_E_AC3;
  if (isInitialized() && this.sampleRate == sampleRate && this.channelConfig == channelConfig
      && !this.isAc3 && !isAc3) {
    // We already have an existing audio track with the correct sample rate and channel config.
    return;
  }

  reset();

  this.encoding = encoding;
  this.sampleRate = sampleRate;
  this.channelConfig = channelConfig;
  this.isAc3 = isAc3;
  ac3Bitrate = UNKNOWN_AC3_BITRATE; // Calculated on receiving the first buffer if isAc3 is true.
  frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
  minBufferSize = android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);

  if (specifiedBufferSize != 0) {
    bufferSize = specifiedBufferSize;
  } else {
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * frameSize;
    int maxAppBufferSize = (int) Math.max(minBufferSize,
        durationUsToFrames(MAX_BUFFER_DURATION_US) * frameSize);
    bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize
        : multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize
        : multipliedBufferSize;
  }
}
 
Example 17
Source File: MediaMoviePlayer.java    From libcommon with Apache License 2.0
/**
 * @param source source to play: a file path String or an AssetFileDescriptor
 * @return first audio track index, -1 if not found
 */
@SuppressLint("NewApi")
protected int internal_prepare_audio(final Object source) throws IOException {
	int trackindex = -1;
	mAudioMediaExtractor = new MediaExtractor();
	if (source instanceof String) {
		mAudioMediaExtractor.setDataSource((String)source);
	} else if (source instanceof AssetFileDescriptor) {
		if (BuildCheck.isAndroid7()) {
			mAudioMediaExtractor.setDataSource((AssetFileDescriptor)source);
		} else {
			mAudioMediaExtractor.setDataSource(((AssetFileDescriptor)source).getFileDescriptor());
		}
	} else {
		// should not get here
		throw new IllegalArgumentException("unknown source type:source=" + source);
	}
	trackindex = selectTrack(mAudioMediaExtractor, "audio/");
	if (trackindex >= 0) {
		mAudioMediaExtractor.selectTrack(trackindex);
		final MediaFormat format = mAudioMediaExtractor.getTrackFormat(trackindex);
		mAudioChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
		mAudioSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
		final int min_buf_size = AudioTrack.getMinBufferSize(mAudioSampleRate,
			(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
			AudioFormat.ENCODING_PCM_16BIT);
		final int max_input_size = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
		mAudioInputBufSize =  min_buf_size > 0 ? min_buf_size * 4 : max_input_size;
		if (mAudioInputBufSize > max_input_size) mAudioInputBufSize = max_input_size;
		final int frameSizeInBytes = mAudioChannels * 2;
		mAudioInputBufSize = (mAudioInputBufSize / frameSizeInBytes) * frameSizeInBytes;
		if (DEBUG) Log.v(TAG, String.format("getMinBufferSize=%d,max_input_size=%d,mAudioInputBufSize=%d",min_buf_size, max_input_size, mAudioInputBufSize));
		//
		mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
			mAudioSampleRate,
			(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
			AudioFormat.ENCODING_PCM_16BIT,
			mAudioInputBufSize,
			AudioTrack.MODE_STREAM);
		try {
			mAudioTrack.play();
		} catch (final Exception e) {
			Log.e(TAG, "failed to start audio track playing", e);
			mAudioTrack.release();
			mAudioTrack = null;
		}
	}
	return trackindex;
}
 
Example 18
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
private int channelCountToConfiguration(int channels) {
  return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
}
 
Example 19
Source File: MainActivity.java    From android-fskmodem with GNU General Public License v3.0
@Override
protected void onCreate(Bundle savedInstanceState) {
	super.onCreate(savedInstanceState);
	setContentView(R.layout.activity_main);
	
	/// INIT FSK CONFIG
	
	try {
		mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_16BIT, FSKConfig.CHANNELS_STEREO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
	} catch (IOException e1) {
		e1.printStackTrace();
	}

	/// INIT FSK DECODER
	
	mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {
		
		@Override
		public void decoded(byte[] newData) {
			
			final String text = new String(newData);
			
			runOnUiThread(new Runnable() {
				public void run() {
					
					TextView view = ((TextView) findViewById(R.id.result));
					
					view.setText(view.getText()+text);
				}
			});
		}
	});
	
	/// INIT FSK ENCODER
	
	mEncoder = new FSKEncoder(mConfig, new FSKEncoderCallback() {
		
		@Override
		public void encoded(byte[] pcm8, short[] pcm16) {
			if (mConfig.pcmFormat == FSKConfig.PCM_8BIT) {
				//8bit buffer is populated, 16bit buffer is null
				
				mAudioTrack.write(pcm8, 0, pcm8.length);
				
				mDecoder.appendSignal(pcm8);
			}
			else if (mConfig.pcmFormat == FSKConfig.PCM_16BIT) {
				//16bit buffer is populated, 8bit buffer is null
				
				mAudioTrack.write(pcm16, 0, pcm16.length);
				
				mDecoder.appendSignal(pcm16);
			}
		}
	});
	
	///
	
	mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
			mConfig.sampleRate, AudioFormat.CHANNEL_OUT_STEREO,
			AudioFormat.ENCODING_PCM_16BIT, 1024,
			AudioTrack.MODE_STREAM);
	
	mAudioTrack.play();
	
	///
	
	new Thread(mDataFeeder).start();
}