android.media.AudioFormat Java Examples

The following examples show how to use android.media.AudioFormat. Each example is drawn from an open-source project; the source file and license are noted above the code.
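Most of the examples below use AudioFormat in one of two ways: as a set of int constants passed to AudioRecord.getMinBufferSize() or the AudioTrack constructor, or as an object built with AudioFormat.Builder for the newer (API 21+) APIs. A minimal sketch of both styles (ours, not taken from any of the projects below):

import android.media.AudioFormat;
import android.media.AudioRecord;

public class AudioFormatBasics {
    // Constant-based style: works on all API levels.
    public static int queryMinBufferSize() {
        return AudioRecord.getMinBufferSize(
                44100,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
    }

    // Builder style (API 21+): the same parameters as an object.
    public static AudioFormat buildFormat() {
        return new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(44100)
                .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
                .build();
    }
}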
Example #1
Source File: FromFileBase.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * @param filePath path to the MP4 video file.
 * @param bitRate AAC bitrate in kb.
 * @return true if successful, false on error (normally because the selected encoder
 * doesn't support one of the requested settings, or the device lacks a suitable encoder).
 * @throws IOException normally when the file is not found.
 */
public boolean prepareAudio(String filePath, int bitRate) throws IOException {
  audioPath = filePath;
  audioDecoder = new AudioDecoder(this, audioDecoderInterface, this);
  if (!audioDecoder.initExtractor(filePath)) return false;
  boolean result = audioEncoder.prepareAudioEncoder(bitRate, audioDecoder.getSampleRate(),
      audioDecoder.isStereo(), 0);
  prepareAudioRtp(audioDecoder.isStereo(), audioDecoder.getSampleRate());
  audioDecoder.prepareAudio();
  if (glInterface != null && !(glInterface instanceof OffScreenGlThread)) {
    int channel =
        audioDecoder.isStereo() ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int buffSize = AudioTrack.getMinBufferSize(audioDecoder.getSampleRate(), channel,
        AudioFormat.ENCODING_PCM_16BIT);
    audioTrackPlayer =
        new AudioTrack(AudioManager.STREAM_MUSIC, audioDecoder.getSampleRate(), channel,
            AudioFormat.ENCODING_PCM_16BIT, buffSize, AudioTrack.MODE_STREAM);
  }
  return result;
}
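A possible call site, with an illustrative path and bitrate:

try {
    // Values are examples only; prepareAudio returns false on encoder setup failure.
    if (!prepareAudio("/sdcard/input.mp4", 128 * 1024)) {
        Log.e(TAG, "prepareAudio failed");
    }
} catch (IOException e) {
    Log.e(TAG, "audio file not found", e);
}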
 
Example #2
Source File: AudioThread.java    From Viewer with Apache License 2.0
public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media)
{
	if (channel == 1)
	{
		// CHANNEL_CONFIGURATION_* is deprecated; CHANNEL_OUT_* is the modern equivalent.
		channel_configuration = AudioFormat.CHANNEL_OUT_MONO;
	} else
	{
		channel_configuration = AudioFormat.CHANNEL_OUT_STEREO;
	}
	this.mediaStreamId = streamId;
	this.decoderId = decoderId;
	this.media = media;
	int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
	if (minBufferSize > audioLength)
	{
		audioLength = minBufferSize;
	}
	mAudioBuffer = new byte[audioLength];
	mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT, audioLength, AudioTrack.MODE_STREAM);
}
 
Example #3
Source File: RecordingSampler.java    From voice-recording-visualizer with Apache License 2.0
private void initAudioRecord() {
    int bufferSize = AudioRecord.getMinBufferSize(
            RECORDING_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT
    );

    mAudioRecord = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            RECORDING_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize
    );

    if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
        mBufSize = bufferSize;
    }
}
 
Example #4
Source File: Microphone.java    From ssj with GNU General Public License v3.0
public static Cons.Type audioFormatSampleType(int f)
{
    switch (f)
    {
        case AudioFormat.ENCODING_PCM_8BIT:
            return Cons.Type.CHAR;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return Cons.Type.SHORT;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return Cons.Type.FLOAT;
        case AudioFormat.ENCODING_INVALID:
        default:
            return Cons.Type.UNDEF;
    }
}
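Given such an encoding-to-type mapping, the byte width of a PCM frame follows directly from the encoding and the channel count. A hedged sketch (this helper is ours, not part of the ssj project):

// Illustrative helper (not from ssj): bytes per PCM frame for the
// encodings handled above.
public static int bytesPerFrame(int encoding, int channelCount) {
    switch (encoding) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return channelCount;          // 1 byte per sample
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return 2 * channelCount;      // 2 bytes per sample
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4 * channelCount;      // 4 bytes per sample
        default:
            return -1;                    // unknown or invalid encoding
    }
}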
 
Example #5
Source File: MicrophoneSource.java    From media-for-mobile with Apache License 2.0
public synchronized void configure(int sampleRate, int channels) {
    this.sampleRate = sampleRate;
    recordChannels = channels;

    switch (recordChannels) {
        case 1: {
            androidChannels = AudioFormat.CHANNEL_IN_MONO;
        }
        break;

        case 2: {
            androidChannels = AudioFormat.CHANNEL_IN_STEREO;
        }
        break;
    }

    minBufferSize = AudioRecord.getMinBufferSize(sampleRate, androidChannels, audioEncoding);

    if (minBufferSize < 0) {
        this.sampleRate = 8000;
        minBufferSize = AudioRecord.getMinBufferSize(sampleRate, androidChannels, audioEncoding);
    }
}
 
Example #6
Source File: TTSUtility.java    From speech-android-sdk with Apache License 2.0
private void initPlayer(){
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
 
Example #7
Source File: MjpegPlayerActivity.java    From CameraV with GNU General Public License v3.0
public void initAudio(String vfsPath) throws Exception {

    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    if (useAAC) {
        aac = new AACHelper();
        aac.setDecoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels, MediaConstants.sAudioBitRate);
    } else {
        int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

        at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
    }
}
 
Example #8
Source File: AudioUtil.java    From Augendiagnose with GNU General Public License v2.0
/**
 * Create a sine wave of a certain frequency and duration.
 *
 * @param freqHz     The frequency in Hertz
 * @param durationMs The duration in milliseconds
 * @return An AudioTrack with the corresponding sine wave.
 */
public static AudioTrack generateTonePulse(final double freqHz, final int durationMs) {
	int count = (int) (BITRATE * 2.0 * (durationMs / MILLIS_IN_SECOND)) & ~1;
	short[] samples = new short[count];
	for (int i = 0; i < count; i += 2) {
		short sample = TONE_MAP_2[(int) (2 * i / (BITRATE / freqHz)) % 2];
		samples[i] = sample;
		samples[i + 1] = sample;
	}
	@SuppressWarnings("deprecation")
	AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, (int) BITRATE,
			AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
			count * (Short.SIZE / 8), AudioTrack.MODE_STATIC); // MAGIC_NUMBER
	track.write(samples, 0, count);
	return track;
}
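A possible call site (values are illustrative). Because the track is created in MODE_STATIC, the samples are written up front and play() can be called directly:

// Illustrative usage: a 440 Hz pulse for 200 ms.
AudioTrack tone = AudioUtil.generateTonePulse(440.0, 200);
tone.play();
// Call tone.release() once playback is done to free the native resources.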
 
Example #9
Source File: AudioCodec.java    From bcm-android with GNU General Public License v3.0
public AudioCodec(IRecordFinished finishListener) throws IOException {
    this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    this.audioRecord = createAudioRecord(this.bufferSize);
    this.mediaCodec = createMediaCodec(this.bufferSize);
    this.finishListener = finishListener;

    this.mediaCodec.start();

    try {
        audioRecord.startRecording();
    } catch (Exception e) {
        ALog.e(TAG, "AudioCodec", e);
        mediaCodec.release();
        throw new IOException(e);
    }
}
 
Example #10
Source File: MediaAudioEncoderRunable.java    From GLES2_AUDIO_VIDEO_RECODE with Apache License 2.0
/**
 * Preparation before recording.
 *
 * @throws IOException
 */
@Override
public void prepare() throws IOException {

    mTrackIndex = -1;
    mMuxerStarted = mIsEndOfStream = false;

    // MediaFormat configuration
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    //
    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();

    if (mMediaEncoderListener != null) {
        try {
            mMediaEncoderListener.onPrepared(this);
        } catch (final Exception e) {
            LogUtils.e(TAG, "prepare:", e);
        }
    }
}
 
Example #11
Source File: DefaultAudioSink.java    From TelePlus-Android with GNU General Public License v2.0
@TargetApi(21)
private AudioTrack createAudioTrackV21() {
  android.media.AudioAttributes attributes;
  if (tunneling) {
    attributes = new android.media.AudioAttributes.Builder()
        .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
        .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
        .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
        .build();
  } else {
    attributes = audioAttributes.getAudioAttributesV21();
  }
  AudioFormat format =
      new AudioFormat.Builder()
          .setChannelMask(outputChannelConfig)
          .setEncoding(outputEncoding)
          .setSampleRate(outputSampleRate)
          .build();
  int audioSessionId = this.audioSessionId != C.AUDIO_SESSION_ID_UNSET ? this.audioSessionId
      : AudioManager.AUDIO_SESSION_ID_GENERATE;
  return new AudioTrack(attributes, format, bufferSize, MODE_STREAM, audioSessionId);
}
 
Example #12
Source File: AbstractTLMediaAudioEncoder.java    From TimeLapseRecordingSample with Apache License 2.0
@Override
protected MediaFormat internal_prepare() throws IOException {
	if (DEBUG) Log.v(TAG, "prepare:");
	// prepare MediaCodec for AAC encoding of audio data from the internal mic.
	final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
	if (audioCodecInfo == null) {
		Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
		return null;
	}
	if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

	final MediaFormat format = MediaFormat.createAudioFormat(MIME_TYPE, mSampleRate, 1);
	format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
	format.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
	format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
	format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//	format.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//	format.setLong(MediaFormat.KEY_DURATION, (long) durationInMs);
	if (DEBUG) Log.i(TAG, "prepare finishing:format=" + format);
	return format;
}
 
Example #13
Source File: MicrophoneManager.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * Create an audio record with the given params and an AudioPlaybackCaptureConfiguration, used for capturing internal audio.
 * Note that {@link android.Manifest.permission#RECORD_AUDIO} must be granted before calling this!
 *
 * @param config - AudioPlaybackCaptureConfiguration received from {@link android.media.projection.MediaProjection}
 *
 * @see AudioPlaybackCaptureConfiguration.Builder#Builder(MediaProjection)
 * @see "https://developer.android.com/guide/topics/media/playback-capture"
 * @see "https://medium.com/@debuggingisfun/android-10-audio-capture-77dd8e9070f9"
 */
public void createInternalMicrophone(AudioPlaybackCaptureConfiguration config, int sampleRate, boolean isStereo) {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
    this.sampleRate = sampleRate;
    if (!isStereo) channel = AudioFormat.CHANNEL_IN_MONO;
    audioRecord = new AudioRecord.Builder()
            .setAudioPlaybackCaptureConfig(config)
            .setAudioFormat(new AudioFormat.Builder()
                    .setEncoding(audioFormat)
                    .setSampleRate(sampleRate)
                    .setChannelMask(channel)
                    .build())
            .setBufferSizeInBytes(getPcmBufferSize())
            .build();

    audioPostProcessEffect = new AudioPostProcessEffect(audioRecord.getAudioSessionId());
    String chl = (isStereo) ? "Stereo" : "Mono";
    Log.i(TAG, "Internal microphone created, " + sampleRate + "hz, " + chl);
    created = true;
  } else createMicrophone(sampleRate, isStereo, false, false);
}
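A hedged sketch of a call site on Android 10+; the MediaProjection must come from the usual screen-capture permission flow, and the variable names are ours:

// Illustrative usage (names are ours): capture other apps' media playback.
AudioPlaybackCaptureConfiguration config =
        new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                .build();
microphoneManager.createInternalMicrophone(config, 44100, true); // 44.1 kHz stereo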
 
Example #14
Source File: AudioEncoder.java    From RtmpPublisher with Apache License 2.0
/**
 * Prepare the encoder. Call this before starting the encoder.
 */
void prepare(int bitrate, int sampleRate, long startStreamingAt) {
    int bufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    MediaFormat audioFormat =
            MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, sampleRate, CHANNEL_COUNT);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    startedEncodingAt = startStreamingAt;
    try {
        encoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
        encoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (IOException | IllegalStateException e) {
        e.printStackTrace();
    }
}
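A possible call site, with illustrative values (the field name is ours):

// Illustrative usage: 128 kbps AAC at 44.1 kHz, streaming starting now.
audioEncoder.prepare(128 * 1024, 44100, System.currentTimeMillis());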
 
Example #15
Source File: JSynAndroidAudioDeviceManager.java    From processing-sound with GNU Lesser General Public License v2.1
public void start() {
	this.minBufferSize = AudioTrack.getMinBufferSize(this.frameRate, AudioFormat.CHANNEL_OUT_STEREO,
			AudioFormat.ENCODING_PCM_16BIT);
	this.bufferSize = (3 * (this.minBufferSize / 2)) & ~3;
	this.audioTrack = new AudioTrack.Builder()
			.setAudioAttributes(new AudioAttributes.Builder()
					.setUsage(AudioAttributes.USAGE_MEDIA)
					.setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
					.build())
			.setAudioFormat(new AudioFormat.Builder()
					.setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
					.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
					.setSampleRate(this.frameRate)
					.build())
			.setBufferSizeInBytes(this.bufferSize)
			.setTransferMode(AudioTrack.MODE_STREAM)
			.build();
	this.audioTrack.play();
}
 
Example #16
Source File: SpeechRecognizer.java    From pocketsphinx-android with BSD 2-Clause "Simplified" License
/**
 * Creates a speech recognizer. The recognizer holds the AudioRecord object, so you
 * need to call {@link release} in order to properly finalize it.
 *
 * @param config The configuration object
 * @throws IOException thrown if the audio recorder cannot be created for some reason.
 */
protected SpeechRecognizer(Config config) throws IOException {
    decoder = new Decoder(config);
    sampleRate = (int)decoder.getConfig().getFloat("-samprate");
    bufferSize = Math.round(sampleRate * BUFFER_SIZE_SECONDS);
    recorder = new AudioRecord(
            AudioSource.VOICE_RECOGNITION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        throw new IOException(
                "Failed to initialize recorder. Microphone might be already in use.");
    }
}
 
Example #17
Source File: AudioRecorder.java    From react-native-google-nearby-connection with MIT License
public AudioRecord findAudioRecord() {
	for (int rate : AudioBuffer.POSSIBLE_SAMPLE_RATES) {
		for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
			for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
				try {
					Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
							+ channelConfig);
					int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

					if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
						// check if we can instantiate and have a success
						AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);

						if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
							return recorder;
						}
					}
				} catch (Exception e) {
					Log.e(TAG, rate + "Hz: exception, keep trying.", e);
				}
			}
		}
	}
	return null;
}
 
Example #18
Source File: Track.java    From K-Sonic with MIT License
private void initDevice(int sampleRate, int numChannels) {
    if (isJMono)
        numChannels = 2;
    mLock.lock();
    try {
        final int format = findFormatFromChannels(numChannels);
        final int minSize = AudioTrack.getMinBufferSize(sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT);
        mTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT, minSize * 4,
                AudioTrack.MODE_STREAM);
        mSonic = new Sonic(sampleRate, numChannels);
    } finally {
        // getMinBufferSize and the AudioTrack constructor can throw
        // IllegalArgumentException; let it propagate, but always release the lock.
        mLock.unlock();
    }
}
 
Example #19
Source File: AbstractAudioRecorder.java    From speechutils with Apache License 2.0
protected int getBufferSize() {
    int minBufferSizeInBytes = SpeechRecord.getMinBufferSize(mSampleRate, AudioFormat.CHANNEL_IN_MONO, RESOLUTION);
    if (minBufferSizeInBytes == SpeechRecord.ERROR_BAD_VALUE) {
        throw new IllegalArgumentException("SpeechRecord.getMinBufferSize: parameters not supported by hardware");
    } else if (minBufferSizeInBytes == SpeechRecord.ERROR) {
        Log.e("SpeechRecord.getMinBufferSize: unable to query hardware for output properties");
        // Fall back to 120 ms of audio; multiply before dividing, since
        // (120 / 1000) truncates to zero in integer arithmetic.
        minBufferSizeInBytes = mSampleRate * 120 / 1000 * RESOLUTION_IN_BYTES * CHANNELS;
    }
    int bufferSize = BUFFER_SIZE_MULTIPLIER * minBufferSizeInBytes;
    Log.i("SpeechRecord buffer size: " + bufferSize + ", min size = " + minBufferSizeInBytes);
    return bufferSize;
}
 
Example #20
Source File: AudioRecordThread.java    From dcs-sdk-java with Apache License 2.0
public AudioRecordThread(LinkedBlockingDeque<byte[]> linkedBlockingDeque) {
    this.linkedBlockingDeque = linkedBlockingDeque;
    bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE_HZ, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE_HZ, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize);
}
 
Example #21
Source File: AudioPlayer.java    From connectivity-samples with Apache License 2.0
/** Starts playing the stream. */
public void start() {
  mAlive = true;
  mThread =
      new Thread() {
        @Override
        public void run() {
          setThreadPriority(THREAD_PRIORITY_AUDIO);

          Buffer buffer = new Buffer();
          AudioTrack audioTrack =
              new AudioTrack(
                  AudioManager.STREAM_MUSIC,
                  buffer.sampleRate,
                  AudioFormat.CHANNEL_OUT_MONO,
                  AudioFormat.ENCODING_PCM_16BIT,
                  buffer.size,
                  AudioTrack.MODE_STREAM);
          audioTrack.play();

          int len;
          try {
            while (isPlaying() && (len = mInputStream.read(buffer.data)) > 0) {
              audioTrack.write(buffer.data, 0, len);
            }
          } catch (IOException e) {
            Log.e(TAG, "Exception with playing stream", e);
          } finally {
            stopInternal();
            audioTrack.release();
            onFinish();
          }
        }
      };
  mThread.start();
}
 
Example #22
Source File: Track.java    From K-Sonic with MIT License
private int findFormatFromChannels(int numChannels) {
    switch (numChannels) {
        case 1:
            return AudioFormat.CHANNEL_OUT_MONO;
        case 2:
            return AudioFormat.CHANNEL_OUT_STEREO;
        default:
            return -1; // Error
    }
}
 
Example #23
Source File: ScreenCastRecorder.java    From DeviceConnect-Android with MIT License
@Override
public void setAudioEncoder() {
    if (DEBUG) {
        Log.d(TAG, "setAudioEncoder()");
        Log.d(TAG, "mLiveStreamingClient : " + mLiveStreamingClient);
    }
    if (mLiveStreamingClient != null) {
        AudioEncoder audioEncoder = new MicAACLATMEncoder();
        AudioQuality audioQuality = audioEncoder.getAudioQuality();
        audioQuality.setChannel(AudioFormat.CHANNEL_IN_MONO);
        audioQuality.setSamplingRate(44100);
        mLiveStreamingClient.setAudioEncoder(audioEncoder);
    }
}
 
Example #24
Source File: AudioCodec.java    From mollyim-android with GNU General Public License v3.0
public AudioCodec() throws IOException {
  this.bufferSize  = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
  this.audioRecord = createAudioRecord(this.bufferSize);
  this.mediaCodec  = createMediaCodec(this.bufferSize);

  this.mediaCodec.start();

  try {
    audioRecord.startRecording();
  } catch (Exception e) {
    Log.w(TAG, e);
    mediaCodec.release();
    throw new IOException(e);
  }
}
 
Example #25
Source File: Util.java    From MediaSDK with Apache License 2.0
/**
 * Returns the audio track channel configuration for the given channel count, or {@link
 * AudioFormat#CHANNEL_INVALID} if output is not possible.
 *
 * @param channelCount The number of channels in the input audio.
 * @return The channel configuration or {@link AudioFormat#CHANNEL_INVALID} if output is not
 *     possible.
 */
public static int getAudioTrackChannelConfig(int channelCount) {
  switch (channelCount) {
    case 1:
      return AudioFormat.CHANNEL_OUT_MONO;
    case 2:
      return AudioFormat.CHANNEL_OUT_STEREO;
    case 3:
      return AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 4:
      return AudioFormat.CHANNEL_OUT_QUAD;
    case 5:
      return AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 6:
      return AudioFormat.CHANNEL_OUT_5POINT1;
    case 7:
      return AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
    case 8:
      if (Util.SDK_INT >= 23) {
        return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
      } else if (Util.SDK_INT >= 21) {
        // Equal to AudioFormat.CHANNEL_OUT_7POINT1_SURROUND, which is hidden before Android M.
        return AudioFormat.CHANNEL_OUT_5POINT1
            | AudioFormat.CHANNEL_OUT_SIDE_LEFT
            | AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
      } else {
        // 8 ch output is not supported before Android L.
        return AudioFormat.CHANNEL_INVALID;
      }
    default:
      return AudioFormat.CHANNEL_INVALID;
  }
}
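A hedged usage sketch (variable names are ours): the returned mask feeds straight into AudioTrack.getMinBufferSize, after checking for CHANNEL_INVALID:

// Illustrative usage for 5.1-channel content.
int channelConfig = Util.getAudioTrackChannelConfig(6);
if (channelConfig == AudioFormat.CHANNEL_INVALID) {
    throw new IllegalArgumentException("Unsupported channel count");
}
int minBufferSize = AudioTrack.getMinBufferSize(
        48000, channelConfig, AudioFormat.ENCODING_PCM_16BIT);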
 
Example #26
Source File: Util.java    From Telegram-FOSS with GNU General Public License v2.0
/**
 * Returns the audio track channel configuration for the given channel count, or {@link
 * AudioFormat#CHANNEL_INVALID} if output is not possible.
 *
 * @param channelCount The number of channels in the input audio.
 * @return The channel configuration or {@link AudioFormat#CHANNEL_INVALID} if output is not
 *     possible.
 */
public static int getAudioTrackChannelConfig(int channelCount) {
  switch (channelCount) {
    case 1:
      return AudioFormat.CHANNEL_OUT_MONO;
    case 2:
      return AudioFormat.CHANNEL_OUT_STEREO;
    case 3:
      return AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 4:
      return AudioFormat.CHANNEL_OUT_QUAD;
    case 5:
      return AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
    case 6:
      return AudioFormat.CHANNEL_OUT_5POINT1;
    case 7:
      return AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
    case 8:
      if (Util.SDK_INT >= 23) {
        return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
      } else if (Util.SDK_INT >= 21) {
        // Equal to AudioFormat.CHANNEL_OUT_7POINT1_SURROUND, which is hidden before Android M.
        return AudioFormat.CHANNEL_OUT_5POINT1
            | AudioFormat.CHANNEL_OUT_SIDE_LEFT
            | AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
      } else {
        // 8 ch output is not supported before Android L.
        return AudioFormat.CHANNEL_INVALID;
      }
    default:
      return AudioFormat.CHANNEL_INVALID;
  }
}
 
Example #27
Source File: VoicePlayThread.java    From AndroidScreenShare with Apache License 2.0
private void initVoiceDecoder(int channelMode, int encodeFormat, int channelCount,
                              int byteRate, int sampleRate) {
    // Map the input (recording) channel mask to the matching output mask.
    int outChannelMode = channelMode == AudioFormat.CHANNEL_IN_MONO
            ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;

    myAudioTrack = new MyAudioTrack(sampleRate, outChannelMode,
            encodeFormat, AudioManager.STREAM_MUSIC);
    myAudioTrack.init();

    aacDecoder = new AACDecoder(channelCount, byteRate, sampleRate);
    aacDecoder.setOnDecodeDone(this);
    aacDecoder.start();
}
 
Example #28
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
  // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
  // performance when Android O is supported. Add some logging in the meantime.
  final int nativeOutputSampleRate =
      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
  Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
  if (sampleRateInHz != nativeOutputSampleRate) {
    Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
  }
  if (usageAttribute != DEFAULT_USAGE) {
    Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute);
  }
  // Create an audio track where the audio usage is for VoIP and the content type is speech.
  return new AudioTrack(
      new AudioAttributes.Builder()
          .setUsage(usageAttribute)
          .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
      .build(),
      new AudioFormat.Builder()
        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
        .setSampleRate(sampleRateInHz)
        .setChannelMask(channelConfig)
        .build(),
      bufferSizeInBytes,
      AudioTrack.MODE_STREAM,
      AudioManager.AUDIO_SESSION_ID_GENERATE);
}
 
Example #29
Source File: WebRtcAudioManager.java    From webrtc_android with MIT License
private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
  final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
  final int channelConfig =
      (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
  return AudioRecord.getMinBufferSize(
             sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
      / bytesPerFrame;
}
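As a worked example: assuming BITS_PER_SAMPLE is 16, bytesPerFrame is 2 for mono input, so if AudioRecord reports a 1280-byte minimum buffer at 16 kHz, this method returns 640 frames, i.e. 40 ms of audio (the 1280-byte figure is illustrative; actual minimums vary by device).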
 
Example #30
Source File: RecordingService.java    From libcommon with Apache License 2.0
/**
 * Creates the MediaCodec encoder used for audio recording.
 * @param sampleRate
 * @param channelCount
 */
protected void createEncoder(final int sampleRate, final int channelCount)
	throws IOException {

	if (DEBUG) Log.v(TAG, "createEncoder:audio");
	final MediaCodecInfo codecInfo = selectAudioEncoder(MIME_AUDIO_AAC);
	if (codecInfo == null) {
		throw new IOException("Unable to find an appropriate codec for " + MIME_AUDIO_AAC);
	}
	final MediaFormat format = MediaFormat.createAudioFormat(
		MIME_AUDIO_AAC, sampleRate, channelCount);
	format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
	format.setInteger(MediaFormat.KEY_CHANNEL_MASK,
		mChannelCount == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
	format.setInteger(MediaFormat.KEY_BIT_RATE, 64000/*FIXME: make this a parameter*/);
	format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mChannelCount);
	// Set the parameters to apply to the MediaCodec.
	// A wrong configuration makes MediaCodec#configure throw an unrecoverable exception.
	if (DEBUG) Log.d(TAG, "createEncoder:audio format:" + format);

	// Create the MediaCodec encoder according to the configured format.
	mAudioEncoder = MediaCodec.createEncoderByType(MIME_AUDIO_AAC);
	mAudioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mAudioEncoder.start();
	mAudioReaper = new MediaReaper.AudioReaper(
		mAudioEncoder, mReaperListener, sampleRate, channelCount);
	if (DEBUG) Log.v(TAG, "createEncoder:finished");
}