android.media.AudioRecord Java Examples

The following examples show how to use android.media.AudioRecord. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: MainActivity.java    From snips-platform-android-demo with Apache License 2.0 6 votes vote down vote up
/**
 * Streams microphone audio to the connected client until
 * {@code continueStreaming} is cleared by another thread.
 * Blocks the calling thread for the whole streaming session.
 */
private void runStreaming() {
    Log.d(TAG, "starting audio streaming");
    final int minBufferSizeInBytes = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);
    Log.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);

    recorder = new AudioRecord(MIC, FREQUENCY, CHANNEL, ENCODING, minBufferSizeInBytes);
    recorder.startRecording();

    while (continueStreaming) {
        // Fresh buffer per chunk: the client may hold on to the array.
        short[] buffer = new short[minBufferSizeInBytes / 2];
        int read = recorder.read(buffer, 0, buffer.length);
        // BUG FIX: read() can return a negative error code; don't forward
        // an unfilled buffer in that case.
        if (read > 0 && client != null) {
            client.sendAudioBuffer(buffer);
        }
    }
    recorder.stop();
    // BUG FIX: release the native recorder so the microphone can be
    // reopened later; stop() alone does not free it.
    recorder.release();
    Log.d(TAG, "audio streaming stopped");
}
 
Example #2
Source File: AudioRecordJNI.java    From Telegram with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Starts (or resumes) audio capture.
 *
 * @return true when recording was started; false when the recorder is
 *         missing/uninitialized or starting threw.
 */
public boolean start() {
	if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
		return false;
	try {
		// FIX: removed the redundant nested null check — audioRecord was
		// already verified non-null by the guard above.
		audioRecord.startRecording();
		// The reader thread is created only on the first start.
		if (thread == null)
			startThread();
		return true;
	} catch (Exception x) {
		VLog.e("Error initializing AudioRecord", x);
	}
	return false;
}
 
Example #3
Source File: SpeechRecognizer.java    From pocketsphinx-android with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Creates speech recognizer. Recognizer holds the AudioRecord object, so you 
 * need to call {@link release} in order to properly finalize it.
 * 
 * @param config The configuration object
 * @throws IOException thrown if audio recorder can not be created for some reason.
 */
protected SpeechRecognizer(Config config) throws IOException {
    decoder = new Decoder(config);
    sampleRate = (int)decoder.getConfig().getFloat("-samprate");
    bufferSize = Math.round(sampleRate * BUFFER_SIZE_SECONDS);
    recorder = new AudioRecord(
            AudioSource.VOICE_RECOGNITION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        throw new IOException(
                "Failed to initialize recorder. Microphone might be already in use.");
    }
}
 
Example #4
Source File: AudioDispatcherFactory.java    From cythara with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Create a new AudioDispatcher connected to the default microphone.
 *
 * @param sampleRate
 *            The requested sample rate.
 * @param audioBufferSize
 *            The size of the audio buffer (in samples).
 * @param bufferOverlap
 *            The size of the overlap (in samples).
 * @return A new AudioDispatcher
 * @throws IllegalArgumentException
 *             if the requested buffer is smaller than the platform minimum.
 */
public static AudioDispatcher fromDefaultMicrophone(final int sampleRate,
		final int audioBufferSize, final int bufferOverlap) {
	int minAudioBufferSize = AudioRecord.getMinBufferSize(sampleRate,
			android.media.AudioFormat.CHANNEL_IN_MONO,
			android.media.AudioFormat.ENCODING_PCM_16BIT);
	// 16-bit PCM: two bytes per sample.
	int minAudioBufferSizeInSamples = minAudioBufferSize / 2;
	if (minAudioBufferSizeInSamples > audioBufferSize) {
		// BUG FIX: the old message reported (minAudioBufferSize * 2), but the
		// threshold actually enforced is minAudioBufferSize / 2 samples.
		throw new IllegalArgumentException("Buffer size too small should be at least "
				+ minAudioBufferSizeInSamples);
	}
	AudioRecord audioInputStream = new AudioRecord(
			MediaRecorder.AudioSource.MIC, sampleRate,
			android.media.AudioFormat.CHANNEL_IN_MONO,
			android.media.AudioFormat.ENCODING_PCM_16BIT,
			audioBufferSize * 2);

	TarsosDSPAudioFormat format = new TarsosDSPAudioFormat(sampleRate, 16, 1, true, false);

	TarsosDSPAudioInputStream audioStream = new AndroidAudioInputStream(audioInputStream, format);
	// start recording ! Opens the stream.
	audioInputStream.startRecording();
	return new AudioDispatcher(audioStream, audioBufferSize, bufferOverlap);
}
 
Example #5
Source File: AudioCapturer.java    From Android with Apache License 2.0 6 votes vote down vote up
/**
 * Stops the capture thread and releases the AudioRecord.
 * No-op when capture was never started.
 */
public void stopCapture() {

        if (!mIsCaptureStarted) {
            return;
        }

        mIsLoopExit = true;
        try {
            mCaptureThread.interrupt();
            mCaptureThread.join(1000);
        }
        catch (InterruptedException e) {
            e.printStackTrace();
            // BUG FIX: restore the interrupt status so callers up the stack
            // can still observe the interruption.
            Thread.currentThread().interrupt();
        }

        if (mAudioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
            mAudioRecord.stop();
        }

        mAudioRecord.release();

        mIsCaptureStarted = false;
        mAudioFrameCapturedListener = null;

        Log.d(TAG, "Stop audio capture success !");
    }
 
Example #6
Source File: Doppler.java    From doppler-android with MIT License 6 votes vote down vote up
public Doppler() {
    //write a check to see if stereo is supported
    // Minimum buffer for mono 16-bit capture at SAMPLE_RATE.
    bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    buffer = new short[bufferSize];

    // Start from the preliminary probe frequency; calibration may adjust it.
    frequency = PRELIM_FREQ;
    freqIndex = PRELIM_FREQ_INDEX;

    // Tone generator that emits the probe frequency.
    frequencyPlayer = new FrequencyPlayer(PRELIM_FREQ);

    // NOTE(review): the buffer size above was computed with SAMPLE_RATE, but
    // the recorder is opened at DEFAULT_SAMPLE_RATE — confirm the two
    // constants agree, otherwise the buffer may be mis-sized for this stream.
    microphone = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION, DEFAULT_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    mHandler = new Handler();

    calibrator = new Calibrator();
}
 
Example #7
Source File: AudioMediaEncoder.java    From Lassi-Android with MIT License 6 votes vote down vote up
/**
 * Pulls one audio frame from the recorder into a pooled buffer and hands it
 * to the encoder; logs and drops the frame when the pool is exhausted or the
 * read fails.
 */
private void read(boolean endOfStream) {
    mCurrentBuffer = mByteBufferPool.get();
    if (mCurrentBuffer == null) {
        LOG.e("Skipping audio frame, encoding is too slow.");
        // TODO should fix the next presentation time here. However this is
        // extremely unlikely based on my tests. The mByteBufferPool should be big enough.
        return;
    }
    mCurrentBuffer.clear();
    mReadBytes = mAudioRecord.read(mCurrentBuffer, FRAME_SIZE);
    if (mReadBytes > 0) {
        // Successful read: advance the presentation time and emit the frame.
        increaseTime(mReadBytes);
        mCurrentBuffer.limit(mReadBytes);
        onBuffer(endOfStream);
    } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
        LOG.e("Got AudioRecord.ERROR_INVALID_OPERATION");
    } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
        LOG.e("Got AudioRecord.ERROR_BAD_VALUE");
    }
}
 
Example #8
Source File: RecordAudioTester.java    From PermissionAgent with Apache License 2.0 6 votes vote down vote up
/**
 * Probes the RECORD_AUDIO permission by actually starting a recording.
 * Falls back to the microphone-presence check when no recorder is available
 * or starting fails.
 *
 * @return true when recording could start (or the device has no microphone).
 */
@Override
public boolean test() throws Throwable {
    AudioRecord audioRecord = findAudioRecord();
    try {
        if (audioRecord != null) {
            audioRecord.startRecording();
        } else {
            return !existMicrophone(mContext);
        }
    } catch (Throwable e) {
        return !existMicrophone(mContext);
    } finally {
        if (audioRecord != null) {
            try {
                audioRecord.stop();
            } catch (IllegalStateException ignored) {
                // BUG FIX: stop() throws when startRecording() never
                // succeeded; an exception escaping this finally block would
                // mask the result decided above.
            }
            audioRecord.release();
        }
    }
    return true;
}
 
Example #9
Source File: RecordAudioTester.java    From PermissionAgent with Apache License 2.0 6 votes vote down vote up
/**
 * Probes the RECORD_AUDIO permission by actually starting a recording.
 * Falls back to the microphone-presence check when no recorder is available
 * or starting fails.
 *
 * @return true when recording could start (or the device has no microphone).
 */
@Override
public boolean test() throws Throwable {
    AudioRecord audioRecord = findAudioRecord();
    try {
        if (audioRecord != null) {
            audioRecord.startRecording();
        } else {
            return !existMicrophone(mContext);
        }
    } catch (Throwable e) {
        return !existMicrophone(mContext);
    } finally {
        if (audioRecord != null) {
            try {
                audioRecord.stop();
            } catch (IllegalStateException ignored) {
                // BUG FIX: stop() throws when startRecording() never
                // succeeded; an exception escaping this finally block would
                // mask the result decided above.
            }
            audioRecord.release();
        }
    }
    return true;
}
 
Example #10
Source File: AudioCapture.java    From EvilsLive with MIT License 6 votes vote down vote up
/**
 * Capture loop: pulls PCM chunks from the recorder and forwards each one to
 * the registered listener, sleeping ~10 ms between reads, until the exit
 * flag is raised.
 */
@Override
public void run() {
    while (!mIsLoopExit) {
        // Fresh buffer per chunk — the listener may retain the array.
        byte[] chunk = new byte[mMinBufferSize];
        int readResult = mAudioRecord.read(chunk, 0, mMinBufferSize);
        if (readResult == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG, "Error ERROR_INVALID_OPERATION");
        } else if (readResult == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG, "Error ERROR_BAD_VALUE");
        } else {
            if (mAudioFrameCapturedListener != null) {
                mAudioFrameCapturedListener.onAudioFrameCaptured(chunk);
            }
            Log.d(TAG, "OK, Captured " + readResult + " bytes !");
        }
        SystemClock.sleep(10);
    }

}
 
Example #11
Source File: AudioRecorder.java    From RtmpPublisher with Apache License 2.0 6 votes vote down vote up
/**
 * Starts microphone capture on a dedicated handler thread and streams PCM
 * chunks to the listener until {@code isRecording()} turns false or a read
 * fails.
 */
public void start() {
    final int bufferSize =
            AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);

    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    audioRecord.startRecording();

    final HandlerThread handlerThread = new HandlerThread("AudioRecorder-record");
    handlerThread.start();
    Handler handler = new Handler(handlerThread.getLooper());
    handler.post(new Runnable() {
        @Override
        public void run() {
            int bufferReadResult;
            byte[] data = new byte[bufferSize];
            // keep running... so use a different thread.
            while (isRecording() && (bufferReadResult = audioRecord.read(data, 0, bufferSize)) > 0) {
                listener.onAudioRecorded(data, bufferReadResult);
            }
            // BUG FIX: quit the looper once recording ends; otherwise the
            // HandlerThread lives forever and leaks a thread per start().
            handlerThread.quit();
        }
    });
}
 
Example #12
Source File: AACEncoder.java    From AndroidInstantVideo with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the AAC encoding pipeline: configures a MediaCodec encoder from the
 * publisher parameters, wraps its output in a MediaCodecInputStream, and
 * opens the AudioRecord used as the PCM source.
 *
 * @param params stream configuration (sample rate, buffer size, MIME, source).
 * @throws IOException if the encoder cannot be created for the requested MIME.
 */
public AACEncoder(final StreamPublisher.StreamPublisherParam params) throws IOException {
    this.samplingRate = params.samplingRate;

    bufferSize = params.audioBufferSize;
    mMediaCodec = MediaCodec.createEncoderByType(params.audioMIME);
    mMediaCodec.configure(params.createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodecInputStream = new MediaCodecInputStream(mMediaCodec, new MediaCodecInputStream.MediaFormatCallback() {
        @Override
        public void onChangeMediaFormat(MediaFormat mediaFormat) {
            // Propagate the negotiated output format back to the publisher.
            params.setAudioOutputMediaFormat(mediaFormat);
        }
    });
    mAudioRecord = new AudioRecord(params.audioSource, samplingRate, params.channelCfg, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    if (NoiseSuppressor.isAvailable()) {
        // Attach platform noise suppression to this recording session.
        // FIX: dropped the dead local that previously held the instance.
        NoiseSuppressor.create(mAudioRecord.getAudioSessionId());
    }

}
 
Example #13
Source File: ExtAudioCapture.java    From PLDroidRTCStreaming with Apache License 2.0 6 votes vote down vote up
/**
 * Capture loop: reads fixed-size PCM frames and delivers each one to the
 * listener together with a capture timestamp, until the exit flag is raised.
 */
@Override
public void run() {
    while (!mIsLoopExit) {
        // Fresh buffer per frame — the listener may keep the reference.
        byte[] frame = new byte[SAMPLES_PER_FRAME * 2];
        int readResult = mAudioRecord.read(frame, 0, frame.length);
        if (readResult == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG, "Error ERROR_INVALID_OPERATION");
        } else if (readResult == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG, "Error ERROR_BAD_VALUE");
        } else if (mOnAudioFrameCapturedListener != null) {
            mOnAudioFrameCapturedListener.onAudioFrameCaptured(frame, System.nanoTime());
        }
    }
}
 
Example #14
Source File: AudioInput.java    From Jumble with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Audio input thread: records from the microphone at urgent-audio priority
 * and forwards each frame to the listener until recording is disabled.
 */
@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    Log.i(Constants.TAG, "AudioInput: started");

    // BUG FIX: verify the recorder initialized BEFORE starting it —
    // startRecording() on an uninitialized AudioRecord throws
    // IllegalStateException (the old code started first and checked after).
    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED)
        return;

    mAudioRecord.startRecording();

    final short[] mAudioBuffer = new short[mFrameSize];
    // We loop when the 'recording' instance var is true instead of checking audio record state because we want to always cleanly shutdown.
    while (mRecording) {
        int shortsRead = mAudioRecord.read(mAudioBuffer, 0, mFrameSize);
        if (shortsRead > 0) {
            mListener.onAudioInputReceived(mAudioBuffer, mFrameSize);
        } else {
            Log.e(Constants.TAG, "Error fetching audio! AudioRecord error " + shortsRead);
        }
    }

    mAudioRecord.stop();

    Log.i(Constants.TAG, "AudioInput: stopped");
}
 
Example #15
Source File: AudioRecorder.java    From react-native-google-nearby-connection with MIT License 6 votes vote down vote up
/**
 * Probes sample-rate / encoding / channel combinations until one yields a
 * working AudioRecord.
 *
 * @return an initialized recorder, or null when no combination works.
 */
public AudioRecord findAudioRecord() {
	for (int rate : AudioBuffer.POSSIBLE_SAMPLE_RATES) {
		for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
			for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
				try {
					Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
							+ channelConfig);
					int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

					if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
						// check if we can instantiate and have a success
						AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);

						if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
							return recorder;
						}
						// BUG FIX: release failed probes — every AudioRecord
						// holds native resources even when uninitialized.
						recorder.release();
					}
				} catch (Exception e) {
					Log.e(TAG, rate + "Exception, keep trying.",e);
				}
			}
		}
	}
	return null;
}
 
Example #16
Source File: PullableSource.java    From OmRecorder with Apache License 2.0 6 votes vote down vote up
/**
 * Enables AutomaticGainControl on this recording session when the platform
 * supports it (API 16+), then defers to the wrapped source.
 */
@Override
public AudioRecord preparedToBePulled() {
  String tag = getClass().getSimpleName();
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
    Log.i(tag,
        "For this effect, Android api should be higher than or equals 16");
  } else if (!android.media.audiofx.AutomaticGainControl.isAvailable()) {
    Log.i(tag, "This device don't support AutomaticGainControl");
  } else {
    android.media.audiofx.AutomaticGainControl agc = android.media.audiofx.AutomaticGainControl
        .create(audioRecord().getAudioSessionId());
    if (agc == null) {
      Log.i(tag, "AutomaticGainControl failed :(");
    } else {
      agc.setEnabled(true);
      Log.i(tag, "AutomaticGainControl ON");
    }
  }
  return super.preparedToBePulled();
}
 
Example #17
Source File: AudioProcess.java    From NoiseCapture with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Builds the AudioRecord used for noise measurement, or returns null when
 * the requested format is unsupported on this device.
 */
private AudioRecord createAudioRecord() {
    // Source:
    //  section 5.3 of the Android 4.0 Compatibility Definition
    // https://source.android.com/compatibility/4.0/android-4.0-cdd.pdf
    // Using VOICE_RECOGNITION
    // Noise reduction processing, if present, is disabled.
    // Except for 5.0+ where android.media.audiofx.NoiseSuppressor could be use to cancel such processing
    // Automatic gain control, if present, is disabled.
    if (bufferSize == AudioRecord.ERROR_BAD_VALUE) {
        return null; // requested rate/channel/encoding combination rejected
    }
    return new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION,
            rate, audioChannel, encoding, bufferSize);
}
 
Example #18
Source File: AudioTrackManager.java    From TikTok with Apache License 2.0 6 votes vote down vote up
/**
 * Stops playback: tears down the worker thread, stops and releases the
 * AudioTrack, and closes the input stream. All failures are swallowed and
 * logged via printStackTrace.
 */
public void stopPlay() {
    try {
        destroyThread();
        if (audioTrack != null) {
            // BUG FIX: compare against AudioTrack.STATE_INITIALIZED — this is
            // a player, not a recorder. AudioRecord.STATE_INITIALIZED happens
            // to share the value, which masked the wrong constant class.
            if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                audioTrack.stop();
            }
            // FIX: dropped the redundant inner null check; audioTrack cannot
            // become null between the guard above and here.
            audioTrack.release();
        }
        if (dis != null) {
            dis.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example #19
Source File: AudioCapturer.java    From Android with Apache License 2.0 6 votes vote down vote up
/**
 * Capture loop: pulls PCM chunks from the recorder and forwards them to the
 * registered listener roughly every 10 ms until the exit flag is raised.
 */
@Override
public void run() {
    while (!mIsLoopExit) {
        // Fresh buffer per chunk — the listener may retain the array.
        byte[] chunk = new byte[mMinBufferSize];
        int readResult = mAudioRecord.read(chunk, 0, mMinBufferSize);
        if (readResult == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG , "Error ERROR_INVALID_OPERATION");
        } else if (readResult == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG , "Error ERROR_BAD_VALUE");
        } else {
            if (mAudioFrameCapturedListener != null) {
                mAudioFrameCapturedListener.onAudioFrameCaptured(chunk);
            }
            Log.d(TAG , "OK, Captured "+readResult+" bytes !");
        }
        SystemClock.sleep(10);
    }
}
 
Example #20
Source File: AudioCapture.java    From EvilsLive with MIT License 6 votes vote down vote up
/**
 * Stops the capture thread and releases the AudioRecord.
 * No-op when capture was never started.
 */
public void stopCapture() {
    if (!mIsCaptureStarted) {
        return;
    }

    mIsLoopExit = true;

    try {
        mCaptureThread.interrupt();
        mCaptureThread.join(1000);
    } catch (InterruptedException e) {
        e.printStackTrace();
        // BUG FIX: restore the interrupt status so callers up the stack can
        // still observe the interruption.
        Thread.currentThread().interrupt();
    }

    if (mAudioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        mAudioRecord.stop();
    }

    mAudioRecord.release();
    mIsCaptureStarted = false;
    mAudioFrameCapturedListener = null;

    Log.d(TAG, "Stop audio capture success !");

}
 
Example #21
Source File: AudioCapture.java    From EvilsLive with MIT License 6 votes vote down vote up
/**
 * Computes the capture buffer size (in bytes) for the requested format:
 * two timer periods of frames, clamped up to the platform minimum.
 */
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
        // Derive frame geometry from the requested configuration.
        int channelCount = (channelConfig == AudioFormat.CHANNEL_IN_MONO) ? 1 : 2;
        int bitsPerSample = (AudioFormat.ENCODING_PCM_16BIT == audioFormat) ? 16 : 8;

        //num of frames in a second is same as sample rate
        int framesPerPeriod = sampleRate * TIMER_INTERVAL / 1000;
        //refer to android/4.1.1/frameworks/av/media/libmedia/AudioRecord.cpp, AudioRecord::getMinFrameCount method
        //we times 2 for ping pong use of record buffer
        mMinBufferSize = framesPerPeriod * 2 * channelCount * bitsPerSample / 8;

        // Check to make sure buffer size is not smaller than the smallest allowed one
        int platformMin = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
        if (mMinBufferSize < platformMin) {
            mMinBufferSize = platformMin;
        }

        return mMinBufferSize;
    }
 
Example #22
Source File: AssistantActivity.java    From androidthings-googleassistant with Apache License 2.0 6 votes vote down vote up
@Override
public void run() {
    // One streaming step: capture a block of PCM audio and forward it to the
    // Assistant request stream, then reschedule this runnable on the handler.
    ByteBuffer audioData = ByteBuffer.allocateDirect(SAMPLE_BLOCK_SIZE);
    if (mAudioInputDevice != null) {
        // Route capture to the explicitly configured input device.
        mAudioRecord.setPreferredDevice(mAudioInputDevice);
    }
    // READ_BLOCKING waits until a full block has been captured.
    int result =
            mAudioRecord.read(audioData, audioData.capacity(), AudioRecord.READ_BLOCKING);
    if (result < 0) {
        // Negative result is an AudioRecord error code; abort this step
        // without rescheduling.
        Log.e(TAG, "error reading from audio stream:" + result);
        return;
    }
    Log.d(TAG, "streaming ConverseRequest: " + result);
    // NOTE(review): this assumes read(ByteBuffer, ...) leaves the buffer
    // position at 0 so copyFrom sees the captured bytes — confirm against
    // the AudioRecord documentation for the targeted API level.
    mAssistantRequestObserver.onNext(AssistRequest.newBuilder()
            .setAudioIn(ByteString.copyFrom(audioData))
            .build());
    // Queue the next capture iteration.
    mAssistantHandler.post(mStreamAssistantRequest);
}
 
Example #23
Source File: ExtAudioCapture.java    From PLDroidRTCStreaming with Apache License 2.0 6 votes vote down vote up
/**
 * Stops the capture thread and releases the AudioRecord.
 * No-op when capture was never started.
 */
public void stopCapture() {
    if (!mIsCaptureStarted) {
        return;
    }

    mIsLoopExit = true;
    try {
        mCaptureThread.interrupt();
        mCaptureThread.join(1000);
    } catch (InterruptedException e) {
        e.printStackTrace();
        // BUG FIX: restore the interrupt status so callers up the stack can
        // still observe the interruption.
        Thread.currentThread().interrupt();
    }

    if (mAudioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        mAudioRecord.stop();
    }

    mAudioRecord.release();

    mIsCaptureStarted = false;
    mOnAudioFrameCapturedListener = null;

    Log.d(TAG, "Stop audio capture success !");
}
 
Example #24
Source File: Preferences.java    From Plumble with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Wires up the audio preference screen: reacts to input-method changes and
 * annotates each input sample rate entry with whether the device supports it.
 */
private static void configureAudioPreferences(final PreferenceScreen screen) {
    ListPreference inputPreference = (ListPreference) screen.findPreference(Settings.PREF_INPUT_METHOD);
    inputPreference.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            updateAudioDependents(screen, (String) newValue);
            return true;
        }
    });

    // Scan each bitrate and determine if the device supports it
    ListPreference inputQualityPreference = (ListPreference) screen.findPreference(Settings.PREF_INPUT_RATE);
    CharSequence[] entryValues = inputQualityPreference.getEntryValues();
    String[] bitrateNames = new String[entryValues.length];
    for (int i = 0; i < bitrateNames.length; i++) {
        int bitrate = Integer.parseInt(entryValues[i].toString());
        // A positive minimum buffer size means the rate is usable here.
        boolean supported = AudioRecord.getMinBufferSize(bitrate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) > 0;
        bitrateNames[i] = bitrate + "Hz" + (supported ? "" : " (unsupported)");
    }
    inputQualityPreference.setEntries(bitrateNames);

    updateAudioDependents(screen, inputPreference.getValue());
}
 
Example #25
Source File: SpeaktoitRecognitionServiceImpl.java    From dialogflow-android-client with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the recognition pipeline under the recognizer lock: the AudioRecord
 * source, voice-activity detection wiring, and the response MediaPlayer.
 */
private void init() {
    synchronized (recognizerLock) {
        // Size the capture buffer to the platform minimum for this format.
        final int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE_IN_HZ,
                CHANNEL_CONFIG, AUDIO_FORMAT, bufferSize);

        // Voice activity detection mirrors the service configuration and
        // reports speech events back to this object.
        vad.setEnabled(config.isVoiceActivityDetectionEnabled());
        vad.setSpeechListener(this);

        // Player used to render responses; this object handles its callbacks.
        final MediaPlayer player = new MediaPlayer();
        player.setOnErrorListener(this);
        player.setOnCompletionListener(this);
        mediaPlayer = player;
    }
}
 
Example #26
Source File: AudioRecordManager.java    From permissions4m with Apache License 2.0 6 votes vote down vote up
/**
 * Stops recording: waits briefly for vendor-specific quirks, tears down the
 * worker thread, stops and releases the recorder, flushes and closes the
 * output stream, then records the file length and deletes the file.
 *
 * @throws IOException          if flushing/closing the output stream fails.
 * @throws InterruptedException if the settling sleep is interrupted.
 */
public void stopRecord() throws IOException, InterruptedException {
    // specially for OPPO、XIAOMI、MEIZU、HUAWEI and so on
    Thread.sleep(250);
    destroyThread();
    if (mRecorder != null) {
        if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
            mRecorder.stop();
        }
        // FIX: dropped the redundant inner null check; mRecorder cannot
        // become null between the guard above and here.
        mRecorder.release();
    }
    if (dos != null) {
        dos.flush();
        dos.close();
    }
    length = file.length();
    deleteFile();
}
 
Example #27
Source File: OpusAudioRecorder.java    From Tok-Android with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Stops an in-progress voice recording, optionally vibrating for feedback.
 * The actual teardown runs on the record queue so it serializes after any
 * in-flight recording work.
 *
 * @param send    whether the captured audio should be sent after teardown.
 * @param vibrate whether to emit a short haptic pulse now.
 */
public void stopRecording(final boolean send, boolean vibrate) {
    // Cancel a pending (not yet executed) start so it can't race this stop.
    recordQueue.cancelRunnable(recordStartRunnable);
    if (vibrate) {
        vibrate(new long[] { 0L, 10L });
    }
    recordQueue.postRunnable(new Runnable() {
        @Override
        public void run() {
            if (audioRecord != null) {
                try {
                    // Remember whether to send once teardown completes.
                    sendAfterDone = send;
                    if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
                        audioRecord.stop();
                    }
                } catch (Exception e) {
                    // stop() failed; discard the partial recording file.
                    if (recordingAudioFile != null) {
                        recordingAudioFile.delete();
                    }
                }
                OpusAudioRecorder.this.stopRecordingInternal(send);
            }
        }
    }, 0);
}
 
Example #28
Source File: AudioUtils.java    From android-utils with Apache License 2.0 5 votes vote down vote up
/**
 * Check correct buffer size for your AudioRecord instance
 *
 * @param audioSource          the audio source
 * @param fs                   the fs
 * @param channelConfiguration the channel configuration
 * @param audioEncoding        the audio encoding
 * @return the first buffer size that initializes successfully, or 0 if none.
 */
public static int getValidBufferSize(int audioSource, int fs, int channelConfiguration,
                                     int audioEncoding) {
    for (int bufferSize : new int[]{
            256, 512, 1024, 2048, 4096
    }) {  // add the rates you wish to check against
        AudioRecord audioRecordTemp =
                new AudioRecord(audioSource, fs, channelConfiguration, audioEncoding, bufferSize);
        // FIX: `new` never returns null, so only the state check matters.
        boolean initialized = audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED;
        // BUG FIX: always release the probe instance — AudioRecord holds
        // native resources and leaking one per probe can starve the audio HAL.
        audioRecordTemp.release();
        if (initialized) {
            return bufferSize;
        }
    }
    return 0;
}
 
Example #29
Source File: WebRtcAudioRecord.java    From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
// Stops and tears down the native recorder under the record lock.
// Returns 0 on success, -1 when stop() failed; in both paths the finally
// block re-arms initialization and releases the lock.
@SuppressWarnings("unused")
private int StopRecording() {
    // Serialize against the capture thread while tearing down.
    _recLock.lock();
    try {
        // only stop if we are recording
        if (_audioRecord.getRecordingState() ==
          AudioRecord.RECORDSTATE_RECORDING) {
            // stop recording
            try {
                _audioRecord.stop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
                // Error path: the finally block still runs (unlock + re-init
                // flag), but the recorder is NOT released here.
                return -1;
            }
        }

        // release the object
        _audioRecord.release();
        _audioRecord = null;

    } finally {
        // Ensure we always unlock, both for success, exception or error
        // return.
        _doRecInit = true;
        _recLock.unlock();
    }

    _isRecording = false;
    return 0;
}
 
Example #30
Source File: Recorder.java    From VideoAndroid with Apache License 2.0 5 votes vote down vote up
/**
 * Audio recording thread: captures PCM from the microphone at urgent-audio
 * priority and feeds each chunk to the FFmpeg frame recorder until the run
 * flag is cleared, then stops and releases the recorder.
 */
@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    // Audio
    int bufferSize;
    ShortBuffer audioData;
    int bufferReadResult;

    bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    audioData = ShortBuffer.allocate(bufferSize);

    mAudioRecord.startRecording();

    /* ffmpeg_audio encoding loop */
    while (mRunAudioThread) {
        // Fetch a chunk of PCM samples from the microphone.
        bufferReadResult = mAudioRecord.read(audioData.array(), 0, audioData.capacity());
        if (bufferReadResult > 0) {
            // BUG FIX: only set the limit on a successful read — read() can
            // return a negative error code, and limit(negative) throws
            // IllegalArgumentException (the old code set the limit first).
            audioData.limit(bufferReadResult);
            if (mFFmpegFrameRecorder != null && mRecording) {
                try {
                    mFFmpegFrameRecorder.recordSamples(audioData);      // write the audio samples
                } catch (FFmpegFrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /* encoding finish, release recorder */
    if (mAudioRecord != null) {
        mAudioRecord.stop();
        mAudioRecord.release();
    }
}