Java Code Examples for android.media.AudioFormat#ENCODING_PCM_16BIT

The following examples show how to use android.media.AudioFormat#ENCODING_PCM_16BIT. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
Example 1
Source File: FromFileBase.java    From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * @param filePath path to the MP4 video file.
 * @param bitRate AAC bitrate in kb/s.
 * @return true on success, false on error (normally because the selected encoder
 * doesn't support the requested configuration or the device has no H264 encoder).
 * @throws IOException normally when the file is not found.
 */
public boolean prepareAudio(String filePath, int bitRate) throws IOException {
  audioPath = filePath;
  audioDecoder = new AudioDecoder(this, audioDecoderInterface, this);
  if (!audioDecoder.initExtractor(filePath)) return false;
  boolean result = audioEncoder.prepareAudioEncoder(bitRate, audioDecoder.getSampleRate(),
      audioDecoder.isStereo(), 0);
  prepareAudioRtp(audioDecoder.isStereo(), audioDecoder.getSampleRate());
  audioDecoder.prepareAudio();
  if (glInterface != null && !(glInterface instanceof OffScreenGlThread)) {
    int channel =
        audioDecoder.isStereo() ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int buffSize = AudioTrack.getMinBufferSize(audioDecoder.getSampleRate(), channel,
        AudioFormat.ENCODING_PCM_16BIT);
    audioTrackPlayer =
        new AudioTrack(AudioManager.STREAM_MUSIC, audioDecoder.getSampleRate(), channel,
            AudioFormat.ENCODING_PCM_16BIT, buffSize, AudioTrack.MODE_STREAM);
  }
  return result;
}
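On API 23 and above, the same 16-bit PCM playback track can be built with AudioTrack.Builder instead of the deprecated STREAM_MUSIC constructor. A minimal sketch, using fixed values in place of the decoder's sample rate and channel count:

int sampleRate = 44100;                           // stand-in for audioDecoder.getSampleRate()
int channelMask = AudioFormat.CHANNEL_OUT_STEREO; // stand-in for the isStereo() check
int buffSize = AudioTrack.getMinBufferSize(sampleRate, channelMask, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(sampleRate)
                .setChannelMask(channelMask)
                .build())
        .setBufferSizeInBytes(buffSize)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .build();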
 
Example 2
Source File: AudioCapture.java    From EvilsLive with MIT License
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    int numOfChannels, bitsPerSample;
    if (channelConfig == AudioFormat.CHANNEL_IN_MONO) {
        numOfChannels = 1;
    } else {
        numOfChannels = 2;
    }
    if (AudioFormat.ENCODING_PCM_16BIT == audioFormat) {
        bitsPerSample = 16;
    } else {
        bitsPerSample = 8;
    }
    int periodInFrames = sampleRate * TIMER_INTERVAL / 1000;    // frames per timer interval (frames per second equals the sample rate)
    // refer to android/4.1.1/frameworks/av/media/libmedia/AudioRecord.cpp, AudioRecord::getMinFrameCount method
    // multiply by 2 for ping-pong use of the record buffer
    mMinBufferSize = periodInFrames * 2 * numOfChannels * bitsPerSample / 8;
    if (mMinBufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat)) {
        // Make sure the buffer size is not smaller than the smallest allowed one
        mMinBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
        // Set frame period and timer interval accordingly
        // periodInFrames = mMinBufferSize / (2 * bitsPerSample * numOfChannels / 8);
    }

    return mMinBufferSize;
}
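As a worked instance of the formula above, assume TIMER_INTERVAL is 120 ms (an assumed value, not taken from the source file) and the source is 44100 Hz stereo 16-bit PCM:

int sampleRate = 44100, timerIntervalMs = 120, numOfChannels = 2, bitsPerSample = 16;
int periodInFrames = sampleRate * timerIntervalMs / 1000;               // 5292 frames per interval
int candidate = periodInFrames * 2 * numOfChannels * bitsPerSample / 8; // 42336 bytes
// candidate is then clamped up to AudioRecord.getMinBufferSize(...) if it is smaller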
 
Example 3
Source File: Microphone.java    From ssj with GNU General Public License v3.0
public static int audioFormatSampleBytes(int f)
{
    switch (f)
    {
        case AudioFormat.ENCODING_PCM_8BIT:
            return 1;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return 2;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4;
        case AudioFormat.ENCODING_INVALID:
        default:
            return 0;
    }
}
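A typical use of this helper is converting a raw byte count into per-channel sample counts; a short sketch with illustrative values:

int bytesPerSample = Microphone.audioFormatSampleBytes(AudioFormat.ENCODING_PCM_16BIT); // 2
int channels = 2;
int bytesRead = 8192;                                            // illustrative value
int samplesPerChannel = bytesRead / (bytesPerSample * channels); // 2048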
 
Example 4
Source File: SpeechRecord.java    From AlexaAndroid with GNU General Public License v2.0
public SpeechRecord(int sampleRateInHz, int bufferSizeInBytes, boolean noise, boolean gain, boolean echo)
        throws IllegalArgumentException {

    this(
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
            sampleRateInHz,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes,
            noise,
            gain,
            echo
    );
}
 
Example 5
Source File: AudioRecorderActivity.java    From CameraV with GNU General Public License v3.0
private void initAudio(final String audioPath) throws Exception {

    fileAudio = new File(audioPath);

    outputStreamAudio = new BufferedOutputStream(
            new info.guardianproject.iocipher.FileOutputStream(fileAudio), 8192 * 8);

    if (useAAC) {
        aac = new AACHelper();
        aac.setEncoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels,
                MediaConstants.sAudioBitRate);
    } else {
        int minBufferSize = AudioRecord.getMinBufferSize(MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigIn,
                AudioFormat.ENCODING_PCM_16BIT) * 8;

        audioData = new byte[minBufferSize];

        int audioSource = MediaRecorder.AudioSource.CAMCORDER;
        // audioSource = MediaRecorder.AudioSource.MIC;

        audioRecord = new AudioRecord(audioSource,
                MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigIn,
                AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize);
    }
}
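The PCM branch only prepares the recorder and buffer; the capture loop itself lives elsewhere in the class. A minimal sketch of what such a loop could look like, assuming a hypothetical isRecording flag not present in the original file:

audioRecord.startRecording();
while (isRecording) {                                // hypothetical stop flag
    int read = audioRecord.read(audioData, 0, audioData.length);
    if (read > 0) {
        outputStreamAudio.write(audioData, 0, read); // raw 16-bit PCM to the encrypted stream
    }
}
audioRecord.stop();
audioRecord.release();
outputStreamAudio.close();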
 
Example 6
Source File: SpeechRecord.java    From AlexaAndroid with GNU General Public License v2.0
public SpeechRecord(int sampleRateInHz, int bufferSizeInBytes)
        throws IllegalArgumentException {

    this(
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
            sampleRateInHz,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes,
            false,
            false,
            false
    );
}
 
Example 7
Source File: AndroidAudioPlayer.java    From cythara with GNU General Public License v3.0
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples  The requested buffer size in samples.
 * @param streamType  The type of audio stream that the internal AudioTrack should use. For
 *                    example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();

    //The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits()/8;

    // From the Android API about getMinBufferSize():
    // The total size (in bytes) of the internal buffer where audio data is read from for playback.
    // If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less than or equal to this size,
    // and it is typical to use chunks of 1/2 of the total size to permit double-buffering. If the track's creation mode is MODE_STATIC,
    // this is the maximum length sample, or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int) to determine
    // the minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode. Using values smaller
    // than getMinBufferSize() will result in an initialization failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,  AudioFormat.ENCODING_PCM_16BIT);
    if(minBufferSizeInBytes > bufferSizeInBytes){
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes/(audioFormat.getSampleSizeInBits()/8)) + " (samples) according to  AudioTrack.getMinBufferSize().");
    }

    //http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes,AudioTrack.MODE_STREAM);

    audioTrack.play();
}
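A usage sketch, assuming TarsosDSP's TarsosDSPAudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian) constructor:

TarsosDSPAudioFormat format = new TarsosDSPAudioFormat(44100, 16, 1, true, false); // mono 16-bit PCM
AndroidAudioPlayer player = new AndroidAudioPlayer(format, 4096, AudioManager.STREAM_MUSIC);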
 
Example 8
Source File: Recorder.java    From VideoAndroid with Apache License 2.0
@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    // Audio
    int bufferSize;
    ShortBuffer audioData;
    int bufferReadResult;

    bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    audioData = ShortBuffer.allocate(bufferSize);

    mAudioRecord.startRecording();

    /* ffmpeg_audio encoding loop */
    while (mRunAudioThread) {
        // read the captured audio data
        bufferReadResult = mAudioRecord.read(audioData.array(), 0, audioData.capacity());
        audioData.limit(bufferReadResult);
        if (bufferReadResult > 0) {
            if(mFFmpegFrameRecorder != null && mRecording) {
                try {
                    mFFmpegFrameRecorder.recordSamples(audioData);      // write the audio samples
                } catch (FFmpegFrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /* encoding finish, release recorder */
    if (mAudioRecord != null) {
        mAudioRecord.stop();
        mAudioRecord.release();
    }
}
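Note that getMinBufferSize() returns a size in bytes while ShortBuffer.allocate() takes a count of 16-bit shorts, so the buffer above is twice the minimum size. Harmless, but sizing in shorts would be tighter; a sketch:

int bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ShortBuffer audioData = ShortBuffer.allocate(bufferSizeInBytes / 2); // 2 bytes per 16-bit sample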
 
Example 9
Source File: SpeechRecord.java    From speechutils with Apache License 2.0
public SpeechRecord(int sampleRateInHz, int bufferSizeInBytes)
        throws IllegalArgumentException {

    this(
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
            sampleRateInHz,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes,
            false,
            false,
            false
    );
}
 
Example 10
Source File: AudioPlayback.java    From MediaPlayer-Extended with Apache License 2.0
/**
 * Initializes or reinitializes the audio track with the supplied format for playback
 * while keeping the playstate. Keeps the current configuration and skips reinitialization
 * if the new format is the same as the current format.
 */
public void init(MediaFormat format) {
    Log.d(TAG, "init");

    boolean playing = false;

    if(isInitialized()) {
        if(!checkIfReinitializationRequired(format)) {
            // Set new format that equals the old one (in case we compare references somewhere)
            mAudioFormat = format;
            return;
        }

        playing = isPlaying();
        pause();
        stopAndRelease(false);
    } else {
        // deferred creation of the audio thread until its first use
        mAudioThread = new AudioThread();
        mAudioThread.setPaused(true);
        mAudioThread.start();
    }

    mAudioFormat = format;

    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int bytesPerSample = 2;
    mFrameSize = bytesPerSample * channelCount;
    mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

    int channelConfig = AudioFormat.CHANNEL_OUT_DEFAULT;
    switch(channelCount) {
        case 1:
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case 2:
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        case 4:
            channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
            break;
        case 6:
            channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
            break;
        case 8:
            channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
    }

    mPlaybackBufferSize = mFrameChunkSize * channelCount;

    mAudioTrack = new AudioTrack(
            mAudioStreamType,
            mSampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            mPlaybackBufferSize, // at least twice the size to enable double buffering (according to docs)
            AudioTrack.MODE_STREAM, mAudioSessionId);

    if(mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        stopAndRelease();
        throw new IllegalStateException("audio track init failed");
    }

    mAudioSessionId = mAudioTrack.getAudioSessionId();
    mAudioStreamType = mAudioTrack.getStreamType();
    setStereoVolume(mVolumeLeft, mVolumeRight);
    mPresentationTimeOffsetUs = PTS_NOT_SET;

    if(playing) {
        play();
    }
}
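checkIfReinitializationRequired() is not part of this excerpt; a plausible sketch, assuming reinitialization is needed whenever the sample rate, channel count, or MIME type changes (the real implementation may differ):

private boolean checkIfReinitializationRequired(MediaFormat newFormat) { // hypothetical body
    return mAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)
                   != newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)
            || mAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
                   != newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
            || !mAudioFormat.getString(MediaFormat.KEY_MIME)
                   .equals(newFormat.getString(MediaFormat.KEY_MIME));
}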
 
Example 11
Source File: AudioRecorder.java    From TikTok with Apache License 2.0
@Override
public void run() {
    try {
        // initialize the audio recorder
        int bufferSizeInBytes = AudioRecord
                .getMinBufferSize(audioSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                audioSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
        if (audioRecord == null) {
            mOnAudioRecorderListener.onNotPermission();
            return;
        }
        audioRecord.startRecording();

        /**
         * Judge from the recording state whether we have the record permission
         */
        if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING
                && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
//            AVLogUtils.e(TAG, "audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING : " + audioRecord.getRecordingState());
            isAudioPermission = false;
        }

        if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
            // If checks run in quick succession, a previous AudioRecord may not have been
            // released yet and the state comes back as RECORDSTATE_STOPPED; a subsequent read
            // then returns a size of 0. Return true or false here as your use case requires.
            isAudioPermission = false;
        }

        if (!isAudioPermission) {
            mOnAudioRecorderListener.onNotPermission();
            return;
        }
        mOnAudioRecorderListener.onCanRecord(isAudioPermission);

        byte[] data = new byte[2048];
        while (isRecord) {
            if (audioRecord == null) {
                return;
            }
            int offset = 0;
            while (offset < 2048) {
                int readSize = audioRecord.read(data, offset, data.length - offset);
                offset += readSize;
            }
            if (isAudioRecordWrite) { // write to file
                HeyhouRecorder.getInstance().recordAudioNHW(data, audioSampleRate, HeyhouRecorder.FORMAT_S16, 1024);
            }
        }
        audioRecord.stop();
        audioRecord.release();
    } catch (Exception e) {
        e.printStackTrace();
        mOnAudioRecorderListener.onRecordError("录音失败"); // "recording failed"
    }
}
 
Example 12
Source File: CheckPermissionUtil.java    From TikTok with Apache License 2.0
/**
 * Checks whether the app has the record-audio permission
 */
public static boolean isHasAudioPermission(final Context context) {
    int bufferSizeInBytes = AudioRecord.getMinBufferSize(44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
    // start recording
    try {
        // guard against crashes on some phones, e.g. Lenovo
        audioRecord.startRecording();
    } catch (IllegalStateException e) {
        e.printStackTrace();
//            AVLogUtils.e(TAG, Log.getStackTraceString(e));
    }
    /**
     * Judge from the recording state whether we have the record permission
     */
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING
            && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
//            AVLogUtils.e(TAG, "audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING : " + audioRecord.getRecordingState());
        return false;
    }

    if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
        // If checks run in quick succession, a previous AudioRecord may not have been released
        // yet and the state comes back as RECORDSTATE_STOPPED; a subsequent read then returns
        // a size of 0. Return true or false here as your use case requires.
        return false;
    }

    byte[] bytes = new byte[1024];
    int readSize = audioRecord.read(bytes, 0, 1024);
    if (readSize == AudioRecord.ERROR_INVALID_OPERATION || readSize <= 0) {
//            AVLogUtils.e(TAG, "readSize illegal : " + readSize);
        return false;
    }
    audioRecord.stop();
    audioRecord.release();
    audioRecord = null;

    return true;
}
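On API 23 and above, a direct runtime-permission query avoids constructing an AudioRecord at all; a minimal sketch:

boolean canRecord = context.checkSelfPermission(android.Manifest.permission.RECORD_AUDIO)
        == android.content.pm.PackageManager.PERMISSION_GRANTED; // API 23+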
 
Example 13
Source File: CheckPermission.java    From imsdk-android with MIT License
/**
 * Checks whether the app has the record-audio permission
 *
 * @return one of STATE_SUCCESS, STATE_RECORDING, STATE_NO_PERMISSION
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat
            .ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100, AudioFormat
            .CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize = 0;
    try {
        audioRecord.startRecording(); // check whether recording can start
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the build version first.
        // Here we detect whether another recording is already in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.d("CheckAudioPermission", "录音机被占用"); // "the recorder is in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);

        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            Log.d("CheckAudioPermission", "录音的结果为空"); // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            return STATE_SUCCESS;
        }
    }
}
 
Example 14
Source File: MicOpusRecorder.java    From DeviceConnect-Android with MIT License
/**
 * Records audio and passes it to the MediaCodec.
 */
private void recordAudio() throws NativeInterfaceException {
    int samplingRate = mSamplingRate.getValue();
    int channels = mChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = AudioRecord.getMinBufferSize(samplingRate, channels, audioFormat) * 4;
    int oneFrameDataCount = mSamplingRate.getValue() / mFrameSize.getFps();

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            samplingRate,
            channels,
            audioFormat,
            bufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        if (mAudioRecordCallback != null) {
            mAudioRecordCallback.onEncoderError();
        }
        return;
    }

    if (mUseAEC && AcousticEchoCanceler.isAvailable()) {
        // noise canceller
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    OpusEncoder opusEncoder = null;

    try {
        opusEncoder = new OpusEncoder(mSamplingRate, mChannels, mFrameSize, mBitRate, mApplication);

        mAudioRecord.startRecording();

        short[] emptyBuffer = new short[oneFrameDataCount];
        short[] pcmBuffer = new short[oneFrameDataCount];
        byte[] opusFrameBuffer = opusEncoder.bufferAllocate();
        while (!mStopFlag) {
            int readSize = mAudioRecord.read(pcmBuffer, 0, oneFrameDataCount);
            if (readSize > 0) {
                int opusFrameBufferLength;
                if (isMute()) {
                    opusFrameBufferLength = opusEncoder.encode(emptyBuffer, readSize, opusFrameBuffer);
                } else {
                    opusFrameBufferLength = opusEncoder.encode(pcmBuffer, readSize, opusFrameBuffer);
                }

                if (opusFrameBufferLength > 0 && mAudioRecordCallback != null) {
                    mAudioRecordCallback.onPeriodicNotification(opusFrameBuffer, opusFrameBufferLength);
                }
            } else if (readSize == AudioRecord.ERROR_INVALID_OPERATION) {
                if (DEBUG) {
                    Log.e(TAG, "Invalid operation error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR_BAD_VALUE) {
                if (DEBUG) {
                    Log.e(TAG, "Bad value error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR) {
                if (DEBUG) {
                    Log.e(TAG, "Unknown error.");
                }
                break;
            }
        }
    } finally {
        if (mEchoCanceler != null) {
            mEchoCanceler.release();
            mEchoCanceler = null;
        }

        if (opusEncoder != null) {
            opusEncoder.release();
        }
    }
}
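Opus encodes fixed-duration frames, so oneFrameDataCount above is simply samples per frame per channel; a worked instance with assumed values:

int samplingRate = 48000;                               // Hz, assumed value
int framesPerSecond = 50;                               // 20 ms Opus frames
int oneFrameDataCount = samplingRate / framesPerSecond; // 960 samples per channel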
 
Example 15
Source File: FileSynthesisCallback.java    From android_9.0.0_r45 with Apache License 2.0
@Override
public int start(int sampleRateInHz, int audioFormat, int channelCount) {
    if (DBG) {
        Log.d(TAG, "FileSynthesisRequest.start(" + sampleRateInHz + "," + audioFormat
                + "," + channelCount + ")");
    }
    if (audioFormat != AudioFormat.ENCODING_PCM_8BIT &&
        audioFormat != AudioFormat.ENCODING_PCM_16BIT &&
        audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
        Log.e(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " +
                   "of AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT or " +
                   "AudioFormat.ENCODING_PCM_FLOAT");
    }
    mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);

    FileChannel fileChannel = null;
    synchronized (mStateLock) {
        if (mStatusCode == TextToSpeech.STOPPED) {
            if (DBG) Log.d(TAG, "Request has been aborted.");
            return errorCodeOnStop();
        }
        if (mStatusCode != TextToSpeech.SUCCESS) {
            if (DBG) Log.d(TAG, "Error was raised");
            return TextToSpeech.ERROR;
        }
        if (mStarted) {
            Log.e(TAG, "Start called twice");
            return TextToSpeech.ERROR;
        }
        mStarted = true;
        mSampleRateInHz = sampleRateInHz;
        mAudioFormat = audioFormat;
        mChannelCount = channelCount;

        mDispatcher.dispatchOnStart();
        fileChannel = mFileChannel;
    }

    try {
        fileChannel.write(ByteBuffer.allocate(WAV_HEADER_LENGTH));
        return TextToSpeech.SUCCESS;
    } catch (IOException ex) {
        Log.e(TAG, "Failed to write wav header to output file descriptor", ex);
        synchronized (mStateLock) {
            cleanUp();
            mStatusCode = TextToSpeech.ERROR_OUTPUT;
        }
        return TextToSpeech.ERROR;
    }
}
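The WAV_HEADER_LENGTH bytes reserved above are rewritten once synthesis completes and the total data length is known. The header's derived fields follow from the three start() parameters; a sketch of the standard arithmetic with assumed values:

int sampleRateInHz = 22050;                     // assumed
int channelCount = 1;                           // assumed
int bytesPerSample = 2;                         // ENCODING_PCM_16BIT
int blockAlign = channelCount * bytesPerSample; // bytes per PCM frame
int byteRate = sampleRateInHz * blockAlign;     // bytes per second of audio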
 
Example 16
Source File: CheckPermission.java    From CameraView with Apache License 2.0
/**
 * Checks whether the app has the record-audio permission
 *
 * @return one of STATE_SUCCESS, STATE_RECORDING, STATE_NO_PERMISSION
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat
            .ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100, AudioFormat
            .CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize = 0;
    try {
        audioRecord.startRecording(); // check whether recording can start
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the build version first.
        // Here we detect whether another recording is already in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.d("CheckAudioPermission", "录音机被占用"); // "the recorder is in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);

        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            Log.d("CheckAudioPermission", "录音的结果为空"); // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            return STATE_SUCCESS;
        }
    }
}
 
Example 17
Source File: WebRtcAudioTrack.java    From webrtc_android with MIT License
@SuppressWarnings("deprecation") // Deprecated in API level 25.
private static AudioTrack createAudioTrackOnLowerThanLollipop(
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
      AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
}
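On API 21 and above the non-deprecated AudioAttributes-based constructor can be used instead; a sketch of that variant (the method name and attribute choices here are illustrative, not the file's exact code):

private static AudioTrack createAudioTrackOnLollipopOrHigher(   // hypothetical sibling method
    int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
  return new AudioTrack(
      new AudioAttributes.Builder()
          .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
          .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
          .build(),
      new AudioFormat.Builder()
          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
          .setSampleRate(sampleRateInHz)
          .setChannelMask(channelConfig)
          .build(),
      bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}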
 
Example 18
Source File: RawSamples.java    From Android-Audio-Recorder with Apache License 2.0
public static long getSamples(long len) {
    return len / (AUDIO_FORMAT == AudioFormat.ENCODING_PCM_16BIT ? 2 : 1);
}
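Since 16-bit PCM uses two bytes per sample, a byte length maps to half as many samples; for example:

long samples = RawSamples.getSamples(88200); // 44100 samples: one second of 44.1 kHz mono 16-bit audio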
 
Example 19
Source File: RecordUtil.java    From WeiXinRecordedDemo with MIT License
private void initAudioRecord(){
    audioBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, audioBufferSize);
}
 
Example 20
Source File: AudioParameters.java    From Saiy-PS with GNU Affero General Public License v3.0
public static AudioParameters getDefaultBeyondVerbal(){
    return new AudioParameters(AudioFormat.ENCODING_PCM_16BIT,
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
            AudioFormat.CHANNEL_IN_MONO, 1, 8000, 16);
}