Java Code Examples for android.media.MediaRecorder#stop()

The following examples show how to use android.media.MediaRecorder#stop(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
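
Before looking at the examples, note that stop() throws an IllegalStateException if the recorder was never started, and intentionally throws a RuntimeException when no valid audio/video data has been received (for example, when stop() is called immediately after start()); in that case the output file should be deleted. Below is a minimal sketch of that stop/release pattern; the stopAndRelease name and outputFile parameter are illustrative and not taken from any project in this list.

static void stopAndRelease(MediaRecorder recorder, File outputFile) {
    if (recorder == null) {
        return;
    }
    try {
        recorder.stop();      // throws RuntimeException if no valid data was recorded
    } catch (RuntimeException e) {
        // stop() was called too soon after start(): the output holds no usable data
        if (outputFile != null) {
            outputFile.delete();
        }
    } finally {
        recorder.release();   // always free the native recorder resources
    }
}
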
Example 1
Source File: CameraController.java    From KrGallery with GNU General Public License v2.0
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        // Recording hit its duration/size limit (or reported an unknown event):
        // detach the field first so no other code path stops the same recorder twice.
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            // Build a thumbnail of the finished recording and deliver it on the UI thread.
            final Bitmap bitmap = ThumbnailUtils.createVideoThumbnail(recordedFile, MediaStore.Video.Thumbnails.MINI_KIND);
            AndroidUtilities.runOnUIThread(new Runnable() {
                @Override
                public void run() {
                    if (onVideoTakeCallback != null) {
                        onVideoTakeCallback.onFinishVideoRecording(bitmap);
                        onVideoTakeCallback = null;
                    }
                }
            });
        }
    }
}
 
Example 2
Source File: VideoOverlay.java    From backgroundvideo with GNU General Public License v3.0
public String Stop() throws IOException {
    Log.d(TAG, "stopRecording called");

    if (mRecorder != null) {
        MediaRecorder tempRecorder = mRecorder;
        mRecorder = null;
        try {
            tempRecorder.stop();
        } catch (Exception e) {
            //This can occur when the camera failed to start and then stop is called
            Log.e(TAG, "Could not stop recording.", e);
        }
    }

    this.releaseCamera();
    this.detachView();

    return this.mFilePath;
}
 
Example 3
Source File: CameraController.java    From TelePlus-Android with GNU General Public License v2.0
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            finishRecordingVideo();
        }
    }
}
 
Example 4
Source File: AudioRecorder.java    From PrivacyStreams with Apache License 2.0
static Audio recordAudio(UQI uqi, long duration) throws IOException {
    List<Integer> amplitudes = new ArrayList<>();

    MediaRecorder recorder = new MediaRecorder();
    recorder.setAudioSource(Globals.AudioConfig.audioSource);
    recorder.setOutputFormat(Globals.AudioConfig.outputFormat);
    recorder.setAudioEncoder(Globals.AudioConfig.audioEncoder);

    String audioPath = "temp/audio_" + TimeUtils.getTimeTag() + ".amr";
    File tempAudioFile = StorageUtils.getValidFile(uqi.getContext(), audioPath, false);
    recorder.setOutputFile(tempAudioFile.getAbsolutePath());

    recorder.prepare();
    recorder.start();   // Recording is now started

    long startTime = System.currentTimeMillis();
    while (true) {
        long currentTime = System.currentTimeMillis();
        if (currentTime - startTime > duration) {
            break;
        }
        amplitudes.add(recorder.getMaxAmplitude());
    }

    recorder.stop();
    recorder.reset();   // You can reuse the object by going back to setAudioSource() step
    recorder.release(); // Now the object cannot be reused

    AudioData audioData = AudioData.newTempRecord(tempAudioFile, amplitudes);

    return new Audio(startTime, audioData);
}
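
The amplitude loop above spins as fast as the CPU allows between start() and stop(). A variation that samples getMaxAmplitude() at a fixed interval is sketched below; the 100 ms interval is an assumption, and recorder, amplitudes, and duration are the same locals used in the example.

long startTime = System.currentTimeMillis();
while (System.currentTimeMillis() - startTime < duration) {
    amplitudes.add(recorder.getMaxAmplitude());   // maximum amplitude since the previous call
    try {
        Thread.sleep(100);                        // assumed sampling interval
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();       // restore the interrupt flag and stop sampling
        break;
    }
}
recorder.stop();
recorder.release();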
 
Example 5
Source File: MainActivity.java    From astrobee_android with Apache License 2.0
public void onRecordClick(View v) {
    if (!mRecording) {
        File f = new File(getExternalFilesDir(null), "recording.mp4");

        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        mRecorder.setOutputFile(f.getAbsolutePath());
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC_ELD);
        mRecorder.setAudioSamplingRate(48000);
        mRecorder.setAudioEncodingBitRate(96000);

        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Log.e(TAG, "unable to prepare MediaRecorder");
            mRecorder = null;
            return;
        }

        mRecorder.start();
        mRecording = true;

        setState(STATE_RECORDING);
    } else {
        mRecorder.stop();
        mRecorder.release();
        mRecording = false;

        setState(STATE_IDLE);
    }
}
 
Example 6
Source File: CameraController.java    From Telegram-FOSS with GNU General Public License v2.0
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            finishRecordingVideo();
        }
    }
}
 
Example 7
Source File: CameraController.java    From Telegram with GNU General Public License v2.0
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            finishRecordingVideo();
        }
    }
}
 
Example 8
Source File: CallStateBroadcastReceiver.java    From PhoneMonitor with GNU General Public License v3.0
@Override
public void onReceive(Context context, Intent intent) {
    Intent startMainServiceIntent = new Intent(context, MainService.class);
    context.startService(startMainServiceIntent);

    String action = intent.getAction();
    if (action != null && action.equals("android.intent.action.PHONE_STATE")) {
        String number = intent.getStringExtra(TelephonyManager.EXTRA_INCOMING_NUMBER);
        if (number != null) {
            String callState = intent.getStringExtra(TelephonyManager.EXTRA_STATE);
            Log.w(AppSettings.getTAG(), "Broadcast received!\n" + action + number + callState);
            if (callState.equals(TelephonyManager.EXTRA_STATE_OFFHOOK) || callState.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
                if (!recordingState) {
                    /* start recording audio */
                    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.getDefault());
                    outputFileName = context.getFilesDir().getAbsolutePath() + "/" + dateFormat.format(new Date()) + ".mp4.tmp";
                    mediaRecorder = new MediaRecorder();
                    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
                    mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
                    mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
                    mediaRecorder.setOutputFile(outputFileName);
                    try {
                        mediaRecorder.prepare();
                        mediaRecorder.start();
                        recordingState = true;
                        Log.w(AppSettings.getTAG(), "Recording started to " + outputFileName);
                    } catch (IOException ioexception) {
                        Log.w(AppSettings.getTAG(), ioexception.getMessage() + " while recording audio.");
                        mediaRecorder.release();
                        recordingState = false;
                    }
                }
            } else if (callState.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
                if (recordingState) {
                    mediaRecorder.stop();
                    mediaRecorder.release();
                    HelperMethods.renameTmpFile(outputFileName);//rename .tmp to .mp4
                    HelperMethods.removeBrokenTmpFiles(context.getFilesDir().getAbsolutePath() + "/");//remove any orphan .tmp files
                    recordingState = false;
                    Log.w(AppSettings.getTAG(), "Recording stopped");
                }
            }

        }


    }

}
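
If stop() is called here before any audio has been captured, it throws a RuntimeException, the .tmp file is never renamed, and the uncaught exception crashes the receiver. A defensive variant of the idle branch is sketched below; it is only a sketch, reusing the fields and HelperMethods calls from the example and assuming java.io.File is imported.

} else if (callState.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
    if (recordingState) {
        try {
            mediaRecorder.stop();
            HelperMethods.renameTmpFile(outputFileName);      // rename .tmp to .mp4
        } catch (RuntimeException e) {
            // the call ended before any audio was captured; drop the unusable .tmp file
            new File(outputFileName).delete();
        } finally {
            mediaRecorder.release();
            recordingState = false;
        }
        HelperMethods.removeBrokenTmpFiles(context.getFilesDir().getAbsolutePath() + "/");
        Log.w(AppSettings.getTAG(), "Recording stopped");
    }
}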
 
Example 9
Source File: AACStream.java    From libstreaming with Apache License 2.0
/**
 * Records a short sample of AAC ADTS from the microphone to find out what the sampling rate really is.
 * On some phones, no error is reported when the actual sampling rate differs from the one selected
 * with setAudioSamplingRate.
 * @throws IOException
 * @throws IllegalStateException
 */
@SuppressLint("InlinedApi")
private void testADTS() throws IllegalStateException, IOException {
	
	setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
	try {
		Field name = MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
		setOutputFormat(name.getInt(null));
	}
	catch (Exception ignore) {
		setOutputFormat(6);
	}

	String key = PREF_PREFIX+"aac-"+mQuality.samplingRate;

	if (mSettings!=null && mSettings.contains(key)) {
		String[] s = mSettings.getString(key, "").split(",");
		mQuality.samplingRate = Integer.valueOf(s[0]);
		mConfig = Integer.valueOf(s[1]);
		mChannel = Integer.valueOf(s[2]);
		return;
	}

	final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/spydroid-test.adts";

	if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
		throw new IllegalStateException("No external storage or external storage not ready !");
	}

	// The structure of an ADTS packet is described here: http://wiki.multimedia.cx/index.php?title=ADTS

	// ADTS header is 7 or 9 bytes long
	byte[] buffer = new byte[9];

	mMediaRecorder = new MediaRecorder();
	mMediaRecorder.setAudioSource(mAudioSource);
	mMediaRecorder.setOutputFormat(mOutputFormat);
	mMediaRecorder.setAudioEncoder(mAudioEncoder);
	mMediaRecorder.setAudioChannels(1);
	mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
	mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
	mMediaRecorder.setOutputFile(TESTFILE);
	mMediaRecorder.setMaxDuration(1000);
	mMediaRecorder.prepare();
	mMediaRecorder.start();

	// setMaxDuration(1000) limits the test recording to 1 s; sleep a little longer so it has time to finish
	// TODO: use the MediaRecorder.OnInfoListener instead of a fixed sleep
	try {
		Thread.sleep(2000);
	} catch (InterruptedException e) {}

	mMediaRecorder.stop();
	mMediaRecorder.release();
	mMediaRecorder = null;

	File file = new File(TESTFILE);
	RandomAccessFile raf = new RandomAccessFile(file, "r");

	// ADTS packets start with a sync word: 12bits set to 1
	while (true) {
		if ( (raf.readByte()&0xFF) == 0xFF ) {
			buffer[0] = raf.readByte();
			if ( (buffer[0]&0xF0) == 0xF0) break;
		}
	}

	raf.read(buffer,1,5);

	mSamplingRateIndex = (buffer[1]&0x3C)>>2 ;
	mProfile = ( (buffer[1]&0xC0) >> 6 ) + 1 ;
	mChannel = (buffer[1]&0x01) << 2 | (buffer[2]&0xC0) >> 6 ;
	mQuality.samplingRate = AUDIO_SAMPLING_RATES[mSamplingRateIndex];

	// 5 bits for the object type / 4 bits for the sampling rate / 4 bits for the channel / padding
	mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;

	Log.i(TAG,"MPEG VERSION: " + ( (buffer[0]&0x08) >> 3 ) );
	Log.i(TAG,"PROTECTION: " + (buffer[0]&0x01) );
	Log.i(TAG,"PROFILE: " + AUDIO_OBJECT_TYPES[ mProfile ] );
	Log.i(TAG,"SAMPLING FREQUENCY: " + mQuality.samplingRate );
	Log.i(TAG,"CHANNEL: " + mChannel );

	raf.close();

	if (mSettings!=null) {
		Editor editor = mSettings.edit();
		editor.putString(key, mQuality.samplingRate+","+mConfig+","+mChannel);
		editor.commit();
	}

	if (!file.delete()) Log.e(TAG,"Temp file could not be erased");

}
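
The TODO in the method above could be addressed with a MediaRecorder.OnInfoListener: setMaxDuration(1000) makes the recorder emit MEDIA_RECORDER_INFO_MAX_DURATION_REACHED after roughly one second, so the test can wait for that event instead of sleeping for a fixed two seconds. The following sketch of the prepare/start/wait/stop section is an assumption, not part of libstreaming; it needs java.util.concurrent.CountDownLatch and TimeUnit.

final CountDownLatch done = new CountDownLatch(1);
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
	@Override
	public void onInfo(MediaRecorder mr, int what, int extra) {
		if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
			done.countDown();	// the 1 s test recording has finished
		}
	}
});
mMediaRecorder.prepare();
mMediaRecorder.start();
try {
	// Wait for the max-duration event, with a safety timeout in case it never arrives
	done.await(3, TimeUnit.SECONDS);
} catch (InterruptedException e) {
	Thread.currentThread().interrupt();
}
mMediaRecorder.stop();
mMediaRecorder.release();
mMediaRecorder = null;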