Listed below are real-world code examples of android.media.MediaRecorder#release(); you can also follow the links to view the full source code on GitHub.
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
            || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
            || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            final Bitmap bitmap = ThumbnailUtils.createVideoThumbnail(recordedFile, MediaStore.Video.Thumbnails.MINI_KIND);
            AndroidUtilities.runOnUIThread(new Runnable() {
                @Override
                public void run() {
                    if (onVideoTakeCallback != null) {
                        onVideoTakeCallback.onFinishVideoRecording(bitmap);
                        onVideoTakeCallback = null;
                    }
                }
            });
        }
    }
}
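The onInfo() callbacks above are only delivered if a limit and an info listener were registered on the recorder before start(). The following is a minimal setup sketch, assuming the enclosing class implements MediaRecorder.OnInfoListener; the audio configuration and limit values are illustrative (the projects above record video).
private MediaRecorder recorder;

private void startWithLimits(String outputPath) throws IOException {
    recorder = new MediaRecorder();
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    recorder.setOutputFile(outputPath);
    recorder.setMaxDuration(60 * 1000);          // triggers MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
    recorder.setMaxFileSize(10L * 1024 * 1024);  // triggers MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
    recorder.setOnInfoListener(this);            // "this" implements an onInfo() like the ones above
    recorder.prepare();
    recorder.start();
}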
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
            || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
            || what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
        MediaRecorder tempRecorder = recorder;
        recorder = null;
        if (tempRecorder != null) {
            tempRecorder.stop();
            tempRecorder.release();
        }
        if (onVideoTakeCallback != null) {
            finishRecordingVideo();
        }
    }
}
static Audio recordAudio(UQI uqi, long duration) throws IOException {
    List<Integer> amplitudes = new ArrayList<>();
    MediaRecorder recorder = new MediaRecorder();
    recorder.setAudioSource(Globals.AudioConfig.audioSource);
    recorder.setOutputFormat(Globals.AudioConfig.outputFormat);
    recorder.setAudioEncoder(Globals.AudioConfig.audioEncoder);
    String audioPath = "temp/audio_" + TimeUtils.getTimeTag() + ".amr";
    File tempAudioFile = StorageUtils.getValidFile(uqi.getContext(), audioPath, false);
    recorder.setOutputFile(tempAudioFile.getAbsolutePath());
    recorder.prepare();
    recorder.start(); // Recording is now started
    long startTime = System.currentTimeMillis();
    while (true) {
        long currentTime = System.currentTimeMillis();
        if (currentTime - startTime > duration) {
            break;
        }
        amplitudes.add(recorder.getMaxAmplitude());
    }
    recorder.stop();
    recorder.reset();   // You can reuse the object by going back to the setAudioSource() step
    recorder.release(); // Now the object cannot be reused
    AudioData audioData = AudioData.newTempRecord(tempAudioFile, amplitudes);
    return new Audio(startTime, audioData);
}
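Because release() should run even when stop() fails, a common defensive variation (a sketch, not part of the snippet above) wraps the recording in try/finally. Note that MediaRecorder.stop() throws a RuntimeException when no valid data was captured.
static void recordAndAlwaysRelease(MediaRecorder recorder, long durationMillis) throws IOException, InterruptedException {
    try {
        // ... setAudioSource()/setOutputFormat()/setAudioEncoder()/setOutputFile() ...
        recorder.prepare();
        recorder.start();
        Thread.sleep(durationMillis);
    } finally {
        try {
            recorder.stop();             // throws if no valid data was recorded
        } catch (RuntimeException stopFailed) {
            // the output file is most likely unusable at this point
        }
        recorder.release();              // always free the native recorder resources
    }
}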
public void onRecordClick(View v) {
    if (!mRecording) {
        File f = new File(getExternalFilesDir(null), "recording.mp4");
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        mRecorder.setOutputFile(f.getAbsolutePath());
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC_ELD);
        mRecorder.setAudioSamplingRate(48000);
        mRecorder.setAudioEncodingBitRate(96000);
        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Log.e(TAG, "unable to prepare MediaRecorder");
            mRecorder = null;
            return;
        }
        mRecorder.start();
        mRecording = true;
        setState(STATE_RECORDING);
    } else {
        mRecorder.stop();
        mRecorder.release();
        mRecorder = null; // drop the reference to the released recorder
        mRecording = false;
        setState(STATE_IDLE);
    }
}
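In an Activity like the one above, the recorder is usually also stopped and released when the component goes to the background so the microphone is not held while the UI is invisible. A sketch reusing the fields from the click handler (mRecorder, mRecording, setState(), STATE_IDLE) might look like this:
@Override
protected void onPause() {
    super.onPause();
    if (mRecording && mRecorder != null) {
        mRecorder.stop();
        mRecorder.release();   // release the microphone while the activity is not visible
        mRecorder = null;
        mRecording = false;
        setState(STATE_IDLE);
    }
}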
private void releaseRecorderResources() {
    MediaRecorder recorder = getMediaRecorder();
    if (recorder != null) {
        recorder.release();
        setMediaRecorder(null);
    }
}
/**
 * Prepares for a new audio recording.
 */
@WorkerThread
public synchronized boolean prepareRecord(int audioSource, int outputFormat, int audioEncoder,
                                          int sampleRate, int bitRate, File outputFile) {
    stopRecord();
    mRecorder = new MediaRecorder();
    mRecorder.setAudioSource(audioSource);
    mRecorder.setOutputFormat(outputFormat);
    mRecorder.setAudioSamplingRate(sampleRate);
    mRecorder.setAudioEncodingBitRate(bitRate);
    mRecorder.setAudioEncoder(audioEncoder);
    mRecorder.setOutputFile(outputFile.getAbsolutePath());
    // Handle IOException
    try {
        mRecorder.prepare();
    } catch (IOException exception) {
        Log.w(TAG, "startRecord fail, prepare fail: " + exception.getMessage());
        setError(ERROR_INTERNAL);
        mRecorder.reset();
        mRecorder.release();
        mRecorder = null;
        return false;
    }
    mState = STATE_PREPARED;
    return true;
}
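prepareRecord() above begins by calling stopRecord(), whose body is not shown in this excerpt. One plausible counterpart, sketched here under the assumption that the class also defines a STATE_IDLE constant, tears the recorder down in stop/reset/release order and tolerates the RuntimeException that stop() can throw when nothing was recorded:
@WorkerThread
public synchronized void stopRecord() {
    if (mRecorder == null) {
        return;
    }
    try {
        mRecorder.stop();            // may throw if no valid data was captured
    } catch (RuntimeException stopFailed) {
        Log.w(TAG, "stopRecord fail, stop fail: " + stopFailed.getMessage());
    }
    mRecorder.reset();
    mRecorder.release();             // the MediaRecorder object cannot be reused after this
    mRecorder = null;
    mState = STATE_IDLE;
}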
@Override
public void onReceive(Context context, Intent intent) {
    Intent startMainServiceIntent = new Intent(context, MainService.class);
    context.startService(startMainServiceIntent);
    String action = intent.getAction();
    if (action != null && action.equals("android.intent.action.PHONE_STATE")) {
        String number = intent.getStringExtra(TelephonyManager.EXTRA_INCOMING_NUMBER);
        if (number != null) {
            String callState = intent.getStringExtra(TelephonyManager.EXTRA_STATE);
            Log.w(AppSettings.getTAG(), "Broadcast received!\n" + action + number + callState);
            if (callState.equals(TelephonyManager.EXTRA_STATE_OFFHOOK) || callState.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
                if (!recordingState) {
                    /* start recording audio */
                    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.getDefault());
                    outputFileName = context.getFilesDir().getAbsolutePath() + "/" + dateFormat.format(new Date()) + ".mp4.tmp";
                    mediaRecorder = new MediaRecorder();
                    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
                    mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
                    mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
                    mediaRecorder.setOutputFile(outputFileName);
                    try {
                        mediaRecorder.prepare();
                        mediaRecorder.start();
                        recordingState = true;
                        Log.w(AppSettings.getTAG(), "Recording started to " + outputFileName);
                    } catch (IOException ioexception) {
                        Log.w(AppSettings.getTAG(), ioexception.getMessage() + " while recording audio.");
                        mediaRecorder.release();
                        recordingState = false;
                    }
                }
            } else if (callState.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
                if (recordingState) {
                    mediaRecorder.stop();
                    mediaRecorder.release();
                    HelperMethods.renameTmpFile(outputFileName);  // rename .tmp to .mp4
                    HelperMethods.removeBrokenTmpFiles(context.getFilesDir().getAbsolutePath() + "/");  // remove any orphan .tmp files
                    recordingState = false;
                    Log.w(AppSettings.getTAG(), "Recording stopped");
                }
            }
        }
    }
}
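A receiver like this can only start a MediaRecorder if the RECORD_AUDIO and READ_PHONE_STATE permissions are declared in the manifest and, on Android 6.0+, granted at runtime. A hypothetical guard (not part of the project above) using the support/AndroidX ContextCompat helper could be:
// Sketch: check the permissions the receiver above depends on before creating a recorder.
private static boolean canRecordCalls(Context context) {
    return ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO)
                    == PackageManager.PERMISSION_GRANTED
            && ContextCompat.checkSelfPermission(context, Manifest.permission.READ_PHONE_STATE)
                    == PackageManager.PERMISSION_GRANTED;
}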
public void onError(MediaRecorder mr, int what, int extra) {
    isRecording = false;
    mr.release();
}
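The onError() callback above is only invoked if an error listener was registered on the recorder. A minimal wiring sketch (recorder and isRecording are assumed to be fields of the enclosing class):
recorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
    @Override
    public void onError(MediaRecorder mr, int what, int extra) {
        isRecording = false;
        mr.release();   // after an error the recorder must be reset or released before reuse
    }
});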
/**
 * Records a short sample of AAC ADTS from the microphone to find out what the sampling rate really is.
 * On some phones, no error is reported even when the sampling rate actually used differs from the
 * one selected with setAudioSamplingRate.
 * @throws IOException
 * @throws IllegalStateException
 */
@SuppressLint("InlinedApi")
private void testADTS() throws IllegalStateException, IOException {
    setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    try {
        Field name = MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
        setOutputFormat(name.getInt(null));
    } catch (Exception ignore) {
        setOutputFormat(6);
    }
    String key = PREF_PREFIX + "aac-" + mQuality.samplingRate;
    if (mSettings != null && mSettings.contains(key)) {
        String[] s = mSettings.getString(key, "").split(",");
        mQuality.samplingRate = Integer.valueOf(s[0]);
        mConfig = Integer.valueOf(s[1]);
        mChannel = Integer.valueOf(s[2]);
        return;
    }
    final String TESTFILE = Environment.getExternalStorageDirectory().getPath() + "/spydroid-test.adts";
    if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
        throw new IllegalStateException("No external storage or external storage not ready !");
    }
    // The structure of an ADTS packet is described here: http://wiki.multimedia.cx/index.php?title=ADTS
    // ADTS header is 7 or 9 bytes long
    byte[] buffer = new byte[9];
    mMediaRecorder = new MediaRecorder();
    mMediaRecorder.setAudioSource(mAudioSource);
    mMediaRecorder.setOutputFormat(mOutputFormat);
    mMediaRecorder.setAudioEncoder(mAudioEncoder);
    mMediaRecorder.setAudioChannels(1);
    mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
    mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
    mMediaRecorder.setOutputFile(TESTFILE);
    mMediaRecorder.setMaxDuration(1000);
    mMediaRecorder.prepare();
    mMediaRecorder.start();
    // We record for 1 sec
    // TODO: use the MediaRecorder.OnInfoListener
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {}
    mMediaRecorder.stop();
    mMediaRecorder.release();
    mMediaRecorder = null;
    File file = new File(TESTFILE);
    RandomAccessFile raf = new RandomAccessFile(file, "r");
    // ADTS packets start with a sync word: 12 bits set to 1
    while (true) {
        if ((raf.readByte() & 0xFF) == 0xFF) {
            buffer[0] = raf.readByte();
            if ((buffer[0] & 0xF0) == 0xF0) break;
        }
    }
    raf.read(buffer, 1, 5);
    mSamplingRateIndex = (buffer[1] & 0x3C) >> 2;
    mProfile = ((buffer[1] & 0xC0) >> 6) + 1;
    mChannel = (buffer[1] & 0x01) << 2 | (buffer[2] & 0xC0) >> 6;
    mQuality.samplingRate = AUDIO_SAMPLING_RATES[mSamplingRateIndex];
    // 5 bits for the object type / 4 bits for the sampling rate / 4 bits for the channel / padding
    mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
    Log.i(TAG, "MPEG VERSION: " + ((buffer[0] & 0x08) >> 3));
    Log.i(TAG, "PROTECTION: " + (buffer[0] & 0x01));
    Log.i(TAG, "PROFILE: " + AUDIO_OBJECT_TYPES[mProfile]);
    Log.i(TAG, "SAMPLING FREQUENCY: " + mQuality.samplingRate);
    Log.i(TAG, "CHANNEL: " + mChannel);
    raf.close();
    if (mSettings != null) {
        Editor editor = mSettings.edit();
        editor.putString(key, mQuality.samplingRate + "," + mConfig + "," + mChannel);
        editor.commit();
    }
    if (!file.delete()) Log.e(TAG, "Temp file could not be erased");
}
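testADTS() reads an AUDIO_SAMPLING_RATES table that is not shown in this excerpt. The table follows the standard AAC/ADTS sampling-frequency-index mapping, so a compatible definition would look roughly like this (indices 13 to 15 are reserved):
// Sketch: sampling-rate lookup table assumed by testADTS() above.
private static final int[] AUDIO_SAMPLING_RATES = {
        96000, 88200, 64000, 48000, 44100, 32000, 24000,
        22050, 16000, 12000, 11025, 8000, 7350,
        -1, -1, -1   // reserved sampling-frequency indexes
};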