The following Java examples show how android.media.MediaCodec#BUFFER_FLAG_SYNC_FRAME is used. They are drawn from open-source projects; the full sources are available on GitHub.
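Before the examples, a minimal sketch of the core idiom, assuming codec is an already-configured android.media.MediaCodec in encode mode (the variable names are illustrative):

MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(bufferInfo, 10000 /* timeout, µs */);
if (index >= 0) {
    // A set BUFFER_FLAG_SYNC_FRAME bit marks this output as a key (sync) frame.
    boolean isSyncFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    // ... consume the buffer (e.g. hand it to a MediaMuxer, starting on a sync frame) ...
    codec.releaseOutputBuffer(index, false);
}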
@Test
public void writeIFrameWhenInputDataIsAvailable() {
    passthroughTranscoder.sourceTrack = 0;
    passthroughTranscoder.targetTrack = 0;
    passthroughTranscoder.duration = DURATION;
    passthroughTranscoder.targetTrackAdded = true;

    int outputFlags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
    doReturn(0).when(mediaSource).getSampleTrackIndex();
    doReturn(BUFFER_SIZE).when(mediaSource).readSampleData(outputBuffer, 0);
    doReturn(SAMPLE_TIME).when(mediaSource).getSampleTime();
    doReturn(outputFlags).when(mediaSource).getSampleFlags();

    int result = passthroughTranscoder.processNextFrame();

    verify(outputBufferInfo).set(0, BUFFER_SIZE, SAMPLE_TIME, outputFlags);
    verify(mediaSource).advance();
    verify(mediaTarget).writeSampleData(0, outputBuffer, outputBufferInfo);
    assertThat(passthroughTranscoder.progress, is((float) SAMPLE_TIME / DURATION));
    assertThat(result, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
    assertThat(passthroughTranscoder.lastResult, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
}

@Test
public void keepProgressAtZeroWhenDurationIsNotAvailable() {
    passthroughTranscoder.sourceTrack = 0;
    passthroughTranscoder.targetTrack = 0;
    passthroughTranscoder.duration = 0;
    passthroughTranscoder.targetTrackAdded = true;

    int outputFlags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
    doReturn(0).when(mediaSource).getSampleTrackIndex();
    doReturn(BUFFER_SIZE).when(mediaSource).readSampleData(outputBuffer, 0);
    doReturn(SAMPLE_TIME).when(mediaSource).getSampleTime();
    doReturn(outputFlags).when(mediaSource).getSampleFlags();

    int result = passthroughTranscoder.processNextFrame();

    verify(outputBufferInfo).set(0, BUFFER_SIZE, SAMPLE_TIME, outputFlags);
    verify(mediaSource).advance();
    verify(mediaTarget).writeSampleData(0, outputBuffer, outputBufferInfo);
    assertThat(passthroughTranscoder.progress, is(0f));
    assertThat(result, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
    assertThat(passthroughTranscoder.lastResult, is(TrackTranscoder.RESULT_FRAME_PROCESSED));
}
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    // Only video tracks record sync frames here; for audio, isSyncFrame stays false.
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        // MP4 sync-sample (stss) entries are 1-based, hence samples.size() after the add.
        syncSamples.add(samples.size());
    }
    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    // Convert the delta from microseconds to track-timescale units, rounding to nearest.
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        // A sample's duration is the gap to the sample that follows it, so the
        // delta computed here is inserted just before the last duration entry.
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
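The rounding expression above converts a microsecond delta into MP4 track-timescale ticks. A quick worked example (the concrete values are illustrative, not from the source):

long timeScale = 90000L;  // assume a 90 kHz video track timescale
long deltaUs = 33333L;    // ~one frame at 30 fps
long ticks = (deltaUs * timeScale + 500000L) / 1000000L;  // = 3000 ticks, i.e. 33.33 ms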
/**
 * Returns the index of the oldest sync frame. Valid until the next add().
 * <p>
 * When sending output to a MediaMuxer, start here.
 */
public int getFirstIndex() {
    final int metaLen = mPacketStart.length;

    int index = mMetaTail;
    while (index != mMetaHead) {
        if ((mPacketFlags[index] & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            break;
        }
        index = (index + 1) % metaLen;
    }

    if (index == mMetaHead) {
        Log.w(TAG, "HEY: could not find sync frame in buffer");
        index = -1;
    }
    return index;
}
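Per the javadoc, a muxer should start consuming at this index. A minimal drain loop might look like the sketch below; getChunk() and getNextIndex() are assumed companion methods of the same circular buffer (they appear in Grafika's CircularEncoderBuffer, which this method resembles), and encBuffer, muxer, and videoTrack are illustrative names:

MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = encBuffer.getFirstIndex();  // -1 if no sync frame is currently buffered
while (index >= 0) {
    ByteBuffer chunk = encBuffer.getChunk(index, info);  // assumed: fills `info` for this packet
    muxer.writeSampleData(videoTrack, chunk, info);
    index = encBuffer.getNextIndex(index);  // assumed: returns -1 past the newest packet
}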
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }
    samplePresentationTimes.add(new SamplePresentationTime(samplePresentationTimes.size(),
            (bufferInfo.presentationTimeUs * timeScale + 500000L) / 1000000L));
}
/**
 * Returns false if the muxer has not started yet; the sample is re-posted to the
 * message queue (preserving its original order) to wait for the muxer to start,
 * which usually does not take long.
 * Without this re-posting logic, the generated video is broken on some phones
 * (e.g. the Huawei Honor 9): the first key frame is lost, and when the first video
 * frame is not a key frame, playback stutters until the next key frame arrives;
 * in addition, a key-frame interval shorter than the recording time can leave the
 * muxer unable to stop, which crashes.
 */
private boolean writeSampleData(int trackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    LogUtil.logd(TAG, "IFrame = " + ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0)
            + "\t\t size = " + bufferInfo.size);
    if (isMuxerStarted()) {
        mMediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
        return true;
    } else {
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            SystemClock.sleep(10);
        }
        return false;
    }
}
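The retry contract described above (return false; the caller re-posts the sample and tries again after the muxer starts) could be wired up roughly as below. The writer Handler, MSG_WRITE_SAMPLE, and the sample wrapper object are assumptions for illustration, not part of the original code:

// In the caller, on a Handler-driven writer thread:
if (!writeSampleData(trackIndex, encodedData, bufferInfo)) {
    // Muxer not started yet: re-post the same message so samples are
    // retried in their original order once the muxer is up.
    mWriterHandler.sendMessage(mWriterHandler.obtainMessage(MSG_WRITE_SAMPLE, sampleHolder));
}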
/**
 * Checks whether the given buffer holds a key frame.
 *
 * @param bufferInfo information about the video data
 * @return true if it is a key frame, false otherwise
 */
@SuppressWarnings("deprecation")
private boolean isKeyFrame(MediaCodec.BufferInfo bufferInfo) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
    } else {
        return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    }
}
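This helper branches on API level because BUFFER_FLAG_KEY_FRAME replaced the deprecated BUFFER_FLAG_SYNC_FRAME in API 21 (both constants have the value 1). Relatedly, a running encoder can be asked to produce a sync frame on demand; a minimal sketch, assuming mediaCodec is a started encoder:

// Ask the encoder to emit a sync frame "soon" (API 19+); the documented value is 0.
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(params);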
private OutputBufferInfo dequeueOutputBuffer() {
    checkOnMediaCodecThread();
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
        if (result >= 0) {
            // MediaCodec doesn't care about Buffer position/remaining/etc so we can
            // mess with them to get a slice and avoid having to pass extra
            // (BufferInfo-related) parameters back to C++.
            ByteBuffer outputBuffer = outputBuffers[result].duplicate();
            outputBuffer.position(info.offset);
            outputBuffer.limit(info.offset + info.size);
            boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            if (isKeyFrame) {
                Log.d(TAG, "Sync frame generated");
            }
            return new OutputBufferInfo(
                    result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
        } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mediaCodec.getOutputBuffers();
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
            return null;
        }
        throw new RuntimeException("dequeueOutputBuffer: " + result);
    } catch (IllegalStateException e) {
        Log.e(TAG, "dequeueOutputBuffer failed", e);
        return new OutputBufferInfo(-1, null, false, -1);
    }
}
@CalledByNativeUnchecked
OutputBufferInfo dequeueOutputBuffer() {
    checkOnMediaCodecThread();
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
        // Check if this is config frame and save configuration data.
        if (result >= 0) {
            boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
            if (isConfigFrame) {
                Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
                configData = ByteBuffer.allocateDirect(info.size);
                outputBuffers[result].position(info.offset);
                outputBuffers[result].limit(info.offset + info.size);
                configData.put(outputBuffers[result]);
                // Log few SPS header bytes to check profile and level.
                String spsData = "";
                for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) {
                    spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
                }
                Logging.d(TAG, spsData);
                // Release buffer back.
                mediaCodec.releaseOutputBuffer(result, false);
                // Query next output.
                result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            }
        }
        if (result >= 0) {
            // MediaCodec doesn't care about Buffer position/remaining/etc so we can
            // mess with them to get a slice and avoid having to pass extra
            // (BufferInfo-related) parameters back to C++.
            ByteBuffer outputBuffer = outputBuffers[result].duplicate();
            outputBuffer.position(info.offset);
            outputBuffer.limit(info.offset + info.size);
            reportEncodedFrame(info.size);
            // Check key frame flag.
            boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            if (isKeyFrame) {
                Logging.d(TAG, "Sync frame generated");
            }
            if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
                Logging.d(TAG, "Appending config frame of size " + configData.capacity()
                        + " to output buffer with offset " + info.offset + ", size " + info.size);
                // For H.264 key frame append SPS and PPS NALs at the start.
                ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
                configData.rewind();
                keyFrameBuffer.put(configData);
                keyFrameBuffer.put(outputBuffer);
                keyFrameBuffer.position(0);
                return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
            } else {
                return new OutputBufferInfo(
                        result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
            }
        } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mediaCodec.getOutputBuffers();
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            return dequeueOutputBuffer();
        } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
            return null;
        }
        throw new RuntimeException("dequeueOutputBuffer: " + result);
    } catch (IllegalStateException e) {
        Logging.e(TAG, "dequeueOutputBuffer failed", e);
        return new OutputBufferInfo(-1, null, false, -1);
    }
}
protected void deliverEncodedImage() {
    outputThreadChecker.checkIsOnValidThread();
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
        if (index < 0) {
            return;
        }

        ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
        codecOutputBuffer.position(info.offset);
        codecOutputBuffer.limit(info.offset + info.size);

        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
            configBuffer = ByteBuffer.allocateDirect(info.size);
            configBuffer.put(codecOutputBuffer);
        } else {
            bitrateAdjuster.reportEncodedFrame(info.size);
            if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
                updateBitrate();
            }

            final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            if (isKeyFrame) {
                Logging.d(TAG, "Sync frame generated");
            }

            final ByteBuffer frameBuffer;
            if (isKeyFrame && codecType == VideoCodecType.H264) {
                Logging.d(TAG,
                        "Prepending config frame of size " + configBuffer.capacity()
                                + " to output buffer with offset " + info.offset + ", size " + info.size);
                // For H.264 key frame prepend SPS and PPS NALs at the start.
                frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
                configBuffer.rewind();
                frameBuffer.put(configBuffer);
                frameBuffer.put(codecOutputBuffer);
                frameBuffer.rewind();
            } else {
                frameBuffer = codecOutputBuffer.slice();
            }

            final EncodedImage.FrameType frameType = isKeyFrame
                    ? EncodedImage.FrameType.VideoFrameKey
                    : EncodedImage.FrameType.VideoFrameDelta;

            EncodedImage.Builder builder = outputBuilders.poll();
            builder.setBuffer(frameBuffer).setFrameType(frameType);
            // TODO(mellem): Set codec-specific info.
            callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
        }
        codec.releaseOutputBuffer(index, false);
    } catch (IllegalStateException e) {
        Logging.e(TAG, "deliverOutput failed", e);
    }
}
private void handleWriteSampleData(MediaCodec encoder, int trackIndex, int bufferIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    super.writeSampleData(encoder, trackIndex, bufferIndex, encodedData, bufferInfo);
    mPacketCount++;

    // Don't write the samples directly if they're CODEC_CONFIG data,
    // or if the muxer has already shut down.
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        if (VERBOSE) Log.i(TAG, "handling BUFFER_FLAG_CODEC_CONFIG for track " + trackIndex);
        if (trackIndex == mVideoTrackIndex) {
            // Capture H.264 SPS + PPS Data
            if (VERBOSE) Log.i(TAG, "Capture SPS + PPS");
            captureH264MetaData(encodedData, bufferInfo);
            releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
            return;
        } else {
            if (VERBOSE) Log.i(TAG, "Ignoring audio CODEC_CONFIG");
            releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
            return;
        }
    }

    if (trackIndex == mAudioTrackIndex && formatRequiresADTS()) {
        addAdtsToByteBuffer(encodedData, bufferInfo);
    }

    // adjust the ByteBuffer values to match BufferInfo (not needed?)
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

    bufferInfo.presentationTimeUs = getNextRelativePts(bufferInfo.presentationTimeUs, trackIndex);

    if (VERBOSE) {
        Log.i(TAG, mPacketCount + " PTS " + bufferInfo.presentationTimeUs + " size: " + bufferInfo.size
                + " " + (trackIndex == mVideoTrackIndex ? "video " : "audio ")
                + (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) ? "keyframe" : "")
                + (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) ? " EOS" : ""));
    }

    if (DEBUG_PKTS) writePacketToFile(encodedData, bufferInfo);

    if (!allTracksFinished()) {
        if (trackIndex == mVideoTrackIndex && ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0)) {
            packageH264Keyframe(encodedData, bufferInfo);
            mFFmpeg.writeAVPacketFromEncodedData(mH264Keyframe, 1, bufferInfo.offset, bufferInfo.size + mH264MetaSize, bufferInfo.flags, bufferInfo.presentationTimeUs);
        } else {
            mFFmpeg.writeAVPacketFromEncodedData(encodedData, (trackIndex == mVideoTrackIndex ? 1 : 0), bufferInfo.offset, bufferInfo.size, bufferInfo.flags, bufferInfo.presentationTimeUs);
        }
    }
    releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);

    if (allTracksFinished()) {
        /*if (VERBOSE) */ Log.i(TAG, "Shutting down on last frame");
        handleForceStop();
    }
}
private void deliverEncodedImage() {
    try {
        int index = mediaCodec.dequeueOutputBuffer(outputBufferInfo, OUTPUT_THREAD_DEQUEUE_TIMEOUT_US);
        if (index < 0) {
            if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                callback.onOutputFormatChanged(mediaCodec, mediaCodec.getOutputFormat());
            }
            return;
        }

        ByteBuffer codecOutputBuffer = mediaCodec.getOutputBuffers()[index];
        codecOutputBuffer.position(outputBufferInfo.offset);
        codecOutputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);

        if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            Logging.d(TAG, "Config frame generated. Offset: " + outputBufferInfo.offset
                    + ". Size: " + outputBufferInfo.size);
            configData = ByteBuffer.allocateDirect(outputBufferInfo.size);
            configData.put(codecOutputBuffer);
            // Log few SPS header bytes to check profile and level.
            String spsData = "";
            for (int i = 0; i < (outputBufferInfo.size < 8 ? outputBufferInfo.size : 8); i++) {
                spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
            }
            Logging.d(TAG, spsData);
        } else {
            reportEncodedFrame(outputBufferInfo.size);
            // Check key frame flag.
            boolean isKeyFrame = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
                // For H.264 key frame append SPS and PPS NALs at the start.
                if (keyFrameData.capacity() < configData.capacity() + outputBufferInfo.size) {
                    // Allocate double size.
                    int newSize = Math.max(keyFrameData.capacity() * 2,
                            configData.capacity() + outputBufferInfo.size);
                    keyFrameData = ByteBuffer.allocateDirect(newSize);
                }
                keyFrameData.position(0);
                configData.rewind();
                keyFrameData.put(configData);
                keyFrameData.put(codecOutputBuffer);
                keyFrameData.position(0);
                outputFrame.fill(index, keyFrameData, configData.capacity() + outputBufferInfo.size,
                        isKeyFrame, outputBufferInfo.presentationTimeUs);
                callback.onEncodedFrame(outputFrame, outputBufferInfo);
                releaseOutputBuffer(index);
            } else {
                outputFrame.fill(index, codecOutputBuffer, outputBufferInfo.size, isKeyFrame,
                        outputBufferInfo.presentationTimeUs);
                callback.onEncodedFrame(outputFrame, outputBufferInfo);
                releaseOutputBuffer(index);
            }
        }
    } catch (IllegalStateException e) {
        Logging.e(TAG, "deliverOutput failed", e);
    }
}
protected boolean process() throws IOException {
    mMediaExtractor = new MediaExtractor();
    mMediaExtractor.setDataSource(SDCARD_PATH + "/input.mp4");

    int mVideoTrackIndex = -1;
    int framerate = 0;
    for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
        MediaFormat format = mMediaExtractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (!mime.startsWith("video/")) {
            continue;
        }
        framerate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
        mMediaExtractor.selectTrack(i);
        mMediaMuxer = new MediaMuxer(SDCARD_PATH + "/output.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
        mVideoTrackIndex = mMediaMuxer.addTrack(format);
        mMediaMuxer.start();
    }
    if (mMediaMuxer == null) {
        return false;
    }

    BufferInfo info = new BufferInfo();
    info.presentationTimeUs = 0;
    ByteBuffer buffer = ByteBuffer.allocate(500 * 1024);
    while (true) {
        int sampleSize = mMediaExtractor.readSampleData(buffer, 0);
        if (sampleSize < 0) {
            break;
        }
        mMediaExtractor.advance();
        info.offset = 0;
        info.size = sampleSize;
        // Marks every sample as a sync frame; only valid if each input frame is a key frame.
        info.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
        info.presentationTimeUs += 1000 * 1000 / framerate;
        mMediaMuxer.writeSampleData(mVideoTrackIndex, buffer, info);
    }

    mMediaExtractor.release();
    mMediaMuxer.stop();
    mMediaMuxer.release();
    return true;
}
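One caveat with this last example: it stamps every sample with BUFFER_FLAG_SYNC_FRAME and synthesizes timestamps from the nominal frame rate, which is only correct when every input frame really is a key frame. A more faithful remux (a sketch reusing the variable names above) copies the extractor's own metadata, read before advance():

// Inside the copy loop, after readSampleData() and before mMediaExtractor.advance():
info.offset = 0;
info.size = sampleSize;
info.presentationTimeUs = mMediaExtractor.getSampleTime();
// MediaExtractor.SAMPLE_FLAG_SYNC has the same value as BUFFER_FLAG_SYNC_FRAME,
// so the extractor's flags can be passed through to the muxer.
info.flags = mMediaExtractor.getSampleFlags();
mMediaExtractor.advance();
mMediaMuxer.writeSampleData(mVideoTrackIndex, buffer, info);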