下面列出了 android.media.MediaCodec#dequeueInputBuffer() 的实例代码,或者点击链接到 GitHub 查看源代码,也可以在右侧发表评论。
/**
 * Feeds one encoded sample from the extractor into the codec's input queue.
 *
 * @param codec codec whose input queue is filled
 * @param extractor source of encoded sample data
 * @param inputBuffers input buffer array obtained from codec.getInputBuffers()
 * @param presentationTimeUs presentation timestamp [microseconds] for the queued sample
 * @param isAudio unused here; kept for interface compatibility with callers
 * @return false when the extractor has no more data, true otherwise
 */
protected boolean internal_process_input(final MediaCodec codec, final MediaExtractor extractor, final ByteBuffer[] inputBuffers, final long presentationTimeUs, final boolean isAudio) {
    boolean result = true;
    while (mIsRunning) {
        final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER)
            break;
        if (inputBufIndex >= 0) {
            final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
            if (size > 0) {
                codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
            } else {
                // FIX: the dequeued buffer was previously never returned to the
                // codec when no data was read, leaking it. Return it empty with
                // the end-of-stream flag so the codec can finish draining.
                codec.queueInputBuffer(inputBufIndex, 0, 0, presentationTimeUs,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
            result = extractor.advance(); // false when no more data is available
            break;
        }
    }
    return result;
}
/**
 * Feeds one encoded sample from the extractor into the codec's input queue.
 *
 * @param codec codec whose input queue is filled
 * @param extractor source of encoded sample data
 * @param inputBuffers input buffer array obtained from codec.getInputBuffers()
 * @param presentationTimeUs presentation timestamp [microseconds] for the queued sample
 * @param isAudio unused here; kept for interface compatibility with callers
 * @return false when the extractor has no more data, true otherwise
 */
protected boolean internalProcessInput(final MediaCodec codec,
    final MediaExtractor extractor,
    final ByteBuffer[] inputBuffers,
    final long presentationTimeUs, final boolean isAudio) {

    boolean result = true;
    while (mIsRunning) {
        final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER)
            break;
        if (inputBufIndex >= 0) {
            final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
            if (size > 0) {
                codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
            } else {
                // FIX: the dequeued buffer was previously never returned to the
                // codec when no data was read, leaking it. Return it empty with
                // the end-of-stream flag so the codec can finish draining.
                codec.queueInputBuffer(inputBufIndex, 0, 0, presentationTimeUs,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
            result = extractor.advance(); // false when no more data is available
            break;
        }
    }
    return result;
}
/**
 * Feeds one encoded sample from the extractor into the codec's input queue.
 *
 * @param codec codec whose input queue is filled
 * @param extractor source of encoded sample data
 * @param inputBuffers input buffer array obtained from codec.getInputBuffers()
 * @param presentationTimeUs presentation timestamp [microseconds] for the queued sample
 * @param isAudio unused here; kept for interface compatibility with callers
 * @return false when the extractor has no more data, true otherwise
 */
protected boolean internal_process_input(final MediaCodec codec, final MediaExtractor extractor, final ByteBuffer[] inputBuffers, final long presentationTimeUs, final boolean isAudio) {
    boolean result = true;
    while (mIsRunning) {
        final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER)
            break;
        if (inputBufIndex >= 0) {
            final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
            if (size > 0) {
                codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
            } else {
                // FIX: the dequeued buffer was previously never returned to the
                // codec when no data was read, leaking it. Return it empty with
                // the end-of-stream flag so the codec can finish draining.
                codec.queueInputBuffer(inputBufIndex, 0, 0, presentationTimeUs,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
            result = extractor.advance(); // false when no more data is available
            break;
        }
    }
    return result;
}
/**
 * Reads one chunk of PCM data from the AudioRecord and submits it to the codec's
 * input queue, marking the buffer EOS when {@code running} is false.
 *
 * @param audioRecord active recorder supplying PCM data
 * @param audioRecordData scratch byte array sized for one read
 * @param mediaCodec encoder receiving the PCM data
 * @param codecInputBuffers input buffer array from mediaCodec.getInputBuffers()
 * @param running true while recording continues; false queues an EOS buffer
 */
private void handleCodecInput(AudioRecord audioRecord, byte[] audioRecordData,
                              MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
                              boolean running)
{
    int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
    // FIX: AudioRecord.read returns a negative error code on failure; queueing a
    // negative length would throw. Treat an error as "no data read".
    if (length < 0) {
        length = 0;
    }
    int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
    if (codecInputBufferIndex >= 0) {
        ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
        codecBuffer.clear();
        // FIX: only copy the bytes actually read this call, not the whole
        // (possibly stale) scratch array.
        codecBuffer.put(audioRecordData, 0, length);
        mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
}
/**
 * Reads one chunk of PCM data from the AudioRecord and submits it to the codec's
 * input queue, marking the buffer EOS when {@code running} is false.
 *
 * @param audioRecord active recorder supplying PCM data
 * @param audioRecordData scratch byte array sized for one read
 * @param mediaCodec encoder receiving the PCM data
 * @param codecInputBuffers input buffer array from mediaCodec.getInputBuffers()
 * @param running true while recording continues; false queues an EOS buffer
 */
private void handleCodecInput(AudioRecord audioRecord, byte[] audioRecordData,
                              MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
                              boolean running) {
    int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
    // FIX: AudioRecord.read returns a negative error code on failure; queueing a
    // negative length would throw. Treat an error as "no data read".
    if (length < 0) {
        length = 0;
    }
    int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
    if (codecInputBufferIndex >= 0) {
        ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
        codecBuffer.clear();
        // FIX: only copy the bytes actually read this call, not the whole
        // (possibly stale) scratch array.
        codecBuffer.put(audioRecordData, 0, length);
        mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
}
/**
 * Encodes a byte buffer on pre-Lollipop devices; delegates to encodeV21 on API 21+.
 *
 * @param encoder the encoder whose input queue receives the data
 * @param buffer data to encode; may be null when only a flag is to be sent
 * @param length number of bytes to submit; 0 or less sends BUFFER_FLAG_END_OF_STREAM
 * @param presentationTimeUs presentation timestamp [microseconds]
 */
private void encode(@NonNull final MediaCodec encoder,
    @Nullable final ByteBuffer buffer, final int length, final long presentationTimeUs) {

    if (BuildCheck.isLollipop()) {
        encodeV21(encoder, buffer, length, presentationTimeUs);
        return;
    }
    final ByteBuffer[] inputBuffers = encoder.getInputBuffers();
    while (isRecording() && !mIsEos) {
        final int ix = encoder.dequeueInputBuffer(TIMEOUT_USEC);
        if (ix < 0) {
            // INFO_TRY_AGAIN_LATER: dequeueInputBuffer already waited up to
            // TIMEOUT_USEC, so simply loop until a buffer becomes available.
            continue;
        }
        final ByteBuffer in = inputBuffers[ix];
        in.clear();
        if (buffer != null) {
            in.put(buffer);
        }
        if (length <= 0) {
            // A request of size 0 signals end of stream.
            mIsEos = true;
            encoder.queueInputBuffer(ix, 0, 0,
                presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            encoder.queueInputBuffer(ix, 0, length,
                presentationTimeUs, 0);
        }
        break;
    }
}
/**
 * Encodes a byte buffer on API 21 (Android 5) and later, using per-index
 * getInputBuffer instead of the deprecated buffer array.
 *
 * @param encoder the encoder whose input queue receives the data
 * @param buffer data to encode; may be null when only a flag is to be sent
 * @param length number of bytes to submit; 0 or less sends BUFFER_FLAG_END_OF_STREAM
 * @param presentationTimeUs presentation timestamp [microseconds]
 */
@SuppressLint("NewApi")
private void encodeV21(@NonNull final MediaCodec encoder,
    @Nullable final ByteBuffer buffer, final int length, final long presentationTimeUs) {

    while (isRecording() && !mIsEos) {
        final int ix = encoder.dequeueInputBuffer(TIMEOUT_USEC);
        if (ix < 0) {
            // INFO_TRY_AGAIN_LATER: dequeueInputBuffer already waited up to
            // TIMEOUT_USEC, so simply loop until a buffer becomes available.
            continue;
        }
        final ByteBuffer in = encoder.getInputBuffer(ix);
        in.clear();
        if (buffer != null) {
            in.put(buffer);
        }
        if (length <= 0) {
            // A request of size 0 signals end of stream.
            mIsEos = true;
            encoder.queueInputBuffer(ix, 0, 0,
                presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            encoder.queueInputBuffer(ix, 0, length,
                presentationTimeUs, 0);
        }
        break;
    }
}
/**
 * Reads one chunk of PCM data from the AudioRecord and submits it to the codec's
 * input queue, marking the buffer EOS when {@code running} is false.
 *
 * @param audioRecord active recorder supplying PCM data
 * @param audioRecordData scratch byte array sized for one read
 * @param mediaCodec encoder receiving the PCM data
 * @param codecInputBuffers input buffer array from mediaCodec.getInputBuffers()
 * @param running true while recording continues; false queues an EOS buffer
 */
private void handleCodecInput(AudioRecord audioRecord, byte[] audioRecordData,
                              MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
                              boolean running)
{
    int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
    // FIX: AudioRecord.read returns a negative error code on failure; queueing a
    // negative length would throw. Treat an error as "no data read".
    if (length < 0) {
        length = 0;
    }
    int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
    if (codecInputBufferIndex >= 0) {
        ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
        codecBuffer.clear();
        // FIX: only copy the bytes actually read this call, not the whole
        // (possibly stale) scratch array.
        codecBuffer.put(audioRecordData, 0, length);
        mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
}
/**
 * Decodes frames from the extractor onto the output surface and publishes up to
 * thumbnailCount bitmaps via the callback. Seeks forward through the stream after
 * each submitted sample so the thumbnails are spread across the duration.
 *
 * @param extractor positioned extractor supplying encoded video samples
 * @param decoder started decoder rendering to outputSurface
 * @param outputSurface GL-backed surface the decoded frames are drawn onto
 * @param outputWidth width [px] of the read-back pixel buffer and bitmaps
 * @param outputHeight height [px] of the read-back pixel buffer and bitmaps
 * @param duration stream duration used to compute seek targets
 * @param thumbnailCount number of thumbnails to extract
 * @param callback receives each bitmap; returning false aborts extraction
 * @throws TranscodingException declared for callers; not thrown directly here
 */
private static void doExtract(final @NonNull MediaExtractor extractor,
final @NonNull MediaCodec decoder,
final @NonNull OutputSurface outputSurface,
final int outputWidth, int outputHeight, long duration, int thumbnailCount,
final @NonNull Callback callback)
throws TranscodingException
{
final int TIMEOUT_USEC = 10000;
// Deprecated pre-API21 buffer array access.
final ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int samplesExtracted = 0;
int thumbnailsCreated = 0;
Log.i(TAG, "doExtract started");
// RGBA read-back buffer: 4 bytes per pixel, matching glReadPixels below.
final ByteBuffer pixelBuf = ByteBuffer.allocateDirect(outputWidth * outputHeight * 4);
pixelBuf.order(ByteOrder.LITTLE_ENDIAN);
boolean outputDone = false;
boolean inputDone = false;
while (!outputDone) {
// Input side: feed one sample per loop iteration until EOS is queued.
if (!inputDone) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
final int sampleSize = extractor.readSampleData(inputBuf, 0);
if (sampleSize < 0 || samplesExtracted >= thumbnailCount) {
// No more data, or enough samples submitted: send an empty EOS buffer.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
Log.i(TAG, "input done");
} else {
final long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs, 0 /*flags*/);
samplesExtracted++;
// Jump ahead to the next evenly-spaced target so only ~thumbnailCount
// samples are decoded instead of the whole stream.
extractor.seekTo(duration * samplesExtracted / thumbnailCount, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
Log.i(TAG, "seek to " + duration * samplesExtracted / thumbnailCount + ", actual " + extractor.getSampleTime());
}
}
}
// Output side: render each decoded frame and read its pixels back.
int outputBufIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (outputBufIndex >= 0) {
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
outputDone = true;
}
final boolean shouldRender = (info.size != 0) /*&& (info.presentationTimeUs >= duration * decodeCount / thumbnailCount)*/;
decoder.releaseOutputBuffer(outputBufIndex, shouldRender);
if (shouldRender) {
// Wait for the frame to arrive on the SurfaceTexture before reading pixels.
outputSurface.awaitNewImage();
outputSurface.drawImage();
if (thumbnailsCreated < thumbnailCount) {
pixelBuf.rewind();
GLES20.glReadPixels(0, 0, outputWidth, outputHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
final Bitmap bitmap = Bitmap.createBitmap(outputWidth, outputHeight, Bitmap.Config.ARGB_8888);
pixelBuf.rewind();
bitmap.copyPixelsFromBuffer(pixelBuf);
// Callback returning false aborts the whole extraction loop.
if (!callback.publishProgress(thumbnailsCreated, bitmap)) {
break;
}
Log.i(TAG, "publishProgress for frame " + thumbnailsCreated + " at " + info.presentationTimeUs + " (target " + duration * thumbnailsCreated / thumbnailCount + ")");
}
thumbnailsCreated++;
}
}
}
Log.i(TAG, "doExtract finished");
}
/**
 * Configures and starts the decoder, then feeds it samples from the extractor in a
 * loop while a separate display thread drains and renders the output buffers.
 * When the extractor runs out of data it seeks back to 0, so decoding loops until
 * stopDecode/stopDisplay or sawOutputEOS stops it.
 *
 * @param decoder unconfigured decoder; this method configures and starts it
 * @param extractor positioned extractor supplying encoded samples
 * @param mediaFormat format used to configure the decoder
 */
private void decodeFrames(final MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
boolean sawInputEOS = false;
sawOutputEOS = false;
decoder.configure(mediaFormat, null, null, 0);
decoder.start();
// Pre-API21 devices need the legacy buffer arrays.
if(!Helper.isUpperThanAPI21()) {
inputByteBuffers = decoder.getInputBuffers();
outputByteBuffers = decoder.getOutputBuffers();
}
while (!sawOutputEOS && !stopDecode) {
if (!sawInputEOS) {
int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = null;
if (Helper.isUpperThanAPI21()) {
inputBuffer = decoder.getInputBuffer(inputBufferId);
} else {
inputBuffer = inputByteBuffers[inputBufferId];
}
int sampleSize = extractor.readSampleData(inputBuffer, 0);
if (sampleSize < 0) {
// Extractor exhausted: queue an empty buffer (no EOS flag) and rewind
// to the start — presumably intended as looping playback.
decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, 0);
// NOTE(review): sawInputEOS is already false here, so this assignment is
// a no-op; the input side never actually stops — confirm this is intended.
sawInputEOS = false;
extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
} else {
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
extractor.advance();
}
}
}
// Lazily start a single display thread that drains/renders output buffers
// concurrently with this input loop.
if(displayThread==null) {
displayThread = new Thread(new Runnable() {
@Override
public void run() {
while (!sawOutputEOS && !stopDisplay) {
frameDisplay(decoder);
}
}
});
displayThread.start();
}
}
}
/**
 * Decodes the audio track supplied by {@code extractor} into raw PCM and writes it
 * to {@code outPath}. Changing the audio playback rate requires this
 * decode -> change rate -> re-encode pipeline.
 *
 * @param decoder unconfigured audio decoder; configured and started here, released in finally
 * @param extractor positioned extractor supplying encoded audio samples; released in finally
 * @param oriAudioFormat format of the source audio track
 * @param outPath destination path for the raw PCM file
 * @param endTimeUs optional cut-off timestamp [microseconds]; samples after it stop the input
 * @throws IOException if the output file cannot be created or written
 */
private void decodeToPCM(MediaCodec decoder, MediaExtractor extractor, MediaFormat oriAudioFormat, String outPath, Long endTimeUs) throws IOException {
    int maxBufferSize = getAudioMaxBufferSize(oriAudioFormat);
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Changing the audio rate requires re-decoding the audio frames.
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();
    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    ByteBuffer[] inputBuffers = null;
    ByteBuffer[] outputBuffers = null;
    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                // NOTE(review): INFO_OUTPUT_BUFFERS_CHANGED is normally reported by
                // dequeueOutputBuffer, not dequeueInputBuffer — kept as-is for
                // compatibility with the original pre-API21 path; verify against callers.
                if (Build.VERSION.SDK_INT < 21 && decodeInputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = decoder.getOutputBuffers();
                    inputBuffers = decoder.getInputBuffers();
                } else if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        // -1 means the extractor is exhausted.
                        eof = true;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }
                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= 21) {
                            inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        } else {
                            inputBuffer = inputBuffers[decodeInputIndex];
                        }
                        inputBuffer.put(buffer);
                        MLog.i(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }
                }
            }
            // Drain all currently available output buffers.
            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    MLog.i(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    MLog.e(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    ByteBuffer decodeOutputBuffer = null;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                    } else {
                        decodeOutputBuffer = outputBuffers[outputBufferIndex];
                    }
                    // FIX: write any decoded data first — the original dropped data
                    // carried on the final (EOS-flagged) buffer.
                    if (info.size > 0) {
                        MLog.i(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                    // FIX: flags is a bit field; the original compared with '=='
                    // which misses EOS combined with other flags.
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        decodeDone = true;
                    }
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}
/**
 * Decodes a single HEVC bitstream (one still image) and renders it to the surface.
 * Submits the whole bitstream as one input buffer, follows it with an EOS buffer,
 * then waits for the first decoded output buffer and releases it for rendering.
 *
 * @param bitstream complete HEVC bitstream; its limit is the submitted length
 * @param info image metadata used to configure the decoder
 * @param surface render target for the decoded image
 */
private static void renderHevcImage(ByteBuffer bitstream, ImageInfo info, Surface surface) {
    long beginTime = SystemClock.elapsedRealtimeNanos();
    // configure HEVC decoder
    MediaCodec decoder = configureDecoder(info, bitstream.limit(), surface);
    MediaFormat outputFormat = decoder.getOutputFormat();
    Log.d(TAG, "HEVC output-format=" + outputFormat);
    decoder.start();
    try {
        // Submit the entire bitstream as a single input buffer (blocking dequeue).
        int inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        decoder.getInputBuffer(inputBufferId).put(bitstream);
        decoder.queueInputBuffer(inputBufferId, 0, bitstream.limit(), 0, 0);
        // Follow with an empty end-of-stream buffer.
        inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        decoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        // Wait for the decoded image and hand it to the surface.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        boolean rendered = false;
        while (!rendered) {
            int outputBufferId = decoder.dequeueOutputBuffer(bufferInfo, -1);
            if (outputBufferId >= 0) {
                decoder.releaseOutputBuffer(outputBufferId, true);
                rendered = true;
            } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                outputFormat = decoder.getOutputFormat();
                Log.d(TAG, "HEVC output-format=" + outputFormat);
            } else {
                Log.d(TAG, "HEVC dequeueOutputBuffer return " + outputBufferId);
            }
        }
        decoder.flush();
    } finally {
        decoder.stop();
        decoder.release();
    }
    long endTime = SystemClock.elapsedRealtimeNanos();
    Log.i(TAG, "HEVC decoding elapsed=" + (endTime - beginTime) / 1000000.f + "[msec]");
}
/**
 * Does the actual work for encoding frames from buffers of byte[].
 * Generates one YUV frame per loop iteration, submits it to the encoder, and
 * drains encoder output after every submission; ends by queueing an empty
 * EOS buffer and draining the remainder.
 *
 * @param encoder started encoder fed with generated frames
 * @param encoderColorFormat color format passed through to generateFrame
 * @return always true
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
@SuppressLint("InlinedApi")
private boolean doEncodeDecodeVideoFromBuffer(MediaCodec encoder,
        int encoderColorFormat)
{
    final int TIMEOUT_USEC = 10000;
    final ByteBuffer[] inputBuffers = encoder.getInputBuffers();
    final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    // One full YUV 4:2:0 frame: 1.5 bytes per pixel.
    final byte[] frameData = new byte[mWidth * mHeight * 3 / 2];
    int frameIndex = 0;
    boolean inputDone = false;
    // Submit a new frame on every iteration so the encoder always has work to do.
    while (!inputDone)
    {
        final int bufferIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
        if (bufferIndex < 0)
        {
            // either all in use, or we timed out during initial setup
            if (VERBOSE)
                Log.i(TAG, "input buffer not available");
            continue;
        }
        final long ptsUsec = computePresentationTime(frameIndex);
        if (frameIndex >= frames.size())
        {
            // All frames submitted: send an empty frame with the EOS flag set.
            // Setting EOS on a frame that carries data would drop that frame
            // and the output would be short one frame.
            encoder.queueInputBuffer(bufferIndex, 0, 0, ptsUsec,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            inputDone = true;
            drainEncoder(true, bufferInfo);
        }
        else
        {
            try
            {
                generateFrame(frameIndex, encoderColorFormat, frameData);
            }
            catch (Exception e)
            {
                Log.i(TAG, "meet a different type of image");
                Arrays.fill(frameData, (byte) 0);
            }
            if (VERBOSE)
                Log.i(TAG, "generateIndex: " + frameIndex
                        + ", size: " + frames.size());
            // the buffer should be sized to hold one full frame
            final ByteBuffer inputBuf = inputBuffers[bufferIndex];
            inputBuf.clear();
            inputBuf.put(frameData);
            encoder.queueInputBuffer(bufferIndex, 0,
                    frameData.length, ptsUsec, 0);
            drainEncoder(false, bufferInfo);
        }
        frameIndex++;
    }
    return true;
}
/**
 * Decodes audio file into a raw file. This method accepts audio file formats with valid
 * headers (like .mp3, .mp4, and .wav).
 *
 * @param filepath Path of the file to decode.
 * @return Decoded raw audio file.
 * @throws IOException when file cannot be read.
 */
private File decode(String filepath) throws IOException
{
    // Set selected audio file as a source.
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(filepath);
    // Get audio format.
    MediaFormat format = extractor.getTrackFormat(0);
    String mime = format.getString(MediaFormat.KEY_MIME);
    // Cache necessary audio attributes.
    sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    // Create and configure decoder based on audio format.
    MediaCodec decoder = MediaCodec.createDecoderByType(mime);
    decoder.configure(format, null, null, 0);
    decoder.start();
    // Create input/output buffers.
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    extractor.selectTrack(0);
    File dst = new File(FileCons.SSJ_EXTERNAL_STORAGE + File.separator + "output.raw");
    FileOutputStream f = new FileOutputStream(dst);
    // FIX: the original leaked the stream, decoder and extractor on every path
    // (including exceptions); release them all in finally.
    try
    {
        boolean endOfStreamReached = false;
        while (true)
        {
            if (!endOfStreamReached)
            {
                int inputBufferIndex = decoder.dequeueInputBuffer(10 * 1000);
                if (inputBufferIndex >= 0)
                {
                    ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                    int sampleSize = extractor.readSampleData(inputBuffer, 0);
                    if (sampleSize < 0)
                    {
                        // Pass empty buffer and the end of stream flag to the codec.
                        decoder.queueInputBuffer(inputBufferIndex, 0, 0,
                                0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        endOfStreamReached = true;
                    }
                    else
                    {
                        // Pass data-filled buffer to the decoder.
                        decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
                                extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10 * 1000);
            if (outputBufferIndex >= 0)
            {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] data = new byte[bufferInfo.size];
                outputBuffer.get(data);
                outputBuffer.clear();
                if (data.length > 0)
                {
                    f.write(data, 0, data.length);
                }
                decoder.releaseOutputBuffer(outputBufferIndex, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                {
                    return dst;
                }
            }
            else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
            {
                // Pre-API21: output buffer array was invalidated, refetch it.
                outputBuffers = decoder.getOutputBuffers();
            }
        }
    }
    finally
    {
        f.close();
        decoder.stop();
        decoder.release();
        extractor.release();
    }
}
/**
 * Checks the video data.
 *
 * Feeds every chunk from inputData through the decoder onto the surface and
 * verifies each rendered frame's timestamp and pixel content.
 *
 * @param inputData recorded chunks to replay through the decoder
 * @param decoder started decoder rendering to the check surface
 * @param surface surface used to await/draw frames for per-frame pixel checks
 * @return the number of bad frames
 */
private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
final int TIMEOUT_USEC = 1000;
// Deprecated pre-API21 buffer array access.
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int inputChunk = 0;
int checkIndex = 0;
int badFrames = 0;
boolean outputDone = false;
boolean inputDone = false;
while (!outputDone) {
if (VERBOSE) Log.d(TAG, "check loop");
// Feed more data to the decoder.
if (!inputDone) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
if (inputChunk == inputData.getNumChunks()) {
// End of stream -- send empty frame with EOS flag set.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
if (VERBOSE) Log.d(TAG, "sent input EOS");
} else {
// Copy a chunk of input to the decoder. The first chunk should have
// the BUFFER_FLAG_CODEC_CONFIG flag set.
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
inputBuf.clear();
inputData.getChunkData(inputChunk, inputBuf);
int flags = inputData.getChunkFlags(inputChunk);
long time = inputData.getChunkTime(inputChunk);
// getChunkData leaves the buffer position at the chunk's length.
decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
time, flags);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
inputBuf.position() + " flags=" + flags);
}
inputChunk++;
}
} else {
if (VERBOSE) Log.d(TAG, "input buffer not available");
}
}
if (!outputDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (VERBOSE) Log.d(TAG, "no output from decoder available");
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
decoderOutputBuffers = decoder.getOutputBuffers();
if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
} else if (decoderStatus < 0) {
fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else { // decoderStatus >= 0
ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
" (size=" + info.size + ")");
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
// A zero-size buffer (e.g. the bare EOS buffer) has nothing to render.
boolean doRender = (info.size != 0);
// As soon as we call releaseOutputBuffer, the buffer will be forwarded
// to SurfaceTexture to convert to a texture. The API doesn't guarantee
// that the texture will be available before the call returns, so we
// need to wait for the onFrameAvailable callback to fire.
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
// Each rendered frame must carry the timestamp computed at encode time.
assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
info.presentationTimeUs);
surface.awaitNewImage();
surface.drawImage();
if (!checkSurfaceFrame(checkIndex++)) {
badFrames++;
}
}
}
}
}
return badFrames;
}
/**
 * Work loop.
 *
 * Decodes the selected track onto the output surface, saving the first
 * MAX_FRAMES rendered frames to PNG files and logging the average save time.
 *
 * @param extractor positioned extractor supplying encoded samples
 * @param trackIndex track expected to provide the samples (mismatches are logged)
 * @param decoder started decoder rendering to outputSurface
 * @param outputSurface surface used to await/draw/save decoded frames
 * @throws IOException if a frame file cannot be written
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
        CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;
    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer. This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }
        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else { // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }
                boolean doRender = (info.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);
                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }
    int numSaved = Math.min(MAX_FRAMES, decodeCount);
    // FIX: guard against division by zero when no frame was decoded/saved.
    if (numSaved > 0) {
        Log.d(TAG, "Saving " + numSaved + " frames took " +
                (frameSaveTime / numSaved / 1000) + " us per frame");
    } else {
        Log.d(TAG, "No frames were saved");
    }
}
/**
 * Work loop.
 *
 * Decodes the selected track onto the output surface, saving the first
 * MAX_FRAMES rendered frames to PNG files and logging the average save time.
 *
 * @param extractor positioned extractor supplying encoded samples
 * @param trackIndex track expected to provide the samples (mismatches are logged)
 * @param decoder started decoder rendering to outputSurface
 * @param outputSurface surface used to await/draw/save decoded frames
 * @throws IOException if a frame file cannot be written
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
        CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;
    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer. This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }
        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else { // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }
                boolean doRender = (info.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);
                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }
    int numSaved = Math.min(MAX_FRAMES, decodeCount);
    // FIX: guard against division by zero when no frame was decoded/saved.
    if (numSaved > 0) {
        Log.d(TAG, "Saving " + numSaved + " frames took " +
                (frameSaveTime / numSaved / 1000) + " us per frame");
    } else {
        Log.d(TAG, "No frames were saved");
    }
}
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 *
 * @param codec encoder to start, feed, drain, and finally stop/release
 * @param speechRecord recorder whose bytes are submitted via queueInputBuffer(...)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
codec.start();
// Getting some buffers (e.g. 4 of each) to communicate with the codec
ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
boolean doneSubmittingInput = false;
int numRetriesDequeueOutputBuffer = 0;
int index;
while (true) {
// Input side: submit recorder data until the helper reports EOS (-1).
if (!doneSubmittingInput) {
index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
if (index >= 0) {
int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
if (size == -1) {
codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
Log.i("enc: in: EOS");
doneSubmittingInput = true;
} else {
Log.i("enc: in: " + size);
mNumBytesSubmitted += size;
}
} else {
Log.i("enc: in: timeout, will try again");
}
}
// Output side: drain one buffer per loop iteration.
// NOTE(review): a fresh BufferInfo is allocated every iteration; hoisting it
// above the loop would avoid the per-iteration allocation.
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
Log.i("enc: out: flags/index: " + info.flags + "/" + index);
if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
// NOTE(review): this retry counter is never reset after a successful dequeue,
// so the cap is cumulative over the whole loop — confirm this is intended.
if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
break;
}
} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = codec.getOutputFormat();
Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// Buffer array was invalidated; refetch it (pre-API21 behavior).
codecOutputBuffers = codec.getOutputBuffers();
Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
} else {
dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
mNumBytesDequeued += info.size;
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.i("enc: out: EOS");
break;
}
}
}
codec.stop();
codec.release();
}
}
/**
 * Pumps raw audio from the given recorder through the given encoder until the
 * recorder signals end of data and the encoder emits its EOS buffer.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] inputBufs = codec.getInputBuffers();
        ByteBuffer[] outputBufs = codec.getOutputBuffers();
        Log.i("input buffers " + inputBufs.length + "; output buffers: " + outputBufs.length);
        boolean inputDone = false;
        int timeoutCount = 0;
        encode:
        for (;;) {
            // Feed the encoder one input buffer per pass until the recorder runs dry.
            if (!inputDone) {
                final int inBuf = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (inBuf < 0) {
                    Log.i("enc: in: timeout, will try again");
                } else {
                    final int bytesRead = queueInputBuffer(codec, inputBufs, inBuf, speechRecord);
                    if (bytesRead == -1) {
                        codec.queueInputBuffer(inBuf, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        inputDone = true;
                    } else {
                        Log.i("enc: in: " + bytesRead);
                        mNumBytesSubmitted += bytesRead;
                    }
                }
            }
            // Drain one output buffer (or status code) per pass.
            final MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
            final int outBuf = codec.dequeueOutputBuffer(bufInfo, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + bufInfo.flags + "/" + outBuf);
            switch (outBuf) {
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + timeoutCount);
                    if (++timeoutCount > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                        break encode;
                    }
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + codec.getOutputFormat().toString());
                    break;
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    outputBufs = codec.getOutputBuffers();
                    Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
                    break;
                default:
                    dequeueOutputBuffer(codec, outputBufs, outBuf, bufInfo);
                    mNumBytesDequeued += bufInfo.size;
                    if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.i("enc: out: EOS");
                        break encode;
                    }
                    break;
            }
        }
        codec.stop();
        codec.release();
    }
}
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 *
 * @param codec        configured encoder; this method calls start(), stop() and release() on it
 * @param speechRecord source of raw audio bytes, read via the queueInputBuffer() helper
 * @return 0 if the encoder's end-of-stream buffer was dequeued, -1 otherwise
 *         (SDK below JELLY_BEAN, or too many consecutive output-dequeue timeouts)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private int recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    int status = -1;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        try {
            // Getting some buffers (e.g. 4 of each) to communicate with the codec
            ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
            ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
            Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
            boolean doneSubmittingInput = false;
            int numDequeueOutputBufferTimeout = 0;
            int index;
            // Reused across iterations; dequeueOutputBuffer() overwrites its fields each call.
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            while (true) {
                if (!doneSubmittingInput) {
                    index = codec.dequeueInputBuffer(DEQUEUE_INPUT_BUFFER_TIMEOUT);
                    if (index >= 0) {
                        int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                        if (size == -1) {
                            // Recorder has no more data: signal EOS so the codec flushes.
                            codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            Log.i("enc: in: EOS");
                            doneSubmittingInput = true;
                        } else {
                            Log.i("enc: in: " + size);
                            mNumBytesSubmitted += size;
                        }
                    } else {
                        Log.i("enc: in: timeout, will try again");
                    }
                }
                index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT);
                Log.i("enc: out: flags/index: " + info.flags + "/" + index);
                if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    numDequeueOutputBufferTimeout++;
                    Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numDequeueOutputBufferTimeout);
                    // Give up only after MAX consecutive timeouts; the counter is reset
                    // whenever the codec actually produces output.
                    if (numDequeueOutputBufferTimeout > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                        break;
                    }
                } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat format = codec.getOutputFormat();
                    Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
                } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The buffer array is invalidated; refresh our reference.
                    codecOutputBuffers = codec.getOutputBuffers();
                    Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
                } else {
                    dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                    mNumBytesDequeued += info.size;
                    numDequeueOutputBufferTimeout = 0;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.i("enc: out: EOS");
                        status = 0;
                        break;
                    }
                }
            }
        } finally {
            // Release the codec even if a helper threw, otherwise the hardware
            // codec instance leaks until the process dies.
            codec.stop();
            codec.release();
            Log.i("stopped and released codec");
        }
    }
    return status;
}