android.media.MediaCodec#releaseOutputBuffer() 源码实例 Demo

下面列出了 android.media.MediaCodec#releaseOutputBuffer() 的实例代码,或者点击链接到 GitHub 查看源代码,也可以在右侧发表评论。


/**
 * Save the encoded (output) buffer into the complete encoded recording.
 *
 * <p>Per the MediaCodec contract the valid payload of an output buffer is
 * the region [info.offset, info.offset + info.size). The buffer's position
 * and limit are set to that region explicitly before copying, instead of
 * relying on the buffer's incoming position — which varies by device and
 * caused the BufferUnderflow noted below.
 *
 * TODO: copy directly (without the intermediate byte array)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer buffer = outputBuffers[index];
        Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
        // Device notes from earlier testing:
        // on N5: always size == remaining(), clearing is not needed
        // on SGS2: remaining decreases until it becomes less than size, which resulted in BufferUnderflow
        // (but SGS2 records only zeros anyway)
        if (info.offset + info.size <= buffer.capacity()) {
            // Constrain the buffer to exactly the encoded payload; this makes
            // the copy independent of whatever position the codec left behind.
            buffer.position(info.offset);
            buffer.limit(info.offset + info.size);
            final byte[] bufferCopied = new byte[info.size];
            buffer.get(bufferCopied);
            codec.releaseOutputBuffer(index, false);
            addEncoded(bufferCopied);
            if (Log.DEBUG) {
                AudioUtils.showSomeBytes("out", bufferCopied);
            }
        } else {
            Log.e("size > remaining");
            codec.releaseOutputBuffer(index, false);
        }
    }
}
 

/**
 * Save the encoded (output) buffer into the complete encoded recording.
 *
 * <p>Per the MediaCodec contract the valid payload of an output buffer is
 * the region [info.offset, info.offset + info.size). The buffer's position
 * and limit are set to that region explicitly before copying, instead of
 * relying on the buffer's incoming position — which varies by device and
 * caused the BufferUnderflow noted below.
 *
 * TODO: copy directly (without the intermediate byte array)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer buffer = outputBuffers[index];
        Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
        // Device notes from earlier testing:
        // on N5: always size == remaining(), clearing is not needed
        // on SGS2: remaining decreases until it becomes less than size, which resulted in BufferUnderflow
        // (but SGS2 records only zeros anyway)
        if (info.offset + info.size <= buffer.capacity()) {
            // Constrain the buffer to exactly the encoded payload; this makes
            // the copy independent of whatever position the codec left behind.
            buffer.position(info.offset);
            buffer.limit(info.offset + info.size);
            final byte[] bufferCopied = new byte[info.size];
            buffer.get(bufferCopied);
            codec.releaseOutputBuffer(index, false);
            addEncoded(bufferCopied);
            if (Log.DEBUG) {
                AudioUtils.showSomeBytes("out", bufferCopied);
            }
        } else {
            Log.e("size > remaining");
            codec.releaseOutputBuffer(index, false);
        }
    }
}
 

/**
 * Renders the output buffer with the specified index (API 21+ path).
 *
 * <p>NOTE(review): despite the "Timed" name and the {@code releaseTimeNs}
 * parameter, the buffer is released with the untimed two-argument overload
 * and the timestamp is ignored (the timed call is commented out). Confirm
 * whether this is an intentional device workaround before "fixing" it.
 */
@TargetApi(21)
private void renderOutputBufferTimedV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBufferTimed");
  codec.releaseOutputBuffer(bufferIndex,false);// releaseTimeNs);
  TraceUtil.endSection();
  codecCounters.renderedOutputBufferCount++;
  maybeNotifyDrawnToSurface();
}
 

/**
 * Handle reading encoded audio from a MediaCodec output buffer: prepends an
 * ADTS header so the AAC frame is self-describing, then writes the complete
 * packet to the converted-audio stream and returns the buffer to the codec.
 *
 * @param codec the codec that owns the output buffer
 * @param outputBufferId buffer index returned by dequeueOutputBuffer
 * @param info buffer metadata; offset/size describe the valid payload region
 * @return number of encoded bytes read (excluding the ADTS header)
 * @throws IOException if writing to the output stream fails
 */
private int dequeueCodecOutputBuffer(MediaCodec codec, int outputBufferId, MediaCodec.BufferInfo info) throws IOException {
    final int ADTS_HEADER_SIZE = 7; // fixed-length ADTS header (protection_absent=1, no CRC)
    int outBitsSize = info.size;
    int outPacketSize = outBitsSize + ADTS_HEADER_SIZE;
    ByteBuffer outBuf = codec.getOutputBuffer(outputBufferId);

    // Constrain the buffer to the valid region described by the BufferInfo.
    outBuf.position(info.offset);
    outBuf.limit(info.offset + outBitsSize);

    byte[] packet = new byte[outPacketSize];
    addADTStoPacket(packet, outPacketSize);
    outBuf.get(packet, ADTS_HEADER_SIZE, outBitsSize);

    convertedAudioWriteStream.write(packet, 0, outPacketSize);
    convertedAudioWriteStream.flush();

    outBuf.clear();
    codec.releaseOutputBuffer(outputBufferId, false);

    return outBitsSize;
}
 

/**
 * Returns an output buffer after the muxer is done with it.
 *
 * <p>No-op once the encoder has been released. When the format requires
 * buffering, {@code encodedData} is cleared and recycled into the per-track
 * muxer input queue and the codec's buffer is NOT released here — the
 * caller must release it separately. Otherwise the codec buffer at
 * {@code bufferIndex} is released directly.
 *
 * <p>NOTE(review): method name keeps its original spelling ("Bufer")
 * because callers elsewhere in this file reference it by that name.
 */
private void releaseOutputBufer(MediaCodec encoder, ByteBuffer encodedData, int bufferIndex, int trackIndex) {
    synchronized (mEncoderReleasedSync) {
        if (!mEncoderReleased) {
            if (formatRequiresBuffering()) {
                encodedData.clear();
                synchronized (mMuxerInputQueue) {
                    mMuxerInputQueue.get(trackIndex).add(encodedData);
                }
            } else {
                encoder.releaseOutputBuffer(bufferIndex, false);
            }
        }
    }
}
 

/**
 * Drains and discards every pending output buffer from the codec, returning
 * each one unrendered, until the codec reports INFO_TRY_AGAIN_LATER.
 */
private void flushMediaCodec(MediaCodec mc) {
	BufferInfo info = new BufferInfo();
	while (true) {
		// Wait at most one frame interval for the next output buffer.
		int bufferId = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (bufferId == MediaCodec.INFO_TRY_AGAIN_LATER) {
			break;
		}
		if (bufferId >= 0) {
			mc.releaseOutputBuffer(bufferId, false);
		}
	}
}
 

/**
 * Renders the output buffer with the specified index. This method is only called if the platform
 * API version of the device is less than 21.
 *
 * @param codec The codec that owns the output buffer.
 * @param index The index of the output buffer to render.
 * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
 */
protected void renderOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBuffer");
  // render == true: the buffer is forwarded to the output surface.
  codec.releaseOutputBuffer(index, true);
  TraceUtil.endSection();
  lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
  decoderCounters.renderedOutputBufferCount++;
  consecutiveDroppedFrameCount = 0;
  maybeNotifyRenderedFirstFrame();
}
 
源代码8 项目: K-Sonic   文件: MediaCodecVideoRenderer.java

/**
 * Renders the output buffer with the specified index at the given release
 * time, using the timed releaseOutputBuffer overload (API 21+ only).
 *
 * @param codec The codec that owns the output buffer.
 * @param bufferIndex The index of the output buffer to render.
 * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
 */
@TargetApi(21)
private void renderOutputBufferV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBuffer");
  codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
  TraceUtil.endSection();
  decoderCounters.renderedOutputBufferCount++;
  consecutiveDroppedFrameCount = 0;
  maybeNotifyRenderedFirstFrame();
}
 

/**
 * Writes one encoded sample to the muxer. Every path releases the codec
 * output buffer. Codec-config buffers, zero-length buffers, and packets
 * arriving before the muxer has started are dropped (but still released).
 */
@Override
public void writeSampleData(MediaCodec encoder, int trackIndex, int bufferIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    super.writeSampleData(encoder, trackIndex, bufferIndex, encodedData, bufferInfo);

    final boolean isCodecConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
    if (isCodecConfig) {
        // MediaMuxer gets the codec config info via the addTrack command
        if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
        encoder.releaseOutputBuffer(bufferIndex, false);
        return;
    }

    if (bufferInfo.size == 0) {
        if (VERBOSE) Log.d(TAG, "ignoring zero size buffer");
        encoder.releaseOutputBuffer(bufferIndex, false);
        return;
    }

    if (!mStarted) {
        Log.e(TAG, "writeSampleData called before muxer started. Ignoring packet. Track index: " + trackIndex + " tracks added: " + mNumTracks);
        encoder.releaseOutputBuffer(bufferIndex, false);
        return;
    }

    // Adjust the presentation time via getNextRelativePts before muxing.
    bufferInfo.presentationTimeUs = getNextRelativePts(bufferInfo.presentationTimeUs, trackIndex);
    mMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
    encoder.releaseOutputBuffer(bufferIndex, false);

    if (allTracksFinished()) {
        stop();
    }
}
 

/**
 * Writes one encoded sample, either synchronously or via a handler thread.
 *
 * <p>When the format requires buffering, the codec's output is copied into
 * a direct ByteBuffer (recycled from mMuxerInputQueue when one is
 * available), the codec buffer is released immediately, and the copy is
 * posted to the writer via MSG_WRITE_FRAME. Otherwise the sample is handed
 * straight to handleWriteSampleData.
 *
 * <p>NOTE(review): in the not-ready branch, releaseOutputBufer() with
 * buffering enabled queues {@code encodedData} — which here is the codec's
 * own output buffer — into the recycle queue, and the codec-side release is
 * done by the extra releaseOutputBuffer call below. Recycling the codec's
 * buffer looks suspect; verify against releaseOutputBufer's contract.
 */
@Override
public void writeSampleData(MediaCodec encoder, int trackIndex, int bufferIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
    synchronized (mReadyFence) {
        if (mReady) {
            ByteBuffer muxerInput;
            if (formatRequiresBuffering()) {
                // Copy encodedData into another ByteBuffer, recycling if possible
                Log.i("THIS IS THE ENCODED DATA", encodedData.toString());
                Log.i("THIS IS THE TRACK INDEX", String.valueOf(trackIndex));
                synchronized (mMuxerInputQueue) {
                    muxerInput = mMuxerInputQueue.get(trackIndex).isEmpty() ?
                            ByteBuffer.allocateDirect(encodedData.capacity()) : mMuxerInputQueue.get(trackIndex).remove();
                }
                muxerInput.put(encodedData);
                muxerInput.position(0);
                // Codec buffer can be returned as soon as the copy is made.
                encoder.releaseOutputBuffer(bufferIndex, false);
                mHandler.sendMessage(mHandler.obtainMessage(MSG_WRITE_FRAME,
                        new WritePacketData(encoder, trackIndex, bufferIndex, muxerInput, bufferInfo)));
            } else {
                handleWriteSampleData(encoder, trackIndex, bufferIndex, encodedData, bufferInfo);
            }

        } else {
            Log.w(TAG, "Dropping frame because Muxer not ready!");
            releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
            if (formatRequiresBuffering())
                encoder.releaseOutputBuffer(bufferIndex, false);
        }
    }
}
 
源代码11 项目: Telegram   文件: MediaCodecVideoRenderer.java

/**
 * Renders the output buffer with the specified index. This method is only called if the platform
 * API version of the device is 21 or later.
 *
 * @param codec The codec that owns the output buffer.
 * @param index The index of the output buffer to render.
 * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
 * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
 */
@TargetApi(21)
protected void renderOutputBufferV21(
    MediaCodec codec, int index, long presentationTimeUs, long releaseTimeNs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBuffer");
  // Timed overload: the frame is scheduled for display at releaseTimeNs.
  codec.releaseOutputBuffer(index, releaseTimeNs);
  TraceUtil.endSection();
  lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
  decoderCounters.renderedOutputBufferCount++;
  consecutiveDroppedFrameCount = 0;
  maybeNotifyRenderedFirstFrame();
}
 
源代码12 项目: MediaSDK   文件: MediaCodecAudioRenderer.java

/**
 * Processes one audio output buffer. Returns {@code true} when the buffer
 * has been fully handled and released back to the codec, {@code false}
 * when the audio sink could not yet accept it and it should be retried.
 */
@Override
protected boolean processOutputBuffer(
    long positionUs,
    long elapsedRealtimeUs,
    MediaCodec codec,
    ByteBuffer buffer,
    int bufferIndex,
    int bufferFlags,
    long bufferPresentationTimeUs,
    boolean isDecodeOnlyBuffer,
    boolean isLastBuffer,
    Format format)
    throws ExoPlaybackException {
  // Some codecs report a zero timestamp on the EOS buffer; substitute the
  // last queued input time so downstream timing stays monotonic.
  if (codecNeedsEosBufferTimestampWorkaround
      && bufferPresentationTimeUs == 0
      && (bufferFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0
      && lastInputTimeUs != C.TIME_UNSET) {
    bufferPresentationTimeUs = lastInputTimeUs;
  }

  if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
    codec.releaseOutputBuffer(bufferIndex, false);
    return true;
  }

  if (isDecodeOnlyBuffer) {
    // Buffer precedes the seek position: skip it without playing.
    codec.releaseOutputBuffer(bufferIndex, false);
    decoderCounters.skippedOutputBufferCount++;
    audioSink.handleDiscontinuity();
    return true;
  }

  try {
    // handleBuffer returns false when the sink is full; in that case the
    // buffer is kept and this method reports "not yet processed".
    if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
      codec.releaseOutputBuffer(bufferIndex, false);
      decoderCounters.renderedOutputBufferCount++;
      return true;
    }
  } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
    // TODO(internal: b/145658993) Use outputFormat instead.
    throw createRendererException(e, inputFormat);
  }
  return false;
}
 
源代码13 项目: heifreader   文件: HeifReader.java

/**
 * Decodes a single HEVC still image from {@code bitstream} and renders it
 * to {@code surface}, logging the total decode time.
 *
 * <p>NOTE(review): dequeueInputBuffer/-OutputBuffer are called with an
 * infinite timeout (-1); a misbehaving codec would hang this thread.
 * getInputBuffer may also return null in theory — confirm against the
 * supported devices.
 */
private static void renderHevcImage(ByteBuffer bitstream, ImageInfo info, Surface surface) {
    long beginTime = SystemClock.elapsedRealtimeNanos();

    // configure HEVC decoder
    MediaCodec decoder = configureDecoder(info, bitstream.limit(), surface);
    MediaFormat outputFormat = decoder.getOutputFormat();
    Log.d(TAG, "HEVC output-format=" + outputFormat);

    decoder.start();
    try {
        // set bitstream to decoder
        int inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        ByteBuffer inBuffer = decoder.getInputBuffer(inputBufferId);
        inBuffer.put(bitstream);
        decoder.queueInputBuffer(inputBufferId, 0, bitstream.limit(), 0, 0);

        // notify end of stream with an empty EOS-flagged buffer
        inputBufferId = decoder.dequeueInputBuffer(-1);
        if (inputBufferId < 0) {
            throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
        }
        decoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

        // get decoded image: loop until the first real output buffer, render
        // it to the surface, and stop (a still image has exactly one frame)
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int outputBufferId = decoder.dequeueOutputBuffer(bufferInfo, -1);
            if (outputBufferId >= 0) {
                decoder.releaseOutputBuffer(outputBufferId, true);
                break;
            } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                outputFormat = decoder.getOutputFormat();
                Log.d(TAG, "HEVC output-format=" + outputFormat);
            } else {
                Log.d(TAG, "HEVC dequeueOutputBuffer return " + outputBufferId);
            }
        }
        decoder.flush();
    } finally {
        decoder.stop();
        decoder.release();
    }
    long endTime = SystemClock.elapsedRealtimeNanos();
    Log.i(TAG, "HEVC decoding elapsed=" + (endTime - beginTime) / 1000000.f + "[msec]");
}
 
源代码14 项目: VIA-AI   文件: MediaPlayerGrabber.java

/**
 * Dequeues one decoded frame, paces delivery to roughly one frame per
 * 33 ms, hands the frame to the callback (as an Image on API 21+ when
 * available, otherwise as a raw ByteBuffer), then returns the buffer to
 * the codec. The buffer is now released even when it carries no data;
 * previously zero-size buffers were never released and leaked.
 */
private void frameDisplay(MediaCodec decoder) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

        int srcColorFormat = 0;
        int srcStride = 0;
        int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
        if (outputBufferId >= 0) {
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // NOTE(review): assigning false when EOS is seen looks inverted
                // given the field name — confirm against the read loop before
                // changing; behavior preserved here.
                sawOutputEOS = false;
            }

            boolean doRender = (info.size != 0);
            if (doRender) {
                outputFrameCount++;

                ByteBuffer b = null;
                Image image = null;

                if (Helper.isUpperThanAPI21()) {
                    // Prefer the Image API; fall back to the raw buffer plus
                    // per-buffer format metadata when no Image is available.
                    image = decoder.getOutputImage(outputBufferId);
                    if (image == null) {
                        b = decoder.getOutputBuffer(outputBufferId);
                        MediaFormat format = decoder.getOutputFormat(outputBufferId);
                        srcColorFormat = format.getInteger(KEY_COLOR_FORMAT);
                        srcStride = format.getInteger(KEY_STRIDE);
                    }
                } else {
                    b = outputByteBuffers[outputBufferId];
                }

                // Simple pacing: if the previous frame was delivered less than
                // 33 ms ago, wait out the remainder of the interval.
                if (time == -1) {
                    time = System.currentTimeMillis();
                } else {
                    long diff = (System.currentTimeMillis() - time);
                    if (diff < 33) {
                        waitMs((33 - diff));
                    }
                    time = System.currentTimeMillis();
                }

                if (callback != null) {
                    if (image != null) callback.onImageReady(image);
                    else if (b != null) callback.onFrameReady(b, info.offset, width, height, srcColorFormat, srcStride);
                }
            }

            // Always return the buffer to the codec — including zero-size
            // buffers, which were previously leaked.
            decoder.releaseOutputBuffer(outputBufferId, false);
        }
    }
 

/**
 * Work loop: feeds encoded samples from {@code extractor} into
 * {@code decoder} and drains decoded frames to {@code outputSurface},
 * saving up to MAX_FRAMES of them as PNG files.
 *
 * @param extractor extractor positioned at the video track
 * @param trackIndex index of the track being decoded
 * @param decoder started video decoder configured with the output surface
 * @param outputSurface surface wrapper used to await, draw, and save frames
 * @throws IOException if saving a frame fails
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
        CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;

    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");

        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer.  This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }

        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else { // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }

                boolean doRender = (info.size != 0);

                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);

                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }

    // Guard against division by zero when no frames were decoded at all.
    int numSaved = Math.min(MAX_FRAMES, decodeCount);
    if (numSaved > 0) {
        Log.d(TAG, "Saving " + numSaved + " frames took " +
            (frameSaveTime / numSaved / 1000) + " us per frame");
    } else {
        Log.d(TAG, "No frames were saved");
    }
}
 
源代码16 项目: LiveMultimedia   文件: AudioEncoder.java

/**
 * Discards one encoder output buffer: the encoded data is not consumed and
 * the buffer is returned to the codec without rendering.
 * ({@code outputBuffers} and {@code info} are accepted for signature parity
 * with the other dequeue handlers but are unused here.)
 */
private synchronized void dequeueOutputBuffer(
        MediaCodec codec, ByteBuffer[] outputBuffers,
        int index, MediaCodec.BufferInfo info) {
    codec.releaseOutputBuffer(index, false /* render */);
}
 
源代码17 项目: PhotoMovie   文件: AudioRecordThread.java

/**
 * When the audio rate must be changed, the track is first decoded to raw
 * PCM (decode -> change rate -> re-encode); this method performs the
 * decode step, writing the PCM data to {@code outPath}.
 *
 * @param decoder        unconfigured audio decoder; configured, started, and released here
 * @param extractor      extractor positioned at the audio track; released here
 * @param oriAudioFormat the track's original MediaFormat
 * @param outPath        destination file for the raw PCM data
 * @param endTimeUs      optional end timestamp; samples after it are treated as EOS (null = whole track)
 * @throws IOException if the PCM file cannot be written
 */
private void decodeToPCM(MediaCodec decoder, MediaExtractor extractor, MediaFormat oriAudioFormat, String outPath, Long endTimeUs) throws IOException {
    int maxBufferSize = getAudioMaxBufferSize(oriAudioFormat);
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Re-decode the audio frames so the rate can be adjusted afterwards.
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    ByteBuffer[] inputBuffers = null;
    ByteBuffer[] outputBuffers = null;
    // Pre-21 devices use the buffer-array API; fetch the arrays up front.
    // (dequeueInputBuffer never returns INFO_OUTPUT_BUFFERS_CHANGED, so the
    // previous lazy initialization inside the input loop left both arrays
    // null and would NPE on pre-21 devices.)
    if (Build.VERSION.SDK_INT < 21) {
        inputBuffers = decoder.getInputBuffers();
        outputBuffers = decoder.getOutputBuffers();
    }

    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= 21) {
                            inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        } else {
                            inputBuffer = inputBuffers[decodeInputIndex];
                        }
                        inputBuffer.put(buffer);
                        MLog.i(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }
                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // Output buffer array was invalidated (pre-21 API only).
                    outputBuffers = decoder.getOutputBuffers();
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    MLog.i(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    MLog.e(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    // Bitmask test: EOS may arrive combined with other flags,
                    // so an equality comparison could miss it.
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = null;
                        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                            decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        } else {
                            decodeOutputBuffer = outputBuffers[outputBufferIndex];
                        }
                        MLog.i(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}
 

/**
 * Runs checkBuffer and sendBuffer on one encoded output buffer, then
 * returns the buffer to the codec without rendering.
 *
 * @throws IllegalStateException if the codec is not in the executing state
 */
private void processOutput(@NonNull ByteBuffer byteBuffer, @NonNull MediaCodec mediaCodec,
    int outBufferIndex, @NonNull MediaCodec.BufferInfo bufferInfo) throws IllegalStateException {
  checkBuffer(byteBuffer, bufferInfo);
  sendBuffer(byteBuffer, bufferInfo);
  mediaCodec.releaseOutputBuffer(outBufferIndex, false);
}
 
源代码19 项目: MediaSDK   文件: MediaCodecVideoRenderer.java

/**
 * Skips the output buffer with the specified index: the buffer is returned
 * to the codec without being rendered, and the skip counter is incremented.
 *
 * @param codec The codec that owns the output buffer.
 * @param index The index of the output buffer to skip.
 * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
 */
protected void skipOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
  TraceUtil.beginSection("skipVideoBuffer");
  codec.releaseOutputBuffer(index, false);
  TraceUtil.endSection();
  decoderCounters.skippedOutputBufferCount++;
}
 

/**
 * Drops the output buffer with the specified index: the buffer is returned
 * to the codec without being rendered, and the dropped-buffer counters are
 * updated by one.
 *
 * @param codec The codec that owns the output buffer.
 * @param index The index of the output buffer to drop.
 * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
 */
protected void dropOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
  TraceUtil.beginSection("dropVideoBuffer");
  codec.releaseOutputBuffer(index, false);
  TraceUtil.endSection();
  updateDroppedBufferCounters(1);
}