android.media.MediaExtractor#advance() Source Code Examples

The following examples show how android.media.MediaExtractor#advance() is used in open-source projects; the full source of each file is available on GitHub.
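
All of the examples below follow the same basic pattern: select a track, then loop over readSampleData(), getSampleTime()/getSampleFlags() and advance() until no sample is left. As a reference point, here is a minimal sketch of that loop; the class name, the hard-coded buffer size and the choice of the first video track are illustrative assumptions, not taken from any of the projects below.

import android.media.MediaExtractor;
import android.media.MediaFormat;

import java.io.IOException;
import java.nio.ByteBuffer;

public class AdvanceLoopSketch {

    /** Counts the samples of the first video track, or returns -1 if there is none. */
    public static int countVideoSamples(String path) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(path);
        try {
            int track = -1;
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                String mime = extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
                if (mime != null && mime.startsWith("video/")) {
                    track = i;
                    break;
                }
            }
            if (track < 0) {
                return -1;
            }
            extractor.selectTrack(track);
            // Assumed large enough for one sample; production code should size this from KEY_MAX_INPUT_SIZE.
            ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 1024);
            int samples = 0;
            while (extractor.readSampleData(buffer, 0) >= 0) {
                samples++;
                // advance() returns false once no further samples are available.
                if (!extractor.advance()) {
                    break;
                }
            }
            return samples;
        } finally {
            extractor.release();
        }
    }
}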

Example 1, Project: AudioVideoPlayerSample, File: MediaVideoPlayer.java
/**
	 * @param codec the decoder to feed
	 * @param extractor the MediaExtractor to read samples from
	 * @param inputBuffers the codec's input buffers
	 * @param presentationTimeUs presentation timestamp, in microseconds, passed to queueInputBuffer
	 * @param isAudio true when this call is for the audio track
	 * @return false if the extractor has no more samples
	 */
	protected boolean internalProcessInput(final MediaCodec codec,
		final MediaExtractor extractor,
		final ByteBuffer[] inputBuffers,
		final long presentationTimeUs, final boolean isAudio) {

//		if (DEBUG) Log.v(TAG, "internalProcessInput:presentationTimeUs=" + presentationTimeUs);
		boolean result = true;
		while (mIsRunning) {
            final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER)
            	break;
            if (inputBufIndex >= 0) {
                final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
                if (size > 0) {
                	codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
                }
            	result = extractor.advance();	// return false if no data is available
                break;
            }
		}
		return result;
	}
 
Example 2, Project: VideoProcessor, File: VideoUtil.java
public static float getAveFrameRate(VideoProcessor.MediaSource mediaSource) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    mediaSource.setDataSource(extractor);
    int trackIndex = VideoUtil.selectTrack(extractor, false);
    extractor.selectTrack(trackIndex);
    long lastSampleTimeUs = 0;
    int frameCount = 0;
    while (true) {
        long sampleTime = extractor.getSampleTime();
        if (sampleTime < 0) {
            break;
        } else {
            lastSampleTimeUs = sampleTime;
        }
        frameCount++;
        extractor.advance();
    }
    extractor.release();
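    // average fps = total frame count / last sample timestamp (converted from microseconds to seconds)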
    return frameCount / (lastSampleTimeUs / 1000f / 1000f);
}
 
Example 3, Project: libcommon, File: MediaMoviePlayer.java
/**
	 * @param codec the decoder to feed
	 * @param extractor the MediaExtractor to read samples from
	 * @param inputBuffers the codec's input buffers
	 * @param presentationTimeUs presentation timestamp, in microseconds, passed to queueInputBuffer
	 * @param isAudio true when this call is for the audio track
	 * @return false if the extractor has no more samples
	 */
	protected boolean internal_process_input(final MediaCodec codec, final MediaExtractor extractor, final ByteBuffer[] inputBuffers, final long presentationTimeUs, final boolean isAudio) {
//		if (DEBUG) Log.v(TAG, "internal_process_input:presentationTimeUs=" + presentationTimeUs);
		boolean result = true;
		while (mIsRunning) {
            final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER)
            	break;
            if (inputBufIndex >= 0) {
                final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
                if (size > 0) {
                	codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
                }
            	result = extractor.advance();	// return false if no data is available
                break;
            }
		}
		return result;
	}
 
Example 4, Project: sdl_java_suite, File: BaseAudioDecoder.java
protected MediaCodec.BufferInfo onInputBufferAvailable(@NonNull MediaExtractor extractor, @NonNull ByteBuffer inputBuffer) {
    long sampleTime = extractor.getSampleTime();
    int counter = 0;
    int maxresult = 0;
    int result;
    boolean advanced = false;

    do {
        result = extractor.readSampleData(inputBuffer, counter);
        if (result >= 0) {
            advanced = extractor.advance();
            maxresult = Math.max(maxresult, result);
            counter += result;
        }
    } while (result >= 0 && advanced && inputBuffer.capacity() - inputBuffer.limit() > maxresult);
    // the remaining capacity should be more than enough for another sample data block

    // queue the input buffer. At end of file counter will be 0 and flags marks end of stream
    // offset MUST be 0. The output buffer code cannot handle offsets
    // result < 0 means the end of the file input is reached
    int flags = advanced ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM;

    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    bufferInfo.set(0, counter, sampleTime, flags);

    return bufferInfo;
}
 
Example 5, Project: VideoProcessor, File: VideoUtil.java
/**
 * Computes the bitrate to use when producing an all-key-frame version of a video.
 *
 * @return the estimated bitrate for the all-key-frame video
 */
public static int getBitrateForAllKeyFrameVideo(VideoProcessor.MediaSource input) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    input.setDataSource(extractor);
    int trackIndex = VideoUtil.selectTrack(extractor, false);
    extractor.selectTrack(trackIndex);
    int keyFrameCount = 0;
    int frameCount = 0;
    while (true) {
        int flags = extractor.getSampleFlags();
        if (flags > 0 && (flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
            keyFrameCount++;
        }
        long sampleTime = extractor.getSampleTime();
        if (sampleTime < 0) {
            break;
        }
        frameCount++;
        extractor.advance();
    }
    extractor.release();
    float bitrateMultiple = (frameCount - keyFrameCount) / (float) keyFrameCount + 1;
    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    input.setDataSource(retriever);
    int oriBitrate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
    retriever.release();
    if (frameCount == keyFrameCount) {
        return oriBitrate;
    }
    return (int) (bitrateMultiple * oriBitrate);
}
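
For intuition about the multiple computed above: a clip with 300 frames of which 10 are key frames gives bitrateMultiple = (300 - 10) / 10 + 1 = 30, so the all-key-frame bitrate is estimated at 30 times the original bitrate (the numbers here are only an illustration).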
 
Example 6, Project: VideoProcessor, File: AudioUtil.java
/**
 * Strips the audio track from a video, writing a video-only copy to outPath.
 */
public static void removeAudioTrack(String videoPath, String outPath) throws IOException {
    MediaExtractor videoExtractor = new MediaExtractor();
    videoExtractor.setDataSource(videoPath);
    try {
        int videoTrack = VideoUtil.selectTrack(videoExtractor, false);

        videoExtractor.selectTrack(videoTrack);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(videoTrack);

        MediaMuxer mediaMuxer = new MediaMuxer(outPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int muxerVideoTrackIndex = mediaMuxer.addTrack(videoFormat);
        mediaMuxer.start();

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // copy the video samples
        int maxBufferSize = videoFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        ByteBuffer videoBuffer = ByteBuffer.allocateDirect(maxBufferSize);
        while (true) {
            long sampleTimeUs = videoExtractor.getSampleTime();
            if (sampleTimeUs == -1) {
                break;
            }
            int flags = videoExtractor.getSampleFlags();
            int size = videoExtractor.readSampleData(videoBuffer, 0);
            info.presentationTimeUs = sampleTimeUs;
            info.flags = flags;
            info.size = size;
            mediaMuxer.writeSampleData(muxerVideoTrackIndex, videoBuffer, info);
            videoExtractor.advance();
        }
        mediaMuxer.stop();
        mediaMuxer.release();
    } finally {
        videoExtractor.release();
    }
}
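
A call site for this helper might look like the following sketch (the paths are placeholders, and the enclosing class is assumed to be AudioUtil as the file name suggests):

try {
    AudioUtil.removeAudioTrack("/sdcard/input.mp4", "/sdcard/output_noaudio.mp4");
} catch (IOException e) {
    Log.e("AudioUtil", "failed to strip the audio track", e);
}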
 
Example 7, Project: VIA-AI, File: MediaPlayerGrabber.java
private void decodeFrames(final MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
    boolean sawInputEOS = false;
    sawOutputEOS = false;
    decoder.configure(mediaFormat, null, null, 0);
    decoder.start();

    if(!Helper.isUpperThanAPI21()) {
        inputByteBuffers = decoder.getInputBuffers();
        outputByteBuffers = decoder.getOutputBuffers();
    }

    while (!sawOutputEOS && !stopDecode) {
        if (!sawInputEOS) {
            int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
            if (inputBufferId >= 0) {
                ByteBuffer inputBuffer = null;
                if (Helper.isUpperThanAPI21()) {
                    inputBuffer = decoder.getInputBuffer(inputBufferId);
                } else {
                    inputBuffer = inputByteBuffers[inputBufferId];
                }

                int sampleSize = extractor.readSampleData(inputBuffer, 0);
                if (sampleSize < 0) {
                    decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, 0);
                    sawInputEOS = false;
                    extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
                } else {
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
                    extractor.advance();
                }
            }

        }

        if(displayThread==null) {
            displayThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    while (!sawOutputEOS && !stopDisplay) {
                        frameDisplay(decoder);
                    }
                }
            });
            displayThread.start();
        }
    }

}
 
Example 8

private boolean muxVideoAndAudio(String videoFilePath, String audioFilePath, String outputFilePath) {
    try {
        File file = new File(outputFilePath);
        file.createNewFile();
        MediaExtractor videoExtractor = new MediaExtractor();
        videoExtractor.setDataSource(videoFilePath);
        MediaExtractor audioExtractor = new MediaExtractor();
        audioExtractor.setDataSource(audioFilePath);
        MediaMuxer muxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);
        boolean sawEOS = false;
        int offset = 100;
        int sampleSize = 2048 * 1024;
        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                sawEOS = true;
                videoBufferInfo.size = 0;
            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();
            }
        }

        boolean sawEOS2 = false;
        while (!sawEOS2) {
            audioBufferInfo.offset = offset;
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                sawEOS2 = true;
                audioBufferInfo.size = 0;
            } else {
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

            }
        }

        try {
            muxer.stop();
            muxer.release();
        } catch (IllegalStateException ignore) {}
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }

    return true;
}
 
Example 9, Project: deltachat-android, File: VideoRecoder.java
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
  int trackIndex = selectTrack(extractor, isAudio);
  if (trackIndex >= 0) {
    extractor.selectTrack(trackIndex);
    MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
    int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
    int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    boolean inputDone = false;
    if (start > 0) {
      extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    } else {
      extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    }
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    long startTime = -1;

    checkConversionCanceled();
    long lastTimestamp = -100;

    while (!inputDone) {
      checkConversionCanceled();

      boolean eof = false;
      int index = extractor.getSampleTrackIndex();
      if (index == trackIndex) {
        info.size = extractor.readSampleData(buffer, 0);
        if (info.size >= 0) {
          info.presentationTimeUs = extractor.getSampleTime();
        } else {
          info.size = 0;
          eof = true;
        }

        if (info.size > 0 && !eof) {
          if (start > 0 && startTime == -1) {
            startTime = info.presentationTimeUs;
          }
          if (end < 0 || info.presentationTimeUs < end) {
            if (info.presentationTimeUs > lastTimestamp) {
              info.offset = 0;
              info.flags = extractor.getSampleFlags();
              if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                //didWriteData(messageObject, file, false, false);
              }
            }
            lastTimestamp = info.presentationTimeUs;
          } else {
            eof = true;
          }
        }
        if (!eof) {
          extractor.advance();
        }
      } else if (index == -1) {
        eof = true;
      } else {
        extractor.advance();
      }
      if (eof) {
        inputDone = true;
      }
    }

    extractor.unselectTrack(trackIndex);
    return startTime;
  }
  return -1;
}
 
Example 10, Project: VideoCompressor, File: VideoController.java
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 
Example 11

/**
 * Work loop.
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
        CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;

    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");

        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer.  This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }

        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else { // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }

                boolean doRender = (info.size != 0);

                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);

                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }

    int numSaved = (MAX_FRAMES < decodeCount) ? MAX_FRAMES : decodeCount;
    Log.d(TAG, "Saving " + numSaved + " frames took " +
        (frameSaveTime / numSaved / 1000) + " us per frame");
}
 
Example 12, Project: Android, File: MainActivity.java
protected boolean process() throws IOException {

        mMediaExtractor = new MediaExtractor();          
        mMediaExtractor.setDataSource(SDCARD_PATH+"/input.mp4");                
                
        int mVideoTrackIndex = -1;
        int framerate = 0;
        for(int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
            MediaFormat format = mMediaExtractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if(!mime.startsWith("video/")) {                
                continue;
            }
            framerate = format.getInteger(MediaFormat.KEY_FRAME_RATE);            
            mMediaExtractor.selectTrack(i);
            mMediaMuxer = new MediaMuxer(SDCARD_PATH+"/ouput.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
            mVideoTrackIndex = mMediaMuxer.addTrack(format);  
            mMediaMuxer.start();
        }
        
        if(mMediaMuxer == null) {
            return false;
        }
        
        BufferInfo info = new BufferInfo();
        info.presentationTimeUs = 0;
        ByteBuffer buffer = ByteBuffer.allocate(500*1024);        
        while(true) {
            int sampleSize = mMediaExtractor.readSampleData(buffer, 0);
            if(sampleSize < 0) {
                break;
            }
            mMediaExtractor.advance();
            info.offset = 0;
            info.size = sampleSize;
            info.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;        
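            // timestamps are synthesized from the declared frame rate instead of extractor.getSampleTime()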
            info.presentationTimeUs += 1000*1000/framerate;
            mMediaMuxer.writeSampleData(mVideoTrackIndex,buffer,info);
        }

        mMediaExtractor.release();
        
        mMediaMuxer.stop();
        mMediaMuxer.release();
        
        return true;
    }
 
Example 13, Project: VideoProcessor, File: VideoProcessor.java
/**
 * Checks the video first: if it is not already all key frames, re-encodes it so that every frame is a key frame, and then reverses it.
 */
public static void reverseVideo(Context context, MediaSource input, String output, boolean reverseAudio, @Nullable VideoProgressListener listener) throws Exception {
    File tempFile = new File(context.getCacheDir(), System.currentTimeMillis() + ".temp");
    File temp2File = new File(context.getCacheDir(), System.currentTimeMillis() + ".temp2");
    try {
        MediaExtractor extractor = new MediaExtractor();
        input.setDataSource(extractor);
        int trackIndex = VideoUtil.selectTrack(extractor, false);
        extractor.selectTrack(trackIndex);
        int keyFrameCount = 0;
        int frameCount = 0;
        List<Long> frameTimeStamps = new ArrayList<>();
        while (true) {
            int flags = extractor.getSampleFlags();
            if (flags > 0 && (flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
                keyFrameCount++;
            }
            long sampleTime = extractor.getSampleTime();
            if (sampleTime < 0) {
                break;
            }
            frameTimeStamps.add(sampleTime);
            frameCount++;
            extractor.advance();
        }
        extractor.release();

        if (frameCount == keyFrameCount || frameCount == keyFrameCount + 1) {
            reverseVideoNoDecode(input, output, reverseAudio, frameTimeStamps, listener);
        } else {
            VideoMultiStepProgress stepProgress = new VideoMultiStepProgress(new float[]{0.45f, 0.1f, 0.45f}, listener);
            stepProgress.setCurrentStep(0);
            float bitrateMultiple = (frameCount - keyFrameCount) / (float) keyFrameCount + 1;
            MediaMetadataRetriever retriever = new MediaMetadataRetriever();
            input.setDataSource(retriever);
            int oriBitrate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
            int duration = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION));
            try {
                processor(context)
                        .input(input)
                        .output(tempFile.getAbsolutePath())
                        .bitrate((int) (oriBitrate * bitrateMultiple))
                        .iFrameInterval(0)
                        .progressListener(stepProgress)
                        .process();
            } catch (MediaCodec.CodecException e) {
                CL.e(e);
                /* on the Nexus 5, an iFrameInterval of -1 means all key frames */
                processor(context)
                        .input(input)
                        .output(tempFile.getAbsolutePath())
                        .bitrate((int) (oriBitrate * bitrateMultiple))
                        .iFrameInterval(-1)
                        .progressListener(stepProgress)
                        .process();
            }
            stepProgress.setCurrentStep(1);
            reverseVideoNoDecode(new MediaSource(tempFile.getAbsolutePath()), temp2File.getAbsolutePath(), reverseAudio, null, stepProgress);
            int oriIFrameInterval = (int) (keyFrameCount / (duration / 1000f));
            oriIFrameInterval = oriIFrameInterval == 0 ? 1 : oriIFrameInterval;
            stepProgress.setCurrentStep(2);
            processor(context)
                    .input(temp2File.getAbsolutePath())
                    .output(output)
                    .bitrate(oriBitrate)
                    .iFrameInterval(oriIFrameInterval)
                    .progressListener(stepProgress)
                    .process();
        }
    } finally {
        tempFile.delete();
        temp2File.delete();
    }
}
 
Example 14, Project: VideoProcessor, File: AudioUtil.java
/**
 * When the audio speed has to be changed, the audio must first be decoded, then the speed changed, then re-encoded.
 */
public static void decodeToPCM(VideoProcessor.MediaSource audioSource, String outPath, Integer startTimeUs, Integer endTimeUs) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    audioSource.setDataSource(extractor);
    int audioTrack = VideoUtil.selectTrack(extractor, true);
    extractor.selectTrack(audioTrack);
    if (startTimeUs == null) {
        startTimeUs = 0;
    }
    extractor.seekTo(startTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    MediaFormat oriAudioFormat = extractor.getTrackFormat(audioTrack);
    int maxBufferSize;
    if (oriAudioFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
        maxBufferSize = oriAudioFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    } else {
        maxBufferSize = 100 * 1000;
    }
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // changing the audio speed requires re-decoding the audio frames
    MediaCodec decoder = MediaCodec.createDecoderByType(oriAudioFormat.getString(MediaFormat.KEY_MIME));
    decoder.configure(oriAudioFormat, null, null, 0);
    decoder.start();

    boolean decodeDone = false;
    boolean decodeInputDone = false;
    final int TIMEOUT_US = 2500;
    File pcmFile = new File(outPath);
    FileChannel writeChannel = new FileOutputStream(pcmFile).getChannel();
    try {
        while (!decodeDone) {
            if (!decodeInputDone) {
                boolean eof = false;
                int decodeInputIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (decodeInputIndex >= 0) {
                    long sampleTimeUs = extractor.getSampleTime();
                    if (sampleTimeUs == -1) {
                        eof = true;
                    } else if (sampleTimeUs < startTimeUs) {
                        extractor.advance();
                        continue;
                    } else if (endTimeUs != null && sampleTimeUs > endTimeUs) {
                        eof = true;
                    }

                    if (eof) {
                        decodeInputDone = true;
                        decoder.queueInputBuffer(decodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        info.size = extractor.readSampleData(buffer, 0);
                        info.presentationTimeUs = sampleTimeUs;
                        info.flags = extractor.getSampleFlags();
                        ByteBuffer inputBuffer = decoder.getInputBuffer(decodeInputIndex);
                        inputBuffer.put(buffer);
                        CL.it(TAG, "audio decode queueInputBuffer " + info.presentationTimeUs / 1000);
                        decoder.queueInputBuffer(decodeInputIndex, 0, info.size, info.presentationTimeUs, info.flags);
                        extractor.advance();
                    }

                }
            }

            while (!decodeDone) {
                int outputBufferIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    CL.it(TAG, "audio decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    //ignore
                    CL.et(TAG, "unexpected result from audio decoder.dequeueOutputBuffer: " + outputBufferIndex);
                } else {
                    if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                        decodeDone = true;
                    } else {
                        ByteBuffer decodeOutputBuffer = decoder.getOutputBuffer(outputBufferIndex);
                        CL.it(TAG, "audio decode saveFrame " + info.presentationTimeUs / 1000);
                        writeChannel.write(decodeOutputBuffer);
                    }
                    decoder.releaseOutputBuffer(outputBufferIndex, false);
                }
            }
        }
    } finally {
        writeChannel.close();
        extractor.release();
        decoder.stop();
        decoder.release();
    }
}
 
Example 15, Project: VideoProcessor, File: AudioUtil.java
/**
 * When the audio speed does not need to change, the samples can simply be copied straight through.
 */
public static long writeAudioTrack(MediaExtractor extractor, MediaMuxer mediaMuxer, int muxerAudioTrackIndex,
                                   Integer startTimeUs, Integer endTimeUs, long baseMuxerFrameTimeUs, VideoProgressListener listener) throws IOException {
    int audioTrack = VideoUtil.selectTrack(extractor, true);
    extractor.selectTrack(audioTrack);
    if (startTimeUs == null) {
        startTimeUs = 0;
    }
    extractor.seekTo(startTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    MediaFormat audioFormat = extractor.getTrackFormat(audioTrack);
    long durationUs = audioFormat.getLong(MediaFormat.KEY_DURATION);
    int maxBufferSize = audioFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    long lastFrametimeUs = baseMuxerFrameTimeUs;
    while (true) {
        long sampleTimeUs = extractor.getSampleTime();
        if (sampleTimeUs == -1) {
            break;
        }
        if (sampleTimeUs < startTimeUs) {
            extractor.advance();
            continue;
        }
        if (endTimeUs != null && sampleTimeUs > endTimeUs) {
            break;
        }
        if (listener != null) {
            float progress = (sampleTimeUs - startTimeUs) / (float) (endTimeUs == null ? durationUs : endTimeUs - startTimeUs);
            progress = progress < 0 ? 0 : progress;
            progress = progress > 1 ? 1 : progress;
            listener.onProgress(progress);
        }
        info.presentationTimeUs = sampleTimeUs - startTimeUs + baseMuxerFrameTimeUs;
        info.flags = extractor.getSampleFlags();
        info.size = extractor.readSampleData(buffer, 0);
        if (info.size < 0) {
            break;
        }
        CL.i("writeAudioSampleData,time:" + info.presentationTimeUs / 1000f);
        mediaMuxer.writeSampleData(muxerAudioTrackIndex, buffer, info);
        lastFrametimeUs = info.presentationTimeUs;
        extractor.advance();
    }
    return lastFrametimeUs;
}
 
Example 16, Project: VideoCompressor, File: VideoController.java
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 
Example 17, Project: ssj, File: AudioDecoder.java
/**
 * Decodes an audio file into a raw file. This method accepts audio file formats with valid
 * headers (like .mp3, .mp4, and .wav).
 * @param filepath Path of the file to decode.
 * @return Decoded raw audio file.
 * @throws IOException when file cannot be read.
 */
private File decode(String filepath) throws IOException
{
	// Set selected audio file as a source.
	MediaExtractor extractor = new MediaExtractor();
	extractor.setDataSource(filepath);

	// Get audio format.
	MediaFormat format = extractor.getTrackFormat(0);
	String mime = format.getString(MediaFormat.KEY_MIME);

	// Cache necessary audio attributes.
	sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
	channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

	// Create and configure decoder based on audio format.
	MediaCodec decoder = MediaCodec.createDecoderByType(mime);
	decoder.configure(format, null, null, 0);
	decoder.start();

	// Create input/output buffers.
	ByteBuffer[] inputBuffers = decoder.getInputBuffers();
	ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
	MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
	extractor.selectTrack(0);

	File dst = new File(FileCons.SSJ_EXTERNAL_STORAGE + File.separator + "output.raw");
	FileOutputStream f = new FileOutputStream(dst);

	boolean endOfStreamReached = false;

	while (true)
	{
		if (!endOfStreamReached)
		{
			int inputBufferIndex = decoder.dequeueInputBuffer(10 * 1000);
			if (inputBufferIndex >= 0)
			{
				ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
				int sampleSize = extractor.readSampleData(inputBuffer, 0);
				if (sampleSize < 0)
				{
					// Pass empty buffer and the end of stream flag to the codec.
					decoder.queueInputBuffer(inputBufferIndex, 0, 0,
											 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
					endOfStreamReached = true;
				}
				else
				{
					// Pass data-filled buffer to the decoder.
					decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
											 extractor.getSampleTime(), 0);
					extractor.advance();
				}
			}
		}

		int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10 * 1000);
		if (outputBufferIndex >= 0)
		{
			ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
			byte[] data = new byte[bufferInfo.size];
			outputBuffer.get(data);
			outputBuffer.clear();

			if (data.length > 0)
			{
				f.write(data, 0, data.length);
			}
			decoder.releaseOutputBuffer(outputBufferIndex, false);

			if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
			{
				endOfStreamReached = true;
			}
		}
		else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
		{
			outputBuffers = decoder.getOutputBuffers();
		}

		if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
		{
			return dst;
		}
	}
}
 
Example 18, Project: talk-android, File: MediaController.java
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 
Example 19, Project: SiliCompressor, File: MediaController.java
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 