android.media.MediaCodec#createInputSurface() code examples

Listed below are code examples that call android.media.MediaCodec#createInputSurface(); each snippet is taken from an open-source project on GitHub, where the full source can be viewed.

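Before the project samples, here is a minimal, self-contained sketch of the call order that all of the examples below follow. It assumes an H.264 ("video/avc") encoder and placeholder bitrate, frame-rate, and resolution values; the essential constraint is that createInputSurface() may only be called after configure() and before start() (API 18+).

import java.io.IOException;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.view.Surface;

final class SurfaceEncoderSketch {
    private MediaCodec mEncoder;
    private Surface mInputSurface;

    void prepare(int width, int height) throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        // COLOR_FormatSurface tells the codec its input comes from a Surface, not from ByteBuffers.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000);    // placeholder values
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        mEncoder = MediaCodec.createEncoderByType("video/avc");
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Valid only between configure() and start().
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();
        // Render frames into mInputSurface (GLES, camera preview, VirtualDisplay, ...) and drain the output.
    }

    void finish() {
        // With Surface input there are no input buffers to flag, so EOS is signalled explicitly.
        mEncoder.signalEndOfInputStream();
        mEncoder.stop();
        mEncoder.release();
        mInputSurface.release();
    }
}
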
Example 1

@Override
protected MediaCodec internal_configure(MediaCodec previous_codec,
	final MediaFormat format) throws IOException {

	if (DEBUG) Log.v(TAG, "internal_configure:");
	format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);	// API >= 18
	format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate > 0 ? mBitRate : calcBitRate());
	format.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
	format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameIntervals);
	if (DEBUG) Log.i(TAG, "format: " + format);

	if (previous_codec == null)
		previous_codec = MediaCodec.createEncoderByType(MIME_TYPE);
	previous_codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mSurface = previous_codec.createInputSurface();	// API >= 18
	return previous_codec;
}
 
Example 2, project: VideoRecorder, file: VideoEncoder.java

@Override
protected MediaCodec createEncoder() throws IOException {
    LogUtil.logd(TAG, "createEncoder");
    MediaFormat videoFormat = MediaFormat.createVideoFormat(MIME_TYPE, mVideoSize.getWidth(), mVideoSize.getHeight());
    videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
    LogUtil.logd(TAG, "format: " + videoFormat);


    MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);
    encoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mInputSurface = encoder.createInputSurface();
    encoder.start();
    LogUtil.logd(TAG, "createEncoder finishing");
    return encoder;
}
 
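Example 2 starts the encoder but stops short of consuming its output. The class below is a hypothetical drain helper, not part of VideoRecorder, showing the loop that usually accompanies a surface-input encoder: encoded samples are pulled with dequeueOutputBuffer() and written to a MediaMuxer. It assumes API 21+ for getOutputBuffer().

import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

final class EncoderOutputDrainer {
    private static final long TIMEOUT_USEC = 10_000;
    private final MediaCodec mEncoder;
    private final MediaMuxer mMuxer;
    private int mTrackIndex = -1;
    private boolean mMuxerStarted;

    EncoderOutputDrainer(MediaCodec encoder, MediaMuxer muxer) {
        mEncoder = encoder;
        mMuxer = muxer;
    }

    // Call drain(false) after every frame rendered into the input surface, drain(true) once at the end.
    void drain(boolean endOfStream) {
        if (endOfStream) {
            // Surface input has no input buffers to queue, so EOS is signalled explicitly.
            mEncoder.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) break;            // no output yet; keep rendering frames
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = mEncoder.getOutputFormat();
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (index >= 0) {
                ByteBuffer encoded = mEncoder.getOutputBuffer(index);
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    info.size = 0;                  // csd data already delivered via the output format
                }
                if (info.size > 0 && mMuxerStarted) {
                    mMuxer.writeSampleData(mTrackIndex, encoded, info);
                }
                mEncoder.releaseOutputBuffer(index, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
            }
        }
    }
}
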
Example 3, project: In77Camera, file: MediaCodecUtils.java

@TargetApi(MIN_API_LEVEL_VIDEO)
public static int checkMediaCodecVideoEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_VIDEO){
        Log.d(TAG, "checkMediaCodecVideoEncoderSupport: Min API is 18");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE_VIDEO, TEST_WIDTH, TEST_HEIGHT);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, TEST_VIDEO_BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, TEST_FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, TEST_IFRAME_INTERVAL);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_VIDEO);
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.createInputSurface();
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
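Example 3 probes support by configuring and immediately releasing a throwaway encoder. On API 21+ the same question can be answered from MediaCodecList without touching a codec instance; the method below is a sketch of that alternative, not In77Camera code.

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

final class SurfaceEncoderProbe {
    // True if any AVC encoder advertises Surface input (MediaCodecList(int) requires API 21+).
    static boolean hasSurfaceInputAvcEncoder() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase("video/avc")) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                for (int colorFormat : caps.colorFormats) {
                    if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
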

Example 5, project: ScreenRecoder, file: ScreenRecoder.java

@Override
public void run() {
	MediaFormat format = MediaFormat.createVideoFormat("video/avc",
			mWidth, mHeight);
	format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
			MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
	format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
	format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
	format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
			I_FRAME_INTERVAL);

	MediaCodec codec;
	try {
		codec = MediaCodec.createEncoderByType("video/avc");
	} catch (IOException e) {
		// On API 21+ SDKs, createEncoderByType declares a checked IOException that run() cannot rethrow.
		throw new RuntimeException("Failed to create video/avc encoder", e);
	}
	codec.configure(format, null, null,
			MediaCodec.CONFIGURE_FLAG_ENCODE);
	Surface surface = codec.createInputSurface();
	codec.start();

	VirtualDisplay virtualDisplay = mDisplayManager
			.createVirtualDisplay(DISPLAY_NAME, mWidth, mHeight,
					mDensityDpi, surface,
					DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC);
	
	if (virtualDisplay != null) {
		stream(codec);
		virtualDisplay.release();
	}

	codec.signalEndOfInputStream();
	codec.stop();
}
 
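Example 5 feeds the encoder's input surface from a DisplayManager virtual display; creating a public virtual display this way normally requires the signature-level CAPTURE_VIDEO_OUTPUT permission, so ordinary apps capture the screen through MediaProjection instead. The snippet below is a sketch of that route with assumed names, not ScreenRecoder code.

import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.view.Surface;

final class ScreenMirrorSketch {
    // The MediaProjection is obtained from MediaProjectionManager.getMediaProjection(resultCode, data)
    // after the user accepts the screen-capture consent dialog (API 21+).
    static VirtualDisplay mirrorIntoEncoder(MediaProjection projection, Surface encoderSurface,
                                            int width, int height, int densityDpi) {
        return projection.createVirtualDisplay("screen-mirror", width, height, densityDpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, encoderSurface,
                null /* callback */, null /* handler */);
    }
}
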
Example 6, project: SoloPi, file: VideoEncoder.java

@Override
protected void onEncoderConfigured(MediaCodec encoder) {
    mSurface = encoder.createInputSurface();
    if (VERBOSE) LogUtil.i("@@", "VideoEncoder create input surface: " + mSurface);
}
 
Example 7, project: GIFCompressor, file: VideoTranscoder.java

@Override
protected void onStartEncoder(@NonNull MediaFormat format, @NonNull MediaCodec encoder) {
    mEncoderInputSurface = new VideoEncoderInput(encoder.createInputSurface());
    super.onStartEncoder(format, encoder);
}
 
Example 8, project: ScreenCapture, file: VideoEncoder.java

@Override
protected void onEncoderConfigured(MediaCodec encoder) {
    mSurface = encoder.createInputSurface();
    if (VERBOSE) Log.i("@@", "VideoEncoder create input surface: " + mSurface);
}
 
Example 9

/**
 * Generates a test video file, saving it as VideoChunks.  We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private boolean generateVideoFile(VideoChunks output) {
    if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        output.setMediaFormat(format);
        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();
        generateVideoData(encoder, inputSurface, output);
    } finally {
        if (encoder != null) {
            if (VERBOSE) Log.d(TAG, "releasing encoder");
            encoder.stop();
            encoder.release();
            if (VERBOSE) Log.d(TAG, "released encoder");
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
    }
    return true;
}
 
Example 10

/**
 * Edits a video file, saving the contents to a new file.  This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats.  By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData) {
    if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try {
        MediaFormat inputFormat = inputData.getMediaFormat();
        // Create an encoder format that matches the input format.  (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
        outputData.setMediaFormat(outputFormat);
        encoder = MediaCodec.createEncoderByType(MIME_TYPE);
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();
        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.changeFragmentShader(FRAGMENT_SHADER);
        decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        decoder.start();
        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    } finally {
        if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
    }
    return outputData;
}
 
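The changeFragmentShader() call in Example 10 is where the per-frame transformation lives: the decoder's frames arrive as an external OES texture and a GLSL fragment shader rewrites them before they are drawn into the encoder's input surface. The constant below is a hypothetical example of such a shader (the varying and uniform names are assumptions, not the project's actual FRAGMENT_SHADER); it swaps the red and blue channels.

// Hypothetical shader string; the varying/uniform names must match the host program's vertex shader.
private static final String SWAP_RED_BLUE_FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "    gl_FragColor = texture2D(sTexture, vTextureCoord).bgra;\n" +
        "}\n";
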
Example 11

/**
 * Tests encoding and subsequently decoding video from frames generated into a buffer.
 * <p>
 * We encode several frames of a video test pattern using MediaCodec, then decode the
 * output with MediaCodec and do some simple checks.
 */
private void encodeDecodeVideoFromSurfaceToSurface() throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    mLargestColorDelta = -1;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());
        int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        // Create the output surface.
        outputSurface = new OutputSurface(mWidth, mHeight);
        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        decoder.configure(format, outputSurface.getSurface(), null, 0);
        decoder.start();
        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.  Request a Surface to use for input.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        encoder.start();
        doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, colorFormat, decoder, outputSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
 
Example 12, project: media-for-mobile, file: Surface.java

public Surface(MediaCodec mediaCodec, IEglUtil eglUtil) {
    this.inputSurface = new InputSurface(mediaCodec.createInputSurface(), EGL14.eglGetCurrentContext());
    this.inputSurface.makeCurrent();
    this.outputSurface = new OutputSurface(eglUtil);
}
 
Example 13, project: media-for-mobile, file: SimpleSurface.java

public SimpleSurface(MediaCodec mediaCodec, EGLContext eglSharedCtx) {
    androidSurface = mediaCodec.createInputSurface();
    inputSurface = new InputSurface(androidSurface, eglSharedCtx);
}
 
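Several of the examples above (9 through 13) and Example 14 below hand the Surface from createInputSurface() to an InputSurface helper whose source is not reproduced on this page. Such helpers typically wrap the codec surface in an EGL window surface created with the EGL_RECORDABLE_ANDROID attribute so that GLES rendering feeds the encoder. The class below is a minimal sketch of that pattern (error checking omitted for brevity); it is not the InputSurface implementation used by these projects.

import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;

final class CodecInputSurfaceSketch {
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;   // lets the encoder consume GLES frames

    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    private final Surface mSurface;

    CodecInputSurfaceSketch(Surface codecInputSurface) {
        mSurface = codecInputSurface;
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        int[] version = new int[2];
        EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1);
        int[] configAttribs = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.eglChooseConfig(mEGLDisplay, configAttribs, 0, configs, 0, configs.length, numConfigs, 0);
        int[] contextAttribs = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, contextAttribs, 0);
        int[] surfaceAttribs = { EGL14.EGL_NONE };
        mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface, surfaceAttribs, 0);
    }

    void makeCurrent() {
        EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
    }

    void setPresentationTime(long nsecs) {
        // Timestamp (nanoseconds) attached to the next frame submitted by swapBuffers().
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
    }

    boolean swapBuffers() {
        // Submits the current GLES frame to the encoder.
        return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }

    void release() {
        EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
        EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
        EGL14.eglReleaseThread();
        EGL14.eglTerminate(mEGLDisplay);
        mSurface.release();   // the Surface obtained from createInputSurface()
    }
}
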
Example 14, project: LiveMultimedia, file: HWEncoder.java

/**
 * Tests encoding and subsequently decoding video from frames generated into a buffer.
 * <p>
 * We encode several frames of a video test pattern using MediaCodec, then decode the
 * output with MediaCodec and do some simple checks.
 */
private void encodeDecodeVideoFromSurfaceToSurface() throws Exception {
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;

    mLargestColorDelta = -1;

    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

        int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create the output surface.
        outputSurface = new OutputSurface(mWidth, mHeight);

        // Create a MediaCodec for the decoder, just based on the MIME type.  The various
        // format details will be passed through the csd-0 meta-data later on.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        decoder.configure(format, outputSurface.getSurface(), null, 0);
        decoder.start();

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.  Request a Surface to use for input.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        encoder.start();

        doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, colorFormat, decoder, outputSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing codecs");
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }

        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}