The following examples demonstrate the use of android.graphics.ImageFormat#YUV_420_888, drawn from open-source projects.
/**
* Set the thumbnail image.
*
* <p>
* Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
* Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
* rejected.
* </p>
*
* @param pixels an {@link android.media.Image} object with the format
* {@link android.graphics.ImageFormat#YUV_420_888}.
* @return this {@link #DngCreator} object.
* @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
* larger than {@link #MAX_THUMBNAIL_DIMENSION}.
*/
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
if (pixels == null) {
throw new IllegalArgumentException("Null argument to setThumbnail");
}
int format = pixels.getFormat();
if (format != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Unsupported Image format " + format);
}
int width = pixels.getWidth();
int height = pixels.getHeight();
if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
"," + height + ") too large, dimensions must be smaller than " +
MAX_THUMBNAIL_DIMENSION);
}
ByteBuffer rgbBuffer = convertToRGB(pixels);
nativeSetThumbnail(rgbBuffer, width, height);
return this;
}
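A hypothetical usage sketch for the method above; dngCreator and the ImageReader setup are assumptions for illustration, not part of the original source (MAX_THUMBNAIL_DIMENSION is 256 in AOSP):
ImageReader thumbReader = ImageReader.newInstance(256, 256, ImageFormat.YUV_420_888, 1);
// ... render the thumbnail into thumbReader.getSurface(), then:
try (Image thumbnail = thumbReader.acquireNextImage()) {
    dngCreator.setThumbnail(thumbnail); // dngCreator is a previously created DngCreator (assumed)
}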
public static byte[] Serialize(Image image)
{
if (image==null)
return null;
Image.Plane[] planes = image.getPlanes();
// NV21 expects planes in order YVU, not YUV:
if (image.getFormat() == ImageFormat.YUV_420_888)
planes = new Image.Plane[] {planes[0], planes[2], planes[1]};
byte[] serializeBytes = new byte[getSerializedSize(image)];
int nextFree = 0;
for (Image.Plane plane: planes)
{
ByteBuffer buffer = plane.getBuffer();
buffer.position(0);
int nBytes = buffer.remaining();
buffer.get(serializeBytes, nextFree, nBytes); // reuse the buffer rewound above rather than fetching it again
nextFree += nBytes;
}
return serializeBytes;
}
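The getSerializedSize helper referenced above is not shown here; a minimal sketch consistent with the copy loop simply sums the bytes of every plane:
public static int getSerializedSize(Image image) {
    int size = 0;
    for (Image.Plane plane : image.getPlanes()) {
        ByteBuffer buffer = plane.getBuffer();
        buffer.position(0);
        size += buffer.remaining();
    }
    return size;
}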
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else if (mPreviewFormat == ImageFormat.YUV_420_888) {
assert (mUVFrameData != null);
Imgproc.cvtColorTwoPlane(mYuvFrameData, mUVFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
} else
    throw new IllegalArgumentException("Preview format can be NV21, YV12 or YUV_420_888");
return mRgba;
}
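For the YUV_420_888 branch above, cvtColorTwoPlane needs the Y plane and the interleaved VU plane as separate Mats. A hedged sketch of how they might be filled from an android.media.Image, assuming the V plane buffer starts the interleaved VU data (pixelStride == 2) and that row strides equal the width, which is not guaranteed on every device:
private static Mat yuv420888ToRgba(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    ByteBuffer yBuf = image.getPlanes()[0].getBuffer();
    ByteBuffer vuBuf = image.getPlanes()[2].getBuffer(); // V first matches NV21 ordering
    byte[] yBytes = new byte[width * height];
    yBuf.get(yBytes, 0, Math.min(yBuf.remaining(), yBytes.length));
    byte[] vuBytes = new byte[width * height / 2];
    vuBuf.get(vuBytes, 0, Math.min(vuBuf.remaining(), vuBytes.length));
    Mat yMat = new Mat(height, width, CvType.CV_8UC1);
    yMat.put(0, 0, yBytes);
    Mat vuMat = new Mat(height / 2, width / 2, CvType.CV_8UC2);
    vuMat.put(0, 0, vuBytes);
    Mat rgba = new Mat();
    Imgproc.cvtColorTwoPlane(yMat, vuMat, rgba, Imgproc.COLOR_YUV2RGBA_NV21);
    return rgba;
}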
@Override
public void onImageAvailable(ImageReader reader) {
try (Image image = reader.acquireLatestImage()) {
if (image == null) return;
if (image.getFormat() != ImageFormat.YUV_420_888 || image.getPlanes().length != 3) {
nativeOnError(mNativeVideoCaptureDeviceAndroid, "Unexpected image format: "
+ image.getFormat() + " or #planes: " + image.getPlanes().length);
throw new IllegalStateException();
}
if (reader.getWidth() != image.getWidth()
|| reader.getHeight() != image.getHeight()) {
nativeOnError(mNativeVideoCaptureDeviceAndroid, "ImageReader size ("
+ reader.getWidth() + "x" + reader.getHeight()
+ ") did not match Image size (" + image.getWidth() + "x"
+ image.getHeight() + ")");
throw new IllegalStateException();
}
nativeOnI420FrameAvailable(mNativeVideoCaptureDeviceAndroid,
image.getPlanes()[0].getBuffer(), image.getPlanes()[0].getRowStride(),
image.getPlanes()[1].getBuffer(), image.getPlanes()[2].getBuffer(),
image.getPlanes()[1].getRowStride(), image.getPlanes()[1].getPixelStride(),
image.getWidth(), image.getHeight(), getCameraRotation(),
image.getTimestamp());
} catch (IllegalStateException ex) {
Log.e(TAG, "acquireLatestImage():", ex);
}
}
private static Bitmap renderHevcImageWithFormat(ByteBuffer bitstream, ImageInfo info, int imageFormat) throws FormatFallbackException {
try (ImageReader reader = ImageReader.newInstance(info.size.getWidth(), info.size.getHeight(), imageFormat, 1)) {
renderHevcImage(bitstream, info, reader.getSurface());
Image image = null;
try {
try {
image = reader.acquireNextImage();
} catch (UnsupportedOperationException ex) {
throw new FormatFallbackException(ex);
}
switch (image.getFormat()) {
case ImageFormat.YUV_420_888:
case ImageFormat.YV12:
return convertYuv420ToBitmap(image);
case ImageFormat.RGB_565:
return convertRgb565ToBitmap(image);
default:
throw new RuntimeException("unsupported image format(" + image.getFormat() + ")");
}
} finally {
if (image != null) {
image.close();
}
}
}
}
private void decodeColor(final byte[] data, int width, int height, int format)
{
// TODO: implement missing conversions
switch (format)
{
case ImageFormat.YV12:
{
throw new UnsupportedOperationException("Not implemented yet");
}
case ImageFormat.YUV_420_888: //YV12_PACKED_SEMI
{
CameraUtil.decodeYV12PackedSemi(argbData, data, width, height);
break;
}
case ImageFormat.NV21:
{
CameraUtil.convertNV21ToARGBInt(argbData, data, width, height);
break;
}
case ImageFormat.FLEX_RGB_888:
{
CameraUtil.convertRGBToARGBInt(argbData, data, width, height);
break;
}
default:
{
Log.e("Wrong color format");
throw new RuntimeException();
}
}
}
public static Bitmap ToBitmap(byte[] imageBytes, int format, int width, int height)
{
switch (format) {
case ImageFormat.NV21:
case ImageFormat.YUV_420_888:
return NV21ToBitmap(imageBytes, width, height);
}
return null;
}
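The NV21ToBitmap helper called above is not shown; note that the switch treats serialized YUV_420_888 data as NV21, which only holds when the planes were stored in Y, V, U order as in Serialize() earlier. One common sketch routes through YuvImage and JPEG, which is simple but lossy:
private static Bitmap NV21ToBitmap(byte[] nv21, int width, int height) {
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    byte[] jpeg = out.toByteArray();
    return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
}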
@CalledByNative
public final int getColorspace() {
switch (mCaptureFormat.mPixelFormat) {
case ImageFormat.YV12:
return AndroidImageFormat.YV12;
case ImageFormat.YUV_420_888:
return AndroidImageFormat.YUV_420_888;
case ImageFormat.NV21:
return AndroidImageFormat.NV21;
case ImageFormat.UNKNOWN:
default:
return AndroidImageFormat.UNKNOWN;
}
}
/**
 * Converts an {@link Image} to JPEG binary data.
 *
 * @param image the source image
 * @return the JPEG binary data
 */
public static byte[] convertToJPEG(Image image) {
byte[] jpeg;
if (image.getFormat() == ImageFormat.JPEG) {
jpeg = readJPEG(image);
} else if (image.getFormat() == ImageFormat.YUV_420_888) {
jpeg = NV21toJPEG(YUV420toNV21(image), image.getWidth(), image.getHeight(), 100);
} else {
throw new RuntimeException("Unsupported format: " + image.getFormat());
}
return jpeg;
}
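The YUV420toNV21 and NV21toJPEG helpers used above are not shown; minimal sketches in the spirit of the widely circulated versions, assuming tightly packed planes (real code must honor row and pixel strides):
private static byte[] YUV420toNV21(Image image) {
    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();
    byte[] nv21 = new byte[ySize + uSize + vSize];
    yBuffer.get(nv21, 0, ySize);
    // NV21 stores V before U; on devices whose U/V planes share one buffer
    // with pixelStride 2, copying V first yields the interleaved VU layout.
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    return nv21;
}

private static byte[] NV21toJPEG(byte[] nv21, int width, int height, int quality) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    yuv.compressToJpeg(new Rect(0, 0, width, height), quality, out);
    return out.toByteArray();
}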
public static byte[] ToJpeg(byte[] imageData, int imageFormat, int width, int height)
{
if (imageData == null)
return null;
switch (imageFormat)
{
case ImageFormat.NV21:
case ImageFormat.YUY2:
YuvImage img = new YuvImage(imageData, imageFormat, width, height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int quality = 20; // JPEG quality (0-100); a low value shrinks the output at the cost of fidelity
img.compressToJpeg(new Rect(0, 0, width, height), quality, baos);
return baos.toByteArray();
case ImageFormat.YUV_420_888:
return JpegFromYuv420888(imageData, imageFormat, width, height);
case ImageFormat.UNKNOWN:
return null;
case ImageFormat.NV16:
// Source: http://www.programcreek.com/java-api-examples/index.php?source_dir=Roid-Library-master/src/com/rincliu/library/common/persistence/zxing/camera/CameraManager.java
// This format has never been seen in the wild, but is compatible as we only care
// about the Y channel, so allow it.
case ImageFormat.YV12:
// source: https://github.com/evopark/tiqr-android/blob/master/src/main/java/de/evopark/tiqr/android/processing/ZxingQrScanner.java
case ImageFormat.YUV_422_888:
// only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
// doesn't affect the luminance much
// (see https://en.wikipedia.org/wiki/Chroma_subsampling)
case ImageFormat.YUV_444_888:
// only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
// doesn't affect the luminance much
// (see https://en.wikipedia.org/wiki/Chroma_subsampling)
return null;//new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
case ImageFormat.FLEX_RGB_888:
case ImageFormat.FLEX_RGBA_8888:
return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.JPEG:
// Tried and tested myself
return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.RGB_565:
return null;//new RGB565(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.RAW_SENSOR:
case ImageFormat.RAW10:
case ImageFormat.RAW12:
case ImageFormat.DEPTH16:
case ImageFormat.DEPTH_POINT_CLOUD:
//ImageFormat.Y8:
//ImageFormat.Y16:
return null;
default:
throw new IllegalArgumentException("No support for image format " + imageFormat);
}
}
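The JpegFromYuv420888 helper is not shown. If the byte array came from Serialize() above (planes stored Y, V, U), the data approximates NV21 on devices with interleaved chroma planes, so a hedged sketch can reuse the YuvImage path; this is an assumption about the original helper, not its actual implementation:
public static byte[] JpegFromYuv420888(byte[] imageData, int imageFormat, int width, int height) {
    // Assumes imageData is in Y,V,U order and close enough to NV21 to compress directly.
    YuvImage img = new YuvImage(imageData, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    img.compressToJpeg(new Rect(0, 0, width, height), 20, baos);
    return baos.toByteArray();
}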
private String formatToString(int format) {
switch (format) {
case ImageFormat.YV12:
return "YV12";
case ImageFormat.YUV_420_888:
return "YUV_420_888";
case ImageFormat.NV21:
return "NV21";
case ImageFormat.NV16:
return "NV16";
case PixelFormat.RGB_565:
return "RGB_565";
case PixelFormat.RGBA_8888:
return "RGBA_8888";
case PixelFormat.RGBX_8888:
return "RGBX_8888";
case PixelFormat.RGB_888:
return "RGB_888";
case ImageFormat.JPEG:
return "JPEG";
case ImageFormat.YUY2:
return "YUY2";
case ImageFormat.Y8:
return "Y8";
case ImageFormat.Y16:
return "Y16";
case ImageFormat.RAW_SENSOR:
return "RAW_SENSOR";
case ImageFormat.RAW_PRIVATE:
return "RAW_PRIVATE";
case ImageFormat.RAW10:
return "RAW10";
case ImageFormat.DEPTH16:
return "DEPTH16";
case ImageFormat.DEPTH_POINT_CLOUD:
return "DEPTH_POINT_CLOUD";
case ImageFormat.RAW_DEPTH:
return "RAW_DEPTH";
case ImageFormat.PRIVATE:
return "PRIVATE";
default:
return "UNKNOWN";
}
}
@Override
public boolean allocate(int width, int height, int frameRate) {
Log.d(TAG, "allocate: requested (%d x %d) @%dfps", width, height, frameRate);
ThreadUtils.assertOnUiThread();
synchronized (mCameraStateLock) {
if (mCameraState == CameraState.OPENING || mCameraState == CameraState.CONFIGURING) {
Log.e(TAG, "allocate() invoked while Camera is busy opening/configuring.");
return false;
}
}
final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Find closest supported size.
final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888);
final Size closestSupportedSize = findClosestSizeInArray(supportedSizes, width, height);
if (closestSupportedSize == null) {
Log.e(TAG, "No supported resolutions.");
return false;
}
final List<Range<Integer>> fpsRanges = Arrays.asList(cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES));
if (fpsRanges.isEmpty()) {
Log.e(TAG, "No supported framerate ranges.");
return false;
}
final List<FramerateRange> framerateRanges =
new ArrayList<FramerateRange>(fpsRanges.size());
// On some legacy implementations FPS values are multiplied by 1000. Multiply by 1000
// everywhere for consistency. Set fpsUnitFactor to 1 if fps ranges are already multiplied
// by 1000.
final int fpsUnitFactor = fpsRanges.get(0).getUpper() > 1000 ? 1 : 1000;
for (Range<Integer> range : fpsRanges) {
framerateRanges.add(new FramerateRange(
range.getLower() * fpsUnitFactor, range.getUpper() * fpsUnitFactor));
}
final FramerateRange aeFramerateRange =
getClosestFramerateRange(framerateRanges, frameRate * 1000);
mAeFpsRange = new Range<Integer>(
aeFramerateRange.min / fpsUnitFactor, aeFramerateRange.max / fpsUnitFactor);
Log.d(TAG, "allocate: matched (%d x %d) @[%d - %d]", closestSupportedSize.getWidth(),
closestSupportedSize.getHeight(), mAeFpsRange.getLower(), mAeFpsRange.getUpper());
// |mCaptureFormat| is also used to configure the ImageReader.
mCaptureFormat = new VideoCaptureFormat(closestSupportedSize.getWidth(),
closestSupportedSize.getHeight(), frameRate, ImageFormat.YUV_420_888);
mCameraNativeOrientation =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// TODO(mcasas): The following line is correct for N5 with prerelease Build,
// but NOT for N7 with a dev Build. Figure out which one to support.
mInvertDeviceOrientationReadings =
cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
== CameraCharacteristics.LENS_FACING_BACK;
return true;
}
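The findClosestSizeInArray helper used in allocate() is defined elsewhere in the Chromium source; a minimal sketch that matches its use here, picking the size with the smallest combined width/height difference:
private static Size findClosestSizeInArray(Size[] sizes, int width, int height) {
    if (sizes == null) return null;
    Size closestSize = null;
    int minDiff = Integer.MAX_VALUE;
    for (Size size : sizes) {
        int diff = Math.abs(size.getWidth() - width) + Math.abs(size.getHeight() - height);
        if (diff < minDiff) {
            minDiff = diff;
            closestSize = size;
        }
    }
    return closestSize;
}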
/******************************************************************************************
 * The preview window can support different image formats depending on the camera make.
 * Almost all cameras support NV21 and JPEG.
 * @param parameters preview window parameters
 ****************************************************************************************/
@SuppressWarnings("deprecation")
private synchronized void queryPreviewSettings(Camera.Parameters parameters) {
List<int[]> supportedFps = parameters.getSupportedPreviewFpsRange();
for (int[] item : supportedFps) {
Log.d(TAG, "Min preview frame rate supported: " + item[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] / 1000);
Log.d(TAG, "Max preview frame rate supported: " + item[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000);
}
List<Integer> formats = parameters.getSupportedPreviewFormats();
for (Integer format : formats) {
if (format == null) {
Log.e(TAG, "This camera reported a null preview format");
break;
}
switch ( format.intValue() ) {
case ImageFormat.JPEG:
Log.d(TAG, "This camera supports JPEG format in preview");
break;
case ImageFormat.NV16:
Log.d(TAG, "This camera supports NV16 format in preview");
break;
case ImageFormat.NV21:
Log.d(TAG, "This camera supports NV21 format in preview");
mNV21ColorFormatSupported = true;
break;
case ImageFormat.RGB_565:
Log.d(TAG, "This camera supports RGB_565 format in preview");
break;
case ImageFormat.YUV_420_888:
Log.d(TAG, "This camera supports YUV_420_888 format in preview");
break;
case ImageFormat.YUY2:
Log.d(TAG, "This camera supports YUY2 format in preview");
break;
case ImageFormat.YV12:
Log.d(TAG, "This camera supports YV12 format in preview");
mYV12ColorFormatSupported = true;
break;
case ImageFormat.UNKNOWN:
Log.e(TAG, "This camera supports UNKNOWN format in preview");
break;
default:
Log.e(TAG, "This camera supports illegal format in preview");
break;
}
}
}
public static OneCamera create(
CameraDevice device,
CameraCharacteristics characteristics,
OneCameraFeatureConfig featureConfig,
OneCameraCaptureSetting captureSetting,
DisplayMetrics displayMetrics,
Context context,
MainThread mainThread,
ImageRotationCalculator imageRotationCalculator,
BurstFacade burstController,
SoundPlayer soundPlayer,
FatalErrorHandler fatalErrorHandler) throws OneCameraAccessException
{
// TODO: Might want to switch current camera to vendor HDR.
CaptureSupportLevel captureSupportLevel = featureConfig
.getCaptureSupportLevel(characteristics);
Log.i(TAG, "Camera support level: " + captureSupportLevel.name());
OneCameraCharacteristics oneCharacteristics =
new OneCameraCharacteristicsImpl(characteristics);
PictureSizeCalculator pictureSizeCalculator =
new PictureSizeCalculator(oneCharacteristics);
PictureSizeCalculator.Configuration configuration = null;
OneCameraFactory cameraFactory = null;
ImageSaver.Builder imageSaverBuilder = null;
ImageBackend imageBackend = ProcessingServiceManager.instance().getImageBackend();
// Depending on the support level of the camera, choose the right
// configuration.
switch (captureSupportLevel)
{
case LIMITED_JPEG:
case LEGACY_JPEG:
// LIMITED and LEGACY have different picture takers, which are
// selected by the support level that is passed into
// #createOneCamera below - otherwise they use the same OneCamera and image backend.
cameraFactory = new SimpleOneCameraFactory(ImageFormat.JPEG,
featureConfig.getMaxAllowedImageReaderCount(),
imageRotationCalculator);
configuration = pictureSizeCalculator.computeConfiguration(
captureSetting.getCaptureSize(),
ImageFormat.JPEG);
imageSaverBuilder = new JpegImageBackendImageSaver(imageRotationCalculator,
imageBackend, configuration.getPostCaptureCrop());
break;
case LIMITED_YUV:
// Same as above, but we're using YUV images.
cameraFactory = new SimpleOneCameraFactory(ImageFormat.YUV_420_888,
featureConfig.getMaxAllowedImageReaderCount(),
imageRotationCalculator);
configuration = pictureSizeCalculator.computeConfiguration(
captureSetting.getCaptureSize(),
ImageFormat.YUV_420_888);
imageSaverBuilder = new YuvImageBackendImageSaver(imageRotationCalculator,
imageBackend,
configuration.getPostCaptureCrop());
break;
case ZSL:
// ZSL has its own OneCamera and produces YUV images.
cameraFactory = new ZslOneCameraFactory(ImageFormat.YUV_420_888,
featureConfig.getMaxAllowedImageReaderCount());
configuration = pictureSizeCalculator.computeConfiguration(
captureSetting.getCaptureSize(),
ImageFormat.YUV_420_888);
imageSaverBuilder = new YuvImageBackendImageSaver(imageRotationCalculator,
imageBackend, configuration.getPostCaptureCrop());
break;
}
Log.i(TAG, "Picture Size Configuration: " + configuration);
return cameraFactory.createOneCamera(new AndroidCameraDeviceProxy(device),
new OneCameraCharacteristicsImpl(characteristics),
captureSupportLevel,
mainThread,
configuration.getNativeOutputSize(),
imageSaverBuilder,
captureSetting.getFlashSetting(),
captureSetting.getExposureSetting(),
captureSetting.getHdrSceneSetting(),
burstController,
fatalErrorHandler);
}
public CameraConfig(String cameraId, StreamConfigurationMap map, @Nullable View view, OnImageAvailableListener listener, Handler handler) {
if (view != null) {
this.view = view;
if (view instanceof TextureView) {
type = TYPE_TEXTURE_VIEW;
} else if (view instanceof SurfaceView) {
type = TYPE_SURFACE_VIEW;
} else {
throw new IllegalArgumentException("Unsupported view type");
}
}
this.streamConfigurationMap = map;
this.cameraId = cameraId;
this.imageAvailableListener = listener;
this.handler = handler;
int format = ImageFormat.JPEG;
if (map.isOutputSupportedFor(ImageFormat.YUV_420_888)) {
format = ImageFormat.YUV_420_888;
Log.i(TAG, "support YUV_420_888");
} else if (map.isOutputSupportedFor(ImageFormat.YV12)) {
format = ImageFormat.YV12;
}
Log.e(TAG, "current ImageFormat = " + format);
largest = calculationSize(map);
Log.d(TAG, "width = " + largest.getWidth() + " height = " + largest.getHeight());
// Three-plane YUV formats (YV12, YUV_420_888) are supported here; NV21 is not.
imageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), format, 1);
imageReader.setOnImageAvailableListener(imageAvailableListener, handler);
this.cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
createCameraSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
camera.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
camera.close();
cameraDevice = null;
Log.e(TAG, _error[error]);
}
};
}
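A minimal sketch of an OnImageAvailableListener that could be passed into this constructor for the YUV_420_888 path; the plane handling here is illustrative only:
ImageReader.OnImageAvailableListener listener = reader -> {
    try (Image image = reader.acquireLatestImage()) {
        if (image == null) return;
        // For YUV_420_888, plane 0 is Y; planes 1 and 2 are U and V and may
        // be interleaved (pixelStride == 2) depending on the device.
        Image.Plane yPlane = image.getPlanes()[0];
        Log.d(TAG, "Y plane: rowStride=" + yPlane.getRowStride()
                + " pixelStride=" + yPlane.getPixelStride());
    }
};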
/**
* Check if a given surface uses {@link ImageFormat#YUV_420_888} or a format that can be
* readily converted to it; YV12 and NV21 are the two currently supported formats.
*
* @param s the surface to check.
* @return {@code true} if the surface uses {@link ImageFormat#YUV_420_888} or a compatible
* format.
*/
static boolean needsConversion(Surface s) throws BufferQueueAbandonedException {
int nativeType = detectSurfaceType(s);
return nativeType == ImageFormat.YUV_420_888 || nativeType == ImageFormat.YV12 ||
nativeType == ImageFormat.NV21;
}