下面列出了 android.graphics.ImageFormat#FLEX_RGB_888 的实例代码;可以点击链接到 GitHub 查看源代码,也可以在右侧发表评论。
/**
 * Converts the raw camera frame in {@code data} into ARGB pixels stored in the
 * {@code argbData} field, dispatching on the reported image format.
 *
 * @param data   raw frame bytes as delivered by the camera
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @param format one of the {@link ImageFormat} constants
 * @throws UnsupportedOperationException for {@link ImageFormat#YV12} (conversion not implemented yet)
 * @throws RuntimeException for any format not handled below
 */
private void decodeColor(final byte[] data, int width, int height, int format)
{
// TODO: implement missing conversions (e.g. YV12)
switch (format)
{
case ImageFormat.YV12:
{
throw new UnsupportedOperationException("Not implemented, yet");
}
case ImageFormat.YUV_420_888: //YV12_PACKED_SEMI
{
CameraUtil.decodeYV12PackedSemi(argbData, data, width, height);
break;
}
case ImageFormat.NV21:
{
CameraUtil.convertNV21ToARGBInt(argbData, data, width, height);
break;
}
case ImageFormat.FLEX_RGB_888:
{
CameraUtil.convertRGBToARGBInt(argbData, data, width, height);
break;
}
default:
{
Log.e("Wrong color format");
// Include the offending value so the failure is diagnosable from the stack trace alone.
throw new RuntimeException("Wrong color format: " + format);
}
}
}
/**
 * Converts the raw camera frame in {@code data} into ARGB pixels stored in the
 * {@code iaRgbData} field, dispatching on the reported image format.
 *
 * @param data   raw frame bytes as delivered by the camera
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @param format one of the {@link ImageFormat} constants
 * @throws UnsupportedOperationException for {@link ImageFormat#YV12} (conversion not implemented yet)
 * @throws RuntimeException for any format not handled below
 */
private void decodeColor(final byte[] data, int width, int height, int format)
{
//@todo implement missing conversions (e.g. YV12)
switch (format)
{
case ImageFormat.YV12:
{
throw new UnsupportedOperationException("Not implemented, yet");
}
case ImageFormat.YUV_420_888: //YV12_PACKED_SEMI
{
CameraUtil.decodeYV12PackedSemi(iaRgbData, data, width, height);
break;
}
case ImageFormat.NV21:
{
CameraUtil.convertNV21ToARGBInt(iaRgbData, data, width, height);
break;
}
case ImageFormat.FLEX_RGB_888:
{
CameraUtil.convertRGBToARGBInt(iaRgbData, data, width, height);
break;
}
default:
{
Log.e("Wrong color format");
// Include the offending value so the failure is diagnosable from the stack trace alone.
throw new RuntimeException("Wrong color format: " + format);
}
}
}
/**
 * Logs every picture format supported by the camera to logcat.
 *
 * @param params camera parameters to query; must not be {@code null}
 */
public static void dumpSupportedPictureFormats(@NonNull final Camera.Parameters params) {
    for (final int fmt : params.getSupportedPictureFormats()) {
        // Resolve a readable name first, then emit a single log line.
        final String name;
        switch (fmt) {
            case ImageFormat.DEPTH16:           name = "DEPTH16"; break;
            case ImageFormat.DEPTH_POINT_CLOUD: name = "DEPTH_POINT_CLOUD"; break;
            case ImageFormat.FLEX_RGBA_8888:    name = "FLEX_RGBA_8888"; break;
            case ImageFormat.FLEX_RGB_888:      name = "FLEX_RGB_888"; break;
            case ImageFormat.JPEG:              name = "JPEG"; break;
            case ImageFormat.NV16:              name = "NV16"; break;
            case ImageFormat.NV21:              name = "NV21"; break;
            case ImageFormat.PRIVATE:           name = "PRIVATE"; break;
            case ImageFormat.RAW10:             name = "RAW10"; break;
            case ImageFormat.RAW12:             name = "RAW12"; break;
            case ImageFormat.RAW_PRIVATE:       name = "RAW_PRIVATE"; break;
            case ImageFormat.RAW_SENSOR:        name = "RAW_SENSOR"; break;
            case ImageFormat.RGB_565:           name = "RGB_565"; break;
            case ImageFormat.UNKNOWN:           name = "UNKNOWN"; break;
            case ImageFormat.YUV_420_888:       name = "YUV_420_888"; break;
            case ImageFormat.YUV_422_888:       name = "YUV_422_888"; break;
            case ImageFormat.YUV_444_888:       name = "YUV_444_888"; break;
            case ImageFormat.YUY2:              name = "YUY2"; break;
            case ImageFormat.YV12:              name = "YV12"; break;
            default:
                name = String.format("unknown, %08x", fmt);
                break;
        }
        Log.i(TAG, "supported: " + name);
    }
}
/**
 * Maps an {@link ImageFormat} constant to its symbolic name.
 *
 * @param format one of the {@link ImageFormat} constants
 * @return the constant's name, or {@code UNKNOWN + "-" + format} for values
 *         not covered here
 */
private static String getFormat(int format) {
    switch (format) {
        case ImageFormat.DEPTH16:           return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD: return "DEPTH_POINT_CLOUD";
        case ImageFormat.FLEX_RGBA_8888:    return "FLEX_RGBA_8888";
        case ImageFormat.FLEX_RGB_888:      return "FLEX_RGB_888";
        case ImageFormat.JPEG:              return "JPEG";
        case ImageFormat.NV16:              return "NV16";
        case ImageFormat.NV21:              return "NV21";
        case ImageFormat.PRIVATE:           return "PRIVATE";
        case ImageFormat.RAW10:             return "RAW10";
        case ImageFormat.RAW12:             return "RAW12";
        case ImageFormat.RAW_PRIVATE:       return "RAW_PRIVATE";
        case ImageFormat.RAW_SENSOR:        return "RAW_SENSOR";
        case ImageFormat.RGB_565:           return "RGB_565";
        case ImageFormat.YUV_420_888:       return "YUV_420_888";
        case ImageFormat.YUV_422_888:       return "YUV_422_888";
        case ImageFormat.YUV_444_888:       return "YUV_444_888";
        case ImageFormat.YUY2:              return "YUY2";
        case ImageFormat.YV12:              return "YV12";
        default:                            return UNKNOWN + "-" + format;
    }
}
/**
 * Maps an {@link ImageFormat} constant to its symbolic name.
 *
 * @param imageFormat one of the {@link ImageFormat} constants
 * @return the constant's name, or the decimal value as a string for values
 *         not covered here
 */
private String formatFormat(int imageFormat)
{
    switch (imageFormat)
    {
        case ImageFormat.UNKNOWN:           return "UNKNOWN";
        case ImageFormat.NV21:              return "NV21";
        case ImageFormat.NV16:              return "NV16";
        case ImageFormat.YV12:              return "YV12";
        case ImageFormat.YUY2:              return "YUY2";
        case ImageFormat.YUV_420_888:       return "YUV_420_888";
        case ImageFormat.YUV_422_888:       return "YUV_422_888";
        case ImageFormat.YUV_444_888:       return "YUV_444_888";
        case ImageFormat.FLEX_RGB_888:      return "FLEX_RGB_888";
        case ImageFormat.FLEX_RGBA_8888:    return "FLEX_RGBA_8888";
        case ImageFormat.JPEG:              return "JPEG";
        case ImageFormat.RGB_565:           return "RGB_565";
        case ImageFormat.RAW_SENSOR:        return "RAW_SENSOR";
        case ImageFormat.RAW10:             return "RAW10";
        case ImageFormat.RAW12:             return "RAW12";
        case ImageFormat.DEPTH16:           return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD: return "DEPTH_POINT_CLOUD";
        //case ImageFormat.Y8:
        //case ImageFormat.Y16:
        default:                            return "" + imageFormat;
    }
}
/**
 * Estimates the relative decoding cost of an image format; lower is cheaper.
 * The values are heuristic guesses (see inline notes), with 1.0 as the baseline.
 *
 * @param format one of the {@link ImageFormat} constants
 * @return a relative cost factor; 1.0 for unrecognized formats
 */
private static double getFormatCost(int format) {
    switch (format) {
        case ImageFormat.NV21:
        // NV16 has never been seen in the wild, but is compatible as we only
        // care about the Y channel, so allow it.
        case ImageFormat.NV16:
        case ImageFormat.FLEX_RGB_888:
        case ImageFormat.FLEX_RGBA_8888:
        case ImageFormat.RGB_565:
            return 0.8; // pure guesswork
        case ImageFormat.YV12:
        case ImageFormat.YUY2:
        case ImageFormat.YUV_420_888:
        // The 422/444 variants only differ from yuv_420_888 in chroma-subsampling,
        // which presumably doesn't affect the luminance much
        // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
        case ImageFormat.YUV_422_888:
        case ImageFormat.YUV_444_888:
            return 0.5; // pure guesswork - but it IS faster than JPEG
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
            return 0.4; // pure guesswork - but any RAW format must be optimal (wrt capture speed)?
        case ImageFormat.DEPTH16:
        case ImageFormat.DEPTH_POINT_CLOUD:
            return 1.5; // sound terribly complicated - but I'm just guessing....
        case ImageFormat.UNKNOWN:
        case ImageFormat.JPEG:
            return 1.0; // duh...?
        //ImageFormat.Y8:
        //ImageFormat.Y16:
    }
    return 1.0;
}
/**
 * Compresses a raw camera frame to JPEG using the default quality of 20.
 *
 * @param data byte[]
 * @param imageFormat one of the {@link ImageFormat} constants
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return JPEG bytes, or {@code null} when the format carries no convertible
 *         image data or the conversion is not implemented
 * @throws IllegalArgumentException for unrecognized formats
 */
public static byte[] ToJpeg(byte[] imageData, int imageFormat, int width, int height)
{
// Delegate to the quality-parameterized overload; 20 preserves the historical default.
return ToJpeg(imageData, imageFormat, width, height, 20);
}
/**
 * Compresses a raw camera frame to JPEG with a caller-chosen quality.
 *
 * @param imageData raw frame bytes; may be {@code null}
 * @param imageFormat one of the {@link ImageFormat} constants
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @param quality JPEG quality in [0, 100]; lower values shrink the output at the
 *                cost of image quality
 * @return JPEG bytes, or {@code null} when {@code imageData} is {@code null},
 *         the format carries no convertible image data, or the conversion is
 *         not implemented
 * @throws IllegalArgumentException for unrecognized formats
 */
public static byte[] ToJpeg(byte[] imageData, int imageFormat, int width, int height, int quality)
{
if (imageData == null)
return null;
switch (imageFormat)
{
case ImageFormat.NV21:
case ImageFormat.YUY2:
// YuvImage handles these two formats natively.
YuvImage img = new YuvImage(imageData, imageFormat, width, height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
img.compressToJpeg(new Rect(0, 0, width, height), quality, baos);
return baos.toByteArray();
case ImageFormat.YUV_420_888:
return JpegFromYuv420888(imageData, imageFormat, width, height);
case ImageFormat.UNKNOWN:
return null;
case ImageFormat.NV16:
// Source: http://www.programcreek.com/java-api-examples/index.php?source_dir=Roid-Library-master/src/com/rincliu/library/common/persistence/zxing/camera/CameraManager.java
// This format has never been seen in the wild, but is compatible as we only care
// about the Y channel, so allow it.
case ImageFormat.YV12:
// source: https://github.com/evopark/tiqr-android/blob/master/src/main/java/de/evopark/tiqr/android/processing/ZxingQrScanner.java
case ImageFormat.YUV_422_888:
// only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
// doesn't affect the luminance much
// (see https://en.wikipedia.org/wiki/Chroma_subsampling)
case ImageFormat.YUV_444_888:
// only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
// doesn't affect the luminance much
// (see https://en.wikipedia.org/wiki/Chroma_subsampling)
return null;//new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
case ImageFormat.FLEX_RGB_888:
case ImageFormat.FLEX_RGBA_8888:
return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.JPEG:
// Tried and tested myself
return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.RGB_565:
return null;//new RGB565(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
case ImageFormat.RAW_SENSOR:
case ImageFormat.RAW10:
case ImageFormat.RAW12:
case ImageFormat.DEPTH16:
case ImageFormat.DEPTH_POINT_CLOUD:
//ImageFormat.Y8:
//ImageFormat.Y16:
return null;
default:
throw new IllegalArgumentException("No support for image format " + imageFormat);
}
}