下面列出了 android.graphics.YuvImage#compressToJpeg() 的实例代码;也可以点击链接到 GitHub 查看源代码,或在右侧发表评论。
/**
 * Rotates (and optionally mirrors) an NV21 frame, then JPEG-compresses the
 * requested crop region at quality 80.
 *
 * @param data           raw NV21 frame
 * @param width          frame width in pixels (pre-rotation)
 * @param height         frame height in pixels (pre-rotation)
 * @param rotation       rotation in degrees; 90/270 swap the output dimensions
 * @param croppingRect   region (in rotated coordinates) to compress
 * @param flipHorizontal whether to mirror the frame horizontally
 * @return JPEG bytes of the cropped, rotated frame
 * @throws IOException propagated from the rotation/compression pipeline
 */
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
        throws IOException {
    // Rotate the raw NV21 pixels first; cropping happens during compression.
    final byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
    // A 90- or 270-degree rotation swaps the frame's dimensions.
    final boolean dimensionsSwapped = rotation % 180 > 0;
    final int outWidth = dimensionsSwapped ? height : width;
    final int outHeight = dimensionsSwapped ? width : height;
    final YuvImage previewImage =
            new YuvImage(rotated, ImageFormat.NV21, outWidth, outHeight, null);
    final ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    previewImage.compressToJpeg(croppingRect, 80, jpegStream);
    final byte[] jpegBytes = jpegStream.toByteArray();
    jpegStream.close();
    return jpegBytes;
}
/**
 * Converts an NV21-style preview frame into a rotated RGB_565 Bitmap by
 * round-tripping through JPEG at quality 100.
 *
 * @param data   raw preview frame in the camera's preview format
 * @param camera camera that produced the frame (queried for size/format)
 * @return the decoded, rotated Bitmap
 */
private Bitmap convertBitmap(byte[] data, Camera camera) {
    // Fix: fetch parameters once. Camera.getParameters() is an IPC round-trip
    // to the camera service, and the original paid for it twice per frame.
    Camera.Parameters parameters = camera.getParameters();
    Camera.Size previewSize = parameters.getPreviewSize();
    YuvImage yuvimage = new YuvImage(
            data,
            parameters.getPreviewFormat(),
            previewSize.width,
            previewSize.height,
            null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 100, baos);
    byte[] rawImage = baos.toByteArray();
    BitmapFactory.Options options = new BitmapFactory.Options();
    // RGB_565 halves memory versus ARGB_8888; preview frames carry no alpha.
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length, options);
    Matrix m = new Matrix();
    // The rotation angle was tuned for one device. A production build should
    // compute the display orientation instead of relying on this fixed field.
    m.setRotate(-displayDegree);
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, true);
}
/**
 * Compresses the full frame of {@code src} as a JPEG and writes it to
 * {@code file}, logging how long the operation took.
 *
 * @param src     source YUV frame
 * @param quality JPEG quality, 0-100
 * @param file    destination file
 * @return true if compression and writing succeeded
 */
private static boolean saveJpg(YuvImage src, int quality, File file) {
    final long startNanos = System.nanoTime();
    try (FileOutputStream outputStream = new FileOutputStream(file)) {
        final Rect fullFrame = new Rect(0, 0, src.getWidth(), src.getHeight());
        final boolean succeeded = src.compressToJpeg(fullFrame, quality, outputStream);
        if (!succeeded) {
            // TODO(jiawen,samansari): Toast.
            Log.w(TAG, "Error saving JPEG to: " + file.getAbsolutePath());
        }
        final long elapsedNanos = System.nanoTime() - startNanos;
        Log.i(TAG, String.format("saveJpg took %f ms.", elapsedNanos * 1e-6f));
        return succeeded;
    } catch (IOException e) {
        // TODO(jiawen,samansari): Toast.
        Log.w(TAG, "Error saving JPEG image to: " + file.getAbsolutePath());
        return false;
    }
}
/**
 * Captures the current preview frame, applies the per-camera rotation from
 * preferences, and returns the encoded bytes.
 *
 * @param quality true to encode losslessly as PNG, false for JPEG at 80
 * @return encoded image bytes
 */
private byte[] imgToByte(boolean quality) {
    Camera.Parameters parameters = getParameters();
    int frameWidth = parameters.getPreviewSize().width;
    int frameHeight = parameters.getPreviewSize().height;
    YuvImage yuv = new YuvImage(getImage(), parameters.getPreviewFormat(), frameWidth, frameHeight, null);
    // Round-trip through JPEG: the simplest route from YUV to a Bitmap.
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, frameWidth, frameHeight), 100, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    Bitmap frame = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    // Apply the rotation configured for this camera in the app preferences.
    Matrix rotation = new Matrix();
    rotation.postRotate(PrefsController.instance.getPrefs().getCameraPrefs(cameraId).angle);
    frame = Bitmap.createBitmap(frame, 0, 0, frame.getWidth(), frame.getHeight(), rotation, true);
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    if (quality) {
        frame.compress(Bitmap.CompressFormat.PNG, 100, encoded);
    } else {
        frame.compress(Bitmap.CompressFormat.JPEG, 80, encoded);
    }
    return encoded.toByteArray();
}
/**
 * Converts an I420 video frame into a Bitmap, applying the frame's rotation.
 *
 * @param i420Frame source frame with planes, strides, size, and rotation
 * @return decoded, rotation-corrected Bitmap
 */
private Bitmap captureBitmapFromYuvFrame(I420Frame i420Frame) {
    YuvImage yuvImage = i420ToYuvImage(i420Frame.yuvPlanes,
            i420Frame.yuvStrides,
            i420Frame.width,
            i420Frame.height);
    // JPEG round-trip: compress the YUV frame, then decode to a Bitmap.
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    Rect fullFrame = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());
    yuvImage.compressToJpeg(fullFrame, 100, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    Bitmap decoded = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    // Bake the frame's rotation into the pixels.
    Matrix rotation = new Matrix();
    rotation.postRotate(i420Frame.rotationDegree);
    return Bitmap.createBitmap(decoded, 0, 0, decoded.getWidth(), decoded.getHeight(),
            rotation, true);
}
/**
 * Packs the separate y/u/v planes held by this object into a single NV21
 * buffer and writes it as a JPEG to {@code b}.
 * NV21 layout: full-resolution Y plane followed by interleaved V/U samples
 * at quarter resolution (one V,U pair per 2x2 pixel block).
 *
 * @param b destination stream for the JPEG bytes
 */
public void rawByteArray2RGBABitmap2(FileOutputStream b) {
    final int lumaSize = yuv_w * yuv_h;
    final byte[] nv21 = new byte[yuv_w * yuv_h * 3 / 2];
    // The luma plane copies over unchanged.
    System.arraycopy(y, 0, nv21, 0, lumaSize);
    // Interleave chroma: NV21 stores V before U.
    int dst = lumaSize;
    int src = 0;
    for (int row = 0; row < yuv_h / 2; row++) {
        for (int col = 0; col < yuv_w / 2; col++) {
            nv21[dst++] = v[src];
            nv21[dst++] = u[src++];
        }
    }
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, yuv_w, yuv_h, null);
    yuvImage.compressToJpeg(new Rect(0, 0, yuv_w, yuv_h), 100, b);
}
/**
 * Converts a CameraX {@link ImageProxy} into an OpenCV Mat cropped to the
 * scanner's framing rectangle and rotated 90 degrees.
 *
 * @param imageProxy YUV_420_888 frame from the camera
 * @return an RGBA Mat of the cropped frame, or null on failure
 */
private Mat ImagetoMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] plane = imageProxy.getPlanes();
    ByteBuffer yBuffer = plane[0].getBuffer(); // Y
    ByteBuffer uBuffer = plane[1].getBuffer(); // U
    ByteBuffer vBuffer = plane[2].getBuffer(); // V
    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();
    byte[] nv21 = new byte[ySize + uSize + vSize];
    // NV21 wants interleaved V,U after the Y plane, so V is copied before U.
    // NOTE(review): this only produces valid NV21 when the planes have no row
    // padding and the chroma planes are already interleaved (pixelStride == 2)
    // — confirm for the target devices.
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        // The (x, y) origin is (rect.top, rect.left): the framing rect appears
        // to be expressed in rotated display space while the bitmap is still in
        // sensor orientation — TODO confirm against getFramingRectInPreview.
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();
        // Fix: Mat's constructor takes (rows, cols) = (height, width). The
        // original passed (width, height); it happened to be harmless only
        // because Utils.bitmapToMat reallocates the Mat, but it was misleading.
        Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Converts an NV21 byte array into a Bitmap by round-tripping through JPEG.
 *
 * @param bytes  raw NV21 frame data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded Bitmap, or null if JPEG compression fails
 */
/**
 * Decodes an NV21 frame into a Bitmap via JPEG at quality 100.
 *
 * @param bytes  raw NV21 frame data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded Bitmap, or null if JPEG compression fails
 */
public static Bitmap getBitmapFromByte(byte[] bytes, int width, int height) {
    final YuvImage image = new YuvImage(bytes, ImageFormat.NV21, width, height, null);
    // Presize the stream to the raw frame length to reduce reallocations.
    final ByteArrayOutputStream os = new ByteArrayOutputStream(bytes.length);
    final boolean compressed = image.compressToJpeg(new Rect(0, 0, width, height), 100, os);
    if (!compressed) {
        return null;
    }
    final byte[] jpegBytes = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}
/**********************************************************************
 * getBitmapImageFromYUV returns a Bitmap from a camera frame supplied
 * in NV21 (YUV) format. Note that image formats and video formats are
 * not the same thing.
 *******************************************************************/
/**
 * Decodes an NV21 frame into an RGB_565 Bitmap via JPEG at quality 80.
 *
 * @param data   raw NV21 frame data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded Bitmap
 */
public static Bitmap getBitmapImageFromYUV(byte[] data, int width, int height) {
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, width, height), 80, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    BitmapFactory.Options decodeOptions = new BitmapFactory.Options();
    // RGB_565 halves memory versus ARGB_8888; camera frames carry no alpha.
    decodeOptions.inPreferredConfig = Bitmap.Config.RGB_565;
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length, decodeOptions);
}
/**
 * Decodes an {@code Nv21Image} into a Bitmap via JPEG at quality 100.
 *
 * @param nv21Image wrapper holding the NV21 bytes and frame dimensions
 * @return the decoded Bitmap
 */
@NonNull
private Bitmap getConvertedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    Rect fullFrame = new Rect(0, 0, nv21Image.width, nv21Image.height);
    yuvImage.compressToJpeg(fullFrame, 100, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}
/**
 * Decodes an NV21 preview frame into a Bitmap via JPEG at quality 80.
 *
 * @param data    raw NV21 preview frame
 * @param _camera camera that produced the frame (queried for preview size)
 * @return the decoded Bitmap, or null on failure
 */
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
    Camera.Size size = _camera.getParameters().getPreviewSize();
    try {
        YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
        // Fix: removed the old `image != null` guard — `new` never yields null.
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
        Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        stream.close();
        return bmp;
    } catch (Exception ex) {
        // Fix: the original swallowed the exception silently, hiding decode
        // failures. Log it; callers still see null as the failure signal.
        Log.w("decodeToBitMap", "Failed to decode preview frame", ex);
    }
    return null;
}
/**
 * Background task: JPEG-encodes a preview frame, publishes it to the shared
 * frame buffer (waking any waiter), updates bandwidth/FPS statistics, and
 * returns the JPEG bytes.
 *
 * @param objs objs[0] = byte[] NV21/preview frame, objs[1] = Camera.Parameters
 * @return JPEG bytes of the frame
 */
@Override
protected byte[] doInBackground(Object... objs) {
    byte[] frame = (byte[]) objs[0];
    Parameters parameters = (Parameters) objs[1];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();
    cameraImageSize = parameters.getPreviewSize();
    YuvImage image = new YuvImage(frame, parameters.getPreviewFormat(), cameraImageSize.width,
            cameraImageSize.height, null);
    ByteArrayOutputStream tmpBuffer = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, tmpBuffer);
    Log.d(LOG_TAG, "compression took: "
            + (System.currentTimeMillis()-frame_currentUpdateTime));
    // Fix: materialize the JPEG once. The original called toByteArray() twice,
    // allocating and copying the whole frame a second time just for the return
    // value. NOTE(review): the shared frameBuffer and the returned array now
    // alias the same bytes — assumes consumers treat frames as read-only.
    byte[] jpeg = tmpBuffer.toByteArray();
    synchronized (frameLock) {
        frameBuffer = jpeg;
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        // Wake one thread blocked waiting for a fresh frame.
        frameLock.notify();
    }
    int datasize = jpeg.length;
    frame_count++;
    frame_totalsize += datasize;
    if (frame_count % 50 == 0) {
        // NOTE(review): the "Current FPS" label actually reports Mbps for the
        // last frame interval; kept as-is to preserve log output.
        Log.d(LOG_TAG, "(IMG)\t" +
                "BW: " + 8.0 * frame_totalsize / (frame_currentUpdateTime - frame_firstUpdateTime) / 1000 +
                " Mbps\tCurrent FPS: " + 8.0 * datasize / (frame_currentUpdateTime - frame_prevUpdateTime) / 1000 + " Mbps\t" +
                "FPS: " + 1000.0 * frame_count / (frame_currentUpdateTime - frame_firstUpdateTime));
    }
    frame_prevUpdateTime = frame_currentUpdateTime;
    return jpeg;
}
/**
 * Face-detection worker: JPEG-decodes the latest NV21 preview frame into an
 * RGB_565 Bitmap, rotates/mirrors it for portrait orientation, and runs the
 * platform FaceDetector on it, storing the first detected face.
 */
@Override
public void run() {
    long t = System.currentTimeMillis();
    YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
            _previewSize.width, _previewSize.height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    if (!yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.width,
            _previewSize.height), 100, baos)) {
        Log.e("Camera", "compressToJpeg failed");
    }
    Log.i("Timing", "Compression finished: "
            + (System.currentTimeMillis() - t));
    t = System.currentTimeMillis();
    BitmapFactory.Options bfo = new BitmapFactory.Options();
    // FaceDetector requires RGB_565 bitmaps.
    bfo.inPreferredConfig = Bitmap.Config.RGB_565;
    _currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(
            baos.toByteArray()), null, bfo);
    Log.i("Timing", "Decode Finished: " + (System.currentTimeMillis() - t));
    t = System.currentTimeMillis();
    // Fix: the null check must run before the frame is used. The original
    // checked only after Bitmap.createBitmap(_currentFrame, ...), which would
    // already have thrown a NullPointerException on a failed decode.
    if (_currentFrame == null) {
        Log.e(FACEDETECTIONTHREAD_TAG, "Could not decode Image");
        return;
    }
    // Rotate so the frame suits portrait mode; preScale(-1, 1) mirrors it.
    Matrix matrix = new Matrix();
    matrix.postRotate(90);
    matrix.preScale(-1, 1);
    // The same Bitmap reference is replaced with the rotated copy.
    _currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
            _previewSize.width, _previewSize.height, matrix, false);
    Log.i("Timing",
            "Rotate, Create finished: " + (System.currentTimeMillis() - t));
    t = System.currentTimeMillis();
    FaceDetector d = new FaceDetector(_currentFrame.getWidth(),
            _currentFrame.getHeight(), 1);
    Face[] faces = new Face[1];
    d.findFaces(_currentFrame, faces);
    Log.i("Timing",
            "FaceDetection finished: " + (System.currentTimeMillis() - t));
    t = System.currentTimeMillis();
    // Only one face is requested; faces[0] is null when none was found.
    _currentFace = faces[0];
    Log.d(FACEDETECTIONTHREAD_TAG, "Found: " + faces[0] + " Faces");
}
/**
 * Camera preview callback used while recording a GIF capture.
 * Per frame: advances the capture progress, JPEG-encodes the preview frame,
 * decodes it to a Bitmap, rotates/mirrors/scales it to the capture size, and
 * appends it to the GIF encoder.
 */
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    if (capturing) {
        long now = System.currentTimeMillis();
        // Time since the previous frame; drives both the progress callback and
        // the GIF frame delay below.
        float duration = now - lastTick;
        t += duration;
        if (onCaptureListener != null) {
            onCaptureListener.onCaptureProgress(((float) t) / Constants.CAPTURE_DURATION);
        }
        Camera.Parameters parameters = camera.getParameters();
        Camera.Size size = parameters.getPreviewSize();
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
            size.width, size.height, null);
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        // JPEG round-trip: the simplest route from YUV to a Bitmap.
        image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, output);
        Bitmap bitmap = BitmapFactory.decodeByteArray(output.toByteArray(), 0, output.size());
        // realSized: frame already matches the output orientation (0/180 deg).
        boolean realSized = angle % 180 == 0;
        // NOTE(review): if CAPTURE_WIDTH/CAPTURE_HEIGHT are declared as ints,
        // this is integer division — confirm they are floats.
        float ratio = Constants.CAPTURE_WIDTH / Constants.CAPTURE_HEIGHT;
        // Source crop dimensions that preserve the capture aspect ratio; for
        // 90/270-degree frames width and height trade roles.
        float srcWidth = realSized ? image.getWidth() : (float) image.getHeight() / ratio;
        float srcHeight = realSized ? (float) image.getWidth() / ratio : image.getHeight();
        float scaleFactor = realSized ? Constants.CAPTURE_WIDTH / image.getWidth() : Constants.CAPTURE_WIDTH / image.getHeight();
        Matrix matrix = new Matrix();
        // Front-camera frames are mirrored, hence the negated angle and X scale.
        matrix.postRotate(isFrontCamera ? -angle : angle);
        matrix.postScale(isFrontCamera ? -scaleFactor : scaleFactor, scaleFactor);
        // Center the crop region within the decoded frame.
        int startX = (int) (image.getWidth() - srcWidth)/2;
        int startY = (int) (image.getHeight() - srcHeight)/2;
        Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, startX, startY, (int) srcWidth, (int) srcHeight, matrix, true);
        // GIF frame delay is wall-clock time compressed by the acceleration factor.
        gifEncoder.setDelay((int) (duration / Constants.CAPTURE_ACCELERATION));
        gifEncoder.addFrame(rotatedBitmap);
        rotatedBitmap.recycle();
        bitmap.recycle();
        lastTick = now;
    }
}
/**
 * Decodes a preview frame to a Bitmap, crops it to {@code rect} (clamped to
 * the bitmap bounds), rotates it +/-90 degrees depending on camera facing,
 * converts to ARGB_8888, and downscales so the longer edge is at most 800 px.
 *
 * NOTE(review): {@code rect} is clamped in place, so the caller's Rect is
 * mutated by this method.
 */
public Bitmap getBitMapWithRect(byte[] data, Camera camera, boolean mIsFrontalCamera,Rect rect) {
    int width = camera.getParameters().getPreviewSize().width;
    int height = camera.getParameters().getPreviewSize().height;
    YuvImage yuvImage = new YuvImage(data, camera.getParameters()
            .getPreviewFormat(), width, height, null);
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80,
            byteArrayOutputStream);
    byte[] jpegData = byteArrayOutputStream.toByteArray();
    // Decode the JPEG produced from the preview frame.
    Bitmap tmpBitmap = BitmapFactory.decodeByteArray(jpegData, 0,
            jpegData.length);
    // Log.e("xie", "getbitmap width"+tmpBitmap.getWidth()+"rect="+rect );
    // Clamp the crop origin so it lies within the bitmap.
    if (rect.top<0){
        rect.top=0;
    }
    if (rect.top>tmpBitmap.getHeight()){
        rect.top=tmpBitmap.getHeight();
    }
    if (rect.left<0){
        rect.left=0;
    }
    if (rect.left>tmpBitmap.getWidth()){
        rect.left=tmpBitmap.getWidth();
    }
    // Shrink the crop size if the rect extends past the right/bottom edge.
    int widthRect=rect.right-rect.left;
    if(rect.right>tmpBitmap.getWidth()){
        widthRect=tmpBitmap.getWidth()-rect.left;
    }
    int heightRect=rect.bottom-rect.top;
    if(rect.bottom>tmpBitmap.getHeight()){
        heightRect=tmpBitmap.getHeight()-rect.top;
    }
    // Log.i("xie","xie rect"+rect+"wid"+widthRect+"height"+heightRect);
    tmpBitmap = Bitmap.createBitmap(tmpBitmap, rect.left, rect.top, widthRect,
            heightRect);
    // Front camera frames are mirrored, so they rotate the opposite way.
    Matrix matrix = new Matrix();
    matrix.reset();
    if (mIsFrontalCamera) {
        matrix.setRotate(-90);
    } else {
        matrix.setRotate(90);
    }
    tmpBitmap = Bitmap.createBitmap(tmpBitmap, 0, 0, tmpBitmap.getWidth(),
            tmpBitmap.getHeight(), matrix, true);
    // Log.e("xie", "getbitmap temp"+tmpBitmap.getWidth()+"asdhe "+tmpBitmap.getHeight() );
    tmpBitmap = tmpBitmap.copy(Bitmap.Config.ARGB_8888, true);
    // Downscale so the longer edge is at most 800 px.
    int hight = tmpBitmap.getHeight() > tmpBitmap.getWidth() ? tmpBitmap
            .getHeight() : tmpBitmap.getWidth();
    float scale = hight / 800.0f;
    if (scale > 1) {
        tmpBitmap = Bitmap.createScaledBitmap(tmpBitmap,
                (int) (tmpBitmap.getWidth() / scale),
                (int) (tmpBitmap.getHeight() / scale), false);
    }
    return tmpBitmap;
}
/**
 * Converts a raw camera frame to JPEG bytes where the format allows it.
 * Only NV21 and YUY2 are compressed directly (via YuvImage at quality 20);
 * YUV_420_888 is delegated to a helper; every other recognized format
 * returns null, and an unrecognized format throws.
 *
 * @param imageData   raw frame bytes; null yields null
 * @param imageFormat one of the android.graphics.ImageFormat constants
 * @param width       frame width in pixels
 * @param height      frame height in pixels
 * @return JPEG bytes, or null when the format cannot be converted here
 * @throws IllegalArgumentException for an unknown image format value
 */
public static byte[] ToJpeg(byte[] imageData, int imageFormat, int width, int height)
{
    if (imageData == null)
        return null;
    switch (imageFormat)
    {
        case ImageFormat.NV21:
        case ImageFormat.YUY2:
            YuvImage img = new YuvImage(imageData, imageFormat, width, height, null);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            int quality = 20; //set quality
            img.compressToJpeg(new Rect(0, 0, width, height), quality, baos);//this line decreases the image quality
            return baos.toByteArray();
        case ImageFormat.YUV_420_888:
            return JpegFromYuv420888(imageData, imageFormat, width, height);
        case ImageFormat.UNKNOWN:
            return null;
        case ImageFormat.NV16:
            // Source: http://www.programcreek.com/java-api-examples/index.php?source_dir=Roid-Library-master/src/com/rincliu/library/common/persistence/zxing/camera/CameraManager.java
            // This format has never been seen in the wild, but is compatible as we only care
            // about the Y channel, so allow it.
        case ImageFormat.YV12:
            // source: https://github.com/evopark/tiqr-android/blob/master/src/main/java/de/evopark/tiqr/android/processing/ZxingQrScanner.java
        case ImageFormat.YUV_422_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
        case ImageFormat.YUV_444_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
            return null;//new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
        case ImageFormat.FLEX_RGB_888:
        case ImageFormat.FLEX_RGBA_8888:
            return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.JPEG:
            // Tried and tested myself
            return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.RGB_565:
            return null;//new RGB565(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
        case ImageFormat.DEPTH16:
        case ImageFormat.DEPTH_POINT_CLOUD:
            //ImageFormat.Y8:
            //ImageFormat.Y16:
            return null;
        default:
            throw new IllegalArgumentException("No support for image format " + imageFormat);
    }
}
/**
 * Captures a Bitmap from a texture-backed I420 frame: converts the GL texture
 * to packed I420 bytes, splits them into Y/U/V planes, compresses to JPEG,
 * and decodes the JPEG into a Bitmap.
 *
 * Must run on a thread with an active EGL context (see the YuvConverter note
 * below).
 */
private Bitmap captureBitmapFromTexture(I420Frame i420Frame) {
    int width = i420Frame.rotatedWidth();
    int height = i420Frame.rotatedHeight();
    // Packed I420 size: full-res Y plane plus two quarter-res chroma planes.
    int outputFrameSize = width * height * 3 / 2;
    ByteBuffer outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
    final float frameAspectRatio = (float) i420Frame.rotatedWidth() /
            (float) i420Frame.rotatedHeight();
    // Bake the frame rotation into the texture sampling matrix.
    final float[] rotatedSamplingMatrix =
            RendererCommon.rotateTextureMatrix(i420Frame.samplingMatrix,
                    i420Frame.rotationDegree);
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(false,
            frameAspectRatio,
            (float) width / height);
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix,
            layoutMatrix);
    /*
     * YuvConverter must be instantiated on a thread that has an active EGL context. We know
     * that renderFrame is called from the correct render thread therefore
     * we defer instantiation of the converter until frame arrives.
     */
    YuvConverter yuvConverter = new YuvConverter();
    yuvConverter.convert(outputFrameBuffer,
            width,
            height,
            width,
            i420Frame.textureId,
            texMatrix);
    // Now we need to unpack the YUV data into planes.
    // NOTE(review): array() on a direct ByteBuffer throws unless the platform
    // backs it with an accessible array (hasArray()) — appears to rely on
    // Android's direct-buffer implementation; confirm on target API levels.
    byte[] data = outputFrameBuffer.array();
    int offset = outputFrameBuffer.arrayOffset();
    int stride = width;
    ByteBuffer[] yuvPlanes = new ByteBuffer[] {
            ByteBuffer.allocateDirect(width * height),
            ByteBuffer.allocateDirect(width * height / 4),
            ByteBuffer.allocateDirect(width * height / 4)
    };
    int[] yuvStrides = new int[] {
            width,
            (width + 1) / 2,
            (width + 1) / 2
    };
    // Write Y: the first width*height bytes are the luma plane.
    yuvPlanes[0].put(data, offset, width * height);
    // Write U: rows [height, 3h/2) hold U in the left half of each row.
    for (int r = height ; r < height * 3 / 2; ++r) {
        yuvPlanes[1].put(data, offset + r * stride, stride / 2);
    }
    // Write V: same rows, right half of each row.
    for (int r = height ; r < height * 3 / 2 ; ++r) {
        yuvPlanes[2].put(data, offset + r * stride + stride / 2, stride / 2);
    }
    // Convert the planes to a YuvImage.
    YuvImage yuvImage = i420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    Rect rect = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());
    // Compress YuvImage to jpeg.
    yuvImage.compressToJpeg(rect, 100, stream);
    // Convert jpeg to Bitmap.
    byte[] imageBytes = stream.toByteArray();
    // Release YUV Converter.
    yuvConverter.release();
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
/**
 * Encodes a full NV21 frame as JPEG.
 *
 * @param nv21    raw NV21 frame data
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param quality JPEG quality, 0-100
 * @return the JPEG bytes
 */
private static byte[] NV21toJPEG(byte[] nv21, int width, int height, int quality) {
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), quality, jpegStream);
    return jpegStream.toByteArray();
}
/**
 * Converts Android's NV21 camera format into an ARGB Bitmap.
 *
 * @param nv21   raw NV21 frame data as delivered by the camera
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded ARGB Bitmap
 */
/**
 * Decodes an NV21 frame into an ARGB Bitmap via JPEG at quality 100.
 *
 * @param nv21   raw NV21 frame data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded Bitmap
 */
static public Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height) {
    final YuvImage source = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    final ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    source.compressToJpeg(new Rect(0, 0, width, height), 100, jpegStream);
    final byte[] jpegBytes = jpegStream.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}
/**
 * Converts Android's NV21 camera format into an ARGB Bitmap.
 *
 * @param nv21   raw NV21 frame data as delivered by the camera
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded ARGB Bitmap
 */
/**
 * Decodes an NV21 frame into an ARGB Bitmap via JPEG at quality 100.
 * NOTE(review): this duplicates the identical method earlier in this snippet
 * collection; both cannot live in one class.
 *
 * @param nv21   raw NV21 frame data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the decoded Bitmap
 */
static public Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height) {
    final YuvImage frame = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    frame.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    final byte[] encoded = out.toByteArray();
    return BitmapFactory.decodeByteArray(encoded, 0, encoded.length);
}