android.graphics.YuvImage code examples

The examples below show how to use the android.graphics.YuvImage API in real projects; follow each project's link to view the full source on GitHub.
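
Nearly all of the snippets below follow the same pattern: wrap a raw NV21 preview buffer in a YuvImage, compress a region of it to an in-memory JPEG, and decode that JPEG back into a Bitmap. Here is a minimal, self-contained sketch of the pattern (the class and method names are illustrative and not taken from any of the projects below):

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

public final class YuvExample {

    /** Converts an NV21 preview buffer into a Bitmap by way of an in-memory JPEG. */
    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height, int jpegQuality) {
        // strides == null tells YuvImage to assume tightly packed rows
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // compress the whole frame; pass a smaller Rect to crop before encoding
        if (!yuvImage.compressToJpeg(new Rect(0, 0, width, height), jpegQuality, out)) {
            return null;
        }
        byte[] jpegBytes = out.toByteArray();
        return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    }

    private YuvExample() {}
}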

Example 1   Project: mollyim-android   File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
private Bitmap convertBitmap(byte[] data, Camera camera) {
    Camera.Size previewSize = camera.getParameters().getPreviewSize();
    YuvImage yuvimage = new YuvImage(
            data,
            camera.getParameters().getPreviewFormat(),
            previewSize.width,
            previewSize.height,
            null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 100, baos);
    byte[] rawImage = baos.toByteArray();
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length, options);
    Matrix m = new Matrix();
    // On my device the image needs this rotation to come out right; if it is wrong on your device, adjust it yourself.
    // Don't write it this way in a production project: the orientation should be computed instead. Computing the
    // YuvImage orientation is more involved, so it is not done here (see the sketch after this method).
    m.setRotate(-displayDegree);
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, true);
}
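
The hard-coded displayDegree above can be computed instead. Below is a sketch of the usual recipe from the Camera.setDisplayOrientation() documentation, assuming the legacy android.hardware.Camera API and an Activity to read the display rotation from (the method name is illustrative):

// Sketch: derive the preview rotation from the camera sensor orientation and the
// current display rotation, following the Camera.setDisplayOrientation() recipe.
private static int computeDisplayDegree(Activity activity, int cameraId) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:   degrees = 0;   break;
        case Surface.ROTATION_90:  degrees = 90;  break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        int result = (info.orientation + degrees) % 360;
        return (360 - result) % 360; // compensate for the front-camera mirror
    } else {
        return (info.orientation - degrees + 360) % 360;
    }
}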
 
Example 3   Project: libsoftwaresync   File: ResultProcessor.java
private boolean saveJpg(YuvImage yuvImage, File jpgFile) {
  // Save JPEG and also add to the photos gallery by inserting into MediaStore.
  long t0 = System.nanoTime();
  if (saveJpg(yuvImage, jpgQuality, jpgFile)) {
    try {
      MediaStore.Images.Media.insertImage(
          context.getContentResolver(),
          jpgFile.getAbsolutePath(),
          jpgFile.getName(),
          "Full path: " + jpgFile.getAbsolutePath());
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Unable to find file to link in media store.");
    }
    long t1 = System.nanoTime();
    Log.i(TAG, String.format("Saving JPG to disk took %f ms.", (t1 - t0) * 1e-6f));
    context.notifyCaptured(jpgFile.getName());
    return true;
  }
  return false;
}
 
Example 4   Project: libsoftwaresync   File: ResultProcessor.java
private static boolean saveJpg(YuvImage src, int quality, File file) {
  long t0 = System.nanoTime();
  try (FileOutputStream outputStream = new FileOutputStream(file)) {
    Rect rect = new Rect(0, 0, src.getWidth(), src.getHeight());
    boolean ok = src.compressToJpeg(rect, quality, outputStream);
    if (!ok) {
      // TODO(jiawen,samansari): Toast.
      Log.w(TAG, "Error saving JPEG to: " + file.getAbsolutePath());
    }
    long t1 = System.nanoTime();
    Log.i(TAG, String.format("saveJpg took %f ms.", (t1 - t0) * 1e-6f));
    return ok;
  } catch (IOException e) {
    // TODO(jiawen,samansari): Toast.
    Log.w(TAG, "Error saving JPEG image to: " + file.getAbsolutePath());
    return false;
  }
}
 
Example 5   Project: Telephoto   File: ImageShot.java
private byte[] imgToByte(boolean quality) {
    Camera.Parameters parameters = getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    YuvImage yuv = new YuvImage(getImage(), parameters.getPreviewFormat(), width, height, null);
    ByteArrayOutputStream out =
            new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);

    byte[] compressed = out.toByteArray();

    Bitmap newBmp = BitmapFactory.decodeByteArray(compressed, 0, compressed.length);
    Matrix mat = new Matrix();
    mat.postRotate(PrefsController.instance.getPrefs().getCameraPrefs(cameraId).angle);
    newBmp = Bitmap.createBitmap(newBmp, 0, 0, newBmp.getWidth(), newBmp.getHeight(), mat, true);
    ByteArrayOutputStream out2 = new ByteArrayOutputStream();
    if (quality) {
        newBmp.compress(Bitmap.CompressFormat.PNG, 100, out2);
    } else {
        newBmp.compress(Bitmap.CompressFormat.JPEG, 80, out2);
    }

    return out2.toByteArray();
}
 
Example 6   Project: PHONK   File: LearnImages.java
private Bitmap cameraDataToBmp(byte[] data, Camera camera) {
    // transform camera data to bmp
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    // use the preview format reported by the camera (typically NV21)
    YuvImage yuv = new YuvImage(data, parameters.getPreviewFormat(), width, height, null);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // maybe pass the output to the callbacks and do each compression there?
    yuv.compressToJpeg(new Rect(0, 0, (int) Math.floor(width * 0.2), (int) Math.floor(height * 0.2)), 100, out);
    byte[] bytes = out.toByteArray();
    BitmapFactory.Options bitmap_options = new BitmapFactory.Options();
    bitmap_options.inPreferredConfig = Bitmap.Config.RGB_565;
    final Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, bitmap_options);
    return bmp;
}
 
private Bitmap captureBitmapFromYuvFrame(I420Frame i420Frame) {
    YuvImage yuvImage = i420ToYuvImage(i420Frame.yuvPlanes,
            i420Frame.yuvStrides,
            i420Frame.width,
            i420Frame.height);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    Rect rect = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());

    // Compress YuvImage to jpeg
    yuvImage.compressToJpeg(rect, 100, stream);

    // Convert jpeg to Bitmap
    byte[] imageBytes = stream.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    Matrix matrix = new Matrix();

    // Apply any needed rotation
    matrix.postRotate(i420Frame.rotationDegree);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix,
            true);

    return bitmap;
}
 
private YuvImage fastI420ToYuvImage(ByteBuffer[] yuvPlanes,
                                    int[] yuvStrides,
                                    int width,
                                    int height) {
    byte[] bytes = new byte[width * height * 3 / 2];
    int i = 0;
    for (int row = 0 ; row < height ; row++) {
        for (int col = 0 ; col < width ; col++) {
            bytes[i++] = yuvPlanes[0].get(col + row * yuvStrides[0]);
        }
    }
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2; col++) {
            bytes[i++] = yuvPlanes[2].get(col + row * yuvStrides[2]);
            bytes[i++] = yuvPlanes[1].get(col + row * yuvStrides[1]);
        }
    }
    return new YuvImage(bytes, NV21, width, height, null);
}
 
Example 9   Project: quickstart-android   File: BitmapUtils.java
@Nullable
public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
    data.rewind();
    byte[] imageInBuffer = new byte[data.limit()];
    data.get(imageInBuffer, 0, imageInBuffer.length);
    try {
        YuvImage image =
                new YuvImage(
                        imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream);

            Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

            stream.close();
            return rotateBitmap(bmp, metadata.getRotation(), metadata.getCameraFacing());
        }
    } catch (Exception e) {
        Log.e("VisionProcessorBase", "Error: " + e.getMessage());
    }
    return null;
}
 
Example 10   Project: Viewer   File: MyRenderer.java
public void rawByteArray2RGBABitmap2(FileOutputStream b)
{
	int yuvi = yuv_w * yuv_h;
	int uvi = 0;
	byte[] yuv = new byte[yuv_w * yuv_h * 3 / 2];
	System.arraycopy(y, 0, yuv, 0, yuvi);
	for (int i = 0; i < yuv_h / 2; i++)
	{
		for (int j = 0; j < yuv_w / 2; j++)
		{
			yuv[yuvi++] = v[uvi];
			yuv[yuvi++] = u[uvi++];
		}
	}
	YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, yuv_w, yuv_h, null);
	Rect rect = new Rect(0, 0, yuv_w, yuv_h);
	yuvImage.compressToJpeg(rect, 100, b);
}
 
Example 11   Project: BluetoothCameraAndroid   File: CameraModelImpl.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    if (mInitAck && mDataAck) {
        mInitAck = false;
        mDataAck = false;
        previewMissedCount = 0;
        ThreadHandler.getInstance().doInBackground(new Runnable() {
            @Override
            public void run() {
                Camera.Size size = camera.getParameters().getPreviewSize();
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                yuvImage.compressToJpeg(new Rect(0, 0, size.width, size.height), 50, out);
                byte[] imageBytes = out.toByteArray();
                mBluetoothHandler.write(BluetoothHandler.DATA_START.getBytes());
                mPendingImageBytes = imageBytes;
            }
        });
    } else {
        previewMissedCount++;
        if (previewMissedCount > 50) {
            mInitAck = true;
            mDataAck = true;
        }
    }
}
 
Example 12   Project: deltachat-android   File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example 13   Project: Silence   File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
 
Example 14   Project: LPR   File: CameraAnalyzer.java
private Mat ImagetoMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] plane = imageProxy.getPlanes();
    ByteBuffer yBuffer = plane[0].getBuffer();  // Y
    ByteBuffer uBuffer = plane[1].getBuffer();  // U
    ByteBuffer vBuffer = plane[2].getBuffer();  // V

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];

    //U and V are swapped
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();
        Mat mat = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC4);
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
 
Example 15   Project: IDCardCamera   File: ImageUtils.java
/**
 * Converts an NV21 byte[] into a Bitmap.
 *
 * @param bytes  NV21 image data
 * @param width  image width in pixels
 * @param height image height in pixels
 * @return the decoded Bitmap, or null if JPEG compression fails
 */
public static Bitmap getBitmapFromByte(byte[] bytes, int width, int height) {
    final YuvImage image = new YuvImage(bytes, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream(bytes.length);
    if (!image.compressToJpeg(new Rect(0, 0, width, height), 100, os)) {
        return null;
    }
    byte[] tmp = os.toByteArray();
    Bitmap bmp = BitmapFactory.decodeByteArray(tmp, 0, tmp.length);
    return bmp;
}
 
Example 16   Project: haven   File: MotionDetector.java
public static Bitmap convertImage (byte[] nv21bytearray, int width, int height)
{
	YuvImage yuvImage = new YuvImage(nv21bytearray, ImageFormat.NV21, width, height, null);
	ByteArrayOutputStream os = new ByteArrayOutputStream();
	yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, os);
	byte[] jpegByteArray = os.toByteArray();
	Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
	return bitmap;
}
 
Example 17   Project: MegviiFacepp-Android-SDK   File: ConUtil.java
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
	Camera.Size size = _camera.getParameters().getPreviewSize();
	try {
		YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
		if (image != null) {
			ByteArrayOutputStream stream = new ByteArrayOutputStream();
			image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
			Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
			stream.close();
			return bmp;
		}
	} catch (Exception ex) {
	}
	return null;
}
 
Example 18   Project: MegviiFacepp-Android-SDK   File: ICamera.java
public Bitmap getBitMap(byte[] data, Camera camera, boolean mIsFrontalCamera) {
	int width = camera.getParameters().getPreviewSize().width;
	int height = camera.getParameters().getPreviewSize().height;
	YuvImage yuvImage = new YuvImage(data, camera.getParameters()
			.getPreviewFormat(), width, height, null);
	ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
	yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80,
			byteArrayOutputStream);
	byte[] jpegData = byteArrayOutputStream.toByteArray();
	// decode the captured JPEG data into a bitmap
	Bitmap tmpBitmap = BitmapFactory.decodeByteArray(jpegData, 0,
			jpegData.length);
	Matrix matrix = new Matrix();
	matrix.reset();
	if (mIsFrontalCamera) {
		matrix.setRotate(-90);
	} else {
		matrix.setRotate(90);
	}
	tmpBitmap = Bitmap.createBitmap(tmpBitmap, 0, 0, tmpBitmap.getWidth(),
			tmpBitmap.getHeight(), matrix, true);
	tmpBitmap = tmpBitmap.copy(Bitmap.Config.ARGB_8888, true);

	int longerSide = Math.max(tmpBitmap.getWidth(), tmpBitmap.getHeight());

	float scale = longerSide / 800.0f;

	if (scale > 1) {
		tmpBitmap = Bitmap.createScaledBitmap(tmpBitmap,
				(int) (tmpBitmap.getWidth() / scale),
				(int) (tmpBitmap.getHeight() / scale), false);
	}
	return tmpBitmap;
}
 
private YuvImage i420ToYuvImage(ByteBuffer[] yuvPlanes,
                                int[] yuvStrides,
                                int width,
                                int height) {
    if (yuvStrides[0] != width) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[1] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[2] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }

    byte[] bytes = new byte[yuvStrides[0] * height +
            yuvStrides[1] * height / 2 +
            yuvStrides[2] * height / 2];
    ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, width * height);
    copyPlane(yuvPlanes[0], tmp);

    byte[] tmpBytes = new byte[width / 2 * height / 2];
    tmp = ByteBuffer.wrap(tmpBytes, 0, width / 2 * height / 2);

    copyPlane(yuvPlanes[2], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2]
                    = tmpBytes[row * width / 2 + col];
        }
    }
    copyPlane(yuvPlanes[1], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2 + 1] =
                    tmpBytes[row * width / 2 + col];
        }
    }
    return new YuvImage(bytes, NV21, width, height, null);
}
 
Example 20   Project: camerakit-android   File: ProcessStillTask.java
@Override
public void run() {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;
    byte[] rotatedData = new Rotation(data, width, height, rotation).getYuv();

    int postWidth;
    int postHeight;

    switch (rotation) {
        case 90:
        case 270:
            postWidth = height;
            postHeight = width;
            break;

        case 0:
        case 180:
        default:
            postWidth = width;
            postHeight = height;
            break;
    }

    YuvImage yuv = new YuvImage(rotatedData, parameters.getPreviewFormat(), postWidth, postHeight, null);

    onStillProcessedListener.onStillProcessed(yuv);
}
 
Example 21   Project: quickstart-android   File: CustomImageClassifier.java
/**
 * Resizes image data from {@code ByteBuffer}.
 */
private Bitmap createResizedBitmap(ByteBuffer buffer, int width, int height) {
    YuvImage img = new YuvImage(buffer.array(), ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    img.compressToJpeg(new Rect(0, 0, img.getWidth(), img.getHeight()), 50, out);
    byte[] imageBytes = out.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    return Bitmap.createScaledBitmap(bitmap, DIM_IMG_SIZE_X, DIM_IMG_SIZE_Y, true);
}
 
Example 22   Project: easyrs   File: YuvToRgbTest.java
@NonNull
private Bitmap getExpectedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
 
Example 23   Project: easyrs   File: Nv21ImageTest.java
@NonNull
private Bitmap getConvertedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
 
Example 24   Project: MultiMediaSample   File: ColorFormatUtil.java
public static Bitmap convertYUV420sp2RGB(byte[] yuv, int width, int height) {
    // YUV420sp (NV21) to RGB conversion, roughly 5-60 ms
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    byte[] imageBytes = out.toByteArray();
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
 
Example 25   Project: faceswap   File: VideoStreamingThread.java
@Override
protected byte[] doInBackground(Object... objs) {
    byte[] frame = (byte[]) objs[0];
    Parameters parameters = (Parameters) objs[1];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();

    int datasize = 0;
    cameraImageSize = parameters.getPreviewSize();
    YuvImage image = new YuvImage(frame, parameters.getPreviewFormat(), cameraImageSize.width,
            cameraImageSize.height, null);
    ByteArrayOutputStream tmpBuffer = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, tmpBuffer);
    Log.d(LOG_TAG, "compression took: "
            + (System.currentTimeMillis()-frame_currentUpdateTime));
    synchronized (frameLock) {
        frameBuffer = tmpBuffer.toByteArray();
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        frameLock.notify();
    }
    datasize = tmpBuffer.size();
    frame_count++;
    frame_totalsize += datasize;
    if (frame_count % 50 == 0) {
        Log.d(LOG_TAG, "(IMG)\t" +
                "BW: " + 8.0 * frame_totalsize / (frame_currentUpdateTime - frame_firstUpdateTime) / 1000 +
                " Mbps\tCurrent FPS: " + 8.0 * datasize / (frame_currentUpdateTime - frame_prevUpdateTime) / 1000 + " Mbps\t" +
                "FPS: " + 1000.0 * frame_count / (frame_currentUpdateTime - frame_firstUpdateTime));
    }
    frame_prevUpdateTime = frame_currentUpdateTime;
    return tmpBuffer.toByteArray();
}
 
Example 26   Project: FastBarcodeScanner   File: ImageDecoder.java
private static Bitmap NV21ToBitmap(byte[] nv21Bytes, int width, int height)
{
    YuvImage yuv = new YuvImage(nv21Bytes, ImageFormat.NV21, width, height, null);

    // width and height define the size of the preview frame
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 50, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();

    Bitmap bitmap= BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);

    return bitmap;
}
 
Example 27   Project: sensorhub   File: AndroidCameraOutput.java
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
    long timeStamp = SystemClock.elapsedRealtimeNanos();
    
    // select current buffer
    YuvImage yuvImg = (data == imgBuf1) ? yuvImg1 : yuvImg2;
    
    // compress as JPEG
    jpegBuf.reset();
    yuvImg.compressToJpeg(imgArea, 90, jpegBuf);
    
    // release buffer for next frame
    camera.addCallbackBuffer(data);
    
    // generate new data record
    DataBlock newRecord;
    if (latestRecord == null)
        newRecord = dataStruct.createDataBlock();
    else
        newRecord = latestRecord.renew();
    
    // set time stamp
    double samplingTime = getJulianTimeStamp(timeStamp);
    newRecord.setDoubleValue(0, samplingTime);
    
    // set encoded data
    AbstractDataBlock frameData = ((DataBlockMixed)newRecord).getUnderlyingObject()[1];
    frameData.setUnderlyingObject(jpegBuf.toByteArray());
    
    // send event
    latestRecord = newRecord;
    latestRecordTime = System.currentTimeMillis();
    eventHandler.publishEvent(new SensorDataEvent(latestRecordTime, AndroidCameraOutput.this, latestRecord));          
}
 
public void takeSnapshot(final int quality) {
  mCamera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] bytes, Camera camera) {
      try {
        Camera.Parameters parameters = camera.getParameters();
        Camera.Size size = parameters.getPreviewSize();
        int orientation = mPreview.getDisplayOrientation();
        if (mPreview.getCameraFacing() == Camera.CameraInfo.CAMERA_FACING_FRONT) {
          bytes = rotateNV21(bytes, size.width, size.height, (360 - orientation) % 360);
        } else {
          bytes = rotateNV21(bytes, size.width, size.height, orientation);
        }
        // switch width/height when rotating 90/270 deg
        Rect rect = orientation == 90 || orientation == 270 ?
          new Rect(0, 0, size.height, size.width) :
          new Rect(0, 0, size.width, size.height);
        YuvImage yuvImage = new YuvImage(bytes, parameters.getPreviewFormat(), rect.width(), rect.height(), null);
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, quality, byteArrayOutputStream);
        byte[] data = byteArrayOutputStream.toByteArray();
        byteArrayOutputStream.close();
        eventListener.onSnapshotTaken(Base64.encodeToString(data, Base64.NO_WRAP));
      } catch (IOException e) {
        Log.d(TAG, "CameraPreview IOException");
        eventListener.onSnapshotTakenError("IO Error");
      } finally {

        mCamera.setPreviewCallback(null);
      }
    }
  });
}
 
@Override
public void onNewRawImage(byte[] data, Size size) {
  Preconditions.checkNotNull(data);
  Preconditions.checkNotNull(size);
  if (data != rawImageBuffer || !size.equals(rawImageSize)) {
    rawImageBuffer = data;
    rawImageSize = size;
    yuvImage = new YuvImage(rawImageBuffer, ImageFormat.NV21, size.width, size.height, null);
    rect = new Rect(0, 0, size.width, size.height);
  }

  Time currentTime = connectedNode.getCurrentTime();
  String frameId = "camera";

  sensor_msgs.CompressedImage image = imagePublisher.newMessage();
  image.setFormat("jpeg");
  image.getHeader().setStamp(currentTime);
  image.getHeader().setFrameId(frameId);

  Preconditions.checkState(yuvImage.compressToJpeg(rect, 20, stream));
  image.setData(stream.buffer().copy());
  stream.buffer().clear();

  imagePublisher.publish(image);

  sensor_msgs.CameraInfo cameraInfo = cameraInfoPublisher.newMessage();
  cameraInfo.getHeader().setStamp(currentTime);
  cameraInfo.getHeader().setFrameId(frameId);

  cameraInfo.setWidth(size.width);
  cameraInfo.setHeight(size.height);
  cameraInfoPublisher.publish(cameraInfo);
}
 
Example 30   Project: LiveMultimedia   File: FrameCatcher.java
/**********************************************************************
 * getBitmapImageFromYUV returns a bitmap from an image captured in
 * the camera in YUV12 format. Image formats and video formats are not
 *  the same thing.
 *******************************************************************/
public static Bitmap getBitmapImageFromYUV(byte[] data, int width, int height) {
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, width, height), 80, baos);
    byte[] jdata = baos.toByteArray();
    BitmapFactory.Options bitmapFatoryOptions = new BitmapFactory.Options();
    bitmapFatoryOptions.inPreferredConfig = Bitmap.Config.RGB_565;
    return  BitmapFactory.decodeByteArray(jdata, 0, jdata.length, bitmapFatoryOptions);
}
 