android.media.MediaScannerConnection.OnScanCompletedListener#android.graphics.ImageFormat Source Code Examples

Below are example usages of android.graphics.ImageFormat collected from open-source projects. Follow each project link to view the full source on GitHub.

Example 1 (Project: samples-android, File: CameraSource.java)
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
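
For context, here is a minimal sketch of how buffers produced by createPreviewBuffer() are typically registered with the deprecated android.hardware.Camera API. The camera instance, buffer count, and wiring below are illustrative assumptions, not code from the project above.

// Illustrative only: recycle preview buffers through the legacy camera pipeline.
void attachPreviewBuffers(Camera camera, Size previewSize) {
    camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera cam) {
            // ... hand the frame to the detector ...
            cam.addCallbackBuffer(data); // return the buffer for the next frame
        }
    });
    for (int i = 0; i < 4; i++) { // the buffer count is an arbitrary choice here
        camera.addCallbackBuffer(createPreviewBuffer(previewSize));
    }
}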
 
Example 2 (Project: Telegram-FOSS, File: InstantCameraView.java)
private void createCamera(final SurfaceTexture surfaceTexture) {
    AndroidUtilities.runOnUIThread(() -> {
        if (cameraThread == null) {
            return;
        }
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("create camera session");
        }

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        cameraSession = new CameraSession(selectedCamera, previewSize, pictureSize, ImageFormat.JPEG);
        cameraThread.setCurrentSession(cameraSession);
        CameraController.getInstance().openRound(cameraSession, surfaceTexture, () -> {
            if (cameraSession != null) {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("camera initied");
                }
                cameraSession.setInitied();
            }
        }, () -> cameraThread.setCurrentSession(cameraSession));
    });
}
 
Example 3 (Project: Fatigue-Detection, File: STUtils.java)
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
	
	TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
	
	Rect rect = new Rect(0, 0, width, height);
	
	try {
		Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
		Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
    	byte[] imageData = nv21;
    	if (mRS == null) {
    		mRS = RenderScript.create(context);
    		mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
    		Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
    		tb.setX(width);
    		tb.setY(height);
    		tb.setMipmaps(false);
    		tb.setYuvFormat(ImageFormat.NV21);
    		ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
    		timings.addSplit("Prepare for ain");
    		Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
    		tb2.setX(width);
    		tb2.setY(height);
    		tb2.setMipmaps(false);
    		aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT & Allocation.USAGE_SHARED);
    		timings.addSplit("Prepare for aOut");
    		bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    		timings.addSplit("Create Bitmap");
		}
    	ain.copyFrom(imageData);
		timings.addSplit("ain copyFrom");
		mYuvToRgb.setInput(ain);
		timings.addSplit("setInput ain");
		mYuvToRgb.forEach(aOut);
		timings.addSplit("NV21 to ARGB forEach");
		aOut.copyTo(bitmap);
		timings.addSplit("Allocation to Bitmap");
	} catch (Exception e) {
		YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
		timings.addSplit("NV21 bytes to YuvImage");
		
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
	}
	
   	timings.dumpToLog();
   	return bitmap;
}
 
Example 4 (Project: VehicleInfoOCR, File: CameraSource.java)
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
 * camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
@SuppressLint("InlinedApi")
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    bytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 7 (Project: android_9.0.0_r45, File: LegacyCameraDevice.java)
public static boolean isPreviewConsumer(Surface output) {
    int usageFlags = detectSurfaceUsageFlags(output);
    int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT |
            GRALLOC_USAGE_SW_READ_OFTEN;
    int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
            GRALLOC_USAGE_HW_RENDER;
    boolean previewConsumer = ((usageFlags & disallowedFlags) == 0 &&
            (usageFlags & allowedFlags) != 0);
    // The format is detected only to validate the surface: detectSurfaceType()
    // throws for an abandoned surface, and the value itself is unused here.
    int surfaceFormat = ImageFormat.UNKNOWN;
    try {
        surfaceFormat = detectSurfaceType(output);
    } catch(BufferQueueAbandonedException e) {
        throw new IllegalArgumentException("Surface was abandoned", e);
    }

    return previewConsumer;
}
 
Example 8 (Project: android_9.0.0_r45, File: LegacyCameraDevice.java)
public static boolean isVideoEncoderConsumer(Surface output) {
    int usageFlags = detectSurfaceUsageFlags(output);
    int disallowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
            GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN;
    int allowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0 &&
            (usageFlags & allowedFlags) != 0);

    // As in isPreviewConsumer(), the detection call only validates the surface.
    int surfaceFormat = ImageFormat.UNKNOWN;
    try {
        surfaceFormat = detectSurfaceType(output);
    } catch(BufferQueueAbandonedException e) {
        throw new IllegalArgumentException("Surface was abandoned", e);
    }

    return videoEncoderConsumer;
}
 
Example 10 (Project: Camera2, File: OneCameraImpl.java)
@Override
public Size pickPreviewSize(Size pictureSize, Context context)
{
    if (pictureSize == null)
    {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
    Size[] supportedSizes = getSupportedPreviewSizes();

    // Since devices only have one raw resolution we need to be more
    // flexible for selecting a matching preview resolution.
    Double aspectRatioTolerance = sCaptureImageFormat == ImageFormat.RAW_SENSOR ? 10d : null;
    Size size = CaptureModuleUtil.getOptimalPreviewSize(supportedSizes, pictureAspectRatio, aspectRatioTolerance);
    Log.d(TAG, "Selected preview size: " + size);
    return size;
}
 
Example 11 (Project: LiveMultimedia, File: JellyBeanCamera.java)
/**********************************************************
 * Captures video frames one by one from the preview window
 * and sets up the buffers that hold the images.
 **********************************************************/
private synchronized  void setupVideoFrameCallback() {
    Log.d(TAG, "setupVideoFrameCallback(() called on the Camera class");
    if (mCamera == null) {
        Log.e(TAG, "Camera object is null in setupVideoFrameCallback!");
        return;
    }
    mFrameCatcher = new FrameCatcher( mPreviewWidth,
                                      mPreviewHeight,
                                      getImageFormat(),
                                      mVideoPreview);
    long bufferSize = (long) mPreviewWidth * mPreviewHeight * ImageFormat.getBitsPerPixel(mImageFormat) / 8;
    // For NV21 (12 bits per pixel) this works out to width * height * 3 / 2 bytes.
    long sizeWeShouldHave = (long) mPreviewWidth * mPreviewHeight * 3 / 2;
    mCamera.setPreviewCallbackWithBuffer(null);
    mCamera.setPreviewCallbackWithBuffer( mFrameCatcher );
    for (int i = 0; i < NUM_CAMERA_PREVIEW_BUFFERS; i++) {
        byte [] cameraBuffer = new byte[(int)bufferSize];
        mCamera.addCallbackBuffer(cameraBuffer);
    }
}
 
Example 12 (Project: mlkit-material-android, File: CameraSource.java)
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
 * camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings.
 */
private byte[] createPreviewBuffer(Size previewSize) {
  int bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT);
  long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
  int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

  // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
  // should guarantee that there will be an array to work with.
  byte[] byteArray = new byte[bufferSize];
  ByteBuffer byteBuffer = ByteBuffer.wrap(byteArray);
  if (!byteBuffer.hasArray() || (byteBuffer.array() != byteArray)) {
    // This should never happen. If it does, then we wouldn't be passing the preview content to
    // the underlying detector later.
    throw new IllegalStateException("Failed to create valid buffer for camera source.");
  }

  bytesToByteBuffer.put(byteArray, byteBuffer);
  return byteArray;
}
 
Example 13 (Project: android-chromium, File: VideoCapture.java)
@CalledByNative
public int getColorspace() {
    switch (mImageFormat){
    case ImageFormat.YV12:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
    case ImageFormat.NV21:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
    case ImageFormat.YUY2:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
    case ImageFormat.NV16:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
    case ImageFormat.JPEG:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
    case ImageFormat.RGB_565:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
    case ImageFormat.UNKNOWN:
    default:
        return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
    }
}
 
Example 14 (Project: Android-IP-Camera, File: CameraPreview.java)
public CameraPreview(Context context, Camera camera) {
    super(context);
    mCamera = camera;

    mHolder = getHolder();
    mHolder.addCallback(this);
    // deprecated setting, but required on Android versions prior to 3.0
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    Parameters params = mCamera.getParameters();
    List<Size> sizes = params.getSupportedPreviewSizes();
    for (Size s : sizes) {
        Log.i(TAG, "preview size = " + s.width + ", " + s.height);
    }
    
    params.setPreviewSize(640, 480); // set preview size. smaller is better
    mCamera.setParameters(params);
    
    mPreviewSize = mCamera.getParameters().getPreviewSize();
    Log.i(TAG, "preview size = " + mPreviewSize.width + ", " + mPreviewSize.height);
    
    int format = mCamera.getParameters().getPreviewFormat();
    mFrameLength = mPreviewSize.width * mPreviewSize.height * ImageFormat.getBitsPerPixel(format) / 8;
}
 
Example 15 (Project: habpanelviewer, File: CameraImplV2.java)
@Override
public void setDeviceRotation(int deviceOrientation) {
    mDeviceOrientation = deviceOrientation;

    // configure transform if preview is running only
    if (isPreviewRunning()) {
        try {
            CameraCharacteristics characteristics = mCamManager.getCameraCharacteristics(mCamera.getId());

            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                Log.w(TAG, "Could not find a valid preview size");
            } else {
                final Point previewSize = chooseOptimalSize(toPointArray(map.getOutputSizes(ImageFormat.YUV_420_888)));
                setDeviceOrientation(previewSize);
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to set device orientation", e);
        }
    }
}
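
chooseOptimalSize() and toPointArray() are helpers of this project that are not listed here. Purely as an assumption about its contract, a plausible sketch of the former would pick the largest available YUV_420_888 output size:

// Hypothetical sketch, not the project's actual helper.
private Point chooseOptimalSize(Point[] sizes) {
    Point best = sizes[0];
    for (Point p : sizes) {
        if (p.x * p.y > best.x * best.y) {
            best = p;
        }
    }
    return best;
}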
 
Example 16 (Project: unity-android-native-camera, File: YuvToRgb.java)
private void createAllocations(RenderScript rs) {
    final int width = mInputSize.getWidth();
    final int height = mInputSize.getHeight();

    mOutBufferInt = new int[width * height];

    Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
    yuvTypeBuilder.setX(width);
    yuvTypeBuilder.setY(height);
    yuvTypeBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    mInputAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(),
            Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT);

    Type rgbType = Type.createXY(rs, Element.RGBA_8888(rs), width, height);
    Type intType = Type.createXY(rs, Element.U32(rs), width, height);

    mOutputAllocation = Allocation.createTyped(rs, rgbType,
            Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT);
    mOutputAllocationInt = Allocation.createTyped(rs, intType,
            Allocation.USAGE_SCRIPT);
}
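
The conversion script itself is not shown above. Assuming a standard ScriptIntrinsicYuvToRGB named mYuvToRgb (the project may use a custom kernel instead), a per-frame drive of these IO allocations typically looks like this; ioReceive() and ioSend() are the standard RenderScript calls for USAGE_IO allocations:

// Sketch: process one camera frame through the allocations created above.
void processFrame() {
    mInputAllocation.ioReceive();         // latch the newest buffer from the camera
    mYuvToRgb.setInput(mInputAllocation);
    mYuvToRgb.forEach(mOutputAllocation); // YUV_420_888 -> RGBA_8888
    mOutputAllocation.ioSend();           // publish the result to the output surface
}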
 
Example 17 (Project: android-vision, File: CameraSource.java)
/**
 * Creates one buffer for the camera preview callback.  The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
 
Example 18 (Project: AudioVideoCodec, File: CameraManager.java)
private void startCamera(int cameraId) {
    try {
        camera = Camera.open(cameraId);
        camera.setPreviewTexture(surfaceTexture);

        Camera.Parameters parameters = camera.getParameters();
        parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        parameters.setPreviewFormat(ImageFormat.NV21);

        // Set the focus mode: enabled when the rear camera is open, disabled when
        // switching to the front camera (Samsung and Huawei cannot set front-camera
        // focus; it does work on some Meizu and Xiaomi models).
        if (cameraId == 0) {
            // Autofocus can be ineffective on Xiaomi and Meizu phones and needs
            // per-device handling; for seamless focus the safest approach is to
            // listen to the accelerometer.
            camera.cancelAutoFocus();
            // Setting the mode this way makes the screen flicker once; the same is
            // true of Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO.
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        }
        Camera.Size size = getCameraSize(parameters.getSupportedPreviewSizes(), screenWidth,
                screenHeight, 0.1f);
        parameters.setPreviewSize(size.width, size.height);
        // The sensor output is unrotated here, so its width maps to the on-screen
        // height once the rotation step is applied.
        Log.d(TAG, "startCamera: preview width:" + size.width + " -- " + "preview height:" + size.height);
        previewWidth = size.width;
        previewHeight = size.height;

        size = getCameraSize(parameters.getSupportedPictureSizes(), screenWidth, screenHeight, 0.1f);
        parameters.setPictureSize(size.width, size.height);
        // As above: the unrotated width corresponds to the on-screen height.
        Log.d(TAG, "startCamera: picture width:" + size.width + " -- " + "picture height:" + size.height);

        camera.setParameters(parameters);
        camera.startPreview();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
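
getCameraSize() is a project helper that is not listed here. A plausible reconstruction, offered only as an assumption about its behavior, would return the largest size whose aspect ratio is within the given tolerance of the screen's:

// Hypothetical reconstruction of getCameraSize(); not the project's actual code.
private Camera.Size getCameraSize(List<Camera.Size> sizes, int screenWidth,
                                  int screenHeight, float tolerance) {
    // The preview is rotated relative to the portrait screen, so compare the
    // camera's width/height ratio against screenHeight/screenWidth.
    float target = (float) screenHeight / screenWidth;
    Camera.Size best = null;
    for (Camera.Size s : sizes) {
        float ratio = (float) s.width / s.height;
        if (Math.abs(ratio - target) <= tolerance
                && (best == null || s.width * s.height > best.width * best.height)) {
            best = s;
        }
    }
    return best != null ? best : sizes.get(0);
}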
 
Example 19 (Project: SimpleSmsRemote, File: CameraUtils.java)
public CaptureSettings getDefaultCaptureSettings() {
    String defaultPhotosPath = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_DCIM).getAbsolutePath();

    CaptureSettings captureSettings = new CaptureSettings(id, getBiggestOutputSize(),
            CaptureSettings.ImageFormat.JPEG, defaultPhotosPath);
    captureSettings.setAutofocus(autofocusSupport);
    captureSettings.setFlashlight(CaptureSettings.FlashlightMode.AUTO);
    return captureSettings;
}
 
Example 20 (Project: flutter_barcode_scanner, File: CameraSource.java)
@Override
public void run() {
    Frame outputFrame;
    ByteBuffer data;

    while (true) {
        synchronized (mLock) {
            while (mActive && (mPendingFrameData == null)) {
                try {
                    mLock.wait();
                } catch (InterruptedException e) {
                    return;
                }
            }

            if (!mActive) {
                return;
            }

            outputFrame = new Frame.Builder()
                    .setImageData(mPendingFrameData, mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(), ImageFormat.NV21)
                    .setId(mPendingFrameId)
                    .setTimestampMillis(mPendingTimeMillis)
                    .setRotation(mRotation)
                    .build();

            data = mPendingFrameData;
            mPendingFrameData = null;
        }


        try {
            mDetector.receiveFrame(outputFrame);
        } catch (Throwable t) {
            // Swallow detector errors so the frame-processing loop keeps running.
        } finally {
            mCamera.addCallbackBuffer(data.array());
        }
    }
}
 
Example 21 (Project: VIA-AI, File: JavaCamera2View.java)
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
    else if (mPreviewFormat == ImageFormat.YUV_420_888) {
        assert (mUVFrameData != null);
        Imgproc.cvtColorTwoPlane(mYuvFrameData, mUVFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
    } else
        throw new IllegalArgumentException("Preview format must be NV21, YV12 or YUV_420_888");

    return mRgba;
}
 
Example 22 (Project: Lassi-Android, File: Camera1.java)
private void startPreview(String log) {
    LOG.i(log, "Dispatching onCameraPreviewStreamSizeChanged.");
    mCameraCallbacks.onCameraPreviewStreamSizeChanged();

    Size previewSize = getPreviewStreamSize(REF_VIEW);
    boolean wasFlipped = flip(REF_SENSOR, REF_VIEW);
    mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight(), wasFlipped);

    Camera.Parameters params = mCamera.getParameters();
    mPreviewFormat = params.getPreviewFormat();
    params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // <- not allowed during preview
    if (mMode == Mode.PICTURE) {
        params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
    } else {
        // mCaptureSize in this case is a video size. The available video sizes are not necessarily
        // a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
        // However, the setPictureSize() call made here is effectively unused: we don't allow HQ pictures in video mode.
        // While this might be lifted in the future, for now, just use a picture capture size.
        Size pictureSize = computeCaptureSize(Mode.PICTURE);
        params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
    }
    mCamera.setParameters(params);

    mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
    mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
    mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewStreamSize);

    LOG.i(log, "Starting preview with startPreview().");
    try {
        mCamera.startPreview();
    } catch (Exception e) {
        LOG.e(log, "Failed to start preview.", e);
        throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
    }
    LOG.i(log, "Started preview.");
}
 
Example 23 (Project: LPR, File: JavaCameraView.java)
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 24 (Project: Camera2, File: OneCameraImpl.java)
private void onCaptureCompleted(InFlightCapture capture)
{

    // Experimental support for writing RAW. We do not have a usable JPEG
    // here, so we don't use the usual capture session mechanism and instead
    // just store the RAW file in its own directory.
    // TODO: If we make this a real feature we should probably put the DNGs
    // into the Camera directly.
    if (sCaptureImageFormat == ImageFormat.RAW_SENSOR)
    {
        if (!RAW_DIRECTORY.exists())
        {
            if (!RAW_DIRECTORY.mkdirs())
            {
                throw new RuntimeException("Could not create RAW directory.");
            }
        }
        File dngFile = new File(RAW_DIRECTORY, capture.session.getTitle() + ".dng");
        writeDngBytesAndClose(capture.image, capture.totalCaptureResult, mCharacteristics, dngFile);
    } else
    {
        // Since this is not an HDR+ session, we will just save the
        // result.
        byte[] imageBytes = acquireJpegBytesAndClose(capture.image);
        saveJpegPicture(imageBytes, capture.parameters, capture.session, capture.totalCaptureResult);
    }
    broadcastReadyState(true);
    capture.parameters.callback.onPictureTaken(capture.session);
}
 
Example 25 (Project: LicensePlateDiscern, File: JavaCameraView.java)
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 26 (Project: webrtc_android, File: CameraEnumerationAndroid.java)
public static int frameSize(int width, int height, int imageFormat) {
  if (imageFormat != ImageFormat.NV21) {
    throw new UnsupportedOperationException("Don't know how to calculate "
        + "the frame size of non-NV21 image formats.");
  }
  return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
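
As a quick check of the arithmetic: NV21 carries 12 bits per pixel, so the formula reduces to width * height * 3 / 2.

// Worked example using the helper above.
int bytes = frameSize(640, 480, ImageFormat.NV21);
// (640 * 480 * 12) / 8 = 460800 bytes, i.e. 640 * 480 * 3 / 2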
 
Example 27 (Project: android-vision, File: SafeFaceDetector.java)
/**
 * Creates a new frame based on the original frame, with additional width on the right to
 * increase the size to avoid the bug in the underlying face detector.
 */
private Frame padFrameRight(Frame originalFrame, int newWidth) {
    Frame.Metadata metadata = originalFrame.getMetadata();
    int width = metadata.getWidth();
    int height = metadata.getHeight();

    Log.i(TAG, "Padded image from: " + width + "x" + height + " to " + newWidth + "x" + height);

    ByteBuffer origBuffer = originalFrame.getGrayscaleImageData();
    int origOffset = origBuffer.arrayOffset();
    byte[] origBytes = origBuffer.array();

    // This can be changed to just .allocate in the future, when Frame supports non-direct
    // byte buffers.
    ByteBuffer paddedBuffer = ByteBuffer.allocateDirect(newWidth * height);
    int paddedOffset = paddedBuffer.arrayOffset();
    byte[] paddedBytes = paddedBuffer.array();
    Arrays.fill(paddedBytes, (byte) 0);

    for (int y = 0; y < height; ++y) {
        int origStride = origOffset + y * width;
        int paddedStride = paddedOffset + y * newWidth;
        System.arraycopy(origBytes, origStride, paddedBytes, paddedStride, width);
    }

    return new Frame.Builder()
            .setImageData(paddedBuffer, newWidth, height, ImageFormat.NV21)
            .setId(metadata.getId())
            .setRotation(metadata.getRotation())
            .setTimestampMillis(metadata.getTimestampMillis())
            .build();
}
 
Example 28 (Project: AndroidDocumentScanner, File: JavaCameraView.java)
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example 29 (Project: MultiMediaSample, File: ColorFormatUtil.java)
public static Bitmap convertYUV420sp2RGB(byte[] yuv, int width, int height) {
    // Convert YUV420sp (NV21) to RGB; typically takes 5-60 ms.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    byte[] imageBytes = out.toByteArray();
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
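
A minimal sketch of calling this helper from a legacy preview callback; the wiring below is an illustrative assumption, not code from the project (NV21 is the default preview format on the old camera API, so the helper applies directly):

camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera cam) {
        Camera.Size size = cam.getParameters().getPreviewSize();
        Bitmap frame = ColorFormatUtil.convertYUV420sp2RGB(data, size.width, size.height);
        // ... use `frame`, e.g. display it or run detection ...
    }
});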
 
Example 30 (Project: android_9.0.0_r45, File: OutputConfiguration.java)
/**
 * Create an OutputConfiguration from Parcel.
 */
private OutputConfiguration(@NonNull Parcel source) {
    int rotation = source.readInt();
    int surfaceSetId = source.readInt();
    int surfaceType = source.readInt();
    int width = source.readInt();
    int height = source.readInt();
    boolean isDeferred = source.readInt() == 1;
    boolean isShared = source.readInt() == 1;
    ArrayList<Surface> surfaces = new ArrayList<Surface>();
    source.readTypedList(surfaces, Surface.CREATOR);
    String physicalCameraId = source.readString();

    checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");

    mSurfaceGroupId = surfaceSetId;
    mRotation = rotation;
    mSurfaces = surfaces;
    mConfiguredSize = new Size(width, height);
    mIsDeferredConfig = isDeferred;
    mIsShared = isShared;
    if (mSurfaces.size() > 0) {
        mSurfaceType = SURFACE_TYPE_UNKNOWN;
        mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
        mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
        mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
    } else {
        mSurfaceType = surfaceType;
        mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
        mConfiguredDataspace =
                StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
        mConfiguredGenerationId = 0;
    }
    mPhysicalCameraId = physicalCameraId;
}