android.media.Image.Plane#android.media.Image Source Code Examples

Listed below are real-world examples of android.media.Image.Plane#android.media.Image in use. You can also follow each project link to read the full source on GitHub.
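
Before the examples, a short note on the API itself: an android.media.Image exposes its pixel data through getPlanes(), and each android.media.Image.Plane provides getBuffer(), getRowStride() and getPixelStride(). Images acquired from an ImageReader must always be released with close(). The snippet below is a minimal sketch of that access pattern; the reader size, format and handler are illustrative placeholders and do not come from any example on this page.

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;

import java.nio.ByteBuffer;

/** Minimal sketch of the usual ImageReader -> Image -> Image.Plane access pattern. */
class PlaneAccessSketch {
    static ImageReader createReader(Handler handler) {
        // Size and format are placeholders; real code must match the camera or display output.
        ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 2);
        reader.setOnImageAvailableListener(r -> {
            Image image = r.acquireLatestImage();
            if (image == null) {
                return; // no frame available yet
            }
            try {
                Image.Plane[] planes = image.getPlanes();     // Y, U (Cb), V (Cr) for YUV_420_888
                ByteBuffer y = planes[0].getBuffer();
                int rowStride = planes[0].getRowStride();     // bytes per row, may exceed the width
                int pixelStride = planes[0].getPixelStride(); // bytes between adjacent samples in a row
                // ... copy or convert the plane data here ...
            } finally {
                image.close(); // always return the buffer to the reader
            }
        }, handler);
        return reader;
    }
}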

Example 1 - Project: android_9.0.0_r45, File: DngCreator.java
/**
 * Set the thumbnail image.
 *
 * <p>
 * Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
 * Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
 * rejected.
 * </p>
 *
 * @param pixels an {@link android.media.Image} object with the format
 *               {@link android.graphics.ImageFormat#YUV_420_888}.
 * @return this {@link #DngCreator} object.
 * @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
 *      larger than {@link #MAX_THUMBNAIL_DIMENSION}.
 */
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
    if (pixels == null) {
        throw new IllegalArgumentException("Null argument to setThumbnail");
    }

    int format = pixels.getFormat();
    if (format != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("Unsupported Image format " + format);
    }

    int width = pixels.getWidth();
    int height = pixels.getHeight();

    if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
        throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
                "," + height + ") too large, dimensions must be smaller than " +
                MAX_THUMBNAIL_DIMENSION);
    }

    ByteBuffer rgbBuffer = convertToRGB(pixels);
    nativeSetThumbnail(rgbBuffer, width, height);

    return this;
}
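
A hedged usage sketch for context: setThumbnail is normally called on a DngCreator built from the CameraCharacteristics and CaptureResult of a RAW capture, before writeImage writes the DNG file. None of the names below (writeDngWithThumbnail, yuvThumbnail, rawImage, dngFile) come from the project above; they are assumptions for illustration, and the thumbnail must be YUV_420_888 with both dimensions at most MAX_THUMBNAIL_DIMENSION.

// Illustrative sketch only; assumes imports of android.hardware.camera2.*, android.media.*,
// and java.io.File / FileOutputStream / OutputStream / IOException.
void writeDngWithThumbnail(CameraCharacteristics characteristics,
                           CaptureResult captureResult,
                           Image yuvThumbnail,  // YUV_420_888, small enough for a thumbnail
                           Image rawImage,      // ImageFormat.RAW_SENSOR from the same capture
                           File dngFile) throws IOException {
    DngCreator dngCreator = new DngCreator(characteristics, captureResult);
    dngCreator.setOrientation(ExifInterface.ORIENTATION_NORMAL);
    dngCreator.setThumbnail(yuvThumbnail);
    try (OutputStream out = new FileOutputStream(dngFile)) {
        dngCreator.writeImage(out, rawImage);
    }
}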
 
Example 2 - Project: fritz-examples, File: MainActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!shouldSample.get()) {
        image.close();
        return;
    }

    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();
}
 
Example 3 - Project: fritz-examples, File: BaseRecordingActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    // Save Images when we're recording
    if (!isRecording.get()) {
        image.close();
        return;
    }

    // Only grab a frame every 100ms
    if (System.currentTimeMillis() - lastRecordedFrameAt.get() < TIME_BETWEEN_FRAMES_MS) {
        image.close();
        return;
    }

    // Add the frame to a queue to process
    lastRecordedFrameAt.set(System.currentTimeMillis());
    final FritzVisionImage fritzImage = FritzVisionImage.fromMediaImage(image, orientation);
    videoProcessingQueue.addVisionImage(fritzImage);
    image.close();
}
 
Example 4 - Project: DoraemonKit, File: ImageCapture.java
void capture() {
    if (isCapturing) {
        return;
    }
    if (mImageReader == null) {
        return;
    }
    isCapturing = true;
    Image image = mImageReader.acquireLatestImage();
    if (image == null) {
        // Reset the flag so a null frame does not block every later capture() call.
        isCapturing = false;
        return;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    Image.Plane[] planes = image.getPlanes();
    ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPaddingStride = rowStride - pixelStride * width;
    int rowPadding = rowPaddingStride / pixelStride;
    Bitmap recordBitmap = Bitmap.createBitmap(width + rowPadding, height, Bitmap.Config.ARGB_8888);
    recordBitmap.copyPixelsFromBuffer(buffer);
    mBitmap = Bitmap.createBitmap(recordBitmap, 0, 0, width, height);
    image.close();
    isCapturing = false;
}
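
Note on the stride arithmetic above: for a PixelFormat.RGBA_8888 capture, planes[0].getRowStride() can be larger than pixelStride * width because rows may be padded for alignment. Creating the intermediate bitmap with width + rowPadding pixels lets copyPixelsFromBuffer consume whole padded rows, and the final Bitmap.createBitmap call crops the result back to width x height.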
 
Example 5 - Project: CameraCompat, File: DirectChain.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
    if (mIsFrontCamera && mEnableMirror) {
        if (mRotation == Rotation.ROTATION_90) {
            RgbYuvConverter.image2yuvCropFlip(image, mVideoHeight, mGLYuvBuffer.array());
        } else {
            RgbYuvConverter.image2yuvCropRotateC180Flip(image, mVideoHeight,
                    mGLYuvBuffer.array());
        }
    } else {
        if (mRotation == Rotation.ROTATION_90) {
            RgbYuvConverter.image2yuvCropRotateC180(image, mVideoHeight, mGLYuvBuffer.array());
        } else {
            RgbYuvConverter.image2yuvCrop(image, mVideoHeight, mGLYuvBuffer.array());
        }
    }
    mVideoCaptureCallback.onFrameData(mGLYuvBuffer.array(), image.getWidth(), mVideoHeight);
}
 
Example 6 - Project: fritz-examples, File: MainActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!shouldSample.get()) {
        image.close();
        return;
    }

    if (!isComputing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    runInBackground(() -> {
        objectResult = predictor.predict(visionImage);
        requestRender();
    });
}
 
Example 7 - Project: Camera2, File: AndroidImageReaderProxy.java
@Override
@Nullable
public ImageProxy acquireLatestImage()
{
    synchronized (mLock)
    {
        Image image = mDelegate.acquireLatestImage();
        if (image == null)
        {
            return null;
        } else
        {
            return new AndroidImageProxy(image);
        }
    }
}
 
Example 8 - Project: Camera2, File: AndroidImageReaderProxy.java
@Override
@Nullable
public ImageProxy acquireNextImage()
{
    synchronized (mLock)
    {
        Image image = mDelegate.acquireNextImage();
        if (image == null)
        {
            return null;
        } else
        {
            return new AndroidImageProxy(image);
        }
    }
}
 
Example 9 - Project: CameraCompat, File: GPUImageChain.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
    if (mIsFrontCamera && mEnableMirror) {
        if (mGLRender.getRotation() == Rotation.ROTATION_90) {
            RgbYuvConverter.image2yuvCropFlip(image, mGLRender.getVideoHeight(),
                    mGLYuvBuffer.array());
        } else {
            RgbYuvConverter.image2yuvCropRotateC180Flip(image, mGLRender.getVideoHeight(),
                    mGLYuvBuffer.array());
        }
    } else {
        if (mGLRender.getRotation() == Rotation.ROTATION_90) {
            RgbYuvConverter.image2yuvCropRotateC180(image, mGLRender.getVideoHeight(),
                    mGLYuvBuffer.array());
        } else {
            RgbYuvConverter.image2yuvCrop(image, mGLRender.getVideoHeight(),
                    mGLYuvBuffer.array());
        }
    }
    mVideoCaptureCallback.onFrameData(mGLYuvBuffer.array(), image.getWidth(),
            mGLRender.getVideoHeight());
}
 
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
@Override
public void onImageAvailable(ImageReader reader) {
    if (mListener != null && isCapturing()) {
        try {
            long now = System.nanoTime();
            Image img = reader.acquireLatestImage();
            if (img != null && now - lastFrame >= min_nano_time) {
                sendImage(img);
                img.close();
                lastFrame = now;
            } else if (img != null) {
                img.close();
            }
        } catch (final Exception e) {
            if (DEBUG) Log.w(TAG, "sendImage exception:", e);
        }
    }
}
 
Example 11 - Project: CameraDemo, File: TextureCamera2Activity.java
@Override
protected Bitmap doInBackground(Image ... images) {
    ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);

    long time = System.currentTimeMillis();
    if (mCameraProxy.isFrontCamera()) {
        Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        // The front camera preview needs to be mirrored horizontally
        Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
        Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        ImageUtils.saveBitmap(rotateBitmap);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
        rotateBitmap.recycle();
    } else {
        ImageUtils.saveImage(bytes);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
    }
    images[0].close();
    return ImageUtils.getLatestThumbBitmap();
}
 
Example 12 - Project: CameraDemo, File: SurfaceCamera2Activity.java
@Override
protected Bitmap doInBackground(Image ... images) {
    ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);

    long time = System.currentTimeMillis();
    if (mCameraProxy.isFrontCamera()) {
        Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        // The front camera preview needs to be mirrored horizontally
        Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
        Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        ImageUtils.saveBitmap(rotateBitmap);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
        rotateBitmap.recycle();
    } else {
        ImageUtils.saveImage(bytes);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
    }
    images[0].close();
    return ImageUtils.getLatestThumbBitmap();
}
 
Example 13 - Project: CameraDemo, File: GLSurfaceCamera2Activity.java
@Override
protected Bitmap doInBackground(Image... images) {
    ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);

    long time = System.currentTimeMillis();
    if (mCameraProxy.isFrontCamera()) {
        Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        // The front camera preview needs to be mirrored horizontally
        Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
        Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        ImageUtils.saveBitmap(rotateBitmap);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
        rotateBitmap.recycle();
    } else {
        ImageUtils.saveImage(bytes);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
    }
    images[0].close();
    return ImageUtils.getLatestThumbBitmap();
}
 
Example 14 - Project: Cam2Caption, File: Camera2BasicFragment.java
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = null;
    try {
        image = reader.acquireLatestImage();
        if (image == null) {
            return; // acquireLatestImage() can return null when no frame is ready
        }
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
        final String text = runModel(bitmap);
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                textView.setText(text);

            }
        });
    } finally {
        if (image != null) {
            image.close();
        }
    }
}
 
public Bitmap preprocessImage(final Image image) {
    if (image == null) {
        return null;
    }

    Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
    Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());

    if (croppedBitmap != null && rgbFrameBitmap != null) {
        ByteBuffer bb = image.getPlanes()[0].getBuffer();
        rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
        cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
    }

    image.close();

    // For debugging
    if (SAVE_PREVIEW_BITMAP) {
        saveBitmap(croppedBitmap);
    }
    return croppedBitmap;
}
 
Example 18 - Project: fritz-examples, File: MainActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!shouldSample.get()) {
        image.close();
        return;
    }

    if (!isComputing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();

    runInBackground(() -> {
        labelResult = predictor.predict(visionImage);
        requestRender();
    });
}
 
Example 19 - Project: CatVision-io-SDK-Android, File: VNCServer.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void push(Image image, int pixelFormat) {
	Image.Plane[] planes = image.getPlanes();
	ByteBuffer b = planes[0].getBuffer();
	if (pixelFormat == PixelFormat.RGBA_8888) {
		// planes[0].getPixelStride() has to be 4 (32 bit)
		jni_push_pixels_rgba_8888(b, planes[0].getRowStride());
	}
	else if (pixelFormat == PixelFormat.RGB_565)
	{
		// planes[0].getPixelStride() has to be 2 (16 bit)
		jni_push_pixels_rgba_565(b, planes[0].getRowStride());
	}
	else
	{
		Log.e(TAG, "Image reader acquired unsupported image format " + pixelFormat);
	}
}
 
Example 20 - Project: CameraCompat, File: RgbYuvConverter.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static int image2yuvCropFlip(Image imageIn, int outputHeight, byte[] yuvOut) {
    Image.Plane[] planes = imageIn.getPlanes();
    ByteBuffer Y = planes[0].getBuffer();
    ByteBuffer Cr = planes[2].getBuffer();
    int CrPixelStride = planes[2].getPixelStride();
    ByteBuffer Cb = planes[1].getBuffer();
    int CbPixelStride = planes[1].getPixelStride();
    return image2yuvCropFlip(imageIn.getWidth(), imageIn.getHeight(), Y, Cr, Cb, CrPixelStride,
            CbPixelStride, outputHeight, yuvOut);
}
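
A note on the plane order used here: for ImageFormat.YUV_420_888, getPlanes() always returns the Y plane at index 0, the U (Cb) plane at index 1 and the V (Cr) plane at index 2. The chroma pixel stride is 1 when the layout is fully planar and 2 when U and V are interleaved, which is why the converter forwards CrPixelStride and CbPixelStride to the overloaded image2yuvCropFlip instead of assuming a fixed layout.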
 
Example 21 - Project: android-Camera2Raw, File: Camera2RawFragment.java
private ImageSaver(Image image, File file, CaptureResult result,
                   CameraCharacteristics characteristics, Context context,
                   RefCountedAutoCloseable<ImageReader> reader) {
    mImage = image;
    mFile = file;
    mCaptureResult = result;
    mCharacteristics = characteristics;
    mContext = context;
    mReader = reader;
}
 
Example 22 - Project: CameraCompat, File: RgbYuvConverter.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static int image2yuvCropRotateC180Flip(Image imageIn, int outputHeight, byte[] yuvOut) {
    Image.Plane[] planes = imageIn.getPlanes();
    ByteBuffer Y = planes[0].getBuffer();
    ByteBuffer Cr = planes[2].getBuffer();
    int CrPixelStride = planes[2].getPixelStride();
    ByteBuffer Cb = planes[1].getBuffer();
    int CbPixelStride = planes[1].getPixelStride();
    return image2yuvCropRotateC180Flip(imageIn.getWidth(), imageIn.getHeight(), Y, Cr, Cb,
            CrPixelStride, CbPixelStride, outputHeight, yuvOut);
}
 
Example 23 - Project: fritz-examples, File: MainActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!shouldSample.get()) {
        image.close();
        return;
    }
    visionImage = FritzVisionImage.fromMediaImage(image, orientation);
    image.close();
}
 
Example 24 - Project: AndroidPlayground, File: MainActivity.java
@Override
public void onImageAvailable(ImageReader imageReader) {
    Log.d("MainActivity", "onImageAvailable");
    final Image image = imageReader.acquireLatestImage();
    count++;
    if (count == 100) {
        byte[] yuv = new byte[image.getWidth() * image.getHeight() * 3 / 2];
        image2yuv(image, yuv);
        saveRawYuvData(yuv, image.getWidth(), image.getHeight(), "org");
    }
    image.close();
}
 
Example 25 - Project: 365browser, File: VideoCaptureCamera2.java
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireLatestImage()) {
        if (image == null) return;

        if (image.getFormat() != ImageFormat.YUV_420_888 || image.getPlanes().length != 3) {
            nativeOnError(mNativeVideoCaptureDeviceAndroid, "Unexpected image format: "
                    + image.getFormat() + " or #planes: " + image.getPlanes().length);
            throw new IllegalStateException();
        }

        if (reader.getWidth() != image.getWidth()
                || reader.getHeight() != image.getHeight()) {
            nativeOnError(mNativeVideoCaptureDeviceAndroid, "ImageReader size ("
                    + reader.getWidth() + "x" + reader.getHeight()
                    + ") did not match Image size (" + image.getWidth() + "x"
                    + image.getHeight() + ")");
            throw new IllegalStateException();
        }

        nativeOnI420FrameAvailable(mNativeVideoCaptureDeviceAndroid,
                image.getPlanes()[0].getBuffer(), image.getPlanes()[0].getRowStride(),
                image.getPlanes()[1].getBuffer(), image.getPlanes()[2].getBuffer(),
                image.getPlanes()[1].getRowStride(), image.getPlanes()[1].getPixelStride(),
                image.getWidth(), image.getHeight(), getCameraRotation(),
                image.getTimestamp());
    } catch (IllegalStateException ex) {
        Log.e(TAG, "acquireLatestImage():", ex);
    }
}
 
Example 26 - Project: TikTok, File: Camera2.java
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
 
Example 27 - Project: fritz-examples, File: LiveCameraActivity.java
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = reader.acquireLatestImage();

    if (image == null) {
        return;
    }

    if (!computing.compareAndSet(false, true)) {
        image.close();
        return;
    }

    setupImageForPrediction(image);

    image.close();

    runInBackground(
            new Runnable() {
                @Override
                public void run() {

                    runInference();
                    // Fire callback to change the OverlayView
                    requestRender();
                    computing.set(false);
                }
            });
}
 
Example 28 - Project: habpanelviewer, File: ScreenCapturer.java
public synchronized Bitmap captureScreen() throws IllegalStateException {
    AtomicReference<Image> imageHolder = new AtomicReference<>();
    final CountDownLatch latch = new CountDownLatch(1);

    ImageReader mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
    mImageReader.setOnImageAvailableListener(imageReader -> {
        imageHolder.set(mImageReader.acquireLatestImage());
        latch.countDown();
    }, mHandler);

    VirtualDisplay display = mProjection.createVirtualDisplay("screen-mirror", mWidth, mHeight, mDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mImageReader.getSurface(),
            null, null);

    try {
        latch.await(1, TimeUnit.SECONDS);

        if (latch.getCount() == 1) {
            throw new IllegalStateException("Screen capturing timed out");
        }

        final Image image = imageHolder.get();
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * mWidth;

        // create bitmap
        Bitmap bmp = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(buffer);
        image.close();

        return bmp;
    } catch (InterruptedException e) {
        throw new IllegalStateException("Got interrupt while capturing screen");
    } finally {
        display.release();
    }
}
 
@Override
public void onImageAvailable(ImageReader reader) {
    Image mImage = reader.acquireNextImage();
    if (mImage == null) {
        return;
    }
    mFrameProcessor.setNextFrame(convertYUV420888ToNV21(mImage));
    mImage.close();
}
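
The convertYUV420888ToNV21 helper used above is not included in this excerpt. Purely as an illustration, the sketch below shows one common way such a conversion can be written; it is not the project's actual implementation, and it assumes even dimensions plus the standard YUV_420_888 plane order (Y, U, V).

// Hedged sketch only; requires android.media.Image and java.nio.ByteBuffer.
private static byte[] yuv420888ToNv21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];

    Image.Plane yPlane = image.getPlanes()[0];
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];

    // Copy the luma plane row by row so any row padding is skipped.
    ByteBuffer yBuf = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuf.position(row * yRowStride);
        yBuf.get(nv21, pos, width);
        pos += width;
    }

    // NV21 stores interleaved V/U samples after the luma block.
    ByteBuffer uBuf = uPlane.getBuffer();
    ByteBuffer vBuf = vPlane.getBuffer();
    int chromaRowStride = uPlane.getRowStride();
    int chromaPixelStride = uPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int index = row * chromaRowStride + col * chromaPixelStride;
            nv21[pos++] = vBuf.get(index);
            nv21[pos++] = uBuf.get(index);
        }
    }
    return nv21;
}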
 
Example 30 - Project: CameraCompat, File: Camera2PreviewCallback.java
@Override
public void onImageAvailable(ImageReader reader) {
    try {
        final Image image = reader.acquireLatestImage();
        if (image != null) {
            mCameraFrameCallback.onFrameData(image, image::close);
        }
    } catch (OutOfMemoryError | IllegalStateException e) {
        CameraCompat.onError(CameraCompat.ERR_UNKNOWN);
    }
}