Listed below are example usages of android.media.Image.Plane / android.media.Image. Click a link to view the source code on GitHub, or leave a comment on the right.
/**
 * Sets the DNG thumbnail from a YUV_420_888 image.
 *
 * <p>The pixel data is converted to RGB before being handed to the native
 * layer. Thumbnails with a width or height exceeding
 * {@link #MAX_THUMBNAIL_DIMENSION} are rejected.</p>
 *
 * @param pixels a non-null {@link android.media.Image} in
 *               {@link android.graphics.ImageFormat#YUV_420_888} format.
 * @return this {@link #DngCreator} instance, for call chaining.
 * @throws java.lang.IllegalArgumentException if {@code pixels} is null, has an
 *         unsupported format, or exceeds {@link #MAX_THUMBNAIL_DIMENSION}.
 */
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
    if (pixels == null) {
        throw new IllegalArgumentException("Null argument to setThumbnail");
    }
    final int fmt = pixels.getFormat();
    if (fmt != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("Unsupported Image format " + fmt);
    }
    final int w = pixels.getWidth();
    final int h = pixels.getHeight();
    if (w > MAX_THUMBNAIL_DIMENSION || h > MAX_THUMBNAIL_DIMENSION) {
        throw new IllegalArgumentException("Thumbnail dimensions width,height (" + w
                + "," + h + ") too large, dimensions must be smaller than "
                + MAX_THUMBNAIL_DIMENSION);
    }
    final ByteBuffer rgb = convertToRGB(pixels);
    nativeSetThumbnail(rgb, w, h);
    return this;
}
@Override
public void onImageAvailable(final ImageReader reader) {
    // Grab the freshest frame; nothing to do if no frame is queued.
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    // Only capture a vision image while sampling is enabled.
    if (shouldSample.get()) {
        visionImage = FritzVisionImage.fromMediaImage(frame, orientation);
    }
    frame.close();
}
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    // Frames are only kept while recording is active.
    if (!isRecording.get()) {
        frame.close();
        return;
    }
    // Throttle: keep at most one frame per TIME_BETWEEN_FRAMES_MS.
    final long elapsed = System.currentTimeMillis() - lastRecordedFrameAt.get();
    if (elapsed < TIME_BETWEEN_FRAMES_MS) {
        frame.close();
        return;
    }
    // Record the sample time and enqueue the frame for processing.
    lastRecordedFrameAt.set(System.currentTimeMillis());
    final FritzVisionImage visionFrame = FritzVisionImage.fromMediaImage(frame, orientation);
    videoProcessingQueue.addVisionImage(visionFrame);
    frame.close();
}
/**
 * Grabs the latest frame from {@code mImageReader}, converts its RGBA_8888
 * plane into a {@link Bitmap}, and stores the cropped result in {@code mBitmap}.
 *
 * <p>Bug fix: previously, if {@code acquireLatestImage()} returned {@code null},
 * the method returned while {@code isCapturing} was still {@code true},
 * permanently disabling all future captures. The flag is now reset on every
 * exit path, and the {@link Image} is closed even if bitmap creation throws.</p>
 */
void capture() {
    if (isCapturing || mImageReader == null) {
        return;
    }
    isCapturing = true;
    try {
        Image image = mImageReader.acquireLatestImage();
        if (image == null) {
            return; // no frame available yet; finally resets the flag
        }
        try {
            int width = image.getWidth();
            int height = image.getHeight();
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            // The row stride may include padding beyond width * pixelStride;
            // widen the intermediate bitmap to cover it, then crop back.
            int rowPadding = (rowStride - pixelStride * width) / pixelStride;
            Bitmap padded =
                    Bitmap.createBitmap(width + rowPadding, height, Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(buffer);
            mBitmap = Bitmap.createBitmap(padded, 0, 0, width, height);
            // createBitmap may return the source itself when no crop was needed.
            if (padded != mBitmap) {
                padded.recycle();
            }
        } finally {
            image.close();
        }
    } finally {
        isCapturing = false;
    }
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
    // Choose the YUV conversion that matches the mirror/rotation state,
    // writing the result into the shared GL YUV buffer.
    final byte[] out = mGLYuvBuffer.array();
    final boolean mirror = mIsFrontCamera && mEnableMirror;
    final boolean quarterTurn = mRotation == Rotation.ROTATION_90;
    if (mirror) {
        if (quarterTurn) {
            RgbYuvConverter.image2yuvCropFlip(image, mVideoHeight, out);
        } else {
            RgbYuvConverter.image2yuvCropRotateC180Flip(image, mVideoHeight, out);
        }
    } else if (quarterTurn) {
        RgbYuvConverter.image2yuvCropRotateC180(image, mVideoHeight, out);
    } else {
        RgbYuvConverter.image2yuvCrop(image, mVideoHeight, out);
    }
    mVideoCaptureCallback.onFrameData(out, image.getWidth(), mVideoHeight);
}
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    // Drop the frame when sampling is disabled, or when a prediction is
    // already in flight (the CAS only runs if sampling is enabled).
    if (!shouldSample.get() || !isComputing.compareAndSet(false, true)) {
        frame.close();
        return;
    }
    visionImage = FritzVisionImage.fromMediaImage(frame, orientation);
    frame.close();
    runInBackground(() -> {
        objectResult = predictor.predict(visionImage);
        requestRender();
    });
}
@Override
@Nullable
public ImageProxy acquireLatestImage() {
    synchronized (mLock) {
        // Wrap the delegate's image, propagating null when no frame is queued.
        final Image latest = mDelegate.acquireLatestImage();
        return latest == null ? null : new AndroidImageProxy(latest);
    }
}
@Override
@Nullable
public ImageProxy acquireNextImage() {
    synchronized (mLock) {
        // Wrap the delegate's image, propagating null when no frame is queued.
        final Image next = mDelegate.acquireNextImage();
        return next == null ? null : new AndroidImageProxy(next);
    }
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
    // Select the YUV conversion matching the render rotation and mirror state.
    final byte[] out = mGLYuvBuffer.array();
    final int videoHeight = mGLRender.getVideoHeight();
    final boolean quarterTurn = mGLRender.getRotation() == Rotation.ROTATION_90;
    if (mIsFrontCamera && mEnableMirror) {
        if (quarterTurn) {
            RgbYuvConverter.image2yuvCropFlip(image, videoHeight, out);
        } else {
            RgbYuvConverter.image2yuvCropRotateC180Flip(image, videoHeight, out);
        }
    } else if (quarterTurn) {
        RgbYuvConverter.image2yuvCropRotateC180(image, videoHeight, out);
    } else {
        RgbYuvConverter.image2yuvCrop(image, videoHeight, out);
    }
    mVideoCaptureCallback.onFrameData(out, image.getWidth(), videoHeight);
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
@Override
public void onImageAvailable(ImageReader reader) {
    // Only forward frames while a listener is attached and capture is active.
    if (mListener == null || !isCapturing()) {
        return;
    }
    try {
        final long now = System.nanoTime();
        final Image img = reader.acquireLatestImage();
        if (img == null) {
            return;
        }
        // Rate-limit: forward only when min_nano_time has elapsed.
        if (now - lastFrame >= min_nano_time) {
            sendImage(img);
            img.close();
            lastFrame = now;
        } else {
            img.close();
        }
    } catch (final Exception e) {
        if (DEBUG) Log.w(TAG, "sendImage exception:", e);
    }
}
/**
 * Saves the captured JPEG frame off the UI thread and returns the latest
 * thumbnail bitmap.
 *
 * <p>Front-camera captures are decoded, mirrored horizontally, and saved as a
 * bitmap; rear-camera captures are saved as raw JPEG bytes. Fixes: the decoded
 * source bitmap is now recycled (it was previously leaked), and the rear-camera
 * log label now says "saveImage" instead of the misleading "saveBitmap".</p>
 *
 * @param images the captured frames; only {@code images[0]} is used and closed.
 * @return the latest thumbnail bitmap from {@link ImageUtils}.
 */
@Override
protected Bitmap doInBackground(Image ... images) {
    ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
    long time = System.currentTimeMillis();
    if (mCameraProxy.isFrontCamera()) {
        Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        // The front camera image must be mirrored horizontally.
        Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
        Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        ImageUtils.saveBitmap(rotateBitmap);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
        // Recycle both bitmaps; rotateBitmap may be the same instance as bitmap.
        if (bitmap != rotateBitmap) {
            bitmap.recycle();
        }
        rotateBitmap.recycle();
    } else {
        ImageUtils.saveImage(bytes);
        Log.d(TAG, "saveImage time: " + (System.currentTimeMillis() - time));
    }
    images[0].close();
    return ImageUtils.getLatestThumbBitmap();
}
/**
 * Persists the captured JPEG frame in the background and returns the latest
 * thumbnail bitmap. Front-camera frames are decoded and mirrored before
 * saving; rear-camera frames are written as raw JPEG bytes.
 */
@Override
protected Bitmap doInBackground(Image ... images) {
    final Image captured = images[0];
    final ByteBuffer jpegBuffer = captured.getPlanes()[0].getBuffer();
    final byte[] jpegBytes = new byte[jpegBuffer.remaining()];
    jpegBuffer.get(jpegBytes);
    long time = System.currentTimeMillis();
    if (mCameraProxy.isFrontCamera()) {
        final Bitmap decoded = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
        Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        // The front camera image must be mirrored horizontally.
        final Bitmap mirrored = ImageUtils.rotateBitmap(decoded, 0, true, true);
        Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
        time = System.currentTimeMillis();
        ImageUtils.saveBitmap(mirrored);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
        mirrored.recycle();
    } else {
        ImageUtils.saveImage(jpegBytes);
        Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
    }
    captured.close();
    return ImageUtils.getLatestThumbBitmap();
}
/**
 * Saves the captured frame in the background and returns the latest thumbnail.
 *
 * <p>Reads the JPEG bytes from the first plane of {@code images[0]}. For the
 * front camera the bytes are decoded, mirrored, and saved via
 * {@link ImageUtils#saveBitmap}; otherwise the raw bytes are saved directly.
 * The image is closed before returning.</p>
 *
 * @param images captured frames; only {@code images[0]} is used and closed.
 * @return the latest thumbnail bitmap from {@link ImageUtils}.
 */
@Override
protected Bitmap doInBackground(Image... images) {
ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
long time = System.currentTimeMillis();
if (mCameraProxy.isFrontCamera()) {
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
time = System.currentTimeMillis();
// The front camera image must be mirrored horizontally.
Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
time = System.currentTimeMillis();
ImageUtils.saveBitmap(rotateBitmap);
Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
rotateBitmap.recycle();
} else {
// NOTE(review): the log label says "saveBitmap" but this path saves raw bytes.
ImageUtils.saveImage(bytes);
Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
}
images[0].close();
return ImageUtils.getLatestThumbBitmap();
}
/**
 * Runs the model on the latest captured frame and shows the result.
 *
 * <p>Bug fix: {@code acquireLatestImage()} returns {@code null} when no frame
 * is queued; previously that caused an NPE at {@code image.getPlanes()}. The
 * image is still released in {@code finally} on every path.</p>
 */
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = null;
    try {
        image = reader.acquireLatestImage();
        if (image == null) {
            return; // no frame available
        }
        // Copy the first plane's bytes and decode them into a bitmap.
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
        final String text = runModel(bitmap);
        // Publish the model output on the UI thread.
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                textView.setText(text);
            }
        });
    } finally {
        if (image != null) {
            image.close();
        }
    }
}
/**
 * Decodes the given camera frame into {@code rgbFrameBitmap} and produces the
 * cropped/rescaled {@code croppedBitmap} used for inference.
 *
 * <p>Bug fixes: the size assertions previously dereferenced
 * {@code rgbFrameBitmap} before it was null-checked (NPE when the bitmaps are
 * not yet allocated), and the {@link Image} was leaked if decoding threw. The
 * assertions now run only when both bitmaps exist, and the image is closed in
 * {@code finally}.</p>
 *
 * @param image the camera frame; closed before returning. May be null.
 * @return {@code croppedBitmap} (possibly unchanged), or null if {@code image} is null.
 */
public Bitmap preprocessImage(final Image image) {
    if (image == null) {
        return null;
    }
    try {
        if (croppedBitmap != null && rgbFrameBitmap != null) {
            Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
            Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
            ByteBuffer bb = image.getPlanes()[0].getBuffer();
            rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
            cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
        }
    } finally {
        image.close();
    }
    // For debugging
    if (SAVE_PREVIEW_BITMAP) {
        saveBitmap(croppedBitmap);
    }
    return croppedBitmap;
}
/**
 * Decodes the camera frame into {@code rgbFrameBitmap}, then crops and
 * rescales it into {@code croppedBitmap}. The frame is closed before return.
 */
public Bitmap preprocessImage(final Image image) {
    if (image == null) {
        return null;
    }
    // Frame dimensions must match the preallocated RGB bitmap.
    Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
    Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
    final boolean buffersReady = croppedBitmap != null && rgbFrameBitmap != null;
    if (buffersReady) {
        final ByteBuffer planeBuffer = image.getPlanes()[0].getBuffer();
        rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(planeBuffer));
        cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
    }
    image.close();
    // For debugging
    if (SAVE_PREVIEW_BITMAP) {
        saveBitmap(croppedBitmap);
    }
    return croppedBitmap;
}
/**
 * Decodes the camera frame into {@code rgbFrameBitmap} and produces the
 * cropped/rescaled {@code croppedBitmap}. The frame is closed before return.
 *
 * @param image the camera frame; may be null, in which case null is returned.
 * @return {@code croppedBitmap} (possibly unchanged), or null.
 */
public Bitmap preprocessImage(final Image image) {
if (image == null) {
return null;
}
// NOTE(review): these assertions dereference rgbFrameBitmap before the
// null check below — NPE if the bitmaps are not yet allocated. Verify.
Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
if (croppedBitmap != null && rgbFrameBitmap != null) {
// The first plane is decoded as a compressed stream — presumably JPEG;
// TODO confirm the reader's configured format.
ByteBuffer bb = image.getPlanes()[0].getBuffer();
rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
}
image.close();
// For debugging
if (SAVE_PREVIEW_BITMAP) {
saveBitmap(croppedBitmap);
}
return croppedBitmap;
}
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    // Drop the frame when sampling is disabled, or when a prediction is
    // already running (the CAS only executes if sampling is enabled).
    if (!shouldSample.get() || !isComputing.compareAndSet(false, true)) {
        frame.close();
        return;
    }
    visionImage = FritzVisionImage.fromMediaImage(frame, orientation);
    frame.close();
    runInBackground(() -> {
        labelResult = predictor.predict(visionImage);
        requestRender();
    });
}
/**
 * Pushes the first plane of {@code image} to the native layer.
 *
 * <p>Only RGBA_8888 and RGB_565 are supported; any other format is logged
 * and dropped. The native call receives the plane's row stride so it can
 * handle padded rows.</p>
 *
 * @param image       the frame whose plane 0 is pushed; not closed here.
 * @param pixelFormat one of {@link PixelFormat#RGBA_8888} or {@link PixelFormat#RGB_565}.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void push(Image image, int pixelFormat) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer b = planes[0].getBuffer();
if (pixelFormat == PixelFormat.RGBA_8888) {
// RGBA_8888 packs each pixel into 4 bytes (32 bits), so
// planes[0].getPixelStride() is expected to be 4.
jni_push_pixels_rgba_8888(b, planes[0].getRowStride());
}
else if (pixelFormat == PixelFormat.RGB_565)
{
// RGB_565 packs each pixel into 2 bytes (16 bits), so the pixel stride is
// expected to be 2 — the original comment saying "16" was incorrect.
// NOTE(review): the native function is named jni_push_pixels_rgba_565 but
// RGB_565 has no alpha channel — confirm the JNI-side naming/behavior.
jni_push_pixels_rgba_565(b, planes[0].getRowStride());
}
else
{
Log.e(TAG, "Image reader acquired unsupported image format " + pixelFormat);
}
}
/**
 * Converts a YUV_420_888 {@link Image} to a cropped, horizontally flipped YUV
 * buffer by delegating to the stride-aware overload.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static int image2yuvCropFlip(Image imageIn, int outputHeight, byte[] yuvOut) {
    final Image.Plane[] planes = imageIn.getPlanes();
    final ByteBuffer luma = planes[0].getBuffer();
    final ByteBuffer chromaB = planes[1].getBuffer();
    final ByteBuffer chromaR = planes[2].getBuffer();
    return image2yuvCropFlip(imageIn.getWidth(), imageIn.getHeight(), luma, chromaR, chromaB,
            planes[2].getPixelStride(), planes[1].getPixelStride(), outputHeight, yuvOut);
}
/**
 * Creates an ImageSaver that will write {@code image} to {@code file}.
 *
 * @param image           the captured image to persist.
 * @param file            the destination file.
 * @param result          the capture metadata for this image.
 * @param characteristics the characteristics of the camera that produced it.
 * @param context         context used while saving.
 * @param reader          ref-counted reader owning the image's buffer pool.
 */
private ImageSaver(Image image, File file, CaptureResult result,
CameraCharacteristics characteristics, Context context,
RefCountedAutoCloseable<ImageReader> reader) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
}
/**
 * Converts a YUV_420_888 {@link Image} to a cropped, 180°-rotated and flipped
 * YUV buffer by delegating to the stride-aware overload.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static int image2yuvCropRotateC180Flip(Image imageIn, int outputHeight, byte[] yuvOut) {
    final Image.Plane[] planes = imageIn.getPlanes();
    final ByteBuffer luma = planes[0].getBuffer();
    final ByteBuffer chromaB = planes[1].getBuffer();
    final ByteBuffer chromaR = planes[2].getBuffer();
    return image2yuvCropRotateC180Flip(imageIn.getWidth(), imageIn.getHeight(), luma, chromaR,
            chromaB, planes[2].getPixelStride(), planes[1].getPixelStride(), outputHeight, yuvOut);
}
@Override
public void onImageAvailable(final ImageReader reader) {
    // Take the freshest frame; bail out quietly when none is queued.
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    if (shouldSample.get()) {
        visionImage = FritzVisionImage.fromMediaImage(frame, orientation);
    }
    frame.close();
}
/**
 * Counts incoming frames and dumps the 100th one as raw YUV for debugging.
 *
 * <p>Bug fix: {@code acquireLatestImage()} can return {@code null} when no
 * frame is queued; previously that caused an NPE (at {@code getWidth()} or
 * {@code close()}). Null frames are now skipped and not counted.</p>
 */
@Override
public void onImageAvailable(ImageReader imageReader) {
    Log.d("MainActivity", "onImageAvailable");
    final Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return; // no frame available; do not count it
    }
    count++;
    if (count == 100) {
        // YUV 4:2:0 needs width * height * 3/2 bytes.
        byte[] yuv = new byte[image.getWidth() * image.getHeight() * 3 / 2];
        image2yuv(image, yuv);
        saveRawYuvData(yuv, image.getWidth(), image.getHeight(), "org");
    }
    image.close();
}
/**
 * Validates the incoming frame and forwards its I420 planes to native code.
 *
 * <p>Frames must be YUV_420_888 with exactly 3 planes and match the reader's
 * configured size; violations are reported via {@code nativeOnError} and the
 * frame is dropped (the thrown IllegalStateException is caught below and the
 * try-with-resources closes the image).</p>
 */
@Override
public void onImageAvailable(ImageReader reader) {
try (Image image = reader.acquireLatestImage()) {
if (image == null) return;
if (image.getFormat() != ImageFormat.YUV_420_888 || image.getPlanes().length != 3) {
nativeOnError(mNativeVideoCaptureDeviceAndroid, "Unexpected image format: "
+ image.getFormat() + " or #planes: " + image.getPlanes().length);
// Throwing here jumps to the catch below; the image is still closed.
throw new IllegalStateException();
}
if (reader.getWidth() != image.getWidth()
|| reader.getHeight() != image.getHeight()) {
nativeOnError(mNativeVideoCaptureDeviceAndroid, "ImageReader size ("
+ reader.getWidth() + "x" + reader.getHeight()
+ ") did not match Image size (" + image.getWidth() + "x"
+ image.getHeight() + ")");
throw new IllegalStateException();
}
// Pass Y/U/V buffers plus the row/pixel strides needed to interpret them.
nativeOnI420FrameAvailable(mNativeVideoCaptureDeviceAndroid,
image.getPlanes()[0].getBuffer(), image.getPlanes()[0].getRowStride(),
image.getPlanes()[1].getBuffer(), image.getPlanes()[2].getBuffer(),
image.getPlanes()[1].getRowStride(), image.getPlanes()[1].getPixelStride(),
image.getWidth(), image.getHeight(), getCameraRotation(),
image.getTimestamp());
} catch (IllegalStateException ex) {
Log.e(TAG, "acquireLatestImage():", ex);
}
}
/**
 * Copies the captured frame's first plane and hands the bytes to the callback.
 *
 * <p>Bug fix: {@code acquireNextImage()} may return {@code null}; previously
 * {@code image.getPlanes()} would NPE inside the try-with-resources. The image
 * is auto-closed by try-with-resources on all paths.</p>
 */
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        if (image == null) {
            return; // no frame queued
        }
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
@Override
public void onImageAvailable(final ImageReader reader) {
    final Image frame = reader.acquireLatestImage();
    if (frame == null) {
        return;
    }
    // Only one prediction may be in flight at a time.
    if (!computing.compareAndSet(false, true)) {
        frame.close();
        return;
    }
    setupImageForPrediction(frame);
    frame.close();
    runInBackground(
        new Runnable() {
            @Override
            public void run() {
                runInference();
                // Fire callback to change the OverlayView
                requestRender();
                computing.set(false);
            }
        });
}
/**
 * Synchronously captures one frame of the mirrored screen into a Bitmap.
 *
 * <p>Creates a temporary ImageReader-backed virtual display, waits up to one
 * second for a frame, and converts the RGBA_8888 plane (including any row
 * padding) into an ARGB_8888 bitmap.</p>
 *
 * <p>Fixes: the interrupt flag is restored on {@link InterruptedException}
 * (it was previously swallowed), a null frame no longer NPEs, and both the
 * {@link Image} and the {@link ImageReader} are released on all paths.</p>
 *
 * @return the captured bitmap.
 * @throws IllegalStateException on timeout, interruption, or missing frame.
 */
public synchronized Bitmap captureScreen() throws IllegalStateException {
    AtomicReference<Image> imageHolder = new AtomicReference<>();
    final CountDownLatch latch = new CountDownLatch(1);
    ImageReader imageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
    imageReader.setOnImageAvailableListener(reader -> {
        imageHolder.set(reader.acquireLatestImage());
        latch.countDown();
    }, mHandler);
    VirtualDisplay display = mProjection.createVirtualDisplay("screen-mirror", mWidth, mHeight,
            mDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
                    | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
            imageReader.getSurface(), null, null);
    try {
        latch.await(1, TimeUnit.SECONDS);
        if (latch.getCount() == 1) {
            throw new IllegalStateException("Screen capturing timed out");
        }
        final Image image = imageHolder.get();
        if (image == null) {
            throw new IllegalStateException("Screen capturing produced no image");
        }
        try {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            // Rows may be padded beyond mWidth * pixelStride; widen the bitmap
            // to cover the padding so copyPixelsFromBuffer lines up.
            int rowPadding = rowStride - pixelStride * mWidth;
            Bitmap bmp = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight,
                    Bitmap.Config.ARGB_8888);
            bmp.copyPixelsFromBuffer(buffer);
            return bmp;
        } finally {
            image.close();
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve the interrupt status
        throw new IllegalStateException("Got interrupt while capturing screen");
    } finally {
        display.release();
        imageReader.close();
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // Convert each queued frame to NV21 and hand it to the frame processor.
    final Image frame = reader.acquireNextImage();
    if (frame != null) {
        mFrameProcessor.setNextFrame(convertYUV420888ToNV21(frame));
        frame.close();
    }
}
/**
 * Forwards the latest frame to the camera-frame callback.
 *
 * <p>Ownership of the image is transferred to the callback together with a
 * {@code image::close} release handle — the callback is responsible for
 * invoking it when done; the image is NOT closed here.</p>
 *
 * <p>NOTE(review): OutOfMemoryError is deliberately caught and mapped to
 * ERR_UNKNOWN rather than crashing — confirm this best-effort policy is
 * intended before changing it.</p>
 */
@Override
public void onImageAvailable(ImageReader reader) {
try {
final Image image = reader.acquireLatestImage();
if (image != null) {
mCameraFrameCallback.onFrameData(image, image::close);
}
} catch (OutOfMemoryError | IllegalStateException e) {
CameraCompat.onError(CameraCompat.ERR_UNKNOWN);
}
}