android.media.Image.Plane code examples

The examples below show how the android.media.Image.Plane API is used in practice. Each listing is taken from an open-source project, and the full source of each project can be viewed on GitHub.
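
Before the examples, here is a minimal sketch of where Image.Plane instances come from: an ImageReader configured for YUV_420_888 delivers Images whose planes carry the pixel buffers and their strides. The 640x480 size and maxImages count below are illustrative assumptions, not values from any of the projects listed here.

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;

final class PlaneBasics {
    // Illustrative setup; real dimensions come from your camera configuration.
    static ImageReader createReader() {
        final ImageReader reader =
                ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 2);
        reader.setOnImageAvailableListener(r -> {
            final Image image = r.acquireLatestImage();
            if (image == null) {
                return;
            }
            try {
                // YUV_420_888 always delivers three planes: Y, U, V.
                for (final Image.Plane plane : image.getPlanes()) {
                    plane.getBuffer();      // ByteBuffer holding this plane's pixels
                    plane.getRowStride();   // bytes between the starts of adjacent rows
                    plane.getPixelStride(); // bytes between adjacent pixels in a row
                }
            } finally {
                image.close(); // release the frame so the reader can reuse its buffer
            }
        }, null);
        return reader;
    }
}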

Example 1 (Project: dbclf, File: CameraActivity.java)
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
    // Because of the variable row stride it's not possible to know in
    // advance the actual necessary dimensions of the yuv planes.
    for (int i = 0; i < planes.length; ++i) {
        final ByteBuffer buffer = planes[i].getBuffer();
        if (yuvBytes[i] == null) {
            //LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
            yuvBytes[i] = new byte[buffer.capacity()];
        }
        buffer.get(yuvBytes[i]);
    }
}
 
Example 2 (project and file not identified): the same fillBytes() helper, with the debug logging enabled.

protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  // Because of the variable row stride it's not possible to know in
  // advance the actual necessary dimensions of the yuv planes.
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    if (yuvBytes[i] == null) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
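
Both versions copy each plane's ByteBuffer wholesale, padding included, and leave the row stride for the converter to interpret later. As a contrast, here is a hedged sketch that strips the stride padding at copy time; the helper name copyPlaneTight is my own and does not appear in either project.

import android.media.Image;
import java.nio.ByteBuffer;

final class PlaneCopy {
    // Hypothetical helper (not from dbclf): copy one plane into a tightly
    // packed array, dropping row-stride padding and pixel-stride gaps.
    // For the U/V planes of YUV_420_888, pass the chroma dimensions
    // (width / 2, height / 2).
    static byte[] copyPlaneTight(Image.Plane plane, int width, int height) {
        final ByteBuffer buffer = plane.getBuffer();
        final int rowStride = plane.getRowStride();
        final int pixelStride = plane.getPixelStride();
        final byte[] out = new byte[width * height];
        final byte[] row = new byte[rowStride];
        for (int y = 0; y < height; y++) {
            buffer.position(y * rowStride);
            // The final row may be shorter than rowStride; read only what remains.
            buffer.get(row, 0, Math.min(rowStride, buffer.remaining()));
            for (int x = 0; x < width; x++) {
                out[y * width + x] = row[x * pixelStride];
            }
        }
        return out;
    }
}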
 
Example 3 (Project: dbclf, File: CameraActivity.java)
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    //We need to wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        final Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);
        yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);

        postInferenceCallback = () -> {
            image.close();
            isProcessingFrame = false;
        };

        processImage();
    } catch (final Exception ignored) {
    }
}
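
Example 3 packages both the YUV conversion and the frame cleanup into Runnables for deferred execution. A plausible consumer, written here as a guess at the shape of processImage rather than dbclf's actual implementation, would run them like this:

// Hypothetical sketch of the consumer side; dbclf's real processImage may differ.
protected void processImage() {
    imageConverter.run(); // fills rgbBytes from the captured YUV planes
    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
    handler.post(() -> {
        runInference(rgbFrameBitmap); // hypothetical model call
        postInferenceCallback.run();  // closes the Image and clears isProcessingFrame
    });
}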
 
Example 4 (project and file not identified): an earlier variant of the same callback, with tracing and debug logging enabled.

/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  // We need to wait until we have some size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  try {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    if (isProcessingFrame) {
      image.close();
      return;
    }
    isProcessingFrame = true;
    Trace.beginSection("imageAvailable");
    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);
    yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();

    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);
          }
        };

    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            image.close();
            isProcessingFrame = false;
          }
        };

    processImage();
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }
  Trace.endSection();
}
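
This variant brackets the frame handling with android.os.Trace sections so the work shows up in systrace captures. Every beginSection must be balanced by an endSection on the same thread, which is why the catch block above also ends the section before returning. A try/finally, as in this small sketch (handleFrame is a hypothetical placeholder), makes that pairing harder to get wrong:

import android.os.Trace;

void onFrame() {
    Trace.beginSection("imageAvailable");
    try {
        handleFrame(); // hypothetical placeholder for the per-frame work above
    } finally {
        Trace.endSection(); // always balanced, even if handleFrame() throws
    }
}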
 
Example 5 (Project: Paideia, File: TensorflowImageListener.java)
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }
    
    // No mutex needed as this method is not reentrant.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();

    // Initialize the storage bitmaps once when the resolution is known.
    if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
      previewWidth = image.getWidth();
      previewHeight = image.getHeight();

      LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
      rgbBytes = new int[previewWidth * previewHeight];
      rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
      croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

      yuvBytes = new byte[planes.length][];
      for (int i = 0; i < planes.length; ++i) {
        yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
      }
    }

    for (int i = 0; i < planes.length; ++i) {
      planes[i].getBuffer().get(yuvBytes[i]);
    }

    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  handler.post(
      new Runnable() {
        @Override
        public void run() {
          final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

          LOGGER.v("%d results", results.size());
          for (final Classifier.Recognition result : results) {
            LOGGER.v("Result: " + result.getTitle());
          }
          scoreView.setResults(results);
          computing = false;
        }
      });

  Trace.endSection();
}
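
Note that this listing calls an older convertYUV420ToARGB8888 signature, with the output array before the dimensions and a trailing boolean flag, unlike the variant used in Examples 3 and 4. Under either signature, the per-pixel conversion is typically the standard integer BT.601 transform; the helper below is an illustrative implementation of that math, not the projects' actual ImageUtils code.

final class Yuv {
    private static final int MAX = 262143; // 2^18 - 1: clamp bound in 10-bit fixed point

    // Standard integer BT.601 YUV -> ARGB approximation (illustrative only).
    static int yuvToArgb(int y, int u, int v) {
        y = Math.max(y - 16, 0); // video-range luma starts at 16
        u -= 128;
        v -= 128;
        final int y1192 = 1192 * y; // 1192 ~= 1.164 * 1024
        final int r = Math.min(Math.max(y1192 + 1634 * v, 0), MAX);
        final int g = Math.min(Math.max(y1192 - 833 * v - 400 * u, 0), MAX);
        final int b = Math.min(Math.max(y1192 + 2066 * u, 0), MAX);
        return 0xff000000 | ((r >> 10) << 16) | ((g >> 10) << 8) | (b >> 10);
    }
}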
 
 