android.graphics.ImageFormat#RAW10 Code Examples

The examples below show how android.graphics.ImageFormat#RAW10 is used in several open-source projects; follow each project's link to view the full source file on GitHub.
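
RAW10 buffers are normally obtained through the camera2 API by allocating an ImageReader for ImageFormat.RAW10 and adding its surface as a capture target. As orientation for the examples below, here is a minimal sketch; it is not taken from any of the listed projects, and the 4032x3024 size and the listener body are placeholder assumptions (real code should pick a size from the camera's stream configuration map).

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;

public final class Raw10ReaderFactory {

    private Raw10ReaderFactory() {}

    /** Creates an ImageReader that delivers packed RAW10 frames. */
    public static ImageReader createRaw10Reader(Handler handler) {
        ImageReader reader =
                ImageReader.newInstance(4032, 3024, ImageFormat.RAW10, /* maxImages= */ 2);
        reader.setOnImageAvailableListener(r -> {
            Image image = r.acquireNextImage();
            if (image != null) {
                // RAW10 images expose a single plane of packed 10-bit Bayer data.
                // Copy or process the buffer here, then close the Image promptly.
                image.close();
            }
        }, handler);
        return reader;
    }
}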

Example 1   Project: Camera2   File: AndroidImageReaderProxy.java
private static String imageFormatToString(int imageFormat)
{
    switch (imageFormat)
    {
        case ImageFormat.JPEG:
            return "JPEG";
        case ImageFormat.NV16:
            return "NV16";
        case ImageFormat.NV21:
            return "NV21";
        case ImageFormat.RAW10:
            return "RAW10";
        case ImageFormat.RAW_SENSOR:
            return "RAW_SENSOR";
        case ImageFormat.RGB_565:
            return "RGB_565";
        case ImageFormat.UNKNOWN:
            return "UNKNOWN";
        case ImageFormat.YUV_420_888:
            return "YUV_420_888";
        case ImageFormat.YUY2:
            return "YUY2";
        case ImageFormat.YV12:
            return "YV12";
    }
    return Integer.toString(imageFormat);
}
 
Example 2   Project: libcommon   File: CameraUtils.java
/**
 * Dumps the supported picture formats to logcat.
 * @param params camera parameters to query
 */
public static void dumpSupportedPictureFormats(@NonNull final Camera.Parameters params) {
	final List<Integer> formats = params.getSupportedPictureFormats();
	for (final int format: formats) {
		switch (format) {
		case ImageFormat.DEPTH16:			Log.i(TAG, "supported: DEPTH16"); break;
		case ImageFormat.DEPTH_POINT_CLOUD:	Log.i(TAG, "supported: DEPTH_POINT_CLOUD"); break;
		case ImageFormat.FLEX_RGBA_8888:	Log.i(TAG, "supported: FLEX_RGBA_8888"); break;
		case ImageFormat.FLEX_RGB_888:		Log.i(TAG, "supported: FLEX_RGB_888"); break;
		case ImageFormat.JPEG:				Log.i(TAG, "supported: JPEG"); break;
		case ImageFormat.NV16:				Log.i(TAG, "supported: NV16"); break;
		case ImageFormat.NV21:				Log.i(TAG, "supported: NV21"); break;
		case ImageFormat.PRIVATE:			Log.i(TAG, "supported: PRIVATE"); break;
		case ImageFormat.RAW10:				Log.i(TAG, "supported: RAW10"); break;
		case ImageFormat.RAW12:				Log.i(TAG, "supported: RAW12"); break;
		case ImageFormat.RAW_PRIVATE:		Log.i(TAG, "supported: RAW_PRIVATE"); break;
		case ImageFormat.RAW_SENSOR:		Log.i(TAG, "supported: RAW_SENSOR"); break;
		case ImageFormat.RGB_565:			Log.i(TAG, "supported: RGB_565"); break;
		case ImageFormat.UNKNOWN:			Log.i(TAG, "supported: UNKNOWN"); break;
		case ImageFormat.YUV_420_888:		Log.i(TAG, "supported: YUV_420_888"); break;
		case ImageFormat.YUV_422_888:		Log.i(TAG, "supported: YUV_422_888"); break;
		case ImageFormat.YUV_444_888:		Log.i(TAG, "supported: YUV_444_888"); break;
		case ImageFormat.YUY2:				Log.i(TAG, "supported: YUY2"); break;
		case ImageFormat.YV12:				Log.i(TAG, "supported: YV12"); break;
		default:
			Log.i(TAG, String.format("supported: unknown, %08x", format));
			break;
		}
	}
}
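
The helper above uses the legacy android.hardware.Camera API. On camera2, the analogous check for RAW10 support reads the stream configuration map. The sketch below is an illustrative addition rather than part of libcommon; it assumes a CameraManager and camera id are already available and uses its own TAG constant.

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Log;
import android.util.Size;

import androidx.annotation.NonNull;

public final class Raw10SupportDump {
	private static final String TAG = "Raw10SupportDump";

	/**
	 * Logs whether the given camera advertises RAW10 output and at which sizes.
	 */
	public static void dumpRaw10Support(@NonNull final CameraManager manager, @NonNull final String cameraId)
		throws CameraAccessException {

		final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
		final StreamConfigurationMap map =
			characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
		final Size[] sizes = (map != null) ? map.getOutputSizes(ImageFormat.RAW10) : null;
		if (sizes == null || sizes.length == 0) {
			Log.i(TAG, "RAW10 output not supported on camera " + cameraId);
			return;
		}
		for (final Size size : sizes) {
			Log.i(TAG, "RAW10 supported: " + size.getWidth() + "x" + size.getHeight());
		}
	}
}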
 
Example 3
private String formatToString(int format) {
    switch (format) {
        case ImageFormat.YV12:
            return "YV12";
        case ImageFormat.YUV_420_888:
            return "YUV_420_888";
        case ImageFormat.NV21:
            return "NV21";
        case ImageFormat.NV16:
            return "NV16";
        case PixelFormat.RGB_565:
            return "RGB_565";
        case PixelFormat.RGBA_8888:
            return "RGBA_8888";
        case PixelFormat.RGBX_8888:
            return "RGBX_8888";
        case PixelFormat.RGB_888:
            return "RGB_888";
        case ImageFormat.JPEG:
            return "JPEG";
        case ImageFormat.YUY2:
            return "YUY2";
        case ImageFormat.Y8:
            return "Y8";
        case ImageFormat.Y16:
            return "Y16";
        case ImageFormat.RAW_SENSOR:
            return "RAW_SENSOR";
        case ImageFormat.RAW_PRIVATE:
            return "RAW_PRIVATE";
        case ImageFormat.RAW10:
            return "RAW10";
        case ImageFormat.DEPTH16:
            return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD:
            return "DEPTH_POINT_CLOUD";
        case ImageFormat.RAW_DEPTH:
            return "RAW_DEPTH";
        case ImageFormat.PRIVATE:
            return "PRIVATE";
        default:
            return "UNKNOWN";
    }
}
 
Example 4   Project: libsoftwaresync   File: ResultProcessor.java
private void processStill(final Frame frame, String basename) {
  File captureDir = new File(context.getExternalFilesDir(null), basename);
  if (!captureDir.exists() && !captureDir.mkdirs()) {
    throw new IllegalStateException("Could not create dir " + captureDir);
  }
  // Timestamp in the local domain, i.e. time since boot in nanoseconds.
  long localSensorTimestampNs = frame.result.get(CaptureResult.SENSOR_TIMESTAMP);
  // Timestamp in the leader domain, i.e. synchronized time on the leader device in nanoseconds.
  long syncedSensorTimestampNs =
      timeDomainConverter.leaderTimeForLocalTimeNs(localSensorTimestampNs);
  // Use syncedSensorTimestamp in milliseconds for filenames.
  long syncedSensorTimestampMs = (long) TimeUtils.nanosToMillis(syncedSensorTimestampNs);
  String filenameTimeString = getTimeStr(syncedSensorTimestampMs);

  // Save timing metadata.
  {
    String metaFilename = "sync_metadata_" + filenameTimeString + ".txt";
    File metaFile = new File(captureDir, metaFilename);
    saveTimingMetadata(syncedSensorTimestampNs, localSensorTimestampNs, metaFile);
  }

  for (int i = 0; i < frame.output.images.size(); ++i) {
    Image image = frame.output.images.get(i);
    int format = image.getFormat();
    if (format == ImageFormat.RAW_SENSOR) {
      // Note: while using DngCreator works, streaming RAW_SENSOR is too slow.
      Log.e(TAG, "RAW_SENSOR saving not implemented!");
    } else if (format == ImageFormat.JPEG) {
      Log.e(TAG, "JPEG saving not implemented!");
    } else if (format == ImageFormat.RAW10) {
      Log.e(TAG, "RAW10 saving not implemented!");
    } else if (format == ImageFormat.YUV_420_888) {
      // TODO(jiawen): We know that on Pixel devices, the YUV format is NV21, consisting of a luma
      // plane and separate interleaved chroma planes.
      //     <--w-->
      // ^   YYYYYYYZZZ
      // |   YYYYYYYZZZ
      // h   ...
      // |   ...
      // v   YYYYYYYZZZ
      //
      //     <--w-->
      // ^   VUVUVUVZZZZZ
      // |   VUVUVUVZZZZZ
      // h/2 ...
      // |   ...
      // v   VUVUVUVZZZZZ
      //
      // where Z is padding bytes.
      //
      // TODO(jiawen): To determine if it's NV12 vs NV21, we need JNI to compare the buffer start
      // addresses.

      context.notifyCapturing("img_" + filenameTimeString);

      // Save NV21 raw + metadata.
      {
        File nv21File = new File(captureDir, "img_" + filenameTimeString + ".nv21");
        File nv21MetadataFile =
            new File(captureDir, "nv21_metadata_" + filenameTimeString + ".txt");
        saveNv21(image, nv21File, nv21MetadataFile);
        context.notifyCaptured(nv21File.getName());
      }

      // TODO(samansari): Make save JPEG a checkbox in the UI.
      if (saveJpgFromNv21) {
        YuvImage yuvImage = yuvImageFromNv21Image(image);
        File jpgFile = new File(captureDir, "img_" + filenameTimeString + ".jpg");

        // Push saving JPEG onto queue to let the frame close faster, necessary for some devices.
        handler.post(() -> saveJpg(yuvImage, jpgFile));
      }
    } else {
      Log.e(TAG, String.format("Cannot save unsupported image format: %d", image.getFormat()));
    }
  }

  frame.close();
}
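
In processStill above, the RAW10 branch only logs that saving is not implemented. A straightforward way to persist such a frame is to write out the single packed plane together with its row stride so the data can be reinterpreted later. The sketch below is an illustrative addition rather than part of libsoftwaresync; it assumes the surrounding class's TAG constant, leaves file naming to the caller, and additionally needs java.io.FileOutputStream, java.io.IOException, java.nio.ByteBuffer, and java.nio.channels.FileChannel imports.

  /**
   * Writes the packed RAW10 plane of an Image to a file. The caller still needs the
   * width, height, and row stride (logged here) to reinterpret the data afterwards.
   */
  private static void saveRaw10(Image image, File outFile) {
    // RAW10 images carry exactly one plane of packed 10-bit Bayer samples.
    Image.Plane plane = image.getPlanes()[0];
    ByteBuffer buffer = plane.getBuffer();
    Log.i(TAG, "Saving RAW10 " + image.getWidth() + "x" + image.getHeight()
        + ", rowStride=" + plane.getRowStride() + " to " + outFile.getName());
    try (FileOutputStream out = new FileOutputStream(outFile);
        FileChannel channel = out.getChannel()) {
      channel.write(buffer);
    } catch (IOException e) {
      Log.e(TAG, "Failed to save RAW10 frame", e);
    }
  }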
 
Example 5   Project: MobileInfo   File: CameraInfo.java
private static String getFormat(int format) {
    switch (format) {
        case ImageFormat.DEPTH16:
            return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD:
            return "DEPTH_POINT_CLOUD";
        case ImageFormat.FLEX_RGBA_8888:
            return "FLEX_RGBA_8888";
        case ImageFormat.FLEX_RGB_888:
            return "FLEX_RGB_888";
        case ImageFormat.JPEG:
            return "JPEG";
        case ImageFormat.NV16:
            return "NV16";
        case ImageFormat.NV21:
            return "NV21";
        case ImageFormat.PRIVATE:
            return "PRIVATE";
        case ImageFormat.RAW10:
            return "RAW10";
        case ImageFormat.RAW12:
            return "RAW12";
        case ImageFormat.RAW_PRIVATE:
            return "RAW_PRIVATE";
        case ImageFormat.RAW_SENSOR:
            return "RAW_SENSOR";
        case ImageFormat.RGB_565:
            return "RGB_565";
        case ImageFormat.YUV_420_888:
            return "YUV_420_888";
        case ImageFormat.YUV_422_888:
            return "YUV_422_888";
        case ImageFormat.YUV_444_888:
            return "YUV_444_888";
        case ImageFormat.YUY2:
            return "YUY2";
        case ImageFormat.YV12:
            return "YV12";
        default:
            return UNKNOWN + "-" + format;
    }
}
 
Example 6   Project: FastBarcodeScanner   File: MainActivity.java
private String formatFormat(int imageFormat)
{
    switch (imageFormat)
    {
        case ImageFormat.UNKNOWN:
            return "UNKNOWN";
        case ImageFormat.NV21:
            return "NV21";
        case ImageFormat.NV16:
            return "NV16";
        case ImageFormat.YV12:
            return "YV12";
        case ImageFormat.YUY2:
            return "YUY2";
        case ImageFormat.YUV_420_888:
            return "YUV_420_888";
        case ImageFormat.YUV_422_888:
            return "YUV_422_888";
        case ImageFormat.YUV_444_888:
            return "YUV_444_888";
        case ImageFormat.FLEX_RGB_888:
            return "FLEX_RGB_888";
        case ImageFormat.FLEX_RGBA_8888:
            return "FLEX_RGBA_8888";
        case ImageFormat.JPEG:
            return "JPEG";
        case ImageFormat.RGB_565:
            return "RGB_565";
        case ImageFormat.RAW_SENSOR:
            return "RAW_SENSOR";
        case ImageFormat.RAW10:
            return "RAW10";
        case ImageFormat.RAW12:
            return "RAW12";
        case ImageFormat.DEPTH16:
            return "DEPTH16";
        case ImageFormat.DEPTH_POINT_CLOUD:
            return "DEPTH_POINT_CLOUD";
        //case ImageFormat.Y8:
        //case ImageFormat.Y16:

    }

    return "" + imageFormat;
}
 
Example 7   Project: FastBarcodeScanner   File: StillSequenceCamera.java
private static double getFormatCost(int format) {
    switch (format) {
        case ImageFormat.UNKNOWN:
            return 1.0;
        case ImageFormat.NV21:
            return 0.8;
        case ImageFormat.NV16:
            // This format has never been seen in the wild, but is compatible as we only care
            // about the Y channel, so allow it.
            return 0.8;
        case ImageFormat.YV12:
        case ImageFormat.YUY2:
        case ImageFormat.YUV_420_888:
            return 0.5; // pure guesswork - but it IS faster than JPEG
        case ImageFormat.YUV_422_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
            return 0.5;
        case ImageFormat.YUV_444_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
            return 0.5;
        case ImageFormat.FLEX_RGB_888:
        case ImageFormat.FLEX_RGBA_8888:
        case ImageFormat.RGB_565:
            return 0.8; // pure guesswork
        case ImageFormat.JPEG:
            return 1.0; // duh...?
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
            return 0.4; // pure guesswork - but any RAW format must be optimal (wrt capture speed)?
        case ImageFormat.DEPTH16:
        case ImageFormat.DEPTH_POINT_CLOUD:
            return 1.5; // sounds terribly complicated - but I'm just guessing....
        //ImageFormat.Y8:
        //ImageFormat.Y16:
    }

    return 1.0;
}
 
Example 8   Project: FastBarcodeScanner   File: JpegUtils.java
public static byte[] ToJpeg(byte[] imageData, int imageFormat, int width, int height)
{
    if (imageData == null)
        return null;

    switch (imageFormat)
    {
        case ImageFormat.NV21:
        case ImageFormat.YUY2:
            YuvImage img = new YuvImage(imageData, imageFormat, width, height, null);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            int quality = 20; //set quality
            img.compressToJpeg(new Rect(0, 0, width, height), quality, baos);//this line decreases the image quality
            return baos.toByteArray();
        case ImageFormat.YUV_420_888:
            return JpegFromYuv420888(imageData, imageFormat, width, height);


        case ImageFormat.UNKNOWN:
            return null;
        case ImageFormat.NV16:
            // Source: http://www.programcreek.com/java-api-examples/index.php?source_dir=Roid-Library-master/src/com/rincliu/library/common/persistence/zxing/camera/CameraManager.java
            // This format has never been seen in the wild, but is compatible as we only care
            // about the Y channel, so allow it.
        case ImageFormat.YV12:
            // source: https://github.com/evopark/tiqr-android/blob/master/src/main/java/de/evopark/tiqr/android/processing/ZxingQrScanner.java
        case ImageFormat.YUV_422_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
        case ImageFormat.YUV_444_888:
            // only varies from yuv_420_888 in chroma-subsampling, which I'm guessing
            // doesn't affect the luminance much
            // (see https://en.wikipedia.org/wiki/Chroma_subsampling)
            return null;//new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
        case ImageFormat.FLEX_RGB_888:
        case ImageFormat.FLEX_RGBA_8888:
            return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.JPEG:
            // Tried and tested myself
            return null;//new RGBLuminanceSource(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.RGB_565:
            return null;//new RGB565(width, height, uncompress(data, width, height));// PlanarYUVLuminanceSource(bytes, width, height, 0, 0, width, height, false);
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
        case ImageFormat.DEPTH16:
        case ImageFormat.DEPTH_POINT_CLOUD:
            //ImageFormat.Y8:
            //ImageFormat.Y16:
            return null;
        default:
            throw new IllegalArgumentException("No support for image format " + imageFormat);
    }
}
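
None of the examples above actually decodes RAW10 data; they only label it or skip it. For reference, ImageFormat.RAW10 packs each group of four pixels into five bytes: the first four bytes carry bits 9..2 of each pixel, and the fifth byte carries the two low bits of all four pixels, with pixel 0 in its least significant bits. The following is a minimal unpacking sketch, not taken from any of the listed projects; it assumes the width is a multiple of four and that the caller passes the plane's row stride.

/**
 * Unpacks a row-major RAW10 buffer into one 16-bit sample per pixel.
 * Assumes width is a multiple of 4 and rowStride >= width * 10 / 8.
 */
public static short[] unpackRaw10(byte[] packed, int width, int height, int rowStride)
{
    short[] out = new short[width * height];
    for (int y = 0; y < height; y++)
    {
        int rowStart = y * rowStride;
        for (int x = 0; x < width; x += 4)
        {
            int base = rowStart + (x / 4) * 5;
            int b0 = packed[base] & 0xFF;
            int b1 = packed[base + 1] & 0xFF;
            int b2 = packed[base + 2] & 0xFF;
            int b3 = packed[base + 3] & 0xFF;
            int lsbs = packed[base + 4] & 0xFF;
            int o = y * width + x;
            // Each of the first four bytes holds bits 9..2 of one pixel; the fifth byte
            // holds the two low bits of each pixel, pixel 0 in its least significant bits.
            out[o]     = (short) ((b0 << 2) | (lsbs & 0x3));
            out[o + 1] = (short) ((b1 << 2) | ((lsbs >> 2) & 0x3));
            out[o + 2] = (short) ((b2 << 2) | ((lsbs >> 4) & 0x3));
            out[o + 3] = (short) ((b3 << 2) | ((lsbs >> 6) & 0x3));
        }
    }
    return out;
}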