类android.hardware.camera2.CaptureResult源码实例Demo

下面列出了怎么用android.hardware.camera2.CaptureResult的API类实例代码及写法,或者点击链接到github查看源代码。

源代码1 项目: android_9.0.0_r45   文件: LegacyResultMapper.java
/**
 * Maps legacy {@link Camera.Parameters} auto-focus settings into the camera2
 * {@link CaptureResult} fields of the given metadata object.
 *
 * @param m           destination metadata to populate
 * @param activeArray active array size of the sensor, used to scale AF regions
 * @param zoomData    current zoom/crop information for region mapping
 * @param p           legacy camera parameters to read AF settings from
 */
private static void mapAf(CameraMetadataNative m,
        Rect activeArray, ZoomData zoomData, Camera.Parameters p) {
    // control.afMode
    m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode()));

    // control.afRegions - only reported when the legacy camera supports focus areas.
    if (p.getMaxNumFocusAreas() > 0) {
        if (DEBUG) {
            String focusAreas = p.get("focus-areas");
            // Log prefix fixed: this method is mapAf, not mapAe (copy-paste error).
            Log.v(TAG, "mapAf - parameter dump; focus-areas: " + focusAreas);
        }

        MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
                zoomData, p.getFocusAreas(), "AF");

        m.set(CONTROL_AF_REGIONS, meteringRectArray);
    }
}
 
源代码2 项目: android_9.0.0_r45   文件: CameraDeviceImpl.java
/**
 * This function is called every time a result has been completed.
 *
 * <p>It keeps a track of all the partial results already created for a particular
 * frame number.</p>
 *
 * @param frameNumber the frame number corresponding to the result
 * @param result the total or partial result
 * @param partial {@true} if the result is partial, {@code false} if total
 * @param isReprocess true if it is a reprocess result, false if it is a regular result.
 */
/**
 * Records the completion of a capture result.
 *
 * <p>Partial results are accumulated, in order, per frame number; total
 * results simply mark the frame as successfully completed and are not
 * retained.</p>
 *
 * @param frameNumber the frame number corresponding to the result
 * @param result      the total or partial result
 * @param partial     {@code true} if the result is partial, {@code false} if total
 * @param isReprocess {@code true} for a reprocess result, {@code false} for a regular one
 */
public void updateTracker(long frameNumber, CaptureResult result, boolean partial,
        boolean isReprocess) {
    if (!partial) {
        // A total result marks the frame as successful; totals themselves
        // are not stored since nothing needs to track them.
        updateTracker(frameNumber, /*isError*/false, isReprocess);
        return;
    }

    if (result == null) {
        // Blank partials are ignored; without them there will be no total
        // result either, so nothing is lost by skipping the record.
        return;
    }

    // Partial results must be aggregated in-order for that frame number;
    // create the per-frame list lazily on first partial.
    List<CaptureResult> framePartials = mPartialResults.get(frameNumber);
    if (framePartials == null) {
        framePartials = new ArrayList<>();
        mPartialResults.put(frameNumber, framePartials);
    }
    framePartials.add(result);
}
 
/**
 * Reads face-detection statistics from the capture result and forwards each
 * detected face, mapped to canvas coordinates, to the attached view.
 */
private void detectFaces(CaptureResult captureResult) {
    Integer detectMode = captureResult.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    if (!isViewAvailable() || detectMode == null) {
        return;
    }

    android.hardware.camera2.params.Face[] faces =
            captureResult.get(CaptureResult.STATISTICS_FACES);
    if (faces == null) {
        return;
    }

    Log.i(TAG, "faces : " + faces.length + " , mode : " + detectMode);
    for (android.hardware.camera2.params.Face face : faces) {
        Rect bounds = face.getBounds();
        // Once processed, the result is sent back to the View.
        presenterView.onFaceDetected(mapCameraFaceToCanvas(bounds,
                face.getLeftEyePosition(), face.getRightEyePosition()));
    }
}
 
源代码4 项目: DeviceConnect-Android   文件: Camera2Helper.java
/**
 * Auto-Focus の状態を文字列に変換します。
 *
 * @param afState Auto Focus の状態
 * @return 文字列
 */
/**
 * Converts an Auto-Focus state constant into a human-readable string.
 *
 * @param afState AF state from {@link CaptureResult#CONTROL_AF_STATE}; may be null
 * @return the constant's qualified name, "NULL" when absent, or "UNKNOWN"
 *         when the value is not a recognized AF state
 */
static String debugAFState(Integer afState) {
    if (afState == null) {
        return "NULL";
    }

    switch (afState) {
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "CaptureResult.CONTROL_AF_STATE_INACTIVE";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED";
        default:
            return "UNKNOWN";
    }
}
 
源代码5 项目: Camera2   文件: CaptureSessionStatsCollector.java
/**
 * Decorate the collector when the CaptureResult becomes available, which happens sometime
 * after picture is taken.  In the current implementation, we query this structure for
 * two fields: 1) CaptureResult.STATISTICS_FACES and 2) CaptureResult.LENS_FOCUS_DISTANCE
 *
 * @param captureResult CaptureResults to be queried for capture event information
 */
/**
 * Decorates the collector once the CaptureResult becomes available, which
 * happens some time after the picture is taken. Two fields are queried:
 * CaptureResult.STATISTICS_FACES and CaptureResult.LENS_FOCUS_DISTANCE.
 *
 * @param captureResult capture result to be queried for capture event information
 */
public void decorateAtTimeOfCaptureRequestAvailable(CaptureResultProxy captureResult)
{
    Face[] faces = captureResult.get(CaptureResult.STATISTICS_FACES);
    if (faces == null)
    {
        // No face statistics reported for this capture.
        mFaceProxies = null;
    } else
    {
        mFaceProxies = new ArrayList<>(faces.length);
        for (Face face : faces)
        {
            mFaceProxies.add(Camera2FaceProxy.from(face));
        }
    }

    mLensFocusDistance = captureResult.get(CaptureResult.LENS_FOCUS_DISTANCE);
}
 
源代码6 项目: Camera2   文件: CaptureDataSerializer.java
/**
 * Generate a human-readable string of the given capture request and write
 * it to the given file.
 */
/**
 * Generates a human-readable string of the given capture request or result
 * and appends it to the given file.
 *
 * @param title    heading written before the metadata dump
 * @param metadata a CaptureRequest or CaptureResult to serialize
 * @param file     destination file; appended to if it already exists
 * @throws IllegalArgumentException if metadata is neither a CaptureRequest
 *         nor a CaptureResult
 */
public static void toFile(String title, CameraMetadata<?> metadata, File file)
{
    // try-with-resources closes the writer on every exit path, including
    // when dumpMetadata throws; the original leaked the writer in that case.
    try (FileWriter writer = new FileWriter(file, true))
    {
        if (metadata instanceof CaptureRequest)
        {
            dumpMetadata(title, (CaptureRequest) metadata, writer);
        } else if (metadata instanceof CaptureResult)
        {
            dumpMetadata(title, (CaptureResult) metadata, writer);
        } else
        {
            // Unchecked, so it still propagates to the caller after the
            // writer is closed — same observable behavior as before.
            throw new IllegalArgumentException("Cannot generate debug data from type "
                    + metadata.getClass().getName());
        }
    } catch (IOException ex)
    {
        Log.e(TAG, "Could not write capture data to file.", ex);
    }
}
 
源代码7 项目: Camera2   文件: AcceptableZslImageFilter.java
/**
 * Returns whether the frame's auto-exposure state makes it acceptable for
 * ZSL reuse. A missing AE state is treated as acceptable.
 */
private boolean isAEAcceptable(TotalCaptureResultProxy metadata)
{
    Integer aeState = metadata.get(CaptureResult.CONTROL_AE_STATE);
    if (aeState == null)
    {
        return true;
    }
    // Only settled AE states (inactive, locked, converged) are acceptable.
    return aeState == CaptureResult.CONTROL_AE_STATE_INACTIVE
            || aeState == CaptureResult.CONTROL_AE_STATE_LOCKED
            || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED;
}
 
源代码8 项目: Camera2   文件: AutoFocusHelper.java
/**
 * Convert reported camera2 AF state to OneCamera AutoFocusState.
 */
/**
 * Convert reported camera2 AF state to OneCamera AutoFocusState.
 * Unrecognized states map to INACTIVE.
 */
public static OneCamera.AutoFocusState stateFromCamera2State(int state)
{
    switch (state)
    {
        // Passive (continuous) focus states.
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return OneCamera.AutoFocusState.PASSIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_UNFOCUSED;
        // Active (triggered) focus states.
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return OneCamera.AutoFocusState.ACTIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_UNFOCUSED;
        default:
            return OneCamera.AutoFocusState.INACTIVE;
    }
}
 
源代码9 项目: Camera2   文件: AutoFocusHelper.java
/**
 * Logs AF state, lens focus distance, lens state, and the request tag (if
 * any) for the given result; skips logging when either the AF state or the
 * lens state is missing from the result.
 */
public static void logExtraFocusInfo(CaptureResult result)
{
    if (!checkControlAfState(result) || !checkLensState(result))
    {
        return;
    }

    Object requestTag = result.getRequest().getTag();
    String tagSuffix = (requestTag == null) ? "" : "[" + requestTag + "]";

    Log.v(TAG, String.format("af_state:%-17s  lens_foc_dist:%.3f  lens_state:%-10s  %s",
            controlAFStateToString(result.get(CaptureResult.CONTROL_AF_STATE)),
            result.get(CaptureResult.LENS_FOCUS_DISTANCE),
            lensStateToString(result.get(CaptureResult.LENS_STATE)),
            tagSuffix));
}
 
源代码10 项目: Camera2   文件: AutoFocusHelper.java
/**
 * Utility function: converts CaptureResult.CONTROL_AF_STATE to String.
 */
/**
 * Utility function: converts CaptureResult.CONTROL_AF_STATE to a short,
 * lowercase String for logging; unrecognized values yield "unknown".
 */
private static String controlAFStateToString(int controlAFState)
{
    switch (controlAFState)
    {
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "inactive";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "passive_scan";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "passive_focused";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "passive_unfocused";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "active_scan";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "focus_locked";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "not_focus_locked";
        default:
            return "unknown";
    }
}
 
源代码11 项目: DeviceConnect-Android   文件: Camera2Helper.java
/**
 * Auto-Exposure の状態を文字列に変換します。
 *
 * @param asState Auto Exposure の状態
 * @return 文字列
 */
/**
 * Converts an Auto-Exposure state constant into a human-readable string.
 *
 * @param asState AE state from {@link CaptureResult#CONTROL_AE_STATE}; may be null
 * @return the constant's qualified name, "NULL" when absent, or "UNKNOWN"
 *         when the value is not a recognized AE state
 */
static String debugAEState(Integer asState) {
    if (asState == null) {
        return "NULL";
    }

    switch (asState) {
        case CaptureResult.CONTROL_AE_STATE_INACTIVE:
            return "CaptureResult.CONTROL_AE_STATE_INACTIVE";
        case CaptureResult.CONTROL_AE_STATE_SEARCHING:
            return "CaptureResult.CONTROL_AE_STATE_SEARCHING";
        case CaptureResult.CONTROL_AE_STATE_CONVERGED:
            return "CaptureResult.CONTROL_AE_STATE_CONVERGED";
        case CaptureResult.CONTROL_AE_STATE_LOCKED:
            return "CaptureResult.CONTROL_AE_STATE_LOCKED";
        case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED:
            return "CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED";
        case CaptureResult.CONTROL_AE_STATE_PRECAPTURE:
            return "CaptureResult.CONTROL_AE_STATE_PRECAPTURE";
        default:
            return "UNKNOWN";
    }
}
 
源代码12 项目: Camera2   文件: ResponseListeners.java
/**
 * @param callback A thread-safe callback to receive partial and final
 *                 metadata for each frame. Metadata may be received
 *                 out-of-order.
 */
/**
 * Builds a listener that forwards both partial and final metadata to the
 * given callback.
 *
 * @param callback A thread-safe callback to receive partial and final
 *                 metadata for each frame. Metadata may be received
 *                 out-of-order.
 */
public static ResponseListener forPartialMetadata(final Updatable<CaptureResultProxy> callback)
{
    return new ResponseListenerBase<CaptureResultProxy>(callback)
    {
        // Final results go through the richer total-result proxy.
        @Override
        public void onCompleted(TotalCaptureResult result)
        {
            callback.update(new AndroidTotalCaptureResultProxy(result));
        }

        // Partial results are wrapped in the plain capture-result proxy.
        @Override
        public void onProgressed(CaptureResult partialResult)
        {
            callback.update(new AndroidCaptureResultProxy(partialResult));
        }
    };
}
 
源代码13 项目: DeviceConnect-Android   文件: Camera2Wrapper.java
/**
 * Advances to the auto-exposure state once auto-focus has settled, or after
 * a five-second timeout if it never does.
 */
@Override
public void onCaptureResult(CaptureResult result, boolean isCompleted) {
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    if (DEBUG) {
        Log.d(TAG, "afState: " + Camera2Helper.debugAFState(afState) + " isCompleted: " + isCompleted);
    }
    // AF counts as settled when the state is absent or in any
    // focused/locked terminal state.
    boolean afSettled = afState == null
            || afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
    boolean timedOut = (System.currentTimeMillis() - mStartTime) > 5000;
    if (afSettled || timedOut) {
        nextState(mAutoExposureState);
    }
}
 
源代码14 项目: android_9.0.0_r45   文件: CameraMetadataNative.java
/**
 * Compare this key against other native keys, request keys, result keys, and
 * characteristics keys.
 *
 * <p>Two keys are considered equal if their name and type reference are equal.</p>
 *
 * <p>Note that the equality against non-native keys is one-way. A native key may be equal
 * to a result key; but that same result key will not be equal to a native key.</p>
 */
/**
 * Compare this key against other native keys, request keys, result keys, and
 * characteristics keys.
 *
 * <p>Two keys are considered equal if their name and type reference are equal.</p>
 *
 * <p>Note that the equality against non-native keys is one-way. A native key may be equal
 * to a result key; but that same result key will not be equal to a native key.</p>
 */
@SuppressWarnings("rawtypes")
@Override
public final boolean equals(Object o) {
    if (this == o) {
        return true;
    }

    // Cheap rejection: objects with differing hash codes cannot be equal.
    if (o == null || this.hashCode() != o.hashCode()) {
        return false;
    }

    // Unwrap the public key flavors down to their shared native representation.
    final Key<?> other;
    if (o instanceof CaptureResult.Key) {
        other = ((CaptureResult.Key) o).getNativeKey();
    } else if (o instanceof CaptureRequest.Key) {
        other = ((CaptureRequest.Key) o).getNativeKey();
    } else if (o instanceof CameraCharacteristics.Key) {
        other = ((CameraCharacteristics.Key) o).getNativeKey();
    } else if (o instanceof Key) {
        other = (Key<?>) o;
    } else {
        return false;
    }

    return mName.equals(other.mName) && mTypeReference.equals(other.mTypeReference);
}
 
源代码15 项目: android_9.0.0_r45   文件: CameraMetadataNative.java
/**
 * Returns face rectangles converted from (left, top, right, bottom) to the
 * legacy (left, top, width, height) layout, or null if none are present.
 */
private Rect[] getFaceRectangles() {
    Rect[] raw = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
    if (raw == null) {
        return null;
    }

    Rect[] converted = new Rect[raw.length];
    for (int i = 0; i < raw.length; i++) {
        Rect r = raw[i];
        // Width/height are stored in the right/bottom slots, matching the
        // legacy face rectangle convention.
        converted[i] = new Rect(r.left, r.top, r.right - r.left, r.bottom - r.top);
    }
    return converted;
}
 
源代码16 项目: android_9.0.0_r45   文件: CameraMetadataNative.java
/**
 * Assembles a Location from the JPEG GPS metadata fields, or returns null
 * when no GPS metadata is present at all. Missing individual fields are
 * logged and left unset on the returned Location.
 */
private Location getGpsLocation() {
    String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
    double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
    Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);

    // Only bail out when every GPS field is absent.
    if (areValuesAllNull(processingMethod, coords, timeStamp)) {
        return null;
    }

    Location location = new Location(translateProcessToLocationProvider(processingMethod));

    if (timeStamp == null) {
        Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
    } else {
        // Location expects its timestamp in milliseconds.
        location.setTime(timeStamp * 1000);
    }

    if (coords == null) {
        Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
    } else {
        location.setLatitude(coords[0]);
        location.setLongitude(coords[1]);
        location.setAltitude(coords[2]);
    }

    return location;
}
 
/**
 * Forwards any faces reported in the completed capture to the registered
 * face-detector callback, if one is set.
 */
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
  Face[] detected = result.get(CaptureResult.STATISTICS_FACES);
  if (faceDetectorCallback != null) {
    faceDetectorCallback.onGetFaces(detected);
  }
}
 
源代码18 项目: DeviceConnect-Android   文件: Camera2Wrapper.java
@Override
void onCaptureResult(CaptureResult result, boolean isCompleted) {
    // During preview, onCaptureResult fires repeatedly; only the first
    // invocation should trigger the preview-start notification.
    if (!mStartFlag) {
        return;
    }
    mStartFlag = false;
    postOnStartPreview();
}
 
源代码19 项目: 365browser   文件: VideoCaptureCamera2.java
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
    Log.d(TAG, "CrPreviewSessionListener.onConfigured");
    mPreviewSession = cameraCaptureSession;
    try {
        // This line triggers the preview. A |listener| is registered to receive the actual
        // capture result details. A CrImageReaderListener will be triggered every time a
        // downloaded image is ready. Since |handler| is null, we'll work on the current
        // Thread Looper.
        mPreviewSession.setRepeatingRequest(
                mPreviewRequest, new CameraCaptureSession.CaptureCallback() {
                    @Override
                    public void onCaptureCompleted(CameraCaptureSession session,
                            CaptureRequest request, TotalCaptureResult result) {
                        // NOTE(review): SENSOR_EXPOSURE_TIME may be absent on some
                        // devices; confirm mLastExposureTimeNs tolerates null (if it
                        // is a primitive long, this unboxing would NPE).
                        mLastExposureTimeNs =
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    }
                }, null);

    } catch (CameraAccessException | SecurityException | IllegalStateException
            | IllegalArgumentException ex) {
        // On any session/request failure, leave camera state unchanged and bail out
        // without notifying the native side that capture started.
        Log.e(TAG, "setRepeatingRequest: ", ex);
        return;
    }
    // Now wait for trigger on CrPreviewReaderListener.onImageAvailable();
    nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
    changeCameraStateAndNotify(CameraState.STARTED);
}
 
源代码20 项目: Camera2   文件: AETriggerResult.java
/**
 * Feeds one capture result into the AE precapture state machine; once the
 * sequence completes, resolves the future with whether flash is required.
 */
@Override
public void update(CaptureResultProxy result)
{
    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    boolean converged = mStateMachine.update(
            result.getFrameNumber(),
            result.getRequest().get(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER),
            aeState);
    if (!converged)
    {
        return;
    }
    // AE has settled; report whether the scene needs flash.
    mFutureResult.set(Objects.equal(aeState,
            CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED));
}
 
源代码21 项目: Camera2   文件: MetadataCallback.java
/**
 * Pushes the latest AF state from the capture result into mFocusState,
 * ignoring results that carry no AF state.
 */
private void updateFocusState(TotalCaptureResultProxy totalCaptureResult)
{
    Integer afState = totalCaptureResult.get(CaptureResult.CONTROL_AF_STATE);
    if (afState == null)
    {
        return;
    }
    mFocusState.update(afState);
}
 
源代码22 项目: Camera2   文件: MetadataCallback.java
/**
 * Publishes a OneCamera focus state (lens distance plus whether an AF scan is
 * in progress) whenever the result carries both a focus distance and an AF
 * state; results missing either field are ignored.
 */
private void updateOneCameraFocusState(TotalCaptureResultProxy totalCaptureResult)
{
    Float focusDistance = totalCaptureResult.get(CaptureResult.LENS_FOCUS_DISTANCE);
    Integer focusState = totalCaptureResult.get(CaptureResult.CONTROL_AF_STATE);
    if (focusDistance != null && focusState != null)
    {
        // AF is "active" while either an active or a passive scan is running.
        // Direct comparison replaces the HashSet the original allocated on
        // every capture result just to test these two constants.
        boolean active = focusState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                || focusState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN;
        mOneCameraFocusState.update(new OneCamera.FocusState(focusDistance, active));
    }
}
 
源代码23 项目: Camera2   文件: AutoFlashZslImageFilter.java
/**
 * Delegates key lookup, except that an AE state of SEARCHING is reported as
 * CONVERGED so downstream auto-flash logic treats the frame as settled.
 */
@Nullable
@Override
public <T> T get(CaptureResult.Key<T> key)
{
    if (key == TotalCaptureResult.CONTROL_AE_STATE)
    {
        Integer aeState = (Integer) mDelegate.get(key);
        if (Objects.equal(aeState, CaptureResult.CONTROL_AE_STATE_SEARCHING))
        {
            // Mask SEARCHING as CONVERGED.
            return (T) ((Integer) CaptureResult.CONTROL_AE_STATE_CONVERGED);
        }
    }
    return mDelegate.get(key);
}
 
源代码24 项目: Camera2   文件: AutoFocusHelper.java
/**
 * Complain if CONTROL_AF_STATE is not present in result.
 * Could indicate bug in API implementation.
 *
 * @return true when CONTROL_AF_STATE is present in the result
 */
public static boolean checkControlAfState(CaptureResult result)
{
    if (result.get(CaptureResult.CONTROL_AF_STATE) != null)
    {
        return true;
    }
    // Log loudly instead of throwing so capture processing can continue.
    // throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
    Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n ");
    return false;
}
 
源代码25 项目: DeviceConnect-Android   文件: Camera2Wrapper.java
@Override
void onCaptureResult(CaptureResult result, boolean isCompleted) {
    // Preview delivers onCaptureResult repeatedly; the start notification
    // must be posted exactly once, on the first callback.
    if (mStartFlag) {
        mStartFlag = false;
        postOnStartPreview();
    }
}
 
源代码26 项目: Camera2   文件: MetadataResponseListener.java
/**
 * Forwards the watched key's value from each partial result to the
 * updatable, skipping partials that do not carry the key.
 */
@Override
public void onProgressed(CaptureResult partialResult)
{
    V value = partialResult.get(mKey);
    if (value == null)
    {
        return;
    }
    mUpdatable.update(value);
}
 
源代码27 项目: Camera2   文件: AndroidCameraCaptureSessionProxy.java
// Relay partial-result progress to the wrapped callback, substituting this
// proxy for the framework session object.
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
                                CaptureResult partialResult)
{
    mCallback.onCaptureProgressed(
            AndroidCameraCaptureSessionProxy.this, request, partialResult);
}
 
源代码28 项目: Camera2   文件: FramerateJankDetector.java
/**
 * Tracks sensor-timestamp deltas between consecutive frames and reports a
 * jank event when the frame interval grows sharply relative to the previous
 * interval; large jumps are also recorded in usage statistics.
 */
@Override
public void onCompleted(TotalCaptureResult result)
{
    long frameTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
    if (mLastFrameTimestamp >= 0)
    {
        // Timestamps are in nanoseconds; convert the gap to milliseconds.
        double intervalMillis = (frameTimestamp - mLastFrameTimestamp) / 1000000.0;

        if (mLastDeltaMillis > 0)
        {
            double fractionalChange = (intervalMillis - mLastDeltaMillis) / mLastDeltaMillis;
            if (fractionalChange >= FRACTIONAL_CHANGE_STATS_THRESHOLD)
            {
                mUsageStatistics.cameraFrameDrop(intervalMillis, mLastDeltaMillis);
            }

            if (fractionalChange >= FRACTIONAL_CHANGE_LOG_THRESHOLD)
            {
                mLog.v("JANK! Time between frames (" + intervalMillis + "ms) increased by " +
                        (fractionalChange * 100) + "% over the last frame delta (" +
                        mLastDeltaMillis + "ms)");
            }
        }
        mLastDeltaMillis = intervalMillis;
    }

    mLastFrameTimestamp = frameTimestamp;
}
 
源代码29 项目: Camera2   文件: ResponseListenerBroadcaster.java
// Fan a partial result out to every registered listener, in order.
@Override
public void onProgressed(CaptureResult partialResult)
{
    for (ResponseListener target : mListeners)
    {
        target.onProgressed(partialResult);
    }
}
 
源代码30 项目: mobile-ar-sensor-logger   文件: Camera2Proxy.java
// Intentionally a no-op: this session does not consume per-frame partial results.
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        CaptureResult partialResult) {
    // Log.d(TAG, "mSessionCaptureCallback,  onCaptureProgressed");
}
 
 类所在包
 同包方法