android.hardware.camera2.CaptureResult#CONTROL_AF_STATE_INACTIVE Source Code Examples

Listed below are code examples that use android.hardware.camera2.CaptureResult#CONTROL_AF_STATE_INACTIVE; you can also follow the links to view the original source on GitHub.
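
Before the per-project examples, here is a minimal, self-contained sketch of how CaptureResult.CONTROL_AF_STATE is typically read from a completed capture and compared against CONTROL_AF_STATE_INACTIVE. The callback class name, log tag, and handling below are illustrative assumptions, not code taken from the projects that follow.

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Log;

// Hypothetical callback that logs whenever the AF state is reported as inactive.
class AfStateLogger extends CameraCaptureSession.CaptureCallback
{
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                   TotalCaptureResult result)
    {
        // CONTROL_AF_STATE may be absent on some devices or frames.
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        if (afState != null && afState == CaptureResult.CONTROL_AF_STATE_INACTIVE)
        {
            // AF has not started yet: AF mode is OFF or no scan has been triggered.
            Log.d("AfStateDemo", "AF state = INACTIVE");
        }
    }
}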

Example 1  Project: Camera2   File: AcceptableZslImageFilter.java
private boolean isAFAcceptable(TotalCaptureResultProxy metadata)
{
    Integer afState = metadata.get(CaptureResult.CONTROL_AF_STATE);
    if (afState == null)
    {
        return true;
    } else
    {
        switch (afState)
        {
            case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
                return true;
            default:
                return false;
        }
    }
}
 
Example 2  Project: Camera2   File: AutoFocusHelper.java
/**
 * Utility function: converts CaptureResult.CONTROL_AF_STATE to String.
 */
private static String controlAFStateToString(int controlAFState)
{
    switch (controlAFState)
    {
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "inactive";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "passive_scan";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "passive_focused";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "active_scan";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "focus_locked";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "not_focus_locked";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "passive_unfocused";
        default:
            return "unknown";
    }
}
 
Example 3  Project: DeviceConnect-Android   File: Camera2Helper.java
/**
 * Converts the auto-focus state to a string.
 *
 * @param afState the auto-focus state
 * @return a string representation of the state
 */
static String debugAFState(Integer afState) {
    if (afState == null) {
        return "NULL";
    }

    switch (afState) {
        default:
            return "UNKNOWN";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED";
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "CaptureResult.CONTROL_AF_STATE_INACTIVE";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED";
    }
}
 
Example 4  Project: Camera2   File: InitializedOneCameraFactory.java
/**
 * @param cameraStarter  Starts the camera, after initialization of the
 *                       preview stream and capture session is complete.
 * @param outputSurfaces The set of output Surfaces (excluding the
 *                       not-yet-available preview Surface) to use when configuring the
 *                       capture session.
 */
public InitializedOneCameraFactory(
        final Lifetime lifetime, final CameraStarter cameraStarter, CameraDeviceProxy device,
        List<Surface> outputSurfaces, MainThread mainThreadExecutor,
        HandlerFactory handlerFactory, float maxZoom, List<Size> supportedPreviewSizes,
        LinearScale lensRange, OneCamera.Facing direction)
{
    // Assembles and returns a OneCamera based on the CameraStarter.

    // Create/wrap required threads.
    final Handler cameraHandler = handlerFactory.create(lifetime, "CameraHandler");

    // Since we cannot create an actual PictureTaker and ManualAutoFocus
    // until the CaptureSession is available, create ones which defer to
    // a Future of the actual implementation.
    final SettableFuture<PictureTaker> mPictureTaker = SettableFuture.create();
    PictureTaker pictureTaker = new DeferredPictureTaker(mPictureTaker);

    final SettableFuture<ManualAutoFocus> mManualAutoFocus = SettableFuture.create();
    ManualAutoFocus manualAutoFocus = new DeferredManualAutoFocus(
            mManualAutoFocus);

    // The OneCamera interface exposes various types of state, either
    // through getters, setters, or the ability to register listeners.
    // Since these values are interacted with by multiple threads, we can
    // use {@link ConcurrentState} to provide this functionality safely.
    final ConcurrentState<Float> zoomState = new ConcurrentState<>(1.0f);
    final ConcurrentState<Integer> afState = new ConcurrentState<>(
            CaptureResult.CONTROL_AF_STATE_INACTIVE);
    final ConcurrentState<OneCamera.FocusState> focusState = new ConcurrentState<>(new
            OneCamera.FocusState(0.0f, false));
    final ConcurrentState<Integer> afMode = new ConcurrentState<>(CaptureResult
            .CONTROL_AF_MODE_OFF);
    final ConcurrentState<Boolean> readyState = new ConcurrentState<>(false);

    // Wrap state to be able to register listeners which run on the main
    // thread.
    Listenable<Integer> afStateListenable = new Listenable<>(afState,
            mainThreadExecutor);
    Listenable<OneCamera.FocusState> focusStateListenable = new Listenable<>(
            focusState, mainThreadExecutor);
    Listenable<Boolean> readyStateListenable = new Listenable<>(readyState,
            mainThreadExecutor);

    // Wrap each value in a filter to ensure that only differences pass
    // through.
    final MetadataCallback metadataCallback = new MetadataCallback(
            new FilteredUpdatable<>(afState),
            new FilteredUpdatable<>(focusState),
            new FilteredUpdatable<>(afMode));

    // The following handles the initialization sequence in which we receive
    // various dependencies at different times in the following sequence:
    // 1. CameraDevice
    // 2. The Surface on which to render the preview stream
    // 3. The CaptureSession
    // When all three of these are available, the {@link #CameraFactory} can
    // be used to assemble the actual camera functionality (e.g. to take
    // pictures, and run AF scans).

    // Note that these must be created in reverse-order to when they are run
    // because each stage depends on the previous one.
    final CaptureSessionCreator captureSessionCreator = new CaptureSessionCreator(device,
            cameraHandler);

    PreviewStarter mPreviewStarter = new PreviewStarter(outputSurfaces,
            captureSessionCreator,
            new PreviewStarter.CameraCaptureSessionCreatedListener()
            {
                @Override
                public void onCameraCaptureSessionCreated(CameraCaptureSessionProxy session,
                                                          Surface previewSurface)
                {
                    CameraStarter.CameraControls controls = cameraStarter.startCamera(
                            new Lifetime(lifetime),
                            session, previewSurface,
                            zoomState, metadataCallback, readyState);
                    mPictureTaker.set(controls.getPictureTaker());
                    mManualAutoFocus.set(controls.getManualAutoFocus());
                }
            });

    PreviewSizeSelector previewSizeSelector =
            new Camera2PreviewSizeSelector(supportedPreviewSizes);

    mOneCamera = new GenericOneCameraImpl(lifetime, pictureTaker, manualAutoFocus, lensRange,
            mainThreadExecutor, afStateListenable, focusStateListenable, readyStateListenable,
            maxZoom, zoomState, direction, previewSizeSelector, mPreviewStarter);
}
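
ConcurrentState, used above to hold the initial CaptureResult.CONTROL_AF_STATE_INACTIVE value, is the Camera2 project's own thread-safe, observable value holder. As a rough illustration of that idea only (not the project's actual class, whose full API is not shown here), a minimal equivalent might look like this:

import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;

// Illustrative stand-in for a thread-safe, observable value holder.
final class ObservableValue<T>
{
    private final CopyOnWriteArrayList<Consumer<T>> mListeners = new CopyOnWriteArrayList<>();
    private volatile T mValue;

    ObservableValue(T initialValue)
    {
        mValue = initialValue;
    }

    T get()
    {
        return mValue;
    }

    void update(T newValue)
    {
        mValue = newValue;
        // CopyOnWriteArrayList makes iteration safe against concurrent listener registration.
        for (Consumer<T> listener : mListeners)
        {
            listener.accept(newValue);
        }
    }

    void addListener(Consumer<T> listener)
    {
        mListeners.add(listener);
    }
}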
 
Example 5  Project: Camera2   File: AutoFocusStateMachine.java
/**
 * Invoke every time we get a new CaptureResult via
 * {@link CameraDevice.CaptureCallback#onCaptureCompleted}.
 *
 * <p>This function is responsible for dispatching updates via the
 * {@link AutoFocusStateListener}; unless it is called on a regular basis, no
 * AF changes will be observed.</p>
 *
 * @param result CaptureResult
 */
public synchronized void onCaptureCompleted(CaptureResult result) {

    /**
     * Work-around for b/11269834
     * Although these should never-ever happen, harden for ship
     */
    if (result == null) {
        Log.w(TAG, "onCaptureCompleted - missing result, skipping AF update");
        return;
    }

    Key<Integer> keyAfState = CaptureResult.CONTROL_AF_STATE;
    if (keyAfState == null) {
        Log.e(TAG, "onCaptureCompleted - missing android.control.afState key, " +
                "skipping AF update");
        return;
    }

    Key<Integer> keyAfMode = CaptureResult.CONTROL_AF_MODE;
    if (keyAfMode == null) {
        Log.e(TAG, "onCaptureCompleted - missing android.control.afMode key, " +
                "skipping AF update");
        return;
    }

    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE);

    /**
     * Work-around for b/11238865
     * This is a HAL bug as these fields should be there always.
     */
    if (afState == null) {
        Log.w(TAG, "onCaptureCompleted - missing android.control.afState !");
        return;
    } else if (afMode == null) {
        Log.w(TAG, "onCaptureCompleted - missing android.control.afMode !");
        return;
    }

    if (DEBUG_LOGGING) Log.d(TAG, "onCaptureCompleted - new AF mode = " + afMode +
            " new AF state = " + afState);

    if (mLastAfState == afState && afMode == mLastAfMode) {
        // Same AF state as last time, nothing else needs to be done.
        return;
    }

    if (VERBOSE_LOGGING) Log.v(TAG, "onCaptureCompleted - new AF mode = " + afMode +
            " new AF state = " + afState);

    mLastAfState = afState;
    mLastAfMode = afMode;

    switch (afState) {
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            mListener.onAutoFocusSuccess(result, /*locked*/true);
            endTraceAsync();
            break;
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            mListener.onAutoFocusFail(result, /*locked*/true);
            endTraceAsync();
            break;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            mListener.onAutoFocusSuccess(result, /*locked*/false);
            break;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            mListener.onAutoFocusFail(result, /*locked*/false);
            break;
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            mListener.onAutoFocusScan(result);
            break;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            mListener.onAutoFocusScan(result);
            break;
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            mListener.onAutoFocusInactive(result);
            break;
    }
}
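
The state machine above reports each AF transition through an AutoFocusStateListener. As a final illustration, a minimal listener that only logs the transitions might look like the sketch below; the callback signatures are inferred from the dispatch calls in onCaptureCompleted and should be checked against the project's actual interface.

// Hypothetical listener implementation that only logs AF transitions.
// The nested interface name and method signatures are assumptions inferred from the calls above.
AutoFocusStateMachine.AutoFocusStateListener listener = new AutoFocusStateMachine.AutoFocusStateListener()
{
    @Override
    public void onAutoFocusSuccess(CaptureResult result, boolean locked)
    {
        Log.d("AfDemo", "AF converged, locked=" + locked);
    }

    @Override
    public void onAutoFocusFail(CaptureResult result, boolean locked)
    {
        Log.d("AfDemo", "AF failed to converge, locked=" + locked);
    }

    @Override
    public void onAutoFocusScan(CaptureResult result)
    {
        Log.d("AfDemo", "AF scan in progress");
    }

    @Override
    public void onAutoFocusInactive(CaptureResult result)
    {
        Log.d("AfDemo", "AF inactive");
    }
};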