The following are real-world code examples showing how to use the android.hardware.camera2.CaptureResult API class; you can also follow the link on each example to view the full source code on GitHub.
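Before the examples, a bit of context: CaptureResult instances are delivered through a CameraCaptureSession.CaptureCallback, and the snippets below either implement such callbacks or consume the results they deliver. A minimal sketch, assuming the session and capture request have already been configured elsewhere:

CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureProgressed(CameraCaptureSession session,
            CaptureRequest request, CaptureResult partialResult) {
        // Partial result: only a subset of keys may be present yet.
        Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
    }

    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        // Total result: the complete metadata for this frame.
        Long exposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
    }
};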
private static void mapAf(CameraMetadataNative m,
        Rect activeArray, ZoomData zoomData, Camera.Parameters p) {
    // control.afMode
    m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode()));

    // control.afRegions
    if (p.getMaxNumFocusAreas() > 0) {
        if (DEBUG) {
            String focusAreas = p.get("focus-areas");
            Log.v(TAG, "mapAf - parameter dump; focus-areas: " + focusAreas);
        }
        MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
                zoomData, p.getFocusAreas(), "AF");
        m.set(CONTROL_AF_REGIONS, meteringRectArray);
    }
}
/**
 * This function is called every time a partial or total result becomes available.
 *
 * <p>It keeps track of all the partial results already created for a particular
 * frame number.</p>
 *
 * @param frameNumber the frame number corresponding to the result
 * @param result the total or partial result
 * @param partial {@code true} if the result is partial, {@code false} if total
 * @param isReprocess {@code true} if it is a reprocess result, {@code false} if it is a
 *                    regular result.
 */
public void updateTracker(long frameNumber, CaptureResult result, boolean partial,
        boolean isReprocess) {
    if (!partial) {
        // Update the total result's frame status as being successful
        updateTracker(frameNumber, /*isError*/false, isReprocess);
        // Don't keep a list of total results, we don't need to track them
        return;
    }

    if (result == null) {
        // Do not record blank results; this also means there will be no total result
        // so it doesn't matter that the partials were not recorded
        return;
    }

    // Partial results must be aggregated in-order for that frame number
    List<CaptureResult> partials = mPartialResults.get(frameNumber);
    if (partials == null) {
        partials = new ArrayList<>();
        mPartialResults.put(frameNumber, partials);
    }
    partials.add(result);
}
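For illustration, such a tracker could be fed from a capture callback like the following sketch; mTracker is a hypothetical field holding the object that exposes updateTracker:

// Hypothetical usage sketch; mTracker is an assumed field.
CameraCaptureSession.CaptureCallback trackingCallback =
        new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureProgressed(CameraCaptureSession session,
            CaptureRequest request, CaptureResult partialResult) {
        mTracker.updateTracker(partialResult.getFrameNumber(), partialResult,
                /*partial*/ true, /*isReprocess*/ false);
    }

    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        mTracker.updateTracker(result.getFrameNumber(), result,
                /*partial*/ false, /*isReprocess*/ false);
    }
};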
private void detectFaces(CaptureResult captureResult) {
    Integer mode = captureResult.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    if (isViewAvailable() && mode != null) {
        android.hardware.camera2.params.Face[] faces =
                captureResult.get(CaptureResult.STATISTICS_FACES);
        if (faces != null) {
            Log.i(TAG, "faces : " + faces.length + " , mode : " + mode);
            for (android.hardware.camera2.params.Face face : faces) {
                Rect faceBounds = face.getBounds();
                // Once processed, the result is sent back to the View
                presenterView.onFaceDetected(mapCameraFaceToCanvas(faceBounds,
                        face.getLeftEyePosition(), face.getRightEyePosition()));
            }
        }
    }
}
/**
 * Converts an auto-focus state to a string.
 *
 * @param afState the auto-focus state
 * @return the string representation
 */
static String debugAFState(Integer afState) {
    if (afState == null) {
        return "NULL";
    }
    switch (afState) {
        default:
            return "UNKNOWN";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED";
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "CaptureResult.CONTROL_AF_STATE_INACTIVE";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED";
    }
}
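A hedged usage sketch for a helper like this: logging AF state transitions from a capture callback. mLastAfState is an assumed field, not part of the original snippet:

// Illustrative only: log each AF state transition during preview.
@Override
public void onCaptureCompleted(CameraCaptureSession session,
        CaptureRequest request, TotalCaptureResult result) {
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    if (!java.util.Objects.equals(afState, mLastAfState)) { // mLastAfState is assumed
        Log.d(TAG, "AF: " + debugAFState(mLastAfState) + " -> " + debugAFState(afState));
        mLastAfState = afState;
    }
}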
/**
 * Decorate the collector when the CaptureResult becomes available, which happens some
 * time after the picture is taken. In the current implementation, we query this
 * structure for two fields: 1) CaptureResult.STATISTICS_FACES and
 * 2) CaptureResult.LENS_FOCUS_DISTANCE
 *
 * @param captureResult CaptureResult to be queried for capture event information
 */
public void decorateAtTimeOfCaptureRequestAvailable(CaptureResultProxy captureResult)
{
    Face[] facesCaptured = captureResult.get(CaptureResult.STATISTICS_FACES);
    if (facesCaptured == null)
    {
        mFaceProxies = null;
    } else
    {
        mFaceProxies = new ArrayList<>(facesCaptured.length);
        for (Face face : facesCaptured)
        {
            mFaceProxies.add(Camera2FaceProxy.from(face));
        }
    }

    mLensFocusDistance = captureResult.get(CaptureResult.LENS_FOCUS_DISTANCE);
}
/**
 * Generate a human-readable string of the given capture request or result and write
 * it to the given file.
 */
public static void toFile(String title, CameraMetadata<?> metadata, File file)
{
    try
    {
        // Will append if the file already exists.
        FileWriter writer = new FileWriter(file, true);
        if (metadata instanceof CaptureRequest)
        {
            dumpMetadata(title, (CaptureRequest) metadata, writer);
        } else if (metadata instanceof CaptureResult)
        {
            dumpMetadata(title, (CaptureResult) metadata, writer);
        } else
        {
            writer.close();
            throw new IllegalArgumentException("Cannot generate debug data from type "
                    + metadata.getClass().getName());
        }
        writer.close();
    } catch (IOException ex)
    {
        Log.e(TAG, "Could not write capture data to file.", ex);
    }
}
private boolean isAEAcceptable(TotalCaptureResultProxy metadata)
{
    Integer aeState = metadata.get(CaptureResult.CONTROL_AE_STATE);
    if (aeState == null)
    {
        return true;
    } else
    {
        switch (aeState)
        {
            case CaptureResult.CONTROL_AE_STATE_INACTIVE:
            case CaptureResult.CONTROL_AE_STATE_LOCKED:
            case CaptureResult.CONTROL_AE_STATE_CONVERGED:
                return true;
            default:
                return false;
        }
    }
}
/**
 * Convert a reported camera2 AF state to a OneCamera AutoFocusState.
 */
public static OneCamera.AutoFocusState stateFromCamera2State(int state)
{
    switch (state)
    {
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return OneCamera.AutoFocusState.ACTIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return OneCamera.AutoFocusState.PASSIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_UNFOCUSED;
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_UNFOCUSED;
        default:
            return OneCamera.AutoFocusState.INACTIVE;
    }
}
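One plausible call site for a converter like this, sketched here with assumed names (mFocusStateListener and onFocusStatusUpdate are not from the original snippet):

// Sketch: translate the camera2 AF state as metadata arrives and forward it.
@Override
public void onCaptureCompleted(CameraCaptureSession session,
        CaptureRequest request, TotalCaptureResult result) {
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    if (afState != null) {
        // mFocusStateListener / onFocusStatusUpdate are assumed names.
        mFocusStateListener.onFocusStatusUpdate(stateFromCamera2State(afState));
    }
}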
public static void logExtraFocusInfo(CaptureResult result)
{
    if (!checkControlAfState(result) || !checkLensState(result))
    {
        return;
    }
    Object tag = result.getRequest().getTag();
    Log.v(TAG, String.format("af_state:%-17s lens_foc_dist:%.3f lens_state:%-10s %s",
            controlAFStateToString(result.get(CaptureResult.CONTROL_AF_STATE)),
            result.get(CaptureResult.LENS_FOCUS_DISTANCE),
            lensStateToString(result.get(CaptureResult.LENS_STATE)),
            (tag == null) ? "" : "[" + tag + "]"));
}
/**
 * Utility function: converts CaptureResult.CONTROL_AF_STATE to a String.
 */
private static String controlAFStateToString(int controlAFState)
{
    switch (controlAFState)
    {
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "inactive";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "passive_scan";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "passive_focused";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "active_scan";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "focus_locked";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "not_focus_locked";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "passive_unfocused";
        default:
            return "unknown";
    }
}
/**
 * Converts an auto-exposure state to a string.
 *
 * @param aeState the auto-exposure state
 * @return the string representation
 */
static String debugAEState(Integer aeState) {
    if (aeState == null) {
        return "NULL";
    }
    switch (aeState) {
        default:
            return "UNKNOWN";
        case CaptureResult.CONTROL_AE_STATE_CONVERGED:
            return "CaptureResult.CONTROL_AE_STATE_CONVERGED";
        case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED:
            return "CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED";
        case CaptureResult.CONTROL_AE_STATE_INACTIVE:
            return "CaptureResult.CONTROL_AE_STATE_INACTIVE";
        case CaptureResult.CONTROL_AE_STATE_LOCKED:
            return "CaptureResult.CONTROL_AE_STATE_LOCKED";
        case CaptureResult.CONTROL_AE_STATE_PRECAPTURE:
            return "CaptureResult.CONTROL_AE_STATE_PRECAPTURE";
        case CaptureResult.CONTROL_AE_STATE_SEARCHING:
            return "CaptureResult.CONTROL_AE_STATE_SEARCHING";
    }
}
/**
 * @param callback A thread-safe callback to receive partial and final
 *                 metadata for each frame. Metadata may be received
 *                 out-of-order.
 */
public static ResponseListener forPartialMetadata(final Updatable<CaptureResultProxy> callback)
{
    return new ResponseListenerBase<CaptureResultProxy>(callback)
    {
        @Override
        public void onProgressed(CaptureResult partialResult)
        {
            callback.update(new AndroidCaptureResultProxy(partialResult));
        }

        @Override
        public void onCompleted(TotalCaptureResult result)
        {
            callback.update(new AndroidTotalCaptureResultProxy(result));
        }
    };
}
@Override
public void onCaptureResult(CaptureResult result, boolean isCompleted) {
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    if (DEBUG) {
        Log.d(TAG, "afState: " + Camera2Helper.debugAFState(afState)
                + " isCompleted: " + isCompleted);
    }
    boolean isAfReady = afState == null
            || afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
    boolean timeout = (System.currentTimeMillis() - mStartTime) > 5000;
    if (isAfReady || timeout) {
        nextState(mAutoExposureState);
    }
}
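The locked/focused states checked above typically follow an AF trigger. A minimal sketch of issuing that trigger, assuming previewBuilder, session, callback, and handler already exist in the surrounding class:

// Sketch: start an active AF scan; the states above then arrive via the
// capture callback until FOCUSED_LOCKED or NOT_FOCUSED_LOCKED (or timeout).
try {
    previewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
            CaptureRequest.CONTROL_AF_MODE_AUTO);
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CaptureRequest.CONTROL_AF_TRIGGER_START);
    session.capture(previewBuilder.build(), callback, handler);
    // Reset the trigger so later requests do not restart the scan.
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
} catch (CameraAccessException e) {
    Log.e(TAG, "AF trigger failed", e);
}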
/**
 * Compare this key against other native keys, request keys, result keys, and
 * characteristics keys.
 *
 * <p>Two keys are considered equal if their name and type reference are equal.</p>
 *
 * <p>Note that equality against non-native keys is one-way: a native key may be equal
 * to a result key, but that same result key will not be equal to a native key.</p>
 */
@SuppressWarnings("rawtypes")
@Override
public final boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || this.hashCode() != o.hashCode()) {
        return false;
    }

    Key<?> lhs;
    if (o instanceof CaptureResult.Key) {
        lhs = ((CaptureResult.Key) o).getNativeKey();
    } else if (o instanceof CaptureRequest.Key) {
        lhs = ((CaptureRequest.Key) o).getNativeKey();
    } else if (o instanceof CameraCharacteristics.Key) {
        lhs = ((CameraCharacteristics.Key) o).getNativeKey();
    } else if (o instanceof Key) {
        lhs = (Key<?>) o;
    } else {
        return false;
    }

    return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
}
private Rect[] getFaceRectangles() {
    Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
    if (faceRectangles == null) return null;

    Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
    for (int i = 0; i < faceRectangles.length; i++) {
        fixedFaceRectangles[i] = new Rect(
                faceRectangles[i].left,
                faceRectangles[i].top,
                faceRectangles[i].right - faceRectangles[i].left,
                faceRectangles[i].bottom - faceRectangles[i].top);
    }
    return fixedFaceRectangles;
}
private Location getGpsLocation() {
    String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
    double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
    Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);

    if (areValuesAllNull(processingMethod, coords, timeStamp)) {
        return null;
    }

    Location l = new Location(translateProcessToLocationProvider(processingMethod));
    if (timeStamp != null) {
        // Location expects the timestamp in [ms]; JPEG_GPS_TIMESTAMP is in [s].
        l.setTime(timeStamp * 1000);
    } else {
        Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
    }

    if (coords != null) {
        l.setLatitude(coords[0]);
        l.setLongitude(coords[1]);
        l.setAltitude(coords[2]);
    } else {
        Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
    }

    return l;
}
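These JPEG_GPS_* result keys appear to be the decomposed form of what an app sets through the public request-side key. A minimal sketch of the writing side, where cameraDevice and location are assumed to exist:

// Sketch: on the request side, GPS metadata is set with one public key.
try {
    CaptureRequest.Builder builder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    builder.set(CaptureRequest.JPEG_GPS_LOCATION, location); // 'location' is assumed
} catch (CameraAccessException e) {
    Log.e(TAG, "createCaptureRequest failed", e);
}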
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
        @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
    Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
    if (faceDetectorCallback != null) {
        faceDetectorCallback.onGetFaces(faces);
    }
}
@Override
void onCaptureResult(CaptureResult result, boolean isCompleted) {
    // During preview, onCaptureResult is invoked repeatedly, so the extra calls
    // are filtered out here.
    if (mStartFlag) {
        mStartFlag = false;
        postOnStartPreview();
    }
}
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
    Log.d(TAG, "CrPreviewSessionListener.onConfigured");
    mPreviewSession = cameraCaptureSession;
    try {
        // This line triggers the preview. A |listener| is registered to receive the
        // actual capture result details. A CrImageReaderListener will be triggered
        // every time a downloaded image is ready. Since |handler| is null, we'll
        // work on the current Thread Looper.
        mPreviewSession.setRepeatingRequest(
                mPreviewRequest, new CameraCaptureSession.CaptureCallback() {
                    @Override
                    public void onCaptureCompleted(CameraCaptureSession session,
                            CaptureRequest request, TotalCaptureResult result) {
                        mLastExposureTimeNs =
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    }
                }, null);
    } catch (CameraAccessException | SecurityException | IllegalStateException
            | IllegalArgumentException ex) {
        Log.e(TAG, "setRepeatingRequest: ", ex);
        return;
    }
    // Now wait for trigger on CrPreviewReaderListener.onImageAvailable();
    nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
    changeCameraStateAndNotify(CameraState.STARTED);
}
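SENSOR_EXPOSURE_TIME, as stored above, is reported in nanoseconds. A small illustrative helper (not part of the original snippet) for logging it in milliseconds:

// Illustrative helper: format a nanosecond exposure time as milliseconds.
private static String formatExposureMs(Long exposureNs) {
    if (exposureNs == null) {
        return "unknown";
    }
    return String.format(java.util.Locale.US, "%.3f ms", exposureNs / 1_000_000.0);
}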
@Override
public void update(CaptureResultProxy result)
{
    Integer state = result.get(CaptureResult.CONTROL_AE_STATE);
    boolean done = mStateMachine.update(
            result.getFrameNumber(),
            result.getRequest().get(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER),
            state);
    if (done)
    {
        boolean flashRequired = Objects.equal(state,
                CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED);
        mFutureResult.set(flashRequired);
    }
}
private void updateFocusState(TotalCaptureResultProxy totalCaptureResult)
{
    Integer focusState = totalCaptureResult.get(CaptureResult.CONTROL_AF_STATE);
    if (focusState != null)
    {
        mFocusState.update(focusState);
    }
}

private void updateOneCameraFocusState(TotalCaptureResultProxy totalCaptureResult)
{
    Float focusDistance = totalCaptureResult.get(CaptureResult.LENS_FOCUS_DISTANCE);
    Integer focusState = totalCaptureResult.get(CaptureResult.CONTROL_AF_STATE);
    if (focusDistance != null && focusState != null)
    {
        Set<Integer> activeStates = new HashSet<>();
        activeStates.add(CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN);
        activeStates.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN);
        boolean active = activeStates.contains(focusState);
        mOneCameraFocusState.update(new OneCamera.FocusState(focusDistance, active));
    }
}
@Nullable
@Override
public <T> T get(CaptureResult.Key<T> key)
{
    if (key == TotalCaptureResult.CONTROL_AE_STATE)
    {
        Integer aeState = (Integer) mDelegate.get(key);
        if (Objects.equal(aeState, CaptureResult.CONTROL_AE_STATE_SEARCHING))
        {
            return (T) ((Integer) CaptureResult.CONTROL_AE_STATE_CONVERGED);
        }
    }
    return mDelegate.get(key);
}
/**
 * Complain if CONTROL_AF_STATE is not present in the result.
 * Could indicate a bug in the API implementation.
 */
public static boolean checkControlAfState(CaptureResult result)
{
    boolean missing = result.get(CaptureResult.CONTROL_AF_STATE) == null;
    if (missing)
    {
        // throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
        Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n ");
    }
    return !missing;
}
@Override
public void onProgressed(CaptureResult partialResult)
{
    V newValue = partialResult.get(mKey);
    if (newValue != null)
    {
        mUpdatable.update(newValue);
    }
}
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        CaptureResult partialResult)
{
    mCallback.onCaptureProgressed(AndroidCameraCaptureSessionProxy.this, request,
            partialResult);
}
@Override
public void onCompleted(TotalCaptureResult result)
{
    long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
    if (mLastFrameTimestamp >= 0)
    {
        double deltaMillis = (timestamp - mLastFrameTimestamp) / 1000000.0;
        if (mLastDeltaMillis > 0)
        {
            double fractionalChange = (deltaMillis - mLastDeltaMillis) / mLastDeltaMillis;
            if (fractionalChange >= FRACTIONAL_CHANGE_STATS_THRESHOLD)
            {
                mUsageStatistics.cameraFrameDrop(deltaMillis, mLastDeltaMillis);
            }
            if (fractionalChange >= FRACTIONAL_CHANGE_LOG_THRESHOLD)
            {
                mLog.v("JANK! Time between frames (" + deltaMillis + "ms) increased by "
                        + (fractionalChange * 100) + "% over the last frame delta ("
                        + mLastDeltaMillis + "ms)");
            }
        }
        mLastDeltaMillis = deltaMillis;
    }
    mLastFrameTimestamp = timestamp;
}
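For intuition: at a steady 30 fps the inter-frame delta is about 33.3 ms. If one delta stretches to 50 ms, fractionalChange = (50 - 33.3) / 33.3 ≈ 0.50, a 50% increase, which would trip any threshold at or below 0.5. (The actual FRACTIONAL_CHANGE_* constant values are defined elsewhere and not shown in this snippet.)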
@Override
public void onProgressed(CaptureResult partialResult)
{
    for (ResponseListener listener : mListeners)
    {
        listener.onProgressed(partialResult);
    }
}
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        CaptureResult partialResult) {
    // Log.d(TAG, "mSessionCaptureCallback, onCaptureProgressed");
}