android.util.Range Code Examples

The examples below show how the android.util.Range API is used in real projects; you can also click through to GitHub to view the original source.
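Before the project examples, here is a minimal sketch of the core android.util.Range API that they all build on (an immutable, closed interval; the values are illustrative):

import android.util.Range;

// android.util.Range models an immutable closed interval [lower, upper].
Range<Integer> fps = new Range<>(15, 30);        // or Range.create(15, 30)
int lower = fps.getLower();                      // 15
int upper = fps.getUpper();                      // 30
boolean inRange = fps.contains(24);              // true: 15 <= 24 <= 30
int clamped = fps.clamp(60);                     // 30, the nearest in-range value
Range<Integer> widened = fps.extend(45);         // [15, 45]
Range<Integer> overlap = fps.intersect(20, 60);  // [20, 30]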

Example 1  Project: webrtc_android   File: Camera2Session.java
private void findCaptureFormat() {
    checkIsOnCameraThread();

    Range<Integer>[] fpsRanges =
            cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
            Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);

    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
        reportError("No supported capture formats.");
        return;
    }

    final CaptureFormat.FramerateRange bestFpsRange =
            CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
}
 
/**
 * Create a new {@link HighSpeedVideoConfiguration}.
 *
 * @param width image width, in pixels (positive)
 * @param height image height, in pixels (positive)
 * @param fpsMin minimum frames per second for the configuration (positive)
 * @param fpsMax maximum frames per second for the configuration (at least 60)
 *
 * @throws IllegalArgumentException
 *              if width/height/fpsMin are not positive or fpsMax is less than 60
 *
 * @hide
 */
public HighSpeedVideoConfiguration(
        final int width, final int height, final int fpsMin, final int fpsMax,
        final int batchSizeMax) {
    if (fpsMax < HIGH_SPEED_MAX_MINIMAL_FPS) {
        throw new IllegalArgumentException("fpsMax must be at least " +
                HIGH_SPEED_MAX_MINIMAL_FPS);
    }
    mFpsMax = fpsMax;
    mWidth = checkArgumentPositive(width, "width must be positive");
    mHeight = checkArgumentPositive(height, "height must be positive");
    mFpsMin = checkArgumentPositive(fpsMin, "fpsMin must be positive");
    mSize = new Size(mWidth, mHeight);
    mBatchSizeMax = checkArgumentPositive(batchSizeMax, "batchSizeMax must be positive");
    mFpsRange = new Range<Integer>(mFpsMin, mFpsMax);
}
 
/**
 * Get the supported video sizes for an input high speed FPS range.
 *
 * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
 *
 * @param fpsRange one of the FPS ranges returned by {@link #getHighSpeedVideoFpsRanges()}
 * @return An array of video sizes to create high speed capture sessions for high speed streaming
 *         use cases.
 *
 * @throws IllegalArgumentException if input FPS range does not exist in the return value of
 *         getHighSpeedVideoFpsRanges
 * @see #getHighSpeedVideoFpsRanges()
 */
public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
    Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
    if (sizeCount == null || sizeCount == 0) {
        throw new IllegalArgumentException(String.format(
                "FpsRange %s does not support high speed video recording", fpsRange));
    }

    Size[] sizes = new Size[sizeCount];
    int i = 0;
    for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
        if (fpsRange.equals(config.getFpsRange())) {
            sizes[i++] = config.getSize();
        }
    }
    return sizes;
}
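For orientation, a minimal sketch (assuming cameraManager, cameraId and TAG are set up elsewhere) of how an application reaches these sizes through the public camera2 API:

CameraCharacteristics chars = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map =
        chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Each supported high speed FPS range maps to the sizes that can sustain it.
for (Range<Integer> fpsRange : map.getHighSpeedVideoFpsRanges()) {
    for (Size size : map.getHighSpeedVideoSizesFor(fpsRange)) {
        Log.d(TAG, size + " supports " + fpsRange);
    }
}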
 
private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
    sb.append("HighSpeedVideoConfigurations(");
    Size[] sizes = getHighSpeedVideoSizes();
    for (Size size : sizes) {
        Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
        for (Range<Integer> range : ranges) {
            sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
                    size.getHeight(), range.getLower(), range.getUpper()));
        }
    }
    // Remove the trailing ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}
 
Example 5  Project: android_9.0.0_r45   File: NetworkPolicyManager.java
/** {@hide} */
@Deprecated
public static Iterator<Pair<ZonedDateTime, ZonedDateTime>> cycleIterator(NetworkPolicy policy) {
    final Iterator<Range<ZonedDateTime>> it = policy.cycleIterator();
    return new Iterator<Pair<ZonedDateTime, ZonedDateTime>>() {
        @Override
        public boolean hasNext() {
            return it.hasNext();
        }

        @Override
        public Pair<ZonedDateTime, ZonedDateTime> next() {
            if (hasNext()) {
                final Range<ZonedDateTime> r = it.next();
                return Pair.create(r.getLower(), r.getUpper());
            } else {
                return Pair.create(null, null);
            }
        }
    };
}
 
Example 6  Project: VideoCRE   File: Camera2Session.java
private void findCaptureFormat() {
  checkIsOnCameraThread();

  Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  List<CaptureFormat.FramerateRange> framerateRanges =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + sizes);
  Logging.d(TAG, "Available fps ranges: " + framerateRanges);

  if (framerateRanges.isEmpty() || sizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  final CaptureFormat.FramerateRange bestFpsRange =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

  final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
  //CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

  captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
 
Example 7  Project: okuki   File: DataManager.java
public void loadMore() {
    if (!loading.get()) {
        setLoading(true);
        loadData(pageSize, results.size())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .doOnError(error -> setLoading(false))
                .subscribe(
                        list -> {
                            if (!list.isEmpty()) {
                                int start = results.size();
                                int end = start + list.size();
                                results.addAll(list);
                                if (start > 0) {
                                    rangeInserted.call(new Range<>(start, end));
                                } else {
                                    listUpdated.call(null);
                                }
                            }
                            setLoading(false);
                        },
                        Errors.log());
    }
}
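Note that this example uses Range<Integer> as a half-open [start, end) insertion span rather than a camera parameter. A hypothetical consumer (the subscribe call and adapter are assumptions for illustration, not okuki API) could map it onto a RecyclerView update:

// Hypothetical subscriber: translate the emitted span into an adapter update.
rangeInserted.subscribe(range -> {
    int positionStart = range.getLower();
    int itemCount = range.getUpper() - range.getLower(); // end was start + list.size()
    adapter.notifyItemRangeInserted(positionStart, itemCount);
});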
 
private Range<Integer> selectCameraFpsRange(String camId, final int fps) throws CameraAccessException {
    for (String id : cameraManager.getCameraIdList()) {
        if (id.equals(camId)) {
            CameraCharacteristics info = cameraManager.getCameraCharacteristics(id);
            List<Range<Integer>> fpsLst = new ArrayList<>();
            Collections.addAll(fpsLst,
                    info.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES));
            /* Pick the range with the smallest error from the desired fps.
             * Android seems to do a better job at color correction and avoiding
             * the 'dark frames' issue when the camera settings with the smallest
             * lower bound on the allowed frame rate range are selected. */
            return Collections.min(fpsLst, new Comparator<Range<Integer>>() {
                @Override
                public int compare(Range<Integer> lhs, Range<Integer> rhs) {
                    return calcError(lhs) - calcError(rhs);
                }

                private int calcError(Range<Integer> val) {
                    return val.getLower() + Math.abs(val.getUpper() - fps);
                }
            });
        }
    }
    return null;
}
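A sketch of putting the selected range to work (previewBuilder, captureSession and handler are assumed, and CameraAccessException handling is omitted):

Range<Integer> fpsRange = selectCameraFpsRange(cameraId, 30);
if (fpsRange != null) {
    previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
    captureSession.setRepeatingRequest(previewBuilder.build(), null, handler);
}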
 
Example 9  Project: webrtc_android   File: Camera2Enumerator.java
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> range : arrayRanges) {
    ranges.add(new CaptureFormat.FramerateRange(
        range.getLower() * unitFactor, range.getUpper() * unitFactor));
  }
  return ranges;
}
 
Example 10  Project: mobile-ar-sensor-logger   File: Camera2Proxy.java
private void setExposureAndIso() {
    Long exposureNanos = CameraCaptureActivity.mDesiredExposureTime;
    Long desiredIsoL = 30L * 30000000L / exposureNanos;
    Integer desiredIso = desiredIsoL.intValue();
    if (!expoStats.isEmpty()) {
        int index = expoStats.size() / 2;
        Long actualExpo = expoStats.get(index).mExposureNanos;
        Integer actualIso = expoStats.get(index).mIso;
        if (actualExpo <= exposureNanos) {
            exposureNanos = actualExpo;
            desiredIso = actualIso;
        } else {
            desiredIsoL = actualIso * actualExpo / exposureNanos;
            desiredIso = desiredIsoL.intValue();
        }
    }

    // fix exposure
    mPreviewRequestBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
    Range<Long> exposureTimeRange = mCameraCharacteristics.get(
            CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
    if (exposureTimeRange != null) {
        Log.d(TAG, "exposure time range " + exposureTimeRange.toString());
    }

    mPreviewRequestBuilder.set(
            CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNanos);
    Log.d(TAG, "Exposure time set to " + exposureNanos);

    // fix ISO
    Range<Integer> isoRange = mCameraCharacteristics.get(
            CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
    if (isoRange != null) {
        Log.d(TAG, "ISO range " + isoRange.toString());
    }

    mPreviewRequestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, desiredIso);
    Log.d(TAG, "ISO set to " + desiredIso);
}
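The method above logs the hardware ranges but applies the requested values as-is; a defensive variant (a sketch, not part of the original project) would clamp them into the ranges first:

if (exposureTimeRange != null) {
    exposureNanos = exposureTimeRange.clamp(exposureNanos); // stay within sensor limits
}
if (isoRange != null) {
    desiredIso = isoRange.clamp(desiredIso);
}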
 
Example 11  Project: android_9.0.0_r45   File: MultipathPolicyTracker.java
private long getRemainingDailyBudget(long limitBytes,
        Range<ZonedDateTime> cycle) {
    final long start = cycle.getLower().toInstant().toEpochMilli();
    final long end = cycle.getUpper().toInstant().toEpochMilli();
    final long totalBytes = getNetworkTotalBytes(start, end);
    final long remainingBytes = totalBytes == -1 ? 0 : Math.max(0, limitBytes - totalBytes);
    // With integer arithmetic, 1 + ((end - now - 1) / millisInDay) is equivalent to
    // ceil((double) (end - now) / millisInDay)
    final long remainingDays =
            1 + ((end - mClock.millis() - 1) / TimeUnit.DAYS.toMillis(1));

    return remainingBytes / Math.max(1, remainingDays);
}
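The comment relies on the standard integer identity ceil(x / d) = 1 + (x - 1) / d for x > 0, so a partial day left in the cycle counts as a full remaining day: with 1.5 days' worth of milliseconds remaining, remainingDays evaluates to 2.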
 
Example 12  Project: android_9.0.0_r45   File: MarshalQueryableRange.java
@Override
public int calculateMarshalSize(Range<T> value) {
    int nativeSize = getNativeSize();

    if (nativeSize != NATIVE_SIZE_DYNAMIC) {
        return nativeSize;
    } else {
        int lowerSize = mNestedTypeMarshaler.calculateMarshalSize(value.getLower());
        int upperSize = mNestedTypeMarshaler.calculateMarshalSize(value.getUpper());

        return lowerSize + upperSize;
    }
}
 
Example 13  Project: Camera2   File: BurstCaptureCommand.java
/**
 * On Nexus 5 limit frame rate to 24 fps. See b/18950682.
 */
private static void checkAndApplyNexus5FrameRateWorkaround(RequestBuilder request)
{
    if (ApiHelper.IS_NEXUS_5)
    {
        // For burst limit the frame rate to 24 fps.
        Range<Integer> frameRateBackOff = new Range<>(7, 24);
        request.setParam(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, frameRateBackOff);
    }
}
 
Example 14  Project: Camera2   File: OneCameraCharacteristicsImpl.java
@Override
public boolean isExposureCompensationSupported()
{
    // Turn off exposure compensation for Nexus 6 on L (API level 21)
    // because the bug in framework b/19219128.
    if (ApiHelper.IS_NEXUS_6 && ApiHelper.isLollipop())
    {
        return false;
    }
    Range<Integer> compensationRange =
            mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    return compensationRange.getLower() != 0 || compensationRange.getUpper() != 0;
}
 
Example 15  Project: Camera2   File: OneCameraCharacteristicsImpl.java
@Override
public int getMinExposureCompensation()
{
    if (!isExposureCompensationSupported())
    {
        return -1;
    }
    Range<Integer> compensationRange =
            mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    return compensationRange.getLower();
}
 
Example 16  Project: Camera2   File: OneCameraCharacteristicsImpl.java
@Override
public int getMaxExposureCompensation()
{
    if (!isExposureCompensationSupported())
    {
        return -1;
    }
    Range<Integer> compensationRange =
            mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    return compensationRange.getUpper();
}
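A sketch tying the three methods above together (characteristics, builder and desiredIndex are assumed): the compensation index is an integer number of steps, where the step size is the CONTROL_AE_COMPENSATION_STEP Rational (commonly 1/3 EV).

Range<Integer> range =
        characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational step =
        characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
int index = range.clamp(desiredIndex); // keep the index inside the supported range
builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, index);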
 
Example 17  Project: Camera2   File: AndroidCamera2Settings.java
public Camera2RequestSettingsSet getRequestSettings() {
    updateRequestSettingOrForceToDefault(CONTROL_AE_REGIONS,
            legacyAreasToMeteringRectangles(mMeteringAreas));
    updateRequestSettingOrForceToDefault(CONTROL_AF_REGIONS,
            legacyAreasToMeteringRectangles(mFocusAreas));
    updateRequestSettingOrForceToDefault(CONTROL_AE_TARGET_FPS_RANGE,
            new Range(mPreviewFpsRangeMin, mPreviewFpsRangeMax));
    // TODO: mCurrentPreviewFormat
    updateRequestSettingOrForceToDefault(JPEG_QUALITY, mJpegCompressQuality);
    // TODO: mCurrentPhotoFormat
    mRequestSettings.set(SCALER_CROP_REGION, mCropRectangle);
    // TODO: mCurrentZoomIndex
    updateRequestSettingOrForceToDefault(CONTROL_AE_EXPOSURE_COMPENSATION,
            mExposureCompensationIndex);
    updateRequestFlashMode();
    updateRequestFocusMode();
    updateRequestSceneMode();
    updateRequestWhiteBalance();
    updateRequestSettingOrForceToDefault(CONTROL_VIDEO_STABILIZATION_MODE,
            mVideoStabilizationEnabled ?
                    CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    // OIS shouldn't be on if software video stabilization is.
    mRequestSettings.set(LENS_OPTICAL_STABILIZATION_MODE,
            mVideoStabilizationEnabled ? LENS_OPTICAL_STABILIZATION_MODE_OFF :
                    null);
    updateRequestSettingOrForceToDefault(CONTROL_AE_LOCK, mAutoExposureLocked);
    updateRequestSettingOrForceToDefault(CONTROL_AWB_LOCK, mAutoWhiteBalanceLocked);
    // TODO: mRecordingHintEnabled
    updateRequestGpsData();
    if (mExifThumbnailSize != null) {
        updateRequestSettingOrForceToDefault(JPEG_THUMBNAIL_SIZE,
                new android.util.Size(
                        mExifThumbnailSize.width(), mExifThumbnailSize.height()));
    } else {
        updateRequestSettingOrForceToDefault(JPEG_THUMBNAIL_SIZE, null);
    }

    return mRequestSettings;
}
 
Example 18  Project: jellyfin-androidtv   File: Api21Builder.java
private void addVideoCapabilities(MediaCodecInfo.CodecCapabilities codecCapabilities, CodecProfile profile) {
    MediaCodecInfo.VideoCapabilities videoCaps = codecCapabilities.getVideoCapabilities();

    ArrayList<ProfileCondition> conditions = new ArrayList<>();

    conditions.add(new ProfileCondition(ProfileConditionType.NotEquals, ProfileConditionValue.IsAnamorphic, "true"));

    if (profile.getCodec() != null && profile.getCodec().toLowerCase().contains(CodecTypes.H264)) {
        conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.VideoBitDepth, "8"));
    }

    // Video max bitrate
    Range<Integer> bitrateRange = videoCaps.getBitrateRange();
    String maxBitrate = String.valueOf(bitrateRange.getUpper());
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.VideoBitrate, maxBitrate));

    // Video min bitrate
    String minBitrate = String.valueOf(bitrateRange.getLower());
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual, ProfileConditionValue.VideoBitrate, minBitrate));

    // Video max height
    Range<Integer> heightRange = videoCaps.getSupportedHeights();
    String maxHeight = String.valueOf(heightRange.getUpper());
    //conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.Height, maxHeight));

    // Video min height
    String minHeight = String.valueOf(heightRange.getLower());
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual, ProfileConditionValue.Height, minHeight));

    // Video max width
    Range<Integer> widthRange = videoCaps.getSupportedWidths();
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.Width, String.valueOf(widthRange.getUpper())));

    // Video min width
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual, ProfileConditionValue.Width, String.valueOf(widthRange.getLower())));

    profile.setConditions(conditions.toArray(new ProfileCondition[conditions.size()]));

    AddProfileLevels(codecCapabilities, profile);
}
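For context, a sketch of how a CodecCapabilities instance might be obtained before being handed to these builders (the H.264 mime type is an example):

MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
for (MediaCodecInfo info : codecList.getCodecInfos()) {
    if (info.isEncoder()) {
        continue; // playback profiles only need decoders
    }
    for (String type : info.getSupportedTypes()) {
        if (type.equalsIgnoreCase("video/avc")) {
            MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
            // caps.getVideoCapabilities() / caps.getAudioCapabilities() feed the
            // addVideoCapabilities(...) / addAudioCapabilities(...) methods above.
        }
    }
}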
 
Example 19  Project: jellyfin-androidtv   File: Api21Builder.java
private void addAudioCapabilities(MediaCodecInfo.CodecCapabilities codecCapabilities, CodecProfile profile) {
    MediaCodecInfo.AudioCapabilities audioCaps = codecCapabilities.getAudioCapabilities();

    ArrayList<ProfileCondition> conditions = new ArrayList<>();

    // Audio channels
    int maxAudioChannels = audioCaps.getMaxInputChannelCount();

    if (maxAudioChannels == 5) {
        maxAudioChannels = 6;
    }
    String channels = String.valueOf(maxAudioChannels);
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.AudioChannels, channels));

    // Audio sample rate
    // TODO: Add this later. There currently is no profile condition support for it

    // Audio max bitrate
    Range<Integer> bitrateRange = audioCaps.getBitrateRange();
    String maxBitrate = String.valueOf(bitrateRange.getUpper());
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.AudioBitrate, maxBitrate));

    // Audio min bitrate
    String minBitrate = String.valueOf(bitrateRange.getLower());
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual, ProfileConditionValue.AudioBitrate, minBitrate));

    profile.setConditions(conditions.toArray(new ProfileCondition[conditions.size()]));
}
 
Example 20  Project: VideoCRE   File: Camera2Enumerator.java
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> range : arrayRanges) {
    ranges.add(new CaptureFormat.FramerateRange(
        range.getLower() * unitFactor, range.getUpper() * unitFactor));
  }
  return ranges;
}
 
/**
 * A snapshot of all attributes is taken at construction. The attributes of a
 * {@code UiAutomationElement} instance are immutable. If the underlying
 * {@link AccessibilityNodeInfo} is updated, a new {@code UiAutomationElement}
 * instance will be created.
 */
private UiAutomationElement(AccessibilityNodeInfo node, int index) {
    super(checkNotNull(node));

    Map<Attribute, Object> attributes = new LinkedHashMap<>();
    // The same sequence will be used for node attributes in xml page source
    put(attributes, Attribute.INDEX, index);
    put(attributes, Attribute.PACKAGE, charSequenceToNullableString(node.getPackageName()));
    put(attributes, Attribute.CLASS, charSequenceToNullableString(node.getClassName()));
    put(attributes, Attribute.TEXT, AccessibilityNodeInfoHelpers.getText(node, true));
    put(attributes, Attribute.ORIGINAL_TEXT, AccessibilityNodeInfoHelpers.getText(node, false));
    put(attributes, Attribute.CONTENT_DESC, charSequenceToNullableString(node.getContentDescription()));
    put(attributes, Attribute.RESOURCE_ID, node.getViewIdResourceName());
    put(attributes, Attribute.CHECKABLE, node.isCheckable());
    put(attributes, Attribute.CHECKED, node.isChecked());
    put(attributes, Attribute.CLICKABLE, node.isClickable());
    put(attributes, Attribute.ENABLED, node.isEnabled());
    put(attributes, Attribute.FOCUSABLE, node.isFocusable());
    put(attributes, Attribute.FOCUSED, node.isFocused());
    put(attributes, Attribute.LONG_CLICKABLE, node.isLongClickable());
    put(attributes, Attribute.PASSWORD, node.isPassword());
    put(attributes, Attribute.SCROLLABLE, node.isScrollable());
    Range<Integer> selectionRange = AccessibilityNodeInfoHelpers.getSelectionRange(node);
    if (selectionRange != null) {
        attributes.put(Attribute.SELECTION_START, selectionRange.getLower());
        attributes.put(Attribute.SELECTION_END, selectionRange.getUpper());
    }
    put(attributes, Attribute.SELECTED, node.isSelected());
    put(attributes, Attribute.BOUNDS, AccessibilityNodeInfoHelpers.getVisibleBounds(node).toShortString());
    put(attributes, Attribute.DISPLAYED, node.isVisibleToUser());
    // Skip CONTENT_SIZE as it is quite expensive to compute it for each element
    this.attributes = Collections.unmodifiableMap(attributes);
    this.children = buildChildren(node);
}
 
@Nullable
public static Range<Integer> getSelectionRange(@Nullable AccessibilityNodeInfo nodeInfo) {
    if (nodeInfo == null) {
        return null;
    }

    int selectionStart = nodeInfo.getTextSelectionStart();
    int selectionEnd = nodeInfo.getTextSelectionEnd();
    if (selectionStart >= 0 && selectionStart != selectionEnd) {
        return new Range<>(selectionStart, selectionEnd);
    }
    return null;
}
 
Example 23  Project: webrtc_android   File: Camera2Enumerator.java
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
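  // WebRTC's CaptureFormat.FramerateRange is expressed in fps * 1000. Some HALs
  // already report CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES at that scale
  // (upper >= 1000), others in plain fps; return the factor that normalizes them.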
  if (fpsRanges.length == 0) {
    return 1000;
  }
  return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
 
Example 24  Project: webrtc_android   File: Camera2Session.java
@Override
public void onConfigured(CameraCaptureSession session) {
    checkIsOnCameraThread();
    Logging.d(TAG, "Camera capture session configured.");
    captureSession = session;
    try {
        /*
         * The viable options for video capture requests are:
         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
         *   post-processing.
         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
         *   quality.
         */
        final CaptureRequest.Builder captureRequestBuilder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // Set auto exposure fps range.
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
                        captureFormat.framerate.max / fpsUnitFactor));
        captureRequestBuilder.set(
                CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        chooseStabilizationMode(captureRequestBuilder);
        chooseFocusMode(captureRequestBuilder);

        captureRequestBuilder.addTarget(surface);
        session.setRepeatingRequest(
                captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
    } catch (CameraAccessException e) {
        reportError("Failed to start capture request. " + e);
        return;
    }

    surfaceTextureHelper.startListening((VideoFrame frame) -> {
        checkIsOnCameraThread();

        if (state != SessionState.RUNNING) {
            Logging.d(TAG, "Texture frame captured but camera is no longer running.");
            return;
        }

        if (!firstFrameReported) {
            firstFrameReported = true;
            final int startTimeMs =
                    (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
            camera2StartTimeMsHistogram.addSample(startTimeMs);
        }

        // Undo the mirror that the OS "helps" us with.
        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
        // Also, undo camera orientation, we report it as rotation instead.
        final VideoFrame modifiedFrame =
                new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                        (TextureBufferImpl) frame.getBuffer(),
                        /* mirror= */ isCameraFrontFacing,
                        /* rotation= */ -cameraOrientation),
                        /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
        modifiedFrame.release();
    });
    Logging.d(TAG, "Camera device successfully started.");
    callback.onDone(Camera2Session.this);
}
 
Example 25  Project: android_9.0.0_r45   File: StreamConfigurationMap.java
/**
 * Get the frames-per-second ranges (fpsMin, fpsMax) for an input high speed video size.
 * <p>
 * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
 * </p>
 * <p>
 * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
 * must not be used to set up capture requests that are submitted to unconstrained capture
 * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
 * </p>
 * <p>
 * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
 * </p>
 *
 * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
 * @return an array of supported high speed video recording FPS ranges. The upper bound of
 *         the returned ranges is guaranteed to be greater than or equal to 120.
 * @throws IllegalArgumentException if input size does not exist in the return value of
 *             getHighSpeedVideoSizes
 * @see #getHighSpeedVideoSizes()
 * @see #getHighSpeedVideoFpsRanges()
 */
public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
    Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
    if (fpsRangeCount == null || fpsRangeCount == 0) {
        throw new IllegalArgumentException(String.format(
                "Size %s does not support high speed video recording", size));
    }

    @SuppressWarnings("unchecked")
    Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
    int i = 0;
    for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
        if (size.equals(config.getSize())) {
            fpsRanges[i++] = config.getFpsRange();
        }
    }
    return fpsRanges;
}
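These ranges are only usable in a constrained high speed session. A condensed sketch of the public-API flow (camera, surfaces and handler are assumed; the device must advertise the CONSTRAINED_HIGH_SPEED_VIDEO capability, and the 120 fps range must come from getHighSpeedVideoFpsRangesFor):

camera.createConstrainedHighSpeedCaptureSession(surfaces,
        new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    CameraConstrainedHighSpeedCaptureSession highSpeedSession =
                            (CameraConstrainedHighSpeedCaptureSession) session;
                    CaptureRequest.Builder builder =
                            camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                    builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                            Range.create(120, 120));
                    for (Surface surface : surfaces) {
                        builder.addTarget(surface);
                    }
                    highSpeedSession.setRepeatingBurst(
                            highSpeedSession.createHighSpeedRequestList(builder.build()),
                            /* listener= */ null, handler);
                } catch (CameraAccessException e) {
                    // report the error
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                // report the error
            }
        }, handler);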
 
Example 26  Project: android_9.0.0_r45   File: LegacyMetadataMapper.java
private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    int[] legacyFps = new int[2];
    legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
    legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
    return legacyFps;
}
 
Example 27  Project: android_9.0.0_r45   File: LegacyRequestMapper.java
private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    int[] legacyFps = new int[2];
    legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower() * 1000;
    legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper() * 1000;
    return legacyFps;
}
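Note the factor of 1000 between these two otherwise identical helpers: the legacy Camera.Parameters API expresses preview FPS bounds in fps * 1000 (see Camera.Parameters#setPreviewFpsRange), which is why LegacyRequestMapper scales the camera2 Range, given in whole frames per second, when building a legacy request.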
 
Example 28  Project: android_9.0.0_r45   File: SurfaceUtils.java
/**
 * Verify that the surfaces are valid for high-speed recording mode
 * and that the FPS range is supported.
 *
 * @param surfaces the surfaces to verify as valid in terms of size and format
 * @param fpsRange the target high-speed FPS range to validate
 * @param config The stream configuration map for the device in question
 */
public static void checkConstrainedHighSpeedSurfaces(Collection<Surface> surfaces,
        Range<Integer> fpsRange, StreamConfigurationMap config) {
    if (surfaces == null || surfaces.size() == 0 || surfaces.size() > 2) {
        throw new IllegalArgumentException("Output target surface list must not be null and"
                + " the size must be 1 or 2");
    }

    List<Size> highSpeedSizes = null;
    if (fpsRange == null) {
        highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizes());
    } else {
        // Check the FPS range first if provided
        Range<Integer>[] highSpeedFpsRanges = config.getHighSpeedVideoFpsRanges();
        if(!Arrays.asList(highSpeedFpsRanges).contains(fpsRange)) {
            throw new IllegalArgumentException("Fps range " + fpsRange.toString() + " in the"
                    + " request is not a supported high speed fps range " +
                    Arrays.toString(highSpeedFpsRanges));
        }
        highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizesFor(fpsRange));
    }

    for (Surface surface : surfaces) {
        checkHighSpeedSurfaceFormat(surface);

        // Surface size must be supported high speed sizes.
        Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
        if (!highSpeedSizes.contains(surfaceSize)) {
            throw new IllegalArgumentException("Surface size " + surfaceSize.toString() + " is"
                    + " not part of the high speed supported size list " +
                    Arrays.toString(highSpeedSizes.toArray()));
        }
        // Each output surface must be either preview surface or recording surface.
        if (!SurfaceUtils.isSurfaceForPreview(surface) &&
                !SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
            throw new IllegalArgumentException("This output surface is neither preview nor "
                    + "hardware video encoding surface");
        }
        if (SurfaceUtils.isSurfaceForPreview(surface) &&
                SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
            throw new IllegalArgumentException("This output surface can not be both preview"
                    + " and hardware video encoding surface");
        }
    }

    // For 2 output surface case, they shouldn't be same type.
    if (surfaces.size() == 2) {
        // Up to here, each surface can only be either preview or recording.
        Iterator<Surface> iterator = surfaces.iterator();
        boolean isFirstSurfacePreview =
                SurfaceUtils.isSurfaceForPreview(iterator.next());
        boolean isSecondSurfacePreview =
                SurfaceUtils.isSurfaceForPreview(iterator.next());
        if (isFirstSurfacePreview == isSecondSurfacePreview) {
            throw new IllegalArgumentException("The 2 output surfaces must have different"
                    + " type");
        }
    }
}
 
Example 29  Project: android_9.0.0_r45   File: MarshalQueryableRange.java
@Override
public void marshal(Range<T> value, ByteBuffer buffer) {
    mNestedTypeMarshaler.marshal(value.getLower(), buffer);
    mNestedTypeMarshaler.marshal(value.getUpper(), buffer);
}
 
Example 30  Project: android_9.0.0_r45   File: MarshalQueryableRange.java
@Override
public Marshaler<Range<T>> createMarshaler(TypeReference<Range<T>> managedType,
        int nativeType) {
    return new MarshalerRange(managedType, nativeType);
}