android.os.Trace#beginSection() Source Code Examples

Listed below are example usages of android.os.Trace#beginSection() collected from open-source projects; each project link leads to the full source on GitHub.
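
Before the individual examples, here is a minimal sketch of the pattern they all share: begin a named section, do the work, and end the section in a finally block so the slice is closed even if the work throws. The class and method names below are placeholders, and because Trace.beginSection()/endSection() require API 18 (JELLY_BEAN_MR2), the calls are guarded the same way several of the examples guard them.

import android.os.Build;
import android.os.Trace;

public final class TraceExample {

    /** Placeholder for whatever work should appear as a named slice in systrace. */
    private static void doWork() {
        // ...
    }

    public static void tracedWork() {
        // Trace.beginSection()/endSection() only exist on API 18 and above.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            Trace.beginSection("TraceExample#tracedWork");
        }
        try {
            doWork();
        } finally {
            // endSection() must be called on the same thread as the matching beginSection().
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
                Trace.endSection();
            }
        }
    }
}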

Example 1  Project: talkback  File: SwitchAccessNodeCompat.java
/**
 * Check if this node has been found to have bounds matching an ancestor, which means it gets
 * special treatment during traversal.
 *
 * @return {@code true} if this node was found to have the same bounds as an ancestor.
 */
public boolean getHasSameBoundsAsAncestor() {
  Trace.beginSection("SwitchAccessNodeCompat#getHasSameBoundsAsAncestor");
  // Only need to check parent
  if (boundsDuplicateAncestor == null) {
    SwitchAccessNodeCompat parent = getParent();
    if (parent == null) {
      boundsDuplicateAncestor = false;
    } else {
      Rect parentBounds = new Rect();
      Rect myBounds = new Rect();
      parent.getVisibleBoundsInScreen(parentBounds);
      getVisibleBoundsInScreen(myBounds);
      boundsDuplicateAncestor = myBounds.equals(parentBounds);
      parent.recycle();
    }
  }
  Trace.endSection();
  return boundsDuplicateAncestor;
}
 
Example 2  Project: android-perf-testing  File: EnableLogcatDump.java
/**
 * Extract the logcat buffer to a file after the test run.
 */
public void after() {
    try {
        if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            Trace.beginSection("Taking logcat");
        }
        ProcessBuilder processBuilder = new ProcessBuilder();

        processBuilder.command("logcat", "-d",
                "-f", PerfTestingUtils.getTestFile(mTestClass, mTestName, "logcat.log")
                        .getAbsolutePath());
        processBuilder.redirectErrorStream();
        Process process = processBuilder.start();
        process.waitFor();
        if (process.exitValue() != 0) {
            Log.e(LOG_TAG, "Error exit value while extracting logcat, exitValue=" +
                    process.exitValue());
        }
    } catch (Exception ignored) {
        Log.e(LOG_TAG, "Error while extracting logcat", ignored);
    } finally {
        if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            Trace.endSection();
        }
    }
}
 
Example 3  Project: AndroidProject  File: DebugLogAspect.java
/**
 * Advice executed before the target method runs.
 */
private void enterMethod(ProceedingJoinPoint joinPoint, DebugLog debugLog) {
    if (!AppConfig.isDebug()) {
        return;
    }

    CodeSignature codeSignature = (CodeSignature) joinPoint.getSignature();

    // Class that declares the method
    String className = codeSignature.getDeclaringType().getName();
    // Method name
    String methodName = codeSignature.getName();
    // Parameter names of the method
    String[] parameterNames = codeSignature.getParameterNames();
    // Argument values passed to the method
    Object[] parameterValues = joinPoint.getArgs();

    // Build and log the method information
    StringBuilder builder = getMethodLogInfo(className, methodName, parameterNames, parameterValues);

    log(debugLog.value(), builder.toString());

    final String section = builder.toString().substring(2);
    Trace.beginSection(section);
}
 
Example 4  Project: talkback  File: SwitchAccessNodeCompat.java
@Override
public boolean isVisibleToUser() {
  Trace.beginSection("SwitchAccessNodeCompat#isVisibleToUser");
  if (!isOnScreenAndVisibleToUser()) {
    Trace.endSection();
    return false;
  }

  // Views are considered visible only if a minimum number of pixels is showing.
  Rect visibleBounds = new Rect();
  getVisibleBoundsInScreen(visibleBounds);
  int visibleHeight = visibleBounds.height();
  int visibleWidth = visibleBounds.width();
  boolean isVisible =
      (visibleHeight >= MIN_VISIBLE_PIXELS) && (visibleWidth >= MIN_VISIBLE_PIXELS);
  Trace.endSection();
  return isVisible;
}
 
Example 5  Project: talkback  File: SwitchAccessService.java
@Override
public void onPreferenceChanged(SharedPreferences prefs, String key) {
  Trace.beginSection("SwitchAccessService#onPreferenceChanged");
  updateServiceInfoIfFeedbackTypeChanged();
  LogUtils.d(TAG, "A shared preference changed: %s", key);
  keyboardEventManager.reloadPreferences(this);

  // TODO: Refactor this out of SwitchAccessService.
  if (SwitchAccessPreferenceUtils.isScreenSwitchEnabled(this)) {
    overlayController.showScreenSwitch();
  } else {
    overlayController.hideScreenSwitch();
  }
  Trace.endSection();
}
 
Example 6  Project: talkback  File: UiChangeHandler.java
private void rebuildScanTree(
    WindowChangedListener windowChangedListener, List<AccessibilityEvent> windowChangeEventList) {
  Trace.beginSection("UiChangeHandler#rebuildScanTree");
  TreeScanNode firstOrLastNode;
  boolean shouldPlaceNodeFirst;
  if (overlayController.isMenuVisible()) {
    firstOrLastNode = new ClearOverlayNode(overlayController);
    shouldPlaceNodeFirst = false;
  } else {
    firstOrLastNode = new ShowGlobalMenuNode(overlayController);
    shouldPlaceNodeFirst = true;
  }

  PerformanceMonitor.getOrCreateInstance().startNewTimerEvent(TreeBuildingEvent.REBUILD_TREE);
  TreeScanNode treeScanNode =
      mainTreeBuilder.addWindowListToTree(
          SwitchAccessWindowInfo.convertZOrderWindowList(
              AccessibilityServiceCompatUtils.getWindows(service)),
          firstOrLastNode,
          shouldPlaceNodeFirst);
  PerformanceMonitor.getOrCreateInstance().stopTimerEvent(TreeBuildingEvent.REBUILD_TREE, true);
  ThreadUtils.runOnMainThread(
      () -> !isRunning,
      () -> {
        optionManager.clearFocusIfNewTree(treeScanNode);
        // After the focus is cleared, send the list of AccessibilityEvents generated by the UI
        // change to the feedback controller to generate screen hints for the UI change.
        sendWindowChangeEventsToWindowChangedListener(
            windowChangedListener, windowChangeEventList);
      });
  Trace.endSection();
}
 
Example 7  Project: talkback  File: SwitchAccessNodeCompat.java
/** Returns {@code true} if this object has actions that Switch Access can perform. */
public boolean hasActions() {
  Trace.beginSection("SwitchAccessNodeCompat#hasActions");
  for (AccessibilityActionCompat action : this.getActionList()) {
    if (ActionBuildingUtils.isActionSupportedByNode(action, this)) {
      Trace.endSection();
      return true;
    }
  }
  Trace.endSection();
  return false;
}
 
Example 8  Project: fresco  File: DefaultFrescoSystrace.java
@Override
public void flush() {
  // 127 is the max name length according to
  // https://developer.android.com/reference/android/os/Trace.html
  if (mStringBuilder.length() > 127) {
    mStringBuilder.setLength(127);
  }
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    Trace.beginSection(mStringBuilder.toString());
  }
}
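
As the comment notes, the framework caps section names at 127 characters; android.os.Trace documents that beginSection() rejects names that are too long. The fresco wrapper above truncates its StringBuilder in place; a hypothetical helper like the following sketch makes the same guard reusable (SafeTrace and MAX_SECTION_NAME_LEN are names invented for this example, not framework APIs).

import android.os.Build;
import android.os.Trace;

public final class SafeTrace {

    // Longest section name accepted by Trace.beginSection(), per the Android documentation.
    private static final int MAX_SECTION_NAME_LEN = 127;

    private SafeTrace() {}

    public static void beginSection(String name) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            // Truncate overly long names instead of letting beginSection() reject them.
            Trace.beginSection(
                name.length() > MAX_SECTION_NAME_LEN ? name.substring(0, MAX_SECTION_NAME_LEN) : name);
        }
    }

    public static void endSection() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            Trace.endSection();
        }
    }
}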
 
Example 9  Project: talkback  File: SwitchAccessNodeCompat.java
/**
 * Get the largest rectangle in the bounds of the View that is not covered by another window.
 *
 * @param visibleBoundsInScreen The rect to return the visible bounds in
 */
public void getVisibleBoundsInScreen(Rect visibleBoundsInScreen) {
  Trace.beginSection("SwitchAccessNodeCompat#getVisibleBoundsInScreen");
  updateVisibility(0 /* currentDepth */);
  visibleBoundsInScreen.set(this.visibleBoundsInScreen);
  Trace.endSection();
}
 
Example 10  Project: hugo  File: Hugo.java
private static void enterMethod(JoinPoint joinPoint) {
  if (!enabled) return;

  CodeSignature codeSignature = (CodeSignature) joinPoint.getSignature();

  Class<?> cls = codeSignature.getDeclaringType();
  String methodName = codeSignature.getName();
  String[] parameterNames = codeSignature.getParameterNames();
  Object[] parameterValues = joinPoint.getArgs();

  StringBuilder builder = new StringBuilder("\u21E2 ");
  builder.append(methodName).append('(');
  for (int i = 0; i < parameterValues.length; i++) {
    if (i > 0) {
      builder.append(", ");
    }
    builder.append(parameterNames[i]).append('=');
    builder.append(Strings.toString(parameterValues[i]));
  }
  builder.append(')');

  if (Looper.myLooper() != Looper.getMainLooper()) {
    builder.append(" [Thread:\"").append(Thread.currentThread().getName()).append("\"]");
  }

  Log.v(asTag(cls), builder.toString());

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    final String section = builder.toString().substring(2);
    Trace.beginSection(section);
  }
}
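
Hugo pairs this enterMethod() advice with exit advice that closes the section after the traced method returns. The project's exit code is not quoted in this listing, but based on the pattern above, the Trace-related half of it looks roughly like the following sketch (the method name and the checks are assumptions, not quoted code):

private static void exitMethod() {
  // 'enabled' is the same static flag checked by enterMethod() above.
  if (!enabled) return;

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    // Closes the section that enterMethod() opened on this thread.
    Trace.endSection();
  }
}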
 
Example 11  Project: Paideia  File: TensorflowClassifier.java
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
  // Log this method so that it can be analyzed with systrace.
  Trace.beginSection("Recognize");
  final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
  for (final String result : classifyImageBmp(bitmap).split("\n")) {
    Log.i(TAG, "Parsing [" + result + "]");

    // Clean up the string as needed
    final StringTokenizer st = new StringTokenizer(result);
    if (!st.hasMoreTokens()) {
      continue;
    }

    final String id = st.nextToken();
    final String confidenceString = st.nextToken();
    final float confidence = Float.parseFloat(confidenceString);

    final String title =
        result.substring(id.length() + confidenceString.length() + 2, result.length());

    if (!title.isEmpty()) {
      recognitions.add(new Recognition(id, title, confidence, null));
    }
  }
  Trace.endSection();
  return recognitions;
}
 
Example 12  Project: kickflip-android-sdk  File: CameraEncoder.java
/**
 * Called on Encoder thread
 *
 * @param surfaceTexture the SurfaceTexture that initiated the call to onFrameAvailable
 */
private void handleFrameAvailable(SurfaceTexture surfaceTexture) {
    if (TRACE) Trace.beginSection("handleFrameAvail");
    synchronized (mReadyForFrameFence) {
        if (!mReadyForFrames) {
            if (VERBOSE) Log.i(TAG, "Ignoring available frame, not ready");
            return;
        }
        mFrameNum++;
        if (VERBOSE && (mFrameNum % 30 == 0)) Log.i(TAG, "handleFrameAvailable");
        if (!surfaceTexture.equals(mSurfaceTexture))
            Log.w(TAG, "SurfaceTexture from OnFrameAvailable does not match saved SurfaceTexture!");

        if (mRecording) {
            mInputWindowSurface.makeCurrent();
            if (TRACE) Trace.beginSection("drainVEncoder");
            mVideoEncoder.drainEncoder(false);
            if (TRACE) Trace.endSection();
            if (mCurrentFilter != mNewFilter) {
                Filters.updateFilter(mFullScreen, mNewFilter);
                mCurrentFilter = mNewFilter;
                mIncomingSizeUpdated = true;
            }

            if (mIncomingSizeUpdated) {
                mFullScreen.getProgram().setTexSize(mSessionConfig.getVideoWidth(), mSessionConfig.getVideoHeight());
                mIncomingSizeUpdated = false;
            }

            surfaceTexture.getTransformMatrix(mTransform);
            if (TRACE) Trace.beginSection("drawVEncoderFrame");
            mFullScreen.drawFrame(mTextureId, mTransform);
            if (TRACE) Trace.endSection();
            if (!mEncodedFirstFrame) {
                mEncodedFirstFrame = true;
            }

            if (mThumbnailRequestedOnFrame == mFrameNum) {
                mThumbnailRequested = true;
            }
            if (mThumbnailRequested) {
                saveFrameAsImage();
                mThumbnailRequested = false;
            }

            mInputWindowSurface.setPresentationTime(mSurfaceTexture.getTimestamp());
            mInputWindowSurface.swapBuffers();

            if (mEosRequested) {
                /*if (VERBOSE) */
                Log.i(TAG, "Sending last video frame. Draining encoder");
                mVideoEncoder.signalEndOfStream();
                mVideoEncoder.drainEncoder(true);
                mRecording = false;
                mEosRequested = false;
                releaseEncoder();
                mState = STATE.UNINITIALIZED;
                synchronized (mStopFence) {
                    mStopFence.notify();
                }
            }
        }
    }

    // Signal GLSurfaceView to render
    mDisplayView.requestRender();

    if (TRACE) Trace.endSection();
}
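
The encoder example nests the "drainVEncoder" and "drawVEncoderFrame" sections inside the outer "handleFrameAvail" section. Sections form a per-thread stack: each endSection() closes the most recently begun section on that thread, so nested begin/end pairs must stay strictly balanced. (Note that the early return taken when mReadyForFrames is false skips the final endSection() while TRACE is enabled, leaving the outer section open.) A minimal sketch of balanced nesting, with placeholder names and API 18+ assumed:

import android.os.Trace;

public final class NestedTraceExample {

    public static void outerWork() {
        Trace.beginSection("outerWork");        // outer slice
        try {
            Trace.beginSection("innerWork");    // nested slice, shown inside outerWork
            try {
                // ... inner work ...
            } finally {
                Trace.endSection();             // closes "innerWork"
            }
            // ... remaining outer work ...
        } finally {
            Trace.endSection();                 // closes "outerWork"
        }
    }
}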
 
Example 13  Project: next18-ai-in-motion  File: CameraActivity.java
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    // We need to wait until we have a preview size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
        return;
    }
    if (rgbBytes == null) {
        rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
        final Image image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        if (isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        Trace.beginSection("imageAvailable");
        final Image.Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);
        yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        imageConverter =
                new Runnable() {
                    @Override
                    public void run() {
                        ImageUtils.convertYUV420ToARGB8888(
                                yuvBytes[0],
                                yuvBytes[1],
                                yuvBytes[2],
                                previewWidth,
                                previewHeight,
                                yRowStride,
                                uvRowStride,
                                uvPixelStride,
                                rgbBytes);
                    }
                };

        postInferenceCallback =
                new Runnable() {
                    @Override
                    public void run() {
                        image.close();
                        isProcessingFrame = false;
                    }
                };

        processImage();
    } catch (final Exception e) {
        Log.e("Exception!", e.toString());
        Trace.endSection();
        return;
    }
    Trace.endSection();
}
 
Example 14  Project: ml  File: TFLiteObjectDetectionAPIModel.java
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
  // Log this method so that it can be analyzed with systrace.
  Trace.beginSection("recognizeImage");

  Trace.beginSection("preprocessBitmap");
  // Preprocess the image data from 0-255 int to normalized float based
  // on the provided parameters.
  bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());

  imgData.rewind();
  for (int i = 0; i < inputSize; ++i) {
    for (int j = 0; j < inputSize; ++j) {
      int pixelValue = intValues[i * inputSize + j];
      if (isModelQuantized) {
        // Quantized model
        imgData.put((byte) ((pixelValue >> 16) & 0xFF));
        imgData.put((byte) ((pixelValue >> 8) & 0xFF));
        imgData.put((byte) (pixelValue & 0xFF));
      } else { // Float model
        imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
        imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
        imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
      }
    }
  }
  Trace.endSection(); // preprocessBitmap

  // Copy the input data into TensorFlow.
  Trace.beginSection("feed");
  outputLocations = new float[1][NUM_DETECTIONS][4];
  outputClasses = new float[1][NUM_DETECTIONS];
  outputScores = new float[1][NUM_DETECTIONS];
  numDetections = new float[1];

  Object[] inputArray = {imgData};
  Map<Integer, Object> outputMap = new HashMap<>();
  outputMap.put(0, outputLocations);
  outputMap.put(1, outputClasses);
  outputMap.put(2, outputScores);
  outputMap.put(3, numDetections);
  Trace.endSection();

  // Run the inference call.
  Trace.beginSection("run");
  tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
  Trace.endSection();

  // Show the best detections after scaling them back to the input size.
  final ArrayList<Recognition> recognitions = new ArrayList<>(NUM_DETECTIONS);
  for (int i = 0; i < NUM_DETECTIONS; ++i) {
    final RectF detection =
        new RectF(
            outputLocations[0][i][1] * inputSize,
            outputLocations[0][i][0] * inputSize,
            outputLocations[0][i][3] * inputSize,
            outputLocations[0][i][2] * inputSize);
    // SSD Mobilenet V1 Model assumes class 0 is background class
    // in label file and class labels start from 1 to number_of_classes+1,
    // while outputClasses correspond to class index from 0 to number_of_classes
    int labelOffset = 1;
    recognitions.add(
        new Recognition(
            "" + i,
            labels.get((int) outputClasses[0][i] + labelOffset),
            outputScores[0][i],
            detection));
  }
  Trace.endSection(); // "recognizeImage"
  return recognitions;
}
 
Example 15  Project: litho  File: Systrace.java
@Override
public void beginSection(String name) {
  if (BuildConfig.DEBUG && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    Trace.beginSection(name);
  }
}
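
Litho hides the raw call behind its own Systrace interface and repeats the SDK_INT check at each call site. If the androidx.core library is available, androidx.core.os.TraceCompat performs that version check internally, so a wrapper can delegate to it; the following is an alternative sketch under that assumption, not Litho's implementation (TRACING_ENABLED stands in for the BuildConfig.DEBUG flag used above):

import androidx.core.os.TraceCompat;

public class SystraceCompat {

    // Stand-in for the BuildConfig.DEBUG flag used in the example above.
    private static final boolean TRACING_ENABLED = true;

    public void beginSection(String name) {
        // TraceCompat is a no-op below API 18, so no explicit SDK_INT check is needed here.
        if (TRACING_ENABLED) {
            TraceCompat.beginSection(name);
        }
    }

    public void endSection() {
        if (TRACING_ENABLED) {
            TraceCompat.endSection();
        }
    }
}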
 
Example 16  Project: android-perf-testing  File: EnableTestTracing.java
@Override
public void before() {
    if (android.os.Build.VERSION.SDK_INT >= 18) {
        Trace.beginSection(mTestName);
    }
}
 
Example 17
/**
 * Callback for Camera2 API
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  // We need to wait until we have a preview size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  try {
    final Image image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    if (isProcessingFrame) {
      image.close();
      return;
    }
    isProcessingFrame = true;
    Trace.beginSection("imageAvailable");
    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);
    yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();

    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);
          }
        };

    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            image.close();
            isProcessingFrame = false;
          }
        };

    processImage();
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
    Trace.endSection();
    return;
  }
  Trace.endSection();
}
 
Example 18  Project: talkback  File: ShowActionsMenuNode.java
/**
 * Get the nodes associated with the given compat node. These include the given {@link
 * SwitchAccessNodeCompat} and any children with the same bounds that support actions. If the
 * given node does not have any actions, this returns null.
 *
 * @param nodeCompat The node whose associated nodes should be obtained
 * @return If this node has actions, a list of {@link SwitchAccessNodeCompat} that represents this
 *     node and any children with duplicate bounds that also support actions. Returns {@code null}
 *     otherwise
 */
@Nullable
private static List<SwitchAccessNodeCompat> getNodesIfHasActions(
    SwitchAccessNodeCompat nodeCompat) {
  Trace.beginSection("ShowActionsMenuNode#createNodeIfHasActions");
  // Ignore invisible nodes, disabled nodes, and nodes without actions. This is checked first as
  // these are the most common reasons to ignore the node.
  if (!nodeCompat.isEnabled() || !nodeCompat.hasActions() || !nodeCompat.isVisibleToUser()) {
    Trace.endSection();
    return null;
  }

  // Ignore nodes which have the same bounds as their actionable parents.
  if (nodeCompat.getHasSameBoundsAsAncestor()) {
    SwitchAccessNodeCompat parent = nodeCompat.getParent();
    // If the parent is null, ignore it. Likely, the tree will be updated soon.
    if (parent != null) {
      boolean parentHasActions = parent.hasActions();
      parent.recycle();
      if (parentHasActions) {
        // The parent is actionable, so it will hold this node's actions as well
        Trace.endSection();
        return null;
      }
    }
  }

  List<SwitchAccessNodeCompat> nodes = new ArrayList<>();
  nodes.add(nodeCompat.obtainCopy());

  // If child nodes have the same bounds, add to list of nodes.
  List<SwitchAccessNodeCompat> descendantsWithSameBounds =
      nodeCompat.getDescendantsWithDuplicateBounds();
  for (int i = 0; i < descendantsWithSameBounds.size(); i++) {
    SwitchAccessNodeCompat descendantWithSameBounds = descendantsWithSameBounds.get(i);
    if (descendantWithSameBounds.hasActions()) {
      nodes.add(descendantWithSameBounds);
    } else {
      descendantWithSameBounds.recycle();
    }
  }

  Trace.endSection();
  return nodes;
}
 
Example 19  Project: vlayout  File: VirtualLayoutManager.java
@Override
public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
        Trace.beginSection(TRACE_LAYOUT);
    }

    if (mNoScrolling && state.didStructureChange()) {
        mSpaceMeasured = false;
        mSpaceMeasuring = true;
    }


    runPreLayout(recycler, state);

    try {
        super.onLayoutChildren(recycler, state);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        // MAX_VALUE means invalidate the scrolling offset - no scroll
        runPostLayout(recycler, state, Integer.MAX_VALUE); // hack to indicate it's an initial layout
    }


    if ((mNestedScrolling || mNoScrolling) && mSpaceMeasuring) {
        // a measurement pass is required, so measure now
        mSpaceMeasured = true;
        // get last child
        int childCount = getChildCount();
        View lastChild = getChildAt(childCount - 1);
        if (lastChild != null) {
            RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) lastChild.getLayoutParams();
            // find the bottom of the last child view
            mMeasuredFullSpace = getDecoratedBottom(lastChild) + params.bottomMargin + computeAlignOffset(lastChild, true, false);

            if (mRecyclerView != null && mNestedScrolling) {
                ViewParent parent = mRecyclerView.getParent();
                if (parent instanceof View) {
                    // make sure the full space is the min of the measured space and the parent's measured height
                    mMeasuredFullSpace = Math.min(mMeasuredFullSpace, ((View) parent).getMeasuredHeight());
                }
            }
        } else {
            mSpaceMeasuring = false;
        }
        mSpaceMeasuring = false;
        if (mRecyclerView != null && getItemCount() > 0) {
            // relayout
            mRecyclerView.post(new Runnable() {
                @Override
                public void run() {
                    // post relayout
                    if (mRecyclerView != null)
                        mRecyclerView.requestLayout();
                }
            });
        }
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
        Trace.endSection();
    }
}
 
Example 20  Project: vlayout  File: VirtualLayoutManager.java
/**
 * Entry method for scrolling
 * {@inheritDoc}
 */
@Override
protected int scrollInternalBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
        Trace.beginSection(TRACE_SCROLL);
    }

    runPreLayout(recycler, state);

    int scrolled = 0;
    try {
        if (!mNoScrolling) {
            scrolled = super.scrollInternalBy(dy, recycler, state);
        } else {
            if (getChildCount() == 0 || dy == 0) {
                return 0;
            }

            mLayoutState.mRecycle = true;
            ensureLayoutStateExpose();
            final int layoutDirection = dy > 0 ? LayoutState.LAYOUT_END : LayoutState.LAYOUT_START;
            final int absDy = Math.abs(dy);
            updateLayoutStateExpose(layoutDirection, absDy, true, state);
            final int freeScroll = mLayoutState.mScrollingOffset;

            final int consumed = freeScroll + fill(recycler, mLayoutState, state, false);
            if (consumed < 0) {
                return 0;
            }
            scrolled = absDy > consumed ? layoutDirection * consumed : dy;
        }
    } catch (Exception e) {
        Log.w(TAG, Log.getStackTraceString(e), e);
        if (sDebuggable)
            throw e;

    } finally {
        runPostLayout(recycler, state, scrolled);
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
        Trace.endSection();
    }

    return scrolled;
}