下面列出了 android.os.Trace#endSection() 的实例代码,也可以点击链接到 GitHub 查看源代码,或在右侧发表评论。
/**
 * Returns {@code true} if this node is on screen, visible to the user, and showing at least
 * {@code MIN_VISIBLE_PIXELS} in both width and height.
 */
@Override
public boolean isVisibleToUser() {
  Trace.beginSection("SwitchAccessNodeCompat#isVisibleToUser");
  try {
    if (!isOnScreenAndVisibleToUser()) {
      return false;
    }
    // Views are considered visible only if a minimum number of pixels is showing.
    Rect visibleBounds = new Rect();
    getVisibleBoundsInScreen(visibleBounds);
    int visibleHeight = visibleBounds.height();
    int visibleWidth = visibleBounds.width();
    return (visibleHeight >= MIN_VISIBLE_PIXELS) && (visibleWidth >= MIN_VISIBLE_PIXELS);
  } finally {
    // Close the trace section on every exit path (the original duplicated endSection() before
    // each return and leaked the section if anything above threw).
    Trace.endSection();
  }
}
/**
 * Intended to mimic the behavior of onKeyEvent if this were the only service running. It will be
 * called from onKeyEvent, both from this service and from others in this apk (TalkBack). This
 * method must not block, since it will block onKeyEvent as well.
 *
 * @param keyEvent A key event
 * @return {@code true} if the event is handled, {@code false} otherwise.
 */
@Override
public boolean onKeyEventShared(KeyEvent keyEvent) {
  Trace.beginSection("SwitchAccessService#onKeyEventShared");
  try {
    if (keyEvent.getAction() == KeyEvent.ACTION_DOWN) {
      // Start timing from the key-down so the full handling latency is captured.
      PerformanceMonitor.getOrCreateInstance()
          .startNewTimerEvent(KeyPressEvent.UNKNOWN_KEY_ASSIGNMENT);
    }
    if (keyboardEventManager.onKeyEvent(keyEvent, analytics, this)) {
      // NOTE(review): acquire/release in immediate succession — presumably a momentary poke to
      // wake the device when a key is handled; confirm against the wake lock's configuration.
      wakeLock.acquire();
      wakeLock.release();
      return true;
    }
    return false;
  } finally {
    // Guarantees the trace section is closed even if key handling throws.
    Trace.endSection();
  }
}
/** Notify that preferences have changed. */
@Override
public void onPreferenceChanged(SharedPreferences sharedPreferences, String preferenceKey) {
  Trace.beginSection("OptionManager#onPreferenceChanged");
  try {
    Context context = overlayController.getContext();
    groupSelectionEnabled = SwitchAccessPreferenceUtils.isGroupSelectionEnabled(context);
    optionPaintArray = SwitchAccessPreferenceUtils.getHighlightPaints(context);
    /*
     * Always configure element 0 based on preferences. Only configure the others if we're
     * doing group selection.
     */
    if (!groupSelectionEnabled) {
      // Outside group selection only the first highlight is used; hide the rest.
      for (int i = 1; i < optionPaintArray.length; ++i) {
        optionPaintArray[i].setColor(Color.TRANSPARENT);
      }
    }
  } finally {
    // Close the trace section even if a preference read throws.
    Trace.endSection();
  }
}
/**
 * Return primary and secondary colors from the Material color palette that are similar to
 * {@param color}.
 *
 * <p>Similarity is judged by absolute hue difference against each entry of
 * {@code sPrimaryColors}; the secondary color is taken at the same palette index.
 */
public MaterialPalette calculatePrimaryAndSecondaryColor(int color) {
  Trace.beginSection("calculatePrimaryAndSecondaryColor");
  try {
    final float colorHue = hue(color);
    float minimumDistance = Float.MAX_VALUE;
    int indexBestMatch = 0;
    for (int i = 0; i < sPrimaryColors.length(); i++) {
      final int primaryColor = sPrimaryColors.getColor(i, 0);
      final float comparedHue = hue(primaryColor);
      // No need to be perceptually accurate when calculating color distances since
      // we are only mapping to 15 colors. Being slightly inaccurate isn't going to change
      // the mapping very often.
      // NOTE(review): this distance does not wrap around the hue circle (0 vs 359 reads as
      // far apart) — presumably acceptable per the comment above; confirm before reusing.
      final float distance = Math.abs(comparedHue - colorHue);
      if (distance < minimumDistance) {
        minimumDistance = distance;
        indexBestMatch = i;
      }
    }
    return new MaterialPalette(
        sPrimaryColors.getColor(indexBestMatch, 0), sSecondaryColors.getColor(indexBestMatch, 0));
  } finally {
    // Close the trace section on every exit path, including exceptions.
    Trace.endSection();
  }
}
/**
 * Shrinks {@code visibleRect} so it avoids the bounds of each window stacked above this node's
 * window, except where the overlap is too small (on both axes) to be worth cropping.
 *
 * @param visibleRect In/out rect in screen coordinates; reduced in place.
 */
private void reduceVisibleRectangleForWindowsAbove(Rect visibleRect) {
  Trace.beginSection("SwitchAccessNodeCompat#reduceVisibleRectangleForWindowsAbove");
  try {
    Rect windowBoundsInScreen = new Rect();
    int visibleRectWidth = visibleRect.right - visibleRect.left;
    int visibleRectHeight = visibleRect.bottom - visibleRect.top;
    for (int i = 0; i < windowsAbove.size(); ++i) {
      windowsAbove.get(i).getBoundsInScreen(windowBoundsInScreen);
      windowBoundsInScreen.sort();
      Rect intersectingRectangle = new Rect(visibleRect);
      if (intersectingRectangle.intersect(windowBoundsInScreen)) {
        // If the rect above occupies less than a fraction of both sides of this rect, don't
        // adjust this rect's bounds. This prevents things like FABs changing the bounds
        // of scroll views under them.
        if (((intersectingRectangle.right - intersectingRectangle.left)
                < (visibleRectWidth * MIN_INTERSECTION_TO_CROP))
            && ((intersectingRectangle.bottom - intersectingRectangle.top)
                < (visibleRectHeight * MIN_INTERSECTION_TO_CROP))) {
          // try/finally below ends the trace section; no per-return endSection() needed.
          return;
        }
        adjustRectToAvoidIntersection(visibleRect, windowBoundsInScreen);
      }
    }
  } finally {
    Trace.endSection();
  }
}
/**
 * Get the largest rectangle in the bounds of the View that is not covered by another window.
 *
 * @param visibleBoundsInScreen The rect to return the visible bounds in
 */
public void getVisibleBoundsInScreen(Rect visibleBoundsInScreen) {
  Trace.beginSection("SwitchAccessNodeCompat#getVisibleBoundsInScreen");
  try {
    // Lazily (re)compute visibility, then copy the cached bounds into the caller's rect.
    updateVisibility(0 /* currentDepth */);
    visibleBoundsInScreen.set(this.visibleBoundsInScreen);
  } finally {
    // Close the trace section even if updateVisibility throws.
    Trace.endSection();
  }
}
/**
 * Rebuilds the scan tree from the current window list and schedules a main-thread pass to clear
 * focus and deliver the window-change events.
 *
 * @param windowChangedListener Listener to notify once focus has been cleared
 * @param windowChangeEventList AccessibilityEvents generated by the UI change
 */
private void rebuildScanTree(
    WindowChangedListener windowChangedListener, List<AccessibilityEvent> windowChangeEventList) {
  Trace.beginSection("UiChangeHandler#rebuildScanTree");
  try {
    TreeScanNode firstOrLastNode;
    boolean shouldPlaceNodeFirst;
    if (overlayController.isMenuVisible()) {
      // A menu is showing: offer a way to dismiss it, placed last in the scan order.
      firstOrLastNode = new ClearOverlayNode(overlayController);
      shouldPlaceNodeFirst = false;
    } else {
      // No menu: offer the global menu entry point, placed first in the scan order.
      firstOrLastNode = new ShowGlobalMenuNode(overlayController);
      shouldPlaceNodeFirst = true;
    }
    PerformanceMonitor.getOrCreateInstance().startNewTimerEvent(TreeBuildingEvent.REBUILD_TREE);
    TreeScanNode treeScanNode =
        mainTreeBuilder.addWindowListToTree(
            SwitchAccessWindowInfo.convertZOrderWindowList(
                AccessibilityServiceCompatUtils.getWindows(service)),
            firstOrLastNode,
            shouldPlaceNodeFirst);
    PerformanceMonitor.getOrCreateInstance().stopTimerEvent(TreeBuildingEvent.REBUILD_TREE, true);
    ThreadUtils.runOnMainThread(
        () -> !isRunning,
        () -> {
          optionManager.clearFocusIfNewTree(treeScanNode);
          // After the focus is cleared, send the list of AccessibilityEvents generated by the UI
          // change to the feedback controller to generate screen hints for the UI change.
          sendWindowChangeEventsToWindowChangedListener(
              windowChangedListener, windowChangeEventList);
        });
  } finally {
    // Close the trace section even if tree building throws.
    Trace.endSection();
  }
}
/**
 * Computes and caches {@code visibleBoundsInScreen} for this node, clipping against windows
 * drawn above. No-op once calculated or past {@code MAX_DEPTH}.
 *
 * @param currentDepth Recursion depth guard; callers start at 0
 */
private void updateVisibility(int currentDepth) {
  if (visibilityAndSpokenTextCalculated || (currentDepth > MAX_DEPTH)) {
    return;
  }
  PerformanceMonitor.getOrCreateInstance()
      .startNewTimerEvent(TreeBuildingEvent.SCREEN_VISIBILITY_UPDATE);
  visibleBoundsInScreen = new Rect();
  if (!isOnScreenAndVisibleToUser()) {
    // Invisible nodes get empty bounds; note the trace section is never begun on this path.
    visibleBoundsInScreen.setEmpty();
    PerformanceMonitor.getOrCreateInstance()
        .stopTimerEvent(TreeBuildingEvent.SCREEN_VISIBILITY_UPDATE, false);
    return;
  }
  Trace.beginSection("SwitchAccessNodeCompat#updateVisibility (when visible to user)");
  try {
    getBoundsInScreen(visibleBoundsInScreen);
    visibleBoundsInScreen.sort();
    // Deal with visibility implications from windows above. However, do not update visibility for
    // sibling views as we cannot do so robustly. Notably, while we have drawing order, that is not
    // enough as views can be transparent and let touches through.
    reduceVisibleRectangleForWindowsAbove(visibleBoundsInScreen);
    PerformanceMonitor.getOrCreateInstance()
        .stopTimerEvent(TreeBuildingEvent.SCREEN_VISIBILITY_UPDATE, false);
    visibilityAndSpokenTextCalculated = true;
  } finally {
    // Close the trace section even if bounds computation throws.
    Trace.endSection();
  }
}
/** Returns {@code true} if this object has actions that Switch Access can perform. */
public boolean hasActions() {
  Trace.beginSection("SwitchAccessNodeCompat#hasActions");
  try {
    for (AccessibilityActionCompat action : this.getActionList()) {
      if (ActionBuildingUtils.isActionSupportedByNode(action, this)) {
        return true;
      }
    }
    return false;
  } finally {
    // Single exit point for the trace section instead of one endSection() per return;
    // also closes the section if getActionList()/isActionSupportedByNode throws.
    Trace.endSection();
  }
}
/** Dispatches incoming accessibility events to the processor and filter while the screen is on. */
@Override
public void onAccessibilityEvent(AccessibilityEvent event) {
  Trace.beginSection("SwitchAccessService#onAccessibilityEvent");
  try {
    // Only process the AccessibilityEvents when the screen is on.
    if (screenMonitor != null && screenMonitor.isScreenOn()) {
      if (eventProcessor != null) {
        eventProcessor.onAccessibilityEvent(event);
      }
      if (accessibilityEventFilter != null) {
        accessibilityEventFilter.onAccessibilityEvent(event);
      }
    }
  } finally {
    // Close the trace section even if event processing throws.
    Trace.endSection();
  }
}
/**
 * Gets all descendants whose bounds in the screen duplicate this node's bounds.
 *
 * <p>(Doc fix: this method collects every matching descendant into a list and never returns
 * {@code null}; the previous javadoc incorrectly described a single nullable child.)
 *
 * @return A list of descendants with duplicate bounds; empty if none exist.
 */
public List<SwitchAccessNodeCompat> getDescendantsWithDuplicateBounds() {
  Trace.beginSection("SwitchAccessNodeCompat#getDescendantsWithDuplicateBounds");
  try {
    Rect myBounds = new Rect();
    getBoundsInScreen(myBounds);
    List<SwitchAccessNodeCompat> descendantsWithDuplicateBounds = new ArrayList<>();
    addDescendantsWithBoundsToList(descendantsWithDuplicateBounds, myBounds);
    return descendantsWithDuplicateBounds;
  } finally {
    // Close the trace section even if descendant traversal throws.
    Trace.endSection();
  }
}
/**
 * Ends the current systrace section when verbose tracing is enabled. The tracing API requires
 * API 18; the named {@code JELLY_BEAN_MR2} constant replaces the magic number 18, consistent
 * with the other SDK checks in this file.
 */
public static void endSection() {
  if (VERBOSE_TRACING && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    Trace.endSection();
  }
}
/**
 * Runs object detection on {@code bitmap} and returns up to 100 recognitions ordered by
 * descending confidence.
 *
 * <p>Fixes: the result loop previously used {@code i < Math.min(pq.size(), 100)} as its
 * condition — since {@code pq.size()} shrinks with each {@code poll()}, only about half the
 * detections were returned. The bound is now computed once before draining. Raw
 * {@code PriorityQueue}/{@code ArrayList} types were also replaced with generic ones.
 */
public List<Recognition> recognizeImage(Bitmap bitmap) {
  Trace.beginSection("recognizeImage");

  Trace.beginSection("preprocessBitmap");
  bitmap.getPixels(this.intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
  // Unpack each ARGB pixel into 3 bytes in R, G, B order (alpha is dropped).
  for (int i = 0; i < this.intValues.length; i++) {
    this.byteValues[(i * 3) + 2] = (byte) (this.intValues[i] & 255);
    this.byteValues[(i * 3) + 1] = (byte) ((this.intValues[i] >> 8) & 255);
    this.byteValues[(i * 3)] = (byte) ((this.intValues[i] >> 16) & 255);
  }
  Trace.endSection();

  Trace.beginSection("feed");
  this.inferenceInterface.feed(this.inputName, this.byteValues, 1, (long) this.inputSize, (long) this.inputSize, 3);
  Trace.endSection();

  Trace.beginSection("run");
  this.inferenceInterface.run(this.outputNames, this.logStats);
  Trace.endSection();

  Trace.beginSection("fetch");
  this.outputLocations = new float[400];
  this.outputScores = new float[100];
  this.outputClasses = new float[100];
  this.outputNumDetections = new float[1];
  this.inferenceInterface.fetch(this.outputNames[0], this.outputLocations);
  this.inferenceInterface.fetch(this.outputNames[1], this.outputScores);
  this.inferenceInterface.fetch(this.outputNames[2], this.outputClasses);
  this.inferenceInterface.fetch(this.outputNames[3], this.outputNumDetections);
  Trace.endSection();

  // Order detections by descending confidence.
  PriorityQueue<Recognition> pq =
      new PriorityQueue<>(
          1,
          (lhs, rhs) -> Float.compare(rhs.getConfidence().floatValue(), lhs.getConfidence().floatValue()));
  for (int i = 0; i < this.outputScores.length; i++) {
    // Locations come back 4 per detection; indices 1/0/3/2 map to left/top/right/bottom
    // (scaled to input size). NOTE(review): presumably normalized [ymin,xmin,ymax,xmax] —
    // confirm against the model's output spec.
    pq.add(new Recognition("" + ((int) this.outputClasses[i]), (String) this.labels.get((int) this.outputClasses[i]), Float.valueOf(this.outputScores[i]), new RectF(this.outputLocations[(i * 4) + 1] * ((float) this.inputSize), this.outputLocations[i * 4] * ((float) this.inputSize), this.outputLocations[(i * 4) + 3] * ((float) this.inputSize), this.outputLocations[(i * 4) + 2] * ((float) this.inputSize))));
  }
  ArrayList<Recognition> recognitions = new ArrayList<>();
  // Compute the bound once: pq shrinks as we poll, so it must not appear in the condition.
  int numResults = Math.min(pq.size(), 100);
  for (int i = 0; i < numResults; i++) {
    recognitions.add(pq.poll());
  }
  Trace.endSection();
  return recognitions;
}
/**
 * Runs TFLite object detection on {@code bitmap} and returns up to 2 recognitions whose class
 * index is within the valid label range [0, 90].
 *
 * <p>Cleanups (behavior unchanged): generic {@code HashMap}/{@code ArrayList} instead of raw
 * types, array literals instead of reflective {@code Array.newInstance}, autoboxing instead of
 * explicit {@code Integer.valueOf}, and loop variables scoped to their loops.
 */
public List<Recognition> recognizeImage(Bitmap bitmap) {
  Trace.beginSection("recognizeImage");

  Trace.beginSection("preprocessBitmap");
  bitmap.getPixels(this.intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
  this.imgData.rewind();
  for (int i = 0; i < this.inputSize; i++) {
    for (int j = 0; j < this.inputSize; j++) {
      int pixelValue = this.intValues[(this.inputSize * i) + j];
      if (this.isModelQuantized) {
        // Quantized model: raw R, G, B bytes.
        this.imgData.put((byte) ((pixelValue >> 16) & 255));
        this.imgData.put((byte) ((pixelValue >> 8) & 255));
        this.imgData.put((byte) (pixelValue & 255));
      } else {
        // Float model: normalize each channel to [-1, 1].
        this.imgData.putFloat((((float) ((pixelValue >> 16) & 255)) - 128.0f) / 128.0f);
        this.imgData.putFloat((((float) ((pixelValue >> 8) & 255)) - 128.0f) / 128.0f);
        this.imgData.putFloat((((float) (pixelValue & 255)) - 128.0f) / 128.0f);
      }
    }
  }
  Trace.endSection();

  Trace.beginSection("feed");
  this.outputLocations = new float[1][2][4];
  this.outputClasses = new float[1][2];
  this.outputScores = new float[1][2];
  this.numDetections = new float[1];
  Object[] inputArray = new Object[] {this.imgData};
  Map<Integer, Object> outputMap = new HashMap<>();
  outputMap.put(0, this.outputLocations);
  outputMap.put(1, this.outputClasses);
  outputMap.put(2, this.outputScores);
  outputMap.put(3, this.numDetections);
  Trace.endSection();

  Trace.beginSection("run");
  this.tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
  Trace.endSection();

  ArrayList<Recognition> recognitions = new ArrayList<>(2);
  for (int i = 0; i < 2; i++) {
    // Locations indices 1/0/3/2 map to left/top/right/bottom, scaled to the input size.
    RectF detection = new RectF(this.outputLocations[0][i][1] * ((float) this.inputSize), this.outputLocations[0][i][0] * ((float) this.inputSize), this.outputLocations[0][i][3] * ((float) this.inputSize), this.outputLocations[0][i][2] * ((float) this.inputSize));
    int nClass = (int) this.outputClasses[0][i];
    if (nClass >= 0 && nClass <= 90) {
      // Label list is offset by one relative to the model's class indices.
      recognitions.add(new Recognition("" + i, (String) this.labels.get(nClass + 1), Float.valueOf(this.outputScores[0][i]), detection));
    }
  }
  Trace.endSection();
  return recognitions;
}
/**
 * Runs inference on {@code bitmap} and returns the raw output tensors indexed by block. Only
 * block index 1 is fetched; every other slot of the returned array remains {@code null}.
 */
public float[][] fetch(final Bitmap bitmap) {
  // Log this method so that it can be analyzed with systrace.
  Trace.beginSection("recognizeImage");

  // Preprocess the image data from 0-255 int to normalized float based
  // on the provided parameters.
  Trace.beginSection("preprocessBitmap");
  bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
  for (int p = 0; p < intValues.length; ++p) {
    final int pixel = intValues[p];
    // Channel order: R, G, B, each scaled into [0, 1].
    floatValues[p * 3] = ((pixel >> 16) & 0xFF) / 255.0f;
    floatValues[p * 3 + 1] = ((pixel >> 8) & 0xFF) / 255.0f;
    floatValues[p * 3 + 2] = (pixel & 0xFF) / 255.0f;
  }
  Trace.endSection(); // preprocessBitmap

  // Copy the input data into TensorFlow.
  Trace.beginSection("feed");
  inferenceInterface.feed(inputName, floatValues, 1, inputSize, inputSize, 3);
  Trace.endSection();

  // Run the inference call.
  Trace.beginSection("run");
  inferenceInterface.run(outputNames, logStats);
  Trace.endSection();

  final float[][] finalOutput = new float[blockSize.length][];
  for (int block = 0; block < blockSize.length; block++) {
    // Only block 1 is fetched (positive guard instead of continue; behavior unchanged).
    if (block == 1) {
      // Copy the output Tensor back into the output array.
      Trace.beginSection("fetch i");
      final int gridWidth = bitmap.getWidth() / blockSize[block];
      final int gridHeight = bitmap.getHeight() / blockSize[block];
      final float[] output =
          new float[gridWidth * gridHeight * (NUM_CLASSES + 5) * NUM_BOXES_PER_BLOCK];
      Log.d(TAG, String.format("output0 size is --> %d * %d * (%d + 5) * %d = %d", gridWidth, gridHeight, NUM_CLASSES, NUM_BOXES_PER_BLOCK, gridWidth * gridHeight * (NUM_CLASSES + 5) * NUM_BOXES_PER_BLOCK ));
      inferenceInterface.fetch(outputNames[block], output);
      Trace.endSection();
      finalOutput[block] = output;
    }
  }
  Trace.endSection(); // "recognizeImage"
  return finalOutput;
}
/**
 * Worker loop for the audio capture/encode thread.
 *
 * <p>Flow: set up and start AudioRecord; signal readiness via mReadyFence; block on
 * mRecordingFence until recording is requested; then alternate draining the encoder and feeding
 * it audio until recording stops; finally send end-of-stream, stop capture, drain the remaining
 * output, and release the encoder.
 */
@Override
public void run() {
setupAudioRecord();
mAudioRecord.startRecording();
// Hand-rolled ready latch: tell the creating thread this worker is initialized.
synchronized (mReadyFence){
mThreadReady = true;
mReadyFence.notify();
}
// Park until recording is actually requested.
synchronized (mRecordingFence) {
while (!mRecordingRequested) {
try {
mRecordingFence.wait();
} catch (InterruptedException e) {
// NOTE(review): the interrupt is swallowed and the wait loop resumes; the interrupt status
// is not restored (Thread.currentThread().interrupt()). Restoring it here would make the
// next wait() throw immediately and busy-spin — confirm interruption is never used to stop
// this thread before changing this.
e.printStackTrace();
}
}
}
if (VERBOSE) Log.i(TAG, "Begin Audio transmission to encoder. encoder : " + mEncoderCore.mEncoder);
// Steady state: drain encoded output, then submit fresh audio, until stop is requested.
while (mRecordingRequested) {
if (TRACE) Trace.beginSection("drainAudio");
mEncoderCore.drainEncoder(false);
if (TRACE) Trace.endSection();
if (TRACE) Trace.beginSection("sendAudio");
sendAudioToEncoder(false);
if (TRACE) Trace.endSection();
}
mThreadReady = false;
/*if (VERBOSE) */ Log.i(TAG, "Exiting audio encode loop. Draining Audio Encoder");
// Shutdown: send end-of-stream, stop capture, drain whatever the encoder still holds.
if (TRACE) Trace.beginSection("sendAudio");
sendAudioToEncoder(true);
if (TRACE) Trace.endSection();
mAudioRecord.stop();
if (TRACE) Trace.beginSection("drainAudioFinal");
mEncoderCore.drainEncoder(true);
if (TRACE) Trace.endSection();
mEncoderCore.release();
mThreadRunning = false;
}
/**
 * Callback for Camera2 API
 *
 * <p>Grabs the latest frame, records its YUV planes, and kicks off processing. Frames arriving
 * while one is in flight are dropped (closed immediately).
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  // We need wait until we have some size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  try {
    final Image image = reader.acquireLatestImage();
    if (image == null) {
      return;
    }
    if (isProcessingFrame) {
      // A frame is already in flight; drop this one.
      image.close();
      return;
    }
    isProcessingFrame = true;
    Trace.beginSection("imageAvailable");
    // try/finally scopes endSection() to the begun section only. The original called
    // Trace.endSection() in the catch block even when the exception was thrown BEFORE
    // beginSection() (e.g. in acquireLatestImage), ending a section it never began.
    try {
      final Plane[] planes = image.getPlanes();
      fillBytes(planes, yuvBytes);
      yRowStride = planes[0].getRowStride();
      final int uvRowStride = planes[1].getRowStride();
      final int uvPixelStride = planes[1].getPixelStride();
      imageConverter =
          new Runnable() {
            @Override
            public void run() {
              ImageUtils.convertYUV420ToARGB8888(
                  yuvBytes[0],
                  yuvBytes[1],
                  yuvBytes[2],
                  previewWidth,
                  previewHeight,
                  yRowStride,
                  uvRowStride,
                  uvPixelStride,
                  rgbBytes);
            }
          };
      postInferenceCallback =
          new Runnable() {
            @Override
            public void run() {
              // Release the frame and allow the next one through.
              image.close();
              isProcessingFrame = false;
            }
          };
      processImage();
    } finally {
      Trace.endSection();
    }
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
  }
}
/**
 * Ends the most recently begun systrace section on this thread. Requires API 18+ (hence
 * {@code @TargetApi(18)}); the caller must have a matching {@code Trace.beginSection()}.
 */
@TargetApi(18)
private void endTrace(){
Trace.endSection();
}
/**
 * Lays out children, measuring the required full space afterwards when scrolling is disabled or
 * nested. The systrace section is now closed in a finally block: the original skipped
 * {@code Trace.endSection()} when {@code super.onLayoutChildren} threw (the exception is
 * rethrown), leaking the section.
 */
@Override
public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    Trace.beginSection(TRACE_LAYOUT);
  }
  try {
    if (mNoScrolling && state.didStructureChange()) {
      mSpaceMeasured = false;
      mSpaceMeasuring = true;
    }
    runPreLayout(recycler, state);
    try {
      super.onLayoutChildren(recycler, state);
    } catch (Exception e) {
      e.printStackTrace();
      throw e;
    } finally {
      // MAX_VALUE means invalidate scrolling offset - no scroll
      runPostLayout(recycler, state, Integer.MAX_VALUE); // hack to indicate its an initial layout
    }
    if ((mNestedScrolling || mNoScrolling) && mSpaceMeasuring) {
      // measure required, so do measure
      mSpaceMeasured = true;
      // get last child
      int childCount = getChildCount();
      View lastChild = getChildAt(childCount - 1);
      if (lastChild != null) {
        RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) lastChild.getLayoutParams();
        // found the end of last child view
        mMeasuredFullSpace =
            getDecoratedBottom(lastChild)
                + params.bottomMargin
                + computeAlignOffset(lastChild, true, false);
        if (mRecyclerView != null && mNestedScrolling) {
          ViewParent parent = mRecyclerView.getParent();
          if (parent instanceof View) {
            // make sure the fullspace be the min value of measured space and parent's height
            mMeasuredFullSpace = Math.min(mMeasuredFullSpace, ((View) parent).getMeasuredHeight());
          }
        }
      } else {
        mSpaceMeasuring = false;
      }
      // Measuring pass is complete either way.
      mSpaceMeasuring = false;
      if (mRecyclerView != null && getItemCount() > 0) {
        // relayout
        mRecyclerView.post(new Runnable() {
          @Override
          public void run() {
            // post relayout
            if (mRecyclerView != null) {
              mRecyclerView.requestLayout();
            }
          }
        });
      }
    }
  } finally {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
      Trace.endSection();
    }
  }
}
/**
 * Entry method for scrolling
 * {@inheritDoc}
 *
 * <p>The systrace section is now closed in a finally block: the original leaked the section on
 * the early {@code return 0} paths and when the caught exception was rethrown in debug mode.
 */
@Override
protected int scrollInternalBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
    Trace.beginSection(TRACE_SCROLL);
  }
  try {
    runPreLayout(recycler, state);
    int scrolled = 0;
    try {
      if (!mNoScrolling) {
        scrolled = super.scrollInternalBy(dy, recycler, state);
      } else {
        if (getChildCount() == 0 || dy == 0) {
          return 0;
        }
        mLayoutState.mRecycle = true;
        ensureLayoutStateExpose();
        final int layoutDirection = dy > 0 ? LayoutState.LAYOUT_END : LayoutState.LAYOUT_START;
        final int absDy = Math.abs(dy);
        updateLayoutStateExpose(layoutDirection, absDy, true, state);
        final int freeScroll = mLayoutState.mScrollingOffset;
        final int consumed = freeScroll + fill(recycler, mLayoutState, state, false);
        if (consumed < 0) {
          return 0;
        }
        scrolled = absDy > consumed ? layoutDirection * consumed : dy;
      }
    } catch (Exception e) {
      Log.w(TAG, Log.getStackTraceString(e), e);
      if (sDebuggable) {
        throw e;
      }
    } finally {
      runPostLayout(recycler, state, scrolled);
    }
    return scrolled;
  } finally {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
      Trace.endSection();
    }
  }
}