android.support.v4.os.TraceCompat Java Examples

The following examples show how to use android.support.v4.os.TraceCompat. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: DispatchRunnable.java    From android-performance with MIT License 5 votes vote down vote up
/**
 * Executes the dispatched task on this worker thread: waits for its dependencies,
 * runs the task and its tail runnable, then reports completion to the dispatcher
 * (unless the task expects a main-thread callback instead).
 */
@Override
public void run() {
    TraceCompat.beginSection(mTask.getClass().getSimpleName());
    try {
        DispatcherLog.i(mTask.getClass().getSimpleName()
                + " begin run" + "  Situation  " + TaskStat.getCurrentSituation());

        Process.setThreadPriority(mTask.priority());

        long startTime = System.currentTimeMillis();

        // Block until every task this one depends on has been satisfied.
        mTask.setWaiting(true);
        mTask.waitToSatisfy();

        long waitTime = System.currentTimeMillis() - startTime;
        startTime = System.currentTimeMillis();

        // Execute the task itself.
        mTask.setRunning(true);
        mTask.run();

        // Execute the task's tail work, if any.
        Runnable tailRunnable = mTask.getTailRunnable();
        if (tailRunnable != null) {
            tailRunnable.run();
        }

        if (!mTask.needCall() || !mTask.runOnMainThread()) {
            printTaskLog(startTime, waitTime);

            TaskStat.markTaskDone();
            mTask.setFinished(true);
            if (mTaskDispatcher != null) {
                // Unblock dependents and let the dispatcher account for this task.
                mTaskDispatcher.satisfyChildren(mTask);
                mTaskDispatcher.markTaskDone(mTask);
            }
            DispatcherLog.i(mTask.getClass().getSimpleName() + " finish");
        }
    } finally {
        // Previously a throwing task left the trace section open; always close it.
        TraceCompat.endSection();
    }
}
 
Example #2
Source File: GapWorker.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Runs a nested prefetch pass on an inner RecyclerView: collects its prefetch
 * positions and creates/binds holders for them until {@code deadlineNs}, traced
 * under {@code TRACE_NESTED_PREFETCH_TAG}.
 *
 * @param innerView  nested RecyclerView to prefetch into; method is a no-op when null
 * @param deadlineNs deadline passed through to each per-position prefetch
 */
private void prefetchInnerRecyclerViewWithDeadline(@Nullable RecyclerView innerView,
        long deadlineNs) {
    if (innerView == null) {
        return;
    }

    if (innerView.mDataSetHasChangedAfterLayout
            && innerView.mChildHelper.getUnfilteredChildCount() != 0) {
        // RecyclerView has new data, but old attached views. Clear everything, so that
        // we can prefetch without partially stale data.
        innerView.removeAndRecycleViews();
    }

    // do nested prefetch!
    final LayoutPrefetchRegistryImpl innerPrefetchRegistry = innerView.mPrefetchRegistry;
    innerPrefetchRegistry.collectPrefetchPositionsFromView(innerView, true);

    if (innerPrefetchRegistry.mCount != 0) {
        try {
            TraceCompat.beginSection(RecyclerView.TRACE_NESTED_PREFETCH_TAG);
            innerView.mState.prepareForNestedPrefetch(innerView.mAdapter);
            // mPrefetchArray holds pairs; only even indices (positions) are read here.
            for (int i = 0; i < innerPrefetchRegistry.mCount * 2; i += 2) {
                // Note that we ignore immediate flag for inner items because
                // we have lower confidence they're needed next frame.
                final int innerPosition = innerPrefetchRegistry.mPrefetchArray[i];
                prefetchPositionWithDeadline(innerView, innerPosition, deadlineNs);
            }
        } finally {
            // Pairs with beginSection above even if prefetch throws.
            TraceCompat.endSection();
        }
    }
}
 
Example #3
Source File: GapWorker.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * GapWorker entry point: estimates the next frame's deadline from the most
 * recent drawing time of any visible RecyclerView, then prefetches until then.
 * The finally block resets {@code mPostTimeNs} (so the worker can be re-posted)
 * and closes the trace section, including on the early-return paths.
 */
@Override
public void run() {
    try {
        TraceCompat.beginSection(RecyclerView.TRACE_PREFETCH_TAG);

        if (mRecyclerViews.isEmpty()) {
            // abort - no work to do
            return;
        }

        // Query most recent vsync so we can predict next one. Note that drawing time not yet
        // valid in animation/input callbacks, so query it here to be safe.
        final int size = mRecyclerViews.size();
        long latestFrameVsyncMs = 0;
        for (int i = 0; i < size; i++) {
            RecyclerView view = mRecyclerViews.get(i);
            if (view.getWindowVisibility() == View.VISIBLE) {
                latestFrameVsyncMs = Math.max(view.getDrawingTime(), latestFrameVsyncMs);
            }
        }

        if (latestFrameVsyncMs == 0) {
            // abort - either no views visible, or couldn't get last vsync for estimating next
            return;
        }

        // Next frame boundary = last observed vsync + one frame interval.
        long nextFrameNs = TimeUnit.MILLISECONDS.toNanos(latestFrameVsyncMs) + mFrameIntervalNs;

        prefetch(nextFrameNs);

        // TODO: consider rescheduling self, if there's more work to do
    } finally {
        mPostTimeNs = 0;
        TraceCompat.endSection();
    }
}
 
Example #4
Source File: GapWorker.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Performs a nested prefetch pass for an inner RecyclerView, binding holders for
 * its collected prefetch positions until the supplied deadline.
 *
 * @param innerView  the nested RecyclerView; nothing happens when null
 * @param deadlineNs absolute deadline forwarded to each position prefetch
 */
private void prefetchInnerRecyclerViewWithDeadline(@Nullable RecyclerView innerView,
        long deadlineNs) {
    // No nested view, no work.
    if (innerView == null) {
        return;
    }

    // Fresh data with stale attached views would poison the prefetch; clear first.
    final boolean staleViewsAttached = innerView.mDataSetHasChangedAfterLayout
            && innerView.mChildHelper.getUnfilteredChildCount() != 0;
    if (staleViewsAttached) {
        innerView.removeAndRecycleViews();
    }

    // Gather the positions the nested view wants prefetched.
    final LayoutPrefetchRegistryImpl registry = innerView.mPrefetchRegistry;
    registry.collectPrefetchPositionsFromView(innerView, true);
    if (registry.mCount == 0) {
        return;
    }

    try {
        TraceCompat.beginSection(RecyclerView.TRACE_NESTED_PREFETCH_TAG);
        innerView.mState.prepareForNestedPrefetch(innerView.mAdapter);
        // Positions sit at even slots; the immediate flag at odd slots is
        // skipped for inner items since they're less likely needed next frame.
        for (int idx = 0; idx < registry.mCount * 2; idx += 2) {
            prefetchPositionWithDeadline(innerView, registry.mPrefetchArray[idx], deadlineNs);
        }
    } finally {
        TraceCompat.endSection();
    }
}
 
Example #5
Source File: GapWorker.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Prefetch worker entry point. Estimates the next frame deadline from the most
 * recent visible RecyclerView vsync and prefetches until then. Always resets
 * the post timestamp and closes the trace section, even on early returns.
 */
@Override
public void run() {
    try {
        TraceCompat.beginSection(RecyclerView.TRACE_PREFETCH_TAG);

        // No registered views means there is no prefetch work at all.
        if (mRecyclerViews.isEmpty()) {
            return;
        }

        // Sample the newest drawing time among visible views; drawing time is
        // not yet valid inside animation/input callbacks, so sample it here.
        long newestVsyncMs = 0;
        final int viewCount = mRecyclerViews.size();
        for (int idx = 0; idx < viewCount; idx++) {
            final RecyclerView rv = mRecyclerViews.get(idx);
            if (rv.getWindowVisibility() == View.VISIBLE) {
                final long drawTimeMs = rv.getDrawingTime();
                if (drawTimeMs > newestVsyncMs) {
                    newestVsyncMs = drawTimeMs;
                }
            }
        }

        // Zero means no visible view produced a vsync to extrapolate from.
        if (newestVsyncMs == 0) {
            return;
        }

        // Predict the next frame boundary and prefetch until it arrives.
        prefetch(TimeUnit.MILLISECONDS.toNanos(newestVsyncMs) + mFrameIntervalNs);

        // TODO: consider rescheduling self, if there's more work to do
    } finally {
        mPostTimeNs = 0;
        TraceCompat.endSection();
    }
}
 
Example #6
Source File: CaptionedImageView.java    From auid2 with Apache License 2.0 5 votes vote down vote up
/**
 * Sets the displayed image from a drawable resource and re-applies the caption blur.
 *
 * @param drawableResourceId resource id of the drawable to decode and display
 */
public void setImageResource(@DrawableRes int drawableResourceId) {
    TraceCompat.beginSection("BLUR - setImageResource");
    try {
        mDrawableResourceId = drawableResourceId;
        Bitmap bitmap = BitmapUtils.getBitmap(getResources(), mDrawableResourceId);
        mDrawable = new BitmapDrawable(getResources(), bitmap);
        mImageView.setImageDrawable(mDrawable);
        updateBlur();
    } finally {
        // Close the trace section even if decoding or blurring throws.
        TraceCompat.endSection();
    }
}
 
Example #7
Source File: TensorFlowImageClassifier.java    From pasm-yolov3-Android with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Classifies a bitmap with TensorFlow: normalizes RGB pixels into
 * {@code floatValues}, feeds them to the graph, runs inference, and returns the
 * top results above {@code THRESHOLD}, best first, capped at {@code MAX_RESULTS}.
 */
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
    // Log this method so that it can be analyzed with systrace.
    TraceCompat.beginSection("recognizeImage");

    TraceCompat.beginSection("preprocessBitmap");
    // Preprocess the image data from 0-255 int to normalized float based
    // on the provided parameters.
    bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
    for (int i = 0; i < intValues.length; ++i) {
        final int val = intValues[i];
        // Unpack ARGB channels; normalize each as (channel - mean) / std.
        floatValues[i * 3 + 0] = (((val >> 16) & 0xFF) - imageMean) / imageStd;
        floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
        floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
    }
    TraceCompat.endSection();

    // Copy the input data into TensorFlow.
    // NOTE(review): feed dims here are {1, inputSize, outputSize, 3}; sibling
    // versions of this classifier use {1, inputSize, inputSize, 3}. Confirm
    // outputSize is really intended as the second spatial dimension.
    TraceCompat.beginSection("feed");
    inferenceInterface.feed(
            inputName, floatValues, new long[]{1, inputSize, outputSize, 3});
    TraceCompat.endSection();

    // Run the inference call.
    TraceCompat.beginSection("run");
    inferenceInterface.run(outputNames, runStats);
    TraceCompat.endSection();

    // Copy the output Tensor back into the output array.
    TraceCompat.beginSection("fetch");
    inferenceInterface.fetch(outputName, outputs);
    TraceCompat.endSection();

    // Find the best classifications.
    PriorityQueue<Recognition> pq =
            new PriorityQueue<Recognition>(
                    3,
                    new Comparator<Recognition>() {
                        @Override
                        public int compare(Recognition lhs, Recognition rhs) {
                            // Intentionally reversed to put high confidence at the head of the queue.
                            return Float.compare(rhs.getConfidence(), lhs.getConfidence());
                        }
                    });
    for (int i = 0; i < outputs.length; ++i) {
        if (outputs[i] > THRESHOLD) {
            pq.add(
                    new Recognition(
                            "" + i, labels.size() > i ? labels.get(i) : "unknown", outputs[i], null));
        }
    }
    // Drain at most MAX_RESULTS entries from the queue, best first.
    final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
    int recognitionsSize = Math.min(pq.size(), MAX_RESULTS);
    for (int i = 0; i < recognitionsSize; ++i) {
        recognitions.add(pq.poll());
    }
    TraceCompat.endSection(); // "recognizeImage"
    return recognitions;
}
 
Example #8
Source File: LinearLayoutManager.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * The magic functions :). Fills the given layout, defined by the layoutState. This is fairly
 * independent from the rest of the {@link android.support.v7.widget.LinearLayoutManager}
 * and with little change, can be made publicly available as a helper class.
 *
 * @param recycler        Current recycler that is attached to RecyclerView
 * @param layoutState     Configuration on how we should fill out the available space.
 * @param state           Context passed by the RecyclerView to control scroll steps.
 * @param stopOnFocusable If true, filling stops in the first focusable new child
 * @return Number of pixels that it added. Useful for scroll functions.
 */
int fill(RecyclerView.Recycler recycler, LayoutState layoutState,
        RecyclerView.State state, boolean stopOnFocusable) {
    // max offset we should set is mFastScroll + available
    final int start = layoutState.mAvailable;
    if (layoutState.mScrollingOffset != LayoutState.SCROLLING_OFFSET_NaN) {
        // TODO ugly bug fix. should not happen
        if (layoutState.mAvailable < 0) {
            layoutState.mScrollingOffset += layoutState.mAvailable;
        }
        // Recycle out-of-bounds children before laying out new ones.
        recycleByLayoutState(recycler, layoutState);
    }
    int remainingSpace = layoutState.mAvailable + layoutState.mExtra;
    LayoutChunkResult layoutChunkResult = mLayoutChunkResult;
    // Lay out one chunk (typically one child) per iteration until space or
    // adapter items run out. mInfinite forces filling regardless of space.
    while ((layoutState.mInfinite || remainingSpace > 0) && layoutState.hasMore(state)) {
        layoutChunkResult.resetInternal();
        if (VERBOSE_TRACING) {
            TraceCompat.beginSection("LLM LayoutChunk");
        }
        layoutChunk(recycler, state, layoutState, layoutChunkResult);
        if (VERBOSE_TRACING) {
            TraceCompat.endSection();
        }
        if (layoutChunkResult.mFinished) {
            break;
        }
        layoutState.mOffset += layoutChunkResult.mConsumed * layoutState.mLayoutDirection;
        /**
         * Consume the available space if:
         * * layoutChunk did not request to be ignored
         * * OR we are laying out scrap children
         * * OR we are not doing pre-layout
         */
        if (!layoutChunkResult.mIgnoreConsumed || mLayoutState.mScrapList != null
                || !state.isPreLayout()) {
            layoutState.mAvailable -= layoutChunkResult.mConsumed;
            // we keep a separate remaining space because mAvailable is important for recycling
            remainingSpace -= layoutChunkResult.mConsumed;
        }

        if (layoutState.mScrollingOffset != LayoutState.SCROLLING_OFFSET_NaN) {
            layoutState.mScrollingOffset += layoutChunkResult.mConsumed;
            if (layoutState.mAvailable < 0) {
                layoutState.mScrollingOffset += layoutState.mAvailable;
            }
            // Recycle again now that new children have consumed space.
            recycleByLayoutState(recycler, layoutState);
        }
        if (stopOnFocusable && layoutChunkResult.mFocusable) {
            break;
        }
    }
    if (DEBUG) {
        validateChildOrder();
    }
    // Pixels consumed = starting budget minus what is still available.
    return start - layoutState.mAvailable;
}
 
Example #9
Source File: LinearLayoutManager.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * The magic functions :). Fills the given layout, defined by the layoutState. This is fairly
 * independent from the rest of the {@link android.support.v7.widget.LinearLayoutManager}
 * and with little change, can be made publicly available as a helper class.
 *
 * @param recycler        Current recycler that is attached to RecyclerView
 * @param layoutState     Configuration on how we should fill out the available space.
 * @param state           Context passed by the RecyclerView to control scroll steps.
 * @param stopOnFocusable If true, filling stops in the first focusable new child
 * @return Number of pixels that it added. Useful for scroll functions.
 */
int fill(RecyclerView.Recycler recycler, LayoutState layoutState,
        RecyclerView.State state, boolean stopOnFocusable) {
    // max offset we should set is mFastScroll + available
    final int start = layoutState.mAvailable;
    if (layoutState.mScrollingOffset != LayoutState.SCROLLING_OFFSET_NaN) {
        // TODO ugly bug fix. should not happen
        if (layoutState.mAvailable < 0) {
            layoutState.mScrollingOffset += layoutState.mAvailable;
        }
        // Recycle out-of-bounds children before laying out new ones.
        recycleByLayoutState(recycler, layoutState);
    }
    int remainingSpace = layoutState.mAvailable + layoutState.mExtra;
    LayoutChunkResult layoutChunkResult = mLayoutChunkResult;
    // Lay out one chunk (typically one child) per iteration until space or
    // adapter items run out. mInfinite forces filling regardless of space.
    while ((layoutState.mInfinite || remainingSpace > 0) && layoutState.hasMore(state)) {
        layoutChunkResult.resetInternal();
        if (VERBOSE_TRACING) {
            TraceCompat.beginSection("LLM LayoutChunk");
        }
        layoutChunk(recycler, state, layoutState, layoutChunkResult);
        if (VERBOSE_TRACING) {
            TraceCompat.endSection();
        }
        if (layoutChunkResult.mFinished) {
            break;
        }
        layoutState.mOffset += layoutChunkResult.mConsumed * layoutState.mLayoutDirection;
        /**
         * Consume the available space if:
         * * layoutChunk did not request to be ignored
         * * OR we are laying out scrap children
         * * OR we are not doing pre-layout
         */
        if (!layoutChunkResult.mIgnoreConsumed || mLayoutState.mScrapList != null
                || !state.isPreLayout()) {
            layoutState.mAvailable -= layoutChunkResult.mConsumed;
            // we keep a separate remaining space because mAvailable is important for recycling
            remainingSpace -= layoutChunkResult.mConsumed;
        }

        if (layoutState.mScrollingOffset != LayoutState.SCROLLING_OFFSET_NaN) {
            layoutState.mScrollingOffset += layoutChunkResult.mConsumed;
            if (layoutState.mAvailable < 0) {
                layoutState.mScrollingOffset += layoutState.mAvailable;
            }
            // Recycle again now that new children have consumed space.
            recycleByLayoutState(recycler, layoutState);
        }
        if (stopOnFocusable && layoutChunkResult.mFocusable) {
            break;
        }
    }
    if (DEBUG) {
        validateChildOrder();
    }
    // Pixels consumed = starting budget minus what is still available.
    return start - layoutState.mAvailable;
}
 
Example #10
Source File: TensorFlowImageClassifier.java    From AndroidTensorFlowMachineLearningExample with Apache License 2.0 4 votes vote down vote up
/**
 * Classifies a bitmap with TensorFlow: normalizes RGB pixels, feeds them to the
 * graph, runs inference, and returns the best results above {@code THRESHOLD},
 * highest confidence first, capped at {@code MAX_RESULTS}.
 * Each systrace section is now wrapped in try/finally so sections stay balanced
 * even if TensorFlow throws mid-inference.
 */
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
    // Log this method so that it can be analyzed with systrace.
    TraceCompat.beginSection("recognizeImage");
    try {
        // Preprocess the image data from 0-255 int to normalized float based
        // on the provided parameters.
        TraceCompat.beginSection("preprocessBitmap");
        try {
            bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
            for (int i = 0; i < intValues.length; ++i) {
                final int val = intValues[i];
                // Unpack ARGB channels; normalize each as (channel - mean) / std.
                floatValues[i * 3 + 0] = (((val >> 16) & 0xFF) - imageMean) / imageStd;
                floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
                floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
            }
        } finally {
            TraceCompat.endSection();
        }

        // Copy the input data into TensorFlow.
        TraceCompat.beginSection("feed");
        try {
            inferenceInterface.feed(
                    inputName, floatValues, new long[]{1, inputSize, inputSize, 3});
        } finally {
            TraceCompat.endSection();
        }

        // Run the inference call.
        TraceCompat.beginSection("run");
        try {
            inferenceInterface.run(outputNames, runStats);
        } finally {
            TraceCompat.endSection();
        }

        // Copy the output Tensor back into the output array.
        TraceCompat.beginSection("fetch");
        try {
            inferenceInterface.fetch(outputName, outputs);
        } finally {
            TraceCompat.endSection();
        }

        // Find the best classifications.
        PriorityQueue<Recognition> pq =
                new PriorityQueue<Recognition>(
                        3,
                        new Comparator<Recognition>() {
                            @Override
                            public int compare(Recognition lhs, Recognition rhs) {
                                // Intentionally reversed to put high confidence at the head of the queue.
                                return Float.compare(rhs.getConfidence(), lhs.getConfidence());
                            }
                        });
        for (int i = 0; i < outputs.length; ++i) {
            if (outputs[i] > THRESHOLD) {
                pq.add(
                        new Recognition(
                                "" + i, labels.size() > i ? labels.get(i) : "unknown", outputs[i], null));
            }
        }
        // Drain at most MAX_RESULTS entries, best first.
        final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
        int recognitionsSize = Math.min(pq.size(), MAX_RESULTS);
        for (int i = 0; i < recognitionsSize; ++i) {
            recognitions.add(pq.poll());
        }
        return recognitions;
    } finally {
        TraceCompat.endSection(); // "recognizeImage"
    }
}
 
Example #11
Source File: TensorFlowImageClassifier.java    From AndroidTensorFlowMNISTExample with Apache License 2.0 4 votes vote down vote up
/**
 * Classifies a flat pixel buffer with TensorFlow: feeds the pixels to the
 * graph, runs inference, fetches the output scores, and returns at most
 * {@code MAX_RESULTS} recognitions above {@code THRESHOLD}, best first.
 */
@Override
public List<Recognition> recognizeImage(final float[] pixels) {
    // Log this method so that it can be analyzed with systrace.
    TraceCompat.beginSection("recognizeImage");

    // Copy the input data into TensorFlow.
    TraceCompat.beginSection("feed");
    inferenceInterface.feed(inputName, pixels, new long[]{inputSize * inputSize});
    TraceCompat.endSection();

    // Run the inference call.
    TraceCompat.beginSection("run");
    inferenceInterface.run(outputNames, runStats);
    TraceCompat.endSection();

    // Copy the output Tensor back into the output array.
    TraceCompat.beginSection("fetch");
    inferenceInterface.fetch(outputName, outputs);
    TraceCompat.endSection();

    // Rank candidates above THRESHOLD; the queue head holds the highest confidence.
    final Comparator<Recognition> byConfidenceDesc = new Comparator<Recognition>() {
        @Override
        public int compare(Recognition lhs, Recognition rhs) {
            // Reversed so high confidence sits at the head of the queue.
            return Float.compare(rhs.getConfidence(), lhs.getConfidence());
        }
    };
    final PriorityQueue<Recognition> ranked =
            new PriorityQueue<Recognition>(3, byConfidenceDesc);
    for (int index = 0; index < outputs.length; ++index) {
        if (outputs[index] > THRESHOLD) {
            final String label = labels.size() > index ? labels.get(index) : "unknown";
            ranked.add(new Recognition("" + index, label, outputs[index], null));
        }
    }

    // Drain at most MAX_RESULTS entries, best first.
    final ArrayList<Recognition> results = new ArrayList<Recognition>();
    final int limit = Math.min(ranked.size(), MAX_RESULTS);
    for (int taken = 0; taken < limit; ++taken) {
        results.add(ranked.poll());
    }
    TraceCompat.endSection(); // "recognizeImage"
    return results;
}
 
Example #12
Source File: CaptionedImageView.java    From auid2 with Apache License 2.0 4 votes vote down vote up
/**
 * Blurs the strip of the image behind the caption TextView and overlays a scrim.
 * Results are cached via BitmapUtils (keyed by width) so repeated calls with the
 * same geometry reuse the previously blurred bitmap. No-op until both the
 * TextView and ImageView have non-zero measured heights.
 */
private void updateBlur() {
    if (!(mDrawable instanceof BitmapDrawable)) {
        return;
    }
    final int textViewHeight = mTextView.getHeight();
    final int imageViewHeight = mImageView.getHeight();
    if (textViewHeight == 0 || imageViewHeight == 0) {
        // Views not laid out yet; nothing to blur against.
        return;
    }

    // Get the Bitmap
    final BitmapDrawable bitmapDrawable = (BitmapDrawable) mDrawable;
    final Bitmap originalBitmap = bitmapDrawable.getBitmap();

    // Determine the size of the TextView compared to the height of the ImageView
    final float ratio = (float) textViewHeight / imageViewHeight;

    // Calculate the height as a ratio of the Bitmap
    final int height = (int) (ratio * originalBitmap.getHeight());
    final int width = originalBitmap.getWidth();
    final String blurKey = getBlurKey(width);
    Bitmap newBitmap = BitmapUtils.getBitmap(blurKey);
    if (newBitmap != null) {
        // Cache hit: reuse the previously blurred result.
        mImageView.setImageBitmap(newBitmap);
        return;
    }

    // The y position is the number of pixels height represents from the bottom of the Bitmap
    final int y = originalBitmap.getHeight() - height;

    TraceCompat.beginSection("BLUR - createBitmaps");
    final Bitmap portionToBlur = Bitmap.createBitmap(originalBitmap, 0, y, originalBitmap.getWidth(), height);
    final Bitmap blurredBitmap = Bitmap.createBitmap(portionToBlur.getWidth(), height, Bitmap.Config.ARGB_8888);
    TraceCompat.endSection();

    // Use RenderScript to blur the pixels
    TraceCompat.beginSection("BLUR - RenderScript");
    RenderScript rs = RenderScript.create(getContext());
    ScriptIntrinsicBlur theIntrinsic = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
    TraceCompat.beginSection("BLUR - RenderScript Allocation");
    Allocation tmpIn = Allocation.createFromBitmap(rs, portionToBlur);
    // Fix internal trace that isn't ended
    // NOTE(review): the extra endSection here and below intentionally close
    // trace sections that Allocation.createFromBitmap opens internally without
    // ending — confirm this workaround is still needed on the target platform.
    TraceCompat.endSection();
    Allocation tmpOut = Allocation.createFromBitmap(rs, blurredBitmap);
    // Fix internal trace that isn't ended
    TraceCompat.endSection();
    // Closes "BLUR - RenderScript Allocation".
    TraceCompat.endSection();
    theIntrinsic.setRadius(25f);
    theIntrinsic.setInput(tmpIn);
    TraceCompat.beginSection("BLUR - RenderScript forEach");
    theIntrinsic.forEach(tmpOut);
    TraceCompat.endSection();
    TraceCompat.beginSection("BLUR - RenderScript copyTo");
    tmpOut.copyTo(blurredBitmap);
    TraceCompat.endSection();
    // Darken the blurred strip with the scrim color.
    new Canvas(blurredBitmap).drawColor(mScrimColor);
    // Closes "BLUR - RenderScript".
    TraceCompat.endSection();

    // Create the new bitmap using the old plus the blurred portion and display it
    TraceCompat.beginSection("BLUR - Finalize image");
    newBitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
    final Canvas canvas = new Canvas(newBitmap);
    canvas.drawBitmap(blurredBitmap, 0, y, new Paint());
    BitmapUtils.cacheBitmap(blurKey, newBitmap);
    mTextView.setBackground(null);
    mImageView.setImageBitmap(newBitmap);
    TraceCompat.endSection();
}