/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __PERF_BLOCK_RANGE_H
#define __PERF_BLOCK_RANGE_H

#include <stdbool.h>
#include <linux/rbtree.h>
#include <linux/types.h>

struct symbol;

/*
 * struct block_range - non-overlapping parts of basic blocks
 * @node:	treenode
 * @start:	inclusive start of range
 * @end:	inclusive end of range
 * @is_target:	@start is a jump target
 * @is_branch:	@end is a branch instruction
 * @coverage:	number of blocks that cover this range
 * @taken:	number of times the branch is taken (requires @is_branch)
 * @pred:	number of times the taken branch was predicted
 */
struct block_range {
	struct rb_node node;

	struct symbol *sym;

	u64 start;
	u64 end;

	int is_target, is_branch;

	u64 coverage;
	u64 entry;
	u64 taken;
	u64 pred;
};

static inline struct block_range *block_range__next(struct block_range *br)
{
	struct rb_node *n = rb_next(&br->node);
	if (!n)
		return NULL;
	return rb_entry(n, struct block_range, node);
}

struct block_range_iter {
	struct block_range *start;
	struct block_range *end;
};

static inline struct block_range *block_range_iter(struct block_range_iter *iter)
{
	return iter->start;
}

static inline bool block_range_iter__next(struct block_range_iter *iter)
{
	if (iter->start == iter->end)
		return false;

	iter->start = block_range__next(iter->start);
	return true;
}

static inline bool block_range_iter__valid(struct block_range_iter *iter)
{
	if (!iter->start || !iter->end)
		return false;
	return true;
}

extern struct block_range *block_range__find(u64 addr);
extern struct block_range_iter block_range__create(u64 start, u64 end);
extern double block_range__coverage(struct block_range *br);

#endif /* __PERF_BLOCK_RANGE_H */
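
/*
 * Usage sketch (not part of the original header; the names start_addr,
 * end_addr and sym are hypothetical inputs): block_range__create() is
 * expected to hand back an iterator over the non-overlapping sub-ranges
 * covering [start_addr, end_addr], and a caller would typically walk every
 * sub-range and bump its counters, roughly like this:
 *
 *	struct block_range_iter iter = block_range__create(start_addr, end_addr);
 *	struct block_range *entry;
 *
 *	if (!block_range_iter__valid(&iter))
 *		return;
 *
 *	do {
 *		entry = block_range_iter(&iter);
 *		entry->coverage++;
 *		entry->sym = sym;
 *	} while (block_range_iter__next(&iter));
 *
 * The do/while shape matters: block_range_iter__next() returns false once
 * iter->start has caught up with iter->end, but only after the loop body has
 * visited that last sub-range, so both endpoints are processed exactly once.
 */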