
drm: Define drm_mm_for_each_node_in_range()

Some clients would like to iterate over every node within a certain
range. Make a nice little macro for them to hide the mixing of the
rbtree search and linear walk.

v2: Blurb

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Cc: Daniel Vetter <daniel.vetter@ffwll.ch>
Cc: dri-devel@lists.freedesktop.org
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Link: http://patchwork.freedesktop.org/patch/msgid/20161123141118.23876-1-chris@chris-wilson.co.uk
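As a quick illustration (not part of the patch), a caller might use the new iterator like this. The helper name count_nodes_in_range is hypothetical; only drm_mm_for_each_node_in_range() and the drm_mm_node/drm_mm types it relies on come from drm_mm itself.

/*
 * Hypothetical example: count the allocated nodes that overlap the
 * range [start, end) of a drm_mm allocator. The iterator locates the
 * first overlapping node via the interval tree, then walks the sorted
 * node_list linearly until a node starts at or beyond @end.
 */
static unsigned int count_nodes_in_range(struct drm_mm *mm,
					 u64 start, u64 end)
{
	struct drm_mm_node *node;
	unsigned int count = 0;

	drm_mm_for_each_node_in_range(node, mm, start, end)
		count++;

	return count;
}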
Chris Wilson 2016-11-23 14:11:14 +00:00 committed by Daniel Vetter
parent 6a8a66eda1
commit 522e85dd86
2 changed files with 21 additions and 12 deletions

drivers/gpu/drm/drm_mm.c

@@ -174,19 +174,12 @@ INTERVAL_TREE_DEFINE(struct drm_mm_node, rb,
 		     START, LAST, static inline, drm_mm_interval_tree)
 
 struct drm_mm_node *
-drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last)
+__drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last)
 {
 	return drm_mm_interval_tree_iter_first(&mm->interval_tree,
 					       start, last);
 }
-EXPORT_SYMBOL(drm_mm_interval_first);
-
-struct drm_mm_node *
-drm_mm_interval_next(struct drm_mm_node *node, u64 start, u64 last)
-{
-	return drm_mm_interval_tree_iter_next(node, start, last);
-}
-EXPORT_SYMBOL(drm_mm_interval_next);
+EXPORT_SYMBOL(__drm_mm_interval_first);
 
 static void drm_mm_interval_tree_add_node(struct drm_mm_node *hole_node,
 					  struct drm_mm_node *node)

include/drm/drm_mm.h

@@ -308,10 +308,26 @@ void drm_mm_takedown(struct drm_mm *mm);
 bool drm_mm_clean(struct drm_mm *mm);
 
 struct drm_mm_node *
-drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last);
-struct drm_mm_node *
-drm_mm_interval_next(struct drm_mm_node *node, u64 start, u64 last);
+__drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last);
+
+/**
+ * drm_mm_for_each_node_in_range - iterator to walk over a range of
+ * allocated nodes
+ * @node: drm_mm_node structure to assign to in each iteration step
+ * @mm: drm_mm allocator to walk
+ * @start: starting offset, the first node will overlap this
+ * @end: ending offset, the last node will start before this (but may overlap)
+ *
+ * This iterator walks over all nodes in the range allocator that lie
+ * between @start and @end. It is implemented similarly to list_for_each(),
+ * but using the internal interval tree to accelerate the search for the
+ * starting node, and so not safe against removal of elements. It assumes
+ * that @end is within (or is the upper limit of) the drm_mm allocator.
+ */
+#define drm_mm_for_each_node_in_range(node, mm, start, end)	\
+	for (node = __drm_mm_interval_first((mm), (start), (end)-1); \
+	     node && node->start < (end);			\
+	     node = list_next_entry(node, node_list))		\
 
 void drm_mm_init_scan(struct drm_mm *mm,
 		      u64 size,
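The kernel-doc above notes the walk is not safe against removal of elements. As a hedged sketch (not from this patch), a caller that wants to drop every node in a range could restart the interval-tree lookup after each removal instead of continuing the list walk; evict_range() is a hypothetical name, and drm_mm_remove_node() is the existing drm_mm removal helper.

/*
 * Hypothetical sketch: remove every allocated node overlapping
 * [start, end). Restarting __drm_mm_interval_first() after each
 * removal avoids walking a node_list that the removal just changed.
 * Freeing the object that embeds each node is left out here.
 */
static void evict_range(struct drm_mm *mm, u64 start, u64 end)
{
	struct drm_mm_node *node;

	while ((node = __drm_mm_interval_first(mm, start, end - 1)) != NULL &&
	       node->start < end)
		drm_mm_remove_node(node);
}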